diff --git a/docs/asset/dynamodb.rst b/docs/asset/dynamodb.rst new file mode 100644 index 000000000..291bb6814 --- /dev/null +++ b/docs/asset/dynamodb.rst @@ -0,0 +1,10 @@ +.. _dynamodb: + +DynamoDB +======== + +.. module:: pyatlan.model.assets + :no-index: + +.. autoclass:: DynamoDB + :members: diff --git a/docs/asset/dynamodbglobalsecondaryindex.rst b/docs/asset/dynamodbglobalsecondaryindex.rst new file mode 100644 index 000000000..7852fed27 --- /dev/null +++ b/docs/asset/dynamodbglobalsecondaryindex.rst @@ -0,0 +1,10 @@ +.. _dynamodbglobalsecondaryindex: + +DynamoDBGlobalSecondaryIndex +============================ + +.. module:: pyatlan.model.assets + :no-index: + +.. autoclass:: DynamoDBGlobalSecondaryIndex + :members: diff --git a/docs/asset/dynamodblocalsecondaryindex.rst b/docs/asset/dynamodblocalsecondaryindex.rst new file mode 100644 index 000000000..9db7b0ced --- /dev/null +++ b/docs/asset/dynamodblocalsecondaryindex.rst @@ -0,0 +1,10 @@ +.. _dynamodblocalsecondaryindex: + +DynamoDBLocalSecondaryIndex +=========================== + +.. module:: pyatlan.model.assets + :no-index: + +.. autoclass:: DynamoDBLocalSecondaryIndex + :members: diff --git a/docs/asset/dynamodbsecondaryindex.rst b/docs/asset/dynamodbsecondaryindex.rst new file mode 100644 index 000000000..ce073a83b --- /dev/null +++ b/docs/asset/dynamodbsecondaryindex.rst @@ -0,0 +1,10 @@ +.. _dynamodbsecondaryindex: + +DynamoDBSecondaryIndex +====================== + +.. module:: pyatlan.model.assets + :no-index: + +.. autoclass:: DynamoDBSecondaryIndex + :members: diff --git a/docs/asset/dynamodbtable.rst b/docs/asset/dynamodbtable.rst new file mode 100644 index 000000000..f3c3f4ccd --- /dev/null +++ b/docs/asset/dynamodbtable.rst @@ -0,0 +1,10 @@ +.. _dynamodbtable: + +DynamoDBTable +============= + +.. module:: pyatlan.model.assets + :no-index: + +.. autoclass:: DynamoDBTable + :members: diff --git a/docs/asset/tagattachment.rst b/docs/asset/tagattachment.rst new file mode 100644 index 000000000..e154d1387 --- /dev/null +++ b/docs/asset/tagattachment.rst @@ -0,0 +1,10 @@ +.. _tagattachment: + +TagAttachment +============= + +.. module:: pyatlan.model.assets + :no-index: + +.. 
autoclass:: TagAttachment + :members: diff --git a/docs/assets.rst b/docs/assets.rst index 778f6c3f7..eb10b6cba 100644 --- a/docs/assets.rst +++ b/docs/assets.rst @@ -55,6 +55,11 @@ You can interact with all of the following different kinds of assets: asset/dbtsource asset/dbttag asset/dbttest + asset/dynamodb + asset/dynamodbglobalsecondaryindex + asset/dynamodblocalsecondaryindex + asset/dynamodbsecondaryindex + asset/dynamodbtable asset/eventstore asset/file asset/folder @@ -209,6 +214,7 @@ You can interact with all of the following different kinds of assets: asset/tableauworkbook asset/tableauworksheet asset/tag + asset/tagattachment asset/thoughtspot asset/thoughtspotanswer asset/thoughtspotdashlet diff --git a/pyatlan/generator/templates/imports.jinja2 b/pyatlan/generator/templates/imports.jinja2 index c8c397658..d2c010f7a 100644 --- a/pyatlan/generator/templates/imports.jinja2 +++ b/pyatlan/generator/templates/imports.jinja2 @@ -5,7 +5,7 @@ import sys import uuid from datetime import datetime from io import StringIO -from typing import Any, ClassVar, Optional, Set, Type, TypeVar, TYPE_CHECKING, cast +from typing import Any, ClassVar, Optional, Set, Type, TypeVar, TYPE_CHECKING, cast, overload from urllib.parse import quote, unquote from pydantic import Field, PrivateAttr, StrictStr, root_validator, validator @@ -39,6 +39,8 @@ from pyatlan.model.enums import ( DataProductSensitivity, DataProductStatus, DataAction, + DynamoDBStatus, + DynamoDBSecondaryIndexProjectionType, EntityStatus, FileType, GoogleDatastudioAssetType, diff --git a/pyatlan/generator/templates/methods/asset/asset.jinja2 b/pyatlan/generator/templates/methods/asset/asset.jinja2 index 936291c4c..4630b7807 100644 --- a/pyatlan/generator/templates/methods/asset/asset.jinja2 +++ b/pyatlan/generator/templates/methods/asset/asset.jinja2 @@ -58,10 +58,13 @@ @classmethod def _convert_to_real_type_(cls, data): - if isinstance(data, Asset): return data + # Handle the case where asset data is a list + if isinstance(data, list): + return [cls._convert_to_real_type_(item) for item in data] + data_type = ( data.get("type_name") if "type_name" in data else data.get("typeName") ) @@ -73,8 +76,7 @@ sub = cls._subtypes_.get(data_type) if sub is None: - sub = getattr(sys.modules['pyatlan.model.assets'], data_type) - + sub = getattr(sys.modules["pyatlan.model.assets"], data_type) if sub is None: raise TypeError(f"Unsupport sub-type: {data_type}") diff --git a/pyatlan/model/assets/__init__.py b/pyatlan/model/assets/__init__.py index c17a47964..55a857b41 100644 --- a/pyatlan/model/assets/__init__.py +++ b/pyatlan/model/assets/__init__.py @@ -59,61 +59,63 @@ validate_single_required_field, ) from .asset01 import DataSet -from .asset02 import Connection -from .asset04 import Badge -from .asset05 import AccessControl, AuthPolicy -from .asset06 import ProcessExecution -from .asset07 import AuthService -from .asset08 import Cloud -from .asset09 import Infrastructure -from .asset10 import BIProcess -from .asset11 import DbtProcess -from .asset12 import Persona -from .asset13 import Purpose -from .asset14 import Collection -from .asset16 import ObjectStore -from .asset18 import BI -from .asset19 import SaaS -from .asset22 import EventStore -from .asset23 import NoSQL -from .asset26 import Insight -from .asset27 import API -from .asset30 import Google -from .asset31 import Azure -from .asset32 import AWS -from .asset33 import DbtColumnProcess -from .asset34 import S3 -from .asset35 import ADLS -from .asset36 import GCS -from .asset39 import Preset 
-from .asset40 import Mode -from .asset41 import Sigma -from .asset42 import Tableau -from .asset43 import Looker -from .asset44 import Redash -from .asset45 import Sisense -from .asset46 import DataStudio -from .asset47 import Metabase -from .asset48 import QuickSight -from .asset49 import Thoughtspot -from .asset50 import PowerBI -from .asset51 import MicroStrategy -from .asset52 import Qlik -from .asset53 import Salesforce -from .asset54 import ReadmeTemplate -from .asset55 import Kafka -from .asset56 import MongoDB -from .asset57 import DbtTag -from .asset58 import APIPath, APISpec -from .asset59 import DataStudioAsset -from .asset60 import S3Bucket, S3Object -from .asset61 import ADLSAccount, ADLSContainer, ADLSObject -from .asset62 import GCSBucket, GCSObject -from .asset63 import PresetChart, PresetDashboard, PresetDataset, PresetWorkspace -from .asset64 import ModeChart, ModeCollection, ModeQuery, ModeReport, ModeWorkspace -from .asset65 import SigmaDataset, SigmaDatasetColumn -from .asset66 import SigmaDataElement, SigmaDataElementField, SigmaPage, SigmaWorkbook -from .asset67 import ( +from .asset02 import TagAttachment +from .asset03 import Connection +from .asset05 import Badge +from .asset06 import AccessControl, AuthPolicy +from .asset07 import ProcessExecution +from .asset08 import AuthService +from .asset09 import Cloud +from .asset10 import Infrastructure +from .asset11 import BIProcess +from .asset12 import DbtProcess +from .asset13 import Persona +from .asset14 import Purpose +from .asset15 import Collection +from .asset17 import ObjectStore +from .asset19 import BI +from .asset20 import SaaS +from .asset23 import EventStore +from .asset24 import NoSQL +from .asset27 import Insight +from .asset28 import API +from .asset31 import Google +from .asset32 import Azure +from .asset33 import AWS +from .asset34 import DbtColumnProcess +from .asset35 import S3 +from .asset36 import ADLS +from .asset37 import GCS +from .asset40 import Preset +from .asset41 import Mode +from .asset42 import Sigma +from .asset43 import Tableau +from .asset44 import Looker +from .asset45 import Redash +from .asset46 import Sisense +from .asset47 import DataStudio +from .asset48 import Metabase +from .asset49 import QuickSight +from .asset50 import Thoughtspot +from .asset51 import PowerBI +from .asset52 import MicroStrategy +from .asset53 import Qlik +from .asset54 import Salesforce +from .asset55 import ReadmeTemplate +from .asset56 import Kafka +from .asset57 import DynamoDB +from .asset58 import MongoDB +from .asset59 import DbtTag +from .asset60 import APIPath, APISpec +from .asset61 import DataStudioAsset +from .asset62 import S3Bucket, S3Object +from .asset63 import ADLSAccount, ADLSContainer, ADLSObject +from .asset64 import GCSBucket, GCSObject +from .asset65 import PresetChart, PresetDashboard, PresetDataset, PresetWorkspace +from .asset66 import ModeChart, ModeCollection, ModeQuery, ModeReport, ModeWorkspace +from .asset67 import SigmaDataset, SigmaDatasetColumn +from .asset68 import SigmaDataElement, SigmaDataElementField, SigmaPage, SigmaWorkbook +from .asset69 import ( TableauCalculatedField, TableauDashboard, TableauDatasource, @@ -124,8 +126,8 @@ TableauWorkbook, TableauWorksheet, ) -from .asset68 import TableauMetric -from .asset69 import ( +from .asset70 import TableauMetric +from .asset71 import ( LookerDashboard, LookerExplore, LookerField, @@ -137,17 +139,17 @@ LookerTile, LookerView, ) -from .asset70 import RedashDashboard -from .asset71 import RedashQuery, RedashVisualization 
-from .asset72 import ( +from .asset72 import RedashDashboard +from .asset73 import RedashQuery, RedashVisualization +from .asset74 import ( SisenseDashboard, SisenseDatamodel, SisenseDatamodelTable, SisenseFolder, SisenseWidget, ) -from .asset73 import MetabaseCollection, MetabaseDashboard, MetabaseQuestion -from .asset74 import ( +from .asset75 import MetabaseCollection, MetabaseDashboard, MetabaseQuestion +from .asset76 import ( QuickSightAnalysis, QuickSightAnalysisVisual, QuickSightDashboard, @@ -156,9 +158,9 @@ QuickSightDatasetField, QuickSightFolder, ) -from .asset75 import ThoughtspotDashlet, ThoughtspotLiveboard -from .asset76 import ThoughtspotAnswer -from .asset77 import ( +from .asset77 import ThoughtspotDashlet, ThoughtspotLiveboard +from .asset78 import ThoughtspotAnswer +from .asset79 import ( PowerBIColumn, PowerBIDashboard, PowerBIDataflow, @@ -171,7 +173,7 @@ PowerBITile, PowerBIWorkspace, ) -from .asset78 import ( +from .asset80 import ( MicroStrategyAttribute, MicroStrategyCube, MicroStrategyDocument, @@ -182,16 +184,22 @@ MicroStrategyReport, MicroStrategyVisualization, ) -from .asset79 import QlikApp, QlikChart, QlikDataset, QlikSheet, QlikSpace -from .asset80 import ( +from .asset81 import QlikApp, QlikChart, QlikDataset, QlikSheet, QlikSpace +from .asset82 import ( SalesforceDashboard, SalesforceField, SalesforceObject, SalesforceOrganization, SalesforceReport, ) -from .asset82 import MongoDBCollection, MongoDBDatabase -from .asset83 import KafkaConsumerGroup, KafkaTopic -from .asset84 import QlikStream -from .asset85 import AzureEventHub -from .asset86 import AzureEventHubConsumerGroup +from .asset84 import MongoDBCollection, MongoDBDatabase +from .asset85 import DynamoDBSecondaryIndex +from .asset86 import ( + DynamoDBGlobalSecondaryIndex, + DynamoDBLocalSecondaryIndex, + DynamoDBTable, +) +from .asset87 import KafkaConsumerGroup, KafkaTopic +from .asset88 import QlikStream +from .asset89 import AzureEventHub +from .asset90 import AzureEventHubConsumerGroup diff --git a/pyatlan/model/assets/asset00.py b/pyatlan/model/assets/asset00.py index 5fa4c885c..3109f174d 100644 --- a/pyatlan/model/assets/asset00.py +++ b/pyatlan/model/assets/asset00.py @@ -514,119 +514,119 @@ def __setattr__(self, name, value): "name", "name.keyword", "name", "name.stemmed" ) """ - TBC + Name of this asset. Fallback for display purposes, if displayName is empty. """ DISPLAY_NAME: ClassVar[KeywordTextField] = KeywordTextField( "displayName", "displayName.keyword", "displayName" ) """ - TBC + Human-readable name of this asset used for display purposes (in user interface). """ DESCRIPTION: ClassVar[KeywordTextField] = KeywordTextField( "description", "description.keyword", "description" ) """ - TBC - """ + Description of this asset, for example as crawled from a source. Fallback for display purposes, if userDescription is empty. + """ # noqa: E501 USER_DESCRIPTION: ClassVar[KeywordTextField] = KeywordTextField( "userDescription", "userDescription.keyword", "userDescription" ) """ - TBC - """ + Description of this asset, as provided by a user. If present, this will be used for the description in user interface. + """ # noqa: E501 TENANT_ID: ClassVar[KeywordField] = KeywordField("tenantId", "tenantId") """ - TBC + Name of the Atlan workspace in which this asset exists. """ CERTIFICATE_STATUS: ClassVar[KeywordTextField] = KeywordTextField( "certificateStatus", "certificateStatus", "certificateStatus.text" ) """ - TBC + Status of this asset's certification. 
""" CERTIFICATE_STATUS_MESSAGE: ClassVar[KeywordField] = KeywordField( "certificateStatusMessage", "certificateStatusMessage" ) """ - TBC + Human-readable descriptive message used to provide further detail to certificateStatus. """ CERTIFICATE_UPDATED_BY: ClassVar[KeywordField] = KeywordField( "certificateUpdatedBy", "certificateUpdatedBy" ) """ - TBC + Name of the user who last updated the certification of this asset. """ CERTIFICATE_UPDATED_AT: ClassVar[NumericField] = NumericField( "certificateUpdatedAt", "certificateUpdatedAt" ) """ - TBC + Time (epoch) at which the certification was last updated, in milliseconds. """ ANNOUNCEMENT_TITLE: ClassVar[KeywordField] = KeywordField( "announcementTitle", "announcementTitle" ) """ - TBC + Brief title for the announcement on this asset. Required when announcementType is specified. """ ANNOUNCEMENT_MESSAGE: ClassVar[KeywordField] = KeywordField( "announcementMessage", "announcementMessage" ) """ - TBC + Detailed message to include in the announcement on this asset. """ ANNOUNCEMENT_TYPE: ClassVar[KeywordField] = KeywordField( "announcementType", "announcementType" ) """ - TBC + Type of announcement on this asset. """ ANNOUNCEMENT_UPDATED_AT: ClassVar[NumericField] = NumericField( "announcementUpdatedAt", "announcementUpdatedAt" ) """ - TBC + Time (epoch) at which the announcement was last updated, in milliseconds. """ ANNOUNCEMENT_UPDATED_BY: ClassVar[KeywordField] = KeywordField( "announcementUpdatedBy", "announcementUpdatedBy" ) """ - TBC + Name of the user who last updated the announcement. """ OWNER_USERS: ClassVar[KeywordField] = KeywordField("ownerUsers", "ownerUsers") """ - TBC + List of users who own this asset. """ OWNER_GROUPS: ClassVar[KeywordField] = KeywordField("ownerGroups", "ownerGroups") """ - TBC + List of groups who own this asset. """ ADMIN_USERS: ClassVar[KeywordField] = KeywordField("adminUsers", "adminUsers") """ - TBC + List of users who administer this asset. (This is only used for certain asset types.) """ ADMIN_GROUPS: ClassVar[KeywordField] = KeywordField("adminGroups", "adminGroups") """ - TBC + List of groups who administer this asset. (This is only used for certain asset types.) """ VIEWER_USERS: ClassVar[KeywordField] = KeywordField("viewerUsers", "viewerUsers") """ - TBC + List of users who can view assets contained in a collection. (This is only used for certain asset types.) """ VIEWER_GROUPS: ClassVar[KeywordField] = KeywordField("viewerGroups", "viewerGroups") """ - TBC + List of groups who can view assets contained in a collection. (This is only used for certain asset types.) """ CONNECTOR_NAME: ClassVar[KeywordField] = KeywordField( "connectorName", "connectorName" ) """ - TBC + Type of the connector through which this asset is accessible. """ CONNECTION_NAME: ClassVar[KeywordTextField] = KeywordTextField( "connectionName", "connectionName", "connectionName.text" ) """ - TBC + Simple name of the connection through which this asset is accessible. """ CONNECTION_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( "connectionQualifiedName", @@ -634,305 +634,305 @@ def __setattr__(self, name, value): "connectionQualifiedName.text", ) """ - TBC + Unique name of the connection through which this asset is accessible. """ HAS_LINEAGE: ClassVar[BooleanField] = BooleanField("__hasLineage", "__hasLineage") """ - TBC + Whether this asset has lineage (true) or not (false). 
""" IS_DISCOVERABLE: ClassVar[BooleanField] = BooleanField( "isDiscoverable", "isDiscoverable" ) """ - TBC + Whether this asset is discoverable through the UI (true) or not (false). """ IS_EDITABLE: ClassVar[BooleanField] = BooleanField("isEditable", "isEditable") """ - TBC + Whether this asset can be edited in the UI (true) or not (false). """ SUB_TYPE: ClassVar[KeywordField] = KeywordField("subType", "subType") """ - TBC + Subtype of this asset. """ VIEW_SCORE: ClassVar[NumericRankField] = NumericRankField( "viewScore", "viewScore", "viewScore.rank_feature" ) """ - TBC + View score for this asset. """ POPULARITY_SCORE: ClassVar[NumericRankField] = NumericRankField( "popularityScore", "popularityScore", "popularityScore.rank_feature" ) """ - TBC + Popularity score for this asset. """ SOURCE_OWNERS: ClassVar[KeywordField] = KeywordField("sourceOwners", "sourceOwners") """ - TBC + List of owners of this asset, in the source system. """ SOURCE_CREATED_BY: ClassVar[KeywordField] = KeywordField( "sourceCreatedBy", "sourceCreatedBy" ) """ - TBC + Name of the user who created this asset, in the source system. """ SOURCE_CREATED_AT: ClassVar[NumericField] = NumericField( "sourceCreatedAt", "sourceCreatedAt" ) """ - TBC + Time (epoch) at which this asset was created in the source system, in milliseconds. """ SOURCE_UPDATED_AT: ClassVar[NumericField] = NumericField( "sourceUpdatedAt", "sourceUpdatedAt" ) """ - TBC + Time (epoch) at which this asset was last updated in the source system, in milliseconds. """ SOURCE_UPDATED_BY: ClassVar[KeywordField] = KeywordField( "sourceUpdatedBy", "sourceUpdatedBy" ) """ - TBC + Name of the user who last updated this asset, in the source system. """ SOURCE_URL: ClassVar[KeywordField] = KeywordField("sourceURL", "sourceURL") """ - TBC - """ + URL to the resource within the source application, used to create a button to view this asset in the source application. + """ # noqa: E501 SOURCE_EMBED_URL: ClassVar[KeywordField] = KeywordField( "sourceEmbedURL", "sourceEmbedURL" ) """ - TBC + URL to create an embed for a resource (for example, an image of a dashboard) within Atlan. """ LAST_SYNC_WORKFLOW_NAME: ClassVar[KeywordField] = KeywordField( "lastSyncWorkflowName", "lastSyncWorkflowName" ) """ - TBC + Name of the crawler that last synchronized this asset. """ LAST_SYNC_RUN_AT: ClassVar[NumericField] = NumericField( "lastSyncRunAt", "lastSyncRunAt" ) """ - TBC + Time (epoch) at which this asset was last crawled, in milliseconds. """ LAST_SYNC_RUN: ClassVar[KeywordField] = KeywordField("lastSyncRun", "lastSyncRun") """ - TBC + Name of the last run of the crawler that last synchronized this asset. """ ADMIN_ROLES: ClassVar[KeywordField] = KeywordField("adminRoles", "adminRoles") """ - TBC + List of roles who administer this asset. (This is only used for Connection assets.) """ SOURCE_READ_COUNT: ClassVar[NumericField] = NumericField( "sourceReadCount", "sourceReadCount" ) """ - Total count of all read operations at source + Total count of all read operations at source. """ SOURCE_READ_USER_COUNT: ClassVar[NumericField] = NumericField( "sourceReadUserCount", "sourceReadUserCount" ) """ - Total number of unique users that read data from asset + Total number of unique users that read data from asset. """ SOURCE_LAST_READ_AT: ClassVar[NumericField] = NumericField( "sourceLastReadAt", "sourceLastReadAt" ) """ - Timestamp of most recent read operation + Timestamp of most recent read operation. 
""" LAST_ROW_CHANGED_AT: ClassVar[NumericField] = NumericField( "lastRowChangedAt", "lastRowChangedAt" ) """ - Timestamp of last operation that inserted/updated/deleted rows. Google Sheets, Mysql table etc + Time (epoch) of the last operation that inserted, updated, or deleted rows, in milliseconds. """ SOURCE_TOTAL_COST: ClassVar[NumericField] = NumericField( "sourceTotalCost", "sourceTotalCost" ) """ - Total cost of all operations at source + Total cost of all operations at source. """ SOURCE_COST_UNIT: ClassVar[KeywordField] = KeywordField( "sourceCostUnit", "sourceCostUnit" ) """ - The unit of measure for sourceTotalCost + The unit of measure for sourceTotalCost. """ SOURCE_READ_QUERY_COST: ClassVar[NumericField] = NumericField( "sourceReadQueryCost", "sourceReadQueryCost" ) """ - Total cost of read queries at source + Total cost of read queries at source. """ SOURCE_READ_RECENT_USER_LIST: ClassVar[KeywordField] = KeywordField( "sourceReadRecentUserList", "sourceReadRecentUserList" ) """ - List of usernames of the most recent users who read the asset + List of usernames of the most recent users who read this asset. """ SOURCE_READ_RECENT_USER_RECORD_LIST: ClassVar[KeywordField] = KeywordField( "sourceReadRecentUserRecordList", "sourceReadRecentUserRecordList" ) """ - List of usernames with extra insights for the most recent users who read the asset + List of usernames with extra insights for the most recent users who read this asset. """ SOURCE_READ_TOP_USER_LIST: ClassVar[KeywordField] = KeywordField( "sourceReadTopUserList", "sourceReadTopUserList" ) """ - List of usernames of the top users who read the asset the most + List of usernames of the users who read this asset the most. """ SOURCE_READ_TOP_USER_RECORD_LIST: ClassVar[KeywordField] = KeywordField( "sourceReadTopUserRecordList", "sourceReadTopUserRecordList" ) """ - List of usernames with extra insights for the top users who read the asset the most + List of usernames with extra insights for the users who read this asset the most. """ SOURCE_READ_POPULAR_QUERY_RECORD_LIST: ClassVar[KeywordField] = KeywordField( "sourceReadPopularQueryRecordList", "sourceReadPopularQueryRecordList" ) """ - List of the most popular queries that accessed this asset + List of the most popular queries that accessed this asset. """ SOURCE_READ_EXPENSIVE_QUERY_RECORD_LIST: ClassVar[KeywordField] = KeywordField( "sourceReadExpensiveQueryRecordList", "sourceReadExpensiveQueryRecordList" ) """ - List of the most expensive queries that accessed this asset + List of the most expensive queries that accessed this asset. """ SOURCE_READ_SLOW_QUERY_RECORD_LIST: ClassVar[KeywordField] = KeywordField( "sourceReadSlowQueryRecordList", "sourceReadSlowQueryRecordList" ) """ - List of the slowest queries that accessed this asset + List of the slowest queries that accessed this asset. """ SOURCE_QUERY_COMPUTE_COST_LIST: ClassVar[KeywordField] = KeywordField( "sourceQueryComputeCostList", "sourceQueryComputeCostList" ) """ - List of most expensive warehouse names + List of most expensive warehouse names. """ SOURCE_QUERY_COMPUTE_COST_RECORD_LIST: ClassVar[KeywordField] = KeywordField( "sourceQueryComputeCostRecordList", "sourceQueryComputeCostRecordList" ) """ - List of most expensive warehouses with extra insights + List of most expensive warehouses with extra insights. """ DBT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( "dbtQualifiedName", "dbtQualifiedName", "dbtQualifiedName.text" ) """ - TBC + Unique name of this asset in dbt. 
""" ASSET_DBT_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( "assetDbtAlias", "assetDbtAlias.keyword", "assetDbtAlias" ) """ - TBC + Alias of this asset in dbt. """ ASSET_DBT_META: ClassVar[KeywordField] = KeywordField( "assetDbtMeta", "assetDbtMeta" ) """ - TBC + Metadata for this asset in dbt, specifically everything under the 'meta' key in the dbt object. """ ASSET_DBT_UNIQUE_ID: ClassVar[KeywordTextField] = KeywordTextField( "assetDbtUniqueId", "assetDbtUniqueId.keyword", "assetDbtUniqueId" ) """ - TBC + Unique identifier of this asset in dbt. """ ASSET_DBT_ACCOUNT_NAME: ClassVar[KeywordTextField] = KeywordTextField( "assetDbtAccountName", "assetDbtAccountName.keyword", "assetDbtAccountName" ) """ - TBC + Name of the account in which this asset exists in dbt. """ ASSET_DBT_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( "assetDbtProjectName", "assetDbtProjectName.keyword", "assetDbtProjectName" ) """ - TBC + Name of the project in which this asset exists in dbt. """ ASSET_DBT_PACKAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( "assetDbtPackageName", "assetDbtPackageName.keyword", "assetDbtPackageName" ) """ - TBC + Name of the package in which this asset exists in dbt. """ ASSET_DBT_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( "assetDbtJobName", "assetDbtJobName.keyword", "assetDbtJobName" ) """ - TBC + Name of the job that materialized this asset in dbt. """ ASSET_DBT_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( "assetDbtJobSchedule", "assetDbtJobSchedule" ) """ - TBC + Schedule of the job that materialized this asset in dbt. """ ASSET_DBT_JOB_STATUS: ClassVar[KeywordField] = KeywordField( "assetDbtJobStatus", "assetDbtJobStatus" ) """ - TBC + Status of the job that materialized this asset in dbt. """ ASSET_DBT_TEST_STATUS: ClassVar[KeywordField] = KeywordField( "assetDbtTestStatus", "assetDbtTestStatus" ) """ - All associated dbt test statuses + All associated dbt test statuses. """ ASSET_DBT_JOB_SCHEDULE_CRON_HUMANIZED: ClassVar[TextField] = TextField( "assetDbtJobScheduleCronHumanized", "assetDbtJobScheduleCronHumanized" ) """ - TBC + Human-readable cron schedule of the job that materialized this asset in dbt. """ ASSET_DBT_JOB_LAST_RUN: ClassVar[NumericField] = NumericField( "assetDbtJobLastRun", "assetDbtJobLastRun" ) """ - TBC + Time (epoch) at which the job that materialized this asset in dbt last ran, in milliseconds. """ ASSET_DBT_JOB_LAST_RUN_URL: ClassVar[KeywordField] = KeywordField( "assetDbtJobLastRunUrl", "assetDbtJobLastRunUrl" ) """ - TBC + URL of the last run of the job that materialized this asset in dbt. """ ASSET_DBT_JOB_LAST_RUN_CREATED_AT: ClassVar[NumericField] = NumericField( "assetDbtJobLastRunCreatedAt", "assetDbtJobLastRunCreatedAt" ) """ - TBC + Time (epoch) at which the job that materialized this asset in dbt was last created, in milliseconds. """ ASSET_DBT_JOB_LAST_RUN_UPDATED_AT: ClassVar[NumericField] = NumericField( "assetDbtJobLastRunUpdatedAt", "assetDbtJobLastRunUpdatedAt" ) """ - TBC + Time (epoch) at which the job that materialized this asset in dbt was last updated, in milliseconds. """ ASSET_DBT_JOB_LAST_RUN_DEQUED_AT: ClassVar[NumericField] = NumericField( "assetDbtJobLastRunDequedAt", "assetDbtJobLastRunDequedAt" ) """ - TBC + Time (epoch) at which the job that materialized this asset in dbt was dequeued, in milliseconds. 
""" ASSET_DBT_JOB_LAST_RUN_STARTED_AT: ClassVar[NumericField] = NumericField( "assetDbtJobLastRunStartedAt", "assetDbtJobLastRunStartedAt" ) """ - TBC + Time (epoch) at which the job that materialized this asset in dbt was started running, in milliseconds. """ ASSET_DBT_JOB_LAST_RUN_TOTAL_DURATION: ClassVar[KeywordField] = KeywordField( "assetDbtJobLastRunTotalDuration", "assetDbtJobLastRunTotalDuration" ) """ - TBC + Total duration of the last run of the job that materialized this asset in dbt. """ ASSET_DBT_JOB_LAST_RUN_TOTAL_DURATION_HUMANIZED: ClassVar[ KeywordField @@ -941,13 +941,13 @@ def __setattr__(self, name, value): "assetDbtJobLastRunTotalDurationHumanized", ) """ - TBC + Human-readable total duration of the last run of the job that materialized this asset in dbt. """ ASSET_DBT_JOB_LAST_RUN_QUEUED_DURATION: ClassVar[KeywordField] = KeywordField( "assetDbtJobLastRunQueuedDuration", "assetDbtJobLastRunQueuedDuration" ) """ - TBC + Total duration the job that materialized this asset in dbt spent being queued. """ ASSET_DBT_JOB_LAST_RUN_QUEUED_DURATION_HUMANIZED: ClassVar[ KeywordField @@ -956,13 +956,13 @@ def __setattr__(self, name, value): "assetDbtJobLastRunQueuedDurationHumanized", ) """ - TBC + Human-readable total duration of the last run of the job that materialized this asset in dbt spend being queued. """ ASSET_DBT_JOB_LAST_RUN_RUN_DURATION: ClassVar[KeywordField] = KeywordField( "assetDbtJobLastRunRunDuration", "assetDbtJobLastRunRunDuration" ) """ - TBC + Run duration of the last run of the job that materialized this asset in dbt. """ ASSET_DBT_JOB_LAST_RUN_RUN_DURATION_HUMANIZED: ClassVar[ KeywordField @@ -971,7 +971,7 @@ def __setattr__(self, name, value): "assetDbtJobLastRunRunDurationHumanized", ) """ - TBC + Human-readable run duration of the last run of the job that materialized this asset in dbt. """ ASSET_DBT_JOB_LAST_RUN_GIT_BRANCH: ClassVar[KeywordTextField] = KeywordTextField( "assetDbtJobLastRunGitBranch", @@ -979,13 +979,13 @@ def __setattr__(self, name, value): "assetDbtJobLastRunGitBranch.text", ) """ - TBC + Branch in git from which the last run of the job that materialized this asset in dbt ran. """ ASSET_DBT_JOB_LAST_RUN_GIT_SHA: ClassVar[KeywordField] = KeywordField( "assetDbtJobLastRunGitSha", "assetDbtJobLastRunGitSha" ) """ - TBC + SHA hash in git for the last run of the job that materialized this asset in dbt. """ ASSET_DBT_JOB_LAST_RUN_STATUS_MESSAGE: ClassVar[ KeywordTextField @@ -995,55 +995,55 @@ def __setattr__(self, name, value): "assetDbtJobLastRunStatusMessage", ) """ - TBC + Status message of the last run of the job that materialized this asset in dbt. """ ASSET_DBT_JOB_LAST_RUN_OWNER_THREAD_ID: ClassVar[KeywordField] = KeywordField( "assetDbtJobLastRunOwnerThreadId", "assetDbtJobLastRunOwnerThreadId" ) """ - TBC + Thread ID of the owner of the last run of the job that materialized this asset in dbt. """ ASSET_DBT_JOB_LAST_RUN_EXECUTED_BY_THREAD_ID: ClassVar[KeywordField] = KeywordField( "assetDbtJobLastRunExecutedByThreadId", "assetDbtJobLastRunExecutedByThreadId" ) """ - TBC + Thread ID of the user who executed the last run of the job that materialized this asset in dbt. """ ASSET_DBT_JOB_LAST_RUN_ARTIFACTS_SAVED: ClassVar[BooleanField] = BooleanField( "assetDbtJobLastRunArtifactsSaved", "assetDbtJobLastRunArtifactsSaved" ) """ - TBC + Whether artifacts were saved from the last run of the job that materialized this asset in dbt (true) or not (false). 
""" ASSET_DBT_JOB_LAST_RUN_ARTIFACT_S3PATH: ClassVar[KeywordField] = KeywordField( "assetDbtJobLastRunArtifactS3Path", "assetDbtJobLastRunArtifactS3Path" ) """ - TBC + Path in S3 to the artifacts saved from the last run of the job that materialized this asset in dbt. """ ASSET_DBT_JOB_LAST_RUN_HAS_DOCS_GENERATED: ClassVar[BooleanField] = BooleanField( "assetDbtJobLastRunHasDocsGenerated", "assetDbtJobLastRunHasDocsGenerated" ) """ - TBC + Whether docs were generated from the last run of the job that materialized this asset in dbt (true) or not (false). """ ASSET_DBT_JOB_LAST_RUN_HAS_SOURCES_GENERATED: ClassVar[BooleanField] = BooleanField( "assetDbtJobLastRunHasSourcesGenerated", "assetDbtJobLastRunHasSourcesGenerated" ) """ - TBC - """ + Whether sources were generated from the last run of the job that materialized this asset in dbt (true) or not (false). + """ # noqa: E501 ASSET_DBT_JOB_LAST_RUN_NOTIFICATIONS_SENT: ClassVar[BooleanField] = BooleanField( "assetDbtJobLastRunNotificationsSent", "assetDbtJobLastRunNotificationsSent" ) """ - TBC - """ + Whether notifications were sent from the last run of the job that materialized this asset in dbt (true) or not (false). + """ # noqa: E501 ASSET_DBT_JOB_NEXT_RUN: ClassVar[NumericField] = NumericField( "assetDbtJobNextRun", "assetDbtJobNextRun" ) """ - TBC + Time (epoch) when the next run of the job that materializes this asset in dbt is scheduled. """ ASSET_DBT_JOB_NEXT_RUN_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( "assetDbtJobNextRunHumanized", @@ -1051,7 +1051,7 @@ def __setattr__(self, name, value): "assetDbtJobNextRunHumanized", ) """ - TBC + Human-readable time when the next run of the job that materializes this asset in dbt is scheduled. """ ASSET_DBT_ENVIRONMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( "assetDbtEnvironmentName", @@ -1059,49 +1059,49 @@ def __setattr__(self, name, value): "assetDbtEnvironmentName", ) """ - TBC + Name of the environment in which this asset is materialized in dbt. """ ASSET_DBT_ENVIRONMENT_DBT_VERSION: ClassVar[KeywordField] = KeywordField( "assetDbtEnvironmentDbtVersion", "assetDbtEnvironmentDbtVersion" ) """ - TBC + Version of the environment in which this asset is materialized in dbt. """ ASSET_DBT_TAGS: ClassVar[KeywordTextField] = KeywordTextField( "assetDbtTags", "assetDbtTags", "assetDbtTags.text" ) """ - TBC + List of tags attached to this asset in dbt. """ ASSET_DBT_SEMANTIC_LAYER_PROXY_URL: ClassVar[KeywordField] = KeywordField( "assetDbtSemanticLayerProxyUrl", "assetDbtSemanticLayerProxyUrl" ) """ - TBC + URL of the semantic layer proxy for this asset in dbt. """ ASSET_DBT_SOURCE_FRESHNESS_CRITERIA: ClassVar[KeywordField] = KeywordField( "assetDbtSourceFreshnessCriteria", "assetDbtSourceFreshnessCriteria" ) """ - TBC + Freshness criteria for the source of this asset in dbt. """ SAMPLE_DATA_URL: ClassVar[KeywordTextField] = KeywordTextField( "sampleDataUrl", "sampleDataUrl", "sampleDataUrl.text" ) """ - TBC + URL for sample data for this asset. """ ASSET_TAGS: ClassVar[KeywordTextField] = KeywordTextField( "assetTags", "assetTags", "assetTags.text" ) """ - TBC + List of tags attached to this asset. """ ASSET_MC_INCIDENT_NAMES: ClassVar[KeywordTextField] = KeywordTextField( "assetMcIncidentNames", "assetMcIncidentNames.keyword", "assetMcIncidentNames" ) """ - TBC + List of Monte Carlo incident names attached to this asset. 
""" ASSET_MC_INCIDENT_QUALIFIED_NAMES: ClassVar[KeywordTextField] = KeywordTextField( "assetMcIncidentQualifiedNames", @@ -1109,13 +1109,13 @@ def __setattr__(self, name, value): "assetMcIncidentQualifiedNames.text", ) """ - TBC + List of unique Monte Carlo incident names attached to this asset. """ ASSET_MC_MONITOR_NAMES: ClassVar[KeywordTextField] = KeywordTextField( "assetMcMonitorNames", "assetMcMonitorNames.keyword", "assetMcMonitorNames" ) """ - TBC + List of Monte Carlo monitor names attached to this asset. """ ASSET_MC_MONITOR_QUALIFIED_NAMES: ClassVar[KeywordTextField] = KeywordTextField( "assetMcMonitorQualifiedNames", @@ -1123,109 +1123,109 @@ def __setattr__(self, name, value): "assetMcMonitorQualifiedNames.text", ) """ - TBC + List of unique Monte Carlo monitor names attached to this asset. """ ASSET_MC_MONITOR_STATUSES: ClassVar[KeywordField] = KeywordField( "assetMcMonitorStatuses", "assetMcMonitorStatuses" ) """ - All associated monitors statuses + Statuses of all associated Monte Carlo monitors. """ ASSET_MC_MONITOR_TYPES: ClassVar[KeywordField] = KeywordField( "assetMcMonitorTypes", "assetMcMonitorTypes" ) """ - All associated monitor types + Types of all associated Monte Carlo monitors. """ ASSET_MC_MONITOR_SCHEDULE_TYPES: ClassVar[KeywordField] = KeywordField( "assetMcMonitorScheduleTypes", "assetMcMonitorScheduleTypes" ) """ - MonteCarlo Monitor schedule type + Schedules of all associated Monte Carlo monitors. """ ASSET_MC_INCIDENT_TYPES: ClassVar[KeywordField] = KeywordField( "assetMcIncidentTypes", "assetMcIncidentTypes" ) """ - TBC + List of Monte Carlo incident types associated with this asset. """ ASSET_MC_INCIDENT_SUB_TYPES: ClassVar[KeywordField] = KeywordField( "assetMcIncidentSubTypes", "assetMcIncidentSubTypes" ) """ - TBC + List of Monte Carlo incident sub-types associated with this asset. """ ASSET_MC_INCIDENT_SEVERITIES: ClassVar[KeywordField] = KeywordField( "assetMcIncidentSeverities", "assetMcIncidentSeverities" ) """ - TBC + List of Monte Carlo incident severities associated with this asset. """ ASSET_MC_INCIDENT_STATES: ClassVar[KeywordField] = KeywordField( "assetMcIncidentStates", "assetMcIncidentStates" ) """ - TBC + List of Monte Carlo incident states associated with this asset. """ ASSET_MC_LAST_SYNC_RUN_AT: ClassVar[NumericField] = NumericField( "assetMcLastSyncRunAt", "assetMcLastSyncRunAt" ) """ - TBC + Time (epoch) at which this asset was last synced from Monte Carlo. """ STARRED_BY: ClassVar[KeywordField] = KeywordField("starredBy", "starredBy") """ - TBC + Users who have starred this asset. """ STARRED_DETAILS_LIST: ClassVar[KeywordField] = KeywordField( "starredDetailsList", "starredDetailsList" ) """ - List of usernames with extra information of the users who have starred an asset + List of usernames with extra information of the users who have starred an asset. """ STARRED_COUNT: ClassVar[NumericField] = NumericField("starredCount", "starredCount") """ - TBC + Number of users who have starred this asset. """ ASSET_SODA_DQ_STATUS: ClassVar[KeywordField] = KeywordField( "assetSodaDQStatus", "assetSodaDQStatus" ) """ - Soda DQ Status + Status of data quality from Soda. """ ASSET_SODA_CHECK_COUNT: ClassVar[NumericField] = NumericField( "assetSodaCheckCount", "assetSodaCheckCount" ) """ - Soda check count + Number of checks done via Soda. 
""" ASSET_SODA_LAST_SYNC_RUN_AT: ClassVar[NumericField] = NumericField( "assetSodaLastSyncRunAt", "assetSodaLastSyncRunAt" ) """ - TBC + """ ASSET_SODA_LAST_SCAN_AT: ClassVar[NumericField] = NumericField( "assetSodaLastScanAt", "assetSodaLastScanAt" ) """ - TBC + """ ASSET_SODA_CHECK_STATUSES: ClassVar[KeywordField] = KeywordField( "assetSodaCheckStatuses", "assetSodaCheckStatuses" ) """ - All associated soda check statuses + All associated Soda check statuses. """ ASSET_SODA_SOURCE_URL: ClassVar[KeywordField] = KeywordField( "assetSodaSourceURL", "assetSodaSourceURL" ) """ - TBC + """ ASSET_ICON: ClassVar[KeywordField] = KeywordField("assetIcon", "assetIcon") """ - TBC + Name of the icon to use for this asset. (Only applies to glossaries, currently.) """ IS_PARTIAL: ClassVar[BooleanField] = BooleanField("isPartial", "isPartial") """ @@ -1235,7 +1235,7 @@ def __setattr__(self, name, value): "isAIGenerated", "isAIGenerated" ) """ - TBC + """ ASSET_COVER_IMAGE: ClassVar[KeywordField] = KeywordField( "assetCoverImage", "assetCoverImage" @@ -3672,19 +3672,19 @@ def __setattr__(self, name, value): "shortDescription", "shortDescription" ) """ - TBC + Unused. Brief summary of the category. See 'description' and 'userDescription' instead. """ LONG_DESCRIPTION: ClassVar[KeywordField] = KeywordField( "longDescription", "longDescription" ) """ - TBC + Unused. Detailed description of the category. See 'readme' instead. """ ADDITIONAL_ATTRIBUTES: ClassVar[KeywordField] = KeywordField( "additionalAttributes", "additionalAttributes" ) """ - TBC + Unused. Arbitrary set of additional attributes associated with the category. """ CATEGORY_TYPE: ClassVar[KeywordField] = KeywordField("categoryType", "categoryType") """ @@ -3887,27 +3887,27 @@ def __setattr__(self, name, value): "shortDescription", "shortDescription" ) """ - TBC + Unused. A short definition of the glossary. See 'description' and 'userDescription' instead. """ LONG_DESCRIPTION: ClassVar[KeywordField] = KeywordField( "longDescription", "longDescription" ) """ - TBC + Unused. A longer description of the glossary. See 'readme' instead. """ LANGUAGE: ClassVar[KeywordField] = KeywordField("language", "language") """ - TBC + Unused. Language of the glossary's contents. """ USAGE: ClassVar[KeywordField] = KeywordField("usage", "usage") """ - TBC + Unused. Inteded usage for the glossary. """ ADDITIONAL_ATTRIBUTES: ClassVar[KeywordField] = KeywordField( "additionalAttributes", "additionalAttributes" ) """ - TBC + Unused. Arbitrary set of additional attributes associated with this glossary. """ GLOSSARY_TYPE: ClassVar[KeywordField] = KeywordField("glossaryType", "glossaryType") """ @@ -4144,31 +4144,31 @@ def __setattr__(self, name, value): "shortDescription", "shortDescription" ) """ - TBC + Unused. Brief summary of the term. See 'description' and 'userDescription' instead. """ LONG_DESCRIPTION: ClassVar[KeywordField] = KeywordField( "longDescription", "longDescription" ) """ - TBC + Unused. Detailed definition of the term. See 'readme' instead. """ EXAMPLES: ClassVar[KeywordField] = KeywordField("examples", "examples") """ - TBC + Unused. Exmaples of the term. """ ABBREVIATION: ClassVar[KeywordField] = KeywordField("abbreviation", "abbreviation") """ - TBC + Unused. Abbreviation of the term. """ USAGE: ClassVar[KeywordField] = KeywordField("usage", "usage") """ - TBC + Unused. Intended usage for the term. """ ADDITIONAL_ATTRIBUTES: ClassVar[KeywordField] = KeywordField( "additionalAttributes", "additionalAttributes" ) """ - TBC + Unused. 
Arbitrary set of additional attributes for the term. """ TERM_TYPE: ClassVar[KeywordField] = KeywordField("termType", "termType") """ @@ -4632,15 +4632,15 @@ def __setattr__(self, name, value): CODE: ClassVar[KeywordField] = KeywordField("code", "code") """ - TBC + Code that ran within the process. """ SQL: ClassVar[KeywordField] = KeywordField("sql", "sql") """ - TBC + SQL query that ran to produce the outputs. """ AST: ClassVar[KeywordField] = KeywordField("ast", "ast") """ - TBC + Parsed AST of the code or SQL statements that describe the logic of this process. """ MATILLION_COMPONENT: ClassVar[RelationField] = RelationField("matillionComponent") @@ -4920,7 +4920,7 @@ def __setattr__(self, name, value): "parentQualifiedName", "parentQualifiedName", "parentQualifiedName.text" ) """ - TBC + Unique name of the parent folder or collection in which this folder exists. """ COLLECTION_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( "collectionQualifiedName", @@ -4928,7 +4928,7 @@ def __setattr__(self, name, value): "collectionQualifiedName.text", ) """ - TBC + Unique name of the collection in which this folder exists. """ PARENT: ClassVar[RelationField] = RelationField("parent") @@ -5133,25 +5133,25 @@ def __setattr__(self, name, value): TAG_ID: ClassVar[KeywordField] = KeywordField("tagId", "tagId") """ - Unique source tag identifier + Unique identifier of the tag in the source system. """ TAG_ATTRIBUTES: ClassVar[KeywordField] = KeywordField( "tagAttributes", "tagAttributes" ) """ - Source tag attributes + Attributes associated with the tag in the source system. """ TAG_ALLOWED_VALUES: ClassVar[KeywordTextField] = KeywordTextField( "tagAllowedValues", "tagAllowedValues", "tagAllowedValues.text" ) """ - Allowed values for the tag at source. De-normalised from sourceTagAttributed for ease of querying + Allowed values for the tag in the source system. These are denormalized from tagAttributes for ease of querying. """ MAPPED_CLASSIFICATION_NAME: ClassVar[KeywordField] = KeywordField( "mappedClassificationName", "mappedClassificationName" ) """ - Mapped atlan classification name + Name of the classification in Atlan that is mapped to this tag. """ _convenience_properties: ClassVar[list[str]] = [ @@ -5262,7 +5262,7 @@ def __setattr__(self, name, value): OUTPUTS: ClassVar[RelationField] = RelationField("outputs") """ - TBC + Assets that are outputs from this process. """ PROCESS: ClassVar[RelationField] = RelationField("process") """ @@ -5270,7 +5270,7 @@ def __setattr__(self, name, value): """ INPUTS: ClassVar[RelationField] = RelationField("inputs") """ - TBC + Assets that are inputs to this process. """ _convenience_properties: ClassVar[list[str]] = [ @@ -5376,49 +5376,49 @@ def __setattr__(self, name, value): AIRFLOW_TAGS: ClassVar[KeywordField] = KeywordField("airflowTags", "airflowTags") """ - TBC + Tags assigned to the asset in Airflow. """ AIRFLOW_RUN_VERSION: ClassVar[KeywordField] = KeywordField( "airflowRunVersion", "airflowRunVersion" ) """ - Airflow Version of the run + Version of the run in Airflow. """ AIRFLOW_RUN_OPEN_LINEAGE_VERSION: ClassVar[KeywordField] = KeywordField( "airflowRunOpenLineageVersion", "airflowRunOpenLineageVersion" ) """ - OpenLineage Version of the run + Version of the run in OpenLineage. """ AIRFLOW_RUN_NAME: ClassVar[KeywordField] = KeywordField( "airflowRunName", "airflowRunName" ) """ - Name of the run + Name of the run.
""" AIRFLOW_RUN_TYPE: ClassVar[KeywordField] = KeywordField( "airflowRunType", "airflowRunType" ) """ - Type of the run + Type of the run. """ AIRFLOW_RUN_START_TIME: ClassVar[NumericField] = NumericField( "airflowRunStartTime", "airflowRunStartTime" ) """ - Start time of the run + Start time of the run. """ AIRFLOW_RUN_END_TIME: ClassVar[NumericField] = NumericField( "airflowRunEndTime", "airflowRunEndTime" ) """ - End time of the run + End time of the run. """ AIRFLOW_RUN_OPEN_LINEAGE_STATE: ClassVar[KeywordField] = KeywordField( "airflowRunOpenLineageState", "airflowRunOpenLineageState" ) """ - OpenLineage state of the run + State of the run in OpenLineage. """ _convenience_properties: ClassVar[list[str]] = [ @@ -5581,13 +5581,13 @@ def __setattr__(self, name, value): "airflowDagSchedule", "airflowDagSchedule" ) """ - TBC + Schedule for the DAG. """ AIRFLOW_DAG_SCHEDULE_DELTA: ClassVar[NumericField] = NumericField( "airflowDagScheduleDelta", "airflowDagScheduleDelta" ) """ - Duration between scheduled runs in seconds + Duration between scheduled runs, in seconds. """ AIRFLOW_TASKS: ClassVar[RelationField] = RelationField("airflowTasks") @@ -5675,19 +5675,19 @@ def __setattr__(self, name, value): "airflowTaskOperatorClass", ) """ - TBC + Class name for the operator this task uses. """ AIRFLOW_DAG_NAME: ClassVar[KeywordTextField] = KeywordTextField( "airflowDagName", "airflowDagName.keyword", "airflowDagName" ) """ - TBC + Simple name of the DAG this task is contained within. """ AIRFLOW_DAG_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( "airflowDagQualifiedName", "airflowDagQualifiedName" ) """ - TBC + Unique name of the DAG this task is contained within. """ AIRFLOW_TASK_CONNECTION_ID: ClassVar[KeywordTextField] = KeywordTextField( "airflowTaskConnectionId", @@ -5695,49 +5695,49 @@ def __setattr__(self, name, value): "airflowTaskConnectionId", ) """ - TBC + Identifier for the connection this task accesses. """ AIRFLOW_TASK_SQL: ClassVar[KeywordField] = KeywordField( "airflowTaskSql", "airflowTaskSql" ) """ - TBC + SQL code that executes through this task. """ AIRFLOW_TASK_RETRY_NUMBER: ClassVar[NumericField] = NumericField( "airflowTaskRetryNumber", "airflowTaskRetryNumber" ) """ - Retry required for the run + Retry count for this task running. """ AIRFLOW_TASK_POOL: ClassVar[KeywordField] = KeywordField( "airflowTaskPool", "airflowTaskPool" ) """ - Pool on which this run happened + Pool on which this run happened. """ AIRFLOW_TASK_POOL_SLOTS: ClassVar[NumericField] = NumericField( "airflowTaskPoolSlots", "airflowTaskPoolSlots" ) """ - Pool slots used for the run + Pool slots used for the run. """ AIRFLOW_TASK_QUEUE: ClassVar[KeywordField] = KeywordField( "airflowTaskQueue", "airflowTaskQueue" ) """ - Queue on which this run happened + Queue on which this run happened. """ AIRFLOW_TASK_PRIORITY_WEIGHT: ClassVar[NumericField] = NumericField( "airflowTaskPriorityWeight", "airflowTaskPriorityWeight" ) """ - Priority weight of the run + Priority of the run. """ AIRFLOW_TASK_TRIGGER_RULE: ClassVar[KeywordField] = KeywordField( "airflowTaskTriggerRule", "airflowTaskTriggerRule" ) """ - Trigger rule of the run + Trigger for the run. """ OUTPUTS: ClassVar[RelationField] = RelationField("outputs") @@ -6042,21 +6042,21 @@ def __setattr__(self, name, value): METRIC_TYPE: ClassVar[KeywordField] = KeywordField("metricType", "metricType") """ - TBC + Type of the metric. 
""" METRIC_SQL: ClassVar[KeywordField] = KeywordField("metricSQL", "metricSQL") """ - TBC + SQL query used to compute the metric. """ METRIC_FILTERS: ClassVar[TextField] = TextField("metricFilters", "metricFilters") """ - TBC + Filters to be applied to the metric query. """ METRIC_TIME_GRAINS: ClassVar[TextField] = TextField( "metricTimeGrains", "metricTimeGrains" ) """ - TBC + List of time grains to be applied to the metric query. """ METRIC_TIMESTAMP_COLUMN: ClassVar[RelationField] = RelationField( @@ -6208,21 +6208,21 @@ def __setattr__(self, name, value): LINK: ClassVar[KeywordField] = KeywordField("link", "link") """ - TBC + URL to the resource. """ IS_GLOBAL: ClassVar[BooleanField] = BooleanField("isGlobal", "isGlobal") """ - TBC + Whether the resource is global (true) or not (false). """ REFERENCE: ClassVar[KeywordField] = KeywordField("reference", "reference") """ - TBC + Reference to the resource. """ RESOURCE_METADATA: ClassVar[KeywordField] = KeywordField( "resourceMetadata", "resourceMetadata" ) """ - TBC + Metadata of the resource. """ _convenience_properties: ClassVar[list[str]] = [ @@ -6434,11 +6434,11 @@ def __setattr__(self, name, value): FILE_TYPE: ClassVar[KeywordField] = KeywordField("fileType", "fileType") """ - TBC + Type (extension) of the file. """ FILE_PATH: ClassVar[KeywordField] = KeywordField("filePath", "filePath") """ - TBC + URL giving the online location where the file can be accessed. """ FILE_ASSETS: ClassVar[RelationField] = RelationField("fileAssets") @@ -6543,11 +6543,11 @@ def __setattr__(self, name, value): ICON: ClassVar[KeywordField] = KeywordField("icon", "icon") """ - TBC + Icon for the link. """ ICON_TYPE: ClassVar[KeywordField] = KeywordField("iconType", "iconType") """ - TBC + Type of icon for the link, for example: image or emoji. """ ASSET: ClassVar[RelationField] = RelationField("asset") @@ -7132,83 +7132,83 @@ def __setattr__(self, name, value): QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") """ - TBC + Number of times this asset has been queried. """ QUERY_USER_COUNT: ClassVar[NumericField] = NumericField( "queryUserCount", "queryUserCount" ) """ - TBC + Number of unique users who have queried this asset. """ QUERY_USER_MAP: ClassVar[KeywordField] = KeywordField( "queryUserMap", "queryUserMap" ) """ - TBC + Map of unique users who have queried this asset to the number of times they have queried it. """ QUERY_COUNT_UPDATED_AT: ClassVar[NumericField] = NumericField( "queryCountUpdatedAt", "queryCountUpdatedAt" ) """ - TBC + Time (epoch) at which the query count was last updated, in milliseconds. """ DATABASE_NAME: ClassVar[KeywordTextField] = KeywordTextField( "databaseName", "databaseName.keyword", "databaseName" ) """ - TBC + Simple name of the database in which this SQL asset exists, or empty if it does not exist within a database. """ DATABASE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( "databaseQualifiedName", "databaseQualifiedName" ) """ - TBC + Unique name of the database in which this SQL asset exists, or empty if it does not exist within a database. """ SCHEMA_NAME: ClassVar[KeywordTextField] = KeywordTextField( "schemaName", "schemaName.keyword", "schemaName" ) """ - TBC + Simple name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. 
""" SCHEMA_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( "schemaQualifiedName", "schemaQualifiedName" ) """ - TBC + Unique name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. """ TABLE_NAME: ClassVar[KeywordTextField] = KeywordTextField( "tableName", "tableName.keyword", "tableName" ) """ - TBC + Simple name of the table in which this SQL asset exists, or empty if it does not exist within a table. """ TABLE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( "tableQualifiedName", "tableQualifiedName" ) """ - TBC + Unique name of the table in which this SQL asset exists, or empty if it does not exist within a table. """ VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( "viewName", "viewName.keyword", "viewName" ) """ - TBC + Simple name of the view in which this SQL asset exists, or empty if it does not exist within a view. """ VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( "viewQualifiedName", "viewQualifiedName" ) """ - TBC + Unique name of the view in which this SQL asset exists, or empty if it does not exist within a view. """ IS_PROFILED: ClassVar[BooleanField] = BooleanField("isProfiled", "isProfiled") """ - TBC + Whether this asset has been profiled (true) or not (false). """ LAST_PROFILED_AT: ClassVar[NumericField] = NumericField( "lastProfiledAt", "lastProfiledAt" ) """ - TBC + Time (epoch) at which this asset was last profiled, in milliseconds. """ DBT_SOURCES: ClassVar[RelationField] = RelationField("dbtSources") @@ -7522,17 +7522,17 @@ def __setattr__(self, name, value): RAW_QUERY: ClassVar[KeywordField] = KeywordField("rawQuery", "rawQuery") """ - TBC + Deprecated. See 'longRawQuery' instead. """ LONG_RAW_QUERY: ClassVar[KeywordField] = KeywordField( "longRawQuery", "longRawQuery" ) """ - TBC + Raw SQL query string. """ RAW_QUERY_TEXT: ClassVar[RelationField] = RelationField("rawQueryText") """ - TBC + """ DEFAULT_SCHEMA_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( "defaultSchemaQualifiedName", @@ -7540,7 +7540,7 @@ def __setattr__(self, name, value): "defaultSchemaQualifiedName.text", ) """ - TBC + Unique name of the default schema to use for this query. """ DEFAULT_DATABASE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( "defaultDatabaseQualifiedName", @@ -7548,29 +7548,29 @@ def __setattr__(self, name, value): "defaultDatabaseQualifiedName.text", ) """ - TBC + Unique name of the default database to use for this query. """ VARIABLES_SCHEMA_BASE64: ClassVar[KeywordField] = KeywordField( "variablesSchemaBase64", "variablesSchemaBase64" ) """ - TBC + Base64-encoded string of the variables to use in this query. """ IS_PRIVATE: ClassVar[BooleanField] = BooleanField("isPrivate", "isPrivate") """ - TBC + Whether this query is private (true) or shared (false). """ IS_SQL_SNIPPET: ClassVar[BooleanField] = BooleanField( "isSqlSnippet", "isSqlSnippet" ) """ - TBC + Whether this query is a SQL snippet (true) or not (false). """ PARENT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( "parentQualifiedName", "parentQualifiedName", "parentQualifiedName.text" ) """ - TBC + Unique name of the parent collection or folder in which this query exists. """ COLLECTION_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( "collectionQualifiedName", @@ -7578,19 +7578,19 @@ def __setattr__(self, name, value): "collectionQualifiedName.text", ) """ - TBC + Unique name of the collection in which this query exists. 
""" IS_VISUAL_QUERY: ClassVar[BooleanField] = BooleanField( "isVisualQuery", "isVisualQuery" ) """ - TBC + Whether this query is a visual query (true) or not (false). """ VISUAL_BUILDER_SCHEMA_BASE64: ClassVar[KeywordField] = KeywordField( "visualBuilderSchemaBase64", "visualBuilderSchemaBase64" ) """ - TBC + Base64-encoded string for the visual query builder. """ PARENT: ClassVar[RelationField] = RelationField("parent") @@ -7898,11 +7898,11 @@ def __setattr__(self, name, value): TABLE_COUNT: ClassVar[NumericField] = NumericField("tableCount", "tableCount") """ - TBC + Number of tables in this schema. """ VIEWS_COUNT: ClassVar[NumericField] = NumericField("viewsCount", "viewsCount") """ - TBC + Number of views in this schema. """ SNOWFLAKE_TAGS: ClassVar[RelationField] = RelationField("snowflakeTags") @@ -8176,13 +8176,13 @@ def __setattr__(self, name, value): DEFINITION: ClassVar[KeywordField] = KeywordField("definition", "definition") """ - TBC + SQL definition of this pipe. """ SNOWFLAKE_PIPE_IS_AUTO_INGEST_ENABLED: ClassVar[BooleanField] = BooleanField( "snowflakePipeIsAutoIngestEnabled", "snowflakePipeIsAutoIngestEnabled" ) """ - TBC + Whether auto-ingest is enabled for this pipe (true) or not (false). """ SNOWFLAKE_PIPE_NOTIFICATION_CHANNEL_NAME: ClassVar[ KeywordTextField @@ -8192,7 +8192,7 @@ def __setattr__(self, name, value): "snowflakePipeNotificationChannelName.text", ) """ - TBC + Name of the notification channel for this pipe. """ ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") @@ -8312,39 +8312,39 @@ def __setattr__(self, name, value): COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") """ - TBC + Number of columns in this view. """ ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") """ - TBC + Number of rows in this view. """ SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") """ - TBC + Size of this view, in bytes. """ IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( "isQueryPreview", "isQueryPreview" ) """ - TBC + Whether preview queries are allowed on this view (true) or not (false). """ QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( "queryPreviewConfig", "queryPreviewConfig" ) """ - TBC + Configuration for preview queries on this view. """ ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") """ - TBC + Alias for this view. """ IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") """ - TBC + Whether this view is temporary (true) or not (false). """ DEFINITION: ClassVar[KeywordField] = KeywordField("definition", "definition") """ - TBC + SQL definition of this view. """ COLUMNS: ClassVar[RelationField] = RelationField("columns") @@ -8570,59 +8570,59 @@ def __setattr__(self, name, value): REFRESH_MODE: ClassVar[KeywordField] = KeywordField("refreshMode", "refreshMode") """ - TBC + Refresh mode for this materialized view. """ REFRESH_METHOD: ClassVar[KeywordField] = KeywordField( "refreshMethod", "refreshMethod" ) """ - TBC + Refresh method for this materialized view. """ STALENESS: ClassVar[KeywordField] = KeywordField("staleness", "staleness") """ - TBC + Staleness of this materialized view. """ STALE_SINCE_DATE: ClassVar[NumericField] = NumericField( "staleSinceDate", "staleSinceDate" ) """ - TBC + Time (epoch) from which this materialized view is stale, in milliseconds. """ COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") """ - TBC + Number of columns in this materialized view. 
""" ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") """ - TBC + Number of rows in this materialized view. """ SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") """ - TBC + Size of this materialized view, in bytes. """ IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( "isQueryPreview", "isQueryPreview" ) """ - TBC + Whether it's possible to run a preview query on this materialized view (true) or not (false). """ QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( "queryPreviewConfig", "queryPreviewConfig" ) """ - TBC + Configuration for the query preview of this materialized view. """ ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") """ - TBC + Alias for this materialized view. """ IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") """ - TBC + Whether this materialized view is temporary (true) or not (false). """ DEFINITION: ClassVar[KeywordField] = KeywordField("definition", "definition") """ - TBC + SQL definition of this materialized view. """ COLUMNS: ClassVar[RelationField] = RelationField("columns") @@ -8892,29 +8892,29 @@ def __setattr__(self, name, value): "functionLanguage", "functionLanguage" ) """ - The programming language in which the function is written. + Programming language in which the function is written. """ FUNCTION_TYPE: ClassVar[KeywordField] = KeywordField("functionType", "functionType") """ - The type of function. + Type of function. """ FUNCTION_IS_EXTERNAL: ClassVar[BooleanField] = BooleanField( "functionIsExternal", "functionIsExternal" ) """ - Determines whether the functions is stored or executed externally. + Whether the function is stored or executed externally (true) or internally (false). """ FUNCTION_IS_SECURE: ClassVar[BooleanField] = BooleanField( "functionIsSecure", "functionIsSecure" ) """ - Determines whether sensitive information of the function is omitted for unauthorized users. + Whether sensitive information of the function is omitted for unauthorized users (true) or not (false). """ FUNCTION_IS_MEMOIZABLE: ClassVar[BooleanField] = BooleanField( "functionIsMemoizable", "functionIsMemoizable" ) """ - Determines whether the function must re-compute or not if there are no underlying changes in the values. + Whether the function must re-compute if there are no underlying changes in the values (false) or not (true). """ FUNCTION_SCHEMA: ClassVar[RelationField] = RelationField("functionSchema") @@ -9078,81 +9078,81 @@ def __setattr__(self, name, value): CONSTRAINT: ClassVar[KeywordField] = KeywordField("constraint", "constraint") """ - TBC + Constraint that defines this table partition. """ COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") """ - TBC + Number of columns in this partition. """ ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") """ - TBC + Number of rows in this partition. """ SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") """ - TBC + Size of this partition, in bytes. """ ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") """ - TBC + Alias for this partition. """ IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") """ - TBC + Whether this partition is temporary (true) or not (false). """ IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( "isQueryPreview", "isQueryPreview" ) """ - TBC + Whether preview queries for this partition are allowed (true) or not (false). 
""" QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( "queryPreviewConfig", "queryPreviewConfig" ) """ - TBC + Configuration for the preview queries. """ EXTERNAL_LOCATION: ClassVar[KeywordField] = KeywordField( "externalLocation", "externalLocation" ) """ - TBC + External location of this partition, for example: an S3 object location. """ EXTERNAL_LOCATION_REGION: ClassVar[KeywordField] = KeywordField( "externalLocationRegion", "externalLocationRegion" ) """ - TBC + Region of the external location of this partition, for example: S3 region. """ EXTERNAL_LOCATION_FORMAT: ClassVar[KeywordField] = KeywordField( "externalLocationFormat", "externalLocationFormat" ) """ - TBC + Format of the external location of this partition, for example: JSON, CSV, PARQUET, etc. """ IS_PARTITIONED: ClassVar[BooleanField] = BooleanField( "isPartitioned", "isPartitioned" ) """ - TBC + Whether this partition is further partitioned (true) or not (false). """ PARTITION_STRATEGY: ClassVar[KeywordField] = KeywordField( "partitionStrategy", "partitionStrategy" ) """ - TBC + Partition strategy of this partition. """ PARTITION_COUNT: ClassVar[NumericField] = NumericField( "partitionCount", "partitionCount" ) """ - TBC + Number of sub-partitions of this partition. """ PARTITION_LIST: ClassVar[KeywordField] = KeywordField( "partitionList", "partitionList" ) """ - TBC + List of sub-partitions in this partition. """ CHILD_TABLE_PARTITIONS: ClassVar[RelationField] = RelationField( @@ -9491,97 +9491,97 @@ def __setattr__(self, name, value): "dataType", "dataType", "dataType.text" ) """ - TBC + Data type of values in this column. """ SUB_DATA_TYPE: ClassVar[KeywordField] = KeywordField("subDataType", "subDataType") """ - TBC + Sub-data type of this column. """ RAW_DATA_TYPE_DEFINITION: ClassVar[KeywordField] = KeywordField( "rawDataTypeDefinition", "rawDataTypeDefinition" ) """ - TBC + """ ORDER: ClassVar[NumericField] = NumericField("order", "order") """ - TBC + Order (position) in which this column appears in the table (starting at 1). """ NESTED_COLUMN_COUNT: ClassVar[NumericField] = NumericField( "nestedColumnCount", "nestedColumnCount" ) """ - TBC + Number of columns nested within this (STRUCT or NESTED) column. """ IS_PARTITION: ClassVar[BooleanField] = BooleanField("isPartition", "isPartition") """ - TBC + Whether this column is a partition column (true) or not (false). """ PARTITION_ORDER: ClassVar[NumericField] = NumericField( "partitionOrder", "partitionOrder" ) """ - TBC + Order (position) of this partition column in the table. """ IS_CLUSTERED: ClassVar[BooleanField] = BooleanField("isClustered", "isClustered") """ - TBC + Whether this column is a clustered column (true) or not (false). """ IS_PRIMARY: ClassVar[BooleanField] = BooleanField("isPrimary", "isPrimary") """ - TBC + When true, this column is the primary key for the table. """ IS_FOREIGN: ClassVar[BooleanField] = BooleanField("isForeign", "isForeign") """ - TBC - """ + When true, this column is a foreign key to another table. NOTE: this must be true when using the foreignKeyTo relationship to specify columns that refer to this column as a foreign key. + """ # noqa: E501 IS_INDEXED: ClassVar[BooleanField] = BooleanField("isIndexed", "isIndexed") """ - TBC + When true, this column is indexed in the database. """ IS_SORT: ClassVar[BooleanField] = BooleanField("isSort", "isSort") """ - TBC + Whether this column is a sort column (true) or not (false). 
""" IS_DIST: ClassVar[BooleanField] = BooleanField("isDist", "isDist") """ - TBC + Whether this column is a distribution column (true) or not (false). """ IS_PINNED: ClassVar[BooleanField] = BooleanField("isPinned", "isPinned") """ - TBC + Whether this column is pinned (true) or not (false). """ PINNED_BY: ClassVar[KeywordField] = KeywordField("pinnedBy", "pinnedBy") """ - TBC + User who pinned this column. """ PINNED_AT: ClassVar[NumericField] = NumericField("pinnedAt", "pinnedAt") """ - TBC + Time (epoch) at which this column was pinned, in milliseconds. """ PRECISION: ClassVar[NumericField] = NumericField("precision", "precision") """ - Total number of digits allowed + Total number of digits allowed, when the dataType is numeric. """ DEFAULT_VALUE: ClassVar[KeywordField] = KeywordField("defaultValue", "defaultValue") """ - TBC + Default value for this column. """ IS_NULLABLE: ClassVar[BooleanField] = BooleanField("isNullable", "isNullable") """ - TBC + When true, the values in this column can be null. """ NUMERIC_SCALE: ClassVar[NumericField] = NumericField("numericScale", "numericScale") """ - TBC + Number of digits allowed to the right of the decimal point. """ MAX_LENGTH: ClassVar[NumericField] = NumericField("maxLength", "maxLength") """ - TBC + Maximum length of a value in this column. """ VALIDATIONS: ClassVar[KeywordField] = KeywordField("validations", "validations") """ - TBC + Validations for this column. """ PARENT_COLUMN_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( "parentColumnQualifiedName", @@ -9589,155 +9589,155 @@ def __setattr__(self, name, value): "parentColumnQualifiedName.text", ) """ - TBC + Unique name of the column this column is nested within, for STRUCT and NESTED columns. """ PARENT_COLUMN_NAME: ClassVar[KeywordTextField] = KeywordTextField( "parentColumnName", "parentColumnName.keyword", "parentColumnName" ) """ - TBC + Simple name of the column this column is nested within, for STRUCT and NESTED columns. """ COLUMN_DISTINCT_VALUES_COUNT: ClassVar[NumericField] = NumericField( "columnDistinctValuesCount", "columnDistinctValuesCount" ) """ - TBC + Number of rows that contain distinct values. """ COLUMN_DISTINCT_VALUES_COUNT_LONG: ClassVar[NumericField] = NumericField( "columnDistinctValuesCountLong", "columnDistinctValuesCountLong" ) """ - TBC + Number of rows that contain distinct values. """ COLUMN_HISTOGRAM: ClassVar[KeywordField] = KeywordField( "columnHistogram", "columnHistogram" ) """ - TBC + List of values in a histogram that represents the contents of this column. """ COLUMN_MAX: ClassVar[NumericField] = NumericField("columnMax", "columnMax") """ - TBC + Greatest value in a numeric column. """ COLUMN_MIN: ClassVar[NumericField] = NumericField("columnMin", "columnMin") """ - TBC + Least value in a numeric column. """ COLUMN_MEAN: ClassVar[NumericField] = NumericField("columnMean", "columnMean") """ - TBC + Arithmetic mean of the values in a numeric column. """ COLUMN_SUM: ClassVar[NumericField] = NumericField("columnSum", "columnSum") """ - TBC + Calculated sum of the values in a numeric column. """ COLUMN_MEDIAN: ClassVar[NumericField] = NumericField("columnMedian", "columnMedian") """ - TBC + Calculated median of the values in a numeric column. """ COLUMN_STANDARD_DEVIATION: ClassVar[NumericField] = NumericField( "columnStandardDeviation", "columnStandardDeviation" ) """ - TBC + Calculated standard deviation of the values in a numeric column. 
""" COLUMN_UNIQUE_VALUES_COUNT: ClassVar[NumericField] = NumericField( "columnUniqueValuesCount", "columnUniqueValuesCount" ) """ - TBC + Number of rows in which a value in this column appears only once. """ COLUMN_UNIQUE_VALUES_COUNT_LONG: ClassVar[NumericField] = NumericField( "columnUniqueValuesCountLong", "columnUniqueValuesCountLong" ) """ - TBC + Number of rows in which a value in this column appears only once. """ COLUMN_AVERAGE: ClassVar[NumericField] = NumericField( "columnAverage", "columnAverage" ) """ - TBC + Average value in this column. """ COLUMN_AVERAGE_LENGTH: ClassVar[NumericField] = NumericField( "columnAverageLength", "columnAverageLength" ) """ - TBC + Average length of values in a string column. """ COLUMN_DUPLICATE_VALUES_COUNT: ClassVar[NumericField] = NumericField( "columnDuplicateValuesCount", "columnDuplicateValuesCount" ) """ - TBC + Number of rows that contain duplicate values. """ COLUMN_DUPLICATE_VALUES_COUNT_LONG: ClassVar[NumericField] = NumericField( "columnDuplicateValuesCountLong", "columnDuplicateValuesCountLong" ) """ - TBC + Number of rows that contain duplicate values. """ COLUMN_MAXIMUM_STRING_LENGTH: ClassVar[NumericField] = NumericField( "columnMaximumStringLength", "columnMaximumStringLength" ) """ - TBC + Length of the longest value in a string column. """ COLUMN_MAXS: ClassVar[KeywordField] = KeywordField("columnMaxs", "columnMaxs") """ - TBC + List of the greatest values in a column. """ COLUMN_MINIMUM_STRING_LENGTH: ClassVar[NumericField] = NumericField( "columnMinimumStringLength", "columnMinimumStringLength" ) """ - TBC + Length of the shortest value in a string column. """ COLUMN_MINS: ClassVar[KeywordField] = KeywordField("columnMins", "columnMins") """ - TBC + List of the least values in a column. """ COLUMN_MISSING_VALUES_COUNT: ClassVar[NumericField] = NumericField( "columnMissingValuesCount", "columnMissingValuesCount" ) """ - TBC + Number of rows in a column that do not contain content. """ COLUMN_MISSING_VALUES_COUNT_LONG: ClassVar[NumericField] = NumericField( "columnMissingValuesCountLong", "columnMissingValuesCountLong" ) """ - TBC + Number of rows in a column that do not contain content. """ COLUMN_MISSING_VALUES_PERCENTAGE: ClassVar[NumericField] = NumericField( "columnMissingValuesPercentage", "columnMissingValuesPercentage" ) """ - TBC + Percentage of rows in a column that do not contain content. """ COLUMN_UNIQUENESS_PERCENTAGE: ClassVar[NumericField] = NumericField( "columnUniquenessPercentage", "columnUniquenessPercentage" ) """ - TBC - """ + Ratio indicating how unique data in this column is: 0 indicates that all values are the same, 100 indicates that all values in this column are unique. + """ # noqa: E501 COLUMN_VARIANCE: ClassVar[NumericField] = NumericField( "columnVariance", "columnVariance" ) """ - TBC + Calculated variance of the values in a numeric column. """ COLUMN_TOP_VALUES: ClassVar[KeywordField] = KeywordField( "columnTopValues", "columnTopValues" ) """ - TBC + List of top values in this column. """ COLUMN_DEPTH_LEVEL: ClassVar[NumericField] = NumericField( "columnDepthLevel", "columnDepthLevel" ) """ - Level of nesting, used for STRUCT/NESTED columns + Level of nesting of this column, used for STRUCT and NESTED columns. """ SNOWFLAKE_DYNAMIC_TABLE: ClassVar[RelationField] = RelationField( @@ -10859,31 +10859,31 @@ def __setattr__(self, name, value): "snowflakeStreamType", "snowflakeStreamType" ) """ - TBC + Type of this stream, for example: standard, append-only, insert-only, etc. 
""" SNOWFLAKE_STREAM_SOURCE_TYPE: ClassVar[KeywordField] = KeywordField( "snowflakeStreamSourceType", "snowflakeStreamSourceType" ) """ - TBC + Type of the source of this stream. """ SNOWFLAKE_STREAM_MODE: ClassVar[KeywordField] = KeywordField( "snowflakeStreamMode", "snowflakeStreamMode" ) """ - TBC + Mode of this stream. """ SNOWFLAKE_STREAM_IS_STALE: ClassVar[BooleanField] = BooleanField( "snowflakeStreamIsStale", "snowflakeStreamIsStale" ) """ - TBC + Whether this stream is stale (true) or not (false). """ SNOWFLAKE_STREAM_STALE_AFTER: ClassVar[NumericField] = NumericField( "snowflakeStreamStaleAfter", "snowflakeStreamStaleAfter" ) """ - TBC + Time (epoch) after which this stream will be stale, in milliseconds. """ ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") @@ -11023,7 +11023,7 @@ def __setattr__(self, name, value): DEFINITION: ClassVar[KeywordField] = KeywordField("definition", "definition") """ - TBC + SQL definition of the procedure. """ ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") @@ -11087,105 +11087,105 @@ def __setattr__(self, name, value): TAG_ID: ClassVar[KeywordField] = KeywordField("tagId", "tagId") """ - Unique source tag identifier + Unique identifier of the tag in the source system. """ TAG_ATTRIBUTES: ClassVar[KeywordField] = KeywordField( "tagAttributes", "tagAttributes" ) """ - Source tag attributes + Attributes associated with the tag in the source system. """ TAG_ALLOWED_VALUES: ClassVar[KeywordTextField] = KeywordTextField( "tagAllowedValues", "tagAllowedValues", "tagAllowedValues.text" ) """ - Allowed values for the tag at source. De-normalised from sourceTagAttributed for ease of querying + Allowed values for the tag in the source system. These are denormalized from tagAttributes for ease of querying. """ MAPPED_CLASSIFICATION_NAME: ClassVar[KeywordField] = KeywordField( "mappedClassificationName", "mappedClassificationName" ) """ - Mapped atlan classification name + Name of the classification in Atlan that is mapped to this tag. """ QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") """ - TBC + Number of times this asset has been queried. """ QUERY_USER_COUNT: ClassVar[NumericField] = NumericField( "queryUserCount", "queryUserCount" ) """ - TBC + Number of unique users who have queried this asset. """ QUERY_USER_MAP: ClassVar[KeywordField] = KeywordField( "queryUserMap", "queryUserMap" ) """ - TBC + Map of unique users who have queried this asset to the number of times they have queried it. """ QUERY_COUNT_UPDATED_AT: ClassVar[NumericField] = NumericField( "queryCountUpdatedAt", "queryCountUpdatedAt" ) """ - TBC + Time (epoch) at which the query count was last updated, in milliseconds. """ DATABASE_NAME: ClassVar[KeywordTextField] = KeywordTextField( "databaseName", "databaseName.keyword", "databaseName" ) """ - TBC + Simple name of the database in which this SQL asset exists, or empty if it does not exist within a database. """ DATABASE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( "databaseQualifiedName", "databaseQualifiedName" ) """ - TBC + Unique name of the database in which this SQL asset exists, or empty if it does not exist within a database. """ SCHEMA_NAME: ClassVar[KeywordTextField] = KeywordTextField( "schemaName", "schemaName.keyword", "schemaName" ) """ - TBC + Simple name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. 
""" SCHEMA_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( "schemaQualifiedName", "schemaQualifiedName" ) """ - TBC + Unique name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. """ TABLE_NAME: ClassVar[KeywordTextField] = KeywordTextField( "tableName", "tableName.keyword", "tableName" ) """ - TBC + Simple name of the table in which this SQL asset exists, or empty if it does not exist within a table. """ TABLE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( "tableQualifiedName", "tableQualifiedName" ) """ - TBC + Unique name of the table in which this SQL asset exists, or empty if it does not exist within a table. """ VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( "viewName", "viewName.keyword", "viewName" ) """ - TBC + Simple name of the view in which this SQL asset exists, or empty if it does not exist within a view. """ VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( "viewQualifiedName", "viewQualifiedName" ) """ - TBC + Unique name of the view in which this SQL asset exists, or empty if it does not exist within a view. """ IS_PROFILED: ClassVar[BooleanField] = BooleanField("isProfiled", "isProfiled") """ - TBC + Whether this asset has been profiled (true) or not (false). """ LAST_PROFILED_AT: ClassVar[NumericField] = NumericField( "lastProfiledAt", "lastProfiledAt" ) """ - TBC + Time (epoch) at which this asset was last profiled, in milliseconds. """ DBT_SOURCES: ClassVar[RelationField] = RelationField("dbtSources") @@ -11575,8 +11575,8 @@ def __setattr__(self, name, value): "matillionVersion", "matillionVersion" ) """ - This designates the current point in time state of a project. We can think it to be branch or version control in github - """ # noqa: E501 + Current point in time state of a project. + """ _convenience_properties: ClassVar[list[str]] = [ "matillion_version", @@ -11624,7 +11624,7 @@ def __setattr__(self, name, value): "matillionProjectCount", "matillionProjectCount" ) """ - Count of the number of matillion projects under a matillion group + Number of projects within the group. """ MATILLION_PROJECTS: ClassVar[RelationField] = RelationField("matillionProjects") @@ -11694,31 +11694,31 @@ def __setattr__(self, name, value): "matillionJobType", "matillionJobType" ) """ - The type of a job. There are two kinds of jobs in matillion - orchestration and transformation + Type of the job, for example: orchestration or transformation. """ MATILLION_JOB_PATH: ClassVar[KeywordTextField] = KeywordTextField( "matillionJobPath", "matillionJobPath", "matillionJobPath.text" ) """ - The hierarchy path of a job under a matillion project. Jobs can be managed at multiple folder levels under a matillion project - """ # noqa: E501 + Path of the job within the project. Jobs can be managed at multiple folder levels within a project. + """ MATILLION_JOB_COMPONENT_COUNT: ClassVar[NumericField] = NumericField( "matillionJobComponentCount", "matillionJobComponentCount" ) """ - The count of components under a specific matillion job + Number of components within the job. """ MATILLION_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( "matillionJobSchedule", "matillionJobSchedule" ) """ - Specifies whether a matillion job is scheduled weekly or monthly + How the job is scheduled, for example: weekly or monthly. 
""" MATILLION_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( "matillionProjectName", "matillionProjectName.keyword", "matillionProjectName" ) """ - Name of the matillion project to which the matillion job belongs + Simple name of the project to which the job belongs. """ MATILLION_PROJECT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( "matillionProjectQualifiedName", @@ -11726,7 +11726,7 @@ def __setattr__(self, name, value): "matillionProjectQualifiedName.text", ) """ - Qualified name of the matillion project to which the matillion job belongs + Unique name of the project to which the job belongs. """ MATILLION_PROJECT: ClassVar[RelationField] = RelationField("matillionProject") @@ -11902,25 +11902,25 @@ def __setattr__(self, name, value): "matillionVersions", "matillionVersions" ) """ - List of versions under a matillion project + List of versions in the project. """ MATILLION_ENVIRONMENTS: ClassVar[KeywordField] = KeywordField( "matillionEnvironments", "matillionEnvironments" ) """ - List of environments under a matillion project + List of environments in the project. """ MATILLION_PROJECT_JOB_COUNT: ClassVar[NumericField] = NumericField( "matillionProjectJobCount", "matillionProjectJobCount" ) """ - Count of jobs under a matillion project + Number of jobs in the project. """ MATILLION_GROUP_NAME: ClassVar[KeywordTextField] = KeywordTextField( "matillionGroupName", "matillionGroupName.keyword", "matillionGroupName" ) """ - Name of the matillion group to which the matillion project belongs + Simple name of the Matillion group to which the project belongs. """ MATILLION_GROUP_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( "matillionGroupQualifiedName", @@ -11928,7 +11928,7 @@ def __setattr__(self, name, value): "matillionGroupQualifiedName.text", ) """ - Qualified name of the matillion group to which the matillion project belongs + Unique name of the Matillion group to which the project belongs. """ MATILLION_JOBS: ClassVar[RelationField] = RelationField("matillionJobs") @@ -12082,43 +12082,43 @@ def __setattr__(self, name, value): "matillionComponentId", "matillionComponentId" ) """ - Unique id of a matillion component + Unique identifier of the component in Matillion. """ MATILLION_COMPONENT_IMPLEMENTATION_ID: ClassVar[KeywordField] = KeywordField( "matillionComponentImplementationId", "matillionComponentImplementationId" ) """ - Unique id which represents the type of a component in matillion + Unique identifier for the type of the component in Matillion. """ MATILLION_COMPONENT_LINKED_JOB: ClassVar[KeywordField] = KeywordField( "matillionComponentLinkedJob", "matillionComponentLinkedJob" ) """ - Job details of the matillion job to which the matillion component internally links to + Job details of the job to which the component internally links. """ MATILLION_COMPONENT_LAST_RUN_STATUS: ClassVar[KeywordField] = KeywordField( "matillionComponentLastRunStatus", "matillionComponentLastRunStatus" ) """ - The latest run status of a matillion component under a matillion job + Latest run status of the component within a job. """ MATILLION_COMPONENT_LAST_FIVE_RUN_STATUS: ClassVar[KeywordField] = KeywordField( "matillionComponentLastFiveRunStatus", "matillionComponentLastFiveRunStatus" ) """ - The last five run status of a matillion component under a matillion job + Last five run statuses of the component within a job. 
""" MATILLION_COMPONENT_SQLS: ClassVar[KeywordField] = KeywordField( "matillionComponentSqls", "matillionComponentSqls" ) """ - SQL Query involved with a matillion component + SQL queries used by the component. """ MATILLION_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( "matillionJobName", "matillionJobName.keyword", "matillionJobName" ) """ - Name of the matillion job to which the matillion component belongs + Simple name of the job to which the component belongs. """ MATILLION_JOB_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( "matillionJobQualifiedName", @@ -12126,7 +12126,7 @@ def __setattr__(self, name, value): "matillionJobQualifiedName.text", ) """ - Qualified name of the matillion job to which the matillion component belongs + Unique name of the job to which the component belongs. """ MATILLION_PROCESS: ClassVar[RelationField] = RelationField("matillionProcess") @@ -12350,53 +12350,53 @@ def __setattr__(self, name, value): "dbtAlias", "dbtAlias.keyword", "dbtAlias" ) """ - TBC + """ DBT_META: ClassVar[KeywordField] = KeywordField("dbtMeta", "dbtMeta") """ - TBC + """ DBT_UNIQUE_ID: ClassVar[KeywordTextField] = KeywordTextField( "dbtUniqueId", "dbtUniqueId.keyword", "dbtUniqueId" ) """ - TBC + """ DBT_ACCOUNT_NAME: ClassVar[KeywordTextField] = KeywordTextField( "dbtAccountName", "dbtAccountName.keyword", "dbtAccountName" ) """ - TBC + """ DBT_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( "dbtProjectName", "dbtProjectName.keyword", "dbtProjectName" ) """ - TBC + """ DBT_PACKAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( "dbtPackageName", "dbtPackageName.keyword", "dbtPackageName" ) """ - TBC + """ DBT_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( "dbtJobName", "dbtJobName.keyword", "dbtJobName" ) """ - TBC + """ DBT_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( "dbtJobSchedule", "dbtJobSchedule" ) """ - TBC + """ DBT_JOB_STATUS: ClassVar[KeywordField] = KeywordField( "dbtJobStatus", "dbtJobStatus" ) """ - TBC + """ DBT_JOB_SCHEDULE_CRON_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( "dbtJobScheduleCronHumanized", @@ -12404,19 +12404,19 @@ def __setattr__(self, name, value): "dbtJobScheduleCronHumanized", ) """ - TBC + """ DBT_JOB_LAST_RUN: ClassVar[NumericField] = NumericField( "dbtJobLastRun", "dbtJobLastRun" ) """ - TBC + """ DBT_JOB_NEXT_RUN: ClassVar[NumericField] = NumericField( "dbtJobNextRun", "dbtJobNextRun" ) """ - TBC + """ DBT_JOB_NEXT_RUN_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( "dbtJobNextRunHumanized", @@ -12424,13 +12424,13 @@ def __setattr__(self, name, value): "dbtJobNextRunHumanized", ) """ - TBC + """ DBT_ENVIRONMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( "dbtEnvironmentName", "dbtEnvironmentName.keyword", "dbtEnvironmentName" ) """ - TBC + """ DBT_ENVIRONMENT_DBT_VERSION: ClassVar[KeywordTextField] = KeywordTextField( "dbtEnvironmentDbtVersion", @@ -12438,23 +12438,23 @@ def __setattr__(self, name, value): "dbtEnvironmentDbtVersion", ) """ - TBC + """ DBT_TAGS: ClassVar[KeywordField] = KeywordField("dbtTags", "dbtTags") """ - TBC + """ DBT_CONNECTION_CONTEXT: ClassVar[KeywordField] = KeywordField( "dbtConnectionContext", "dbtConnectionContext" ) """ - TBC + """ DBT_SEMANTIC_LAYER_PROXY_URL: ClassVar[KeywordField] = KeywordField( "dbtSemanticLayerProxyUrl", "dbtSemanticLayerProxyUrl" ) """ - TBC + """ _convenience_properties: ClassVar[list[str]] = [ @@ -12753,19 +12753,19 @@ def __setattr__(self, name, value): "dbtModelQualifiedName", 
"dbtModelQualifiedName", "dbtModelQualifiedName.text" ) """ - TBC + """ DBT_MODEL_COLUMN_DATA_TYPE: ClassVar[KeywordField] = KeywordField( "dbtModelColumnDataType", "dbtModelColumnDataType" ) """ - TBC + """ DBT_MODEL_COLUMN_ORDER: ClassVar[NumericField] = NumericField( "dbtModelColumnOrder", "dbtModelColumnOrder" ) """ - TBC + """ SQL_COLUMN: ClassVar[RelationField] = RelationField("sqlColumn") @@ -12933,49 +12933,49 @@ def __setattr__(self, name, value): "dbtTestStatus", "dbtTestStatus" ) """ - Status provides the details of the results of a test. For errors, it reads "ERROR". + Details of the results of the test. For errors, it reads "ERROR". """ DBT_TEST_STATE: ClassVar[KeywordField] = KeywordField( "dbtTestState", "dbtTestState" ) """ - The test results. Can be one of, in order of severity, "error", "fail", "warn", "pass" + Test results. Can be one of, in order of severity, "error", "fail", "warn", "pass". """ DBT_TEST_ERROR: ClassVar[KeywordField] = KeywordField( "dbtTestError", "dbtTestError" ) """ - The error message in the case of state being "error" + Error message in the case of state being "error". """ DBT_TEST_RAW_SQL: ClassVar[KeywordTextField] = KeywordTextField( "dbtTestRawSQL", "dbtTestRawSQL", "dbtTestRawSQL.text" ) """ - The raw sql of a test + Raw SQL of the test. """ DBT_TEST_COMPILED_SQL: ClassVar[KeywordField] = KeywordField( "dbtTestCompiledSQL", "dbtTestCompiledSQL" ) """ - The compiled sql of a test + Compiled SQL of the test. """ DBT_TEST_RAW_CODE: ClassVar[KeywordTextField] = KeywordTextField( "dbtTestRawCode", "dbtTestRawCode", "dbtTestRawCode.text" ) """ - The raw code of a test ( tests in dbt can be defined using python ) + Raw code of the test (when the test is defined using Python). """ DBT_TEST_COMPILED_CODE: ClassVar[KeywordField] = KeywordField( "dbtTestCompiledCode", "dbtTestCompiledCode" ) """ - The compiled code of a test ( tests in dbt can be defined using python ) + Compiled code of the test (when the test is defined using Python). """ DBT_TEST_LANGUAGE: ClassVar[KeywordField] = KeywordField( "dbtTestLanguage", "dbtTestLanguage" ) """ - The language in which a dbt test is written. Example: sql,python + Language in which the test is written, for example: SQL or Python. 
""" DBT_SOURCES: ClassVar[RelationField] = RelationField("dbtSources") @@ -13197,73 +13197,73 @@ def __setattr__(self, name, value): DBT_STATUS: ClassVar[KeywordField] = KeywordField("dbtStatus", "dbtStatus") """ - TBC + """ DBT_ERROR: ClassVar[KeywordField] = KeywordField("dbtError", "dbtError") """ - TBC + """ DBT_RAW_SQL: ClassVar[KeywordField] = KeywordField("dbtRawSQL", "dbtRawSQL") """ - TBC + """ DBT_COMPILED_SQL: ClassVar[KeywordField] = KeywordField( "dbtCompiledSQL", "dbtCompiledSQL" ) """ - TBC + """ DBT_STATS: ClassVar[KeywordField] = KeywordField("dbtStats", "dbtStats") """ - TBC + """ DBT_MATERIALIZATION_TYPE: ClassVar[KeywordField] = KeywordField( "dbtMaterializationType", "dbtMaterializationType" ) """ - TBC + """ DBT_MODEL_COMPILE_STARTED_AT: ClassVar[NumericField] = NumericField( "dbtModelCompileStartedAt", "dbtModelCompileStartedAt" ) """ - TBC + """ DBT_MODEL_COMPILE_COMPLETED_AT: ClassVar[NumericField] = NumericField( "dbtModelCompileCompletedAt", "dbtModelCompileCompletedAt" ) """ - TBC + """ DBT_MODEL_EXECUTE_STARTED_AT: ClassVar[NumericField] = NumericField( "dbtModelExecuteStartedAt", "dbtModelExecuteStartedAt" ) """ - TBC + """ DBT_MODEL_EXECUTE_COMPLETED_AT: ClassVar[NumericField] = NumericField( "dbtModelExecuteCompletedAt", "dbtModelExecuteCompletedAt" ) """ - TBC + """ DBT_MODEL_EXECUTION_TIME: ClassVar[NumericField] = NumericField( "dbtModelExecutionTime", "dbtModelExecutionTime" ) """ - TBC + """ DBT_MODEL_RUN_GENERATED_AT: ClassVar[NumericField] = NumericField( "dbtModelRunGeneratedAt", "dbtModelRunGeneratedAt" ) """ - TBC + """ DBT_MODEL_RUN_ELAPSED_TIME: ClassVar[NumericField] = NumericField( "dbtModelRunElapsedTime", "dbtModelRunElapsedTime" ) """ - TBC + """ DBT_METRICS: ClassVar[RelationField] = RelationField("dbtMetrics") @@ -13605,59 +13605,59 @@ def __setattr__(self, name, value): "dbtMetricFilters", "dbtMetricFilters" ) """ - TBC + """ DBT_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( "dbtAlias", "dbtAlias.keyword", "dbtAlias" ) """ - TBC + """ DBT_META: ClassVar[KeywordField] = KeywordField("dbtMeta", "dbtMeta") """ - TBC + """ DBT_UNIQUE_ID: ClassVar[KeywordTextField] = KeywordTextField( "dbtUniqueId", "dbtUniqueId.keyword", "dbtUniqueId" ) """ - TBC + """ DBT_ACCOUNT_NAME: ClassVar[KeywordTextField] = KeywordTextField( "dbtAccountName", "dbtAccountName.keyword", "dbtAccountName" ) """ - TBC + """ DBT_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( "dbtProjectName", "dbtProjectName.keyword", "dbtProjectName" ) """ - TBC + """ DBT_PACKAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( "dbtPackageName", "dbtPackageName.keyword", "dbtPackageName" ) """ - TBC + """ DBT_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( "dbtJobName", "dbtJobName.keyword", "dbtJobName" ) """ - TBC + """ DBT_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( "dbtJobSchedule", "dbtJobSchedule" ) """ - TBC + """ DBT_JOB_STATUS: ClassVar[KeywordField] = KeywordField( "dbtJobStatus", "dbtJobStatus" ) """ - TBC + """ DBT_JOB_SCHEDULE_CRON_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( "dbtJobScheduleCronHumanized", @@ -13665,19 +13665,19 @@ def __setattr__(self, name, value): "dbtJobScheduleCronHumanized", ) """ - TBC + """ DBT_JOB_LAST_RUN: ClassVar[NumericField] = NumericField( "dbtJobLastRun", "dbtJobLastRun" ) """ - TBC + """ DBT_JOB_NEXT_RUN: ClassVar[NumericField] = NumericField( "dbtJobNextRun", "dbtJobNextRun" ) """ - TBC + """ DBT_JOB_NEXT_RUN_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( 
"dbtJobNextRunHumanized", @@ -13685,13 +13685,13 @@ def __setattr__(self, name, value): "dbtJobNextRunHumanized", ) """ - TBC + """ DBT_ENVIRONMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( "dbtEnvironmentName", "dbtEnvironmentName.keyword", "dbtEnvironmentName" ) """ - TBC + """ DBT_ENVIRONMENT_DBT_VERSION: ClassVar[KeywordTextField] = KeywordTextField( "dbtEnvironmentDbtVersion", @@ -13699,41 +13699,41 @@ def __setattr__(self, name, value): "dbtEnvironmentDbtVersion", ) """ - TBC + """ DBT_TAGS: ClassVar[KeywordField] = KeywordField("dbtTags", "dbtTags") """ - TBC + """ DBT_CONNECTION_CONTEXT: ClassVar[KeywordField] = KeywordField( "dbtConnectionContext", "dbtConnectionContext" ) """ - TBC + """ DBT_SEMANTIC_LAYER_PROXY_URL: ClassVar[KeywordField] = KeywordField( "dbtSemanticLayerProxyUrl", "dbtSemanticLayerProxyUrl" ) """ - TBC + """ METRIC_TYPE: ClassVar[KeywordField] = KeywordField("metricType", "metricType") """ - TBC + Type of the metric. """ METRIC_SQL: ClassVar[KeywordField] = KeywordField("metricSQL", "metricSQL") """ - TBC + SQL query used to compute the metric. """ METRIC_FILTERS: ClassVar[TextField] = TextField("metricFilters", "metricFilters") """ - TBC + Filters to be applied to the metric query. """ METRIC_TIME_GRAINS: ClassVar[TextField] = TextField( "metricTimeGrains", "metricTimeGrains" ) """ - TBC + List of time grains to be applied to the metric query. """ METRIC_TIMESTAMP_COLUMN: ClassVar[RelationField] = RelationField( @@ -14207,13 +14207,13 @@ def __setattr__(self, name, value): DBT_STATE: ClassVar[KeywordField] = KeywordField("dbtState", "dbtState") """ - TBC + """ DBT_FRESHNESS_CRITERIA: ClassVar[KeywordField] = KeywordField( "dbtFreshnessCriteria", "dbtFreshnessCriteria" ) """ - TBC + """ SQL_ASSETS: ClassVar[RelationField] = RelationField("sqlAssets") @@ -14331,13 +14331,13 @@ def __setattr__(self, name, value): "schemaRegistrySchemaType", "schemaRegistrySchemaType" ) """ - Type of language/specification used to define the schema like JSON, Protobuf etc. + Type of language or specification used to define the schema, for example: JSON, Protobuf, etc. """ SCHEMA_REGISTRY_SCHEMA_ID: ClassVar[KeywordField] = KeywordField( "schemaRegistrySchemaId", "schemaRegistrySchemaId" ) """ - Unique identifier for schema definition set by the schema registry + Unique identifier for schema definition set by the schema registry. """ _convenience_properties: ClassVar[list[str]] = [ @@ -14410,13 +14410,13 @@ def __setattr__(self, name, value): "schemaRegistrySubjectBaseName", "schemaRegistrySubjectBaseName" ) """ - Base name of the subject (i.e. without -key, -value prefixes) + Base name of the subject, without -key, -value prefixes. """ SCHEMA_REGISTRY_SUBJECT_IS_KEY_SCHEMA: ClassVar[BooleanField] = BooleanField( "schemaRegistrySubjectIsKeySchema", "schemaRegistrySubjectIsKeySchema" ) """ - If the subject is a schema for the keys of the messages. + Whether the subject is a schema for the keys of the messages (true) or not (false). """ SCHEMA_REGISTRY_SUBJECT_SCHEMA_COMPATIBILITY: ClassVar[KeywordField] = KeywordField( "schemaRegistrySubjectSchemaCompatibility", @@ -14646,13 +14646,13 @@ def __setattr__(self, name, value): MC_LABELS: ClassVar[KeywordField] = KeywordField("mcLabels", "mcLabels") """ - TBC + List of labels for this Monte Carlo asset. """ MC_ASSET_QUALIFIED_NAMES: ClassVar[KeywordField] = KeywordField( "mcAssetQualifiedNames", "mcAssetQualifiedNames" ) """ - TBC + List of unique names of assets that are part of this Monte Carlo asset. 
""" _convenience_properties: ClassVar[list[str]] = [ @@ -14717,37 +14717,37 @@ def __setattr__(self, name, value): "mcIncidentId", "mcIncidentId" ) """ - TBC + Identifier of this incident, from Monte Carlo. """ MC_INCIDENT_TYPE: ClassVar[KeywordField] = KeywordField( "mcIncidentType", "mcIncidentType" ) """ - TBC + Type of this incident. """ MC_INCIDENT_SUB_TYPES: ClassVar[KeywordField] = KeywordField( "mcIncidentSubTypes", "mcIncidentSubTypes" ) """ - TBC + Subtypes of this incident. """ MC_INCIDENT_SEVERITY: ClassVar[KeywordField] = KeywordField( "mcIncidentSeverity", "mcIncidentSeverity" ) """ - TBC + Severity of this incident. """ MC_INCIDENT_STATE: ClassVar[KeywordField] = KeywordField( "mcIncidentState", "mcIncidentState" ) """ - TBC + State of this incident. """ MC_INCIDENT_WAREHOUSE: ClassVar[KeywordField] = KeywordField( "mcIncidentWarehouse", "mcIncidentWarehouse" ) """ - Incident warehouse name + Name of this incident's warehouse. """ MC_MONITOR: ClassVar[RelationField] = RelationField("mcMonitor") @@ -14905,103 +14905,103 @@ def __setattr__(self, name, value): MC_MONITOR_ID: ClassVar[KeywordField] = KeywordField("mcMonitorId", "mcMonitorId") """ - Monitor Id + Unique identifier for this monitor, from Monte Carlo. """ MC_MONITOR_STATUS: ClassVar[KeywordField] = KeywordField( "mcMonitorStatus", "mcMonitorStatus" ) """ - Monitor status + Status of this monitor. """ MC_MONITOR_TYPE: ClassVar[KeywordField] = KeywordField( "mcMonitorType", "mcMonitorType" ) """ - Monitor type + Type of this monitor, for example: field health (stats) or dimension tracking (categories). """ MC_MONITOR_WAREHOUSE: ClassVar[KeywordField] = KeywordField( "mcMonitorWarehouse", "mcMonitorWarehouse" ) """ - Monitor warehouse name + Name of the warehouse for this monitor. """ MC_MONITOR_SCHEDULE_TYPE: ClassVar[KeywordField] = KeywordField( "mcMonitorScheduleType", "mcMonitorScheduleType" ) """ - Monitor schedule type + Type of schedule for this monitor, for example: fixed or dynamic. """ MC_MONITOR_NAMESPACE: ClassVar[KeywordTextField] = KeywordTextField( "mcMonitorNamespace", "mcMonitorNamespace.keyword", "mcMonitorNamespace" ) """ - Monitor namespace + Namespace of this monitor. """ MC_MONITOR_RULE_TYPE: ClassVar[KeywordField] = KeywordField( "mcMonitorRuleType", "mcMonitorRuleType" ) """ - TBC + Type of rule for this monitor. """ MC_MONITOR_RULE_CUSTOM_SQL: ClassVar[KeywordField] = KeywordField( "mcMonitorRuleCustomSql", "mcMonitorRuleCustomSql" ) """ - custom sql query + SQL code for custom SQL rules. """ MC_MONITOR_RULE_SCHEDULE_CONFIG: ClassVar[KeywordField] = KeywordField( "mcMonitorRuleScheduleConfig", "mcMonitorRuleScheduleConfig" ) """ - TBC + Schedule details for the rule. """ MC_MONITOR_RULE_SCHEDULE_CONFIG_HUMANIZED: ClassVar[TextField] = TextField( "mcMonitorRuleScheduleConfigHumanized", "mcMonitorRuleScheduleConfigHumanized" ) """ - TBC + Readable description of the schedule for the rule. """ MC_MONITOR_ALERT_CONDITION: ClassVar[TextField] = TextField( "mcMonitorAlertCondition", "mcMonitorAlertCondition" ) """ - TBC + Condition on which the monitor produces an alert. """ MC_MONITOR_RULE_NEXT_EXECUTION_TIME: ClassVar[NumericField] = NumericField( "mcMonitorRuleNextExecutionTime", "mcMonitorRuleNextExecutionTime" ) """ - TBC + Time at which the next execution of the rule should occur. 
""" MC_MONITOR_RULE_PREVIOUS_EXECUTION_TIME: ClassVar[NumericField] = NumericField( "mcMonitorRulePreviousExecutionTime", "mcMonitorRulePreviousExecutionTime" ) """ - TBC + Time at which the previous execution of the rule occurred. """ MC_MONITOR_RULE_COMPARISONS: ClassVar[KeywordField] = KeywordField( "mcMonitorRuleComparisons", "mcMonitorRuleComparisons" ) """ - TBC + Comparison logic used for the rule. """ MC_MONITOR_RULE_IS_SNOOZED: ClassVar[BooleanField] = BooleanField( "mcMonitorRuleIsSnoozed", "mcMonitorRuleIsSnoozed" ) """ - TBC + Whether the rule is currently snoozed (true) or not (false). """ MC_MONITOR_BREACH_RATE: ClassVar[NumericField] = NumericField( "mcMonitorBreachRate", "mcMonitorBreachRate" ) """ - TBC + Rate at which this monitor is breached. """ MC_MONITOR_INCIDENT_COUNT: ClassVar[NumericField] = NumericField( "mcMonitorIncidentCount", "mcMonitorIncidentCount" ) """ - TBC + Number of incidents associated with this monitor. """ MC_MONITOR_ASSETS: ClassVar[RelationField] = RelationField("mcMonitorAssets") @@ -15368,31 +15368,31 @@ def __setattr__(self, name, value): SODA_CHECK_ID: ClassVar[KeywordField] = KeywordField("sodaCheckId", "sodaCheckId") """ - Check Id + Identifier of the check in Soda. """ SODA_CHECK_EVALUATION_STATUS: ClassVar[KeywordField] = KeywordField( "sodaCheckEvaluationStatus", "sodaCheckEvaluationStatus" ) """ - Check status + Status of the check in Soda. """ SODA_CHECK_DEFINITION: ClassVar[KeywordField] = KeywordField( "sodaCheckDefinition", "sodaCheckDefinition" ) """ - Check definition + Definition of the check in Soda. """ SODA_CHECK_LAST_SCAN_AT: ClassVar[NumericField] = NumericField( "sodaCheckLastScanAt", "sodaCheckLastScanAt" ) """ - TBC + """ SODA_CHECK_INCIDENT_COUNT: ClassVar[NumericField] = NumericField( "sodaCheckIncidentCount", "sodaCheckIncidentCount" ) """ - TBC + """ SODA_CHECK_COLUMNS: ClassVar[RelationField] = RelationField("sodaCheckColumns") @@ -15554,77 +15554,77 @@ def __setattr__(self, name, value): COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") """ - TBC + Number of columns in this table. """ ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") """ - TBC + Number of rows in this table. """ SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") """ - TBC + Size of this table, in bytes. """ ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") """ - TBC + Alias for this table. """ IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") """ - TBC + Whether this table is temporary (true) or not (false). """ IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( "isQueryPreview", "isQueryPreview" ) """ - TBC + Whether preview queries are allowed for this table (true) or not (false). """ QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( "queryPreviewConfig", "queryPreviewConfig" ) """ - TBC + Configuration for preview queries. """ EXTERNAL_LOCATION: ClassVar[KeywordField] = KeywordField( "externalLocation", "externalLocation" ) """ - TBC + External location of this table, for example: an S3 object location. """ EXTERNAL_LOCATION_REGION: ClassVar[KeywordField] = KeywordField( "externalLocationRegion", "externalLocationRegion" ) """ - TBC + Region of the external location of this table, for example: S3 region. 
""" EXTERNAL_LOCATION_FORMAT: ClassVar[KeywordField] = KeywordField( "externalLocationFormat", "externalLocationFormat" ) """ - TBC + Format of the external location of this table, for example: JSON, CSV, PARQUET, etc. """ IS_PARTITIONED: ClassVar[BooleanField] = BooleanField( "isPartitioned", "isPartitioned" ) """ - TBC + Whether this table is partitioned (true) or not (false). """ PARTITION_STRATEGY: ClassVar[KeywordField] = KeywordField( "partitionStrategy", "partitionStrategy" ) """ - TBC + Partition strategy for this table. """ PARTITION_COUNT: ClassVar[NumericField] = NumericField( "partitionCount", "partitionCount" ) """ - TBC + Number of partitions in this table. """ PARTITION_LIST: ClassVar[KeywordField] = KeywordField( "partitionList", "partitionList" ) """ - TBC + List of partitions in this table. """ COLUMNS: ClassVar[RelationField] = RelationField("columns") @@ -15986,7 +15986,7 @@ def __setattr__(self, name, value): DEFINITION: ClassVar[KeywordField] = KeywordField("definition", "definition") """ - SQL statements used to define a Snowflake Dynamic Table + SQL statements used to define the dynamic table. """ _convenience_properties: ClassVar[list[str]] = [ @@ -16053,7 +16053,7 @@ def __setattr__(self, name, value): SCHEMA_COUNT: ClassVar[NumericField] = NumericField("schemaCount", "schemaCount") """ - TBC + Number of schemas in this database. """ SCHEMAS: ClassVar[RelationField] = RelationField("schemas") diff --git a/pyatlan/model/assets/asset02.py b/pyatlan/model/assets/asset02.py index 601d930ce..670fed4fb 100644 --- a/pyatlan/model/assets/asset02.py +++ b/pyatlan/model/assets/asset02.py @@ -4,659 +4,88 @@ from __future__ import annotations -from datetime import datetime from typing import ClassVar, Optional from pydantic import Field, validator -from pyatlan.model.enums import AtlanConnectorType, QueryUsernameStrategy -from pyatlan.model.fields.atlan_fields import BooleanField, KeywordField, NumericField -from pyatlan.utils import init_guid, validate_required_fields +from pyatlan.model.fields.atlan_fields import KeywordTextField from .asset00 import Asset -class Connection(Asset, type_name="Connection"): +class TagAttachment(Asset, type_name="TagAttachment"): """Description""" - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, - *, - name: str, - connector_type: AtlanConnectorType, - admin_users: Optional[list[str]] = None, - admin_groups: Optional[list[str]] = None, - admin_roles: Optional[list[str]] = None, - ) -> Connection: - validate_required_fields(["name", "connector_type"], [name, connector_type]) - if not admin_users and not admin_groups and not admin_roles: - raise ValueError( - "One of admin_user, admin_groups or admin_roles is required" - ) - attr = cls.Attributes( - name=name, - qualified_name=connector_type.to_qualified_name(), - connector_name=connector_type.value, - category=connector_type.category.value, - ) - attr.admin_users = set() if admin_users is None else set(admin_users) - attr.admin_groups = set() if admin_groups is None else set(admin_groups) - attr.admin_roles = set() if admin_roles is None else set(admin_roles) - return cls(attributes=attr) - - type_name: str = Field("Connection", allow_mutation=False) + type_name: str = Field("TagAttachment", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Connection": - raise ValueError("must be Connection") + if v != "TagAttachment": + raise ValueError("must be TagAttachment") return v def __setattr__(self, name, value): - if name in 
Connection._convenience_properties: + if name in TagAttachment._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - CATEGORY: ClassVar[KeywordField] = KeywordField("category", "category") - """ - WAREHOUSE, RDBMS, LAKE, BI - """ - SUB_CATEGORY: ClassVar[KeywordField] = KeywordField("subCategory", "subCategory") - """ - WAREHOUSE, RDBMS, LAKE, BI - """ - HOST: ClassVar[KeywordField] = KeywordField("host", "host") - """ - TBC - """ - PORT: ClassVar[NumericField] = NumericField("port", "port") - """ - TBC - """ - ALLOW_QUERY: ClassVar[BooleanField] = BooleanField("allowQuery", "allowQuery") - """ - TBC - """ - ALLOW_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( - "allowQueryPreview", "allowQueryPreview" - ) - """ - TBC - """ - QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( - "queryPreviewConfig", "queryPreviewConfig" - ) - """ - TBC - """ - QUERY_CONFIG: ClassVar[KeywordField] = KeywordField("queryConfig", "queryConfig") - """ - TBC - """ - CREDENTIAL_STRATEGY: ClassVar[KeywordField] = KeywordField( - "credentialStrategy", "credentialStrategy" - ) - """ - TBC - """ - PREVIEW_CREDENTIAL_STRATEGY: ClassVar[KeywordField] = KeywordField( - "previewCredentialStrategy", "previewCredentialStrategy" - ) - """ - TBC - """ - POLICY_STRATEGY: ClassVar[KeywordField] = KeywordField( - "policyStrategy", "policyStrategy" - ) - """ - TBC - """ - QUERY_USERNAME_STRATEGY: ClassVar[KeywordField] = KeywordField( - "queryUsernameStrategy", "queryUsernameStrategy" - ) - """ - TBC - """ - ROW_LIMIT: ClassVar[NumericField] = NumericField("rowLimit", "rowLimit") - """ - TBC - """ - QUERY_TIMEOUT: ClassVar[NumericField] = NumericField("queryTimeout", "queryTimeout") - """ - TBC - """ - DEFAULT_CREDENTIAL_GUID: ClassVar[KeywordField] = KeywordField( - "defaultCredentialGuid", "defaultCredentialGuid" - ) - """ - TBC - """ - CONNECTOR_ICON: ClassVar[KeywordField] = KeywordField( - "connectorIcon", "connectorIcon" - ) - """ - TBC - """ - CONNECTOR_IMAGE: ClassVar[KeywordField] = KeywordField( - "connectorImage", "connectorImage" - ) - """ - TBC - """ - SOURCE_LOGO: ClassVar[KeywordField] = KeywordField("sourceLogo", "sourceLogo") - """ - TBC - """ - IS_SAMPLE_DATA_PREVIEW_ENABLED: ClassVar[BooleanField] = BooleanField( - "isSampleDataPreviewEnabled", "isSampleDataPreviewEnabled" - ) - """ - TBC - """ - POPULARITY_INSIGHTS_TIMEFRAME: ClassVar[NumericField] = NumericField( - "popularityInsightsTimeframe", "popularityInsightsTimeframe" - ) - """ - Number of days we are calculating popularity for, eg: 30 days - """ - HAS_POPULARITY_INSIGHTS: ClassVar[BooleanField] = BooleanField( - "hasPopularityInsights", "hasPopularityInsights" - ) - """ - Boolean flag to tell if connection has popularity insights or not - """ - CONNECTION_DBT_ENVIRONMENTS: ClassVar[KeywordField] = KeywordField( - "connectionDbtEnvironments", "connectionDbtEnvironments" - ) - """ - TBC - """ - CONNECTION_SSO_CREDENTIAL_GUID: ClassVar[KeywordField] = KeywordField( - "connectionSSOCredentialGuid", "connectionSSOCredentialGuid" - ) - """ - TBC - """ - USE_OBJECT_STORAGE: ClassVar[BooleanField] = BooleanField( - "useObjectStorage", "useObjectStorage" - ) - """ - A Boolean flag indicating whether to upload to S3, GCP, or another storage location - """ - OBJECT_STORAGE_UPLOAD_THRESHOLD: ClassVar[NumericField] = NumericField( - "objectStorageUploadThreshold", "objectStorageUploadThreshold" - ) - """ - A long integer indicating after how many rows heka should start uploading result to storage 
- """ - VECTOR_EMBEDDINGS_ENABLED: ClassVar[BooleanField] = BooleanField( - "vectorEmbeddingsEnabled", "vectorEmbeddingsEnabled" + TAG_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "tagQualifiedName", "tagQualifiedName", "tagQualifiedName.text" ) """ - TBC + Represents associated source tag's qualified name """ - VECTOR_EMBEDDINGS_UPDATED_AT: ClassVar[NumericField] = NumericField( - "vectorEmbeddingsUpdatedAt", "vectorEmbeddingsUpdatedAt" + TAG_ATTACHMENT_STRING_VALUE: ClassVar[KeywordTextField] = KeywordTextField( + "tagAttachmentStringValue", + "tagAttachmentStringValue", + "tagAttachmentStringValue.text", ) """ - TBC + Represents associated tag value """ _convenience_properties: ClassVar[list[str]] = [ - "category", - "sub_category", - "host", - "port", - "allow_query", - "allow_query_preview", - "query_preview_config", - "query_config", - "credential_strategy", - "preview_credential_strategy", - "policy_strategy", - "query_username_strategy", - "row_limit", - "query_timeout", - "default_credential_guid", - "connector_icon", - "connector_image", - "source_logo", - "is_sample_data_preview_enabled", - "popularity_insights_timeframe", - "has_popularity_insights", - "connection_dbt_environments", - "connection_s_s_o_credential_guid", - "use_object_storage", - "object_storage_upload_threshold", - "vector_embeddings_enabled", - "vector_embeddings_updated_at", + "tag_qualified_name", + "tag_attachment_string_value", ] @property - def category(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.category - - @category.setter - def category(self, category: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.category = category - - @property - def sub_category(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.sub_category - - @sub_category.setter - def sub_category(self, sub_category: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sub_category = sub_category - - @property - def host(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.host - - @host.setter - def host(self, host: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.host = host - - @property - def port(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.port - - @port.setter - def port(self, port: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.port = port - - @property - def allow_query(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.allow_query - - @allow_query.setter - def allow_query(self, allow_query: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.allow_query = allow_query - - @property - def allow_query_preview(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.allow_query_preview - - @allow_query_preview.setter - def allow_query_preview(self, allow_query_preview: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.allow_query_preview = allow_query_preview - - @property - def query_preview_config(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.query_preview_config - - @query_preview_config.setter - def query_preview_config(self, 
query_preview_config: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_preview_config = query_preview_config - - @property - def query_config(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.query_config - - @query_config.setter - def query_config(self, query_config: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_config = query_config - - @property - def credential_strategy(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.credential_strategy - - @credential_strategy.setter - def credential_strategy(self, credential_strategy: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.credential_strategy = credential_strategy - - @property - def preview_credential_strategy(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.preview_credential_strategy - ) - - @preview_credential_strategy.setter - def preview_credential_strategy(self, preview_credential_strategy: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preview_credential_strategy = preview_credential_strategy - - @property - def policy_strategy(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.policy_strategy - - @policy_strategy.setter - def policy_strategy(self, policy_strategy: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.policy_strategy = policy_strategy - - @property - def query_username_strategy(self) -> Optional[QueryUsernameStrategy]: - return ( - None if self.attributes is None else self.attributes.query_username_strategy - ) - - @query_username_strategy.setter - def query_username_strategy( - self, query_username_strategy: Optional[QueryUsernameStrategy] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_username_strategy = query_username_strategy - - @property - def row_limit(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.row_limit - - @row_limit.setter - def row_limit(self, row_limit: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.row_limit = row_limit - - @property - def query_timeout(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_timeout - - @query_timeout.setter - def query_timeout(self, query_timeout: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_timeout = query_timeout - - @property - def default_credential_guid(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.default_credential_guid - ) - - @default_credential_guid.setter - def default_credential_guid(self, default_credential_guid: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.default_credential_guid = default_credential_guid - - @property - def connector_icon(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.connector_icon - - @connector_icon.setter - def connector_icon(self, connector_icon: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.connector_icon = connector_icon - - @property - def connector_image(self) 
-> Optional[str]: - return None if self.attributes is None else self.attributes.connector_image - - @connector_image.setter - def connector_image(self, connector_image: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.connector_image = connector_image - - @property - def source_logo(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.source_logo - - @source_logo.setter - def source_logo(self, source_logo: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_logo = source_logo - - @property - def is_sample_data_preview_enabled(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.is_sample_data_preview_enabled - ) - - @is_sample_data_preview_enabled.setter - def is_sample_data_preview_enabled( - self, is_sample_data_preview_enabled: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_sample_data_preview_enabled = is_sample_data_preview_enabled - - @property - def popularity_insights_timeframe(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.popularity_insights_timeframe - ) - - @popularity_insights_timeframe.setter - def popularity_insights_timeframe( - self, popularity_insights_timeframe: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.popularity_insights_timeframe = popularity_insights_timeframe - - @property - def has_popularity_insights(self) -> Optional[bool]: - return ( - None if self.attributes is None else self.attributes.has_popularity_insights - ) - - @has_popularity_insights.setter - def has_popularity_insights(self, has_popularity_insights: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.has_popularity_insights = has_popularity_insights - - @property - def connection_dbt_environments(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.connection_dbt_environments - ) - - @connection_dbt_environments.setter - def connection_dbt_environments( - self, connection_dbt_environments: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.connection_dbt_environments = connection_dbt_environments - - @property - def connection_s_s_o_credential_guid(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.connection_s_s_o_credential_guid - ) - - @connection_s_s_o_credential_guid.setter - def connection_s_s_o_credential_guid( - self, connection_s_s_o_credential_guid: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.connection_s_s_o_credential_guid = ( - connection_s_s_o_credential_guid - ) - - @property - def use_object_storage(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.use_object_storage - - @use_object_storage.setter - def use_object_storage(self, use_object_storage: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.use_object_storage = use_object_storage - - @property - def object_storage_upload_threshold(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.object_storage_upload_threshold - ) + def tag_qualified_name(self) -> Optional[str]: + return None if 
self.attributes is None else self.attributes.tag_qualified_name - @object_storage_upload_threshold.setter - def object_storage_upload_threshold( - self, object_storage_upload_threshold: Optional[int] - ): + @tag_qualified_name.setter + def tag_qualified_name(self, tag_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.object_storage_upload_threshold = ( - object_storage_upload_threshold - ) + self.attributes.tag_qualified_name = tag_qualified_name @property - def vector_embeddings_enabled(self) -> Optional[bool]: + def tag_attachment_string_value(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.vector_embeddings_enabled + else self.attributes.tag_attachment_string_value ) - @vector_embeddings_enabled.setter - def vector_embeddings_enabled(self, vector_embeddings_enabled: Optional[bool]): + @tag_attachment_string_value.setter + def tag_attachment_string_value(self, tag_attachment_string_value: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.vector_embeddings_enabled = vector_embeddings_enabled - - @property - def vector_embeddings_updated_at(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.vector_embeddings_updated_at - ) - - @vector_embeddings_updated_at.setter - def vector_embeddings_updated_at( - self, vector_embeddings_updated_at: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.vector_embeddings_updated_at = vector_embeddings_updated_at + self.attributes.tag_attachment_string_value = tag_attachment_string_value class Attributes(Asset.Attributes): - category: Optional[str] = Field(None, description="", alias="category") - sub_category: Optional[str] = Field(None, description="", alias="subCategory") - host: Optional[str] = Field(None, description="", alias="host") - port: Optional[int] = Field(None, description="", alias="port") - allow_query: Optional[bool] = Field(None, description="", alias="allowQuery") - allow_query_preview: Optional[bool] = Field( - None, description="", alias="allowQueryPreview" - ) - query_preview_config: Optional[dict[str, str]] = Field( - None, description="", alias="queryPreviewConfig" - ) - query_config: Optional[str] = Field(None, description="", alias="queryConfig") - credential_strategy: Optional[str] = Field( - None, description="", alias="credentialStrategy" - ) - preview_credential_strategy: Optional[str] = Field( - None, description="", alias="previewCredentialStrategy" - ) - policy_strategy: Optional[str] = Field( - None, description="", alias="policyStrategy" - ) - query_username_strategy: Optional[QueryUsernameStrategy] = Field( - None, description="", alias="queryUsernameStrategy" - ) - row_limit: Optional[int] = Field(None, description="", alias="rowLimit") - query_timeout: Optional[int] = Field(None, description="", alias="queryTimeout") - default_credential_guid: Optional[str] = Field( - None, description="", alias="defaultCredentialGuid" - ) - connector_icon: Optional[str] = Field( - None, description="", alias="connectorIcon" - ) - connector_image: Optional[str] = Field( - None, description="", alias="connectorImage" + tag_qualified_name: Optional[str] = Field( + None, description="", alias="tagQualifiedName" ) - source_logo: Optional[str] = Field(None, description="", alias="sourceLogo") - is_sample_data_preview_enabled: Optional[bool] = Field( - None, description="", 
alias="isSampleDataPreviewEnabled" + tag_attachment_string_value: Optional[str] = Field( + None, description="", alias="tagAttachmentStringValue" ) - popularity_insights_timeframe: Optional[int] = Field( - None, description="", alias="popularityInsightsTimeframe" - ) - has_popularity_insights: Optional[bool] = Field( - None, description="", alias="hasPopularityInsights" - ) - connection_dbt_environments: Optional[set[str]] = Field( - None, description="", alias="connectionDbtEnvironments" - ) - connection_s_s_o_credential_guid: Optional[str] = Field( - None, description="", alias="connectionSSOCredentialGuid" - ) - use_object_storage: Optional[bool] = Field( - None, description="", alias="useObjectStorage" - ) - object_storage_upload_threshold: Optional[int] = Field( - None, description="", alias="objectStorageUploadThreshold" - ) - vector_embeddings_enabled: Optional[bool] = Field( - None, description="", alias="vectorEmbeddingsEnabled" - ) - vector_embeddings_updated_at: Optional[datetime] = Field( - None, description="", alias="vectorEmbeddingsUpdatedAt" - ) - - is_loaded: bool = Field(default=True) - - @validator("admin_users") - def admin_users_valid(cls, admin_users, values): - from pyatlan.cache.user_cache import UserCache - - if values.get("is_loaded", False): - UserCache.validate_names(names=admin_users) - return admin_users - - @validator("admin_roles") - def admin_roles_valid(cls, admin_roles, values): - from pyatlan.cache.role_cache import RoleCache - - if values.get("is_loaded", False): - RoleCache.validate_idstrs(idstrs=admin_roles) - return admin_roles - - @validator("admin_groups") - def admin_groups_valid(cls, admin_groups, values): - from pyatlan.cache.group_cache import GroupCache - - if values.get("is_loaded", False): - GroupCache.validate_aliases(aliases=admin_groups) - return admin_groups - attributes: "Connection.Attributes" = Field( - default_factory=lambda: Connection.Attributes(), + attributes: "TagAttachment.Attributes" = Field( + default_factory=lambda: TagAttachment.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -Connection.Attributes.update_forward_refs() +TagAttachment.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset03.py b/pyatlan/model/assets/asset03.py new file mode 100644 index 000000000..4325b6f95 --- /dev/null +++ b/pyatlan/model/assets/asset03.py @@ -0,0 +1,690 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic import Field, validator + +from pyatlan.model.enums import AtlanConnectorType, QueryUsernameStrategy +from pyatlan.model.fields.atlan_fields import BooleanField, KeywordField, NumericField +from pyatlan.utils import init_guid, validate_required_fields + +from .asset00 import Asset + + +class Connection(Asset, type_name="Connection"): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, + *, + name: str, + connector_type: AtlanConnectorType, + admin_users: Optional[list[str]] = None, + admin_groups: Optional[list[str]] = None, + admin_roles: Optional[list[str]] = None, + ) -> Connection: + validate_required_fields(["name", "connector_type"], [name, connector_type]) + if not admin_users and not admin_groups and not admin_roles: + raise ValueError( + "One of admin_user, admin_groups or admin_roles is required" + ) + attr = cls.Attributes( + name=name, + qualified_name=connector_type.to_qualified_name(), + connector_name=connector_type.value, + category=connector_type.category.value, + ) + attr.admin_users = set() if admin_users is None else set(admin_users) + attr.admin_groups = set() if admin_groups is None else set(admin_groups) + attr.admin_roles = set() if admin_roles is None else set(admin_roles) + return cls(attributes=attr) + + type_name: str = Field("Connection", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Connection": + raise ValueError("must be Connection") + return v + + def __setattr__(self, name, value): + if name in Connection._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + CATEGORY: ClassVar[KeywordField] = KeywordField("category", "category") + """ + Type of connection, for example WAREHOUSE, RDBMS, etc. + """ + SUB_CATEGORY: ClassVar[KeywordField] = KeywordField("subCategory", "subCategory") + """ + Subcategory of this connection. + """ + HOST: ClassVar[KeywordField] = KeywordField("host", "host") + """ + Host name of this connection's source. + """ + PORT: ClassVar[NumericField] = NumericField("port", "port") + """ + Port number to this connection's source. + """ + ALLOW_QUERY: ClassVar[BooleanField] = BooleanField("allowQuery", "allowQuery") + """ + Whether using this connection to run queries on the source is allowed (true) or not (false). + """ + ALLOW_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( + "allowQueryPreview", "allowQueryPreview" + ) + """ + Whether using this connection to run preview queries on the source is allowed (true) or not (false). + """ + QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( + "queryPreviewConfig", "queryPreviewConfig" + ) + """ + Configuration for preview queries. + """ + QUERY_CONFIG: ClassVar[KeywordField] = KeywordField("queryConfig", "queryConfig") + """ + Query config for this connection. + """ + CREDENTIAL_STRATEGY: ClassVar[KeywordField] = KeywordField( + "credentialStrategy", "credentialStrategy" + ) + """ + Credential strategy to use for this connection for queries. + """ + PREVIEW_CREDENTIAL_STRATEGY: ClassVar[KeywordField] = KeywordField( + "previewCredentialStrategy", "previewCredentialStrategy" + ) + """ + Credential strategy to use for this connection for preview queries. 
+ """ + POLICY_STRATEGY: ClassVar[KeywordField] = KeywordField( + "policyStrategy", "policyStrategy" + ) + """ + Policy strategy is a configuration that determines whether the Atlan policy will be applied to the results of insight queries and whether the query will be rewritten, applicable for stream api call made from insight screen + """ # noqa: E501 + POLICY_STRATEGY_FOR_SAMPLE_PREVIEW: ClassVar[KeywordField] = KeywordField( + "policyStrategyForSamplePreview", "policyStrategyForSamplePreview" + ) + """ + Policy strategy is a configuration that determines whether the Atlan policy will be applied to the results of insight queries and whether the query will be rewritten. policyStrategyForSamplePreview config is applicable for sample preview call from assets screen + """ # noqa: E501 + QUERY_USERNAME_STRATEGY: ClassVar[KeywordField] = KeywordField( + "queryUsernameStrategy", "queryUsernameStrategy" + ) + """ + Username strategy to use for this connection for queries. + """ + ROW_LIMIT: ClassVar[NumericField] = NumericField("rowLimit", "rowLimit") + """ + Maximum number of rows that can be returned for the source. + """ + QUERY_TIMEOUT: ClassVar[NumericField] = NumericField("queryTimeout", "queryTimeout") + """ + Maximum time a query should be allowed to run before timing out. + """ + DEFAULT_CREDENTIAL_GUID: ClassVar[KeywordField] = KeywordField( + "defaultCredentialGuid", "defaultCredentialGuid" + ) + """ + Unique identifier (GUID) for the default credentials to use for this connection. + """ + CONNECTOR_ICON: ClassVar[KeywordField] = KeywordField( + "connectorIcon", "connectorIcon" + ) + """ + Unused. Only the value of connectorType impacts icons. + """ + CONNECTOR_IMAGE: ClassVar[KeywordField] = KeywordField( + "connectorImage", "connectorImage" + ) + """ + Unused. Only the value of connectorType impacts icons. + """ + SOURCE_LOGO: ClassVar[KeywordField] = KeywordField("sourceLogo", "sourceLogo") + """ + Unused. Only the value of connectorType impacts icons. + """ + IS_SAMPLE_DATA_PREVIEW_ENABLED: ClassVar[BooleanField] = BooleanField( + "isSampleDataPreviewEnabled", "isSampleDataPreviewEnabled" + ) + """ + Whether sample data can be previewed for this connection (true) or not (false). + """ + POPULARITY_INSIGHTS_TIMEFRAME: ClassVar[NumericField] = NumericField( + "popularityInsightsTimeframe", "popularityInsightsTimeframe" + ) + """ + Number of days over which popularity is calculated, for example 30 days. + """ + HAS_POPULARITY_INSIGHTS: ClassVar[BooleanField] = BooleanField( + "hasPopularityInsights", "hasPopularityInsights" + ) + """ + Whether this connection has popularity insights (true) or not (false). + """ + CONNECTION_DBT_ENVIRONMENTS: ClassVar[KeywordField] = KeywordField( + "connectionDbtEnvironments", "connectionDbtEnvironments" + ) + """ + + """ + CONNECTION_SSO_CREDENTIAL_GUID: ClassVar[KeywordField] = KeywordField( + "connectionSSOCredentialGuid", "connectionSSOCredentialGuid" + ) + """ + Unique identifier (GUID) for the SSO credentials to use for this connection. + """ + USE_OBJECT_STORAGE: ClassVar[BooleanField] = BooleanField( + "useObjectStorage", "useObjectStorage" + ) + """ + Whether to upload to S3, GCP, or another storage location (true) or not (false). + """ + OBJECT_STORAGE_UPLOAD_THRESHOLD: ClassVar[NumericField] = NumericField( + "objectStorageUploadThreshold", "objectStorageUploadThreshold" + ) + """ + Number of rows after which results should be uploaded to storage. 
+ """ + VECTOR_EMBEDDINGS_ENABLED: ClassVar[BooleanField] = BooleanField( + "vectorEmbeddingsEnabled", "vectorEmbeddingsEnabled" + ) + """ + + """ + VECTOR_EMBEDDINGS_UPDATED_AT: ClassVar[NumericField] = NumericField( + "vectorEmbeddingsUpdatedAt", "vectorEmbeddingsUpdatedAt" + ) + """ + + """ + + _convenience_properties: ClassVar[list[str]] = [ + "category", + "sub_category", + "host", + "port", + "allow_query", + "allow_query_preview", + "query_preview_config", + "query_config", + "credential_strategy", + "preview_credential_strategy", + "policy_strategy", + "policy_strategy_for_sample_preview", + "query_username_strategy", + "row_limit", + "query_timeout", + "default_credential_guid", + "connector_icon", + "connector_image", + "source_logo", + "is_sample_data_preview_enabled", + "popularity_insights_timeframe", + "has_popularity_insights", + "connection_dbt_environments", + "connection_s_s_o_credential_guid", + "use_object_storage", + "object_storage_upload_threshold", + "vector_embeddings_enabled", + "vector_embeddings_updated_at", + ] + + @property + def category(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.category + + @category.setter + def category(self, category: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.category = category + + @property + def sub_category(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.sub_category + + @sub_category.setter + def sub_category(self, sub_category: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sub_category = sub_category + + @property + def host(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.host + + @host.setter + def host(self, host: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.host = host + + @property + def port(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.port + + @port.setter + def port(self, port: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.port = port + + @property + def allow_query(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.allow_query + + @allow_query.setter + def allow_query(self, allow_query: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.allow_query = allow_query + + @property + def allow_query_preview(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.allow_query_preview + + @allow_query_preview.setter + def allow_query_preview(self, allow_query_preview: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.allow_query_preview = allow_query_preview + + @property + def query_preview_config(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.query_preview_config + + @query_preview_config.setter + def query_preview_config(self, query_preview_config: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_preview_config = query_preview_config + + @property + def query_config(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.query_config + + @query_config.setter + def query_config(self, query_config: Optional[str]): + if self.attributes 
is None: + self.attributes = self.Attributes() + self.attributes.query_config = query_config + + @property + def credential_strategy(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.credential_strategy + + @credential_strategy.setter + def credential_strategy(self, credential_strategy: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.credential_strategy = credential_strategy + + @property + def preview_credential_strategy(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.preview_credential_strategy + ) + + @preview_credential_strategy.setter + def preview_credential_strategy(self, preview_credential_strategy: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preview_credential_strategy = preview_credential_strategy + + @property + def policy_strategy(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.policy_strategy + + @policy_strategy.setter + def policy_strategy(self, policy_strategy: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_strategy = policy_strategy + + @property + def policy_strategy_for_sample_preview(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.policy_strategy_for_sample_preview + ) + + @policy_strategy_for_sample_preview.setter + def policy_strategy_for_sample_preview( + self, policy_strategy_for_sample_preview: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_strategy_for_sample_preview = ( + policy_strategy_for_sample_preview + ) + + @property + def query_username_strategy(self) -> Optional[QueryUsernameStrategy]: + return ( + None if self.attributes is None else self.attributes.query_username_strategy + ) + + @query_username_strategy.setter + def query_username_strategy( + self, query_username_strategy: Optional[QueryUsernameStrategy] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_username_strategy = query_username_strategy + + @property + def row_limit(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.row_limit + + @row_limit.setter + def row_limit(self, row_limit: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.row_limit = row_limit + + @property + def query_timeout(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_timeout + + @query_timeout.setter + def query_timeout(self, query_timeout: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_timeout = query_timeout + + @property + def default_credential_guid(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.default_credential_guid + ) + + @default_credential_guid.setter + def default_credential_guid(self, default_credential_guid: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.default_credential_guid = default_credential_guid + + @property + def connector_icon(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.connector_icon + + @connector_icon.setter + def connector_icon(self, connector_icon: Optional[str]): + if self.attributes is None: + self.attributes = 
self.Attributes() + self.attributes.connector_icon = connector_icon + + @property + def connector_image(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.connector_image + + @connector_image.setter + def connector_image(self, connector_image: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.connector_image = connector_image + + @property + def source_logo(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.source_logo + + @source_logo.setter + def source_logo(self, source_logo: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_logo = source_logo + + @property + def is_sample_data_preview_enabled(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.is_sample_data_preview_enabled + ) + + @is_sample_data_preview_enabled.setter + def is_sample_data_preview_enabled( + self, is_sample_data_preview_enabled: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_sample_data_preview_enabled = is_sample_data_preview_enabled + + @property + def popularity_insights_timeframe(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.popularity_insights_timeframe + ) + + @popularity_insights_timeframe.setter + def popularity_insights_timeframe( + self, popularity_insights_timeframe: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.popularity_insights_timeframe = popularity_insights_timeframe + + @property + def has_popularity_insights(self) -> Optional[bool]: + return ( + None if self.attributes is None else self.attributes.has_popularity_insights + ) + + @has_popularity_insights.setter + def has_popularity_insights(self, has_popularity_insights: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.has_popularity_insights = has_popularity_insights + + @property + def connection_dbt_environments(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.connection_dbt_environments + ) + + @connection_dbt_environments.setter + def connection_dbt_environments( + self, connection_dbt_environments: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.connection_dbt_environments = connection_dbt_environments + + @property + def connection_s_s_o_credential_guid(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.connection_s_s_o_credential_guid + ) + + @connection_s_s_o_credential_guid.setter + def connection_s_s_o_credential_guid( + self, connection_s_s_o_credential_guid: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.connection_s_s_o_credential_guid = ( + connection_s_s_o_credential_guid + ) + + @property + def use_object_storage(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.use_object_storage + + @use_object_storage.setter + def use_object_storage(self, use_object_storage: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.use_object_storage = use_object_storage + + @property + def object_storage_upload_threshold(self) -> Optional[int]: + return ( + None + if self.attributes is None + else 
self.attributes.object_storage_upload_threshold + ) + + @object_storage_upload_threshold.setter + def object_storage_upload_threshold( + self, object_storage_upload_threshold: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.object_storage_upload_threshold = ( + object_storage_upload_threshold + ) + + @property + def vector_embeddings_enabled(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.vector_embeddings_enabled + ) + + @vector_embeddings_enabled.setter + def vector_embeddings_enabled(self, vector_embeddings_enabled: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.vector_embeddings_enabled = vector_embeddings_enabled + + @property + def vector_embeddings_updated_at(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.vector_embeddings_updated_at + ) + + @vector_embeddings_updated_at.setter + def vector_embeddings_updated_at( + self, vector_embeddings_updated_at: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.vector_embeddings_updated_at = vector_embeddings_updated_at + + class Attributes(Asset.Attributes): + category: Optional[str] = Field(None, description="", alias="category") + sub_category: Optional[str] = Field(None, description="", alias="subCategory") + host: Optional[str] = Field(None, description="", alias="host") + port: Optional[int] = Field(None, description="", alias="port") + allow_query: Optional[bool] = Field(None, description="", alias="allowQuery") + allow_query_preview: Optional[bool] = Field( + None, description="", alias="allowQueryPreview" + ) + query_preview_config: Optional[dict[str, str]] = Field( + None, description="", alias="queryPreviewConfig" + ) + query_config: Optional[str] = Field(None, description="", alias="queryConfig") + credential_strategy: Optional[str] = Field( + None, description="", alias="credentialStrategy" + ) + preview_credential_strategy: Optional[str] = Field( + None, description="", alias="previewCredentialStrategy" + ) + policy_strategy: Optional[str] = Field( + None, description="", alias="policyStrategy" + ) + policy_strategy_for_sample_preview: Optional[str] = Field( + None, description="", alias="policyStrategyForSamplePreview" + ) + query_username_strategy: Optional[QueryUsernameStrategy] = Field( + None, description="", alias="queryUsernameStrategy" + ) + row_limit: Optional[int] = Field(None, description="", alias="rowLimit") + query_timeout: Optional[int] = Field(None, description="", alias="queryTimeout") + default_credential_guid: Optional[str] = Field( + None, description="", alias="defaultCredentialGuid" + ) + connector_icon: Optional[str] = Field( + None, description="", alias="connectorIcon" + ) + connector_image: Optional[str] = Field( + None, description="", alias="connectorImage" + ) + source_logo: Optional[str] = Field(None, description="", alias="sourceLogo") + is_sample_data_preview_enabled: Optional[bool] = Field( + None, description="", alias="isSampleDataPreviewEnabled" + ) + popularity_insights_timeframe: Optional[int] = Field( + None, description="", alias="popularityInsightsTimeframe" + ) + has_popularity_insights: Optional[bool] = Field( + None, description="", alias="hasPopularityInsights" + ) + connection_dbt_environments: Optional[set[str]] = Field( + None, description="", alias="connectionDbtEnvironments" + ) + connection_s_s_o_credential_guid: 
Optional[str] = Field( + None, description="", alias="connectionSSOCredentialGuid" + ) + use_object_storage: Optional[bool] = Field( + None, description="", alias="useObjectStorage" + ) + object_storage_upload_threshold: Optional[int] = Field( + None, description="", alias="objectStorageUploadThreshold" + ) + vector_embeddings_enabled: Optional[bool] = Field( + None, description="", alias="vectorEmbeddingsEnabled" + ) + vector_embeddings_updated_at: Optional[datetime] = Field( + None, description="", alias="vectorEmbeddingsUpdatedAt" + ) + + is_loaded: bool = Field(default=True) + + @validator("admin_users") + def admin_users_valid(cls, admin_users, values): + from pyatlan.cache.user_cache import UserCache + + if values.get("is_loaded", False): + UserCache.validate_names(names=admin_users) + return admin_users + + @validator("admin_roles") + def admin_roles_valid(cls, admin_roles, values): + from pyatlan.cache.role_cache import RoleCache + + if values.get("is_loaded", False): + RoleCache.validate_idstrs(idstrs=admin_roles) + return admin_roles + + @validator("admin_groups") + def admin_groups_valid(cls, admin_groups, values): + from pyatlan.cache.group_cache import GroupCache + + if values.get("is_loaded", False): + GroupCache.validate_aliases(aliases=admin_groups) + return admin_groups + + attributes: "Connection.Attributes" = Field( + default_factory=lambda: Connection.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +Connection.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset04.py b/pyatlan/model/assets/asset04.py deleted file mode 100644 index df3979547..000000000 --- a/pyatlan/model/assets/asset04.py +++ /dev/null @@ -1,141 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. 
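The new asset03.py above carries the relocated Connection model; its create() requires a name, a connector_type, and at least one of admin_users, admin_groups, or admin_roles, and derives the qualified name from the connector type. A minimal sketch of building such an object follows; the connection name, connector type member, and admin username are hypothetical, and note that the admin_* validators shown above resolve the supplied values through the user/role/group caches, so they expect a configured Atlan client:

from pyatlan.model.assets import Connection
from pyatlan.model.enums import AtlanConnectorType

connection = Connection.create(
    name="production-snowflake",                  # hypothetical connection name
    connector_type=AtlanConnectorType.SNOWFLAKE,  # assumed connector type member
    admin_users=["jsmith"],                       # at least one admin_* argument is required
)
# qualified_name comes from connector_type.to_qualified_name();
# connector_name is set to the connector type's value.
print(connection.qualified_name)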
- - -from __future__ import annotations - -from typing import ClassVar, Optional - -from pydantic import Field, StrictStr, validator - -from pyatlan.model.enums import EntityStatus -from pyatlan.model.fields.atlan_fields import KeywordField -from pyatlan.model.structs import BadgeCondition -from pyatlan.utils import init_guid, validate_required_fields - -from .asset00 import Asset - - -class Badge(Asset, type_name="Badge"): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, - *, - name: StrictStr, - cm_name: str, - cm_attribute: str, - badge_conditions: list[BadgeCondition], - ) -> Badge: - return cls( - status=EntityStatus.ACTIVE, - attributes=Badge.Attributes.create( - name=name, - cm_name=cm_name, - cm_attribute=cm_attribute, - badge_conditions=badge_conditions, - ), - ) - - type_name: str = Field("Badge", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Badge": - raise ValueError("must be Badge") - return v - - def __setattr__(self, name, value): - if name in Badge._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - BADGE_CONDITIONS: ClassVar[KeywordField] = KeywordField( - "badgeConditions", "badgeConditions" - ) - """ - TBC - """ - BADGE_METADATA_ATTRIBUTE: ClassVar[KeywordField] = KeywordField( - "badgeMetadataAttribute", "badgeMetadataAttribute" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "badge_conditions", - "badge_metadata_attribute", - ] - - @property - def badge_conditions(self) -> Optional[list[BadgeCondition]]: - return None if self.attributes is None else self.attributes.badge_conditions - - @badge_conditions.setter - def badge_conditions(self, badge_conditions: Optional[list[BadgeCondition]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.badge_conditions = badge_conditions - - @property - def badge_metadata_attribute(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.badge_metadata_attribute - ) - - @badge_metadata_attribute.setter - def badge_metadata_attribute(self, badge_metadata_attribute: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.badge_metadata_attribute = badge_metadata_attribute - - class Attributes(Asset.Attributes): - badge_conditions: Optional[list[BadgeCondition]] = Field( - None, description="", alias="badgeConditions" - ) - badge_metadata_attribute: Optional[str] = Field( - None, description="", alias="badgeMetadataAttribute" - ) - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, - *, - name: StrictStr, - cm_name: str, - cm_attribute: str, - badge_conditions: list[BadgeCondition], - ) -> Badge.Attributes: - validate_required_fields( - ["name", "cm_name", "cm_attribute", "badge_conditions"], - [name, cm_name, cm_attribute, badge_conditions], - ) - from pyatlan.cache.custom_metadata_cache import CustomMetadataCache - - cm_id = CustomMetadataCache.get_id_for_name(cm_name) - cm_attr_id = CustomMetadataCache.get_attr_id_for_name( - set_name=cm_name, attr_name=cm_attribute - ) - return Badge.Attributes( - name=name, - qualified_name=f"badges/global/{cm_id}.{cm_attr_id}", - badge_metadata_attribute=f"{cm_id}.{cm_attr_id}", - badge_conditions=badge_conditions, - ) - - attributes: "Badge.Attributes" = Field( - default_factory=lambda: Badge.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -Badge.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset05.py b/pyatlan/model/assets/asset05.py index df5a6664d..d2da5bea5 100644 --- a/pyatlan/model/assets/asset05.py +++ b/pyatlan/model/assets/asset05.py @@ -6,664 +6,136 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic import Field, StrictStr, validator -from pyatlan.model.enums import AuthPolicyType -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - NumericField, - RelationField, -) -from pyatlan.model.structs import AuthPolicyCondition, AuthPolicyValiditySchedule -from pyatlan.utils import validate_required_fields +from pyatlan.model.enums import EntityStatus +from pyatlan.model.fields.atlan_fields import KeywordField +from pyatlan.model.structs import BadgeCondition +from pyatlan.utils import init_guid, validate_required_fields -from .asset00 import Asset, SelfAsset +from .asset00 import Asset -class AuthPolicy(Asset, type_name="AuthPolicy"): +class Badge(Asset, type_name="Badge"): """Description""" @classmethod # @validate_arguments() - def __create(cls, *, name: str) -> AuthPolicy: - validate_required_fields(["name"], [name]) - attributes = AuthPolicy.Attributes._Attributes__create(name=name) # type: ignore - return cls(attributes=attributes) - - @classmethod - def create_for_modification( - cls: type[SelfAsset], - qualified_name: str = "", - name: str = "" - """ - This method is not available for AuthPolicy. Please retrieve the existing policy and then update it in its - entirety. - """, - ) -> SelfAsset: - raise NotImplementedError( - "This method is not available for AuthPolicy. Please retrieve the existing policy" - " and then update it in its entirety." 
- ) - - type_name: str = Field("AuthPolicy", allow_mutation=False) + @init_guid + def create( + cls, + *, + name: StrictStr, + cm_name: str, + cm_attribute: str, + badge_conditions: list[BadgeCondition], + ) -> Badge: + return cls( + status=EntityStatus.ACTIVE, + attributes=Badge.Attributes.create( + name=name, + cm_name=cm_name, + cm_attribute=cm_attribute, + badge_conditions=badge_conditions, + ), + ) + + type_name: str = Field("Badge", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "AuthPolicy": - raise ValueError("must be AuthPolicy") + if v != "Badge": + raise ValueError("must be Badge") return v def __setattr__(self, name, value): - if name in AuthPolicy._convenience_properties: + if name in Badge._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - POLICY_TYPE: ClassVar[KeywordField] = KeywordField("policyType", "policyType") - """ - TBC - """ - POLICY_SERVICE_NAME: ClassVar[KeywordField] = KeywordField( - "policyServiceName", "policyServiceName" - ) - """ - TBC - """ - POLICY_CATEGORY: ClassVar[KeywordField] = KeywordField( - "policyCategory", "policyCategory" - ) - """ - TBC - """ - POLICY_SUB_CATEGORY: ClassVar[KeywordField] = KeywordField( - "policySubCategory", "policySubCategory" - ) - """ - TBC - """ - POLICY_USERS: ClassVar[KeywordField] = KeywordField("policyUsers", "policyUsers") - """ - TBC - """ - POLICY_GROUPS: ClassVar[KeywordField] = KeywordField("policyGroups", "policyGroups") - """ - TBC - """ - POLICY_ROLES: ClassVar[KeywordField] = KeywordField("policyRoles", "policyRoles") - """ - TBC - """ - POLICY_ACTIONS: ClassVar[KeywordField] = KeywordField( - "policyActions", "policyActions" + BADGE_CONDITIONS: ClassVar[KeywordField] = KeywordField( + "badgeConditions", "badgeConditions" ) """ - TBC + List of conditions that determine the colors to display for various values. """ - POLICY_RESOURCES: ClassVar[KeywordField] = KeywordField( - "policyResources", "policyResources" + BADGE_METADATA_ATTRIBUTE: ClassVar[KeywordField] = KeywordField( + "badgeMetadataAttribute", "badgeMetadataAttribute" ) """ - TBC - """ - POLICY_RESOURCE_CATEGORY: ClassVar[KeywordField] = KeywordField( - "policyResourceCategory", "policyResourceCategory" - ) - """ - TBC - """ - POLICY_PRIORITY: ClassVar[NumericField] = NumericField( - "policyPriority", "policyPriority" - ) - """ - TBC - """ - IS_POLICY_ENABLED: ClassVar[BooleanField] = BooleanField( - "isPolicyEnabled", "isPolicyEnabled" - ) - """ - TBC - """ - POLICY_MASK_TYPE: ClassVar[KeywordField] = KeywordField( - "policyMaskType", "policyMaskType" - ) - """ - TBC - """ - POLICY_VALIDITY_SCHEDULE: ClassVar[KeywordField] = KeywordField( - "policyValiditySchedule", "policyValiditySchedule" - ) - """ - TBC - """ - POLICY_RESOURCE_SIGNATURE: ClassVar[KeywordField] = KeywordField( - "policyResourceSignature", "policyResourceSignature" - ) - """ - TBC - """ - POLICY_DELEGATE_ADMIN: ClassVar[BooleanField] = BooleanField( - "policyDelegateAdmin", "policyDelegateAdmin" - ) - """ - TBC - """ - POLICY_CONDITIONS: ClassVar[KeywordField] = KeywordField( - "policyConditions", "policyConditions" - ) - """ - TBC - """ - - ACCESS_CONTROL: ClassVar[RelationField] = RelationField("accessControl") - """ - TBC + Custom metadata attribute for which to show the badge.
""" _convenience_properties: ClassVar[list[str]] = [ - "policy_type", - "policy_service_name", - "policy_category", - "policy_sub_category", - "policy_users", - "policy_groups", - "policy_roles", - "policy_actions", - "policy_resources", - "policy_resource_category", - "policy_priority", - "is_policy_enabled", - "policy_mask_type", - "policy_validity_schedule", - "policy_resource_signature", - "policy_delegate_admin", - "policy_conditions", - "access_control", + "badge_conditions", + "badge_metadata_attribute", ] @property - def policy_type(self) -> Optional[AuthPolicyType]: - return None if self.attributes is None else self.attributes.policy_type + def badge_conditions(self) -> Optional[list[BadgeCondition]]: + return None if self.attributes is None else self.attributes.badge_conditions - @policy_type.setter - def policy_type(self, policy_type: Optional[AuthPolicyType]): + @badge_conditions.setter + def badge_conditions(self, badge_conditions: Optional[list[BadgeCondition]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.policy_type = policy_type + self.attributes.badge_conditions = badge_conditions @property - def policy_service_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.policy_service_name - - @policy_service_name.setter - def policy_service_name(self, policy_service_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.policy_service_name = policy_service_name - - @property - def policy_category(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.policy_category - - @policy_category.setter - def policy_category(self, policy_category: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.policy_category = policy_category - - @property - def policy_sub_category(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.policy_sub_category - - @policy_sub_category.setter - def policy_sub_category(self, policy_sub_category: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.policy_sub_category = policy_sub_category - - @property - def policy_users(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.policy_users - - @policy_users.setter - def policy_users(self, policy_users: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.policy_users = policy_users - - @property - def policy_groups(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.policy_groups - - @policy_groups.setter - def policy_groups(self, policy_groups: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.policy_groups = policy_groups - - @property - def policy_roles(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.policy_roles - - @policy_roles.setter - def policy_roles(self, policy_roles: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.policy_roles = policy_roles - - @property - def policy_actions(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.policy_actions - - @policy_actions.setter - def policy_actions(self, policy_actions: Optional[set[str]]): - if self.attributes is None: - self.attributes = 
self.Attributes() - self.attributes.policy_actions = policy_actions - - @property - def policy_resources(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.policy_resources - - @policy_resources.setter - def policy_resources(self, policy_resources: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.policy_resources = policy_resources - - @property - def policy_resource_category(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.policy_resource_category - ) - - @policy_resource_category.setter - def policy_resource_category(self, policy_resource_category: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.policy_resource_category = policy_resource_category - - @property - def policy_priority(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.policy_priority - - @policy_priority.setter - def policy_priority(self, policy_priority: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.policy_priority = policy_priority - - @property - def is_policy_enabled(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_policy_enabled - - @is_policy_enabled.setter - def is_policy_enabled(self, is_policy_enabled: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_policy_enabled = is_policy_enabled - - @property - def policy_mask_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.policy_mask_type - - @policy_mask_type.setter - def policy_mask_type(self, policy_mask_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.policy_mask_type = policy_mask_type - - @property - def policy_validity_schedule(self) -> Optional[list[AuthPolicyValiditySchedule]]: + def badge_metadata_attribute(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.policy_validity_schedule + else self.attributes.badge_metadata_attribute ) - @policy_validity_schedule.setter - def policy_validity_schedule( - self, policy_validity_schedule: Optional[list[AuthPolicyValiditySchedule]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.policy_validity_schedule = policy_validity_schedule - - @property - def policy_resource_signature(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.policy_resource_signature - ) - - @policy_resource_signature.setter - def policy_resource_signature(self, policy_resource_signature: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.policy_resource_signature = policy_resource_signature - - @property - def policy_delegate_admin(self) -> Optional[bool]: - return ( - None if self.attributes is None else self.attributes.policy_delegate_admin - ) - - @policy_delegate_admin.setter - def policy_delegate_admin(self, policy_delegate_admin: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.policy_delegate_admin = policy_delegate_admin - - @property - def policy_conditions(self) -> Optional[list[AuthPolicyCondition]]: - return None if self.attributes is None else self.attributes.policy_conditions - - @policy_conditions.setter - def policy_conditions(self, 
policy_conditions: Optional[list[AuthPolicyCondition]]): + @badge_metadata_attribute.setter + def badge_metadata_attribute(self, badge_metadata_attribute: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.policy_conditions = policy_conditions - - @property - def access_control(self) -> Optional[AccessControl]: - return None if self.attributes is None else self.attributes.access_control - - @access_control.setter - def access_control(self, access_control: Optional[AccessControl]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.access_control = access_control + self.attributes.badge_metadata_attribute = badge_metadata_attribute class Attributes(Asset.Attributes): - policy_type: Optional[AuthPolicyType] = Field( - None, description="", alias="policyType" - ) - policy_service_name: Optional[str] = Field( - None, description="", alias="policyServiceName" - ) - policy_category: Optional[str] = Field( - None, description="", alias="policyCategory" - ) - policy_sub_category: Optional[str] = Field( - None, description="", alias="policySubCategory" - ) - policy_users: Optional[set[str]] = Field( - None, description="", alias="policyUsers" - ) - policy_groups: Optional[set[str]] = Field( - None, description="", alias="policyGroups" - ) - policy_roles: Optional[set[str]] = Field( - None, description="", alias="policyRoles" - ) - policy_actions: Optional[set[str]] = Field( - None, description="", alias="policyActions" - ) - policy_resources: Optional[set[str]] = Field( - None, description="", alias="policyResources" - ) - policy_resource_category: Optional[str] = Field( - None, description="", alias="policyResourceCategory" - ) - policy_priority: Optional[int] = Field( - None, description="", alias="policyPriority" - ) - is_policy_enabled: Optional[bool] = Field( - None, description="", alias="isPolicyEnabled" - ) - policy_mask_type: Optional[str] = Field( - None, description="", alias="policyMaskType" + badge_conditions: Optional[list[BadgeCondition]] = Field( + None, description="", alias="badgeConditions" ) - policy_validity_schedule: Optional[list[AuthPolicyValiditySchedule]] = Field( - None, description="", alias="policyValiditySchedule" + badge_metadata_attribute: Optional[str] = Field( + None, description="", alias="badgeMetadataAttribute" ) - policy_resource_signature: Optional[str] = Field( - None, description="", alias="policyResourceSignature" - ) - policy_delegate_admin: Optional[bool] = Field( - None, description="", alias="policyDelegateAdmin" - ) - policy_conditions: Optional[list[AuthPolicyCondition]] = Field( - None, description="", alias="policyConditions" - ) - access_control: Optional[AccessControl] = Field( - None, description="", alias="accessControl" - ) # relationship @classmethod # @validate_arguments() - def __create(cls, name: str) -> AuthPolicy.Attributes: - validate_required_fields(["name"], [name]) - return AuthPolicy.Attributes( - qualified_name=name, name=name, display_name="" + @init_guid + def create( + cls, + *, + name: StrictStr, + cm_name: str, + cm_attribute: str, + badge_conditions: list[BadgeCondition], + ) -> Badge.Attributes: + validate_required_fields( + ["name", "cm_name", "cm_attribute", "badge_conditions"], + [name, cm_name, cm_attribute, badge_conditions], ) + from pyatlan.cache.custom_metadata_cache import CustomMetadataCache - attributes: "AuthPolicy.Attributes" = Field( - default_factory=lambda: AuthPolicy.Attributes(), - description="Map of attributes in the instance 
and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class AccessControl(Asset, type_name="AccessControl"): - """Description""" - - type_name: str = Field("AccessControl", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "AccessControl": - raise ValueError("must be AccessControl") - return v - - def __setattr__(self, name, value): - if name in AccessControl._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - IS_ACCESS_CONTROL_ENABLED: ClassVar[BooleanField] = BooleanField( - "isAccessControlEnabled", "isAccessControlEnabled" - ) - """ - TBC - """ - DENY_CUSTOM_METADATA_GUIDS: ClassVar[KeywordField] = KeywordField( - "denyCustomMetadataGuids", "denyCustomMetadataGuids" - ) - """ - TBC - """ - DENY_ASSET_TABS: ClassVar[KeywordField] = KeywordField( - "denyAssetTabs", "denyAssetTabs" - ) - """ - TBC - """ - DENY_ASSET_FILTERS: ClassVar[KeywordField] = KeywordField( - "denyAssetFilters", "denyAssetFilters" - ) - """ - TBC - """ - CHANNEL_LINK: ClassVar[KeywordField] = KeywordField("channelLink", "channelLink") - """ - TBC - """ - DENY_ASSET_TYPES: ClassVar[KeywordField] = KeywordField( - "denyAssetTypes", "denyAssetTypes" - ) - """ - TBC - """ - DENY_NAVIGATION_PAGES: ClassVar[KeywordField] = KeywordField( - "denyNavigationPages", "denyNavigationPages" - ) - """ - TBC - """ - DEFAULT_NAVIGATION: ClassVar[KeywordField] = KeywordField( - "defaultNavigation", "defaultNavigation" - ) - """ - TBC - """ - - POLICIES: ClassVar[RelationField] = RelationField("policies") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "is_access_control_enabled", - "deny_custom_metadata_guids", - "deny_asset_tabs", - "deny_asset_filters", - "channel_link", - "deny_asset_types", - "deny_navigation_pages", - "default_navigation", - "policies", - ] - - @property - def is_access_control_enabled(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.is_access_control_enabled - ) - - @is_access_control_enabled.setter - def is_access_control_enabled(self, is_access_control_enabled: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_access_control_enabled = is_access_control_enabled - - @property - def deny_custom_metadata_guids(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.deny_custom_metadata_guids - ) - - @deny_custom_metadata_guids.setter - def deny_custom_metadata_guids( - self, deny_custom_metadata_guids: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.deny_custom_metadata_guids = deny_custom_metadata_guids - - @property - def deny_asset_tabs(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.deny_asset_tabs - - @deny_asset_tabs.setter - def deny_asset_tabs(self, deny_asset_tabs: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.deny_asset_tabs = deny_asset_tabs - - @property - def deny_asset_filters(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.deny_asset_filters - - @deny_asset_filters.setter - def deny_asset_filters(self, deny_asset_filters: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.deny_asset_filters 
= deny_asset_filters - - @property - def channel_link(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.channel_link - - @channel_link.setter - def channel_link(self, channel_link: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.channel_link = channel_link - - @property - def deny_asset_types(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.deny_asset_types - - @deny_asset_types.setter - def deny_asset_types(self, deny_asset_types: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.deny_asset_types = deny_asset_types - - @property - def deny_navigation_pages(self) -> Optional[set[str]]: - return ( - None if self.attributes is None else self.attributes.deny_navigation_pages - ) - - @deny_navigation_pages.setter - def deny_navigation_pages(self, deny_navigation_pages: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.deny_navigation_pages = deny_navigation_pages - - @property - def default_navigation(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.default_navigation - - @default_navigation.setter - def default_navigation(self, default_navigation: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.default_navigation = default_navigation - - @property - def policies(self) -> Optional[list[AuthPolicy]]: - return None if self.attributes is None else self.attributes.policies - - @policies.setter - def policies(self, policies: Optional[list[AuthPolicy]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.policies = policies - - class Attributes(Asset.Attributes): - is_access_control_enabled: Optional[bool] = Field( - None, description="", alias="isAccessControlEnabled" - ) - deny_custom_metadata_guids: Optional[set[str]] = Field( - None, description="", alias="denyCustomMetadataGuids" - ) - deny_asset_tabs: Optional[set[str]] = Field( - None, description="", alias="denyAssetTabs" - ) - deny_asset_filters: Optional[set[str]] = Field( - None, description="", alias="denyAssetFilters" - ) - channel_link: Optional[str] = Field(None, description="", alias="channelLink") - deny_asset_types: Optional[set[str]] = Field( - None, description="", alias="denyAssetTypes" - ) - deny_navigation_pages: Optional[set[str]] = Field( - None, description="", alias="denyNavigationPages" - ) - default_navigation: Optional[str] = Field( - None, description="", alias="defaultNavigation" - ) - policies: Optional[list[AuthPolicy]] = Field( - None, description="", alias="policies" - ) # relationship + cm_id = CustomMetadataCache.get_id_for_name(cm_name) + cm_attr_id = CustomMetadataCache.get_attr_id_for_name( + set_name=cm_name, attr_name=cm_attribute + ) + return Badge.Attributes( + name=name, + qualified_name=f"badges/global/{cm_id}.{cm_attr_id}", + badge_metadata_attribute=f"{cm_id}.{cm_attr_id}", + badge_conditions=badge_conditions, + ) - attributes: "AccessControl.Attributes" = Field( - default_factory=lambda: AccessControl.Attributes(), + attributes: "Badge.Attributes" = Field( + default_factory=lambda: Badge.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -AuthPolicy.Attributes.update_forward_refs() - - -AccessControl.Attributes.update_forward_refs() +Badge.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset06.py b/pyatlan/model/assets/asset06.py index 9c75e7cdf..df5a6664d 100644 --- a/pyatlan/model/assets/asset06.py +++ b/pyatlan/model/assets/asset06.py @@ -4,30 +4,666 @@ from __future__ import annotations -from typing import ClassVar +from typing import ClassVar, Optional from pydantic import Field, validator -from .asset00 import Asset +from pyatlan.model.enums import AuthPolicyType +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + NumericField, + RelationField, +) +from pyatlan.model.structs import AuthPolicyCondition, AuthPolicyValiditySchedule +from pyatlan.utils import validate_required_fields +from .asset00 import Asset, SelfAsset -class ProcessExecution(Asset, type_name="ProcessExecution"): + +class AuthPolicy(Asset, type_name="AuthPolicy"): """Description""" - type_name: str = Field("ProcessExecution", allow_mutation=False) + @classmethod + # @validate_arguments() + def __create(cls, *, name: str) -> AuthPolicy: + validate_required_fields(["name"], [name]) + attributes = AuthPolicy.Attributes._Attributes__create(name=name) # type: ignore + return cls(attributes=attributes) + + @classmethod + def create_for_modification( + cls: type[SelfAsset], + qualified_name: str = "", + name: str = "" + """ + This method is not available for AuthPolicy. Please retrieve the existing policy and then update it in its + entirety. + """, + ) -> SelfAsset: + raise NotImplementedError( + "This method is not available for AuthPolicy. Please retrieve the existing policy" + " and then update it in its entirety." 
+ ) + + type_name: str = Field("AuthPolicy", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "ProcessExecution": - raise ValueError("must be ProcessExecution") + if v != "AuthPolicy": + raise ValueError("must be AuthPolicy") return v def __setattr__(self, name, value): - if name in ProcessExecution._convenience_properties: + if name in AuthPolicy._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - _convenience_properties: ClassVar[list[str]] = [] + POLICY_TYPE: ClassVar[KeywordField] = KeywordField("policyType", "policyType") + """ + TBC + """ + POLICY_SERVICE_NAME: ClassVar[KeywordField] = KeywordField( + "policyServiceName", "policyServiceName" + ) + """ + TBC + """ + POLICY_CATEGORY: ClassVar[KeywordField] = KeywordField( + "policyCategory", "policyCategory" + ) + """ + TBC + """ + POLICY_SUB_CATEGORY: ClassVar[KeywordField] = KeywordField( + "policySubCategory", "policySubCategory" + ) + """ + TBC + """ + POLICY_USERS: ClassVar[KeywordField] = KeywordField("policyUsers", "policyUsers") + """ + TBC + """ + POLICY_GROUPS: ClassVar[KeywordField] = KeywordField("policyGroups", "policyGroups") + """ + TBC + """ + POLICY_ROLES: ClassVar[KeywordField] = KeywordField("policyRoles", "policyRoles") + """ + TBC + """ + POLICY_ACTIONS: ClassVar[KeywordField] = KeywordField( + "policyActions", "policyActions" + ) + """ + TBC + """ + POLICY_RESOURCES: ClassVar[KeywordField] = KeywordField( + "policyResources", "policyResources" + ) + """ + TBC + """ + POLICY_RESOURCE_CATEGORY: ClassVar[KeywordField] = KeywordField( + "policyResourceCategory", "policyResourceCategory" + ) + """ + TBC + """ + POLICY_PRIORITY: ClassVar[NumericField] = NumericField( + "policyPriority", "policyPriority" + ) + """ + TBC + """ + IS_POLICY_ENABLED: ClassVar[BooleanField] = BooleanField( + "isPolicyEnabled", "isPolicyEnabled" + ) + """ + TBC + """ + POLICY_MASK_TYPE: ClassVar[KeywordField] = KeywordField( + "policyMaskType", "policyMaskType" + ) + """ + TBC + """ + POLICY_VALIDITY_SCHEDULE: ClassVar[KeywordField] = KeywordField( + "policyValiditySchedule", "policyValiditySchedule" + ) + """ + TBC + """ + POLICY_RESOURCE_SIGNATURE: ClassVar[KeywordField] = KeywordField( + "policyResourceSignature", "policyResourceSignature" + ) + """ + TBC + """ + POLICY_DELEGATE_ADMIN: ClassVar[BooleanField] = BooleanField( + "policyDelegateAdmin", "policyDelegateAdmin" + ) + """ + TBC + """ + POLICY_CONDITIONS: ClassVar[KeywordField] = KeywordField( + "policyConditions", "policyConditions" + ) + """ + TBC + """ + + ACCESS_CONTROL: ClassVar[RelationField] = RelationField("accessControl") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "policy_type", + "policy_service_name", + "policy_category", + "policy_sub_category", + "policy_users", + "policy_groups", + "policy_roles", + "policy_actions", + "policy_resources", + "policy_resource_category", + "policy_priority", + "is_policy_enabled", + "policy_mask_type", + "policy_validity_schedule", + "policy_resource_signature", + "policy_delegate_admin", + "policy_conditions", + "access_control", + ] + + @property + def policy_type(self) -> Optional[AuthPolicyType]: + return None if self.attributes is None else self.attributes.policy_type + + @policy_type.setter + def policy_type(self, policy_type: Optional[AuthPolicyType]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_type = policy_type + + @property + def policy_service_name(self) -> 
Optional[str]: + return None if self.attributes is None else self.attributes.policy_service_name + + @policy_service_name.setter + def policy_service_name(self, policy_service_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_service_name = policy_service_name + + @property + def policy_category(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.policy_category + + @policy_category.setter + def policy_category(self, policy_category: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_category = policy_category + + @property + def policy_sub_category(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.policy_sub_category + + @policy_sub_category.setter + def policy_sub_category(self, policy_sub_category: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_sub_category = policy_sub_category + + @property + def policy_users(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.policy_users + + @policy_users.setter + def policy_users(self, policy_users: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_users = policy_users + + @property + def policy_groups(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.policy_groups + + @policy_groups.setter + def policy_groups(self, policy_groups: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_groups = policy_groups + + @property + def policy_roles(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.policy_roles + + @policy_roles.setter + def policy_roles(self, policy_roles: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_roles = policy_roles + + @property + def policy_actions(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.policy_actions + + @policy_actions.setter + def policy_actions(self, policy_actions: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_actions = policy_actions + + @property + def policy_resources(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.policy_resources + + @policy_resources.setter + def policy_resources(self, policy_resources: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_resources = policy_resources + + @property + def policy_resource_category(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.policy_resource_category + ) + + @policy_resource_category.setter + def policy_resource_category(self, policy_resource_category: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_resource_category = policy_resource_category + + @property + def policy_priority(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.policy_priority + + @policy_priority.setter + def policy_priority(self, policy_priority: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_priority = policy_priority + + 
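The accessors above all follow one pattern: the getter returns None whenever the underlying attribute is unset, and the setter lazily creates the nested Attributes model before writing the value under its Atlas alias. A minimal sketch of how the new AuthPolicy convenience properties behave, assuming a policy can be instantiated bare in the same way Persona() is elsewhere in this diff; the variable name and the user "jsmith" are made up for illustration, and real policies are normally produced through the Persona/Purpose helper methods rather than constructed directly:

    from pyatlan.model.assets import AuthPolicy
    from pyatlan.model.enums import AuthPolicyType

    policy = AuthPolicy()
    assert policy.policy_priority is None        # attribute not yet populated

    policy.policy_type = AuthPolicyType.ALLOW    # routed through the property setter
    policy.policy_priority = 1
    policy.policy_users = {"jsmith"}             # hypothetical username

    # values land on the nested Attributes model under their Atlas aliases
    assert policy.attributes.policy_priority == 1
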
@property + def is_policy_enabled(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_policy_enabled + + @is_policy_enabled.setter + def is_policy_enabled(self, is_policy_enabled: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_policy_enabled = is_policy_enabled + + @property + def policy_mask_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.policy_mask_type + + @policy_mask_type.setter + def policy_mask_type(self, policy_mask_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_mask_type = policy_mask_type + + @property + def policy_validity_schedule(self) -> Optional[list[AuthPolicyValiditySchedule]]: + return ( + None + if self.attributes is None + else self.attributes.policy_validity_schedule + ) + + @policy_validity_schedule.setter + def policy_validity_schedule( + self, policy_validity_schedule: Optional[list[AuthPolicyValiditySchedule]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_validity_schedule = policy_validity_schedule + + @property + def policy_resource_signature(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.policy_resource_signature + ) + + @policy_resource_signature.setter + def policy_resource_signature(self, policy_resource_signature: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_resource_signature = policy_resource_signature + + @property + def policy_delegate_admin(self) -> Optional[bool]: + return ( + None if self.attributes is None else self.attributes.policy_delegate_admin + ) + + @policy_delegate_admin.setter + def policy_delegate_admin(self, policy_delegate_admin: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_delegate_admin = policy_delegate_admin + + @property + def policy_conditions(self) -> Optional[list[AuthPolicyCondition]]: + return None if self.attributes is None else self.attributes.policy_conditions + + @policy_conditions.setter + def policy_conditions(self, policy_conditions: Optional[list[AuthPolicyCondition]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policy_conditions = policy_conditions + + @property + def access_control(self) -> Optional[AccessControl]: + return None if self.attributes is None else self.attributes.access_control + + @access_control.setter + def access_control(self, access_control: Optional[AccessControl]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.access_control = access_control + + class Attributes(Asset.Attributes): + policy_type: Optional[AuthPolicyType] = Field( + None, description="", alias="policyType" + ) + policy_service_name: Optional[str] = Field( + None, description="", alias="policyServiceName" + ) + policy_category: Optional[str] = Field( + None, description="", alias="policyCategory" + ) + policy_sub_category: Optional[str] = Field( + None, description="", alias="policySubCategory" + ) + policy_users: Optional[set[str]] = Field( + None, description="", alias="policyUsers" + ) + policy_groups: Optional[set[str]] = Field( + None, description="", alias="policyGroups" + ) + policy_roles: Optional[set[str]] = Field( + None, description="", alias="policyRoles" + ) + policy_actions: Optional[set[str]] = Field( + None, 
description="", alias="policyActions" + ) + policy_resources: Optional[set[str]] = Field( + None, description="", alias="policyResources" + ) + policy_resource_category: Optional[str] = Field( + None, description="", alias="policyResourceCategory" + ) + policy_priority: Optional[int] = Field( + None, description="", alias="policyPriority" + ) + is_policy_enabled: Optional[bool] = Field( + None, description="", alias="isPolicyEnabled" + ) + policy_mask_type: Optional[str] = Field( + None, description="", alias="policyMaskType" + ) + policy_validity_schedule: Optional[list[AuthPolicyValiditySchedule]] = Field( + None, description="", alias="policyValiditySchedule" + ) + policy_resource_signature: Optional[str] = Field( + None, description="", alias="policyResourceSignature" + ) + policy_delegate_admin: Optional[bool] = Field( + None, description="", alias="policyDelegateAdmin" + ) + policy_conditions: Optional[list[AuthPolicyCondition]] = Field( + None, description="", alias="policyConditions" + ) + access_control: Optional[AccessControl] = Field( + None, description="", alias="accessControl" + ) # relationship + + @classmethod + # @validate_arguments() + def __create(cls, name: str) -> AuthPolicy.Attributes: + validate_required_fields(["name"], [name]) + return AuthPolicy.Attributes( + qualified_name=name, name=name, display_name="" + ) + + attributes: "AuthPolicy.Attributes" = Field( + default_factory=lambda: AuthPolicy.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class AccessControl(Asset, type_name="AccessControl"): + """Description""" + + type_name: str = Field("AccessControl", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "AccessControl": + raise ValueError("must be AccessControl") + return v + + def __setattr__(self, name, value): + if name in AccessControl._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + IS_ACCESS_CONTROL_ENABLED: ClassVar[BooleanField] = BooleanField( + "isAccessControlEnabled", "isAccessControlEnabled" + ) + """ + TBC + """ + DENY_CUSTOM_METADATA_GUIDS: ClassVar[KeywordField] = KeywordField( + "denyCustomMetadataGuids", "denyCustomMetadataGuids" + ) + """ + TBC + """ + DENY_ASSET_TABS: ClassVar[KeywordField] = KeywordField( + "denyAssetTabs", "denyAssetTabs" + ) + """ + TBC + """ + DENY_ASSET_FILTERS: ClassVar[KeywordField] = KeywordField( + "denyAssetFilters", "denyAssetFilters" + ) + """ + TBC + """ + CHANNEL_LINK: ClassVar[KeywordField] = KeywordField("channelLink", "channelLink") + """ + TBC + """ + DENY_ASSET_TYPES: ClassVar[KeywordField] = KeywordField( + "denyAssetTypes", "denyAssetTypes" + ) + """ + TBC + """ + DENY_NAVIGATION_PAGES: ClassVar[KeywordField] = KeywordField( + "denyNavigationPages", "denyNavigationPages" + ) + """ + TBC + """ + DEFAULT_NAVIGATION: ClassVar[KeywordField] = KeywordField( + "defaultNavigation", "defaultNavigation" + ) + """ + TBC + """ + + POLICIES: ClassVar[RelationField] = RelationField("policies") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "is_access_control_enabled", + "deny_custom_metadata_guids", + "deny_asset_tabs", + "deny_asset_filters", + "channel_link", + "deny_asset_types", + "deny_navigation_pages", + "default_navigation", + "policies", + ] + + @property + def is_access_control_enabled(self) -> Optional[bool]: + return ( + None + 
if self.attributes is None + else self.attributes.is_access_control_enabled + ) + + @is_access_control_enabled.setter + def is_access_control_enabled(self, is_access_control_enabled: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_access_control_enabled = is_access_control_enabled + + @property + def deny_custom_metadata_guids(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.deny_custom_metadata_guids + ) + + @deny_custom_metadata_guids.setter + def deny_custom_metadata_guids( + self, deny_custom_metadata_guids: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.deny_custom_metadata_guids = deny_custom_metadata_guids + + @property + def deny_asset_tabs(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.deny_asset_tabs + + @deny_asset_tabs.setter + def deny_asset_tabs(self, deny_asset_tabs: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.deny_asset_tabs = deny_asset_tabs + + @property + def deny_asset_filters(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.deny_asset_filters + + @deny_asset_filters.setter + def deny_asset_filters(self, deny_asset_filters: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.deny_asset_filters = deny_asset_filters + + @property + def channel_link(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.channel_link + + @channel_link.setter + def channel_link(self, channel_link: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.channel_link = channel_link + + @property + def deny_asset_types(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.deny_asset_types + + @deny_asset_types.setter + def deny_asset_types(self, deny_asset_types: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.deny_asset_types = deny_asset_types + + @property + def deny_navigation_pages(self) -> Optional[set[str]]: + return ( + None if self.attributes is None else self.attributes.deny_navigation_pages + ) + + @deny_navigation_pages.setter + def deny_navigation_pages(self, deny_navigation_pages: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.deny_navigation_pages = deny_navigation_pages + + @property + def default_navigation(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.default_navigation + + @default_navigation.setter + def default_navigation(self, default_navigation: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.default_navigation = default_navigation + + @property + def policies(self) -> Optional[list[AuthPolicy]]: + return None if self.attributes is None else self.attributes.policies + + @policies.setter + def policies(self, policies: Optional[list[AuthPolicy]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policies = policies + + class Attributes(Asset.Attributes): + is_access_control_enabled: Optional[bool] = Field( + None, description="", alias="isAccessControlEnabled" + ) + deny_custom_metadata_guids: Optional[set[str]] = Field( + None, description="", 
alias="denyCustomMetadataGuids" + ) + deny_asset_tabs: Optional[set[str]] = Field( + None, description="", alias="denyAssetTabs" + ) + deny_asset_filters: Optional[set[str]] = Field( + None, description="", alias="denyAssetFilters" + ) + channel_link: Optional[str] = Field(None, description="", alias="channelLink") + deny_asset_types: Optional[set[str]] = Field( + None, description="", alias="denyAssetTypes" + ) + deny_navigation_pages: Optional[set[str]] = Field( + None, description="", alias="denyNavigationPages" + ) + default_navigation: Optional[str] = Field( + None, description="", alias="defaultNavigation" + ) + policies: Optional[list[AuthPolicy]] = Field( + None, description="", alias="policies" + ) # relationship + + attributes: "AccessControl.Attributes" = Field( + default_factory=lambda: AccessControl.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +AuthPolicy.Attributes.update_forward_refs() -ProcessExecution.Attributes.update_forward_refs() +AccessControl.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset07.py b/pyatlan/model/assets/asset07.py index 5d4285fa5..9c75e7cdf 100644 --- a/pyatlan/model/assets/asset07.py +++ b/pyatlan/model/assets/asset07.py @@ -4,146 +4,30 @@ from __future__ import annotations -from typing import ClassVar, Optional +from typing import ClassVar from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import BooleanField, KeywordField, NumericField - from .asset00 import Asset -class AuthService(Asset, type_name="AuthService"): +class ProcessExecution(Asset, type_name="ProcessExecution"): """Description""" - type_name: str = Field("AuthService", allow_mutation=False) + type_name: str = Field("ProcessExecution", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "AuthService": - raise ValueError("must be AuthService") + if v != "ProcessExecution": + raise ValueError("must be ProcessExecution") return v def __setattr__(self, name, value): - if name in AuthService._convenience_properties: + if name in ProcessExecution._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - AUTH_SERVICE_TYPE: ClassVar[KeywordField] = KeywordField( - "authServiceType", "authServiceType" - ) - """ - TBC - """ - TAG_SERVICE: ClassVar[KeywordField] = KeywordField("tagService", "tagService") - """ - TBC - """ - AUTH_SERVICE_IS_ENABLED: ClassVar[BooleanField] = BooleanField( - "authServiceIsEnabled", "authServiceIsEnabled" - ) - """ - TBC - """ - AUTH_SERVICE_CONFIG: ClassVar[KeywordField] = KeywordField( - "authServiceConfig", "authServiceConfig" - ) - """ - TBC - """ - AUTH_SERVICE_POLICY_LAST_SYNC: ClassVar[NumericField] = NumericField( - "authServicePolicyLastSync", "authServicePolicyLastSync" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "auth_service_type", - "tag_service", - "auth_service_is_enabled", - "auth_service_config", - "auth_service_policy_last_sync", - ] - - @property - def auth_service_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.auth_service_type - - @auth_service_type.setter - def auth_service_type(self, auth_service_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.auth_service_type = auth_service_type - - @property - def tag_service(self) -> 
Optional[str]: - return None if self.attributes is None else self.attributes.tag_service - - @tag_service.setter - def tag_service(self, tag_service: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tag_service = tag_service - - @property - def auth_service_is_enabled(self) -> Optional[bool]: - return ( - None if self.attributes is None else self.attributes.auth_service_is_enabled - ) - - @auth_service_is_enabled.setter - def auth_service_is_enabled(self, auth_service_is_enabled: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.auth_service_is_enabled = auth_service_is_enabled - - @property - def auth_service_config(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.auth_service_config - - @auth_service_config.setter - def auth_service_config(self, auth_service_config: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.auth_service_config = auth_service_config - - @property - def auth_service_policy_last_sync(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.auth_service_policy_last_sync - ) - - @auth_service_policy_last_sync.setter - def auth_service_policy_last_sync( - self, auth_service_policy_last_sync: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.auth_service_policy_last_sync = auth_service_policy_last_sync - - class Attributes(Asset.Attributes): - auth_service_type: Optional[str] = Field( - None, description="", alias="authServiceType" - ) - tag_service: Optional[str] = Field(None, description="", alias="tagService") - auth_service_is_enabled: Optional[bool] = Field( - None, description="", alias="authServiceIsEnabled" - ) - auth_service_config: Optional[dict[str, str]] = Field( - None, description="", alias="authServiceConfig" - ) - auth_service_policy_last_sync: Optional[int] = Field( - None, description="", alias="authServicePolicyLastSync" - ) - - attributes: "AuthService.Attributes" = Field( - default_factory=lambda: AuthService.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) + _convenience_properties: ClassVar[list[str]] = [] -AuthService.Attributes.update_forward_refs() +ProcessExecution.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset08.py b/pyatlan/model/assets/asset08.py index 54f951c2a..5d4285fa5 100644 --- a/pyatlan/model/assets/asset08.py +++ b/pyatlan/model/assets/asset08.py @@ -4,30 +4,146 @@ from __future__ import annotations -from typing import ClassVar +from typing import ClassVar, Optional from pydantic import Field, validator +from pyatlan.model.fields.atlan_fields import BooleanField, KeywordField, NumericField + from .asset00 import Asset -class Cloud(Asset, type_name="Cloud"): +class AuthService(Asset, type_name="AuthService"): """Description""" - type_name: str = Field("Cloud", allow_mutation=False) + type_name: str = Field("AuthService", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Cloud": - raise ValueError("must be Cloud") + if v != "AuthService": + raise ValueError("must be AuthService") return v def __setattr__(self, name, value): - if name in Cloud._convenience_properties: + if name in AuthService._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - _convenience_properties: ClassVar[list[str]] = [] + AUTH_SERVICE_TYPE: ClassVar[KeywordField] = KeywordField( + "authServiceType", "authServiceType" + ) + """ + TBC + """ + TAG_SERVICE: ClassVar[KeywordField] = KeywordField("tagService", "tagService") + """ + TBC + """ + AUTH_SERVICE_IS_ENABLED: ClassVar[BooleanField] = BooleanField( + "authServiceIsEnabled", "authServiceIsEnabled" + ) + """ + TBC + """ + AUTH_SERVICE_CONFIG: ClassVar[KeywordField] = KeywordField( + "authServiceConfig", "authServiceConfig" + ) + """ + TBC + """ + AUTH_SERVICE_POLICY_LAST_SYNC: ClassVar[NumericField] = NumericField( + "authServicePolicyLastSync", "authServicePolicyLastSync" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "auth_service_type", + "tag_service", + "auth_service_is_enabled", + "auth_service_config", + "auth_service_policy_last_sync", + ] + + @property + def auth_service_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.auth_service_type + + @auth_service_type.setter + def auth_service_type(self, auth_service_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.auth_service_type = auth_service_type + + @property + def tag_service(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.tag_service + + @tag_service.setter + def tag_service(self, tag_service: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tag_service = tag_service + + @property + def auth_service_is_enabled(self) -> Optional[bool]: + return ( + None if self.attributes is None else self.attributes.auth_service_is_enabled + ) + + @auth_service_is_enabled.setter + def auth_service_is_enabled(self, auth_service_is_enabled: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.auth_service_is_enabled = auth_service_is_enabled + + @property + def auth_service_config(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.auth_service_config + + @auth_service_config.setter + def auth_service_config(self, 
auth_service_config: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.auth_service_config = auth_service_config + + @property + def auth_service_policy_last_sync(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.auth_service_policy_last_sync + ) + + @auth_service_policy_last_sync.setter + def auth_service_policy_last_sync( + self, auth_service_policy_last_sync: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.auth_service_policy_last_sync = auth_service_policy_last_sync + + class Attributes(Asset.Attributes): + auth_service_type: Optional[str] = Field( + None, description="", alias="authServiceType" + ) + tag_service: Optional[str] = Field(None, description="", alias="tagService") + auth_service_is_enabled: Optional[bool] = Field( + None, description="", alias="authServiceIsEnabled" + ) + auth_service_config: Optional[dict[str, str]] = Field( + None, description="", alias="authServiceConfig" + ) + auth_service_policy_last_sync: Optional[int] = Field( + None, description="", alias="authServicePolicyLastSync" + ) + + attributes: "AuthService.Attributes" = Field( + default_factory=lambda: AuthService.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) -Cloud.Attributes.update_forward_refs() +AuthService.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset09.py b/pyatlan/model/assets/asset09.py index 2ef41930d..54f951c2a 100644 --- a/pyatlan/model/assets/asset09.py +++ b/pyatlan/model/assets/asset09.py @@ -11,23 +11,23 @@ from .asset00 import Asset -class Infrastructure(Asset, type_name="Infrastructure"): +class Cloud(Asset, type_name="Cloud"): """Description""" - type_name: str = Field("Infrastructure", allow_mutation=False) + type_name: str = Field("Cloud", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Infrastructure": - raise ValueError("must be Infrastructure") + if v != "Cloud": + raise ValueError("must be Cloud") return v def __setattr__(self, name, value): - if name in Infrastructure._convenience_properties: + if name in Cloud._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] -Infrastructure.Attributes.update_forward_refs() +Cloud.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset10.py b/pyatlan/model/assets/asset10.py index 96c3b8e1a..2ef41930d 100644 --- a/pyatlan/model/assets/asset10.py +++ b/pyatlan/model/assets/asset10.py @@ -4,78 +4,30 @@ from __future__ import annotations -from typing import ClassVar, Optional +from typing import ClassVar from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import RelationField +from .asset00 import Asset -from .asset00 import Catalog, Process - -class BIProcess(Process): +class Infrastructure(Asset, type_name="Infrastructure"): """Description""" - type_name: str = Field("BIProcess", allow_mutation=False) + type_name: str = Field("Infrastructure", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "BIProcess": - raise ValueError("must be BIProcess") + if v != "Infrastructure": + raise ValueError("must be Infrastructure") return v def __setattr__(self, name, value): - if name in 
BIProcess._convenience_properties: + if name in Infrastructure._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - OUTPUTS: ClassVar[RelationField] = RelationField("outputs") - """ - TBC - """ - INPUTS: ClassVar[RelationField] = RelationField("inputs") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "outputs", - "inputs", - ] - - @property - def outputs(self) -> Optional[list[Catalog]]: - return None if self.attributes is None else self.attributes.outputs - - @outputs.setter - def outputs(self, outputs: Optional[list[Catalog]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.outputs = outputs - - @property - def inputs(self) -> Optional[list[Catalog]]: - return None if self.attributes is None else self.attributes.inputs - - @inputs.setter - def inputs(self, inputs: Optional[list[Catalog]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.inputs = inputs - - class Attributes(Process.Attributes): - outputs: Optional[list[Catalog]] = Field( - None, description="", alias="outputs" - ) # relationship - inputs: Optional[list[Catalog]] = Field( - None, description="", alias="inputs" - ) # relationship - - attributes: "BIProcess.Attributes" = Field( - default_factory=lambda: BIProcess.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) + _convenience_properties: ClassVar[list[str]] = [] -BIProcess.Attributes.update_forward_refs() +Infrastructure.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset11.py b/pyatlan/model/assets/asset11.py index e6514d43d..fef540836 100644 --- a/pyatlan/model/assets/asset11.py +++ b/pyatlan/model/assets/asset11.py @@ -4,433 +4,45 @@ from __future__ import annotations -from datetime import datetime from typing import ClassVar, Optional from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import ( - KeywordField, - KeywordTextField, - NumericField, - RelationField, -) +from pyatlan.model.fields.atlan_fields import RelationField -from .asset00 import AirflowTask, Catalog, ColumnProcess, Dbt, MatillionComponent +from .asset00 import Catalog, Process -class DbtProcess(Dbt): +class BIProcess(Process): """Description""" - type_name: str = Field("DbtProcess", allow_mutation=False) + type_name: str = Field("BIProcess", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "DbtProcess": - raise ValueError("must be DbtProcess") + if v != "BIProcess": + raise ValueError("must be BIProcess") return v def __setattr__(self, name, value): - if name in DbtProcess._convenience_properties: + if name in BIProcess._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - DBT_PROCESS_JOB_STATUS: ClassVar[KeywordField] = KeywordField( - "dbtProcessJobStatus", "dbtProcessJobStatus" - ) - """ - TBC - """ - DBT_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( - "dbtAlias", "dbtAlias.keyword", "dbtAlias" - ) - """ - TBC - """ - DBT_META: ClassVar[KeywordField] = KeywordField("dbtMeta", "dbtMeta") - """ - TBC - """ - DBT_UNIQUE_ID: ClassVar[KeywordTextField] = KeywordTextField( - "dbtUniqueId", "dbtUniqueId.keyword", "dbtUniqueId" - ) - """ - TBC - """ - DBT_ACCOUNT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtAccountName", "dbtAccountName.keyword", 
"dbtAccountName" - ) - """ - TBC + OUTPUTS: ClassVar[RelationField] = RelationField("outputs") """ - DBT_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtProjectName", "dbtProjectName.keyword", "dbtProjectName" - ) - """ - TBC - """ - DBT_PACKAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtPackageName", "dbtPackageName.keyword", "dbtPackageName" - ) - """ - TBC - """ - DBT_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtJobName", "dbtJobName.keyword", "dbtJobName" - ) - """ - TBC - """ - DBT_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( - "dbtJobSchedule", "dbtJobSchedule" - ) - """ - TBC - """ - DBT_JOB_STATUS: ClassVar[KeywordField] = KeywordField( - "dbtJobStatus", "dbtJobStatus" - ) + Assets that are outputs from this process. """ - TBC - """ - DBT_JOB_SCHEDULE_CRON_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( - "dbtJobScheduleCronHumanized", - "dbtJobScheduleCronHumanized.keyword", - "dbtJobScheduleCronHumanized", - ) - """ - TBC - """ - DBT_JOB_LAST_RUN: ClassVar[NumericField] = NumericField( - "dbtJobLastRun", "dbtJobLastRun" - ) - """ - TBC - """ - DBT_JOB_NEXT_RUN: ClassVar[NumericField] = NumericField( - "dbtJobNextRun", "dbtJobNextRun" - ) - """ - TBC - """ - DBT_JOB_NEXT_RUN_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( - "dbtJobNextRunHumanized", - "dbtJobNextRunHumanized.keyword", - "dbtJobNextRunHumanized", - ) - """ - TBC - """ - DBT_ENVIRONMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtEnvironmentName", "dbtEnvironmentName.keyword", "dbtEnvironmentName" - ) - """ - TBC - """ - DBT_ENVIRONMENT_DBT_VERSION: ClassVar[KeywordTextField] = KeywordTextField( - "dbtEnvironmentDbtVersion", - "dbtEnvironmentDbtVersion.keyword", - "dbtEnvironmentDbtVersion", - ) - """ - TBC - """ - DBT_TAGS: ClassVar[KeywordField] = KeywordField("dbtTags", "dbtTags") - """ - TBC - """ - DBT_CONNECTION_CONTEXT: ClassVar[KeywordField] = KeywordField( - "dbtConnectionContext", "dbtConnectionContext" - ) - """ - TBC - """ - DBT_SEMANTIC_LAYER_PROXY_URL: ClassVar[KeywordField] = KeywordField( - "dbtSemanticLayerProxyUrl", "dbtSemanticLayerProxyUrl" - ) + INPUTS: ClassVar[RelationField] = RelationField("inputs") """ - TBC - """ - CODE: ClassVar[KeywordField] = KeywordField("code", "code") - """ - TBC - """ - SQL: ClassVar[KeywordField] = KeywordField("sql", "sql") - """ - TBC - """ - AST: ClassVar[KeywordField] = KeywordField("ast", "ast") - """ - TBC - """ - - MATILLION_COMPONENT: ClassVar[RelationField] = RelationField("matillionComponent") - """ - TBC - """ - AIRFLOW_TASKS: ClassVar[RelationField] = RelationField("airflowTasks") - """ - TBC - """ - COLUMN_PROCESSES: ClassVar[RelationField] = RelationField("columnProcesses") - """ - TBC + Assets that are inputs to this process. 
""" _convenience_properties: ClassVar[list[str]] = [ - "dbt_process_job_status", - "dbt_alias", - "dbt_meta", - "dbt_unique_id", - "dbt_account_name", - "dbt_project_name", - "dbt_package_name", - "dbt_job_name", - "dbt_job_schedule", - "dbt_job_status", - "dbt_job_schedule_cron_humanized", - "dbt_job_last_run", - "dbt_job_next_run", - "dbt_job_next_run_humanized", - "dbt_environment_name", - "dbt_environment_dbt_version", - "dbt_tags", - "dbt_connection_context", - "dbt_semantic_layer_proxy_url", - "inputs", "outputs", - "code", - "sql", - "ast", - "matillion_component", - "airflow_tasks", - "column_processes", + "inputs", ] - @property - def dbt_process_job_status(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dbt_process_job_status - ) - - @dbt_process_job_status.setter - def dbt_process_job_status(self, dbt_process_job_status: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_process_job_status = dbt_process_job_status - - @property - def dbt_alias(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_alias - - @dbt_alias.setter - def dbt_alias(self, dbt_alias: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_alias = dbt_alias - - @property - def dbt_meta(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_meta - - @dbt_meta.setter - def dbt_meta(self, dbt_meta: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_meta = dbt_meta - - @property - def dbt_unique_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_unique_id - - @dbt_unique_id.setter - def dbt_unique_id(self, dbt_unique_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_unique_id = dbt_unique_id - - @property - def dbt_account_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_account_name - - @dbt_account_name.setter - def dbt_account_name(self, dbt_account_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_account_name = dbt_account_name - - @property - def dbt_project_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_project_name - - @dbt_project_name.setter - def dbt_project_name(self, dbt_project_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_project_name = dbt_project_name - - @property - def dbt_package_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_package_name - - @dbt_package_name.setter - def dbt_package_name(self, dbt_package_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_package_name = dbt_package_name - - @property - def dbt_job_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_job_name - - @dbt_job_name.setter - def dbt_job_name(self, dbt_job_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_name = dbt_job_name - - @property - def dbt_job_schedule(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_job_schedule - - @dbt_job_schedule.setter - def 
dbt_job_schedule(self, dbt_job_schedule: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_schedule = dbt_job_schedule - - @property - def dbt_job_status(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_job_status - - @dbt_job_status.setter - def dbt_job_status(self, dbt_job_status: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_status = dbt_job_status - - @property - def dbt_job_schedule_cron_humanized(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_job_schedule_cron_humanized - ) - - @dbt_job_schedule_cron_humanized.setter - def dbt_job_schedule_cron_humanized( - self, dbt_job_schedule_cron_humanized: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_schedule_cron_humanized = ( - dbt_job_schedule_cron_humanized - ) - - @property - def dbt_job_last_run(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.dbt_job_last_run - - @dbt_job_last_run.setter - def dbt_job_last_run(self, dbt_job_last_run: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_last_run = dbt_job_last_run - - @property - def dbt_job_next_run(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.dbt_job_next_run - - @dbt_job_next_run.setter - def dbt_job_next_run(self, dbt_job_next_run: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_next_run = dbt_job_next_run - - @property - def dbt_job_next_run_humanized(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_job_next_run_humanized - ) - - @dbt_job_next_run_humanized.setter - def dbt_job_next_run_humanized(self, dbt_job_next_run_humanized: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_next_run_humanized = dbt_job_next_run_humanized - - @property - def dbt_environment_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_environment_name - - @dbt_environment_name.setter - def dbt_environment_name(self, dbt_environment_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_environment_name = dbt_environment_name - - @property - def dbt_environment_dbt_version(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_environment_dbt_version - ) - - @dbt_environment_dbt_version.setter - def dbt_environment_dbt_version(self, dbt_environment_dbt_version: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_environment_dbt_version = dbt_environment_dbt_version - - @property - def dbt_tags(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.dbt_tags - - @dbt_tags.setter - def dbt_tags(self, dbt_tags: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_tags = dbt_tags - - @property - def dbt_connection_context(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dbt_connection_context - ) - - @dbt_connection_context.setter - def dbt_connection_context(self, dbt_connection_context: 
Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_connection_context = dbt_connection_context - - @property - def dbt_semantic_layer_proxy_url(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_semantic_layer_proxy_url - ) - - @dbt_semantic_layer_proxy_url.setter - def dbt_semantic_layer_proxy_url(self, dbt_semantic_layer_proxy_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_semantic_layer_proxy_url = dbt_semantic_layer_proxy_url - - @property - def inputs(self) -> Optional[list[Catalog]]: - return None if self.attributes is None else self.attributes.inputs - - @inputs.setter - def inputs(self, inputs: Optional[list[Catalog]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.inputs = inputs - @property def outputs(self) -> Optional[list[Catalog]]: return None if self.attributes is None else self.attributes.outputs @@ -442,133 +54,28 @@ def outputs(self, outputs: Optional[list[Catalog]]): self.attributes.outputs = outputs @property - def code(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.code - - @code.setter - def code(self, code: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.code = code - - @property - def sql(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.sql - - @sql.setter - def sql(self, sql: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sql = sql - - @property - def ast(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.ast - - @ast.setter - def ast(self, ast: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.ast = ast - - @property - def matillion_component(self) -> Optional[MatillionComponent]: - return None if self.attributes is None else self.attributes.matillion_component - - @matillion_component.setter - def matillion_component(self, matillion_component: Optional[MatillionComponent]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_component = matillion_component - - @property - def airflow_tasks(self) -> Optional[list[AirflowTask]]: - return None if self.attributes is None else self.attributes.airflow_tasks - - @airflow_tasks.setter - def airflow_tasks(self, airflow_tasks: Optional[list[AirflowTask]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_tasks = airflow_tasks - - @property - def column_processes(self) -> Optional[list[ColumnProcess]]: - return None if self.attributes is None else self.attributes.column_processes + def inputs(self) -> Optional[list[Catalog]]: + return None if self.attributes is None else self.attributes.inputs - @column_processes.setter - def column_processes(self, column_processes: Optional[list[ColumnProcess]]): + @inputs.setter + def inputs(self, inputs: Optional[list[Catalog]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.column_processes = column_processes + self.attributes.inputs = inputs - class Attributes(Dbt.Attributes): - dbt_process_job_status: Optional[str] = Field( - None, description="", alias="dbtProcessJobStatus" - ) - dbt_alias: Optional[str] = Field(None, description="", alias="dbtAlias") - dbt_meta: Optional[str] = Field(None, 
description="", alias="dbtMeta") - dbt_unique_id: Optional[str] = Field(None, description="", alias="dbtUniqueId") - dbt_account_name: Optional[str] = Field( - None, description="", alias="dbtAccountName" - ) - dbt_project_name: Optional[str] = Field( - None, description="", alias="dbtProjectName" - ) - dbt_package_name: Optional[str] = Field( - None, description="", alias="dbtPackageName" - ) - dbt_job_name: Optional[str] = Field(None, description="", alias="dbtJobName") - dbt_job_schedule: Optional[str] = Field( - None, description="", alias="dbtJobSchedule" - ) - dbt_job_status: Optional[str] = Field( - None, description="", alias="dbtJobStatus" - ) - dbt_job_schedule_cron_humanized: Optional[str] = Field( - None, description="", alias="dbtJobScheduleCronHumanized" - ) - dbt_job_last_run: Optional[datetime] = Field( - None, description="", alias="dbtJobLastRun" - ) - dbt_job_next_run: Optional[datetime] = Field( - None, description="", alias="dbtJobNextRun" - ) - dbt_job_next_run_humanized: Optional[str] = Field( - None, description="", alias="dbtJobNextRunHumanized" - ) - dbt_environment_name: Optional[str] = Field( - None, description="", alias="dbtEnvironmentName" - ) - dbt_environment_dbt_version: Optional[str] = Field( - None, description="", alias="dbtEnvironmentDbtVersion" - ) - dbt_tags: Optional[set[str]] = Field(None, description="", alias="dbtTags") - dbt_connection_context: Optional[str] = Field( - None, description="", alias="dbtConnectionContext" - ) - dbt_semantic_layer_proxy_url: Optional[str] = Field( - None, description="", alias="dbtSemanticLayerProxyUrl" - ) - inputs: Optional[list[Catalog]] = Field(None, description="", alias="inputs") - outputs: Optional[list[Catalog]] = Field(None, description="", alias="outputs") - code: Optional[str] = Field(None, description="", alias="code") - sql: Optional[str] = Field(None, description="", alias="sql") - ast: Optional[str] = Field(None, description="", alias="ast") - matillion_component: Optional[MatillionComponent] = Field( - None, description="", alias="matillionComponent" - ) # relationship - airflow_tasks: Optional[list[AirflowTask]] = Field( - None, description="", alias="airflowTasks" + class Attributes(Process.Attributes): + outputs: Optional[list[Catalog]] = Field( + None, description="", alias="outputs" ) # relationship - column_processes: Optional[list[ColumnProcess]] = Field( - None, description="", alias="columnProcesses" + inputs: Optional[list[Catalog]] = Field( + None, description="", alias="inputs" ) # relationship - attributes: "DbtProcess.Attributes" = Field( - default_factory=lambda: DbtProcess.Attributes(), + attributes: "BIProcess.Attributes" = Field( + default_factory=lambda: BIProcess.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -DbtProcess.Attributes.update_forward_refs() +BIProcess.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset12.py b/pyatlan/model/assets/asset12.py index 5e447dd99..d7dbfd164 100644 --- a/pyatlan/model/assets/asset12.py +++ b/pyatlan/model/assets/asset12.py @@ -4,264 +4,571 @@ from __future__ import annotations -from typing import ClassVar, Optional, Set +from datetime import datetime +from typing import ClassVar, Optional from pydantic import Field, validator -from pyatlan.model.enums import ( - AuthPolicyCategory, - AuthPolicyResourceCategory, - AuthPolicyType, - DataAction, - PersonaDomainAction, - PersonaGlossaryAction, - PersonaMetadataAction, +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + NumericField, + RelationField, ) -from pyatlan.model.fields.atlan_fields import KeywordField -from pyatlan.utils import init_guid, validate_required_fields -from .asset00 import SelfAsset -from .asset05 import AccessControl, AuthPolicy +from .asset00 import AirflowTask, Catalog, ColumnProcess, Dbt, MatillionComponent -class Persona(AccessControl): +class DbtProcess(Dbt): """Description""" - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str) -> Persona: - validate_required_fields(["name"], [name]) - attributes = Persona.Attributes.create(name=name) - return cls(attributes=attributes) - - @classmethod - # @validate_arguments() - def create_metadata_policy( - cls, - *, - name: str, - persona_id: str, - policy_type: AuthPolicyType, - actions: Set[PersonaMetadataAction], - connection_qualified_name: str, - resources: Set[str], - ) -> AuthPolicy: - validate_required_fields( - ["name", "persona_id", "policy_type", "actions", "resources"], - [name, persona_id, policy_type, actions, resources], - ) - policy = AuthPolicy._AuthPolicy__create(name=name) # type: ignore - policy.policy_actions = {x.value for x in actions} - policy.policy_category = AuthPolicyCategory.PERSONA.value - policy.policy_type = policy_type - policy.connection_qualified_name = connection_qualified_name - policy.policy_resources = resources - policy.policy_resource_category = AuthPolicyResourceCategory.CUSTOM.value - policy.policy_service_name = "atlas" - policy.policy_sub_category = "metadata" - persona = Persona() - persona.guid = persona_id - policy.access_control = persona - return policy - - @classmethod - # @validate_arguments() - def create_data_policy( - cls, - *, - name: str, - persona_id: str, - policy_type: AuthPolicyType, - connection_qualified_name: str, - resources: Set[str], - ) -> AuthPolicy: - validate_required_fields( - ["name", "persona_id", "policy_type", "resources"], - [name, persona_id, policy_type, resources], - ) - policy = AuthPolicy._AuthPolicy__create(name=name) # type: ignore - policy.policy_actions = {DataAction.SELECT.value} - policy.policy_category = AuthPolicyCategory.PERSONA.value - policy.policy_type = policy_type - policy.connection_qualified_name = connection_qualified_name - policy.policy_resources = resources - policy.policy_resources.add("entity-type:*") - policy.policy_resource_category = AuthPolicyResourceCategory.ENTITY.value - policy.policy_service_name = "heka" - policy.policy_sub_category = "data" - persona = Persona() - persona.guid = persona_id - policy.access_control = persona - return policy - - @classmethod - # @validate_arguments() - def create_glossary_policy( - cls, - *, - name: str, - 
persona_id: str, - policy_type: AuthPolicyType, - actions: Set[PersonaGlossaryAction], - resources: Set[str], - ) -> AuthPolicy: - validate_required_fields( - ["name", "persona_id", "policy_type", "actions", "resources"], - [name, persona_id, policy_type, actions, resources], - ) - policy = AuthPolicy._AuthPolicy__create(name=name) # type: ignore - policy.policy_actions = {x.value for x in actions} - policy.policy_category = AuthPolicyCategory.PERSONA.value - policy.policy_type = policy_type - policy.policy_resources = resources - policy.policy_resource_category = AuthPolicyResourceCategory.CUSTOM.value - policy.policy_service_name = "atlas" - policy.policy_sub_category = "glossary" - persona = Persona() - persona.guid = persona_id - policy.access_control = persona - return policy - - @classmethod - # @validate_arguments() - def create_domain_policy( - cls, - *, - name: str, - persona_id: str, - actions: Set[PersonaDomainAction], - resources: Set[str], - ) -> AuthPolicy: - validate_required_fields( - ["name", "persona_id", "actions", "resources"], - [name, persona_id, actions, resources], - ) - policy = AuthPolicy._AuthPolicy__create(name=name) # type: ignore - policy.policy_actions = {x.value for x in actions} - policy.policy_category = AuthPolicyCategory.PERSONA.value - policy.policy_type = AuthPolicyType.ALLOW - policy.policy_resources = resources - policy.policy_resource_category = AuthPolicyResourceCategory.CUSTOM.value - policy.policy_service_name = "atlas" - policy.policy_sub_category = "domain" - persona = Persona() - persona.guid = persona_id - policy.access_control = persona - return policy - - @classmethod - def create_for_modification( - cls: type[SelfAsset], - qualified_name: str = "", - name: str = "", - is_enabled: bool = True, - ) -> SelfAsset: - validate_required_fields( - ["name", "qualified_name", "is_enabled"], - [name, qualified_name, is_enabled], - ) - return cls( - attributes=cls.Attributes( - qualified_name=qualified_name, - name=name, - is_access_control_enabled=is_enabled, - ) - ) - - type_name: str = Field("Persona", allow_mutation=False) + type_name: str = Field("DbtProcess", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Persona": - raise ValueError("must be Persona") + if v != "DbtProcess": + raise ValueError("must be DbtProcess") return v def __setattr__(self, name, value): - if name in Persona._convenience_properties: + if name in DbtProcess._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - PERSONA_GROUPS: ClassVar[KeywordField] = KeywordField( - "personaGroups", "personaGroups" + DBT_PROCESS_JOB_STATUS: ClassVar[KeywordField] = KeywordField( + "dbtProcessJobStatus", "dbtProcessJobStatus" + ) + """ + + """ + DBT_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( + "dbtAlias", "dbtAlias.keyword", "dbtAlias" + ) + """ + + """ + DBT_META: ClassVar[KeywordField] = KeywordField("dbtMeta", "dbtMeta") + """ + + """ + DBT_UNIQUE_ID: ClassVar[KeywordTextField] = KeywordTextField( + "dbtUniqueId", "dbtUniqueId.keyword", "dbtUniqueId" + ) + """ + + """ + DBT_ACCOUNT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtAccountName", "dbtAccountName.keyword", "dbtAccountName" + ) + """ + + """ + DBT_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtProjectName", "dbtProjectName.keyword", "dbtProjectName" + ) + """ + + """ + DBT_PACKAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtPackageName", "dbtPackageName.keyword", "dbtPackageName" 
+ ) + """ + + """ + DBT_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtJobName", "dbtJobName.keyword", "dbtJobName" + ) + """ + + """ + DBT_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( + "dbtJobSchedule", "dbtJobSchedule" + ) + """ + + """ + DBT_JOB_STATUS: ClassVar[KeywordField] = KeywordField( + "dbtJobStatus", "dbtJobStatus" + ) + """ + + """ + DBT_JOB_SCHEDULE_CRON_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( + "dbtJobScheduleCronHumanized", + "dbtJobScheduleCronHumanized.keyword", + "dbtJobScheduleCronHumanized", + ) + """ + + """ + DBT_JOB_LAST_RUN: ClassVar[NumericField] = NumericField( + "dbtJobLastRun", "dbtJobLastRun" + ) + """ + + """ + DBT_JOB_NEXT_RUN: ClassVar[NumericField] = NumericField( + "dbtJobNextRun", "dbtJobNextRun" + ) + """ + + """ + DBT_JOB_NEXT_RUN_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( + "dbtJobNextRunHumanized", + "dbtJobNextRunHumanized.keyword", + "dbtJobNextRunHumanized", + ) + """ + + """ + DBT_ENVIRONMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtEnvironmentName", "dbtEnvironmentName.keyword", "dbtEnvironmentName" ) """ + + """ + DBT_ENVIRONMENT_DBT_VERSION: ClassVar[KeywordTextField] = KeywordTextField( + "dbtEnvironmentDbtVersion", + "dbtEnvironmentDbtVersion.keyword", + "dbtEnvironmentDbtVersion", + ) + """ + + """ + DBT_TAGS: ClassVar[KeywordField] = KeywordField("dbtTags", "dbtTags") + """ + + """ + DBT_CONNECTION_CONTEXT: ClassVar[KeywordField] = KeywordField( + "dbtConnectionContext", "dbtConnectionContext" + ) + """ + + """ + DBT_SEMANTIC_LAYER_PROXY_URL: ClassVar[KeywordField] = KeywordField( + "dbtSemanticLayerProxyUrl", "dbtSemanticLayerProxyUrl" + ) + """ + + """ + CODE: ClassVar[KeywordField] = KeywordField("code", "code") + """ + Code that ran within the process. + """ + SQL: ClassVar[KeywordField] = KeywordField("sql", "sql") + """ + SQL query that ran to produce the outputs. + """ + AST: ClassVar[KeywordField] = KeywordField("ast", "ast") + """ + Parsed AST of the code or SQL statements that describe the logic of this process. 
+ """ + + MATILLION_COMPONENT: ClassVar[RelationField] = RelationField("matillionComponent") + """ TBC """ - PERSONA_USERS: ClassVar[KeywordField] = KeywordField("personaUsers", "personaUsers") + AIRFLOW_TASKS: ClassVar[RelationField] = RelationField("airflowTasks") """ TBC """ - ROLE_ID: ClassVar[KeywordField] = KeywordField("roleId", "roleId") + COLUMN_PROCESSES: ClassVar[RelationField] = RelationField("columnProcesses") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "persona_groups", - "persona_users", - "role_id", + "dbt_process_job_status", + "dbt_alias", + "dbt_meta", + "dbt_unique_id", + "dbt_account_name", + "dbt_project_name", + "dbt_package_name", + "dbt_job_name", + "dbt_job_schedule", + "dbt_job_status", + "dbt_job_schedule_cron_humanized", + "dbt_job_last_run", + "dbt_job_next_run", + "dbt_job_next_run_humanized", + "dbt_environment_name", + "dbt_environment_dbt_version", + "dbt_tags", + "dbt_connection_context", + "dbt_semantic_layer_proxy_url", + "inputs", + "outputs", + "code", + "sql", + "ast", + "matillion_component", + "airflow_tasks", + "column_processes", ] @property - def persona_groups(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.persona_groups + def dbt_process_job_status(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.dbt_process_job_status + ) + + @dbt_process_job_status.setter + def dbt_process_job_status(self, dbt_process_job_status: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_process_job_status = dbt_process_job_status + + @property + def dbt_alias(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_alias + + @dbt_alias.setter + def dbt_alias(self, dbt_alias: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_alias = dbt_alias + + @property + def dbt_meta(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_meta + + @dbt_meta.setter + def dbt_meta(self, dbt_meta: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_meta = dbt_meta + + @property + def dbt_unique_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_unique_id + + @dbt_unique_id.setter + def dbt_unique_id(self, dbt_unique_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_unique_id = dbt_unique_id + + @property + def dbt_account_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_account_name - @persona_groups.setter - def persona_groups(self, persona_groups: Optional[set[str]]): + @dbt_account_name.setter + def dbt_account_name(self, dbt_account_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.persona_groups = persona_groups + self.attributes.dbt_account_name = dbt_account_name @property - def persona_users(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.persona_users + def dbt_project_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_project_name - @persona_users.setter - def persona_users(self, persona_users: Optional[set[str]]): + @dbt_project_name.setter + def dbt_project_name(self, dbt_project_name: Optional[str]): if self.attributes is None: self.attributes = 
self.Attributes() - self.attributes.persona_users = persona_users + self.attributes.dbt_project_name = dbt_project_name @property - def role_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.role_id + def dbt_package_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_package_name - @role_id.setter - def role_id(self, role_id: Optional[str]): + @dbt_package_name.setter + def dbt_package_name(self, dbt_package_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.role_id = role_id + self.attributes.dbt_package_name = dbt_package_name + + @property + def dbt_job_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_job_name - class Attributes(AccessControl.Attributes): - persona_groups: Optional[set[str]] = Field( - None, description="", alias="personaGroups" + @dbt_job_name.setter + def dbt_job_name(self, dbt_job_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_name = dbt_job_name + + @property + def dbt_job_schedule(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_job_schedule + + @dbt_job_schedule.setter + def dbt_job_schedule(self, dbt_job_schedule: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_schedule = dbt_job_schedule + + @property + def dbt_job_status(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_job_status + + @dbt_job_status.setter + def dbt_job_status(self, dbt_job_status: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_status = dbt_job_status + + @property + def dbt_job_schedule_cron_humanized(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_job_schedule_cron_humanized ) - persona_users: Optional[set[str]] = Field( - None, description="", alias="personaUsers" + + @dbt_job_schedule_cron_humanized.setter + def dbt_job_schedule_cron_humanized( + self, dbt_job_schedule_cron_humanized: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_schedule_cron_humanized = ( + dbt_job_schedule_cron_humanized ) - role_id: Optional[str] = Field(None, description="", alias="roleId") - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, name: str) -> Persona.Attributes: - if not name: - raise ValueError("name cannot be blank") - validate_required_fields(["name"], [name]) - return Persona.Attributes( - qualified_name=name, - name=name, - display_name=name, - is_access_control_enabled=True, - description="", - ) - - attributes: "Persona.Attributes" = Field( - default_factory=lambda: Persona.Attributes(), + + @property + def dbt_job_last_run(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.dbt_job_last_run + + @dbt_job_last_run.setter + def dbt_job_last_run(self, dbt_job_last_run: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_last_run = dbt_job_last_run + + @property + def dbt_job_next_run(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.dbt_job_next_run + + @dbt_job_next_run.setter + def dbt_job_next_run(self, dbt_job_next_run: Optional[datetime]): + if self.attributes is None: + 
self.attributes = self.Attributes() + self.attributes.dbt_job_next_run = dbt_job_next_run + + @property + def dbt_job_next_run_humanized(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_job_next_run_humanized + ) + + @dbt_job_next_run_humanized.setter + def dbt_job_next_run_humanized(self, dbt_job_next_run_humanized: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_next_run_humanized = dbt_job_next_run_humanized + + @property + def dbt_environment_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_environment_name + + @dbt_environment_name.setter + def dbt_environment_name(self, dbt_environment_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_environment_name = dbt_environment_name + + @property + def dbt_environment_dbt_version(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_environment_dbt_version + ) + + @dbt_environment_dbt_version.setter + def dbt_environment_dbt_version(self, dbt_environment_dbt_version: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_environment_dbt_version = dbt_environment_dbt_version + + @property + def dbt_tags(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.dbt_tags + + @dbt_tags.setter + def dbt_tags(self, dbt_tags: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_tags = dbt_tags + + @property + def dbt_connection_context(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.dbt_connection_context + ) + + @dbt_connection_context.setter + def dbt_connection_context(self, dbt_connection_context: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_connection_context = dbt_connection_context + + @property + def dbt_semantic_layer_proxy_url(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_semantic_layer_proxy_url + ) + + @dbt_semantic_layer_proxy_url.setter + def dbt_semantic_layer_proxy_url(self, dbt_semantic_layer_proxy_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_semantic_layer_proxy_url = dbt_semantic_layer_proxy_url + + @property + def inputs(self) -> Optional[list[Catalog]]: + return None if self.attributes is None else self.attributes.inputs + + @inputs.setter + def inputs(self, inputs: Optional[list[Catalog]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.inputs = inputs + + @property + def outputs(self) -> Optional[list[Catalog]]: + return None if self.attributes is None else self.attributes.outputs + + @outputs.setter + def outputs(self, outputs: Optional[list[Catalog]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.outputs = outputs + + @property + def code(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.code + + @code.setter + def code(self, code: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.code = code + + @property + def sql(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.sql + + @sql.setter + def sql(self, sql: 
Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sql = sql + + @property + def ast(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.ast + + @ast.setter + def ast(self, ast: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.ast = ast + + @property + def matillion_component(self) -> Optional[MatillionComponent]: + return None if self.attributes is None else self.attributes.matillion_component + + @matillion_component.setter + def matillion_component(self, matillion_component: Optional[MatillionComponent]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_component = matillion_component + + @property + def airflow_tasks(self) -> Optional[list[AirflowTask]]: + return None if self.attributes is None else self.attributes.airflow_tasks + + @airflow_tasks.setter + def airflow_tasks(self, airflow_tasks: Optional[list[AirflowTask]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_tasks = airflow_tasks + + @property + def column_processes(self) -> Optional[list[ColumnProcess]]: + return None if self.attributes is None else self.attributes.column_processes + + @column_processes.setter + def column_processes(self, column_processes: Optional[list[ColumnProcess]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_processes = column_processes + + class Attributes(Dbt.Attributes): + dbt_process_job_status: Optional[str] = Field( + None, description="", alias="dbtProcessJobStatus" + ) + dbt_alias: Optional[str] = Field(None, description="", alias="dbtAlias") + dbt_meta: Optional[str] = Field(None, description="", alias="dbtMeta") + dbt_unique_id: Optional[str] = Field(None, description="", alias="dbtUniqueId") + dbt_account_name: Optional[str] = Field( + None, description="", alias="dbtAccountName" + ) + dbt_project_name: Optional[str] = Field( + None, description="", alias="dbtProjectName" + ) + dbt_package_name: Optional[str] = Field( + None, description="", alias="dbtPackageName" + ) + dbt_job_name: Optional[str] = Field(None, description="", alias="dbtJobName") + dbt_job_schedule: Optional[str] = Field( + None, description="", alias="dbtJobSchedule" + ) + dbt_job_status: Optional[str] = Field( + None, description="", alias="dbtJobStatus" + ) + dbt_job_schedule_cron_humanized: Optional[str] = Field( + None, description="", alias="dbtJobScheduleCronHumanized" + ) + dbt_job_last_run: Optional[datetime] = Field( + None, description="", alias="dbtJobLastRun" + ) + dbt_job_next_run: Optional[datetime] = Field( + None, description="", alias="dbtJobNextRun" + ) + dbt_job_next_run_humanized: Optional[str] = Field( + None, description="", alias="dbtJobNextRunHumanized" + ) + dbt_environment_name: Optional[str] = Field( + None, description="", alias="dbtEnvironmentName" + ) + dbt_environment_dbt_version: Optional[str] = Field( + None, description="", alias="dbtEnvironmentDbtVersion" + ) + dbt_tags: Optional[set[str]] = Field(None, description="", alias="dbtTags") + dbt_connection_context: Optional[str] = Field( + None, description="", alias="dbtConnectionContext" + ) + dbt_semantic_layer_proxy_url: Optional[str] = Field( + None, description="", alias="dbtSemanticLayerProxyUrl" + ) + inputs: Optional[list[Catalog]] = Field(None, description="", alias="inputs") + outputs: Optional[list[Catalog]] = Field(None, description="", alias="outputs") 
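The convenience properties generated above all follow the same lazy pattern: assigning through the property writes to the nested Attributes model, creating it first if it is missing. A minimal sketch of that behaviour, with placeholder values:

    from pyatlan.model.assets import DbtProcess

    process = DbtProcess()                 # attributes defaults to an empty DbtProcess.Attributes()
    process.dbt_alias = "orders_model"     # placeholder value
    process.sql = "SELECT * FROM orders"   # placeholder value
    assert process.attributes.dbt_alias == "orders_model"
    assert process.attributes.sql == "SELECT * FROM orders"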
+ code: Optional[str] = Field(None, description="", alias="code") + sql: Optional[str] = Field(None, description="", alias="sql") + ast: Optional[str] = Field(None, description="", alias="ast") + matillion_component: Optional[MatillionComponent] = Field( + None, description="", alias="matillionComponent" + ) # relationship + airflow_tasks: Optional[list[AirflowTask]] = Field( + None, description="", alias="airflowTasks" + ) # relationship + column_processes: Optional[list[ColumnProcess]] = Field( + None, description="", alias="columnProcesses" + ) # relationship + + attributes: "DbtProcess.Attributes" = Field( + default_factory=lambda: DbtProcess.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -Persona.Attributes.update_forward_refs() +DbtProcess.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset13.py b/pyatlan/model/assets/asset13.py index 79f5016e6..3cd34db35 100644 --- a/pyatlan/model/assets/asset13.py +++ b/pyatlan/model/assets/asset13.py @@ -8,30 +8,31 @@ from pydantic import Field, validator -from pyatlan.model.core import AtlanTagName from pyatlan.model.enums import ( AuthPolicyCategory, AuthPolicyResourceCategory, AuthPolicyType, DataAction, - PurposeMetadataAction, + PersonaDomainAction, + PersonaGlossaryAction, + PersonaMetadataAction, ) from pyatlan.model.fields.atlan_fields import KeywordField from pyatlan.utils import init_guid, validate_required_fields from .asset00 import SelfAsset -from .asset05 import AccessControl, AuthPolicy +from .asset06 import AccessControl, AuthPolicy -class Purpose(AccessControl): +class Persona(AccessControl): """Description""" @classmethod # @validate_arguments() @init_guid - def create(cls, *, name: str, atlan_tags: list[str]) -> Purpose: - validate_required_fields(["name", "atlan_tags"], [name, atlan_tags]) - attributes = Purpose.Attributes.create(name=name, atlan_tags=atlan_tags) + def create(cls, *, name: str) -> Persona: + validate_required_fields(["name"], [name]) + attributes = Persona.Attributes.create(name=name) return cls(attributes=attributes) @classmethod @@ -40,60 +41,29 @@ def create_metadata_policy( cls, *, name: str, - purpose_id: str, + persona_id: str, policy_type: AuthPolicyType, - actions: Set[PurposeMetadataAction], - policy_groups: Optional[Set[str]] = None, - policy_users: Optional[Set[str]] = None, - all_users: bool = False, + actions: Set[PersonaMetadataAction], + connection_qualified_name: str, + resources: Set[str], ) -> AuthPolicy: validate_required_fields( - ["name", "purpose_id", "policy_type", "actions"], - [name, purpose_id, policy_type, actions], + ["name", "persona_id", "policy_type", "actions", "resources"], + [name, persona_id, policy_type, actions, resources], ) - target_found = False policy = AuthPolicy._AuthPolicy__create(name=name) # type: ignore policy.policy_actions = {x.value for x in actions} - policy.policy_category = AuthPolicyCategory.PURPOSE.value + policy.policy_category = AuthPolicyCategory.PERSONA.value policy.policy_type = policy_type - policy.policy_resource_category = AuthPolicyResourceCategory.TAG.value - policy.policy_service_name = "atlas_tag" + policy.connection_qualified_name = connection_qualified_name + policy.policy_resources = resources + policy.policy_resource_category = AuthPolicyResourceCategory.CUSTOM.value + policy.policy_service_name = "atlas" policy.policy_sub_category = "metadata" - purpose = Purpose() - purpose.guid = 
purpose_id - policy.access_control = purpose - if all_users: - target_found = True - policy.policy_groups = {"public"} - else: - if policy_groups: - from pyatlan.cache.group_cache import GroupCache - - for group_name in policy_groups: - if not GroupCache.get_id_for_name(group_name): - raise ValueError( - f"Provided group name {group_name} was not found in Atlan." - ) - target_found = True - policy.policy_groups = policy_groups - else: - policy.policy_groups = None - if policy_users: - from pyatlan.cache.user_cache import UserCache - - for username in policy_users: - if not UserCache.get_id_for_name(username): - raise ValueError( - f"Provided username {username} was not found in Atlan." - ) - target_found = True - policy.policy_users = policy_users - else: - policy.policy_users = None - if target_found: - return policy - else: - raise ValueError("No user or group specified for the policy.") + persona = Persona() + persona.guid = persona_id + policy.access_control = persona + return policy @classmethod # @validate_arguments() @@ -101,57 +71,84 @@ def create_data_policy( cls, *, name: str, - purpose_id: str, + persona_id: str, policy_type: AuthPolicyType, - policy_groups: Optional[Set[str]] = None, - policy_users: Optional[Set[str]] = None, - all_users: bool = False, + connection_qualified_name: str, + resources: Set[str], ) -> AuthPolicy: validate_required_fields( - ["name", "purpose_id", "policy_type"], [name, purpose_id, policy_type] + ["name", "persona_id", "policy_type", "resources"], + [name, persona_id, policy_type, resources], ) policy = AuthPolicy._AuthPolicy__create(name=name) # type: ignore policy.policy_actions = {DataAction.SELECT.value} - policy.policy_category = AuthPolicyCategory.PURPOSE.value + policy.policy_category = AuthPolicyCategory.PERSONA.value policy.policy_type = policy_type - policy.policy_resource_category = AuthPolicyResourceCategory.TAG.value - policy.policy_service_name = "atlas_tag" + policy.connection_qualified_name = connection_qualified_name + policy.policy_resources = resources + policy.policy_resources.add("entity-type:*") + policy.policy_resource_category = AuthPolicyResourceCategory.ENTITY.value + policy.policy_service_name = "heka" policy.policy_sub_category = "data" - purpose = Purpose() - purpose.guid = purpose_id - policy.access_control = purpose - if all_users: - target_found = True - policy.policy_groups = {"public"} - else: - if policy_groups: - from pyatlan.cache.group_cache import GroupCache - - for group_name in policy_groups: - if not GroupCache.get_id_for_name(group_name): - raise ValueError( - f"Provided group name {group_name} was not found in Atlan." - ) - target_found = True - policy.policy_groups = policy_groups - else: - policy.policy_groups = None - if policy_users: - from pyatlan.cache.user_cache import UserCache - - for username in policy_users: - if not UserCache.get_id_for_name(username): - raise ValueError( - f"Provided username {username} was not found in Atlan." 
- ) - target_found = True - policy.policy_users = policy_users - else: - policy.policy_users = None - if target_found: - return policy - else: - raise ValueError("No user or group specified for the policy.") + persona = Persona() + persona.guid = persona_id + policy.access_control = persona + return policy + + @classmethod + # @validate_arguments() + def create_glossary_policy( + cls, + *, + name: str, + persona_id: str, + policy_type: AuthPolicyType, + actions: Set[PersonaGlossaryAction], + resources: Set[str], + ) -> AuthPolicy: + validate_required_fields( + ["name", "persona_id", "policy_type", "actions", "resources"], + [name, persona_id, policy_type, actions, resources], + ) + policy = AuthPolicy._AuthPolicy__create(name=name) # type: ignore + policy.policy_actions = {x.value for x in actions} + policy.policy_category = AuthPolicyCategory.PERSONA.value + policy.policy_type = policy_type + policy.policy_resources = resources + policy.policy_resource_category = AuthPolicyResourceCategory.CUSTOM.value + policy.policy_service_name = "atlas" + policy.policy_sub_category = "glossary" + persona = Persona() + persona.guid = persona_id + policy.access_control = persona + return policy + + @classmethod + # @validate_arguments() + def create_domain_policy( + cls, + *, + name: str, + persona_id: str, + actions: Set[PersonaDomainAction], + resources: Set[str], + ) -> AuthPolicy: + validate_required_fields( + ["name", "persona_id", "actions", "resources"], + [name, persona_id, actions, resources], + ) + policy = AuthPolicy._AuthPolicy__create(name=name) # type: ignore + policy.policy_actions = {x.value for x in actions} + policy.policy_category = AuthPolicyCategory.PERSONA.value + policy.policy_type = AuthPolicyType.ALLOW + policy.policy_resources = resources + policy.policy_resource_category = AuthPolicyResourceCategory.CUSTOM.value + policy.policy_service_name = "atlas" + policy.policy_sub_category = "domain" + persona = Persona() + persona.guid = persona_id + policy.access_control = persona + return policy @classmethod def create_for_modification( @@ -172,64 +169,99 @@ def create_for_modification( ) ) - type_name: str = Field("Purpose", allow_mutation=False) + type_name: str = Field("Persona", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Purpose": - raise ValueError("must be Purpose") + if v != "Persona": + raise ValueError("must be Persona") return v def __setattr__(self, name, value): - if name in Purpose._convenience_properties: + if name in Persona._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - PURPOSE_CLASSIFICATIONS: ClassVar[KeywordField] = KeywordField( - "purposeClassifications", "purposeClassifications" + PERSONA_GROUPS: ClassVar[KeywordField] = KeywordField( + "personaGroups", "personaGroups" ) """ TBC """ + PERSONA_USERS: ClassVar[KeywordField] = KeywordField("personaUsers", "personaUsers") + """ + TBC + """ + ROLE_ID: ClassVar[KeywordField] = KeywordField("roleId", "roleId") + """ + TBC + """ _convenience_properties: ClassVar[list[str]] = [ - "purpose_atlan_tags", + "persona_groups", + "persona_users", + "role_id", ] @property - def purpose_atlan_tags(self) -> Optional[list[AtlanTagName]]: - return None if self.attributes is None else self.attributes.purpose_atlan_tags + def persona_groups(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.persona_groups + + @persona_groups.setter + def persona_groups(self, persona_groups: Optional[set[str]]): 
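Taken together, the helpers above let a caller build a persona and then attach policies to it by GUID. A minimal sketch, assuming the PersonaGlossaryAction member name and the resource string format (neither is shown in this change):

    from pyatlan.model.assets import Persona
    from pyatlan.model.enums import AuthPolicyType, PersonaGlossaryAction

    persona = Persona.create(name="Data Engineering")   # placeholder persona name
    # In practice the persona is saved first, so persona.guid is the server-assigned GUID.
    policy = Persona.create_glossary_policy(
        name="glossary-read",                                # placeholder policy name
        persona_id=persona.guid,
        policy_type=AuthPolicyType.ALLOW,                    # ALLOW is also what create_domain_policy uses
        actions={PersonaGlossaryAction.READ},                # assumed enum member
        resources={"entity:your-glossary-qualified-name"},   # assumed resource format
    )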
+ if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.persona_groups = persona_groups + + @property + def persona_users(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.persona_users + + @persona_users.setter + def persona_users(self, persona_users: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.persona_users = persona_users + + @property + def role_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.role_id - @purpose_atlan_tags.setter - def purpose_atlan_tags(self, purpose_atlan_tags: Optional[list[AtlanTagName]]): + @role_id.setter + def role_id(self, role_id: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.purpose_atlan_tags = purpose_atlan_tags + self.attributes.role_id = role_id class Attributes(AccessControl.Attributes): - purpose_atlan_tags: Optional[list[AtlanTagName]] = Field( - None, description="", alias="purposeClassifications" + persona_groups: Optional[set[str]] = Field( + None, description="", alias="personaGroups" + ) + persona_users: Optional[set[str]] = Field( + None, description="", alias="personaUsers" ) + role_id: Optional[str] = Field(None, description="", alias="roleId") @classmethod # @validate_arguments() @init_guid - def create(cls, name: str, atlan_tags: list[str]) -> Purpose.Attributes: - validate_required_fields(["name", "atlan_tags"], [name, atlan_tags]) - return Purpose.Attributes( + def create(cls, name: str) -> Persona.Attributes: + if not name: + raise ValueError("name cannot be blank") + validate_required_fields(["name"], [name]) + return Persona.Attributes( qualified_name=name, name=name, display_name=name, is_access_control_enabled=True, description="", - purpose_atlan_tags=atlan_tags, ) - attributes: "Purpose.Attributes" = Field( - default_factory=lambda: Purpose.Attributes(), + attributes: "Persona.Attributes" = Field( + default_factory=lambda: Persona.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -Purpose.Attributes.update_forward_refs() +Persona.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset14.py b/pyatlan/model/assets/asset14.py index 46f8f6772..9edd3af85 100644 --- a/pyatlan/model/assets/asset14.py +++ b/pyatlan/model/assets/asset14.py @@ -4,75 +4,232 @@ from __future__ import annotations -from typing import ClassVar, Optional +from typing import ClassVar, Optional, Set from pydantic import Field, validator -from pyatlan.model.enums import IconType +from pyatlan.model.core import AtlanTagName +from pyatlan.model.enums import ( + AuthPolicyCategory, + AuthPolicyResourceCategory, + AuthPolicyType, + DataAction, + PurposeMetadataAction, +) from pyatlan.model.fields.atlan_fields import KeywordField +from pyatlan.utils import init_guid, validate_required_fields -from .asset00 import Namespace +from .asset00 import SelfAsset +from .asset06 import AccessControl, AuthPolicy -class Collection(Namespace): +class Purpose(AccessControl): """Description""" - type_name: str = Field("Collection", allow_mutation=False) + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, atlan_tags: list[str]) -> Purpose: + validate_required_fields(["name", "atlan_tags"], [name, atlan_tags]) + attributes = Purpose.Attributes.create(name=name, atlan_tags=atlan_tags) + return cls(attributes=attributes) + + @classmethod + # @validate_arguments() + def create_metadata_policy( + cls, + *, + name: str, + purpose_id: str, + policy_type: AuthPolicyType, + actions: Set[PurposeMetadataAction], + policy_groups: Optional[Set[str]] = None, + policy_users: Optional[Set[str]] = None, + all_users: bool = False, + ) -> AuthPolicy: + validate_required_fields( + ["name", "purpose_id", "policy_type", "actions"], + [name, purpose_id, policy_type, actions], + ) + target_found = False + policy = AuthPolicy._AuthPolicy__create(name=name) # type: ignore + policy.policy_actions = {x.value for x in actions} + policy.policy_category = AuthPolicyCategory.PURPOSE.value + policy.policy_type = policy_type + policy.policy_resource_category = AuthPolicyResourceCategory.TAG.value + policy.policy_service_name = "atlas_tag" + policy.policy_sub_category = "metadata" + purpose = Purpose() + purpose.guid = purpose_id + policy.access_control = purpose + if all_users: + target_found = True + policy.policy_groups = {"public"} + else: + if policy_groups: + from pyatlan.cache.group_cache import GroupCache + + for group_name in policy_groups: + if not GroupCache.get_id_for_name(group_name): + raise ValueError( + f"Provided group name {group_name} was not found in Atlan." + ) + target_found = True + policy.policy_groups = policy_groups + else: + policy.policy_groups = None + if policy_users: + from pyatlan.cache.user_cache import UserCache + + for username in policy_users: + if not UserCache.get_id_for_name(username): + raise ValueError( + f"Provided username {username} was not found in Atlan." 
+ ) + target_found = True + policy.policy_users = policy_users + else: + policy.policy_users = None + if target_found: + return policy + else: + raise ValueError("No user or group specified for the policy.") + + @classmethod + # @validate_arguments() + def create_data_policy( + cls, + *, + name: str, + purpose_id: str, + policy_type: AuthPolicyType, + policy_groups: Optional[Set[str]] = None, + policy_users: Optional[Set[str]] = None, + all_users: bool = False, + ) -> AuthPolicy: + validate_required_fields( + ["name", "purpose_id", "policy_type"], [name, purpose_id, policy_type] + ) + policy = AuthPolicy._AuthPolicy__create(name=name) # type: ignore + policy.policy_actions = {DataAction.SELECT.value} + policy.policy_category = AuthPolicyCategory.PURPOSE.value + policy.policy_type = policy_type + policy.policy_resource_category = AuthPolicyResourceCategory.TAG.value + policy.policy_service_name = "atlas_tag" + policy.policy_sub_category = "data" + purpose = Purpose() + purpose.guid = purpose_id + policy.access_control = purpose + if all_users: + target_found = True + policy.policy_groups = {"public"} + else: + if policy_groups: + from pyatlan.cache.group_cache import GroupCache + + for group_name in policy_groups: + if not GroupCache.get_id_for_name(group_name): + raise ValueError( + f"Provided group name {group_name} was not found in Atlan." + ) + target_found = True + policy.policy_groups = policy_groups + else: + policy.policy_groups = None + if policy_users: + from pyatlan.cache.user_cache import UserCache + + for username in policy_users: + if not UserCache.get_id_for_name(username): + raise ValueError( + f"Provided username {username} was not found in Atlan." + ) + target_found = True + policy.policy_users = policy_users + else: + policy.policy_users = None + if target_found: + return policy + else: + raise ValueError("No user or group specified for the policy.") + + @classmethod + def create_for_modification( + cls: type[SelfAsset], + qualified_name: str = "", + name: str = "", + is_enabled: bool = True, + ) -> SelfAsset: + validate_required_fields( + ["name", "qualified_name", "is_enabled"], + [name, qualified_name, is_enabled], + ) + return cls( + attributes=cls.Attributes( + qualified_name=qualified_name, + name=name, + is_access_control_enabled=is_enabled, + ) + ) + + type_name: str = Field("Purpose", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Collection": - raise ValueError("must be Collection") + if v != "Purpose": + raise ValueError("must be Purpose") return v def __setattr__(self, name, value): - if name in Collection._convenience_properties: + if name in Purpose._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - ICON: ClassVar[KeywordField] = KeywordField("icon", "icon") - """ - TBC - """ - ICON_TYPE: ClassVar[KeywordField] = KeywordField("iconType", "iconType") + PURPOSE_CLASSIFICATIONS: ClassVar[KeywordField] = KeywordField( + "purposeClassifications", "purposeClassifications" + ) """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "icon", - "icon_type", + "purpose_atlan_tags", ] @property - def icon(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.icon + def purpose_atlan_tags(self) -> Optional[list[AtlanTagName]]: + return None if self.attributes is None else self.attributes.purpose_atlan_tags - @icon.setter - def icon(self, icon: Optional[str]): + @purpose_atlan_tags.setter + def purpose_atlan_tags(self, purpose_atlan_tags: 
Optional[list[AtlanTagName]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.icon = icon - - @property - def icon_type(self) -> Optional[IconType]: - return None if self.attributes is None else self.attributes.icon_type - - @icon_type.setter - def icon_type(self, icon_type: Optional[IconType]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.icon_type = icon_type - - class Attributes(Namespace.Attributes): - icon: Optional[str] = Field(None, description="", alias="icon") - icon_type: Optional[IconType] = Field(None, description="", alias="iconType") - - attributes: "Collection.Attributes" = Field( - default_factory=lambda: Collection.Attributes(), + self.attributes.purpose_atlan_tags = purpose_atlan_tags + + class Attributes(AccessControl.Attributes): + purpose_atlan_tags: Optional[list[AtlanTagName]] = Field( + None, description="", alias="purposeClassifications" + ) + + @classmethod + # @validate_arguments() + @init_guid + def create(cls, name: str, atlan_tags: list[str]) -> Purpose.Attributes: + validate_required_fields(["name", "atlan_tags"], [name, atlan_tags]) + return Purpose.Attributes( + qualified_name=name, + name=name, + display_name=name, + is_access_control_enabled=True, + description="", + purpose_atlan_tags=atlan_tags, + ) + + attributes: "Purpose.Attributes" = Field( + default_factory=lambda: Purpose.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -Collection.Attributes.update_forward_refs() +Purpose.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset15.py b/pyatlan/model/assets/asset15.py new file mode 100644 index 000000000..2fc730eb7 --- /dev/null +++ b/pyatlan/model/assets/asset15.py @@ -0,0 +1,78 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic import Field, validator + +from pyatlan.model.enums import IconType +from pyatlan.model.fields.atlan_fields import KeywordField + +from .asset00 import Namespace + + +class Collection(Namespace): + """Description""" + + type_name: str = Field("Collection", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Collection": + raise ValueError("must be Collection") + return v + + def __setattr__(self, name, value): + if name in Collection._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + ICON: ClassVar[KeywordField] = KeywordField("icon", "icon") + """ + Image used to represent this collection. + """ + ICON_TYPE: ClassVar[KeywordField] = KeywordField("iconType", "iconType") + """ + Type of image used to represent the collection (for example, an emoji). 
+ """ + + _convenience_properties: ClassVar[list[str]] = [ + "icon", + "icon_type", + ] + + @property + def icon(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.icon + + @icon.setter + def icon(self, icon: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.icon = icon + + @property + def icon_type(self) -> Optional[IconType]: + return None if self.attributes is None else self.attributes.icon_type + + @icon_type.setter + def icon_type(self, icon_type: Optional[IconType]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.icon_type = icon_type + + class Attributes(Namespace.Attributes): + icon: Optional[str] = Field(None, description="", alias="icon") + icon_type: Optional[IconType] = Field(None, description="", alias="iconType") + + attributes: "Collection.Attributes" = Field( + default_factory=lambda: Collection.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +Collection.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset16.py b/pyatlan/model/assets/asset17.py similarity index 100% rename from pyatlan/model/assets/asset16.py rename to pyatlan/model/assets/asset17.py diff --git a/pyatlan/model/assets/asset19.py b/pyatlan/model/assets/asset19.py index 25218db82..433a4d4b6 100644 --- a/pyatlan/model/assets/asset19.py +++ b/pyatlan/model/assets/asset19.py @@ -11,23 +11,23 @@ from .asset00 import Catalog -class SaaS(Catalog): +class BI(Catalog): """Description""" - type_name: str = Field("SaaS", allow_mutation=False) + type_name: str = Field("BI", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "SaaS": - raise ValueError("must be SaaS") + if v != "BI": + raise ValueError("must be BI") return v def __setattr__(self, name, value): - if name in SaaS._convenience_properties: + if name in BI._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] -SaaS.Attributes.update_forward_refs() +BI.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset18.py b/pyatlan/model/assets/asset20.py similarity index 68% rename from pyatlan/model/assets/asset18.py rename to pyatlan/model/assets/asset20.py index 433a4d4b6..25218db82 100644 --- a/pyatlan/model/assets/asset18.py +++ b/pyatlan/model/assets/asset20.py @@ -11,23 +11,23 @@ from .asset00 import Catalog -class BI(Catalog): +class SaaS(Catalog): """Description""" - type_name: str = Field("BI", allow_mutation=False) + type_name: str = Field("SaaS", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "BI": - raise ValueError("must be BI") + if v != "SaaS": + raise ValueError("must be SaaS") return v def __setattr__(self, name, value): - if name in BI._convenience_properties: + if name in SaaS._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] -BI.Attributes.update_forward_refs() +SaaS.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset23.py b/pyatlan/model/assets/asset23.py index 238dff11e..06146f9c8 100644 --- a/pyatlan/model/assets/asset23.py +++ b/pyatlan/model/assets/asset23.py @@ -11,23 +11,23 @@ from .asset00 import Catalog -class NoSQL(Catalog): +class 
EventStore(Catalog): """Description""" - type_name: str = Field("NoSQL", allow_mutation=False) + type_name: str = Field("EventStore", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "NoSQL": - raise ValueError("must be NoSQL") + if v != "EventStore": + raise ValueError("must be EventStore") return v def __setattr__(self, name, value): - if name in NoSQL._convenience_properties: + if name in EventStore._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] -NoSQL.Attributes.update_forward_refs() +EventStore.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset24.py b/pyatlan/model/assets/asset24.py new file mode 100644 index 000000000..8c25b2298 --- /dev/null +++ b/pyatlan/model/assets/asset24.py @@ -0,0 +1,69 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic import Field, validator + +from pyatlan.model.fields.atlan_fields import TextField + +from .asset00 import Catalog + + +class NoSQL(Catalog): + """Description""" + + type_name: str = Field("NoSQL", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "NoSQL": + raise ValueError("must be NoSQL") + return v + + def __setattr__(self, name, value): + if name in NoSQL._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + NO_SQL_SCHEMA_DEFINITION: ClassVar[TextField] = TextField( + "noSQLSchemaDefinition", "noSQLSchemaDefinition" + ) + """ + Represents attributes for describing the key schema for the table and indexes. + """ + + _convenience_properties: ClassVar[list[str]] = [ + "no_s_q_l_schema_definition", + ] + + @property + def no_s_q_l_schema_definition(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.no_s_q_l_schema_definition + ) + + @no_s_q_l_schema_definition.setter + def no_s_q_l_schema_definition(self, no_s_q_l_schema_definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.no_s_q_l_schema_definition = no_s_q_l_schema_definition + + class Attributes(Catalog.Attributes): + no_s_q_l_schema_definition: Optional[str] = Field( + None, description="", alias="noSQLSchemaDefinition" + ) + + attributes: "NoSQL.Attributes" = Field( + default_factory=lambda: NoSQL.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +NoSQL.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset27.py b/pyatlan/model/assets/asset27.py index d4dde7bf2..eb7a44ed4 100644 --- a/pyatlan/model/assets/asset27.py +++ b/pyatlan/model/assets/asset27.py @@ -4,162 +4,30 @@ from __future__ import annotations -from typing import ClassVar, Optional +from typing import ClassVar from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - KeywordTextField, -) - from .asset00 import Catalog -class API(Catalog): +class Insight(Catalog): """Description""" - type_name: str = Field("API", allow_mutation=False) + type_name: str = Field("Insight", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "API": - raise ValueError("must be API") + if v != "Insight": + raise ValueError("must be Insight") return v def __setattr__(self, name, value): - if name in API._convenience_properties: + if name in Insight._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - API_SPEC_TYPE: ClassVar[KeywordField] = KeywordField("apiSpecType", "apiSpecType") - """ - TBC - """ - API_SPEC_VERSION: ClassVar[KeywordField] = KeywordField( - "apiSpecVersion", "apiSpecVersion" - ) - """ - TBC - """ - API_SPEC_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "apiSpecName", "apiSpecName.keyword", "apiSpecName" - ) - """ - TBC - """ - API_SPEC_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "apiSpecQualifiedName", "apiSpecQualifiedName", "apiSpecQualifiedName.text" - ) - """ - TBC - """ - API_EXTERNAL_DOCS: ClassVar[KeywordField] = KeywordField( - "apiExternalDocs", "apiExternalDocs" - ) - """ - TBC - """ - API_IS_AUTH_OPTIONAL: ClassVar[BooleanField] = BooleanField( - "apiIsAuthOptional", "apiIsAuthOptional" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "api_spec_type", - "api_spec_version", - "api_spec_name", - "api_spec_qualified_name", - "api_external_docs", - "api_is_auth_optional", - ] - - @property - def api_spec_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.api_spec_type - - @api_spec_type.setter - def api_spec_type(self, api_spec_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_type = api_spec_type - - @property - def api_spec_version(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.api_spec_version - - @api_spec_version.setter - def api_spec_version(self, api_spec_version: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_version = api_spec_version - - @property - def api_spec_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.api_spec_name - - @api_spec_name.setter - def api_spec_name(self, api_spec_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_name = api_spec_name - - @property - def api_spec_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.api_spec_qualified_name - ) - - @api_spec_qualified_name.setter - def api_spec_qualified_name(self, api_spec_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.api_spec_qualified_name = api_spec_qualified_name - - @property - def api_external_docs(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.api_external_docs - - @api_external_docs.setter - def api_external_docs(self, api_external_docs: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_external_docs = api_external_docs - - @property - def api_is_auth_optional(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.api_is_auth_optional - - @api_is_auth_optional.setter - def api_is_auth_optional(self, api_is_auth_optional: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_is_auth_optional = api_is_auth_optional - - class Attributes(Catalog.Attributes): - api_spec_type: Optional[str] = Field(None, description="", alias="apiSpecType") - api_spec_version: Optional[str] = Field( - None, description="", alias="apiSpecVersion" - ) - api_spec_name: Optional[str] = Field(None, description="", alias="apiSpecName") - api_spec_qualified_name: Optional[str] = Field( - None, description="", alias="apiSpecQualifiedName" - ) - api_external_docs: Optional[dict[str, str]] = Field( - None, description="", alias="apiExternalDocs" - ) - api_is_auth_optional: Optional[bool] = Field( - None, description="", alias="apiIsAuthOptional" - ) - - attributes: "API.Attributes" = Field( - default_factory=lambda: API.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) + _convenience_properties: ClassVar[list[str]] = [] -API.Attributes.update_forward_refs() +Insight.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset28.py b/pyatlan/model/assets/asset28.py new file mode 100644 index 000000000..1cf00ef9e --- /dev/null +++ b/pyatlan/model/assets/asset28.py @@ -0,0 +1,165 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, +) + +from .asset00 import Catalog + + +class API(Catalog): + """Description""" + + type_name: str = Field("API", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "API": + raise ValueError("must be API") + return v + + def __setattr__(self, name, value): + if name in API._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + API_SPEC_TYPE: ClassVar[KeywordField] = KeywordField("apiSpecType", "apiSpecType") + """ + Type of API, for example: OpenAPI, GraphQL, etc. + """ + API_SPEC_VERSION: ClassVar[KeywordField] = KeywordField( + "apiSpecVersion", "apiSpecVersion" + ) + """ + Version of the API specification. + """ + API_SPEC_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "apiSpecName", "apiSpecName.keyword", "apiSpecName" + ) + """ + Simple name of the API spec, if this asset is contained in an API spec. + """ + API_SPEC_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "apiSpecQualifiedName", "apiSpecQualifiedName", "apiSpecQualifiedName.text" + ) + """ + Unique name of the API spec, if this asset is contained in an API spec. 
+ """ + API_EXTERNAL_DOCS: ClassVar[KeywordField] = KeywordField( + "apiExternalDocs", "apiExternalDocs" + ) + """ + External documentation of the API. + """ + API_IS_AUTH_OPTIONAL: ClassVar[BooleanField] = BooleanField( + "apiIsAuthOptional", "apiIsAuthOptional" + ) + """ + Whether authentication is optional (true) or required (false). + """ + + _convenience_properties: ClassVar[list[str]] = [ + "api_spec_type", + "api_spec_version", + "api_spec_name", + "api_spec_qualified_name", + "api_external_docs", + "api_is_auth_optional", + ] + + @property + def api_spec_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.api_spec_type + + @api_spec_type.setter + def api_spec_type(self, api_spec_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_type = api_spec_type + + @property + def api_spec_version(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.api_spec_version + + @api_spec_version.setter + def api_spec_version(self, api_spec_version: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_version = api_spec_version + + @property + def api_spec_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.api_spec_name + + @api_spec_name.setter + def api_spec_name(self, api_spec_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_name = api_spec_name + + @property + def api_spec_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.api_spec_qualified_name + ) + + @api_spec_qualified_name.setter + def api_spec_qualified_name(self, api_spec_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_qualified_name = api_spec_qualified_name + + @property + def api_external_docs(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.api_external_docs + + @api_external_docs.setter + def api_external_docs(self, api_external_docs: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_external_docs = api_external_docs + + @property + def api_is_auth_optional(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.api_is_auth_optional + + @api_is_auth_optional.setter + def api_is_auth_optional(self, api_is_auth_optional: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_is_auth_optional = api_is_auth_optional + + class Attributes(Catalog.Attributes): + api_spec_type: Optional[str] = Field(None, description="", alias="apiSpecType") + api_spec_version: Optional[str] = Field( + None, description="", alias="apiSpecVersion" + ) + api_spec_name: Optional[str] = Field(None, description="", alias="apiSpecName") + api_spec_qualified_name: Optional[str] = Field( + None, description="", alias="apiSpecQualifiedName" + ) + api_external_docs: Optional[dict[str, str]] = Field( + None, description="", alias="apiExternalDocs" + ) + api_is_auth_optional: Optional[bool] = Field( + None, description="", alias="apiIsAuthOptional" + ) + + attributes: "API.Attributes" = Field( + default_factory=lambda: API.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +API.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset30.py b/pyatlan/model/assets/asset30.py deleted file mode 100644 index f1143a61b..000000000 --- a/pyatlan/model/assets/asset30.py +++ /dev/null @@ -1,208 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. - - -from __future__ import annotations - -from typing import ClassVar, Optional - -from pydantic import Field, validator - -from pyatlan.model.fields.atlan_fields import ( - KeywordField, - KeywordTextField, - NumericField, -) -from pyatlan.model.structs import GoogleLabel, GoogleTag - -from .asset08 import Cloud - - -class Google(Cloud): - """Description""" - - type_name: str = Field("Google", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Google": - raise ValueError("must be Google") - return v - - def __setattr__(self, name, value): - if name in Google._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - GOOGLE_SERVICE: ClassVar[KeywordField] = KeywordField( - "googleService", "googleService" - ) - """ - TBC - """ - GOOGLE_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "googleProjectName", "googleProjectName", "googleProjectName.text" - ) - """ - TBC - """ - GOOGLE_PROJECT_ID: ClassVar[KeywordTextField] = KeywordTextField( - "googleProjectId", "googleProjectId", "googleProjectId.text" - ) - """ - TBC - """ - GOOGLE_PROJECT_NUMBER: ClassVar[NumericField] = NumericField( - "googleProjectNumber", "googleProjectNumber" - ) - """ - TBC - """ - GOOGLE_LOCATION: ClassVar[KeywordField] = KeywordField( - "googleLocation", "googleLocation" - ) - """ - TBC - """ - GOOGLE_LOCATION_TYPE: ClassVar[KeywordField] = KeywordField( - "googleLocationType", "googleLocationType" - ) - """ - TBC - """ - GOOGLE_LABELS: ClassVar[KeywordField] = KeywordField("googleLabels", "googleLabels") - """ - TBC - """ - GOOGLE_TAGS: ClassVar[KeywordField] = KeywordField("googleTags", "googleTags") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "google_service", - "google_project_name", - "google_project_id", - "google_project_number", - "google_location", - "google_location_type", - "google_labels", - "google_tags", - ] - - @property - def google_service(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_service - - @google_service.setter - def google_service(self, google_service: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_service = google_service - - @property - def google_project_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_project_name - - @google_project_name.setter - def google_project_name(self, google_project_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_project_name = google_project_name - - @property - def google_project_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_project_id - - @google_project_id.setter - def google_project_id(self, google_project_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_project_id = google_project_id - - @property - def google_project_number(self) -> Optional[int]: - return ( - None 
if self.attributes is None else self.attributes.google_project_number - ) - - @google_project_number.setter - def google_project_number(self, google_project_number: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_project_number = google_project_number - - @property - def google_location(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_location - - @google_location.setter - def google_location(self, google_location: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_location = google_location - - @property - def google_location_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_location_type - - @google_location_type.setter - def google_location_type(self, google_location_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_location_type = google_location_type - - @property - def google_labels(self) -> Optional[list[GoogleLabel]]: - return None if self.attributes is None else self.attributes.google_labels - - @google_labels.setter - def google_labels(self, google_labels: Optional[list[GoogleLabel]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_labels = google_labels - - @property - def google_tags(self) -> Optional[list[GoogleTag]]: - return None if self.attributes is None else self.attributes.google_tags - - @google_tags.setter - def google_tags(self, google_tags: Optional[list[GoogleTag]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_tags = google_tags - - class Attributes(Cloud.Attributes): - google_service: Optional[str] = Field( - None, description="", alias="googleService" - ) - google_project_name: Optional[str] = Field( - None, description="", alias="googleProjectName" - ) - google_project_id: Optional[str] = Field( - None, description="", alias="googleProjectId" - ) - google_project_number: Optional[int] = Field( - None, description="", alias="googleProjectNumber" - ) - google_location: Optional[str] = Field( - None, description="", alias="googleLocation" - ) - google_location_type: Optional[str] = Field( - None, description="", alias="googleLocationType" - ) - google_labels: Optional[list[GoogleLabel]] = Field( - None, description="", alias="googleLabels" - ) - google_tags: Optional[list[GoogleTag]] = Field( - None, description="", alias="googleTags" - ) - - attributes: "Google.Attributes" = Field( - default_factory=lambda: Google.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -Google.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset31.py b/pyatlan/model/assets/asset31.py index fc73e8633..8ce729f24 100644 --- a/pyatlan/model/assets/asset31.py +++ b/pyatlan/model/assets/asset31.py @@ -8,125 +8,201 @@ from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField -from pyatlan.model.structs import AzureTag +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + NumericField, +) +from pyatlan.model.structs import GoogleLabel, GoogleTag -from .asset08 import Cloud +from .asset09 import Cloud -class Azure(Cloud): +class Google(Cloud): """Description""" - type_name: str = Field("Azure", allow_mutation=False) + type_name: str = Field("Google", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Azure": - raise ValueError("must be Azure") + if v != "Google": + raise ValueError("must be Google") return v def __setattr__(self, name, value): - if name in Azure._convenience_properties: + if name in Google._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - AZURE_RESOURCE_ID: ClassVar[KeywordTextField] = KeywordTextField( - "azureResourceId", "azureResourceId", "azureResourceId.text" + GOOGLE_SERVICE: ClassVar[KeywordField] = KeywordField( + "googleService", "googleService" ) """ - TBC + Service in Google in which the asset exists. """ - AZURE_LOCATION: ClassVar[KeywordField] = KeywordField( - "azureLocation", "azureLocation" + GOOGLE_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "googleProjectName", "googleProjectName", "googleProjectName.text" ) """ - TBC + Name of the project in which the asset exists. """ - ADLS_ACCOUNT_SECONDARY_LOCATION: ClassVar[KeywordField] = KeywordField( - "adlsAccountSecondaryLocation", "adlsAccountSecondaryLocation" + GOOGLE_PROJECT_ID: ClassVar[KeywordTextField] = KeywordTextField( + "googleProjectId", "googleProjectId", "googleProjectId.text" ) """ - TBC + ID of the project in which the asset exists. """ - AZURE_TAGS: ClassVar[KeywordField] = KeywordField("azureTags", "azureTags") + GOOGLE_PROJECT_NUMBER: ClassVar[NumericField] = NumericField( + "googleProjectNumber", "googleProjectNumber" + ) + """ + Number of the project in which the asset exists. + """ + GOOGLE_LOCATION: ClassVar[KeywordField] = KeywordField( + "googleLocation", "googleLocation" + ) + """ + Location of this asset in Google. + """ + GOOGLE_LOCATION_TYPE: ClassVar[KeywordField] = KeywordField( + "googleLocationType", "googleLocationType" + ) + """ + Type of location of this asset in Google. + """ + GOOGLE_LABELS: ClassVar[KeywordField] = KeywordField("googleLabels", "googleLabels") """ - TBC + List of labels that have been applied to the asset in Google. + """ + GOOGLE_TAGS: ClassVar[KeywordField] = KeywordField("googleTags", "googleTags") + """ + List of tags that have been applied to the asset in Google. 
""" _convenience_properties: ClassVar[list[str]] = [ - "azure_resource_id", - "azure_location", - "adls_account_secondary_location", - "azure_tags", + "google_service", + "google_project_name", + "google_project_id", + "google_project_number", + "google_location", + "google_location_type", + "google_labels", + "google_tags", ] @property - def azure_resource_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.azure_resource_id + def google_service(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_service - @azure_resource_id.setter - def azure_resource_id(self, azure_resource_id: Optional[str]): + @google_service.setter + def google_service(self, google_service: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.azure_resource_id = azure_resource_id + self.attributes.google_service = google_service @property - def azure_location(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.azure_location + def google_project_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_project_name - @azure_location.setter - def azure_location(self, azure_location: Optional[str]): + @google_project_name.setter + def google_project_name(self, google_project_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.azure_location = azure_location + self.attributes.google_project_name = google_project_name @property - def adls_account_secondary_location(self) -> Optional[str]: + def google_project_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_project_id + + @google_project_id.setter + def google_project_id(self, google_project_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_project_id = google_project_id + + @property + def google_project_number(self) -> Optional[int]: return ( - None - if self.attributes is None - else self.attributes.adls_account_secondary_location + None if self.attributes is None else self.attributes.google_project_number ) - @adls_account_secondary_location.setter - def adls_account_secondary_location( - self, adls_account_secondary_location: Optional[str] - ): + @google_project_number.setter + def google_project_number(self, google_project_number: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.adls_account_secondary_location = ( - adls_account_secondary_location - ) + self.attributes.google_project_number = google_project_number + + @property + def google_location(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_location + + @google_location.setter + def google_location(self, google_location: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_location = google_location + + @property + def google_location_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_location_type + + @google_location_type.setter + def google_location_type(self, google_location_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_location_type = google_location_type @property - def azure_tags(self) -> Optional[list[AzureTag]]: - return None if self.attributes is None else self.attributes.azure_tags + 
def google_labels(self) -> Optional[list[GoogleLabel]]: + return None if self.attributes is None else self.attributes.google_labels - @azure_tags.setter - def azure_tags(self, azure_tags: Optional[list[AzureTag]]): + @google_labels.setter + def google_labels(self, google_labels: Optional[list[GoogleLabel]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.azure_tags = azure_tags + self.attributes.google_labels = google_labels + + @property + def google_tags(self) -> Optional[list[GoogleTag]]: + return None if self.attributes is None else self.attributes.google_tags + + @google_tags.setter + def google_tags(self, google_tags: Optional[list[GoogleTag]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_tags = google_tags class Attributes(Cloud.Attributes): - azure_resource_id: Optional[str] = Field( - None, description="", alias="azureResourceId" + google_service: Optional[str] = Field( + None, description="", alias="googleService" + ) + google_project_name: Optional[str] = Field( + None, description="", alias="googleProjectName" + ) + google_project_id: Optional[str] = Field( + None, description="", alias="googleProjectId" + ) + google_project_number: Optional[int] = Field( + None, description="", alias="googleProjectNumber" + ) + google_location: Optional[str] = Field( + None, description="", alias="googleLocation" ) - azure_location: Optional[str] = Field( - None, description="", alias="azureLocation" + google_location_type: Optional[str] = Field( + None, description="", alias="googleLocationType" ) - adls_account_secondary_location: Optional[str] = Field( - None, description="", alias="adlsAccountSecondaryLocation" + google_labels: Optional[list[GoogleLabel]] = Field( + None, description="", alias="googleLabels" ) - azure_tags: Optional[list[AzureTag]] = Field( - None, description="", alias="azureTags" + google_tags: Optional[list[GoogleTag]] = Field( + None, description="", alias="googleTags" ) - attributes: "Azure.Attributes" = Field( - default_factory=lambda: Azure.Attributes(), + attributes: "Google.Attributes" = Field( + default_factory=lambda: Google.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -Azure.Attributes.update_forward_refs() +Google.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset32.py b/pyatlan/model/assets/asset32.py index 6e755cbd4..a3017e773 100644 --- a/pyatlan/model/assets/asset32.py +++ b/pyatlan/model/assets/asset32.py @@ -9,196 +9,124 @@ from pydantic import Field, validator from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField -from pyatlan.model.structs import AwsTag +from pyatlan.model.structs import AzureTag -from .asset08 import Cloud +from .asset09 import Cloud -class AWS(Cloud): +class Azure(Cloud): """Description""" - type_name: str = Field("AWS", allow_mutation=False) + type_name: str = Field("Azure", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "AWS": - raise ValueError("must be AWS") + if v != "Azure": + raise ValueError("must be Azure") return v def __setattr__(self, name, value): - if name in AWS._convenience_properties: + if name in Azure._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - AWS_ARN: ClassVar[KeywordTextField] = KeywordTextField( - "awsArn", "awsArn", "awsArn.text" + AZURE_RESOURCE_ID: ClassVar[KeywordTextField] = KeywordTextField( + "azureResourceId", "azureResourceId", "azureResourceId.text" ) """ - TBC + Resource identifier of this asset in Azure. """ - AWS_PARTITION: ClassVar[KeywordField] = KeywordField("awsPartition", "awsPartition") - """ - TBC - """ - AWS_SERVICE: ClassVar[KeywordField] = KeywordField("awsService", "awsService") - """ - TBC - """ - AWS_REGION: ClassVar[KeywordField] = KeywordField("awsRegion", "awsRegion") - """ - TBC - """ - AWS_ACCOUNT_ID: ClassVar[KeywordField] = KeywordField( - "awsAccountId", "awsAccountId" + AZURE_LOCATION: ClassVar[KeywordField] = KeywordField( + "azureLocation", "azureLocation" ) """ - TBC + Location of this asset in Azure. """ - AWS_RESOURCE_ID: ClassVar[KeywordField] = KeywordField( - "awsResourceId", "awsResourceId" + ADLS_ACCOUNT_SECONDARY_LOCATION: ClassVar[KeywordField] = KeywordField( + "adlsAccountSecondaryLocation", "adlsAccountSecondaryLocation" ) """ - TBC - """ - AWS_OWNER_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "awsOwnerName", "awsOwnerName", "awsOwnerName.text" - ) - """ - TBC - """ - AWS_OWNER_ID: ClassVar[KeywordField] = KeywordField("awsOwnerId", "awsOwnerId") + Secondary location of the ADLS account. """ - TBC + AZURE_TAGS: ClassVar[KeywordField] = KeywordField("azureTags", "azureTags") """ - AWS_TAGS: ClassVar[KeywordField] = KeywordField("awsTags", "awsTags") - """ - TBC + Tags that have been applied to this asset in Azure. 
""" _convenience_properties: ClassVar[list[str]] = [ - "aws_arn", - "aws_partition", - "aws_service", - "aws_region", - "aws_account_id", - "aws_resource_id", - "aws_owner_name", - "aws_owner_id", - "aws_tags", + "azure_resource_id", + "azure_location", + "adls_account_secondary_location", + "azure_tags", ] @property - def aws_arn(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.aws_arn - - @aws_arn.setter - def aws_arn(self, aws_arn: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.aws_arn = aws_arn - - @property - def aws_partition(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.aws_partition + def azure_resource_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.azure_resource_id - @aws_partition.setter - def aws_partition(self, aws_partition: Optional[str]): + @azure_resource_id.setter + def azure_resource_id(self, azure_resource_id: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.aws_partition = aws_partition + self.attributes.azure_resource_id = azure_resource_id @property - def aws_service(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.aws_service + def azure_location(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.azure_location - @aws_service.setter - def aws_service(self, aws_service: Optional[str]): + @azure_location.setter + def azure_location(self, azure_location: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.aws_service = aws_service + self.attributes.azure_location = azure_location @property - def aws_region(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.aws_region - - @aws_region.setter - def aws_region(self, aws_region: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.aws_region = aws_region - - @property - def aws_account_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.aws_account_id - - @aws_account_id.setter - def aws_account_id(self, aws_account_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.aws_account_id = aws_account_id - - @property - def aws_resource_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.aws_resource_id - - @aws_resource_id.setter - def aws_resource_id(self, aws_resource_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.aws_resource_id = aws_resource_id - - @property - def aws_owner_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.aws_owner_name - - @aws_owner_name.setter - def aws_owner_name(self, aws_owner_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.aws_owner_name = aws_owner_name - - @property - def aws_owner_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.aws_owner_id + def adls_account_secondary_location(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.adls_account_secondary_location + ) - @aws_owner_id.setter - def aws_owner_id(self, aws_owner_id: Optional[str]): + @adls_account_secondary_location.setter + def 
adls_account_secondary_location( + self, adls_account_secondary_location: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.aws_owner_id = aws_owner_id + self.attributes.adls_account_secondary_location = ( + adls_account_secondary_location + ) @property - def aws_tags(self) -> Optional[list[AwsTag]]: - return None if self.attributes is None else self.attributes.aws_tags + def azure_tags(self) -> Optional[list[AzureTag]]: + return None if self.attributes is None else self.attributes.azure_tags - @aws_tags.setter - def aws_tags(self, aws_tags: Optional[list[AwsTag]]): + @azure_tags.setter + def azure_tags(self, azure_tags: Optional[list[AzureTag]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.aws_tags = aws_tags + self.attributes.azure_tags = azure_tags class Attributes(Cloud.Attributes): - aws_arn: Optional[str] = Field(None, description="", alias="awsArn") - aws_partition: Optional[str] = Field(None, description="", alias="awsPartition") - aws_service: Optional[str] = Field(None, description="", alias="awsService") - aws_region: Optional[str] = Field(None, description="", alias="awsRegion") - aws_account_id: Optional[str] = Field( - None, description="", alias="awsAccountId" + azure_resource_id: Optional[str] = Field( + None, description="", alias="azureResourceId" + ) + azure_location: Optional[str] = Field( + None, description="", alias="azureLocation" ) - aws_resource_id: Optional[str] = Field( - None, description="", alias="awsResourceId" + adls_account_secondary_location: Optional[str] = Field( + None, description="", alias="adlsAccountSecondaryLocation" ) - aws_owner_name: Optional[str] = Field( - None, description="", alias="awsOwnerName" + azure_tags: Optional[list[AzureTag]] = Field( + None, description="", alias="azureTags" ) - aws_owner_id: Optional[str] = Field(None, description="", alias="awsOwnerId") - aws_tags: Optional[list[AwsTag]] = Field(None, description="", alias="awsTags") - attributes: "AWS.Attributes" = Field( - default_factory=lambda: AWS.Attributes(), + attributes: "Azure.Attributes" = Field( + default_factory=lambda: Azure.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -AWS.Attributes.update_forward_refs() +Azure.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset33.py b/pyatlan/model/assets/asset33.py index 6d0d0565b..aa6c347d6 100644 --- a/pyatlan/model/assets/asset33.py +++ b/pyatlan/model/assets/asset33.py @@ -4,600 +4,201 @@ from __future__ import annotations -from datetime import datetime from typing import ClassVar, Optional from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import ( - KeywordField, - KeywordTextField, - NumericField, - RelationField, -) +from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField +from pyatlan.model.structs import AwsTag -from .asset00 import ( - AirflowTask, - Catalog, - ColumnProcess, - Dbt, - MatillionComponent, - Process, -) +from .asset09 import Cloud -class DbtColumnProcess(Dbt): +class AWS(Cloud): """Description""" - type_name: str = Field("DbtColumnProcess", allow_mutation=False) + type_name: str = Field("AWS", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "DbtColumnProcess": - raise ValueError("must be DbtColumnProcess") + if v != "AWS": + raise ValueError("must be AWS") return v def __setattr__(self, name, value): - if name in DbtColumnProcess._convenience_properties: + if name in AWS._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - DBT_COLUMN_PROCESS_JOB_STATUS: ClassVar[KeywordField] = KeywordField( - "dbtColumnProcessJobStatus", "dbtColumnProcessJobStatus" + AWS_ARN: ClassVar[KeywordTextField] = KeywordTextField( + "awsArn", "awsArn", "awsArn.text" ) """ - TBC + Amazon Resource Name (ARN) for this asset. This uniquely identifies the asset in AWS, and thus must be unique across all AWS asset instances. + """ # noqa: E501 + AWS_PARTITION: ClassVar[KeywordField] = KeywordField("awsPartition", "awsPartition") """ - DBT_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( - "dbtAlias", "dbtAlias.keyword", "dbtAlias" - ) - """ - TBC + Group of AWS region and service objects. """ - DBT_META: ClassVar[KeywordField] = KeywordField("dbtMeta", "dbtMeta") + AWS_SERVICE: ClassVar[KeywordField] = KeywordField("awsService", "awsService") """ - TBC + Type of service in which the asset exists. """ - DBT_UNIQUE_ID: ClassVar[KeywordTextField] = KeywordTextField( - "dbtUniqueId", "dbtUniqueId.keyword", "dbtUniqueId" - ) + AWS_REGION: ClassVar[KeywordField] = KeywordField("awsRegion", "awsRegion") """ - TBC + Physical region where the data center in which the asset exists is clustered. """ - DBT_ACCOUNT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtAccountName", "dbtAccountName.keyword", "dbtAccountName" + AWS_ACCOUNT_ID: ClassVar[KeywordField] = KeywordField( + "awsAccountId", "awsAccountId" ) """ - TBC + 12-digit number that uniquely identifies an AWS account. """ - DBT_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtProjectName", "dbtProjectName.keyword", "dbtProjectName" + AWS_RESOURCE_ID: ClassVar[KeywordField] = KeywordField( + "awsResourceId", "awsResourceId" ) """ - TBC + Unique resource ID assigned when a new resource is created. """ - DBT_PACKAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtPackageName", "dbtPackageName.keyword", "dbtPackageName" + AWS_OWNER_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "awsOwnerName", "awsOwnerName", "awsOwnerName.text" ) """ - TBC + Root user's name. 
""" - DBT_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtJobName", "dbtJobName.keyword", "dbtJobName" - ) - """ - TBC + AWS_OWNER_ID: ClassVar[KeywordField] = KeywordField("awsOwnerId", "awsOwnerId") """ - DBT_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( - "dbtJobSchedule", "dbtJobSchedule" - ) + Root user's ID. """ - TBC - """ - DBT_JOB_STATUS: ClassVar[KeywordField] = KeywordField( - "dbtJobStatus", "dbtJobStatus" - ) - """ - TBC - """ - DBT_JOB_SCHEDULE_CRON_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( - "dbtJobScheduleCronHumanized", - "dbtJobScheduleCronHumanized.keyword", - "dbtJobScheduleCronHumanized", - ) + AWS_TAGS: ClassVar[KeywordField] = KeywordField("awsTags", "awsTags") """ - TBC - """ - DBT_JOB_LAST_RUN: ClassVar[NumericField] = NumericField( - "dbtJobLastRun", "dbtJobLastRun" - ) - """ - TBC - """ - DBT_JOB_NEXT_RUN: ClassVar[NumericField] = NumericField( - "dbtJobNextRun", "dbtJobNextRun" - ) - """ - TBC - """ - DBT_JOB_NEXT_RUN_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( - "dbtJobNextRunHumanized", - "dbtJobNextRunHumanized.keyword", - "dbtJobNextRunHumanized", - ) - """ - TBC - """ - DBT_ENVIRONMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtEnvironmentName", "dbtEnvironmentName.keyword", "dbtEnvironmentName" - ) - """ - TBC - """ - DBT_ENVIRONMENT_DBT_VERSION: ClassVar[KeywordTextField] = KeywordTextField( - "dbtEnvironmentDbtVersion", - "dbtEnvironmentDbtVersion.keyword", - "dbtEnvironmentDbtVersion", - ) - """ - TBC - """ - DBT_TAGS: ClassVar[KeywordField] = KeywordField("dbtTags", "dbtTags") - """ - TBC - """ - DBT_CONNECTION_CONTEXT: ClassVar[KeywordField] = KeywordField( - "dbtConnectionContext", "dbtConnectionContext" - ) - """ - TBC - """ - DBT_SEMANTIC_LAYER_PROXY_URL: ClassVar[KeywordField] = KeywordField( - "dbtSemanticLayerProxyUrl", "dbtSemanticLayerProxyUrl" - ) - """ - TBC - """ - CODE: ClassVar[KeywordField] = KeywordField("code", "code") - """ - TBC - """ - SQL: ClassVar[KeywordField] = KeywordField("sql", "sql") - """ - TBC - """ - AST: ClassVar[KeywordField] = KeywordField("ast", "ast") - """ - TBC - """ - - MATILLION_COMPONENT: ClassVar[RelationField] = RelationField("matillionComponent") - """ - TBC - """ - PROCESS: ClassVar[RelationField] = RelationField("process") - """ - TBC - """ - AIRFLOW_TASKS: ClassVar[RelationField] = RelationField("airflowTasks") - """ - TBC - """ - COLUMN_PROCESSES: ClassVar[RelationField] = RelationField("columnProcesses") - """ - TBC + List of tags that have been applied to the asset in AWS. 
""" _convenience_properties: ClassVar[list[str]] = [ - "dbt_column_process_job_status", - "dbt_alias", - "dbt_meta", - "dbt_unique_id", - "dbt_account_name", - "dbt_project_name", - "dbt_package_name", - "dbt_job_name", - "dbt_job_schedule", - "dbt_job_status", - "dbt_job_schedule_cron_humanized", - "dbt_job_last_run", - "dbt_job_next_run", - "dbt_job_next_run_humanized", - "dbt_environment_name", - "dbt_environment_dbt_version", - "dbt_tags", - "dbt_connection_context", - "dbt_semantic_layer_proxy_url", - "inputs", - "outputs", - "code", - "sql", - "ast", - "matillion_component", - "process", - "airflow_tasks", - "column_processes", + "aws_arn", + "aws_partition", + "aws_service", + "aws_region", + "aws_account_id", + "aws_resource_id", + "aws_owner_name", + "aws_owner_id", + "aws_tags", ] @property - def dbt_column_process_job_status(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_column_process_job_status - ) - - @dbt_column_process_job_status.setter - def dbt_column_process_job_status( - self, dbt_column_process_job_status: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_column_process_job_status = dbt_column_process_job_status - - @property - def dbt_alias(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_alias + def aws_arn(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.aws_arn - @dbt_alias.setter - def dbt_alias(self, dbt_alias: Optional[str]): + @aws_arn.setter + def aws_arn(self, aws_arn: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dbt_alias = dbt_alias + self.attributes.aws_arn = aws_arn @property - def dbt_meta(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_meta + def aws_partition(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.aws_partition - @dbt_meta.setter - def dbt_meta(self, dbt_meta: Optional[str]): + @aws_partition.setter + def aws_partition(self, aws_partition: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dbt_meta = dbt_meta + self.attributes.aws_partition = aws_partition @property - def dbt_unique_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_unique_id + def aws_service(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.aws_service - @dbt_unique_id.setter - def dbt_unique_id(self, dbt_unique_id: Optional[str]): + @aws_service.setter + def aws_service(self, aws_service: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dbt_unique_id = dbt_unique_id + self.attributes.aws_service = aws_service @property - def dbt_account_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_account_name + def aws_region(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.aws_region - @dbt_account_name.setter - def dbt_account_name(self, dbt_account_name: Optional[str]): + @aws_region.setter + def aws_region(self, aws_region: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dbt_account_name = dbt_account_name + self.attributes.aws_region = aws_region @property - def dbt_project_name(self) -> Optional[str]: - return None if self.attributes is None else 
self.attributes.dbt_project_name + def aws_account_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.aws_account_id - @dbt_project_name.setter - def dbt_project_name(self, dbt_project_name: Optional[str]): + @aws_account_id.setter + def aws_account_id(self, aws_account_id: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dbt_project_name = dbt_project_name + self.attributes.aws_account_id = aws_account_id @property - def dbt_package_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_package_name + def aws_resource_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.aws_resource_id - @dbt_package_name.setter - def dbt_package_name(self, dbt_package_name: Optional[str]): + @aws_resource_id.setter + def aws_resource_id(self, aws_resource_id: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dbt_package_name = dbt_package_name + self.attributes.aws_resource_id = aws_resource_id @property - def dbt_job_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_job_name + def aws_owner_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.aws_owner_name - @dbt_job_name.setter - def dbt_job_name(self, dbt_job_name: Optional[str]): + @aws_owner_name.setter + def aws_owner_name(self, aws_owner_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dbt_job_name = dbt_job_name + self.attributes.aws_owner_name = aws_owner_name @property - def dbt_job_schedule(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_job_schedule + def aws_owner_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.aws_owner_id - @dbt_job_schedule.setter - def dbt_job_schedule(self, dbt_job_schedule: Optional[str]): + @aws_owner_id.setter + def aws_owner_id(self, aws_owner_id: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dbt_job_schedule = dbt_job_schedule + self.attributes.aws_owner_id = aws_owner_id @property - def dbt_job_status(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_job_status + def aws_tags(self) -> Optional[list[AwsTag]]: + return None if self.attributes is None else self.attributes.aws_tags - @dbt_job_status.setter - def dbt_job_status(self, dbt_job_status: Optional[str]): + @aws_tags.setter + def aws_tags(self, aws_tags: Optional[list[AwsTag]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dbt_job_status = dbt_job_status + self.attributes.aws_tags = aws_tags - @property - def dbt_job_schedule_cron_humanized(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_job_schedule_cron_humanized + class Attributes(Cloud.Attributes): + aws_arn: Optional[str] = Field(None, description="", alias="awsArn") + aws_partition: Optional[str] = Field(None, description="", alias="awsPartition") + aws_service: Optional[str] = Field(None, description="", alias="awsService") + aws_region: Optional[str] = Field(None, description="", alias="awsRegion") + aws_account_id: Optional[str] = Field( + None, description="", alias="awsAccountId" ) - - @dbt_job_schedule_cron_humanized.setter - def dbt_job_schedule_cron_humanized( - self, 
dbt_job_schedule_cron_humanized: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_schedule_cron_humanized = ( - dbt_job_schedule_cron_humanized + aws_resource_id: Optional[str] = Field( + None, description="", alias="awsResourceId" ) - - @property - def dbt_job_last_run(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.dbt_job_last_run - - @dbt_job_last_run.setter - def dbt_job_last_run(self, dbt_job_last_run: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_last_run = dbt_job_last_run - - @property - def dbt_job_next_run(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.dbt_job_next_run - - @dbt_job_next_run.setter - def dbt_job_next_run(self, dbt_job_next_run: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_next_run = dbt_job_next_run - - @property - def dbt_job_next_run_humanized(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_job_next_run_humanized + aws_owner_name: Optional[str] = Field( + None, description="", alias="awsOwnerName" ) + aws_owner_id: Optional[str] = Field(None, description="", alias="awsOwnerId") + aws_tags: Optional[list[AwsTag]] = Field(None, description="", alias="awsTags") - @dbt_job_next_run_humanized.setter - def dbt_job_next_run_humanized(self, dbt_job_next_run_humanized: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_next_run_humanized = dbt_job_next_run_humanized - - @property - def dbt_environment_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_environment_name - - @dbt_environment_name.setter - def dbt_environment_name(self, dbt_environment_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_environment_name = dbt_environment_name - - @property - def dbt_environment_dbt_version(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_environment_dbt_version - ) - - @dbt_environment_dbt_version.setter - def dbt_environment_dbt_version(self, dbt_environment_dbt_version: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_environment_dbt_version = dbt_environment_dbt_version - - @property - def dbt_tags(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.dbt_tags - - @dbt_tags.setter - def dbt_tags(self, dbt_tags: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_tags = dbt_tags - - @property - def dbt_connection_context(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dbt_connection_context - ) - - @dbt_connection_context.setter - def dbt_connection_context(self, dbt_connection_context: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_connection_context = dbt_connection_context - - @property - def dbt_semantic_layer_proxy_url(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_semantic_layer_proxy_url - ) - - @dbt_semantic_layer_proxy_url.setter - def dbt_semantic_layer_proxy_url(self, dbt_semantic_layer_proxy_url: Optional[str]): - if 
self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_semantic_layer_proxy_url = dbt_semantic_layer_proxy_url - - @property - def inputs(self) -> Optional[list[Catalog]]: - return None if self.attributes is None else self.attributes.inputs - - @inputs.setter - def inputs(self, inputs: Optional[list[Catalog]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.inputs = inputs - - @property - def outputs(self) -> Optional[list[Catalog]]: - return None if self.attributes is None else self.attributes.outputs - - @outputs.setter - def outputs(self, outputs: Optional[list[Catalog]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.outputs = outputs - - @property - def code(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.code - - @code.setter - def code(self, code: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.code = code - - @property - def sql(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.sql - - @sql.setter - def sql(self, sql: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sql = sql - - @property - def ast(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.ast - - @ast.setter - def ast(self, ast: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.ast = ast - - @property - def matillion_component(self) -> Optional[MatillionComponent]: - return None if self.attributes is None else self.attributes.matillion_component - - @matillion_component.setter - def matillion_component(self, matillion_component: Optional[MatillionComponent]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_component = matillion_component - - @property - def process(self) -> Optional[Process]: - return None if self.attributes is None else self.attributes.process - - @process.setter - def process(self, process: Optional[Process]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.process = process - - @property - def airflow_tasks(self) -> Optional[list[AirflowTask]]: - return None if self.attributes is None else self.attributes.airflow_tasks - - @airflow_tasks.setter - def airflow_tasks(self, airflow_tasks: Optional[list[AirflowTask]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_tasks = airflow_tasks - - @property - def column_processes(self) -> Optional[list[ColumnProcess]]: - return None if self.attributes is None else self.attributes.column_processes - - @column_processes.setter - def column_processes(self, column_processes: Optional[list[ColumnProcess]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_processes = column_processes - - class Attributes(Dbt.Attributes): - dbt_column_process_job_status: Optional[str] = Field( - None, description="", alias="dbtColumnProcessJobStatus" - ) - dbt_alias: Optional[str] = Field(None, description="", alias="dbtAlias") - dbt_meta: Optional[str] = Field(None, description="", alias="dbtMeta") - dbt_unique_id: Optional[str] = Field(None, description="", alias="dbtUniqueId") - dbt_account_name: Optional[str] = Field( - None, description="", alias="dbtAccountName" - ) - dbt_project_name: Optional[str] = Field( - None, description="", 
alias="dbtProjectName" - ) - dbt_package_name: Optional[str] = Field( - None, description="", alias="dbtPackageName" - ) - dbt_job_name: Optional[str] = Field(None, description="", alias="dbtJobName") - dbt_job_schedule: Optional[str] = Field( - None, description="", alias="dbtJobSchedule" - ) - dbt_job_status: Optional[str] = Field( - None, description="", alias="dbtJobStatus" - ) - dbt_job_schedule_cron_humanized: Optional[str] = Field( - None, description="", alias="dbtJobScheduleCronHumanized" - ) - dbt_job_last_run: Optional[datetime] = Field( - None, description="", alias="dbtJobLastRun" - ) - dbt_job_next_run: Optional[datetime] = Field( - None, description="", alias="dbtJobNextRun" - ) - dbt_job_next_run_humanized: Optional[str] = Field( - None, description="", alias="dbtJobNextRunHumanized" - ) - dbt_environment_name: Optional[str] = Field( - None, description="", alias="dbtEnvironmentName" - ) - dbt_environment_dbt_version: Optional[str] = Field( - None, description="", alias="dbtEnvironmentDbtVersion" - ) - dbt_tags: Optional[set[str]] = Field(None, description="", alias="dbtTags") - dbt_connection_context: Optional[str] = Field( - None, description="", alias="dbtConnectionContext" - ) - dbt_semantic_layer_proxy_url: Optional[str] = Field( - None, description="", alias="dbtSemanticLayerProxyUrl" - ) - inputs: Optional[list[Catalog]] = Field(None, description="", alias="inputs") - outputs: Optional[list[Catalog]] = Field(None, description="", alias="outputs") - code: Optional[str] = Field(None, description="", alias="code") - sql: Optional[str] = Field(None, description="", alias="sql") - ast: Optional[str] = Field(None, description="", alias="ast") - matillion_component: Optional[MatillionComponent] = Field( - None, description="", alias="matillionComponent" - ) # relationship - process: Optional[Process] = Field( - None, description="", alias="process" - ) # relationship - airflow_tasks: Optional[list[AirflowTask]] = Field( - None, description="", alias="airflowTasks" - ) # relationship - column_processes: Optional[list[ColumnProcess]] = Field( - None, description="", alias="columnProcesses" - ) # relationship - - attributes: "DbtColumnProcess.Attributes" = Field( - default_factory=lambda: DbtColumnProcess.Attributes(), + attributes: "AWS.Attributes" = Field( + default_factory=lambda: AWS.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -DbtColumnProcess.Attributes.update_forward_refs() +AWS.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset34.py b/pyatlan/model/assets/asset34.py index 858ba0356..1bbf18704 100644 --- a/pyatlan/model/assets/asset34.py +++ b/pyatlan/model/assets/asset34.py @@ -4,235 +4,600 @@ from __future__ import annotations +from datetime import datetime from typing import ClassVar, Optional from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField -from pyatlan.model.structs import AwsTag +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) -from .asset16 import ObjectStore +from .asset00 import ( + AirflowTask, + Catalog, + ColumnProcess, + Dbt, + MatillionComponent, + Process, +) -class S3(ObjectStore): +class DbtColumnProcess(Dbt): """Description""" - type_name: str = Field("S3", allow_mutation=False) + type_name: str = Field("DbtColumnProcess", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "S3": - raise ValueError("must be S3") + if v != "DbtColumnProcess": + raise ValueError("must be DbtColumnProcess") return v def __setattr__(self, name, value): - if name in S3._convenience_properties: + if name in DbtColumnProcess._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - S3E_TAG: ClassVar[KeywordTextField] = KeywordTextField( - "s3ETag", "s3ETag", "s3ETag.text" + DBT_COLUMN_PROCESS_JOB_STATUS: ClassVar[KeywordField] = KeywordField( + "dbtColumnProcessJobStatus", "dbtColumnProcessJobStatus" ) """ - TBC + + """ + DBT_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( + "dbtAlias", "dbtAlias.keyword", "dbtAlias" + ) """ - S3ENCRYPTION: ClassVar[KeywordField] = KeywordField("s3Encryption", "s3Encryption") + """ - TBC + DBT_META: ClassVar[KeywordField] = KeywordField("dbtMeta", "dbtMeta") """ - AWS_ARN: ClassVar[KeywordTextField] = KeywordTextField( - "awsArn", "awsArn", "awsArn.text" + + """ + DBT_UNIQUE_ID: ClassVar[KeywordTextField] = KeywordTextField( + "dbtUniqueId", "dbtUniqueId.keyword", "dbtUniqueId" ) """ - TBC + """ - AWS_PARTITION: ClassVar[KeywordField] = KeywordField("awsPartition", "awsPartition") + DBT_ACCOUNT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtAccountName", "dbtAccountName.keyword", "dbtAccountName" + ) """ - TBC + """ - AWS_SERVICE: ClassVar[KeywordField] = KeywordField("awsService", "awsService") + DBT_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtProjectName", "dbtProjectName.keyword", "dbtProjectName" + ) """ - TBC + """ - AWS_REGION: ClassVar[KeywordField] = KeywordField("awsRegion", "awsRegion") + DBT_PACKAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtPackageName", "dbtPackageName.keyword", "dbtPackageName" + ) """ - TBC + """ - AWS_ACCOUNT_ID: ClassVar[KeywordField] = KeywordField( - "awsAccountId", "awsAccountId" + DBT_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtJobName", "dbtJobName.keyword", "dbtJobName" ) """ - TBC + """ - AWS_RESOURCE_ID: ClassVar[KeywordField] = KeywordField( - "awsResourceId", "awsResourceId" + DBT_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( + "dbtJobSchedule", "dbtJobSchedule" ) """ - TBC + + """ + DBT_JOB_STATUS: ClassVar[KeywordField] = KeywordField( + "dbtJobStatus", "dbtJobStatus" + ) + """ + + """ + 
DBT_JOB_SCHEDULE_CRON_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( + "dbtJobScheduleCronHumanized", + "dbtJobScheduleCronHumanized.keyword", + "dbtJobScheduleCronHumanized", + ) + """ + + """ + DBT_JOB_LAST_RUN: ClassVar[NumericField] = NumericField( + "dbtJobLastRun", "dbtJobLastRun" + ) """ - AWS_OWNER_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "awsOwnerName", "awsOwnerName", "awsOwnerName.text" + + """ + DBT_JOB_NEXT_RUN: ClassVar[NumericField] = NumericField( + "dbtJobNextRun", "dbtJobNextRun" + ) + """ + + """ + DBT_JOB_NEXT_RUN_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( + "dbtJobNextRunHumanized", + "dbtJobNextRunHumanized.keyword", + "dbtJobNextRunHumanized", ) """ + + """ + DBT_ENVIRONMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtEnvironmentName", "dbtEnvironmentName.keyword", "dbtEnvironmentName" + ) + """ + + """ + DBT_ENVIRONMENT_DBT_VERSION: ClassVar[KeywordTextField] = KeywordTextField( + "dbtEnvironmentDbtVersion", + "dbtEnvironmentDbtVersion.keyword", + "dbtEnvironmentDbtVersion", + ) + """ + + """ + DBT_TAGS: ClassVar[KeywordField] = KeywordField("dbtTags", "dbtTags") + """ + + """ + DBT_CONNECTION_CONTEXT: ClassVar[KeywordField] = KeywordField( + "dbtConnectionContext", "dbtConnectionContext" + ) + """ + + """ + DBT_SEMANTIC_LAYER_PROXY_URL: ClassVar[KeywordField] = KeywordField( + "dbtSemanticLayerProxyUrl", "dbtSemanticLayerProxyUrl" + ) + """ + + """ + CODE: ClassVar[KeywordField] = KeywordField("code", "code") + """ + Code that ran within the process. + """ + SQL: ClassVar[KeywordField] = KeywordField("sql", "sql") + """ + SQL query that ran to produce the outputs. + """ + AST: ClassVar[KeywordField] = KeywordField("ast", "ast") + """ + Parsed AST of the code or SQL statements that describe the logic of this process. 
+ """ + + MATILLION_COMPONENT: ClassVar[RelationField] = RelationField("matillionComponent") + """ + TBC + """ + PROCESS: ClassVar[RelationField] = RelationField("process") + """ TBC """ - AWS_OWNER_ID: ClassVar[KeywordField] = KeywordField("awsOwnerId", "awsOwnerId") + AIRFLOW_TASKS: ClassVar[RelationField] = RelationField("airflowTasks") """ TBC """ - AWS_TAGS: ClassVar[KeywordField] = KeywordField("awsTags", "awsTags") + COLUMN_PROCESSES: ClassVar[RelationField] = RelationField("columnProcesses") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "s3_e_tag", - "s3_encryption", - "aws_arn", - "aws_partition", - "aws_service", - "aws_region", - "aws_account_id", - "aws_resource_id", - "aws_owner_name", - "aws_owner_id", - "aws_tags", + "dbt_column_process_job_status", + "dbt_alias", + "dbt_meta", + "dbt_unique_id", + "dbt_account_name", + "dbt_project_name", + "dbt_package_name", + "dbt_job_name", + "dbt_job_schedule", + "dbt_job_status", + "dbt_job_schedule_cron_humanized", + "dbt_job_last_run", + "dbt_job_next_run", + "dbt_job_next_run_humanized", + "dbt_environment_name", + "dbt_environment_dbt_version", + "dbt_tags", + "dbt_connection_context", + "dbt_semantic_layer_proxy_url", + "inputs", + "outputs", + "code", + "sql", + "ast", + "matillion_component", + "process", + "airflow_tasks", + "column_processes", ] @property - def s3_e_tag(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.s3_e_tag + def dbt_column_process_job_status(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_column_process_job_status + ) - @s3_e_tag.setter - def s3_e_tag(self, s3_e_tag: Optional[str]): + @dbt_column_process_job_status.setter + def dbt_column_process_job_status( + self, dbt_column_process_job_status: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.s3_e_tag = s3_e_tag + self.attributes.dbt_column_process_job_status = dbt_column_process_job_status @property - def s3_encryption(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.s3_encryption + def dbt_alias(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_alias - @s3_encryption.setter - def s3_encryption(self, s3_encryption: Optional[str]): + @dbt_alias.setter + def dbt_alias(self, dbt_alias: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.s3_encryption = s3_encryption + self.attributes.dbt_alias = dbt_alias @property - def aws_arn(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.aws_arn + def dbt_meta(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_meta - @aws_arn.setter - def aws_arn(self, aws_arn: Optional[str]): + @dbt_meta.setter + def dbt_meta(self, dbt_meta: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.aws_arn = aws_arn + self.attributes.dbt_meta = dbt_meta @property - def aws_partition(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.aws_partition + def dbt_unique_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_unique_id - @aws_partition.setter - def aws_partition(self, aws_partition: Optional[str]): + @dbt_unique_id.setter + def dbt_unique_id(self, dbt_unique_id: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - 
self.attributes.aws_partition = aws_partition + self.attributes.dbt_unique_id = dbt_unique_id @property - def aws_service(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.aws_service + def dbt_account_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_account_name - @aws_service.setter - def aws_service(self, aws_service: Optional[str]): + @dbt_account_name.setter + def dbt_account_name(self, dbt_account_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.aws_service = aws_service + self.attributes.dbt_account_name = dbt_account_name @property - def aws_region(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.aws_region + def dbt_project_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_project_name - @aws_region.setter - def aws_region(self, aws_region: Optional[str]): + @dbt_project_name.setter + def dbt_project_name(self, dbt_project_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.aws_region = aws_region + self.attributes.dbt_project_name = dbt_project_name @property - def aws_account_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.aws_account_id + def dbt_package_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_package_name - @aws_account_id.setter - def aws_account_id(self, aws_account_id: Optional[str]): + @dbt_package_name.setter + def dbt_package_name(self, dbt_package_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.aws_account_id = aws_account_id + self.attributes.dbt_package_name = dbt_package_name @property - def aws_resource_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.aws_resource_id + def dbt_job_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_job_name - @aws_resource_id.setter - def aws_resource_id(self, aws_resource_id: Optional[str]): + @dbt_job_name.setter + def dbt_job_name(self, dbt_job_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.aws_resource_id = aws_resource_id + self.attributes.dbt_job_name = dbt_job_name @property - def aws_owner_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.aws_owner_name + def dbt_job_schedule(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_job_schedule - @aws_owner_name.setter - def aws_owner_name(self, aws_owner_name: Optional[str]): + @dbt_job_schedule.setter + def dbt_job_schedule(self, dbt_job_schedule: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.aws_owner_name = aws_owner_name + self.attributes.dbt_job_schedule = dbt_job_schedule @property - def aws_owner_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.aws_owner_id + def dbt_job_status(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_job_status - @aws_owner_id.setter - def aws_owner_id(self, aws_owner_id: Optional[str]): + @dbt_job_status.setter + def dbt_job_status(self, dbt_job_status: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.aws_owner_id = aws_owner_id + 
self.attributes.dbt_job_status = dbt_job_status @property - def aws_tags(self) -> Optional[list[AwsTag]]: - return None if self.attributes is None else self.attributes.aws_tags + def dbt_job_schedule_cron_humanized(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_job_schedule_cron_humanized + ) - @aws_tags.setter - def aws_tags(self, aws_tags: Optional[list[AwsTag]]): + @dbt_job_schedule_cron_humanized.setter + def dbt_job_schedule_cron_humanized( + self, dbt_job_schedule_cron_humanized: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.aws_tags = aws_tags + self.attributes.dbt_job_schedule_cron_humanized = ( + dbt_job_schedule_cron_humanized + ) - class Attributes(ObjectStore.Attributes): - s3_e_tag: Optional[str] = Field(None, description="", alias="s3ETag") - s3_encryption: Optional[str] = Field(None, description="", alias="s3Encryption") - aws_arn: Optional[str] = Field(None, description="", alias="awsArn") - aws_partition: Optional[str] = Field(None, description="", alias="awsPartition") - aws_service: Optional[str] = Field(None, description="", alias="awsService") - aws_region: Optional[str] = Field(None, description="", alias="awsRegion") - aws_account_id: Optional[str] = Field( - None, description="", alias="awsAccountId" + @property + def dbt_job_last_run(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.dbt_job_last_run + + @dbt_job_last_run.setter + def dbt_job_last_run(self, dbt_job_last_run: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_last_run = dbt_job_last_run + + @property + def dbt_job_next_run(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.dbt_job_next_run + + @dbt_job_next_run.setter + def dbt_job_next_run(self, dbt_job_next_run: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_next_run = dbt_job_next_run + + @property + def dbt_job_next_run_humanized(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_job_next_run_humanized ) - aws_resource_id: Optional[str] = Field( - None, description="", alias="awsResourceId" + + @dbt_job_next_run_humanized.setter + def dbt_job_next_run_humanized(self, dbt_job_next_run_humanized: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_next_run_humanized = dbt_job_next_run_humanized + + @property + def dbt_environment_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_environment_name + + @dbt_environment_name.setter + def dbt_environment_name(self, dbt_environment_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_environment_name = dbt_environment_name + + @property + def dbt_environment_dbt_version(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_environment_dbt_version ) - aws_owner_name: Optional[str] = Field( - None, description="", alias="awsOwnerName" + + @dbt_environment_dbt_version.setter + def dbt_environment_dbt_version(self, dbt_environment_dbt_version: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_environment_dbt_version = dbt_environment_dbt_version + + @property + def dbt_tags(self) -> 
Optional[set[str]]: + return None if self.attributes is None else self.attributes.dbt_tags + + @dbt_tags.setter + def dbt_tags(self, dbt_tags: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_tags = dbt_tags + + @property + def dbt_connection_context(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.dbt_connection_context ) - aws_owner_id: Optional[str] = Field(None, description="", alias="awsOwnerId") - aws_tags: Optional[list[AwsTag]] = Field(None, description="", alias="awsTags") - attributes: "S3.Attributes" = Field( - default_factory=lambda: S3.Attributes(), + @dbt_connection_context.setter + def dbt_connection_context(self, dbt_connection_context: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_connection_context = dbt_connection_context + + @property + def dbt_semantic_layer_proxy_url(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_semantic_layer_proxy_url + ) + + @dbt_semantic_layer_proxy_url.setter + def dbt_semantic_layer_proxy_url(self, dbt_semantic_layer_proxy_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_semantic_layer_proxy_url = dbt_semantic_layer_proxy_url + + @property + def inputs(self) -> Optional[list[Catalog]]: + return None if self.attributes is None else self.attributes.inputs + + @inputs.setter + def inputs(self, inputs: Optional[list[Catalog]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.inputs = inputs + + @property + def outputs(self) -> Optional[list[Catalog]]: + return None if self.attributes is None else self.attributes.outputs + + @outputs.setter + def outputs(self, outputs: Optional[list[Catalog]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.outputs = outputs + + @property + def code(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.code + + @code.setter + def code(self, code: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.code = code + + @property + def sql(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.sql + + @sql.setter + def sql(self, sql: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sql = sql + + @property + def ast(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.ast + + @ast.setter + def ast(self, ast: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.ast = ast + + @property + def matillion_component(self) -> Optional[MatillionComponent]: + return None if self.attributes is None else self.attributes.matillion_component + + @matillion_component.setter + def matillion_component(self, matillion_component: Optional[MatillionComponent]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_component = matillion_component + + @property + def process(self) -> Optional[Process]: + return None if self.attributes is None else self.attributes.process + + @process.setter + def process(self, process: Optional[Process]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.process = process + + @property + def airflow_tasks(self) -> Optional[list[AirflowTask]]: + 
return None if self.attributes is None else self.attributes.airflow_tasks + + @airflow_tasks.setter + def airflow_tasks(self, airflow_tasks: Optional[list[AirflowTask]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_tasks = airflow_tasks + + @property + def column_processes(self) -> Optional[list[ColumnProcess]]: + return None if self.attributes is None else self.attributes.column_processes + + @column_processes.setter + def column_processes(self, column_processes: Optional[list[ColumnProcess]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_processes = column_processes + + class Attributes(Dbt.Attributes): + dbt_column_process_job_status: Optional[str] = Field( + None, description="", alias="dbtColumnProcessJobStatus" + ) + dbt_alias: Optional[str] = Field(None, description="", alias="dbtAlias") + dbt_meta: Optional[str] = Field(None, description="", alias="dbtMeta") + dbt_unique_id: Optional[str] = Field(None, description="", alias="dbtUniqueId") + dbt_account_name: Optional[str] = Field( + None, description="", alias="dbtAccountName" + ) + dbt_project_name: Optional[str] = Field( + None, description="", alias="dbtProjectName" + ) + dbt_package_name: Optional[str] = Field( + None, description="", alias="dbtPackageName" + ) + dbt_job_name: Optional[str] = Field(None, description="", alias="dbtJobName") + dbt_job_schedule: Optional[str] = Field( + None, description="", alias="dbtJobSchedule" + ) + dbt_job_status: Optional[str] = Field( + None, description="", alias="dbtJobStatus" + ) + dbt_job_schedule_cron_humanized: Optional[str] = Field( + None, description="", alias="dbtJobScheduleCronHumanized" + ) + dbt_job_last_run: Optional[datetime] = Field( + None, description="", alias="dbtJobLastRun" + ) + dbt_job_next_run: Optional[datetime] = Field( + None, description="", alias="dbtJobNextRun" + ) + dbt_job_next_run_humanized: Optional[str] = Field( + None, description="", alias="dbtJobNextRunHumanized" + ) + dbt_environment_name: Optional[str] = Field( + None, description="", alias="dbtEnvironmentName" + ) + dbt_environment_dbt_version: Optional[str] = Field( + None, description="", alias="dbtEnvironmentDbtVersion" + ) + dbt_tags: Optional[set[str]] = Field(None, description="", alias="dbtTags") + dbt_connection_context: Optional[str] = Field( + None, description="", alias="dbtConnectionContext" + ) + dbt_semantic_layer_proxy_url: Optional[str] = Field( + None, description="", alias="dbtSemanticLayerProxyUrl" + ) + inputs: Optional[list[Catalog]] = Field(None, description="", alias="inputs") + outputs: Optional[list[Catalog]] = Field(None, description="", alias="outputs") + code: Optional[str] = Field(None, description="", alias="code") + sql: Optional[str] = Field(None, description="", alias="sql") + ast: Optional[str] = Field(None, description="", alias="ast") + matillion_component: Optional[MatillionComponent] = Field( + None, description="", alias="matillionComponent" + ) # relationship + process: Optional[Process] = Field( + None, description="", alias="process" + ) # relationship + airflow_tasks: Optional[list[AirflowTask]] = Field( + None, description="", alias="airflowTasks" + ) # relationship + column_processes: Optional[list[ColumnProcess]] = Field( + None, description="", alias="columnProcesses" + ) # relationship + + attributes: "DbtColumnProcess.Attributes" = Field( + default_factory=lambda: DbtColumnProcess.Attributes(), description="Map of attributes in the instance and their 
values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -S3.Attributes.update_forward_refs() +DbtColumnProcess.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset35.py b/pyatlan/model/assets/asset35.py index 08fa43e1e..2f7b1d0c6 100644 --- a/pyatlan/model/assets/asset35.py +++ b/pyatlan/model/assets/asset35.py @@ -9,150 +9,230 @@ from pydantic import Field, validator from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField -from pyatlan.model.structs import AzureTag +from pyatlan.model.structs import AwsTag -from .asset16 import ObjectStore +from .asset17 import ObjectStore -class ADLS(ObjectStore): +class S3(ObjectStore): """Description""" - type_name: str = Field("ADLS", allow_mutation=False) + type_name: str = Field("S3", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "ADLS": - raise ValueError("must be ADLS") + if v != "S3": + raise ValueError("must be S3") return v def __setattr__(self, name, value): - if name in ADLS._convenience_properties: + if name in S3._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - ADLS_ACCOUNT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "adlsAccountQualifiedName", - "adlsAccountQualifiedName", - "adlsAccountQualifiedName.text", + S3E_TAG: ClassVar[KeywordTextField] = KeywordTextField( + "s3ETag", "s3ETag", "s3ETag.text" ) """ - TBC + Entity tag for the asset. An entity tag is a hash of the object and represents changes to the contents of an object only, not its metadata. + """ # noqa: E501 + S3ENCRYPTION: ClassVar[KeywordField] = KeywordField("s3Encryption", "s3Encryption") """ - AZURE_RESOURCE_ID: ClassVar[KeywordTextField] = KeywordTextField( - "azureResourceId", "azureResourceId", "azureResourceId.text" + + """ + AWS_ARN: ClassVar[KeywordTextField] = KeywordTextField( + "awsArn", "awsArn", "awsArn.text" + ) + """ + Amazon Resource Name (ARN) for this asset. This uniquely identifies the asset in AWS, and thus must be unique across all AWS asset instances. + """ # noqa: E501 + AWS_PARTITION: ClassVar[KeywordField] = KeywordField("awsPartition", "awsPartition") + """ + Group of AWS region and service objects. + """ + AWS_SERVICE: ClassVar[KeywordField] = KeywordField("awsService", "awsService") + """ + Type of service in which the asset exists. + """ + AWS_REGION: ClassVar[KeywordField] = KeywordField("awsRegion", "awsRegion") + """ + Physical region where the data center in which the asset exists is clustered. + """ + AWS_ACCOUNT_ID: ClassVar[KeywordField] = KeywordField( + "awsAccountId", "awsAccountId" ) """ - TBC + 12-digit number that uniquely identifies an AWS account. """ - AZURE_LOCATION: ClassVar[KeywordField] = KeywordField( - "azureLocation", "azureLocation" + AWS_RESOURCE_ID: ClassVar[KeywordField] = KeywordField( + "awsResourceId", "awsResourceId" ) """ - TBC + Unique resource ID assigned when a new resource is created. """ - ADLS_ACCOUNT_SECONDARY_LOCATION: ClassVar[KeywordField] = KeywordField( - "adlsAccountSecondaryLocation", "adlsAccountSecondaryLocation" + AWS_OWNER_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "awsOwnerName", "awsOwnerName", "awsOwnerName.text" ) """ - TBC + Root user's name. + """ + AWS_OWNER_ID: ClassVar[KeywordField] = KeywordField("awsOwnerId", "awsOwnerId") + """ + Root user's ID. 
""" - AZURE_TAGS: ClassVar[KeywordField] = KeywordField("azureTags", "azureTags") + AWS_TAGS: ClassVar[KeywordField] = KeywordField("awsTags", "awsTags") """ - TBC + List of tags that have been applied to the asset in AWS. """ _convenience_properties: ClassVar[list[str]] = [ - "adls_account_qualified_name", - "azure_resource_id", - "azure_location", - "adls_account_secondary_location", - "azure_tags", + "s3_e_tag", + "s3_encryption", + "aws_arn", + "aws_partition", + "aws_service", + "aws_region", + "aws_account_id", + "aws_resource_id", + "aws_owner_name", + "aws_owner_id", + "aws_tags", ] @property - def adls_account_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_account_qualified_name - ) + def s3_e_tag(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.s3_e_tag - @adls_account_qualified_name.setter - def adls_account_qualified_name(self, adls_account_qualified_name: Optional[str]): + @s3_e_tag.setter + def s3_e_tag(self, s3_e_tag: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.adls_account_qualified_name = adls_account_qualified_name + self.attributes.s3_e_tag = s3_e_tag @property - def azure_resource_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.azure_resource_id + def s3_encryption(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.s3_encryption - @azure_resource_id.setter - def azure_resource_id(self, azure_resource_id: Optional[str]): + @s3_encryption.setter + def s3_encryption(self, s3_encryption: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.azure_resource_id = azure_resource_id + self.attributes.s3_encryption = s3_encryption @property - def azure_location(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.azure_location + def aws_arn(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.aws_arn - @azure_location.setter - def azure_location(self, azure_location: Optional[str]): + @aws_arn.setter + def aws_arn(self, aws_arn: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.azure_location = azure_location + self.attributes.aws_arn = aws_arn @property - def adls_account_secondary_location(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_account_secondary_location - ) + def aws_partition(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.aws_partition - @adls_account_secondary_location.setter - def adls_account_secondary_location( - self, adls_account_secondary_location: Optional[str] - ): + @aws_partition.setter + def aws_partition(self, aws_partition: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.adls_account_secondary_location = ( - adls_account_secondary_location - ) + self.attributes.aws_partition = aws_partition + + @property + def aws_service(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.aws_service + + @aws_service.setter + def aws_service(self, aws_service: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.aws_service = aws_service @property - def azure_tags(self) -> Optional[list[AzureTag]]: - return None if self.attributes is None else 
self.attributes.azure_tags + def aws_region(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.aws_region - @azure_tags.setter - def azure_tags(self, azure_tags: Optional[list[AzureTag]]): + @aws_region.setter + def aws_region(self, aws_region: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.azure_tags = azure_tags + self.attributes.aws_region = aws_region + + @property + def aws_account_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.aws_account_id + + @aws_account_id.setter + def aws_account_id(self, aws_account_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.aws_account_id = aws_account_id + + @property + def aws_resource_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.aws_resource_id + + @aws_resource_id.setter + def aws_resource_id(self, aws_resource_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.aws_resource_id = aws_resource_id + + @property + def aws_owner_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.aws_owner_name + + @aws_owner_name.setter + def aws_owner_name(self, aws_owner_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.aws_owner_name = aws_owner_name + + @property + def aws_owner_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.aws_owner_id + + @aws_owner_id.setter + def aws_owner_id(self, aws_owner_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.aws_owner_id = aws_owner_id + + @property + def aws_tags(self) -> Optional[list[AwsTag]]: + return None if self.attributes is None else self.attributes.aws_tags + + @aws_tags.setter + def aws_tags(self, aws_tags: Optional[list[AwsTag]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.aws_tags = aws_tags class Attributes(ObjectStore.Attributes): - adls_account_qualified_name: Optional[str] = Field( - None, description="", alias="adlsAccountQualifiedName" - ) - azure_resource_id: Optional[str] = Field( - None, description="", alias="azureResourceId" - ) - azure_location: Optional[str] = Field( - None, description="", alias="azureLocation" + s3_e_tag: Optional[str] = Field(None, description="", alias="s3ETag") + s3_encryption: Optional[str] = Field(None, description="", alias="s3Encryption") + aws_arn: Optional[str] = Field(None, description="", alias="awsArn") + aws_partition: Optional[str] = Field(None, description="", alias="awsPartition") + aws_service: Optional[str] = Field(None, description="", alias="awsService") + aws_region: Optional[str] = Field(None, description="", alias="awsRegion") + aws_account_id: Optional[str] = Field( + None, description="", alias="awsAccountId" ) - adls_account_secondary_location: Optional[str] = Field( - None, description="", alias="adlsAccountSecondaryLocation" + aws_resource_id: Optional[str] = Field( + None, description="", alias="awsResourceId" ) - azure_tags: Optional[list[AzureTag]] = Field( - None, description="", alias="azureTags" + aws_owner_name: Optional[str] = Field( + None, description="", alias="awsOwnerName" ) + aws_owner_id: Optional[str] = Field(None, description="", alias="awsOwnerId") + aws_tags: Optional[list[AwsTag]] = Field(None, description="", alias="awsTags") 
- attributes: "ADLS.Attributes" = Field( - default_factory=lambda: ADLS.Attributes(), + attributes: "S3.Attributes" = Field( + default_factory=lambda: S3.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -ADLS.Attributes.update_forward_refs() +S3.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset36.py b/pyatlan/model/assets/asset36.py index 4c3781f75..4c7e1275a 100644 --- a/pyatlan/model/assets/asset36.py +++ b/pyatlan/model/assets/asset36.py @@ -8,413 +8,151 @@ from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - KeywordTextField, - NumericField, - RelationField, -) -from pyatlan.model.structs import GoogleLabel, GoogleTag +from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField +from pyatlan.model.structs import AzureTag -from .asset00 import AirflowTask, Process -from .asset30 import Google +from .asset17 import ObjectStore -class GCS(Google): +class ADLS(ObjectStore): """Description""" - type_name: str = Field("GCS", allow_mutation=False) + type_name: str = Field("ADLS", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "GCS": - raise ValueError("must be GCS") + if v != "ADLS": + raise ValueError("must be ADLS") return v def __setattr__(self, name, value): - if name in GCS._convenience_properties: + if name in ADLS._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - GCS_STORAGE_CLASS: ClassVar[KeywordField] = KeywordField( - "gcsStorageClass", "gcsStorageClass" + ADLS_ACCOUNT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "adlsAccountQualifiedName", + "adlsAccountQualifiedName", + "adlsAccountQualifiedName.text", ) """ - TBC + Unique name of the account for this ADLS asset. """ - GCS_ENCRYPTION_TYPE: ClassVar[KeywordField] = KeywordField( - "gcsEncryptionType", "gcsEncryptionType" + AZURE_RESOURCE_ID: ClassVar[KeywordTextField] = KeywordTextField( + "azureResourceId", "azureResourceId", "azureResourceId.text" ) """ - TBC + Resource identifier of this asset in Azure. """ - GCS_E_TAG: ClassVar[KeywordField] = KeywordField("gcsETag", "gcsETag") - """ - TBC - """ - GCS_REQUESTER_PAYS: ClassVar[BooleanField] = BooleanField( - "gcsRequesterPays", "gcsRequesterPays" - ) - """ - TBC - """ - GCS_ACCESS_CONTROL: ClassVar[KeywordField] = KeywordField( - "gcsAccessControl", "gcsAccessControl" - ) - """ - TBC - """ - GCS_META_GENERATION_ID: ClassVar[NumericField] = NumericField( - "gcsMetaGenerationId", "gcsMetaGenerationId" - ) - """ - TBC - """ - GOOGLE_SERVICE: ClassVar[KeywordField] = KeywordField( - "googleService", "googleService" - ) - """ - TBC - """ - GOOGLE_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "googleProjectName", "googleProjectName", "googleProjectName.text" - ) - """ - TBC - """ - GOOGLE_PROJECT_ID: ClassVar[KeywordTextField] = KeywordTextField( - "googleProjectId", "googleProjectId", "googleProjectId.text" - ) - """ - TBC - """ - GOOGLE_PROJECT_NUMBER: ClassVar[NumericField] = NumericField( - "googleProjectNumber", "googleProjectNumber" - ) - """ - TBC - """ - GOOGLE_LOCATION: ClassVar[KeywordField] = KeywordField( - "googleLocation", "googleLocation" + AZURE_LOCATION: ClassVar[KeywordField] = KeywordField( + "azureLocation", "azureLocation" ) """ - TBC + Location of this asset in Azure. 
""" - GOOGLE_LOCATION_TYPE: ClassVar[KeywordField] = KeywordField( - "googleLocationType", "googleLocationType" + ADLS_ACCOUNT_SECONDARY_LOCATION: ClassVar[KeywordField] = KeywordField( + "adlsAccountSecondaryLocation", "adlsAccountSecondaryLocation" ) """ - TBC + Secondary location of the ADLS account. """ - GOOGLE_LABELS: ClassVar[KeywordField] = KeywordField("googleLabels", "googleLabels") + AZURE_TAGS: ClassVar[KeywordField] = KeywordField("azureTags", "azureTags") """ - TBC - """ - GOOGLE_TAGS: ClassVar[KeywordField] = KeywordField("googleTags", "googleTags") - """ - TBC - """ - - INPUT_TO_PROCESSES: ClassVar[RelationField] = RelationField("inputToProcesses") - """ - TBC - """ - OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( - "outputFromAirflowTasks" - ) - """ - TBC - """ - INPUT_TO_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( - "inputToAirflowTasks" - ) - """ - TBC - """ - OUTPUT_FROM_PROCESSES: ClassVar[RelationField] = RelationField( - "outputFromProcesses" - ) - """ - TBC + Tags that have been applied to this asset in Azure. """ _convenience_properties: ClassVar[list[str]] = [ - "gcs_storage_class", - "gcs_encryption_type", - "gcs_e_tag", - "gcs_requester_pays", - "gcs_access_control", - "gcs_meta_generation_id", - "google_service", - "google_project_name", - "google_project_id", - "google_project_number", - "google_location", - "google_location_type", - "google_labels", - "google_tags", - "input_to_processes", - "output_from_airflow_tasks", - "input_to_airflow_tasks", - "output_from_processes", + "adls_account_qualified_name", + "azure_resource_id", + "azure_location", + "adls_account_secondary_location", + "azure_tags", ] @property - def gcs_storage_class(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.gcs_storage_class - - @gcs_storage_class.setter - def gcs_storage_class(self, gcs_storage_class: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_storage_class = gcs_storage_class - - @property - def gcs_encryption_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.gcs_encryption_type - - @gcs_encryption_type.setter - def gcs_encryption_type(self, gcs_encryption_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_encryption_type = gcs_encryption_type - - @property - def gcs_e_tag(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.gcs_e_tag - - @gcs_e_tag.setter - def gcs_e_tag(self, gcs_e_tag: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_e_tag = gcs_e_tag - - @property - def gcs_requester_pays(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.gcs_requester_pays - - @gcs_requester_pays.setter - def gcs_requester_pays(self, gcs_requester_pays: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_requester_pays = gcs_requester_pays - - @property - def gcs_access_control(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.gcs_access_control - - @gcs_access_control.setter - def gcs_access_control(self, gcs_access_control: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_access_control = gcs_access_control - - @property - def gcs_meta_generation_id(self) -> Optional[int]: + def 
adls_account_qualified_name(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.gcs_meta_generation_id + None + if self.attributes is None + else self.attributes.adls_account_qualified_name ) - @gcs_meta_generation_id.setter - def gcs_meta_generation_id(self, gcs_meta_generation_id: Optional[int]): + @adls_account_qualified_name.setter + def adls_account_qualified_name(self, adls_account_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.gcs_meta_generation_id = gcs_meta_generation_id + self.attributes.adls_account_qualified_name = adls_account_qualified_name @property - def google_service(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_service + def azure_resource_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.azure_resource_id - @google_service.setter - def google_service(self, google_service: Optional[str]): + @azure_resource_id.setter + def azure_resource_id(self, azure_resource_id: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.google_service = google_service + self.attributes.azure_resource_id = azure_resource_id @property - def google_project_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_project_name + def azure_location(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.azure_location - @google_project_name.setter - def google_project_name(self, google_project_name: Optional[str]): + @azure_location.setter + def azure_location(self, azure_location: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.google_project_name = google_project_name + self.attributes.azure_location = azure_location @property - def google_project_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_project_id - - @google_project_id.setter - def google_project_id(self, google_project_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_project_id = google_project_id - - @property - def google_project_number(self) -> Optional[int]: - return ( - None if self.attributes is None else self.attributes.google_project_number - ) - - @google_project_number.setter - def google_project_number(self, google_project_number: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_project_number = google_project_number - - @property - def google_location(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_location - - @google_location.setter - def google_location(self, google_location: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_location = google_location - - @property - def google_location_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_location_type - - @google_location_type.setter - def google_location_type(self, google_location_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_location_type = google_location_type - - @property - def google_labels(self) -> Optional[list[GoogleLabel]]: - return None if self.attributes is None else self.attributes.google_labels - - @google_labels.setter - 
def google_labels(self, google_labels: Optional[list[GoogleLabel]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_labels = google_labels - - @property - def google_tags(self) -> Optional[list[GoogleTag]]: - return None if self.attributes is None else self.attributes.google_tags - - @google_tags.setter - def google_tags(self, google_tags: Optional[list[GoogleTag]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_tags = google_tags - - @property - def input_to_processes(self) -> Optional[list[Process]]: - return None if self.attributes is None else self.attributes.input_to_processes - - @input_to_processes.setter - def input_to_processes(self, input_to_processes: Optional[list[Process]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.input_to_processes = input_to_processes - - @property - def output_from_airflow_tasks(self) -> Optional[list[AirflowTask]]: + def adls_account_secondary_location(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.output_from_airflow_tasks + else self.attributes.adls_account_secondary_location ) - @output_from_airflow_tasks.setter - def output_from_airflow_tasks( - self, output_from_airflow_tasks: Optional[list[AirflowTask]] + @adls_account_secondary_location.setter + def adls_account_secondary_location( + self, adls_account_secondary_location: Optional[str] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.output_from_airflow_tasks = output_from_airflow_tasks - - @property - def input_to_airflow_tasks(self) -> Optional[list[AirflowTask]]: - return ( - None if self.attributes is None else self.attributes.input_to_airflow_tasks + self.attributes.adls_account_secondary_location = ( + adls_account_secondary_location ) - @input_to_airflow_tasks.setter - def input_to_airflow_tasks( - self, input_to_airflow_tasks: Optional[list[AirflowTask]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.input_to_airflow_tasks = input_to_airflow_tasks - @property - def output_from_processes(self) -> Optional[list[Process]]: - return ( - None if self.attributes is None else self.attributes.output_from_processes - ) + def azure_tags(self) -> Optional[list[AzureTag]]: + return None if self.attributes is None else self.attributes.azure_tags - @output_from_processes.setter - def output_from_processes(self, output_from_processes: Optional[list[Process]]): + @azure_tags.setter + def azure_tags(self, azure_tags: Optional[list[AzureTag]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.output_from_processes = output_from_processes + self.attributes.azure_tags = azure_tags - class Attributes(Google.Attributes): - gcs_storage_class: Optional[str] = Field( - None, description="", alias="gcsStorageClass" - ) - gcs_encryption_type: Optional[str] = Field( - None, description="", alias="gcsEncryptionType" - ) - gcs_e_tag: Optional[str] = Field(None, description="", alias="gcsETag") - gcs_requester_pays: Optional[bool] = Field( - None, description="", alias="gcsRequesterPays" - ) - gcs_access_control: Optional[str] = Field( - None, description="", alias="gcsAccessControl" - ) - gcs_meta_generation_id: Optional[int] = Field( - None, description="", alias="gcsMetaGenerationId" - ) - google_service: Optional[str] = Field( - None, description="", alias="googleService" - ) - google_project_name: Optional[str] = Field( - None, 
description="", alias="googleProjectName" - ) - google_project_id: Optional[str] = Field( - None, description="", alias="googleProjectId" - ) - google_project_number: Optional[int] = Field( - None, description="", alias="googleProjectNumber" + class Attributes(ObjectStore.Attributes): + adls_account_qualified_name: Optional[str] = Field( + None, description="", alias="adlsAccountQualifiedName" ) - google_location: Optional[str] = Field( - None, description="", alias="googleLocation" + azure_resource_id: Optional[str] = Field( + None, description="", alias="azureResourceId" ) - google_location_type: Optional[str] = Field( - None, description="", alias="googleLocationType" + azure_location: Optional[str] = Field( + None, description="", alias="azureLocation" ) - google_labels: Optional[list[GoogleLabel]] = Field( - None, description="", alias="googleLabels" + adls_account_secondary_location: Optional[str] = Field( + None, description="", alias="adlsAccountSecondaryLocation" ) - google_tags: Optional[list[GoogleTag]] = Field( - None, description="", alias="googleTags" + azure_tags: Optional[list[AzureTag]] = Field( + None, description="", alias="azureTags" ) - input_to_processes: Optional[list[Process]] = Field( - None, description="", alias="inputToProcesses" - ) # relationship - output_from_airflow_tasks: Optional[list[AirflowTask]] = Field( - None, description="", alias="outputFromAirflowTasks" - ) # relationship - input_to_airflow_tasks: Optional[list[AirflowTask]] = Field( - None, description="", alias="inputToAirflowTasks" - ) # relationship - output_from_processes: Optional[list[Process]] = Field( - None, description="", alias="outputFromProcesses" - ) # relationship - attributes: "GCS.Attributes" = Field( - default_factory=lambda: GCS.Attributes(), + attributes: "ADLS.Attributes" = Field( + default_factory=lambda: ADLS.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -GCS.Attributes.update_forward_refs() +ADLS.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset37.py b/pyatlan/model/assets/asset37.py new file mode 100644 index 000000000..902767c84 --- /dev/null +++ b/pyatlan/model/assets/asset37.py @@ -0,0 +1,420 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) +from pyatlan.model.structs import GoogleLabel, GoogleTag + +from .asset00 import AirflowTask, Process +from .asset31 import Google + + +class GCS(Google): + """Description""" + + type_name: str = Field("GCS", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "GCS": + raise ValueError("must be GCS") + return v + + def __setattr__(self, name, value): + if name in GCS._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + GCS_STORAGE_CLASS: ClassVar[KeywordField] = KeywordField( + "gcsStorageClass", "gcsStorageClass" + ) + """ + Storage class of this asset. + """ + GCS_ENCRYPTION_TYPE: ClassVar[KeywordField] = KeywordField( + "gcsEncryptionType", "gcsEncryptionType" + ) + """ + Encryption algorithm used to encrypt this asset. 
+ """ + GCS_E_TAG: ClassVar[KeywordField] = KeywordField("gcsETag", "gcsETag") + """ + Entity tag for the asset. An entity tag is a hash of the object and represents changes to the contents of an object only, not its metadata. + """ # noqa: E501 + GCS_REQUESTER_PAYS: ClassVar[BooleanField] = BooleanField( + "gcsRequesterPays", "gcsRequesterPays" + ) + """ + Whether the requester pays header was sent when this asset was created (true) or not (false). + """ + GCS_ACCESS_CONTROL: ClassVar[KeywordField] = KeywordField( + "gcsAccessControl", "gcsAccessControl" + ) + """ + Access control list for this asset. + """ + GCS_META_GENERATION_ID: ClassVar[NumericField] = NumericField( + "gcsMetaGenerationId", "gcsMetaGenerationId" + ) + """ + Version of metadata for this asset at this generation. Used for preconditions and detecting changes in metadata. A metageneration number is only meaningful in the context of a particular generation of a particular asset. + """ # noqa: E501 + GOOGLE_SERVICE: ClassVar[KeywordField] = KeywordField( + "googleService", "googleService" + ) + """ + Service in Google in which the asset exists. + """ + GOOGLE_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "googleProjectName", "googleProjectName", "googleProjectName.text" + ) + """ + Name of the project in which the asset exists. + """ + GOOGLE_PROJECT_ID: ClassVar[KeywordTextField] = KeywordTextField( + "googleProjectId", "googleProjectId", "googleProjectId.text" + ) + """ + ID of the project in which the asset exists. + """ + GOOGLE_PROJECT_NUMBER: ClassVar[NumericField] = NumericField( + "googleProjectNumber", "googleProjectNumber" + ) + """ + Number of the project in which the asset exists. + """ + GOOGLE_LOCATION: ClassVar[KeywordField] = KeywordField( + "googleLocation", "googleLocation" + ) + """ + Location of this asset in Google. + """ + GOOGLE_LOCATION_TYPE: ClassVar[KeywordField] = KeywordField( + "googleLocationType", "googleLocationType" + ) + """ + Type of location of this asset in Google. + """ + GOOGLE_LABELS: ClassVar[KeywordField] = KeywordField("googleLabels", "googleLabels") + """ + List of labels that have been applied to the asset in Google. + """ + GOOGLE_TAGS: ClassVar[KeywordField] = KeywordField("googleTags", "googleTags") + """ + List of tags that have been applied to the asset in Google. 
+ """ + + INPUT_TO_PROCESSES: ClassVar[RelationField] = RelationField("inputToProcesses") + """ + TBC + """ + OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( + "outputFromAirflowTasks" + ) + """ + TBC + """ + INPUT_TO_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( + "inputToAirflowTasks" + ) + """ + TBC + """ + OUTPUT_FROM_PROCESSES: ClassVar[RelationField] = RelationField( + "outputFromProcesses" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "gcs_storage_class", + "gcs_encryption_type", + "gcs_e_tag", + "gcs_requester_pays", + "gcs_access_control", + "gcs_meta_generation_id", + "google_service", + "google_project_name", + "google_project_id", + "google_project_number", + "google_location", + "google_location_type", + "google_labels", + "google_tags", + "input_to_processes", + "output_from_airflow_tasks", + "input_to_airflow_tasks", + "output_from_processes", + ] + + @property + def gcs_storage_class(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.gcs_storage_class + + @gcs_storage_class.setter + def gcs_storage_class(self, gcs_storage_class: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_storage_class = gcs_storage_class + + @property + def gcs_encryption_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.gcs_encryption_type + + @gcs_encryption_type.setter + def gcs_encryption_type(self, gcs_encryption_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_encryption_type = gcs_encryption_type + + @property + def gcs_e_tag(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.gcs_e_tag + + @gcs_e_tag.setter + def gcs_e_tag(self, gcs_e_tag: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_e_tag = gcs_e_tag + + @property + def gcs_requester_pays(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.gcs_requester_pays + + @gcs_requester_pays.setter + def gcs_requester_pays(self, gcs_requester_pays: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_requester_pays = gcs_requester_pays + + @property + def gcs_access_control(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.gcs_access_control + + @gcs_access_control.setter + def gcs_access_control(self, gcs_access_control: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_access_control = gcs_access_control + + @property + def gcs_meta_generation_id(self) -> Optional[int]: + return ( + None if self.attributes is None else self.attributes.gcs_meta_generation_id + ) + + @gcs_meta_generation_id.setter + def gcs_meta_generation_id(self, gcs_meta_generation_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_meta_generation_id = gcs_meta_generation_id + + @property + def google_service(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_service + + @google_service.setter + def google_service(self, google_service: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_service = google_service + + @property + def google_project_name(self) -> Optional[str]: + return None if 
self.attributes is None else self.attributes.google_project_name + + @google_project_name.setter + def google_project_name(self, google_project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_project_name = google_project_name + + @property + def google_project_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_project_id + + @google_project_id.setter + def google_project_id(self, google_project_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_project_id = google_project_id + + @property + def google_project_number(self) -> Optional[int]: + return ( + None if self.attributes is None else self.attributes.google_project_number + ) + + @google_project_number.setter + def google_project_number(self, google_project_number: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_project_number = google_project_number + + @property + def google_location(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_location + + @google_location.setter + def google_location(self, google_location: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_location = google_location + + @property + def google_location_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_location_type + + @google_location_type.setter + def google_location_type(self, google_location_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_location_type = google_location_type + + @property + def google_labels(self) -> Optional[list[GoogleLabel]]: + return None if self.attributes is None else self.attributes.google_labels + + @google_labels.setter + def google_labels(self, google_labels: Optional[list[GoogleLabel]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_labels = google_labels + + @property + def google_tags(self) -> Optional[list[GoogleTag]]: + return None if self.attributes is None else self.attributes.google_tags + + @google_tags.setter + def google_tags(self, google_tags: Optional[list[GoogleTag]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_tags = google_tags + + @property + def input_to_processes(self) -> Optional[list[Process]]: + return None if self.attributes is None else self.attributes.input_to_processes + + @input_to_processes.setter + def input_to_processes(self, input_to_processes: Optional[list[Process]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.input_to_processes = input_to_processes + + @property + def output_from_airflow_tasks(self) -> Optional[list[AirflowTask]]: + return ( + None + if self.attributes is None + else self.attributes.output_from_airflow_tasks + ) + + @output_from_airflow_tasks.setter + def output_from_airflow_tasks( + self, output_from_airflow_tasks: Optional[list[AirflowTask]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.output_from_airflow_tasks = output_from_airflow_tasks + + @property + def input_to_airflow_tasks(self) -> Optional[list[AirflowTask]]: + return ( + None if self.attributes is None else self.attributes.input_to_airflow_tasks + ) + + @input_to_airflow_tasks.setter + def 
input_to_airflow_tasks( + self, input_to_airflow_tasks: Optional[list[AirflowTask]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.input_to_airflow_tasks = input_to_airflow_tasks + + @property + def output_from_processes(self) -> Optional[list[Process]]: + return ( + None if self.attributes is None else self.attributes.output_from_processes + ) + + @output_from_processes.setter + def output_from_processes(self, output_from_processes: Optional[list[Process]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.output_from_processes = output_from_processes + + class Attributes(Google.Attributes): + gcs_storage_class: Optional[str] = Field( + None, description="", alias="gcsStorageClass" + ) + gcs_encryption_type: Optional[str] = Field( + None, description="", alias="gcsEncryptionType" + ) + gcs_e_tag: Optional[str] = Field(None, description="", alias="gcsETag") + gcs_requester_pays: Optional[bool] = Field( + None, description="", alias="gcsRequesterPays" + ) + gcs_access_control: Optional[str] = Field( + None, description="", alias="gcsAccessControl" + ) + gcs_meta_generation_id: Optional[int] = Field( + None, description="", alias="gcsMetaGenerationId" + ) + google_service: Optional[str] = Field( + None, description="", alias="googleService" + ) + google_project_name: Optional[str] = Field( + None, description="", alias="googleProjectName" + ) + google_project_id: Optional[str] = Field( + None, description="", alias="googleProjectId" + ) + google_project_number: Optional[int] = Field( + None, description="", alias="googleProjectNumber" + ) + google_location: Optional[str] = Field( + None, description="", alias="googleLocation" + ) + google_location_type: Optional[str] = Field( + None, description="", alias="googleLocationType" + ) + google_labels: Optional[list[GoogleLabel]] = Field( + None, description="", alias="googleLabels" + ) + google_tags: Optional[list[GoogleTag]] = Field( + None, description="", alias="googleTags" + ) + input_to_processes: Optional[list[Process]] = Field( + None, description="", alias="inputToProcesses" + ) # relationship + output_from_airflow_tasks: Optional[list[AirflowTask]] = Field( + None, description="", alias="outputFromAirflowTasks" + ) # relationship + input_to_airflow_tasks: Optional[list[AirflowTask]] = Field( + None, description="", alias="inputToAirflowTasks" + ) # relationship + output_from_processes: Optional[list[Process]] = Field( + None, description="", alias="outputFromProcesses" + ) # relationship + + attributes: "GCS.Attributes" = Field( + default_factory=lambda: GCS.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +GCS.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset39.py b/pyatlan/model/assets/asset39.py deleted file mode 100644 index abf3b8803..000000000 --- a/pyatlan/model/assets/asset39.py +++ /dev/null @@ -1,145 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. 
- - -from __future__ import annotations - -from typing import ClassVar, Optional - -from pydantic import Field, validator - -from pyatlan.model.fields.atlan_fields import KeywordTextField, NumericField - -from .asset18 import BI - - -class Preset(BI): - """Description""" - - type_name: str = Field("Preset", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Preset": - raise ValueError("must be Preset") - return v - - def __setattr__(self, name, value): - if name in Preset._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - PRESET_WORKSPACE_ID: ClassVar[NumericField] = NumericField( - "presetWorkspaceId", "presetWorkspaceId" - ) - """ - TBC - """ - PRESET_WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "presetWorkspaceQualifiedName", - "presetWorkspaceQualifiedName", - "presetWorkspaceQualifiedName.text", - ) - """ - TBC - """ - PRESET_DASHBOARD_ID: ClassVar[NumericField] = NumericField( - "presetDashboardId", "presetDashboardId" - ) - """ - TBC - """ - PRESET_DASHBOARD_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "presetDashboardQualifiedName", - "presetDashboardQualifiedName", - "presetDashboardQualifiedName.text", - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "preset_workspace_id", - "preset_workspace_qualified_name", - "preset_dashboard_id", - "preset_dashboard_qualified_name", - ] - - @property - def preset_workspace_id(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.preset_workspace_id - - @preset_workspace_id.setter - def preset_workspace_id(self, preset_workspace_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_workspace_id = preset_workspace_id - - @property - def preset_workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.preset_workspace_qualified_name - ) - - @preset_workspace_qualified_name.setter - def preset_workspace_qualified_name( - self, preset_workspace_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_workspace_qualified_name = ( - preset_workspace_qualified_name - ) - - @property - def preset_dashboard_id(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.preset_dashboard_id - - @preset_dashboard_id.setter - def preset_dashboard_id(self, preset_dashboard_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_dashboard_id = preset_dashboard_id - - @property - def preset_dashboard_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.preset_dashboard_qualified_name - ) - - @preset_dashboard_qualified_name.setter - def preset_dashboard_qualified_name( - self, preset_dashboard_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_dashboard_qualified_name = ( - preset_dashboard_qualified_name - ) - - class Attributes(BI.Attributes): - preset_workspace_id: Optional[int] = Field( - None, description="", alias="presetWorkspaceId" - ) - preset_workspace_qualified_name: Optional[str] = Field( - None, description="", alias="presetWorkspaceQualifiedName" - ) - preset_dashboard_id: Optional[int] = Field( - None, description="", 
alias="presetDashboardId" - ) - preset_dashboard_qualified_name: Optional[str] = Field( - None, description="", alias="presetDashboardQualifiedName" - ) - - attributes: "Preset.Attributes" = Field( - default_factory=lambda: Preset.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -Preset.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset40.py b/pyatlan/model/assets/asset40.py index e821b5d8a..060e68d98 100644 --- a/pyatlan/model/assets/asset40.py +++ b/pyatlan/model/assets/asset40.py @@ -8,234 +8,138 @@ from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField +from pyatlan.model.fields.atlan_fields import KeywordTextField, NumericField -from .asset18 import BI +from .asset19 import BI -class Mode(BI): +class Preset(BI): """Description""" - type_name: str = Field("Mode", allow_mutation=False) + type_name: str = Field("Preset", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Mode": - raise ValueError("must be Mode") + if v != "Preset": + raise ValueError("must be Preset") return v def __setattr__(self, name, value): - if name in Mode._convenience_properties: + if name in Preset._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - MODE_ID: ClassVar[KeywordField] = KeywordField("modeId", "modeId") - """ - TBC - """ - MODE_TOKEN: ClassVar[KeywordTextField] = KeywordTextField( - "modeToken", "modeToken", "modeToken.text" + PRESET_WORKSPACE_ID: ClassVar[NumericField] = NumericField( + "presetWorkspaceId", "presetWorkspaceId" ) """ - TBC + Identifier of the workspace in which this asset exists, in Preset. """ - MODE_WORKSPACE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "modeWorkspaceName", "modeWorkspaceName.keyword", "modeWorkspaceName" + PRESET_WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "presetWorkspaceQualifiedName", + "presetWorkspaceQualifiedName", + "presetWorkspaceQualifiedName.text", ) """ - TBC + Unique name of the workspace in which this asset exists. """ - MODE_WORKSPACE_USERNAME: ClassVar[KeywordTextField] = KeywordTextField( - "modeWorkspaceUsername", "modeWorkspaceUsername", "modeWorkspaceUsername.text" + PRESET_DASHBOARD_ID: ClassVar[NumericField] = NumericField( + "presetDashboardId", "presetDashboardId" ) """ - TBC + Identifier of the dashboard in which this asset exists, in Preset. 
""" - MODE_WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "modeWorkspaceQualifiedName", - "modeWorkspaceQualifiedName", - "modeWorkspaceQualifiedName.text", + PRESET_DASHBOARD_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "presetDashboardQualifiedName", + "presetDashboardQualifiedName", + "presetDashboardQualifiedName.text", ) """ - TBC - """ - MODE_REPORT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "modeReportName", "modeReportName.keyword", "modeReportName" - ) - """ - TBC - """ - MODE_REPORT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "modeReportQualifiedName", - "modeReportQualifiedName", - "modeReportQualifiedName.text", - ) - """ - TBC - """ - MODE_QUERY_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "modeQueryName", "modeQueryName.keyword", "modeQueryName" - ) - """ - TBC - """ - MODE_QUERY_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "modeQueryQualifiedName", - "modeQueryQualifiedName", - "modeQueryQualifiedName.text", - ) - """ - TBC + Unique name of the dashboard in which this asset exists. """ _convenience_properties: ClassVar[list[str]] = [ - "mode_id", - "mode_token", - "mode_workspace_name", - "mode_workspace_username", - "mode_workspace_qualified_name", - "mode_report_name", - "mode_report_qualified_name", - "mode_query_name", - "mode_query_qualified_name", + "preset_workspace_id", + "preset_workspace_qualified_name", + "preset_dashboard_id", + "preset_dashboard_qualified_name", ] @property - def mode_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mode_id - - @mode_id.setter - def mode_id(self, mode_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_id = mode_id - - @property - def mode_token(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mode_token + def preset_workspace_id(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.preset_workspace_id - @mode_token.setter - def mode_token(self, mode_token: Optional[str]): + @preset_workspace_id.setter + def preset_workspace_id(self, preset_workspace_id: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_token = mode_token + self.attributes.preset_workspace_id = preset_workspace_id @property - def mode_workspace_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mode_workspace_name - - @mode_workspace_name.setter - def mode_workspace_name(self, mode_workspace_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_workspace_name = mode_workspace_name - - @property - def mode_workspace_username(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.mode_workspace_username - ) - - @mode_workspace_username.setter - def mode_workspace_username(self, mode_workspace_username: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_workspace_username = mode_workspace_username - - @property - def mode_workspace_qualified_name(self) -> Optional[str]: + def preset_workspace_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.mode_workspace_qualified_name + else self.attributes.preset_workspace_qualified_name ) - @mode_workspace_qualified_name.setter - def 
mode_workspace_qualified_name( - self, mode_workspace_qualified_name: Optional[str] + @preset_workspace_qualified_name.setter + def preset_workspace_qualified_name( + self, preset_workspace_qualified_name: Optional[str] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_workspace_qualified_name = mode_workspace_qualified_name - - @property - def mode_report_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mode_report_name - - @mode_report_name.setter - def mode_report_name(self, mode_report_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_report_name = mode_report_name - - @property - def mode_report_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.mode_report_qualified_name + self.attributes.preset_workspace_qualified_name = ( + preset_workspace_qualified_name ) - @mode_report_qualified_name.setter - def mode_report_qualified_name(self, mode_report_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_report_qualified_name = mode_report_qualified_name - @property - def mode_query_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mode_query_name + def preset_dashboard_id(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.preset_dashboard_id - @mode_query_name.setter - def mode_query_name(self, mode_query_name: Optional[str]): + @preset_dashboard_id.setter + def preset_dashboard_id(self, preset_dashboard_id: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_query_name = mode_query_name + self.attributes.preset_dashboard_id = preset_dashboard_id @property - def mode_query_qualified_name(self) -> Optional[str]: + def preset_dashboard_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.mode_query_qualified_name + else self.attributes.preset_dashboard_qualified_name ) - @mode_query_qualified_name.setter - def mode_query_qualified_name(self, mode_query_qualified_name: Optional[str]): + @preset_dashboard_qualified_name.setter + def preset_dashboard_qualified_name( + self, preset_dashboard_qualified_name: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_query_qualified_name = mode_query_qualified_name + self.attributes.preset_dashboard_qualified_name = ( + preset_dashboard_qualified_name + ) class Attributes(BI.Attributes): - mode_id: Optional[str] = Field(None, description="", alias="modeId") - mode_token: Optional[str] = Field(None, description="", alias="modeToken") - mode_workspace_name: Optional[str] = Field( - None, description="", alias="modeWorkspaceName" - ) - mode_workspace_username: Optional[str] = Field( - None, description="", alias="modeWorkspaceUsername" - ) - mode_workspace_qualified_name: Optional[str] = Field( - None, description="", alias="modeWorkspaceQualifiedName" - ) - mode_report_name: Optional[str] = Field( - None, description="", alias="modeReportName" + preset_workspace_id: Optional[int] = Field( + None, description="", alias="presetWorkspaceId" ) - mode_report_qualified_name: Optional[str] = Field( - None, description="", alias="modeReportQualifiedName" + preset_workspace_qualified_name: Optional[str] = Field( + None, description="", 
alias="presetWorkspaceQualifiedName" ) - mode_query_name: Optional[str] = Field( - None, description="", alias="modeQueryName" + preset_dashboard_id: Optional[int] = Field( + None, description="", alias="presetDashboardId" ) - mode_query_qualified_name: Optional[str] = Field( - None, description="", alias="modeQueryQualifiedName" + preset_dashboard_qualified_name: Optional[str] = Field( + None, description="", alias="presetDashboardQualifiedName" ) - attributes: "Mode.Attributes" = Field( - default_factory=lambda: Mode.Attributes(), + attributes: "Preset.Attributes" = Field( + default_factory=lambda: Preset.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -Mode.Attributes.update_forward_refs() +Preset.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset41.py b/pyatlan/model/assets/asset41.py index 33a26abbe..c82c9a95a 100644 --- a/pyatlan/model/assets/asset41.py +++ b/pyatlan/model/assets/asset41.py @@ -8,184 +8,234 @@ from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import KeywordTextField +from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField -from .asset18 import BI +from .asset19 import BI -class Sigma(BI): +class Mode(BI): """Description""" - type_name: str = Field("Sigma", allow_mutation=False) + type_name: str = Field("Mode", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Sigma": - raise ValueError("must be Sigma") + if v != "Mode": + raise ValueError("must be Mode") return v def __setattr__(self, name, value): - if name in Sigma._convenience_properties: + if name in Mode._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - SIGMA_WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "sigmaWorkbookQualifiedName", - "sigmaWorkbookQualifiedName", - "sigmaWorkbookQualifiedName.text", + MODE_ID: ClassVar[KeywordField] = KeywordField("modeId", "modeId") + """ + + """ + MODE_TOKEN: ClassVar[KeywordTextField] = KeywordTextField( + "modeToken", "modeToken", "modeToken.text" ) """ - TBC + """ - SIGMA_WORKBOOK_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "sigmaWorkbookName", "sigmaWorkbookName.keyword", "sigmaWorkbookName" + MODE_WORKSPACE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "modeWorkspaceName", "modeWorkspaceName.keyword", "modeWorkspaceName" ) """ - TBC + """ - SIGMA_PAGE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "sigmaPageQualifiedName", - "sigmaPageQualifiedName", - "sigmaPageQualifiedName.text", + MODE_WORKSPACE_USERNAME: ClassVar[KeywordTextField] = KeywordTextField( + "modeWorkspaceUsername", "modeWorkspaceUsername", "modeWorkspaceUsername.text" ) """ - TBC + """ - SIGMA_PAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "sigmaPageName", "sigmaPageName.keyword", "sigmaPageName" + MODE_WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "modeWorkspaceQualifiedName", + "modeWorkspaceQualifiedName", + "modeWorkspaceQualifiedName.text", ) """ - TBC + """ - SIGMA_DATA_ELEMENT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "sigmaDataElementQualifiedName", - "sigmaDataElementQualifiedName", - "sigmaDataElementQualifiedName.text", + MODE_REPORT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "modeReportName", "modeReportName.keyword", "modeReportName" ) """ - TBC + """ 
- SIGMA_DATA_ELEMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "sigmaDataElementName", "sigmaDataElementName.keyword", "sigmaDataElementName" + MODE_REPORT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "modeReportQualifiedName", + "modeReportQualifiedName", + "modeReportQualifiedName.text", ) """ - TBC + + """ + MODE_QUERY_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "modeQueryName", "modeQueryName.keyword", "modeQueryName" + ) + """ + + """ + MODE_QUERY_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "modeQueryQualifiedName", + "modeQueryQualifiedName", + "modeQueryQualifiedName.text", + ) + """ + """ _convenience_properties: ClassVar[list[str]] = [ - "sigma_workbook_qualified_name", - "sigma_workbook_name", - "sigma_page_qualified_name", - "sigma_page_name", - "sigma_data_element_qualified_name", - "sigma_data_element_name", + "mode_id", + "mode_token", + "mode_workspace_name", + "mode_workspace_username", + "mode_workspace_qualified_name", + "mode_report_name", + "mode_report_qualified_name", + "mode_query_name", + "mode_query_qualified_name", ] @property - def sigma_workbook_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sigma_workbook_qualified_name - ) + def mode_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mode_id - @sigma_workbook_qualified_name.setter - def sigma_workbook_qualified_name( - self, sigma_workbook_qualified_name: Optional[str] - ): + @mode_id.setter + def mode_id(self, mode_id: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_workbook_qualified_name = sigma_workbook_qualified_name + self.attributes.mode_id = mode_id @property - def sigma_workbook_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.sigma_workbook_name + def mode_token(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mode_token - @sigma_workbook_name.setter - def sigma_workbook_name(self, sigma_workbook_name: Optional[str]): + @mode_token.setter + def mode_token(self, mode_token: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_workbook_name = sigma_workbook_name + self.attributes.mode_token = mode_token @property - def sigma_page_qualified_name(self) -> Optional[str]: + def mode_workspace_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mode_workspace_name + + @mode_workspace_name.setter + def mode_workspace_name(self, mode_workspace_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_workspace_name = mode_workspace_name + + @property + def mode_workspace_username(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.mode_workspace_username + ) + + @mode_workspace_username.setter + def mode_workspace_username(self, mode_workspace_username: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_workspace_username = mode_workspace_username + + @property + def mode_workspace_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.sigma_page_qualified_name + else self.attributes.mode_workspace_qualified_name ) - @sigma_page_qualified_name.setter - def sigma_page_qualified_name(self, sigma_page_qualified_name: Optional[str]): + 
@mode_workspace_qualified_name.setter + def mode_workspace_qualified_name( + self, mode_workspace_qualified_name: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_page_qualified_name = sigma_page_qualified_name + self.attributes.mode_workspace_qualified_name = mode_workspace_qualified_name @property - def sigma_page_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.sigma_page_name + def mode_report_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mode_report_name - @sigma_page_name.setter - def sigma_page_name(self, sigma_page_name: Optional[str]): + @mode_report_name.setter + def mode_report_name(self, mode_report_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_page_name = sigma_page_name + self.attributes.mode_report_name = mode_report_name @property - def sigma_data_element_qualified_name(self) -> Optional[str]: + def mode_report_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.sigma_data_element_qualified_name + else self.attributes.mode_report_qualified_name ) - @sigma_data_element_qualified_name.setter - def sigma_data_element_qualified_name( - self, sigma_data_element_qualified_name: Optional[str] - ): + @mode_report_qualified_name.setter + def mode_report_qualified_name(self, mode_report_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_data_element_qualified_name = ( - sigma_data_element_qualified_name - ) + self.attributes.mode_report_qualified_name = mode_report_qualified_name @property - def sigma_data_element_name(self) -> Optional[str]: + def mode_query_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mode_query_name + + @mode_query_name.setter + def mode_query_name(self, mode_query_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_query_name = mode_query_name + + @property + def mode_query_qualified_name(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.sigma_data_element_name + None + if self.attributes is None + else self.attributes.mode_query_qualified_name ) - @sigma_data_element_name.setter - def sigma_data_element_name(self, sigma_data_element_name: Optional[str]): + @mode_query_qualified_name.setter + def mode_query_qualified_name(self, mode_query_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_data_element_name = sigma_data_element_name + self.attributes.mode_query_qualified_name = mode_query_qualified_name class Attributes(BI.Attributes): - sigma_workbook_qualified_name: Optional[str] = Field( - None, description="", alias="sigmaWorkbookQualifiedName" + mode_id: Optional[str] = Field(None, description="", alias="modeId") + mode_token: Optional[str] = Field(None, description="", alias="modeToken") + mode_workspace_name: Optional[str] = Field( + None, description="", alias="modeWorkspaceName" + ) + mode_workspace_username: Optional[str] = Field( + None, description="", alias="modeWorkspaceUsername" ) - sigma_workbook_name: Optional[str] = Field( - None, description="", alias="sigmaWorkbookName" + mode_workspace_qualified_name: Optional[str] = Field( + None, description="", alias="modeWorkspaceQualifiedName" ) - 
sigma_page_qualified_name: Optional[str] = Field( - None, description="", alias="sigmaPageQualifiedName" + mode_report_name: Optional[str] = Field( + None, description="", alias="modeReportName" ) - sigma_page_name: Optional[str] = Field( - None, description="", alias="sigmaPageName" + mode_report_qualified_name: Optional[str] = Field( + None, description="", alias="modeReportQualifiedName" ) - sigma_data_element_qualified_name: Optional[str] = Field( - None, description="", alias="sigmaDataElementQualifiedName" + mode_query_name: Optional[str] = Field( + None, description="", alias="modeQueryName" ) - sigma_data_element_name: Optional[str] = Field( - None, description="", alias="sigmaDataElementName" + mode_query_qualified_name: Optional[str] = Field( + None, description="", alias="modeQueryQualifiedName" ) - attributes: "Sigma.Attributes" = Field( - default_factory=lambda: Sigma.Attributes(), + attributes: "Mode.Attributes" = Field( + default_factory=lambda: Mode.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -Sigma.Attributes.update_forward_refs() +Mode.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset42.py b/pyatlan/model/assets/asset42.py index d422a8584..5fee1001c 100644 --- a/pyatlan/model/assets/asset42.py +++ b/pyatlan/model/assets/asset42.py @@ -4,30 +4,188 @@ from __future__ import annotations -from typing import ClassVar +from typing import ClassVar, Optional from pydantic import Field, validator -from .asset18 import BI +from pyatlan.model.fields.atlan_fields import KeywordTextField +from .asset19 import BI -class Tableau(BI): + +class Sigma(BI): """Description""" - type_name: str = Field("Tableau", allow_mutation=False) + type_name: str = Field("Sigma", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Tableau": - raise ValueError("must be Tableau") + if v != "Sigma": + raise ValueError("must be Sigma") return v def __setattr__(self, name, value): - if name in Tableau._convenience_properties: + if name in Sigma._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - _convenience_properties: ClassVar[list[str]] = [] + SIGMA_WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "sigmaWorkbookQualifiedName", + "sigmaWorkbookQualifiedName", + "sigmaWorkbookQualifiedName.text", + ) + """ + Unique name of the workbook in which this asset exists. + """ + SIGMA_WORKBOOK_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "sigmaWorkbookName", "sigmaWorkbookName.keyword", "sigmaWorkbookName" + ) + """ + Simple name of the workbook in which this asset exists. + """ + SIGMA_PAGE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "sigmaPageQualifiedName", + "sigmaPageQualifiedName", + "sigmaPageQualifiedName.text", + ) + """ + Unique name of the page on which this asset exists. + """ + SIGMA_PAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "sigmaPageName", "sigmaPageName.keyword", "sigmaPageName" + ) + """ + Simple name of the page on which this asset exists. + """ + SIGMA_DATA_ELEMENT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "sigmaDataElementQualifiedName", + "sigmaDataElementQualifiedName", + "sigmaDataElementQualifiedName.text", + ) + """ + Unique name of the data element in which this asset exists. 
+ """ + SIGMA_DATA_ELEMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "sigmaDataElementName", "sigmaDataElementName.keyword", "sigmaDataElementName" + ) + """ + Simple name of the data element in which this asset exists. + """ + + _convenience_properties: ClassVar[list[str]] = [ + "sigma_workbook_qualified_name", + "sigma_workbook_name", + "sigma_page_qualified_name", + "sigma_page_name", + "sigma_data_element_qualified_name", + "sigma_data_element_name", + ] + + @property + def sigma_workbook_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sigma_workbook_qualified_name + ) + + @sigma_workbook_qualified_name.setter + def sigma_workbook_qualified_name( + self, sigma_workbook_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_workbook_qualified_name = sigma_workbook_qualified_name + + @property + def sigma_workbook_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.sigma_workbook_name + + @sigma_workbook_name.setter + def sigma_workbook_name(self, sigma_workbook_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_workbook_name = sigma_workbook_name + + @property + def sigma_page_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sigma_page_qualified_name + ) + + @sigma_page_qualified_name.setter + def sigma_page_qualified_name(self, sigma_page_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_page_qualified_name = sigma_page_qualified_name + + @property + def sigma_page_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.sigma_page_name + + @sigma_page_name.setter + def sigma_page_name(self, sigma_page_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_page_name = sigma_page_name + + @property + def sigma_data_element_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sigma_data_element_qualified_name + ) + + @sigma_data_element_qualified_name.setter + def sigma_data_element_qualified_name( + self, sigma_data_element_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_data_element_qualified_name = ( + sigma_data_element_qualified_name + ) + + @property + def sigma_data_element_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.sigma_data_element_name + ) + + @sigma_data_element_name.setter + def sigma_data_element_name(self, sigma_data_element_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_data_element_name = sigma_data_element_name + + class Attributes(BI.Attributes): + sigma_workbook_qualified_name: Optional[str] = Field( + None, description="", alias="sigmaWorkbookQualifiedName" + ) + sigma_workbook_name: Optional[str] = Field( + None, description="", alias="sigmaWorkbookName" + ) + sigma_page_qualified_name: Optional[str] = Field( + None, description="", alias="sigmaPageQualifiedName" + ) + sigma_page_name: Optional[str] = Field( + None, description="", alias="sigmaPageName" + ) + sigma_data_element_qualified_name: Optional[str] = Field( + None, description="", 
alias="sigmaDataElementQualifiedName" + ) + sigma_data_element_name: Optional[str] = Field( + None, description="", alias="sigmaDataElementName" + ) + + attributes: "Sigma.Attributes" = Field( + default_factory=lambda: Sigma.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) -Tableau.Attributes.update_forward_refs() +Sigma.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset43.py b/pyatlan/model/assets/asset43.py index 8b9f743b3..27729fa57 100644 --- a/pyatlan/model/assets/asset43.py +++ b/pyatlan/model/assets/asset43.py @@ -8,26 +8,26 @@ from pydantic import Field, validator -from .asset18 import BI +from .asset19 import BI -class Looker(BI): +class Tableau(BI): """Description""" - type_name: str = Field("Looker", allow_mutation=False) + type_name: str = Field("Tableau", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Looker": - raise ValueError("must be Looker") + if v != "Tableau": + raise ValueError("must be Tableau") return v def __setattr__(self, name, value): - if name in Looker._convenience_properties: + if name in Tableau._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] -Looker.Attributes.update_forward_refs() +Tableau.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset44.py b/pyatlan/model/assets/asset44.py index 85a1bf46c..2db54f3ea 100644 --- a/pyatlan/model/assets/asset44.py +++ b/pyatlan/model/assets/asset44.py @@ -4,62 +4,30 @@ from __future__ import annotations -from typing import ClassVar, Optional +from typing import ClassVar from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import BooleanField +from .asset19 import BI -from .asset18 import BI - -class Redash(BI): +class Looker(BI): """Description""" - type_name: str = Field("Redash", allow_mutation=False) + type_name: str = Field("Looker", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Redash": - raise ValueError("must be Redash") + if v != "Looker": + raise ValueError("must be Looker") return v def __setattr__(self, name, value): - if name in Redash._convenience_properties: + if name in Looker._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - REDASH_IS_PUBLISHED: ClassVar[BooleanField] = BooleanField( - "redashIsPublished", "redashIsPublished" - ) - """ - Status whether the asset is published or not on source - """ - - _convenience_properties: ClassVar[list[str]] = [ - "redash_is_published", - ] - - @property - def redash_is_published(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.redash_is_published - - @redash_is_published.setter - def redash_is_published(self, redash_is_published: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.redash_is_published = redash_is_published - - class Attributes(BI.Attributes): - redash_is_published: Optional[bool] = Field( - None, description="", alias="redashIsPublished" - ) - - attributes: "Redash.Attributes" = Field( - default_factory=lambda: Redash.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by "
-        "type, so are described in the sub-types of this schema.\n",
-    )
+    _convenience_properties: ClassVar[list[str]] = []
 
 
-Redash.Attributes.update_forward_refs()
+Looker.Attributes.update_forward_refs()
diff --git a/pyatlan/model/assets/asset45.py b/pyatlan/model/assets/asset45.py
index cb5056eaf..e4f055f9f 100644
--- a/pyatlan/model/assets/asset45.py
+++ b/pyatlan/model/assets/asset45.py
@@ -4,30 +4,62 @@
 
 from __future__ import annotations
 
-from typing import ClassVar
+from typing import ClassVar, Optional
 
 from pydantic import Field, validator
 
-from .asset18 import BI
+from pyatlan.model.fields.atlan_fields import BooleanField
+from .asset19 import BI
 
 
-class Sisense(BI):
+
+class Redash(BI):
     """Description"""
 
-    type_name: str = Field("Sisense", allow_mutation=False)
+    type_name: str = Field("Redash", allow_mutation=False)
 
     @validator("type_name")
     def validate_type_name(cls, v):
-        if v != "Sisense":
-            raise ValueError("must be Sisense")
+        if v != "Redash":
+            raise ValueError("must be Redash")
         return v
 
     def __setattr__(self, name, value):
-        if name in Sisense._convenience_properties:
+        if name in Redash._convenience_properties:
             return object.__setattr__(self, name, value)
         super().__setattr__(name, value)
 
-    _convenience_properties: ClassVar[list[str]] = []
+    REDASH_IS_PUBLISHED: ClassVar[BooleanField] = BooleanField(
+        "redashIsPublished", "redashIsPublished"
+    )
+    """
+    Whether this asset is published in Redash (true) or not (false).
+    """
+
+    _convenience_properties: ClassVar[list[str]] = [
+        "redash_is_published",
+    ]
+
+    @property
+    def redash_is_published(self) -> Optional[bool]:
+        return None if self.attributes is None else self.attributes.redash_is_published
+
+    @redash_is_published.setter
+    def redash_is_published(self, redash_is_published: Optional[bool]):
+        if self.attributes is None:
+            self.attributes = self.Attributes()
+        self.attributes.redash_is_published = redash_is_published
+
+    class Attributes(BI.Attributes):
+        redash_is_published: Optional[bool] = Field(
+            None, description="", alias="redashIsPublished"
+        )
+
+    attributes: "Redash.Attributes" = Field(
+        default_factory=lambda: Redash.Attributes(),
+        description="Map of attributes in the instance and their values.
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) -Sisense.Attributes.update_forward_refs() +Redash.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset46.py b/pyatlan/model/assets/asset46.py index 00ddcc371..f11e27860 100644 --- a/pyatlan/model/assets/asset46.py +++ b/pyatlan/model/assets/asset46.py @@ -4,298 +4,30 @@ from __future__ import annotations -from typing import ClassVar, Optional +from typing import ClassVar from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import ( - KeywordField, - KeywordTextField, - NumericField, - RelationField, -) -from pyatlan.model.structs import GoogleLabel, GoogleTag +from .asset19 import BI -from .asset00 import AirflowTask, Process -from .asset30 import Google - -class DataStudio(Google): +class Sisense(BI): """Description""" - type_name: str = Field("DataStudio", allow_mutation=False) + type_name: str = Field("Sisense", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "DataStudio": - raise ValueError("must be DataStudio") + if v != "Sisense": + raise ValueError("must be Sisense") return v def __setattr__(self, name, value): - if name in DataStudio._convenience_properties: + if name in Sisense._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - GOOGLE_SERVICE: ClassVar[KeywordField] = KeywordField( - "googleService", "googleService" - ) - """ - TBC - """ - GOOGLE_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "googleProjectName", "googleProjectName", "googleProjectName.text" - ) - """ - TBC - """ - GOOGLE_PROJECT_ID: ClassVar[KeywordTextField] = KeywordTextField( - "googleProjectId", "googleProjectId", "googleProjectId.text" - ) - """ - TBC - """ - GOOGLE_PROJECT_NUMBER: ClassVar[NumericField] = NumericField( - "googleProjectNumber", "googleProjectNumber" - ) - """ - TBC - """ - GOOGLE_LOCATION: ClassVar[KeywordField] = KeywordField( - "googleLocation", "googleLocation" - ) - """ - TBC - """ - GOOGLE_LOCATION_TYPE: ClassVar[KeywordField] = KeywordField( - "googleLocationType", "googleLocationType" - ) - """ - TBC - """ - GOOGLE_LABELS: ClassVar[KeywordField] = KeywordField("googleLabels", "googleLabels") - """ - TBC - """ - GOOGLE_TAGS: ClassVar[KeywordField] = KeywordField("googleTags", "googleTags") - """ - TBC - """ - - INPUT_TO_PROCESSES: ClassVar[RelationField] = RelationField("inputToProcesses") - """ - TBC - """ - OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( - "outputFromAirflowTasks" - ) - """ - TBC - """ - INPUT_TO_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( - "inputToAirflowTasks" - ) - """ - TBC - """ - OUTPUT_FROM_PROCESSES: ClassVar[RelationField] = RelationField( - "outputFromProcesses" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "google_service", - "google_project_name", - "google_project_id", - "google_project_number", - "google_location", - "google_location_type", - "google_labels", - "google_tags", - "input_to_processes", - "output_from_airflow_tasks", - "input_to_airflow_tasks", - "output_from_processes", - ] - - @property - def google_service(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_service - - @google_service.setter - def google_service(self, google_service: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_service = google_service - - 
@property - def google_project_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_project_name - - @google_project_name.setter - def google_project_name(self, google_project_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_project_name = google_project_name - - @property - def google_project_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_project_id - - @google_project_id.setter - def google_project_id(self, google_project_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_project_id = google_project_id - - @property - def google_project_number(self) -> Optional[int]: - return ( - None if self.attributes is None else self.attributes.google_project_number - ) - - @google_project_number.setter - def google_project_number(self, google_project_number: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_project_number = google_project_number - - @property - def google_location(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_location - - @google_location.setter - def google_location(self, google_location: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_location = google_location - - @property - def google_location_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_location_type - - @google_location_type.setter - def google_location_type(self, google_location_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_location_type = google_location_type - - @property - def google_labels(self) -> Optional[list[GoogleLabel]]: - return None if self.attributes is None else self.attributes.google_labels - - @google_labels.setter - def google_labels(self, google_labels: Optional[list[GoogleLabel]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_labels = google_labels - - @property - def google_tags(self) -> Optional[list[GoogleTag]]: - return None if self.attributes is None else self.attributes.google_tags - - @google_tags.setter - def google_tags(self, google_tags: Optional[list[GoogleTag]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.google_tags = google_tags - - @property - def input_to_processes(self) -> Optional[list[Process]]: - return None if self.attributes is None else self.attributes.input_to_processes - - @input_to_processes.setter - def input_to_processes(self, input_to_processes: Optional[list[Process]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.input_to_processes = input_to_processes - - @property - def output_from_airflow_tasks(self) -> Optional[list[AirflowTask]]: - return ( - None - if self.attributes is None - else self.attributes.output_from_airflow_tasks - ) - - @output_from_airflow_tasks.setter - def output_from_airflow_tasks( - self, output_from_airflow_tasks: Optional[list[AirflowTask]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.output_from_airflow_tasks = output_from_airflow_tasks - - @property - def input_to_airflow_tasks(self) -> Optional[list[AirflowTask]]: - return ( - None if self.attributes is None else 
self.attributes.input_to_airflow_tasks - ) - - @input_to_airflow_tasks.setter - def input_to_airflow_tasks( - self, input_to_airflow_tasks: Optional[list[AirflowTask]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.input_to_airflow_tasks = input_to_airflow_tasks - - @property - def output_from_processes(self) -> Optional[list[Process]]: - return ( - None if self.attributes is None else self.attributes.output_from_processes - ) - - @output_from_processes.setter - def output_from_processes(self, output_from_processes: Optional[list[Process]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.output_from_processes = output_from_processes - - class Attributes(Google.Attributes): - google_service: Optional[str] = Field( - None, description="", alias="googleService" - ) - google_project_name: Optional[str] = Field( - None, description="", alias="googleProjectName" - ) - google_project_id: Optional[str] = Field( - None, description="", alias="googleProjectId" - ) - google_project_number: Optional[int] = Field( - None, description="", alias="googleProjectNumber" - ) - google_location: Optional[str] = Field( - None, description="", alias="googleLocation" - ) - google_location_type: Optional[str] = Field( - None, description="", alias="googleLocationType" - ) - google_labels: Optional[list[GoogleLabel]] = Field( - None, description="", alias="googleLabels" - ) - google_tags: Optional[list[GoogleTag]] = Field( - None, description="", alias="googleTags" - ) - input_to_processes: Optional[list[Process]] = Field( - None, description="", alias="inputToProcesses" - ) # relationship - output_from_airflow_tasks: Optional[list[AirflowTask]] = Field( - None, description="", alias="outputFromAirflowTasks" - ) # relationship - input_to_airflow_tasks: Optional[list[AirflowTask]] = Field( - None, description="", alias="inputToAirflowTasks" - ) # relationship - output_from_processes: Optional[list[Process]] = Field( - None, description="", alias="outputFromProcesses" - ) # relationship - - attributes: "DataStudio.Attributes" = Field( - default_factory=lambda: DataStudio.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) + _convenience_properties: ClassVar[list[str]] = [] -DataStudio.Attributes.update_forward_refs() +Sisense.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset47.py b/pyatlan/model/assets/asset47.py index 37b3beaad..63be78765 100644 --- a/pyatlan/model/assets/asset47.py +++ b/pyatlan/model/assets/asset47.py @@ -8,94 +8,294 @@ from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import KeywordTextField +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) +from pyatlan.model.structs import GoogleLabel, GoogleTag -from .asset18 import BI +from .asset00 import AirflowTask, Process +from .asset31 import Google -class Metabase(BI): +class DataStudio(Google): """Description""" - type_name: str = Field("Metabase", allow_mutation=False) + type_name: str = Field("DataStudio", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Metabase": - raise ValueError("must be Metabase") + if v != "DataStudio": + raise ValueError("must be DataStudio") return v def __setattr__(self, name, value): - if name in Metabase._convenience_properties: + if name in DataStudio._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - METABASE_COLLECTION_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "metabaseCollectionName", - "metabaseCollectionName.keyword", - "metabaseCollectionName", + GOOGLE_SERVICE: ClassVar[KeywordField] = KeywordField( + "googleService", "googleService" + ) + """ + Service in Google in which the asset exists. + """ + GOOGLE_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "googleProjectName", "googleProjectName", "googleProjectName.text" + ) + """ + Name of the project in which the asset exists. + """ + GOOGLE_PROJECT_ID: ClassVar[KeywordTextField] = KeywordTextField( + "googleProjectId", "googleProjectId", "googleProjectId.text" + ) + """ + ID of the project in which the asset exists. + """ + GOOGLE_PROJECT_NUMBER: ClassVar[NumericField] = NumericField( + "googleProjectNumber", "googleProjectNumber" + ) + """ + Number of the project in which the asset exists. + """ + GOOGLE_LOCATION: ClassVar[KeywordField] = KeywordField( + "googleLocation", "googleLocation" + ) + """ + Location of this asset in Google. + """ + GOOGLE_LOCATION_TYPE: ClassVar[KeywordField] = KeywordField( + "googleLocationType", "googleLocationType" + ) + """ + Type of location of this asset in Google. + """ + GOOGLE_LABELS: ClassVar[KeywordField] = KeywordField("googleLabels", "googleLabels") + """ + List of labels that have been applied to the asset in Google. + """ + GOOGLE_TAGS: ClassVar[KeywordField] = KeywordField("googleTags", "googleTags") + """ + List of tags that have been applied to the asset in Google. 
+ """ + + INPUT_TO_PROCESSES: ClassVar[RelationField] = RelationField("inputToProcesses") + """ + TBC + """ + OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( + "outputFromAirflowTasks" + ) + """ + TBC + """ + INPUT_TO_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( + "inputToAirflowTasks" ) """ TBC """ - METABASE_COLLECTION_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "metabaseCollectionQualifiedName", - "metabaseCollectionQualifiedName", - "metabaseCollectionQualifiedName.text", + OUTPUT_FROM_PROCESSES: ClassVar[RelationField] = RelationField( + "outputFromProcesses" ) """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "metabase_collection_name", - "metabase_collection_qualified_name", + "google_service", + "google_project_name", + "google_project_id", + "google_project_number", + "google_location", + "google_location_type", + "google_labels", + "google_tags", + "input_to_processes", + "output_from_airflow_tasks", + "input_to_airflow_tasks", + "output_from_processes", ] @property - def metabase_collection_name(self) -> Optional[str]: + def google_service(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_service + + @google_service.setter + def google_service(self, google_service: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_service = google_service + + @property + def google_project_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_project_name + + @google_project_name.setter + def google_project_name(self, google_project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_project_name = google_project_name + + @property + def google_project_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_project_id + + @google_project_id.setter + def google_project_id(self, google_project_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_project_id = google_project_id + + @property + def google_project_number(self) -> Optional[int]: return ( - None - if self.attributes is None - else self.attributes.metabase_collection_name + None if self.attributes is None else self.attributes.google_project_number ) - @metabase_collection_name.setter - def metabase_collection_name(self, metabase_collection_name: Optional[str]): + @google_project_number.setter + def google_project_number(self, google_project_number: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_project_number = google_project_number + + @property + def google_location(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_location + + @google_location.setter + def google_location(self, google_location: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_location = google_location + + @property + def google_location_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_location_type + + @google_location_type.setter + def google_location_type(self, google_location_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_location_type = google_location_type + + @property + def google_labels(self) -> 
Optional[list[GoogleLabel]]: + return None if self.attributes is None else self.attributes.google_labels + + @google_labels.setter + def google_labels(self, google_labels: Optional[list[GoogleLabel]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_labels = google_labels + + @property + def google_tags(self) -> Optional[list[GoogleTag]]: + return None if self.attributes is None else self.attributes.google_tags + + @google_tags.setter + def google_tags(self, google_tags: Optional[list[GoogleTag]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.google_tags = google_tags + + @property + def input_to_processes(self) -> Optional[list[Process]]: + return None if self.attributes is None else self.attributes.input_to_processes + + @input_to_processes.setter + def input_to_processes(self, input_to_processes: Optional[list[Process]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.metabase_collection_name = metabase_collection_name + self.attributes.input_to_processes = input_to_processes @property - def metabase_collection_qualified_name(self) -> Optional[str]: + def output_from_airflow_tasks(self) -> Optional[list[AirflowTask]]: return ( None if self.attributes is None - else self.attributes.metabase_collection_qualified_name + else self.attributes.output_from_airflow_tasks ) - @metabase_collection_qualified_name.setter - def metabase_collection_qualified_name( - self, metabase_collection_qualified_name: Optional[str] + @output_from_airflow_tasks.setter + def output_from_airflow_tasks( + self, output_from_airflow_tasks: Optional[list[AirflowTask]] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.metabase_collection_qualified_name = ( - metabase_collection_qualified_name + self.attributes.output_from_airflow_tasks = output_from_airflow_tasks + + @property + def input_to_airflow_tasks(self) -> Optional[list[AirflowTask]]: + return ( + None if self.attributes is None else self.attributes.input_to_airflow_tasks ) - class Attributes(BI.Attributes): - metabase_collection_name: Optional[str] = Field( - None, description="", alias="metabaseCollectionName" + @input_to_airflow_tasks.setter + def input_to_airflow_tasks( + self, input_to_airflow_tasks: Optional[list[AirflowTask]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.input_to_airflow_tasks = input_to_airflow_tasks + + @property + def output_from_processes(self) -> Optional[list[Process]]: + return ( + None if self.attributes is None else self.attributes.output_from_processes + ) + + @output_from_processes.setter + def output_from_processes(self, output_from_processes: Optional[list[Process]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.output_from_processes = output_from_processes + + class Attributes(Google.Attributes): + google_service: Optional[str] = Field( + None, description="", alias="googleService" + ) + google_project_name: Optional[str] = Field( + None, description="", alias="googleProjectName" + ) + google_project_id: Optional[str] = Field( + None, description="", alias="googleProjectId" + ) + google_project_number: Optional[int] = Field( + None, description="", alias="googleProjectNumber" + ) + google_location: Optional[str] = Field( + None, description="", alias="googleLocation" + ) + google_location_type: Optional[str] = Field( + None, description="", alias="googleLocationType" + ) + 
google_labels: Optional[list[GoogleLabel]] = Field( + None, description="", alias="googleLabels" ) - metabase_collection_qualified_name: Optional[str] = Field( - None, description="", alias="metabaseCollectionQualifiedName" + google_tags: Optional[list[GoogleTag]] = Field( + None, description="", alias="googleTags" ) + input_to_processes: Optional[list[Process]] = Field( + None, description="", alias="inputToProcesses" + ) # relationship + output_from_airflow_tasks: Optional[list[AirflowTask]] = Field( + None, description="", alias="outputFromAirflowTasks" + ) # relationship + input_to_airflow_tasks: Optional[list[AirflowTask]] = Field( + None, description="", alias="inputToAirflowTasks" + ) # relationship + output_from_processes: Optional[list[Process]] = Field( + None, description="", alias="outputFromProcesses" + ) # relationship - attributes: "Metabase.Attributes" = Field( - default_factory=lambda: Metabase.Attributes(), + attributes: "DataStudio.Attributes" = Field( + default_factory=lambda: DataStudio.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -Metabase.Attributes.update_forward_refs() +DataStudio.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset48.py b/pyatlan/model/assets/asset48.py index 20a9a7f14..f72c84b6c 100644 --- a/pyatlan/model/assets/asset48.py +++ b/pyatlan/model/assets/asset48.py @@ -8,100 +8,94 @@ from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField +from pyatlan.model.fields.atlan_fields import KeywordTextField -from .asset18 import BI +from .asset19 import BI -class QuickSight(BI): +class Metabase(BI): """Description""" - type_name: str = Field("QuickSight", allow_mutation=False) + type_name: str = Field("Metabase", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "QuickSight": - raise ValueError("must be QuickSight") + if v != "Metabase": + raise ValueError("must be Metabase") return v def __setattr__(self, name, value): - if name in QuickSight._convenience_properties: + if name in Metabase._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - QUICK_SIGHT_ID: ClassVar[KeywordField] = KeywordField( - "quickSightId", "quickSightId" + METABASE_COLLECTION_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "metabaseCollectionName", + "metabaseCollectionName.keyword", + "metabaseCollectionName", ) """ - TBC + Simple name of the Metabase collection in which this asset exists. """ - QUICK_SIGHT_SHEET_ID: ClassVar[KeywordField] = KeywordField( - "quickSightSheetId", "quickSightSheetId" + METABASE_COLLECTION_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "metabaseCollectionQualifiedName", + "metabaseCollectionQualifiedName", + "metabaseCollectionQualifiedName.text", ) """ - TBC - """ - QUICK_SIGHT_SHEET_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "quickSightSheetName", "quickSightSheetName.keyword", "quickSightSheetName" - ) - """ - TBC + Unique name of the Metabase collection in which this asset exists. 
""" _convenience_properties: ClassVar[list[str]] = [ - "quick_sight_id", - "quick_sight_sheet_id", - "quick_sight_sheet_name", + "metabase_collection_name", + "metabase_collection_qualified_name", ] @property - def quick_sight_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.quick_sight_id - - @quick_sight_id.setter - def quick_sight_id(self, quick_sight_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_id = quick_sight_id - - @property - def quick_sight_sheet_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.quick_sight_sheet_id + def metabase_collection_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.metabase_collection_name + ) - @quick_sight_sheet_id.setter - def quick_sight_sheet_id(self, quick_sight_sheet_id: Optional[str]): + @metabase_collection_name.setter + def metabase_collection_name(self, metabase_collection_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_sheet_id = quick_sight_sheet_id + self.attributes.metabase_collection_name = metabase_collection_name @property - def quick_sight_sheet_name(self) -> Optional[str]: + def metabase_collection_qualified_name(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.quick_sight_sheet_name + None + if self.attributes is None + else self.attributes.metabase_collection_qualified_name ) - @quick_sight_sheet_name.setter - def quick_sight_sheet_name(self, quick_sight_sheet_name: Optional[str]): + @metabase_collection_qualified_name.setter + def metabase_collection_qualified_name( + self, metabase_collection_qualified_name: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_sheet_name = quick_sight_sheet_name + self.attributes.metabase_collection_qualified_name = ( + metabase_collection_qualified_name + ) class Attributes(BI.Attributes): - quick_sight_id: Optional[str] = Field( - None, description="", alias="quickSightId" - ) - quick_sight_sheet_id: Optional[str] = Field( - None, description="", alias="quickSightSheetId" + metabase_collection_name: Optional[str] = Field( + None, description="", alias="metabaseCollectionName" ) - quick_sight_sheet_name: Optional[str] = Field( - None, description="", alias="quickSightSheetName" + metabase_collection_qualified_name: Optional[str] = Field( + None, description="", alias="metabaseCollectionQualifiedName" ) - attributes: "QuickSight.Attributes" = Field( - default_factory=lambda: QuickSight.Attributes(), + attributes: "Metabase.Attributes" = Field( + default_factory=lambda: Metabase.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -QuickSight.Attributes.update_forward_refs() +Metabase.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset49.py b/pyatlan/model/assets/asset49.py index feefa5409..3a75388fa 100644 --- a/pyatlan/model/assets/asset49.py +++ b/pyatlan/model/assets/asset49.py @@ -8,84 +8,100 @@ from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import KeywordField, TextField +from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField -from .asset18 import BI +from .asset19 import BI -class Thoughtspot(BI): +class QuickSight(BI): """Description""" - type_name: str = Field("Thoughtspot", allow_mutation=False) + type_name: str = Field("QuickSight", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Thoughtspot": - raise ValueError("must be Thoughtspot") + if v != "QuickSight": + raise ValueError("must be QuickSight") return v def __setattr__(self, name, value): - if name in Thoughtspot._convenience_properties: + if name in QuickSight._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - THOUGHTSPOT_CHART_TYPE: ClassVar[KeywordField] = KeywordField( - "thoughtspotChartType", "thoughtspotChartType" + QUICK_SIGHT_ID: ClassVar[KeywordField] = KeywordField( + "quickSightId", "quickSightId" ) """ - TBC + """ - THOUGHTSPOT_QUESTION_TEXT: ClassVar[TextField] = TextField( - "thoughtspotQuestionText", "thoughtspotQuestionText" + QUICK_SIGHT_SHEET_ID: ClassVar[KeywordField] = KeywordField( + "quickSightSheetId", "quickSightSheetId" ) """ - TBC + + """ + QUICK_SIGHT_SHEET_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "quickSightSheetName", "quickSightSheetName.keyword", "quickSightSheetName" + ) + """ + """ _convenience_properties: ClassVar[list[str]] = [ - "thoughtspot_chart_type", - "thoughtspot_question_text", + "quick_sight_id", + "quick_sight_sheet_id", + "quick_sight_sheet_name", ] @property - def thoughtspot_chart_type(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.thoughtspot_chart_type - ) + def quick_sight_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.quick_sight_id - @thoughtspot_chart_type.setter - def thoughtspot_chart_type(self, thoughtspot_chart_type: Optional[str]): + @quick_sight_id.setter + def quick_sight_id(self, quick_sight_id: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.thoughtspot_chart_type = thoughtspot_chart_type + self.attributes.quick_sight_id = quick_sight_id @property - def thoughtspot_question_text(self) -> Optional[str]: + def quick_sight_sheet_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.quick_sight_sheet_id + + @quick_sight_sheet_id.setter + def quick_sight_sheet_id(self, quick_sight_sheet_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_sheet_id = quick_sight_sheet_id + + @property + def quick_sight_sheet_name(self) -> Optional[str]: return ( - None - if self.attributes is None - else self.attributes.thoughtspot_question_text + None if self.attributes is None else self.attributes.quick_sight_sheet_name ) - @thoughtspot_question_text.setter - def thoughtspot_question_text(self, thoughtspot_question_text: Optional[str]): + @quick_sight_sheet_name.setter + def 
quick_sight_sheet_name(self, quick_sight_sheet_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.thoughtspot_question_text = thoughtspot_question_text + self.attributes.quick_sight_sheet_name = quick_sight_sheet_name class Attributes(BI.Attributes): - thoughtspot_chart_type: Optional[str] = Field( - None, description="", alias="thoughtspotChartType" + quick_sight_id: Optional[str] = Field( + None, description="", alias="quickSightId" + ) + quick_sight_sheet_id: Optional[str] = Field( + None, description="", alias="quickSightSheetId" ) - thoughtspot_question_text: Optional[str] = Field( - None, description="", alias="thoughtspotQuestionText" + quick_sight_sheet_name: Optional[str] = Field( + None, description="", alias="quickSightSheetName" ) - attributes: "Thoughtspot.Attributes" = Field( - default_factory=lambda: Thoughtspot.Attributes(), + attributes: "QuickSight.Attributes" = Field( + default_factory=lambda: QuickSight.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -Thoughtspot.Attributes.update_forward_refs() +QuickSight.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset50.py b/pyatlan/model/assets/asset50.py index e9c940b78..3a5fff293 100644 --- a/pyatlan/model/assets/asset50.py +++ b/pyatlan/model/assets/asset50.py @@ -8,137 +8,84 @@ from pydantic import Field, validator -from pyatlan.model.enums import PowerbiEndorsement -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - KeywordTextField, -) +from pyatlan.model.fields.atlan_fields import KeywordField, TextField -from .asset18 import BI +from .asset19 import BI -class PowerBI(BI): +class Thoughtspot(BI): """Description""" - type_name: str = Field("PowerBI", allow_mutation=False) + type_name: str = Field("Thoughtspot", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "PowerBI": - raise ValueError("must be PowerBI") + if v != "Thoughtspot": + raise ValueError("must be Thoughtspot") return v def __setattr__(self, name, value): - if name in PowerBI._convenience_properties: + if name in Thoughtspot._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - POWER_BI_IS_HIDDEN: ClassVar[BooleanField] = BooleanField( - "powerBIIsHidden", "powerBIIsHidden" + THOUGHTSPOT_CHART_TYPE: ClassVar[KeywordField] = KeywordField( + "thoughtspotChartType", "thoughtspotChartType" ) """ - TBC - """ - POWER_BI_TABLE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "powerBITableQualifiedName", - "powerBITableQualifiedName", - "powerBITableQualifiedName.text", - ) - """ - TBC - """ - POWER_BI_FORMAT_STRING: ClassVar[KeywordField] = KeywordField( - "powerBIFormatString", "powerBIFormatString" - ) - """ - TBC + """ - POWER_BI_ENDORSEMENT: ClassVar[KeywordField] = KeywordField( - "powerBIEndorsement", "powerBIEndorsement" + THOUGHTSPOT_QUESTION_TEXT: ClassVar[TextField] = TextField( + "thoughtspotQuestionText", "thoughtspotQuestionText" ) """ - TBC + """ _convenience_properties: ClassVar[list[str]] = [ - "power_b_i_is_hidden", - "power_b_i_table_qualified_name", - "power_b_i_format_string", - "power_b_i_endorsement", + "thoughtspot_chart_type", + "thoughtspot_question_text", ] @property - def power_b_i_is_hidden(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.power_b_i_is_hidden - 
- @power_b_i_is_hidden.setter - def power_b_i_is_hidden(self, power_b_i_is_hidden: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_is_hidden = power_b_i_is_hidden - - @property - def power_b_i_table_qualified_name(self) -> Optional[str]: + def thoughtspot_chart_type(self) -> Optional[str]: return ( - None - if self.attributes is None - else self.attributes.power_b_i_table_qualified_name + None if self.attributes is None else self.attributes.thoughtspot_chart_type ) - @power_b_i_table_qualified_name.setter - def power_b_i_table_qualified_name( - self, power_b_i_table_qualified_name: Optional[str] - ): + @thoughtspot_chart_type.setter + def thoughtspot_chart_type(self, thoughtspot_chart_type: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.power_b_i_table_qualified_name = power_b_i_table_qualified_name + self.attributes.thoughtspot_chart_type = thoughtspot_chart_type @property - def power_b_i_format_string(self) -> Optional[str]: + def thoughtspot_question_text(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.power_b_i_format_string - ) - - @power_b_i_format_string.setter - def power_b_i_format_string(self, power_b_i_format_string: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_format_string = power_b_i_format_string - - @property - def power_b_i_endorsement(self) -> Optional[PowerbiEndorsement]: - return ( - None if self.attributes is None else self.attributes.power_b_i_endorsement + None + if self.attributes is None + else self.attributes.thoughtspot_question_text ) - @power_b_i_endorsement.setter - def power_b_i_endorsement( - self, power_b_i_endorsement: Optional[PowerbiEndorsement] - ): + @thoughtspot_question_text.setter + def thoughtspot_question_text(self, thoughtspot_question_text: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.power_b_i_endorsement = power_b_i_endorsement + self.attributes.thoughtspot_question_text = thoughtspot_question_text class Attributes(BI.Attributes): - power_b_i_is_hidden: Optional[bool] = Field( - None, description="", alias="powerBIIsHidden" - ) - power_b_i_table_qualified_name: Optional[str] = Field( - None, description="", alias="powerBITableQualifiedName" - ) - power_b_i_format_string: Optional[str] = Field( - None, description="", alias="powerBIFormatString" + thoughtspot_chart_type: Optional[str] = Field( + None, description="", alias="thoughtspotChartType" ) - power_b_i_endorsement: Optional[PowerbiEndorsement] = Field( - None, description="", alias="powerBIEndorsement" + thoughtspot_question_text: Optional[str] = Field( + None, description="", alias="thoughtspotQuestionText" ) - attributes: "PowerBI.Attributes" = Field( - default_factory=lambda: PowerBI.Attributes(), + attributes: "Thoughtspot.Attributes" = Field( + default_factory=lambda: Thoughtspot.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -PowerBI.Attributes.update_forward_refs() +Thoughtspot.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset51.py b/pyatlan/model/assets/asset51.py index 9992bd0fe..dbca9ee26 100644 --- a/pyatlan/model/assets/asset51.py +++ b/pyatlan/model/assets/asset51.py @@ -4,320 +4,141 @@ from __future__ import annotations -from datetime import datetime from typing import ClassVar, Optional from pydantic import Field, validator +from pyatlan.model.enums import PowerbiEndorsement from pyatlan.model.fields.atlan_fields import ( BooleanField, KeywordField, KeywordTextField, - NumericField, ) -from .asset18 import BI +from .asset19 import BI -class MicroStrategy(BI): +class PowerBI(BI): """Description""" - type_name: str = Field("MicroStrategy", allow_mutation=False) + type_name: str = Field("PowerBI", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "MicroStrategy": - raise ValueError("must be MicroStrategy") + if v != "PowerBI": + raise ValueError("must be PowerBI") return v def __setattr__(self, name, value): - if name in MicroStrategy._convenience_properties: + if name in PowerBI._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - MICRO_STRATEGY_PROJECT_QUALIFIED_NAME: ClassVar[ - KeywordTextField - ] = KeywordTextField( - "microStrategyProjectQualifiedName", - "microStrategyProjectQualifiedName", - "microStrategyProjectQualifiedName.text", + POWER_BI_IS_HIDDEN: ClassVar[BooleanField] = BooleanField( + "powerBIIsHidden", "powerBIIsHidden" ) """ - Related project qualified name + Whether this asset is hidden in Power BI (true) or not (false). """ - MICRO_STRATEGY_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "microStrategyProjectName", - "microStrategyProjectName.keyword", - "microStrategyProjectName", + POWER_BI_TABLE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "powerBITableQualifiedName", + "powerBITableQualifiedName", + "powerBITableQualifiedName.text", ) """ - Related project name + Unique name of the Power BI table in which this asset exists. """ - MICRO_STRATEGY_CUBE_QUALIFIED_NAMES: ClassVar[KeywordTextField] = KeywordTextField( - "microStrategyCubeQualifiedNames", - "microStrategyCubeQualifiedNames", - "microStrategyCubeQualifiedNames.text", + POWER_BI_FORMAT_STRING: ClassVar[KeywordField] = KeywordField( + "powerBIFormatString", "powerBIFormatString" ) """ - Related cube qualified name list + Format of this asset, as specified in the FORMAT_STRING of the MDX cell property. 
""" - MICRO_STRATEGY_CUBE_NAMES: ClassVar[KeywordTextField] = KeywordTextField( - "microStrategyCubeNames", - "microStrategyCubeNames.keyword", - "microStrategyCubeNames", + POWER_BI_ENDORSEMENT: ClassVar[KeywordField] = KeywordField( + "powerBIEndorsement", "powerBIEndorsement" ) """ - Related cube name list - """ - MICRO_STRATEGY_REPORT_QUALIFIED_NAMES: ClassVar[ - KeywordTextField - ] = KeywordTextField( - "microStrategyReportQualifiedNames", - "microStrategyReportQualifiedNames", - "microStrategyReportQualifiedNames.text", - ) - """ - Related report qualified name list - """ - MICRO_STRATEGY_REPORT_NAMES: ClassVar[KeywordTextField] = KeywordTextField( - "microStrategyReportNames", - "microStrategyReportNames.keyword", - "microStrategyReportNames", - ) - """ - Related report name list - """ - MICRO_STRATEGY_IS_CERTIFIED: ClassVar[BooleanField] = BooleanField( - "microStrategyIsCertified", "microStrategyIsCertified" - ) - """ - Whether certified in MicroStrategy - """ - MICRO_STRATEGY_CERTIFIED_BY: ClassVar[KeywordField] = KeywordField( - "microStrategyCertifiedBy", "microStrategyCertifiedBy" - ) - """ - User who certified in MicroStrategy - """ - MICRO_STRATEGY_CERTIFIED_AT: ClassVar[NumericField] = NumericField( - "microStrategyCertifiedAt", "microStrategyCertifiedAt" - ) - """ - Certified date in MicroStrategy - """ - MICRO_STRATEGY_LOCATION: ClassVar[KeywordField] = KeywordField( - "microStrategyLocation", "microStrategyLocation" - ) - """ - Location path in MicroStrategy + Endorsement status of this asset, in Power BI. """ _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_project_qualified_name", - "micro_strategy_project_name", - "micro_strategy_cube_qualified_names", - "micro_strategy_cube_names", - "micro_strategy_report_qualified_names", - "micro_strategy_report_names", - "micro_strategy_is_certified", - "micro_strategy_certified_by", - "micro_strategy_certified_at", - "micro_strategy_location", + "power_b_i_is_hidden", + "power_b_i_table_qualified_name", + "power_b_i_format_string", + "power_b_i_endorsement", ] @property - def micro_strategy_project_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_project_qualified_name - ) - - @micro_strategy_project_qualified_name.setter - def micro_strategy_project_qualified_name( - self, micro_strategy_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_project_qualified_name = ( - micro_strategy_project_qualified_name - ) - - @property - def micro_strategy_project_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_project_name - ) + def power_b_i_is_hidden(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.power_b_i_is_hidden - @micro_strategy_project_name.setter - def micro_strategy_project_name(self, micro_strategy_project_name: Optional[str]): + @power_b_i_is_hidden.setter + def power_b_i_is_hidden(self, power_b_i_is_hidden: Optional[bool]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.micro_strategy_project_name = micro_strategy_project_name + self.attributes.power_b_i_is_hidden = power_b_i_is_hidden @property - def micro_strategy_cube_qualified_names(self) -> Optional[set[str]]: + def power_b_i_table_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else 
self.attributes.micro_strategy_cube_qualified_names + else self.attributes.power_b_i_table_qualified_name ) - @micro_strategy_cube_qualified_names.setter - def micro_strategy_cube_qualified_names( - self, micro_strategy_cube_qualified_names: Optional[set[str]] + @power_b_i_table_qualified_name.setter + def power_b_i_table_qualified_name( + self, power_b_i_table_qualified_name: Optional[str] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.micro_strategy_cube_qualified_names = ( - micro_strategy_cube_qualified_names - ) + self.attributes.power_b_i_table_qualified_name = power_b_i_table_qualified_name @property - def micro_strategy_cube_names(self) -> Optional[set[str]]: + def power_b_i_format_string(self) -> Optional[str]: return ( - None - if self.attributes is None - else self.attributes.micro_strategy_cube_names + None if self.attributes is None else self.attributes.power_b_i_format_string ) - @micro_strategy_cube_names.setter - def micro_strategy_cube_names(self, micro_strategy_cube_names: Optional[set[str]]): + @power_b_i_format_string.setter + def power_b_i_format_string(self, power_b_i_format_string: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.micro_strategy_cube_names = micro_strategy_cube_names + self.attributes.power_b_i_format_string = power_b_i_format_string @property - def micro_strategy_report_qualified_names(self) -> Optional[set[str]]: + def power_b_i_endorsement(self) -> Optional[PowerbiEndorsement]: return ( - None - if self.attributes is None - else self.attributes.micro_strategy_report_qualified_names + None if self.attributes is None else self.attributes.power_b_i_endorsement ) - @micro_strategy_report_qualified_names.setter - def micro_strategy_report_qualified_names( - self, micro_strategy_report_qualified_names: Optional[set[str]] + @power_b_i_endorsement.setter + def power_b_i_endorsement( + self, power_b_i_endorsement: Optional[PowerbiEndorsement] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.micro_strategy_report_qualified_names = ( - micro_strategy_report_qualified_names - ) - - @property - def micro_strategy_report_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_report_names - ) - - @micro_strategy_report_names.setter - def micro_strategy_report_names( - self, micro_strategy_report_names: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_report_names = micro_strategy_report_names - - @property - def micro_strategy_is_certified(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_is_certified - ) - - @micro_strategy_is_certified.setter - def micro_strategy_is_certified(self, micro_strategy_is_certified: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_is_certified = micro_strategy_is_certified - - @property - def micro_strategy_certified_by(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_certified_by - ) - - @micro_strategy_certified_by.setter - def micro_strategy_certified_by(self, micro_strategy_certified_by: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_certified_by = micro_strategy_certified_by - - @property - def 
micro_strategy_certified_at(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_certified_at - ) - - @micro_strategy_certified_at.setter - def micro_strategy_certified_at( - self, micro_strategy_certified_at: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_certified_at = micro_strategy_certified_at - - @property - def micro_strategy_location(self) -> Optional[list[dict[str, str]]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_location - ) - - @micro_strategy_location.setter - def micro_strategy_location( - self, micro_strategy_location: Optional[list[dict[str, str]]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_location = micro_strategy_location + self.attributes.power_b_i_endorsement = power_b_i_endorsement class Attributes(BI.Attributes): - micro_strategy_project_qualified_name: Optional[str] = Field( - None, description="", alias="microStrategyProjectQualifiedName" - ) - micro_strategy_project_name: Optional[str] = Field( - None, description="", alias="microStrategyProjectName" - ) - micro_strategy_cube_qualified_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyCubeQualifiedNames" - ) - micro_strategy_cube_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyCubeNames" - ) - micro_strategy_report_qualified_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyReportQualifiedNames" - ) - micro_strategy_report_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyReportNames" - ) - micro_strategy_is_certified: Optional[bool] = Field( - None, description="", alias="microStrategyIsCertified" + power_b_i_is_hidden: Optional[bool] = Field( + None, description="", alias="powerBIIsHidden" ) - micro_strategy_certified_by: Optional[str] = Field( - None, description="", alias="microStrategyCertifiedBy" + power_b_i_table_qualified_name: Optional[str] = Field( + None, description="", alias="powerBITableQualifiedName" ) - micro_strategy_certified_at: Optional[datetime] = Field( - None, description="", alias="microStrategyCertifiedAt" + power_b_i_format_string: Optional[str] = Field( + None, description="", alias="powerBIFormatString" ) - micro_strategy_location: Optional[list[dict[str, str]]] = Field( - None, description="", alias="microStrategyLocation" + power_b_i_endorsement: Optional[PowerbiEndorsement] = Field( + None, description="", alias="powerBIEndorsement" ) - attributes: "MicroStrategy.Attributes" = Field( - default_factory=lambda: MicroStrategy.Attributes(), + attributes: "PowerBI.Attributes" = Field( + default_factory=lambda: PowerBI.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -MicroStrategy.Attributes.update_forward_refs() +PowerBI.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset52.py b/pyatlan/model/assets/asset52.py index c77609ac6..a4f5b4088 100644 --- a/pyatlan/model/assets/asset52.py +++ b/pyatlan/model/assets/asset52.py @@ -4,6 +4,7 @@ from __future__ import annotations +from datetime import datetime from typing import ClassVar, Optional from pydantic import Field, validator @@ -12,188 +13,311 @@ BooleanField, KeywordField, KeywordTextField, + NumericField, ) -from .asset18 import BI +from .asset19 import BI -class Qlik(BI): +class MicroStrategy(BI): """Description""" - type_name: str = Field("Qlik", allow_mutation=False) + type_name: str = Field("MicroStrategy", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Qlik": - raise ValueError("must be Qlik") + if v != "MicroStrategy": + raise ValueError("must be MicroStrategy") return v def __setattr__(self, name, value): - if name in Qlik._convenience_properties: + if name in MicroStrategy._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - QLIK_ID: ClassVar[KeywordField] = KeywordField("qlikId", "qlikId") + MICRO_STRATEGY_PROJECT_QUALIFIED_NAME: ClassVar[ + KeywordTextField + ] = KeywordTextField( + "microStrategyProjectQualifiedName", + "microStrategyProjectQualifiedName", + "microStrategyProjectQualifiedName.text", + ) + """ + Unique name of the project in which this asset exists. + """ + MICRO_STRATEGY_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "microStrategyProjectName", + "microStrategyProjectName.keyword", + "microStrategyProjectName", + ) """ - qID/guid of the qlik object + Simple name of the project in which this asset exists. """ - QLIK_QRI: ClassVar[KeywordTextField] = KeywordTextField( - "qlikQRI", "qlikQRI", "qlikQRI.text" + MICRO_STRATEGY_CUBE_QUALIFIED_NAMES: ClassVar[KeywordTextField] = KeywordTextField( + "microStrategyCubeQualifiedNames", + "microStrategyCubeQualifiedNames", + "microStrategyCubeQualifiedNames.text", ) """ - QRI of the qlik object, kind of like qualifiedName on Atlan + Unique names of the cubes related to this asset. """ - QLIK_SPACE_ID: ClassVar[KeywordField] = KeywordField("qlikSpaceId", "qlikSpaceId") + MICRO_STRATEGY_CUBE_NAMES: ClassVar[KeywordTextField] = KeywordTextField( + "microStrategyCubeNames", + "microStrategyCubeNames.keyword", + "microStrategyCubeNames", + ) + """ + Simple names of the cubes related to this asset. + """ + MICRO_STRATEGY_REPORT_QUALIFIED_NAMES: ClassVar[ + KeywordTextField + ] = KeywordTextField( + "microStrategyReportQualifiedNames", + "microStrategyReportQualifiedNames", + "microStrategyReportQualifiedNames.text", + ) """ - qID of a space where the qlik object belongs to + Unique names of the reports related to this asset. """ - QLIK_SPACE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "qlikSpaceQualifiedName", - "qlikSpaceQualifiedName", - "qlikSpaceQualifiedName.text", + MICRO_STRATEGY_REPORT_NAMES: ClassVar[KeywordTextField] = KeywordTextField( + "microStrategyReportNames", + "microStrategyReportNames.keyword", + "microStrategyReportNames", ) """ - qualifiedName of a space where the qlik object belongs to + Simple names of the reports related to this asset. 
""" - QLIK_APP_ID: ClassVar[KeywordField] = KeywordField("qlikAppId", "qlikAppId") + MICRO_STRATEGY_IS_CERTIFIED: ClassVar[BooleanField] = BooleanField( + "microStrategyIsCertified", "microStrategyIsCertified" + ) """ - qID of a app where the qlik object belongs + Whether the asset is certified in MicroStrategy (true) or not (false). """ - QLIK_APP_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "qlikAppQualifiedName", "qlikAppQualifiedName", "qlikAppQualifiedName.text" + MICRO_STRATEGY_CERTIFIED_BY: ClassVar[KeywordField] = KeywordField( + "microStrategyCertifiedBy", "microStrategyCertifiedBy" ) """ - qualifiedName of an app where the qlik object belongs to + User who certified this asset, in MicroStrategy. """ - QLIK_OWNER_ID: ClassVar[KeywordField] = KeywordField("qlikOwnerId", "qlikOwnerId") + MICRO_STRATEGY_CERTIFIED_AT: ClassVar[NumericField] = NumericField( + "microStrategyCertifiedAt", "microStrategyCertifiedAt" + ) """ - Owner's guid of the qlik object + Time (epoch) this asset was certified in MicroStrategy, in milliseconds. """ - QLIK_IS_PUBLISHED: ClassVar[BooleanField] = BooleanField( - "qlikIsPublished", "qlikIsPublished" + MICRO_STRATEGY_LOCATION: ClassVar[KeywordField] = KeywordField( + "microStrategyLocation", "microStrategyLocation" ) """ - If the qlik object is published + Location of this asset in MicroStrategy. """ _convenience_properties: ClassVar[list[str]] = [ - "qlik_id", - "qlik_q_r_i", - "qlik_space_id", - "qlik_space_qualified_name", - "qlik_app_id", - "qlik_app_qualified_name", - "qlik_owner_id", - "qlik_is_published", + "micro_strategy_project_qualified_name", + "micro_strategy_project_name", + "micro_strategy_cube_qualified_names", + "micro_strategy_cube_names", + "micro_strategy_report_qualified_names", + "micro_strategy_report_names", + "micro_strategy_is_certified", + "micro_strategy_certified_by", + "micro_strategy_certified_at", + "micro_strategy_location", ] @property - def qlik_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_id + def micro_strategy_project_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_project_qualified_name + ) + + @micro_strategy_project_qualified_name.setter + def micro_strategy_project_qualified_name( + self, micro_strategy_project_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_project_qualified_name = ( + micro_strategy_project_qualified_name + ) + + @property + def micro_strategy_project_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_project_name + ) + + @micro_strategy_project_name.setter + def micro_strategy_project_name(self, micro_strategy_project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_project_name = micro_strategy_project_name + + @property + def micro_strategy_cube_qualified_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_cube_qualified_names + ) - @qlik_id.setter - def qlik_id(self, qlik_id: Optional[str]): + @micro_strategy_cube_qualified_names.setter + def micro_strategy_cube_qualified_names( + self, micro_strategy_cube_qualified_names: Optional[set[str]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_id = qlik_id + 
self.attributes.micro_strategy_cube_qualified_names = ( + micro_strategy_cube_qualified_names + ) @property - def qlik_q_r_i(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_q_r_i + def micro_strategy_cube_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_cube_names + ) - @qlik_q_r_i.setter - def qlik_q_r_i(self, qlik_q_r_i: Optional[str]): + @micro_strategy_cube_names.setter + def micro_strategy_cube_names(self, micro_strategy_cube_names: Optional[set[str]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_q_r_i = qlik_q_r_i + self.attributes.micro_strategy_cube_names = micro_strategy_cube_names @property - def qlik_space_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_space_id + def micro_strategy_report_qualified_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_report_qualified_names + ) - @qlik_space_id.setter - def qlik_space_id(self, qlik_space_id: Optional[str]): + @micro_strategy_report_qualified_names.setter + def micro_strategy_report_qualified_names( + self, micro_strategy_report_qualified_names: Optional[set[str]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_space_id = qlik_space_id + self.attributes.micro_strategy_report_qualified_names = ( + micro_strategy_report_qualified_names + ) @property - def qlik_space_qualified_name(self) -> Optional[str]: + def micro_strategy_report_names(self) -> Optional[set[str]]: return ( None if self.attributes is None - else self.attributes.qlik_space_qualified_name + else self.attributes.micro_strategy_report_names ) - @qlik_space_qualified_name.setter - def qlik_space_qualified_name(self, qlik_space_qualified_name: Optional[str]): + @micro_strategy_report_names.setter + def micro_strategy_report_names( + self, micro_strategy_report_names: Optional[set[str]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_space_qualified_name = qlik_space_qualified_name + self.attributes.micro_strategy_report_names = micro_strategy_report_names @property - def qlik_app_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_app_id + def micro_strategy_is_certified(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_is_certified + ) - @qlik_app_id.setter - def qlik_app_id(self, qlik_app_id: Optional[str]): + @micro_strategy_is_certified.setter + def micro_strategy_is_certified(self, micro_strategy_is_certified: Optional[bool]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_app_id = qlik_app_id + self.attributes.micro_strategy_is_certified = micro_strategy_is_certified @property - def qlik_app_qualified_name(self) -> Optional[str]: + def micro_strategy_certified_by(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.qlik_app_qualified_name + None + if self.attributes is None + else self.attributes.micro_strategy_certified_by ) - @qlik_app_qualified_name.setter - def qlik_app_qualified_name(self, qlik_app_qualified_name: Optional[str]): + @micro_strategy_certified_by.setter + def micro_strategy_certified_by(self, micro_strategy_certified_by: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - 
self.attributes.qlik_app_qualified_name = qlik_app_qualified_name + self.attributes.micro_strategy_certified_by = micro_strategy_certified_by @property - def qlik_owner_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_owner_id + def micro_strategy_certified_at(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_certified_at + ) - @qlik_owner_id.setter - def qlik_owner_id(self, qlik_owner_id: Optional[str]): + @micro_strategy_certified_at.setter + def micro_strategy_certified_at( + self, micro_strategy_certified_at: Optional[datetime] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_owner_id = qlik_owner_id + self.attributes.micro_strategy_certified_at = micro_strategy_certified_at @property - def qlik_is_published(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.qlik_is_published + def micro_strategy_location(self) -> Optional[list[dict[str, str]]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_location + ) - @qlik_is_published.setter - def qlik_is_published(self, qlik_is_published: Optional[bool]): + @micro_strategy_location.setter + def micro_strategy_location( + self, micro_strategy_location: Optional[list[dict[str, str]]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_is_published = qlik_is_published + self.attributes.micro_strategy_location = micro_strategy_location class Attributes(BI.Attributes): - qlik_id: Optional[str] = Field(None, description="", alias="qlikId") - qlik_q_r_i: Optional[str] = Field(None, description="", alias="qlikQRI") - qlik_space_id: Optional[str] = Field(None, description="", alias="qlikSpaceId") - qlik_space_qualified_name: Optional[str] = Field( - None, description="", alias="qlikSpaceQualifiedName" + micro_strategy_project_qualified_name: Optional[str] = Field( + None, description="", alias="microStrategyProjectQualifiedName" + ) + micro_strategy_project_name: Optional[str] = Field( + None, description="", alias="microStrategyProjectName" + ) + micro_strategy_cube_qualified_names: Optional[set[str]] = Field( + None, description="", alias="microStrategyCubeQualifiedNames" + ) + micro_strategy_cube_names: Optional[set[str]] = Field( + None, description="", alias="microStrategyCubeNames" + ) + micro_strategy_report_qualified_names: Optional[set[str]] = Field( + None, description="", alias="microStrategyReportQualifiedNames" + ) + micro_strategy_report_names: Optional[set[str]] = Field( + None, description="", alias="microStrategyReportNames" + ) + micro_strategy_is_certified: Optional[bool] = Field( + None, description="", alias="microStrategyIsCertified" + ) + micro_strategy_certified_by: Optional[str] = Field( + None, description="", alias="microStrategyCertifiedBy" ) - qlik_app_id: Optional[str] = Field(None, description="", alias="qlikAppId") - qlik_app_qualified_name: Optional[str] = Field( - None, description="", alias="qlikAppQualifiedName" + micro_strategy_certified_at: Optional[datetime] = Field( + None, description="", alias="microStrategyCertifiedAt" ) - qlik_owner_id: Optional[str] = Field(None, description="", alias="qlikOwnerId") - qlik_is_published: Optional[bool] = Field( - None, description="", alias="qlikIsPublished" + micro_strategy_location: Optional[list[dict[str, str]]] = Field( + None, description="", alias="microStrategyLocation" ) - attributes: "Qlik.Attributes" = Field( 
- default_factory=lambda: Qlik.Attributes(), + attributes: "MicroStrategy.Attributes" = Field( + default_factory=lambda: MicroStrategy.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -Qlik.Attributes.update_forward_refs() +MicroStrategy.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset53.py b/pyatlan/model/assets/asset53.py index a48a1d8a8..fd812c157 100644 --- a/pyatlan/model/assets/asset53.py +++ b/pyatlan/model/assets/asset53.py @@ -8,80 +8,192 @@ from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, +) -from .asset19 import SaaS +from .asset19 import BI -class Salesforce(SaaS): +class Qlik(BI): """Description""" - type_name: str = Field("Salesforce", allow_mutation=False) + type_name: str = Field("Qlik", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Salesforce": - raise ValueError("must be Salesforce") + if v != "Qlik": + raise ValueError("must be Qlik") return v def __setattr__(self, name, value): - if name in Salesforce._convenience_properties: + if name in Qlik._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - ORGANIZATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "organizationQualifiedName", "organizationQualifiedName" + QLIK_ID: ClassVar[KeywordField] = KeywordField("qlikId", "qlikId") + """ + Identifier of this asset, from Qlik. + """ + QLIK_QRI: ClassVar[KeywordTextField] = KeywordTextField( + "qlikQRI", "qlikQRI", "qlikQRI.text" + ) + """ + Unique QRI of this asset, from Qlik. + """ + QLIK_SPACE_ID: ClassVar[KeywordField] = KeywordField("qlikSpaceId", "qlikSpaceId") + """ + Identifier of the space in which this asset exists, from Qlik. + """ + QLIK_SPACE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "qlikSpaceQualifiedName", + "qlikSpaceQualifiedName", + "qlikSpaceQualifiedName.text", + ) + """ + Unique name of the space in which this asset exists. + """ + QLIK_APP_ID: ClassVar[KeywordField] = KeywordField("qlikAppId", "qlikAppId") + """ + Identifier of the app in which this asset belongs, from Qlik. + """ + QLIK_APP_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "qlikAppQualifiedName", "qlikAppQualifiedName", "qlikAppQualifiedName.text" ) """ - TBC + Unique name of the app where this asset belongs. """ - API_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "apiName", "apiName.keyword", "apiName" + QLIK_OWNER_ID: ClassVar[KeywordField] = KeywordField("qlikOwnerId", "qlikOwnerId") + """ + Identifier of the owner of this asset, in Qlik. + """ + QLIK_IS_PUBLISHED: ClassVar[BooleanField] = BooleanField( + "qlikIsPublished", "qlikIsPublished" ) """ - TBC + Whether this asset is published in Qlik (true) or not (false). 
""" _convenience_properties: ClassVar[list[str]] = [ - "organization_qualified_name", - "api_name", + "qlik_id", + "qlik_q_r_i", + "qlik_space_id", + "qlik_space_qualified_name", + "qlik_app_id", + "qlik_app_qualified_name", + "qlik_owner_id", + "qlik_is_published", ] @property - def organization_qualified_name(self) -> Optional[str]: + def qlik_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_id + + @qlik_id.setter + def qlik_id(self, qlik_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_id = qlik_id + + @property + def qlik_q_r_i(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_q_r_i + + @qlik_q_r_i.setter + def qlik_q_r_i(self, qlik_q_r_i: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_q_r_i = qlik_q_r_i + + @property + def qlik_space_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_space_id + + @qlik_space_id.setter + def qlik_space_id(self, qlik_space_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_space_id = qlik_space_id + + @property + def qlik_space_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.organization_qualified_name + else self.attributes.qlik_space_qualified_name ) - @organization_qualified_name.setter - def organization_qualified_name(self, organization_qualified_name: Optional[str]): + @qlik_space_qualified_name.setter + def qlik_space_qualified_name(self, qlik_space_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_space_qualified_name = qlik_space_qualified_name + + @property + def qlik_app_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_app_id + + @qlik_app_id.setter + def qlik_app_id(self, qlik_app_id: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.organization_qualified_name = organization_qualified_name + self.attributes.qlik_app_id = qlik_app_id @property - def api_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.api_name + def qlik_app_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.qlik_app_qualified_name + ) - @api_name.setter - def api_name(self, api_name: Optional[str]): + @qlik_app_qualified_name.setter + def qlik_app_qualified_name(self, qlik_app_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.api_name = api_name + self.attributes.qlik_app_qualified_name = qlik_app_qualified_name - class Attributes(SaaS.Attributes): - organization_qualified_name: Optional[str] = Field( - None, description="", alias="organizationQualifiedName" + @property + def qlik_owner_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_owner_id + + @qlik_owner_id.setter + def qlik_owner_id(self, qlik_owner_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_owner_id = qlik_owner_id + + @property + def qlik_is_published(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.qlik_is_published + + @qlik_is_published.setter + def qlik_is_published(self, 
qlik_is_published: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_is_published = qlik_is_published + + class Attributes(BI.Attributes): + qlik_id: Optional[str] = Field(None, description="", alias="qlikId") + qlik_q_r_i: Optional[str] = Field(None, description="", alias="qlikQRI") + qlik_space_id: Optional[str] = Field(None, description="", alias="qlikSpaceId") + qlik_space_qualified_name: Optional[str] = Field( + None, description="", alias="qlikSpaceQualifiedName" + ) + qlik_app_id: Optional[str] = Field(None, description="", alias="qlikAppId") + qlik_app_qualified_name: Optional[str] = Field( + None, description="", alias="qlikAppQualifiedName" + ) + qlik_owner_id: Optional[str] = Field(None, description="", alias="qlikOwnerId") + qlik_is_published: Optional[bool] = Field( + None, description="", alias="qlikIsPublished" ) - api_name: Optional[str] = Field(None, description="", alias="apiName") - attributes: "Salesforce.Attributes" = Field( - default_factory=lambda: Salesforce.Attributes(), + attributes: "Qlik.Attributes" = Field( + default_factory=lambda: Qlik.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -Salesforce.Attributes.update_forward_refs() +Qlik.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset54.py b/pyatlan/model/assets/asset54.py index 1595713ce..bad1391be 100644 --- a/pyatlan/model/assets/asset54.py +++ b/pyatlan/model/assets/asset54.py @@ -8,71 +8,80 @@ from pydantic import Field, validator -from pyatlan.model.enums import IconType -from pyatlan.model.fields.atlan_fields import KeywordField +from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField -from .asset00 import Resource +from .asset20 import SaaS -class ReadmeTemplate(Resource): +class Salesforce(SaaS): """Description""" - type_name: str = Field("ReadmeTemplate", allow_mutation=False) + type_name: str = Field("Salesforce", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "ReadmeTemplate": - raise ValueError("must be ReadmeTemplate") + if v != "Salesforce": + raise ValueError("must be Salesforce") return v def __setattr__(self, name, value): - if name in ReadmeTemplate._convenience_properties: + if name in Salesforce._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - ICON: ClassVar[KeywordField] = KeywordField("icon", "icon") + ORGANIZATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "organizationQualifiedName", "organizationQualifiedName" + ) """ - TBC + Fully-qualified name of the organization in Salesforce. """ - ICON_TYPE: ClassVar[KeywordField] = KeywordField("iconType", "iconType") + API_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "apiName", "apiName.keyword", "apiName" + ) """ - TBC + Name of this asset in the Salesforce API. 
""" _convenience_properties: ClassVar[list[str]] = [ - "icon", - "icon_type", + "organization_qualified_name", + "api_name", ] @property - def icon(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.icon - - @icon.setter - def icon(self, icon: Optional[str]): + def organization_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.organization_qualified_name + ) + + @organization_qualified_name.setter + def organization_qualified_name(self, organization_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.icon = icon + self.attributes.organization_qualified_name = organization_qualified_name @property - def icon_type(self) -> Optional[IconType]: - return None if self.attributes is None else self.attributes.icon_type + def api_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.api_name - @icon_type.setter - def icon_type(self, icon_type: Optional[IconType]): + @api_name.setter + def api_name(self, api_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.icon_type = icon_type + self.attributes.api_name = api_name - class Attributes(Resource.Attributes): - icon: Optional[str] = Field(None, description="", alias="icon") - icon_type: Optional[IconType] = Field(None, description="", alias="iconType") + class Attributes(SaaS.Attributes): + organization_qualified_name: Optional[str] = Field( + None, description="", alias="organizationQualifiedName" + ) + api_name: Optional[str] = Field(None, description="", alias="apiName") - attributes: "ReadmeTemplate.Attributes" = Field( - default_factory=lambda: ReadmeTemplate.Attributes(), + attributes: "Salesforce.Attributes" = Field( + default_factory=lambda: Salesforce.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -ReadmeTemplate.Attributes.update_forward_refs() +Salesforce.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset55.py b/pyatlan/model/assets/asset55.py index ae42b909e..a3a6a5b11 100644 --- a/pyatlan/model/assets/asset55.py +++ b/pyatlan/model/assets/asset55.py @@ -4,30 +4,75 @@ from __future__ import annotations -from typing import ClassVar +from typing import ClassVar, Optional from pydantic import Field, validator -from .asset22 import EventStore +from pyatlan.model.enums import IconType +from pyatlan.model.fields.atlan_fields import KeywordField +from .asset00 import Resource -class Kafka(EventStore): + +class ReadmeTemplate(Resource): """Description""" - type_name: str = Field("Kafka", allow_mutation=False) + type_name: str = Field("ReadmeTemplate", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Kafka": - raise ValueError("must be Kafka") + if v != "ReadmeTemplate": + raise ValueError("must be ReadmeTemplate") return v def __setattr__(self, name, value): - if name in Kafka._convenience_properties: + if name in ReadmeTemplate._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - _convenience_properties: ClassVar[list[str]] = [] + ICON: ClassVar[KeywordField] = KeywordField("icon", "icon") + """ + Icon to use for the README template. 
+ """ + ICON_TYPE: ClassVar[KeywordField] = KeywordField("iconType", "iconType") + """ + Type of icon, for example: image or emoji. + """ + + _convenience_properties: ClassVar[list[str]] = [ + "icon", + "icon_type", + ] + + @property + def icon(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.icon + + @icon.setter + def icon(self, icon: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.icon = icon + + @property + def icon_type(self) -> Optional[IconType]: + return None if self.attributes is None else self.attributes.icon_type + + @icon_type.setter + def icon_type(self, icon_type: Optional[IconType]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.icon_type = icon_type + + class Attributes(Resource.Attributes): + icon: Optional[str] = Field(None, description="", alias="icon") + icon_type: Optional[IconType] = Field(None, description="", alias="iconType") + + attributes: "ReadmeTemplate.Attributes" = Field( + default_factory=lambda: ReadmeTemplate.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) -Kafka.Attributes.update_forward_refs() +ReadmeTemplate.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset56.py b/pyatlan/model/assets/asset56.py index 2aeec904a..7eabcaf30 100644 --- a/pyatlan/model/assets/asset56.py +++ b/pyatlan/model/assets/asset56.py @@ -8,26 +8,26 @@ from pydantic import Field, validator -from .asset23 import NoSQL +from .asset23 import EventStore -class MongoDB(NoSQL): +class Kafka(EventStore): """Description""" - type_name: str = Field("MongoDB", allow_mutation=False) + type_name: str = Field("Kafka", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "MongoDB": - raise ValueError("must be MongoDB") + if v != "Kafka": + raise ValueError("must be Kafka") return v def __setattr__(self, name, value): - if name in MongoDB._convenience_properties: + if name in Kafka._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] -MongoDB.Attributes.update_forward_refs() +Kafka.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset57.py b/pyatlan/model/assets/asset57.py index e9d3b6a83..3c59a1028 100644 --- a/pyatlan/model/assets/asset57.py +++ b/pyatlan/model/assets/asset57.py @@ -4,500 +4,161 @@ from __future__ import annotations -from datetime import datetime from typing import ClassVar, Optional from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import ( - KeywordField, - KeywordTextField, - NumericField, -) -from pyatlan.model.structs import SourceTagAttribute +from pyatlan.model.enums import DynamoDBStatus +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField -from .asset00 import Dbt +from .asset24 import NoSQL -class DbtTag(Dbt): +class DynamoDB(NoSQL): """Description""" - type_name: str = Field("DbtTag", allow_mutation=False) + type_name: str = Field("DynamoDB", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "DbtTag": - raise ValueError("must be DbtTag") + if v != "DynamoDB": + raise ValueError("must be DynamoDB") return v def __setattr__(self, name, value): - if name in DbtTag._convenience_properties: + if name in DynamoDB._convenience_properties: return 
object.__setattr__(self, name, value) super().__setattr__(name, value) - DBT_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( - "dbtAlias", "dbtAlias.keyword", "dbtAlias" + DYNAMO_DB_STATUS: ClassVar[KeywordField] = KeywordField( + "dynamoDBStatus", "dynamoDBStatus" ) """ - TBC + Status of the DynamoDB Asset """ - DBT_META: ClassVar[KeywordField] = KeywordField("dbtMeta", "dbtMeta") - """ - TBC - """ - DBT_UNIQUE_ID: ClassVar[KeywordTextField] = KeywordTextField( - "dbtUniqueId", "dbtUniqueId.keyword", "dbtUniqueId" - ) - """ - TBC - """ - DBT_ACCOUNT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtAccountName", "dbtAccountName.keyword", "dbtAccountName" - ) - """ - TBC - """ - DBT_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtProjectName", "dbtProjectName.keyword", "dbtProjectName" - ) - """ - TBC - """ - DBT_PACKAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtPackageName", "dbtPackageName.keyword", "dbtPackageName" - ) - """ - TBC - """ - DBT_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtJobName", "dbtJobName.keyword", "dbtJobName" - ) - """ - TBC - """ - DBT_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( - "dbtJobSchedule", "dbtJobSchedule" - ) - """ - TBC - """ - DBT_JOB_STATUS: ClassVar[KeywordField] = KeywordField( - "dbtJobStatus", "dbtJobStatus" - ) - """ - TBC - """ - DBT_JOB_SCHEDULE_CRON_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( - "dbtJobScheduleCronHumanized", - "dbtJobScheduleCronHumanized.keyword", - "dbtJobScheduleCronHumanized", - ) - """ - TBC - """ - DBT_JOB_LAST_RUN: ClassVar[NumericField] = NumericField( - "dbtJobLastRun", "dbtJobLastRun" - ) - """ - TBC - """ - DBT_JOB_NEXT_RUN: ClassVar[NumericField] = NumericField( - "dbtJobNextRun", "dbtJobNextRun" - ) - """ - TBC - """ - DBT_JOB_NEXT_RUN_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( - "dbtJobNextRunHumanized", - "dbtJobNextRunHumanized.keyword", - "dbtJobNextRunHumanized", - ) - """ - TBC - """ - DBT_ENVIRONMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtEnvironmentName", "dbtEnvironmentName.keyword", "dbtEnvironmentName" - ) - """ - TBC - """ - DBT_ENVIRONMENT_DBT_VERSION: ClassVar[KeywordTextField] = KeywordTextField( - "dbtEnvironmentDbtVersion", - "dbtEnvironmentDbtVersion.keyword", - "dbtEnvironmentDbtVersion", - ) - """ - TBC - """ - DBT_TAGS: ClassVar[KeywordField] = KeywordField("dbtTags", "dbtTags") - """ - TBC - """ - DBT_CONNECTION_CONTEXT: ClassVar[KeywordField] = KeywordField( - "dbtConnectionContext", "dbtConnectionContext" - ) - """ - TBC - """ - DBT_SEMANTIC_LAYER_PROXY_URL: ClassVar[KeywordField] = KeywordField( - "dbtSemanticLayerProxyUrl", "dbtSemanticLayerProxyUrl" + DYNAMO_DB_PARTITION_KEY: ClassVar[KeywordField] = KeywordField( + "dynamoDBPartitionKey", "dynamoDBPartitionKey" ) """ - TBC - """ - TAG_ID: ClassVar[KeywordField] = KeywordField("tagId", "tagId") + Specifies the partition key of the DynamoDB Table/Index """ - Unique source tag identifier - """ - TAG_ATTRIBUTES: ClassVar[KeywordField] = KeywordField( - "tagAttributes", "tagAttributes" + DYNAMO_DB_SORT_KEY: ClassVar[KeywordField] = KeywordField( + "dynamoDBSortKey", "dynamoDBSortKey" ) """ - Source tag attributes + Specifies the sort key of the DynamoDB Table/Index """ - TAG_ALLOWED_VALUES: ClassVar[KeywordTextField] = KeywordTextField( - "tagAllowedValues", "tagAllowedValues", "tagAllowedValues.text" + DYNAMO_DB_READ_CAPACITY_UNITS: ClassVar[NumericField] = NumericField( + "dynamoDBReadCapacityUnits", 
"dynamoDBReadCapacityUnits" ) """ - Allowed values for the tag at source. De-normalised from sourceTagAttributed for ease of querying + The maximum number of strongly consistent reads consumed per second before DynamoDB returns a ThrottlingException """ - MAPPED_CLASSIFICATION_NAME: ClassVar[KeywordField] = KeywordField( - "mappedClassificationName", "mappedClassificationName" + DYNAMO_DB_WRITE_CAPACITY_UNITS: ClassVar[NumericField] = NumericField( + "dynamoDBWriteCapacityUnits", "dynamoDBWriteCapacityUnits" ) """ - Mapped atlan classification name + The maximum number of writes consumed per second before DynamoDB returns a ThrottlingException """ _convenience_properties: ClassVar[list[str]] = [ - "dbt_alias", - "dbt_meta", - "dbt_unique_id", - "dbt_account_name", - "dbt_project_name", - "dbt_package_name", - "dbt_job_name", - "dbt_job_schedule", - "dbt_job_status", - "dbt_job_schedule_cron_humanized", - "dbt_job_last_run", - "dbt_job_next_run", - "dbt_job_next_run_humanized", - "dbt_environment_name", - "dbt_environment_dbt_version", - "dbt_tags", - "dbt_connection_context", - "dbt_semantic_layer_proxy_url", - "tag_id", - "tag_attributes", - "tag_allowed_values", - "mapped_atlan_tag_name", + "dynamo_d_b_status", + "dynamo_d_b_partition_key", + "dynamo_d_b_sort_key", + "dynamo_d_b_read_capacity_units", + "dynamo_d_b_write_capacity_units", ] @property - def dbt_alias(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_alias - - @dbt_alias.setter - def dbt_alias(self, dbt_alias: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_alias = dbt_alias - - @property - def dbt_meta(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_meta - - @dbt_meta.setter - def dbt_meta(self, dbt_meta: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_meta = dbt_meta - - @property - def dbt_unique_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_unique_id - - @dbt_unique_id.setter - def dbt_unique_id(self, dbt_unique_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_unique_id = dbt_unique_id - - @property - def dbt_account_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_account_name - - @dbt_account_name.setter - def dbt_account_name(self, dbt_account_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_account_name = dbt_account_name - - @property - def dbt_project_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_project_name - - @dbt_project_name.setter - def dbt_project_name(self, dbt_project_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_project_name = dbt_project_name - - @property - def dbt_package_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_package_name - - @dbt_package_name.setter - def dbt_package_name(self, dbt_package_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_package_name = dbt_package_name - - @property - def dbt_job_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_job_name + def dynamo_d_b_status(self) -> Optional[DynamoDBStatus]: + 
return None if self.attributes is None else self.attributes.dynamo_d_b_status - @dbt_job_name.setter - def dbt_job_name(self, dbt_job_name: Optional[str]): + @dynamo_d_b_status.setter + def dynamo_d_b_status(self, dynamo_d_b_status: Optional[DynamoDBStatus]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dbt_job_name = dbt_job_name + self.attributes.dynamo_d_b_status = dynamo_d_b_status @property - def dbt_job_schedule(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_job_schedule - - @dbt_job_schedule.setter - def dbt_job_schedule(self, dbt_job_schedule: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_schedule = dbt_job_schedule - - @property - def dbt_job_status(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_job_status - - @dbt_job_status.setter - def dbt_job_status(self, dbt_job_status: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_status = dbt_job_status - - @property - def dbt_job_schedule_cron_humanized(self) -> Optional[str]: + def dynamo_d_b_partition_key(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.dbt_job_schedule_cron_humanized + else self.attributes.dynamo_d_b_partition_key ) - @dbt_job_schedule_cron_humanized.setter - def dbt_job_schedule_cron_humanized( - self, dbt_job_schedule_cron_humanized: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_schedule_cron_humanized = ( - dbt_job_schedule_cron_humanized - ) - - @property - def dbt_job_last_run(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.dbt_job_last_run - - @dbt_job_last_run.setter - def dbt_job_last_run(self, dbt_job_last_run: Optional[datetime]): + @dynamo_d_b_partition_key.setter + def dynamo_d_b_partition_key(self, dynamo_d_b_partition_key: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dbt_job_last_run = dbt_job_last_run + self.attributes.dynamo_d_b_partition_key = dynamo_d_b_partition_key @property - def dbt_job_next_run(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.dbt_job_next_run + def dynamo_d_b_sort_key(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dynamo_d_b_sort_key - @dbt_job_next_run.setter - def dbt_job_next_run(self, dbt_job_next_run: Optional[datetime]): + @dynamo_d_b_sort_key.setter + def dynamo_d_b_sort_key(self, dynamo_d_b_sort_key: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dbt_job_next_run = dbt_job_next_run + self.attributes.dynamo_d_b_sort_key = dynamo_d_b_sort_key @property - def dbt_job_next_run_humanized(self) -> Optional[str]: + def dynamo_d_b_read_capacity_units(self) -> Optional[int]: return ( None if self.attributes is None - else self.attributes.dbt_job_next_run_humanized + else self.attributes.dynamo_d_b_read_capacity_units ) - @dbt_job_next_run_humanized.setter - def dbt_job_next_run_humanized(self, dbt_job_next_run_humanized: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_next_run_humanized = dbt_job_next_run_humanized - - @property - def dbt_environment_name(self) -> Optional[str]: - return None if self.attributes is None else 
self.attributes.dbt_environment_name - - @dbt_environment_name.setter - def dbt_environment_name(self, dbt_environment_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_environment_name = dbt_environment_name - - @property - def dbt_environment_dbt_version(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_environment_dbt_version - ) - - @dbt_environment_dbt_version.setter - def dbt_environment_dbt_version(self, dbt_environment_dbt_version: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_environment_dbt_version = dbt_environment_dbt_version - - @property - def dbt_tags(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.dbt_tags - - @dbt_tags.setter - def dbt_tags(self, dbt_tags: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_tags = dbt_tags - - @property - def dbt_connection_context(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dbt_connection_context - ) - - @dbt_connection_context.setter - def dbt_connection_context(self, dbt_connection_context: Optional[str]): + @dynamo_d_b_read_capacity_units.setter + def dynamo_d_b_read_capacity_units( + self, dynamo_d_b_read_capacity_units: Optional[int] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dbt_connection_context = dbt_connection_context + self.attributes.dynamo_d_b_read_capacity_units = dynamo_d_b_read_capacity_units @property - def dbt_semantic_layer_proxy_url(self) -> Optional[str]: + def dynamo_d_b_write_capacity_units(self) -> Optional[int]: return ( None if self.attributes is None - else self.attributes.dbt_semantic_layer_proxy_url + else self.attributes.dynamo_d_b_write_capacity_units ) - @dbt_semantic_layer_proxy_url.setter - def dbt_semantic_layer_proxy_url(self, dbt_semantic_layer_proxy_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_semantic_layer_proxy_url = dbt_semantic_layer_proxy_url - - @property - def tag_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.tag_id - - @tag_id.setter - def tag_id(self, tag_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tag_id = tag_id - - @property - def tag_attributes(self) -> Optional[list[SourceTagAttribute]]: - return None if self.attributes is None else self.attributes.tag_attributes - - @tag_attributes.setter - def tag_attributes(self, tag_attributes: Optional[list[SourceTagAttribute]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tag_attributes = tag_attributes - - @property - def tag_allowed_values(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.tag_allowed_values - - @tag_allowed_values.setter - def tag_allowed_values(self, tag_allowed_values: Optional[set[str]]): + @dynamo_d_b_write_capacity_units.setter + def dynamo_d_b_write_capacity_units( + self, dynamo_d_b_write_capacity_units: Optional[int] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.tag_allowed_values = tag_allowed_values - - @property - def mapped_atlan_tag_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.mapped_atlan_tag_name + 
self.attributes.dynamo_d_b_write_capacity_units = ( + dynamo_d_b_write_capacity_units ) - @mapped_atlan_tag_name.setter - def mapped_atlan_tag_name(self, mapped_atlan_tag_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mapped_atlan_tag_name = mapped_atlan_tag_name - - class Attributes(Dbt.Attributes): - dbt_alias: Optional[str] = Field(None, description="", alias="dbtAlias") - dbt_meta: Optional[str] = Field(None, description="", alias="dbtMeta") - dbt_unique_id: Optional[str] = Field(None, description="", alias="dbtUniqueId") - dbt_account_name: Optional[str] = Field( - None, description="", alias="dbtAccountName" - ) - dbt_project_name: Optional[str] = Field( - None, description="", alias="dbtProjectName" - ) - dbt_package_name: Optional[str] = Field( - None, description="", alias="dbtPackageName" - ) - dbt_job_name: Optional[str] = Field(None, description="", alias="dbtJobName") - dbt_job_schedule: Optional[str] = Field( - None, description="", alias="dbtJobSchedule" - ) - dbt_job_status: Optional[str] = Field( - None, description="", alias="dbtJobStatus" - ) - dbt_job_schedule_cron_humanized: Optional[str] = Field( - None, description="", alias="dbtJobScheduleCronHumanized" - ) - dbt_job_last_run: Optional[datetime] = Field( - None, description="", alias="dbtJobLastRun" - ) - dbt_job_next_run: Optional[datetime] = Field( - None, description="", alias="dbtJobNextRun" - ) - dbt_job_next_run_humanized: Optional[str] = Field( - None, description="", alias="dbtJobNextRunHumanized" - ) - dbt_environment_name: Optional[str] = Field( - None, description="", alias="dbtEnvironmentName" - ) - dbt_environment_dbt_version: Optional[str] = Field( - None, description="", alias="dbtEnvironmentDbtVersion" - ) - dbt_tags: Optional[set[str]] = Field(None, description="", alias="dbtTags") - dbt_connection_context: Optional[str] = Field( - None, description="", alias="dbtConnectionContext" + class Attributes(NoSQL.Attributes): + dynamo_d_b_status: Optional[DynamoDBStatus] = Field( + None, description="", alias="dynamoDBStatus" ) - dbt_semantic_layer_proxy_url: Optional[str] = Field( - None, description="", alias="dbtSemanticLayerProxyUrl" + dynamo_d_b_partition_key: Optional[str] = Field( + None, description="", alias="dynamoDBPartitionKey" ) - tag_id: Optional[str] = Field(None, description="", alias="tagId") - tag_attributes: Optional[list[SourceTagAttribute]] = Field( - None, description="", alias="tagAttributes" + dynamo_d_b_sort_key: Optional[str] = Field( + None, description="", alias="dynamoDBSortKey" ) - tag_allowed_values: Optional[set[str]] = Field( - None, description="", alias="tagAllowedValues" + dynamo_d_b_read_capacity_units: Optional[int] = Field( + None, description="", alias="dynamoDBReadCapacityUnits" ) - mapped_atlan_tag_name: Optional[str] = Field( - None, description="", alias="mappedClassificationName" + dynamo_d_b_write_capacity_units: Optional[int] = Field( + None, description="", alias="dynamoDBWriteCapacityUnits" ) - attributes: "DbtTag.Attributes" = Field( - default_factory=lambda: DbtTag.Attributes(), + attributes: "DynamoDB.Attributes" = Field( + default_factory=lambda: DynamoDB.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -DbtTag.Attributes.update_forward_refs() +DynamoDB.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset58.py b/pyatlan/model/assets/asset58.py index bc453f5bf..8544433c8 100644 --- a/pyatlan/model/assets/asset58.py +++ b/pyatlan/model/assets/asset58.py @@ -4,522 +4,30 @@ from __future__ import annotations -from typing import ClassVar, Optional +from typing import ClassVar from pydantic import Field, validator -from pyatlan.model.enums import AtlanConnectorType -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - KeywordTextField, - RelationField, - TextField, -) -from pyatlan.utils import init_guid, validate_required_fields +from .asset24 import NoSQL -from .asset27 import API - -class APISpec(API): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str, connection_qualified_name: str) -> APISpec: - validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] - ) - attributes = APISpec.Attributes.create( - name=name, connection_qualified_name=connection_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("APISpec", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "APISpec": - raise ValueError("must be APISpec") - return v - - def __setattr__(self, name, value): - if name in APISpec._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - API_SPEC_TERMS_OF_SERVICE_URL: ClassVar[KeywordTextField] = KeywordTextField( - "apiSpecTermsOfServiceURL", - "apiSpecTermsOfServiceURL", - "apiSpecTermsOfServiceURL.text", - ) - """ - TBC - """ - API_SPEC_CONTACT_EMAIL: ClassVar[KeywordTextField] = KeywordTextField( - "apiSpecContactEmail", "apiSpecContactEmail", "apiSpecContactEmail.text" - ) - """ - TBC - """ - API_SPEC_CONTACT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "apiSpecContactName", "apiSpecContactName.keyword", "apiSpecContactName" - ) - """ - TBC - """ - API_SPEC_CONTACT_URL: ClassVar[KeywordTextField] = KeywordTextField( - "apiSpecContactURL", "apiSpecContactURL", "apiSpecContactURL.text" - ) - """ - TBC - """ - API_SPEC_LICENSE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "apiSpecLicenseName", "apiSpecLicenseName.keyword", "apiSpecLicenseName" - ) - """ - TBC - """ - API_SPEC_LICENSE_URL: ClassVar[KeywordTextField] = KeywordTextField( - "apiSpecLicenseURL", "apiSpecLicenseURL", "apiSpecLicenseURL.text" - ) - """ - TBC - """ - API_SPEC_CONTRACT_VERSION: ClassVar[KeywordField] = KeywordField( - "apiSpecContractVersion", "apiSpecContractVersion" - ) - """ - TBC - """ - API_SPEC_SERVICE_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( - "apiSpecServiceAlias", "apiSpecServiceAlias", "apiSpecServiceAlias.text" - ) - """ - TBC - """ - - API_PATHS: ClassVar[RelationField] = RelationField("apiPaths") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "api_spec_terms_of_service_url", - "api_spec_contact_email", - "api_spec_contact_name", - "api_spec_contact_url", - "api_spec_license_name", - "api_spec_license_url", - "api_spec_contract_version", - "api_spec_service_alias", - "api_paths", - ] - - @property - def api_spec_terms_of_service_url(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.api_spec_terms_of_service_url - ) - - 
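Taken together, the DynamoDB fields introduced above behave like any other generated convenience properties: each setter lazily creates the `Attributes` model and writes through to the aliased attribute name. A minimal sketch, assuming a concrete subclass such as `DynamoDBTable` inherits these properties and that `DynamoDBStatus` exposes an `ACTIVE` member (neither is shown in this hunk):

```python
# Illustrative only; class and enum member availability are assumptions noted above.
from pyatlan.model.assets import DynamoDBTable
from pyatlan.model.enums import DynamoDBStatus

table = DynamoDBTable()
table.dynamo_d_b_status = DynamoDBStatus.ACTIVE          # assumed enum member
table.dynamo_d_b_partition_key = "order_id"
table.dynamo_d_b_sort_key = "created_at"
table.dynamo_d_b_read_capacity_units = 5
table.dynamo_d_b_write_capacity_units = 5

# Each setter above creates `attributes` on demand, so the values land on the
# aliased fields (dynamoDBPartitionKey, dynamoDBReadCapacityUnits, ...).
assert table.attributes.dynamo_d_b_partition_key == "order_id"
```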
@api_spec_terms_of_service_url.setter - def api_spec_terms_of_service_url( - self, api_spec_terms_of_service_url: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_terms_of_service_url = api_spec_terms_of_service_url - - @property - def api_spec_contact_email(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.api_spec_contact_email - ) - - @api_spec_contact_email.setter - def api_spec_contact_email(self, api_spec_contact_email: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_contact_email = api_spec_contact_email - - @property - def api_spec_contact_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.api_spec_contact_name - ) - - @api_spec_contact_name.setter - def api_spec_contact_name(self, api_spec_contact_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_contact_name = api_spec_contact_name - - @property - def api_spec_contact_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.api_spec_contact_url - - @api_spec_contact_url.setter - def api_spec_contact_url(self, api_spec_contact_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_contact_url = api_spec_contact_url - - @property - def api_spec_license_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.api_spec_license_name - ) - - @api_spec_license_name.setter - def api_spec_license_name(self, api_spec_license_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_license_name = api_spec_license_name - - @property - def api_spec_license_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.api_spec_license_url - - @api_spec_license_url.setter - def api_spec_license_url(self, api_spec_license_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_license_url = api_spec_license_url - - @property - def api_spec_contract_version(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.api_spec_contract_version - ) - - @api_spec_contract_version.setter - def api_spec_contract_version(self, api_spec_contract_version: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_contract_version = api_spec_contract_version - - @property - def api_spec_service_alias(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.api_spec_service_alias - ) - - @api_spec_service_alias.setter - def api_spec_service_alias(self, api_spec_service_alias: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_service_alias = api_spec_service_alias - - @property - def api_paths(self) -> Optional[list[APIPath]]: - return None if self.attributes is None else self.attributes.api_paths - - @api_paths.setter - def api_paths(self, api_paths: Optional[list[APIPath]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_paths = api_paths - - class Attributes(API.Attributes): - api_spec_terms_of_service_url: Optional[str] = Field( - None, description="", alias="apiSpecTermsOfServiceURL" - 
) - api_spec_contact_email: Optional[str] = Field( - None, description="", alias="apiSpecContactEmail" - ) - api_spec_contact_name: Optional[str] = Field( - None, description="", alias="apiSpecContactName" - ) - api_spec_contact_url: Optional[str] = Field( - None, description="", alias="apiSpecContactURL" - ) - api_spec_license_name: Optional[str] = Field( - None, description="", alias="apiSpecLicenseName" - ) - api_spec_license_url: Optional[str] = Field( - None, description="", alias="apiSpecLicenseURL" - ) - api_spec_contract_version: Optional[str] = Field( - None, description="", alias="apiSpecContractVersion" - ) - api_spec_service_alias: Optional[str] = Field( - None, description="", alias="apiSpecServiceAlias" - ) - api_paths: Optional[list[APIPath]] = Field( - None, description="", alias="apiPaths" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, connection_qualified_name: str - ) -> APISpec.Attributes: - validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] - ) - - # Split the connection_qualified_name to extract necessary information - fields = connection_qualified_name.split("/") - if len(fields) != 3: - raise ValueError("Invalid connection_qualified_name") - - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid connection_qualified_name") from e - - return APISpec.Attributes( - name=name, - qualified_name=f"{connection_qualified_name}/{name}", - connection_qualified_name=connection_qualified_name, - connector_name=connector_type.value, - ) - - attributes: "APISpec.Attributes" = Field( - default_factory=lambda: APISpec.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class APIPath(API): +class MongoDB(NoSQL): """Description""" - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, path_raw_uri: str, spec_qualified_name: str) -> APIPath: - validate_required_fields( - ["path_raw_uri", "spec_qualified_name"], [path_raw_uri, spec_qualified_name] - ) - attributes = APIPath.Attributes.create( - path_raw_uri=path_raw_uri, spec_qualified_name=spec_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("APIPath", allow_mutation=False) + type_name: str = Field("MongoDB", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "APIPath": - raise ValueError("must be APIPath") + if v != "MongoDB": + raise ValueError("must be MongoDB") return v def __setattr__(self, name, value): - if name in APIPath._convenience_properties: + if name in MongoDB._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - API_PATH_SUMMARY: ClassVar[TextField] = TextField( - "apiPathSummary", "apiPathSummary" - ) - """ - TBC - """ - API_PATH_RAW_URI: ClassVar[KeywordTextField] = KeywordTextField( - "apiPathRawURI", "apiPathRawURI", "apiPathRawURI.text" - ) - """ - TBC - """ - API_PATH_IS_TEMPLATED: ClassVar[BooleanField] = BooleanField( - "apiPathIsTemplated", "apiPathIsTemplated" - ) - """ - TBC - """ - API_PATH_AVAILABLE_OPERATIONS: ClassVar[KeywordField] = KeywordField( - "apiPathAvailableOperations", "apiPathAvailableOperations" - ) - """ - TBC - """ - API_PATH_AVAILABLE_RESPONSE_CODES: ClassVar[KeywordField] = KeywordField( - "apiPathAvailableResponseCodes", "apiPathAvailableResponseCodes" - ) - """ - TBC - """ - API_PATH_IS_INGRESS_EXPOSED: ClassVar[BooleanField] = BooleanField( - "apiPathIsIngressExposed", "apiPathIsIngressExposed" - ) - """ - TBC - """ - - API_SPEC: ClassVar[RelationField] = RelationField("apiSpec") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "api_path_summary", - "api_path_raw_u_r_i", - "api_path_is_templated", - "api_path_available_operations", - "api_path_available_response_codes", - "api_path_is_ingress_exposed", - "api_spec", - ] - - @property - def api_path_summary(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.api_path_summary - - @api_path_summary.setter - def api_path_summary(self, api_path_summary: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_path_summary = api_path_summary - - @property - def api_path_raw_u_r_i(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.api_path_raw_u_r_i - - @api_path_raw_u_r_i.setter - def api_path_raw_u_r_i(self, api_path_raw_u_r_i: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_path_raw_u_r_i = api_path_raw_u_r_i - - @property - def api_path_is_templated(self) -> Optional[bool]: - return ( - None if self.attributes is None else self.attributes.api_path_is_templated - ) - - @api_path_is_templated.setter - def api_path_is_templated(self, api_path_is_templated: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_path_is_templated = api_path_is_templated - - @property - def api_path_available_operations(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else 
self.attributes.api_path_available_operations - ) - - @api_path_available_operations.setter - def api_path_available_operations( - self, api_path_available_operations: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_path_available_operations = api_path_available_operations - - @property - def api_path_available_response_codes(self) -> Optional[dict[str, str]]: - return ( - None - if self.attributes is None - else self.attributes.api_path_available_response_codes - ) - - @api_path_available_response_codes.setter - def api_path_available_response_codes( - self, api_path_available_response_codes: Optional[dict[str, str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_path_available_response_codes = ( - api_path_available_response_codes - ) - - @property - def api_path_is_ingress_exposed(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.api_path_is_ingress_exposed - ) - - @api_path_is_ingress_exposed.setter - def api_path_is_ingress_exposed(self, api_path_is_ingress_exposed: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_path_is_ingress_exposed = api_path_is_ingress_exposed - - @property - def api_spec(self) -> Optional[APISpec]: - return None if self.attributes is None else self.attributes.api_spec - - @api_spec.setter - def api_spec(self, api_spec: Optional[APISpec]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec = api_spec - - class Attributes(API.Attributes): - api_path_summary: Optional[str] = Field( - None, description="", alias="apiPathSummary" - ) - api_path_raw_u_r_i: Optional[str] = Field( - None, description="", alias="apiPathRawURI" - ) - api_path_is_templated: Optional[bool] = Field( - None, description="", alias="apiPathIsTemplated" - ) - api_path_available_operations: Optional[set[str]] = Field( - None, description="", alias="apiPathAvailableOperations" - ) - api_path_available_response_codes: Optional[dict[str, str]] = Field( - None, description="", alias="apiPathAvailableResponseCodes" - ) - api_path_is_ingress_exposed: Optional[bool] = Field( - None, description="", alias="apiPathIsIngressExposed" - ) - api_spec: Optional[APISpec] = Field( - None, description="", alias="apiSpec" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, path_raw_uri: str, spec_qualified_name: str - ) -> APIPath.Attributes: - validate_required_fields( - ["path_raw_uri", "spec_qualified_name"], - [path_raw_uri, spec_qualified_name], - ) - - # Split the spec_qualified_name to extract necessary information - fields = spec_qualified_name.split("/") - if len(fields) != 4: - raise ValueError("Invalid spec_qualified_name") - - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid spec_qualified_name") from e - - return APIPath.Attributes( - api_path_raw_u_r_i=path_raw_uri, - name=path_raw_uri, - api_spec_qualified_name=spec_qualified_name, - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - qualified_name=f"{spec_qualified_name}{path_raw_uri}", - connector_name=connector_type.value, - apiSpec=APISpec.ref_by_qualified_name(spec_qualified_name), - ) - - attributes: "APIPath.Attributes" = Field( - default_factory=lambda: APIPath.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -APISpec.Attributes.update_forward_refs() + _convenience_properties: ClassVar[list[str]] = [] -APIPath.Attributes.update_forward_refs() +MongoDB.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset59.py b/pyatlan/model/assets/asset59.py index 81e10dfbb..68ed957b6 100644 --- a/pyatlan/model/assets/asset59.py +++ b/pyatlan/model/assets/asset59.py @@ -4,368 +4,500 @@ from __future__ import annotations -import uuid +from datetime import datetime from typing import ClassVar, Optional from pydantic import Field, validator -from pyatlan.model.enums import AtlanConnectorType, GoogleDatastudioAssetType from pyatlan.model.fields.atlan_fields import ( - BooleanField, KeywordField, KeywordTextField, - KeywordTextStemmedField, NumericField, ) -from pyatlan.model.structs import GoogleLabel, GoogleTag -from pyatlan.utils import init_guid, validate_required_fields +from pyatlan.model.structs import SourceTagAttribute -from .asset46 import DataStudio +from .asset00 import Dbt -class DataStudioAsset(DataStudio): +class DbtTag(Dbt): """Description""" - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, - *, - name: str, - connection_qualified_name: str, - data_studio_asset_type: GoogleDatastudioAssetType, - gdsid: Optional[str] = None, - ) -> DataStudioAsset: - validate_required_fields( - ["name", "connection_qualified_name", "data_studio_asset_type"], - [name, connection_qualified_name, data_studio_asset_type], - ) - if gdsid is None: - gdsid = str(uuid.uuid4()) - attributes = DataStudioAsset.Attributes.create( - name=name, - connection_qualified_name=connection_qualified_name, - data_studio_asset_type=data_studio_asset_type, - gdsid=gdsid, - ) - return cls(attributes=attributes) - - type_name: str = Field("DataStudioAsset", allow_mutation=False) + type_name: str = Field("DbtTag", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "DataStudioAsset": - raise ValueError("must be DataStudioAsset") + if v != "DbtTag": + raise ValueError("must be DbtTag") return v def __setattr__(self, name, value): - if name in DataStudioAsset._convenience_properties: + if name in DbtTag._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - DATA_STUDIO_ASSET_TYPE: ClassVar[KeywordField] = KeywordField( - "dataStudioAssetType", "dataStudioAssetType" + DBT_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( + "dbtAlias", "dbtAlias.keyword", "dbtAlias" ) """ - TBC + + """ + DBT_META: ClassVar[KeywordField] = KeywordField("dbtMeta", "dbtMeta") + """ + """ - DATA_STUDIO_ASSET_TITLE: ClassVar[ - KeywordTextStemmedField - ] = KeywordTextStemmedField( - "dataStudioAssetTitle", - "dataStudioAssetTitle.keyword", - "dataStudioAssetTitle", - "dataStudioAssetTitle.stemmed", + DBT_UNIQUE_ID: ClassVar[KeywordTextField] = KeywordTextField( + "dbtUniqueId", "dbtUniqueId.keyword", "dbtUniqueId" ) """ - TBC + """ - DATA_STUDIO_ASSET_OWNER: ClassVar[KeywordField] = KeywordField( - "dataStudioAssetOwner", "dataStudioAssetOwner" + DBT_ACCOUNT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtAccountName", "dbtAccountName.keyword", "dbtAccountName" ) """ - TBC + """ - IS_TRASHED_DATA_STUDIO_ASSET: ClassVar[BooleanField] = BooleanField( - "isTrashedDataStudioAsset", "isTrashedDataStudioAsset" + DBT_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtProjectName", 
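The rewritten asset58.py above now only declares the MongoDB stub, which pins its `type_name` the same way as the other generated classes in this change. A short sketch of that pinning, assuming MongoDB is re-exported from `pyatlan.model.assets` like its siblings; the regenerated DbtTag fields continue below:

```python
# Behaviour inferred from the validator shown above, not verified against a tenant.
from pydantic import ValidationError

from pyatlan.model.assets import MongoDB

mongo = MongoDB()
assert mongo.type_name == "MongoDB"

# Any other value is rejected by the type_name validator.
try:
    MongoDB(type_name="DynamoDB")
except ValidationError as err:
    print(err)  # contains "must be MongoDB"
```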
"dbtProjectName.keyword", "dbtProjectName" ) """ - TBC + """ - GOOGLE_SERVICE: ClassVar[KeywordField] = KeywordField( - "googleService", "googleService" + DBT_PACKAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtPackageName", "dbtPackageName.keyword", "dbtPackageName" ) """ - TBC + """ - GOOGLE_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "googleProjectName", "googleProjectName", "googleProjectName.text" + DBT_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtJobName", "dbtJobName.keyword", "dbtJobName" ) """ - TBC + """ - GOOGLE_PROJECT_ID: ClassVar[KeywordTextField] = KeywordTextField( - "googleProjectId", "googleProjectId", "googleProjectId.text" + DBT_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( + "dbtJobSchedule", "dbtJobSchedule" ) """ - TBC + """ - GOOGLE_PROJECT_NUMBER: ClassVar[NumericField] = NumericField( - "googleProjectNumber", "googleProjectNumber" + DBT_JOB_STATUS: ClassVar[KeywordField] = KeywordField( + "dbtJobStatus", "dbtJobStatus" ) """ - TBC + """ - GOOGLE_LOCATION: ClassVar[KeywordField] = KeywordField( - "googleLocation", "googleLocation" + DBT_JOB_SCHEDULE_CRON_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( + "dbtJobScheduleCronHumanized", + "dbtJobScheduleCronHumanized.keyword", + "dbtJobScheduleCronHumanized", ) """ - TBC + """ - GOOGLE_LOCATION_TYPE: ClassVar[KeywordField] = KeywordField( - "googleLocationType", "googleLocationType" + DBT_JOB_LAST_RUN: ClassVar[NumericField] = NumericField( + "dbtJobLastRun", "dbtJobLastRun" ) """ - TBC + """ - GOOGLE_LABELS: ClassVar[KeywordField] = KeywordField("googleLabels", "googleLabels") + DBT_JOB_NEXT_RUN: ClassVar[NumericField] = NumericField( + "dbtJobNextRun", "dbtJobNextRun" + ) + """ + """ - TBC + DBT_JOB_NEXT_RUN_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( + "dbtJobNextRunHumanized", + "dbtJobNextRunHumanized.keyword", + "dbtJobNextRunHumanized", + ) """ - GOOGLE_TAGS: ClassVar[KeywordField] = KeywordField("googleTags", "googleTags") + """ - TBC + DBT_ENVIRONMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtEnvironmentName", "dbtEnvironmentName.keyword", "dbtEnvironmentName" + ) + """ + + """ + DBT_ENVIRONMENT_DBT_VERSION: ClassVar[KeywordTextField] = KeywordTextField( + "dbtEnvironmentDbtVersion", + "dbtEnvironmentDbtVersion.keyword", + "dbtEnvironmentDbtVersion", + ) + """ + + """ + DBT_TAGS: ClassVar[KeywordField] = KeywordField("dbtTags", "dbtTags") + """ + + """ + DBT_CONNECTION_CONTEXT: ClassVar[KeywordField] = KeywordField( + "dbtConnectionContext", "dbtConnectionContext" + ) + """ + + """ + DBT_SEMANTIC_LAYER_PROXY_URL: ClassVar[KeywordField] = KeywordField( + "dbtSemanticLayerProxyUrl", "dbtSemanticLayerProxyUrl" + ) + """ + + """ + TAG_ID: ClassVar[KeywordField] = KeywordField("tagId", "tagId") + """ + Unique identifier of the tag in the source system. + """ + TAG_ATTRIBUTES: ClassVar[KeywordField] = KeywordField( + "tagAttributes", "tagAttributes" + ) + """ + Attributes associated with the tag in the source system. + """ + TAG_ALLOWED_VALUES: ClassVar[KeywordTextField] = KeywordTextField( + "tagAllowedValues", "tagAllowedValues", "tagAllowedValues.text" + ) + """ + Allowed values for the tag in the source system. These are denormalized from tagAttributes for ease of querying. + """ + MAPPED_CLASSIFICATION_NAME: ClassVar[KeywordField] = KeywordField( + "mappedClassificationName", "mappedClassificationName" + ) + """ + Name of the classification in Atlan that is mapped to this tag. 
""" _convenience_properties: ClassVar[list[str]] = [ - "data_studio_asset_type", - "data_studio_asset_title", - "data_studio_asset_owner", - "is_trashed_data_studio_asset", - "google_service", - "google_project_name", - "google_project_id", - "google_project_number", - "google_location", - "google_location_type", - "google_labels", - "google_tags", + "dbt_alias", + "dbt_meta", + "dbt_unique_id", + "dbt_account_name", + "dbt_project_name", + "dbt_package_name", + "dbt_job_name", + "dbt_job_schedule", + "dbt_job_status", + "dbt_job_schedule_cron_humanized", + "dbt_job_last_run", + "dbt_job_next_run", + "dbt_job_next_run_humanized", + "dbt_environment_name", + "dbt_environment_dbt_version", + "dbt_tags", + "dbt_connection_context", + "dbt_semantic_layer_proxy_url", + "tag_id", + "tag_attributes", + "tag_allowed_values", + "mapped_atlan_tag_name", ] @property - def data_studio_asset_type(self) -> Optional[GoogleDatastudioAssetType]: + def dbt_alias(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_alias + + @dbt_alias.setter + def dbt_alias(self, dbt_alias: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_alias = dbt_alias + + @property + def dbt_meta(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_meta + + @dbt_meta.setter + def dbt_meta(self, dbt_meta: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_meta = dbt_meta + + @property + def dbt_unique_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_unique_id + + @dbt_unique_id.setter + def dbt_unique_id(self, dbt_unique_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_unique_id = dbt_unique_id + + @property + def dbt_account_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_account_name + + @dbt_account_name.setter + def dbt_account_name(self, dbt_account_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_account_name = dbt_account_name + + @property + def dbt_project_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_project_name + + @dbt_project_name.setter + def dbt_project_name(self, dbt_project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_project_name = dbt_project_name + + @property + def dbt_package_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_package_name + + @dbt_package_name.setter + def dbt_package_name(self, dbt_package_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_package_name = dbt_package_name + + @property + def dbt_job_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_job_name + + @dbt_job_name.setter + def dbt_job_name(self, dbt_job_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_name = dbt_job_name + + @property + def dbt_job_schedule(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_job_schedule + + @dbt_job_schedule.setter + def dbt_job_schedule(self, dbt_job_schedule: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.dbt_job_schedule = dbt_job_schedule + + @property + def dbt_job_status(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_job_status + + @dbt_job_status.setter + def dbt_job_status(self, dbt_job_status: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_status = dbt_job_status + + @property + def dbt_job_schedule_cron_humanized(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.data_studio_asset_type + None + if self.attributes is None + else self.attributes.dbt_job_schedule_cron_humanized ) - @data_studio_asset_type.setter - def data_studio_asset_type( - self, data_studio_asset_type: Optional[GoogleDatastudioAssetType] + @dbt_job_schedule_cron_humanized.setter + def dbt_job_schedule_cron_humanized( + self, dbt_job_schedule_cron_humanized: Optional[str] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.data_studio_asset_type = data_studio_asset_type + self.attributes.dbt_job_schedule_cron_humanized = ( + dbt_job_schedule_cron_humanized + ) @property - def data_studio_asset_title(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.data_studio_asset_title - ) + def dbt_job_last_run(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.dbt_job_last_run - @data_studio_asset_title.setter - def data_studio_asset_title(self, data_studio_asset_title: Optional[str]): + @dbt_job_last_run.setter + def dbt_job_last_run(self, dbt_job_last_run: Optional[datetime]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.data_studio_asset_title = data_studio_asset_title + self.attributes.dbt_job_last_run = dbt_job_last_run @property - def data_studio_asset_owner(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.data_studio_asset_owner - ) + def dbt_job_next_run(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.dbt_job_next_run - @data_studio_asset_owner.setter - def data_studio_asset_owner(self, data_studio_asset_owner: Optional[str]): + @dbt_job_next_run.setter + def dbt_job_next_run(self, dbt_job_next_run: Optional[datetime]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.data_studio_asset_owner = data_studio_asset_owner + self.attributes.dbt_job_next_run = dbt_job_next_run @property - def is_trashed_data_studio_asset(self) -> Optional[bool]: + def dbt_job_next_run_humanized(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.is_trashed_data_studio_asset + else self.attributes.dbt_job_next_run_humanized ) - @is_trashed_data_studio_asset.setter - def is_trashed_data_studio_asset( - self, is_trashed_data_studio_asset: Optional[bool] - ): + @dbt_job_next_run_humanized.setter + def dbt_job_next_run_humanized(self, dbt_job_next_run_humanized: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.is_trashed_data_studio_asset = is_trashed_data_studio_asset + self.attributes.dbt_job_next_run_humanized = dbt_job_next_run_humanized @property - def google_service(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_service + def dbt_environment_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_environment_name - @google_service.setter - def 
google_service(self, google_service: Optional[str]): + @dbt_environment_name.setter + def dbt_environment_name(self, dbt_environment_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.google_service = google_service + self.attributes.dbt_environment_name = dbt_environment_name @property - def google_project_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_project_name + def dbt_environment_dbt_version(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_environment_dbt_version + ) - @google_project_name.setter - def google_project_name(self, google_project_name: Optional[str]): + @dbt_environment_dbt_version.setter + def dbt_environment_dbt_version(self, dbt_environment_dbt_version: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.google_project_name = google_project_name + self.attributes.dbt_environment_dbt_version = dbt_environment_dbt_version @property - def google_project_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_project_id + def dbt_tags(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.dbt_tags - @google_project_id.setter - def google_project_id(self, google_project_id: Optional[str]): + @dbt_tags.setter + def dbt_tags(self, dbt_tags: Optional[set[str]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.google_project_id = google_project_id + self.attributes.dbt_tags = dbt_tags @property - def google_project_number(self) -> Optional[int]: + def dbt_connection_context(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.google_project_number + None if self.attributes is None else self.attributes.dbt_connection_context ) - @google_project_number.setter - def google_project_number(self, google_project_number: Optional[int]): + @dbt_connection_context.setter + def dbt_connection_context(self, dbt_connection_context: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.google_project_number = google_project_number + self.attributes.dbt_connection_context = dbt_connection_context @property - def google_location(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_location + def dbt_semantic_layer_proxy_url(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_semantic_layer_proxy_url + ) - @google_location.setter - def google_location(self, google_location: Optional[str]): + @dbt_semantic_layer_proxy_url.setter + def dbt_semantic_layer_proxy_url(self, dbt_semantic_layer_proxy_url: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.google_location = google_location + self.attributes.dbt_semantic_layer_proxy_url = dbt_semantic_layer_proxy_url @property - def google_location_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.google_location_type + def tag_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.tag_id - @google_location_type.setter - def google_location_type(self, google_location_type: Optional[str]): + @tag_id.setter + def tag_id(self, tag_id: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.google_location_type = 
google_location_type + self.attributes.tag_id = tag_id @property - def google_labels(self) -> Optional[list[GoogleLabel]]: - return None if self.attributes is None else self.attributes.google_labels + def tag_attributes(self) -> Optional[list[SourceTagAttribute]]: + return None if self.attributes is None else self.attributes.tag_attributes - @google_labels.setter - def google_labels(self, google_labels: Optional[list[GoogleLabel]]): + @tag_attributes.setter + def tag_attributes(self, tag_attributes: Optional[list[SourceTagAttribute]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.google_labels = google_labels + self.attributes.tag_attributes = tag_attributes @property - def google_tags(self) -> Optional[list[GoogleTag]]: - return None if self.attributes is None else self.attributes.google_tags + def tag_allowed_values(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.tag_allowed_values - @google_tags.setter - def google_tags(self, google_tags: Optional[list[GoogleTag]]): + @tag_allowed_values.setter + def tag_allowed_values(self, tag_allowed_values: Optional[set[str]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.google_tags = google_tags + self.attributes.tag_allowed_values = tag_allowed_values - class Attributes(DataStudio.Attributes): - data_studio_asset_type: Optional[GoogleDatastudioAssetType] = Field( - None, description="", alias="dataStudioAssetType" + @property + def mapped_atlan_tag_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.mapped_atlan_tag_name + ) + + @mapped_atlan_tag_name.setter + def mapped_atlan_tag_name(self, mapped_atlan_tag_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mapped_atlan_tag_name = mapped_atlan_tag_name + + class Attributes(Dbt.Attributes): + dbt_alias: Optional[str] = Field(None, description="", alias="dbtAlias") + dbt_meta: Optional[str] = Field(None, description="", alias="dbtMeta") + dbt_unique_id: Optional[str] = Field(None, description="", alias="dbtUniqueId") + dbt_account_name: Optional[str] = Field( + None, description="", alias="dbtAccountName" + ) + dbt_project_name: Optional[str] = Field( + None, description="", alias="dbtProjectName" + ) + dbt_package_name: Optional[str] = Field( + None, description="", alias="dbtPackageName" + ) + dbt_job_name: Optional[str] = Field(None, description="", alias="dbtJobName") + dbt_job_schedule: Optional[str] = Field( + None, description="", alias="dbtJobSchedule" + ) + dbt_job_status: Optional[str] = Field( + None, description="", alias="dbtJobStatus" ) - data_studio_asset_title: Optional[str] = Field( - None, description="", alias="dataStudioAssetTitle" + dbt_job_schedule_cron_humanized: Optional[str] = Field( + None, description="", alias="dbtJobScheduleCronHumanized" ) - data_studio_asset_owner: Optional[str] = Field( - None, description="", alias="dataStudioAssetOwner" + dbt_job_last_run: Optional[datetime] = Field( + None, description="", alias="dbtJobLastRun" ) - is_trashed_data_studio_asset: Optional[bool] = Field( - None, description="", alias="isTrashedDataStudioAsset" + dbt_job_next_run: Optional[datetime] = Field( + None, description="", alias="dbtJobNextRun" ) - google_service: Optional[str] = Field( - None, description="", alias="googleService" + dbt_job_next_run_humanized: Optional[str] = Field( + None, description="", alias="dbtJobNextRunHumanized" ) - google_project_name: 
Optional[str] = Field( - None, description="", alias="googleProjectName" + dbt_environment_name: Optional[str] = Field( + None, description="", alias="dbtEnvironmentName" ) - google_project_id: Optional[str] = Field( - None, description="", alias="googleProjectId" + dbt_environment_dbt_version: Optional[str] = Field( + None, description="", alias="dbtEnvironmentDbtVersion" ) - google_project_number: Optional[int] = Field( - None, description="", alias="googleProjectNumber" + dbt_tags: Optional[set[str]] = Field(None, description="", alias="dbtTags") + dbt_connection_context: Optional[str] = Field( + None, description="", alias="dbtConnectionContext" ) - google_location: Optional[str] = Field( - None, description="", alias="googleLocation" + dbt_semantic_layer_proxy_url: Optional[str] = Field( + None, description="", alias="dbtSemanticLayerProxyUrl" ) - google_location_type: Optional[str] = Field( - None, description="", alias="googleLocationType" + tag_id: Optional[str] = Field(None, description="", alias="tagId") + tag_attributes: Optional[list[SourceTagAttribute]] = Field( + None, description="", alias="tagAttributes" ) - google_labels: Optional[list[GoogleLabel]] = Field( - None, description="", alias="googleLabels" + tag_allowed_values: Optional[set[str]] = Field( + None, description="", alias="tagAllowedValues" ) - google_tags: Optional[list[GoogleTag]] = Field( - None, description="", alias="googleTags" + mapped_atlan_tag_name: Optional[str] = Field( + None, description="", alias="mappedClassificationName" ) - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, - *, - name: str, - connection_qualified_name: str, - data_studio_asset_type: GoogleDatastudioAssetType, - gdsid: str, - ) -> DataStudioAsset.Attributes: - validate_required_fields( - ["name", "connection_qualified_name", "data_studio_asset_type"], - [name, connection_qualified_name, data_studio_asset_type], - ) - - # Split the connection_qualified_name to extract necessary information - fields = connection_qualified_name.split("/") - if len(fields) != 3: - raise ValueError("Invalid connection_qualified_name") - - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid connection_qualified_name") from e - - return DataStudioAsset.Attributes( - name=name, - qualified_name=f"{connection_qualified_name}/{gdsid}", - connection_qualified_name=connection_qualified_name, - connector_name=connector_type.value, - data_studio_asset_type=data_studio_asset_type, - ) - - attributes: "DataStudioAsset.Attributes" = Field( - default_factory=lambda: DataStudioAsset.Attributes(), + attributes: "DbtTag.Attributes" = Field( + default_factory=lambda: DbtTag.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -DataStudioAsset.Attributes.update_forward_refs() +DbtTag.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset60.py b/pyatlan/model/assets/asset60.py index b8c0d5c82..a75b301b4 100644 --- a/pyatlan/model/assets/asset60.py +++ b/pyatlan/model/assets/asset60.py @@ -4,8 +4,7 @@ from __future__ import annotations -from datetime import datetime -from typing import ClassVar, Optional, overload +from typing import ClassVar, Optional from pydantic import Field, validator @@ -14,566 +13,513 @@ BooleanField, KeywordField, KeywordTextField, - NumericField, RelationField, + TextField, ) from pyatlan.utils import init_guid, validate_required_fields -from .asset34 import S3 +from .asset28 import API -class S3Bucket(S3): +class APISpec(API): """Description""" - @overload - @classmethod - @init_guid - def create( - cls, - *, - name: str, - connection_qualified_name: str, - aws_arn: str, - ) -> S3Bucket: - ... - - @overload - @classmethod - @init_guid - def create( - cls, - *, - name: str, - connection_qualified_name: str, - aws_arn: Optional[str] = None, - ) -> S3Bucket: - ... - @classmethod # @validate_arguments() @init_guid - def create( - cls, *, name: str, connection_qualified_name: str, aws_arn: Optional[str] = None - ) -> S3Bucket: + def create(cls, *, name: str, connection_qualified_name: str) -> APISpec: validate_required_fields( - ["name", "connection_qualified_name"], - [name, connection_qualified_name], + ["name", "connection_qualified_name"], [name, connection_qualified_name] ) - attributes = S3Bucket.Attributes.create( - name=name, - connection_qualified_name=connection_qualified_name, - aws_arn=aws_arn, + attributes = APISpec.Attributes.create( + name=name, connection_qualified_name=connection_qualified_name ) return cls(attributes=attributes) - type_name: str = Field("S3Bucket", allow_mutation=False) + type_name: str = Field("APISpec", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "S3Bucket": - raise ValueError("must be S3Bucket") + if v != "APISpec": + raise ValueError("must be APISpec") return v def __setattr__(self, name, value): - if name in S3Bucket._convenience_properties: + if name in APISpec._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - S3OBJECT_COUNT: ClassVar[NumericField] = NumericField( - "s3ObjectCount", "s3ObjectCount" + API_SPEC_TERMS_OF_SERVICE_URL: ClassVar[KeywordTextField] = KeywordTextField( + "apiSpecTermsOfServiceURL", + "apiSpecTermsOfServiceURL", + "apiSpecTermsOfServiceURL.text", ) """ - TBC + URL to the terms of service for the API specification. """ - S3BUCKET_VERSIONING_ENABLED: ClassVar[BooleanField] = BooleanField( - "s3BucketVersioningEnabled", "s3BucketVersioningEnabled" + API_SPEC_CONTACT_EMAIL: ClassVar[KeywordTextField] = KeywordTextField( + "apiSpecContactEmail", "apiSpecContactEmail", "apiSpecContactEmail.text" ) """ - TBC + Email address for a contact responsible for the API specification. + """ + API_SPEC_CONTACT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "apiSpecContactName", "apiSpecContactName.keyword", "apiSpecContactName" + ) + """ + Name of the contact responsible for the API specification. + """ + API_SPEC_CONTACT_URL: ClassVar[KeywordTextField] = KeywordTextField( + "apiSpecContactURL", "apiSpecContactURL", "apiSpecContactURL.text" + ) + """ + URL pointing to the contact information. 
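With DbtTag regenerated above, the source-tag mapping properties keep their aliased attribute names (for example, `mapped_atlan_tag_name` is stored as `mappedClassificationName`). A small sketch using only values invented for illustration:

```python
# Illustrative values only; the properties and aliases are those shown in the diff above.
from pyatlan.model.assets import DbtTag

tag = DbtTag()
tag.tag_id = "dbt-tag-1234"
tag.tag_allowed_values = {"gold", "silver", "bronze"}
tag.mapped_atlan_tag_name = "Tier"

# The snake_case property writes through to the aliased attribute field.
assert tag.attributes.mapped_atlan_tag_name == "Tier"
```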
+ """ + API_SPEC_LICENSE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "apiSpecLicenseName", "apiSpecLicenseName.keyword", "apiSpecLicenseName" + ) + """ + Name of the license under which the API specification is available. + """ + API_SPEC_LICENSE_URL: ClassVar[KeywordTextField] = KeywordTextField( + "apiSpecLicenseURL", "apiSpecLicenseURL", "apiSpecLicenseURL.text" + ) + """ + URL to the license under which the API specification is available. + """ + API_SPEC_CONTRACT_VERSION: ClassVar[KeywordField] = KeywordField( + "apiSpecContractVersion", "apiSpecContractVersion" + ) + """ + Version of the contract for the API specification. + """ + API_SPEC_SERVICE_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( + "apiSpecServiceAlias", "apiSpecServiceAlias", "apiSpecServiceAlias.text" + ) + """ + Service alias for the API specification. """ - OBJECTS: ClassVar[RelationField] = RelationField("objects") + API_PATHS: ClassVar[RelationField] = RelationField("apiPaths") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "s3_object_count", - "s3_bucket_versioning_enabled", - "objects", + "api_spec_terms_of_service_url", + "api_spec_contact_email", + "api_spec_contact_name", + "api_spec_contact_url", + "api_spec_license_name", + "api_spec_license_url", + "api_spec_contract_version", + "api_spec_service_alias", + "api_paths", ] @property - def s3_object_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.s3_object_count + def api_spec_terms_of_service_url(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.api_spec_terms_of_service_url + ) + + @api_spec_terms_of_service_url.setter + def api_spec_terms_of_service_url( + self, api_spec_terms_of_service_url: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_terms_of_service_url = api_spec_terms_of_service_url + + @property + def api_spec_contact_email(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.api_spec_contact_email + ) - @s3_object_count.setter - def s3_object_count(self, s3_object_count: Optional[int]): + @api_spec_contact_email.setter + def api_spec_contact_email(self, api_spec_contact_email: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.s3_object_count = s3_object_count + self.attributes.api_spec_contact_email = api_spec_contact_email @property - def s3_bucket_versioning_enabled(self) -> Optional[bool]: + def api_spec_contact_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.api_spec_contact_name + ) + + @api_spec_contact_name.setter + def api_spec_contact_name(self, api_spec_contact_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_contact_name = api_spec_contact_name + + @property + def api_spec_contact_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.api_spec_contact_url + + @api_spec_contact_url.setter + def api_spec_contact_url(self, api_spec_contact_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_contact_url = api_spec_contact_url + + @property + def api_spec_license_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.api_spec_license_name + ) + + @api_spec_license_name.setter + def api_spec_license_name(self, 
api_spec_license_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_license_name = api_spec_license_name + + @property + def api_spec_license_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.api_spec_license_url + + @api_spec_license_url.setter + def api_spec_license_url(self, api_spec_license_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_license_url = api_spec_license_url + + @property + def api_spec_contract_version(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.s3_bucket_versioning_enabled + else self.attributes.api_spec_contract_version ) - @s3_bucket_versioning_enabled.setter - def s3_bucket_versioning_enabled( - self, s3_bucket_versioning_enabled: Optional[bool] - ): + @api_spec_contract_version.setter + def api_spec_contract_version(self, api_spec_contract_version: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_contract_version = api_spec_contract_version + + @property + def api_spec_service_alias(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.api_spec_service_alias + ) + + @api_spec_service_alias.setter + def api_spec_service_alias(self, api_spec_service_alias: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.s3_bucket_versioning_enabled = s3_bucket_versioning_enabled + self.attributes.api_spec_service_alias = api_spec_service_alias @property - def objects(self) -> Optional[list[S3Object]]: - return None if self.attributes is None else self.attributes.objects + def api_paths(self) -> Optional[list[APIPath]]: + return None if self.attributes is None else self.attributes.api_paths - @objects.setter - def objects(self, objects: Optional[list[S3Object]]): + @api_paths.setter + def api_paths(self, api_paths: Optional[list[APIPath]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.objects = objects + self.attributes.api_paths = api_paths - class Attributes(S3.Attributes): - s3_object_count: Optional[int] = Field( - None, description="", alias="s3ObjectCount" + class Attributes(API.Attributes): + api_spec_terms_of_service_url: Optional[str] = Field( + None, description="", alias="apiSpecTermsOfServiceURL" + ) + api_spec_contact_email: Optional[str] = Field( + None, description="", alias="apiSpecContactEmail" ) - s3_bucket_versioning_enabled: Optional[bool] = Field( - None, description="", alias="s3BucketVersioningEnabled" + api_spec_contact_name: Optional[str] = Field( + None, description="", alias="apiSpecContactName" ) - objects: Optional[list[S3Object]] = Field( - None, description="", alias="objects" + api_spec_contact_url: Optional[str] = Field( + None, description="", alias="apiSpecContactURL" + ) + api_spec_license_name: Optional[str] = Field( + None, description="", alias="apiSpecLicenseName" + ) + api_spec_license_url: Optional[str] = Field( + None, description="", alias="apiSpecLicenseURL" + ) + api_spec_contract_version: Optional[str] = Field( + None, description="", alias="apiSpecContractVersion" + ) + api_spec_service_alias: Optional[str] = Field( + None, description="", alias="apiSpecServiceAlias" + ) + api_paths: Optional[list[APIPath]] = Field( + None, description="", alias="apiPaths" ) # relationship @classmethod # @validate_arguments() @init_guid def create( - cls, - *, 
- name: str, - connection_qualified_name: str, - aws_arn: Optional[str] = None, - ) -> S3Bucket.Attributes: + cls, *, name: str, connection_qualified_name: str + ) -> APISpec.Attributes: validate_required_fields( - ["name", "connection_qualified_name"], - [name, connection_qualified_name], + ["name", "connection_qualified_name"], [name, connection_qualified_name] ) + + # Split the connection_qualified_name to extract necessary information fields = connection_qualified_name.split("/") if len(fields) != 3: raise ValueError("Invalid connection_qualified_name") + try: - if fields[0].replace(" ", "") == "" or fields[2].replace(" ", "") == "": - raise ValueError("Invalid connection_qualified_name") connector_type = AtlanConnectorType(fields[1]) # type:ignore - if connector_type != AtlanConnectorType.S3: - raise ValueError("Connector type must be s3") except ValueError as e: raise ValueError("Invalid connection_qualified_name") from e - return S3Bucket.Attributes( - aws_arn=aws_arn, + + return APISpec.Attributes( name=name, + qualified_name=f"{connection_qualified_name}/{name}", connection_qualified_name=connection_qualified_name, - qualified_name=f"{connection_qualified_name}/{aws_arn if aws_arn else name}", connector_name=connector_type.value, ) - attributes: "S3Bucket.Attributes" = Field( - default_factory=lambda: S3Bucket.Attributes(), + attributes: "APISpec.Attributes" = Field( + default_factory=lambda: APISpec.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class S3Object(S3): +class APIPath(API): """Description""" @classmethod # @validate_arguments() @init_guid - def create( - cls, - *, - name: str, - connection_qualified_name: str, - aws_arn: str, - s3_bucket_qualified_name: str, - ) -> S3Object: - validate_required_fields( - [ - "name", - "connection_qualified_name", - "aws_arn", - "s3_bucket_qualified_name", - ], - [name, connection_qualified_name, aws_arn, s3_bucket_qualified_name], - ) - attributes = S3Object.Attributes.create( - name=name, - connection_qualified_name=connection_qualified_name, - aws_arn=aws_arn, - s3_bucket_qualified_name=s3_bucket_qualified_name, - ) - return cls(attributes=attributes) - - @classmethod - # @validate_arguments() - @init_guid - def create_with_prefix( - cls, - *, - name: str, - connection_qualified_name: str, - prefix: str, - s3_bucket_qualified_name: str, - ) -> S3Object: + def create(cls, *, path_raw_uri: str, spec_qualified_name: str) -> APIPath: validate_required_fields( - [ - "name", - "connection_qualified_name", - "prefix", - "s3_bucket_qualified_name", - ], - [name, connection_qualified_name, prefix, s3_bucket_qualified_name], + ["path_raw_uri", "spec_qualified_name"], [path_raw_uri, spec_qualified_name] ) - attributes = S3Object.Attributes.create_with_prefix( - name=name, - connection_qualified_name=connection_qualified_name, - prefix=prefix, - s3_bucket_qualified_name=s3_bucket_qualified_name, + attributes = APIPath.Attributes.create( + path_raw_uri=path_raw_uri, spec_qualified_name=spec_qualified_name ) return cls(attributes=attributes) - type_name: str = Field("S3Object", allow_mutation=False) + type_name: str = Field("APIPath", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "S3Object": - raise ValueError("must be S3Object") + if v != "APIPath": + raise ValueError("must be APIPath") return v def __setattr__(self, name, value): - if name in 
S3Object._convenience_properties: + if name in APIPath._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - S3OBJECT_LAST_MODIFIED_TIME: ClassVar[NumericField] = NumericField( - "s3ObjectLastModifiedTime", "s3ObjectLastModifiedTime" - ) - """ - TBC - """ - S3BUCKET_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "s3BucketName", "s3BucketName", "s3BucketName.text" - ) - """ - TBC - """ - S3BUCKET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "s3BucketQualifiedName", "s3BucketQualifiedName" + API_PATH_SUMMARY: ClassVar[TextField] = TextField( + "apiPathSummary", "apiPathSummary" ) """ - TBC - """ - S3OBJECT_SIZE: ClassVar[NumericField] = NumericField("s3ObjectSize", "s3ObjectSize") + Descriptive summary intended to apply to all operations in this path. """ - TBC - """ - S3OBJECT_STORAGE_CLASS: ClassVar[KeywordField] = KeywordField( - "s3ObjectStorageClass", "s3ObjectStorageClass" + API_PATH_RAW_URI: ClassVar[KeywordTextField] = KeywordTextField( + "apiPathRawURI", "apiPathRawURI", "apiPathRawURI.text" ) """ - TBC + Absolute path to an individual endpoint. """ - S3OBJECT_KEY: ClassVar[KeywordTextField] = KeywordTextField( - "s3ObjectKey", "s3ObjectKey", "s3ObjectKey.text" + API_PATH_IS_TEMPLATED: ClassVar[BooleanField] = BooleanField( + "apiPathIsTemplated", "apiPathIsTemplated" ) """ - TBC + Whether the endpoint's path contains replaceable parameters (true) or not (false). """ - S3OBJECT_CONTENT_TYPE: ClassVar[KeywordField] = KeywordField( - "s3ObjectContentType", "s3ObjectContentType" + API_PATH_AVAILABLE_OPERATIONS: ClassVar[KeywordField] = KeywordField( + "apiPathAvailableOperations", "apiPathAvailableOperations" ) """ - TBC + List of the operations available on the endpoint. """ - S3OBJECT_CONTENT_DISPOSITION: ClassVar[KeywordField] = KeywordField( - "s3ObjectContentDisposition", "s3ObjectContentDisposition" + API_PATH_AVAILABLE_RESPONSE_CODES: ClassVar[KeywordField] = KeywordField( + "apiPathAvailableResponseCodes", "apiPathAvailableResponseCodes" ) """ - TBC + Response codes available on the path across all operations. """ - S3OBJECT_VERSION_ID: ClassVar[KeywordField] = KeywordField( - "s3ObjectVersionId", "s3ObjectVersionId" + API_PATH_IS_INGRESS_EXPOSED: ClassVar[BooleanField] = BooleanField( + "apiPathIsIngressExposed", "apiPathIsIngressExposed" ) """ - TBC + Whether the path is exposed as an ingress (true) or not (false). 
""" - BUCKET: ClassVar[RelationField] = RelationField("bucket") + API_SPEC: ClassVar[RelationField] = RelationField("apiSpec") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "s3_object_last_modified_time", - "s3_bucket_name", - "s3_bucket_qualified_name", - "s3_object_size", - "s3_object_storage_class", - "s3_object_key", - "s3_object_content_type", - "s3_object_content_disposition", - "s3_object_version_id", - "bucket", + "api_path_summary", + "api_path_raw_u_r_i", + "api_path_is_templated", + "api_path_available_operations", + "api_path_available_response_codes", + "api_path_is_ingress_exposed", + "api_spec", ] @property - def s3_object_last_modified_time(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.s3_object_last_modified_time - ) + def api_path_summary(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.api_path_summary - @s3_object_last_modified_time.setter - def s3_object_last_modified_time( - self, s3_object_last_modified_time: Optional[datetime] - ): + @api_path_summary.setter + def api_path_summary(self, api_path_summary: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.s3_object_last_modified_time = s3_object_last_modified_time + self.attributes.api_path_summary = api_path_summary @property - def s3_bucket_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.s3_bucket_name + def api_path_raw_u_r_i(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.api_path_raw_u_r_i - @s3_bucket_name.setter - def s3_bucket_name(self, s3_bucket_name: Optional[str]): + @api_path_raw_u_r_i.setter + def api_path_raw_u_r_i(self, api_path_raw_u_r_i: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.s3_bucket_name = s3_bucket_name + self.attributes.api_path_raw_u_r_i = api_path_raw_u_r_i @property - def s3_bucket_qualified_name(self) -> Optional[str]: + def api_path_is_templated(self) -> Optional[bool]: return ( - None - if self.attributes is None - else self.attributes.s3_bucket_qualified_name + None if self.attributes is None else self.attributes.api_path_is_templated ) - @s3_bucket_qualified_name.setter - def s3_bucket_qualified_name(self, s3_bucket_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.s3_bucket_qualified_name = s3_bucket_qualified_name - - @property - def s3_object_size(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.s3_object_size - - @s3_object_size.setter - def s3_object_size(self, s3_object_size: Optional[int]): + @api_path_is_templated.setter + def api_path_is_templated(self, api_path_is_templated: Optional[bool]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.s3_object_size = s3_object_size + self.attributes.api_path_is_templated = api_path_is_templated @property - def s3_object_storage_class(self) -> Optional[str]: + def api_path_available_operations(self) -> Optional[set[str]]: return ( - None if self.attributes is None else self.attributes.s3_object_storage_class + None + if self.attributes is None + else self.attributes.api_path_available_operations ) - @s3_object_storage_class.setter - def s3_object_storage_class(self, s3_object_storage_class: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.s3_object_storage_class 
= s3_object_storage_class - - @property - def s3_object_key(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.s3_object_key - - @s3_object_key.setter - def s3_object_key(self, s3_object_key: Optional[str]): + @api_path_available_operations.setter + def api_path_available_operations( + self, api_path_available_operations: Optional[set[str]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.s3_object_key = s3_object_key + self.attributes.api_path_available_operations = api_path_available_operations @property - def s3_object_content_type(self) -> Optional[str]: + def api_path_available_response_codes(self) -> Optional[dict[str, str]]: return ( - None if self.attributes is None else self.attributes.s3_object_content_type + None + if self.attributes is None + else self.attributes.api_path_available_response_codes ) - @s3_object_content_type.setter - def s3_object_content_type(self, s3_object_content_type: Optional[str]): + @api_path_available_response_codes.setter + def api_path_available_response_codes( + self, api_path_available_response_codes: Optional[dict[str, str]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.s3_object_content_type = s3_object_content_type + self.attributes.api_path_available_response_codes = ( + api_path_available_response_codes + ) @property - def s3_object_content_disposition(self) -> Optional[str]: + def api_path_is_ingress_exposed(self) -> Optional[bool]: return ( None if self.attributes is None - else self.attributes.s3_object_content_disposition + else self.attributes.api_path_is_ingress_exposed ) - @s3_object_content_disposition.setter - def s3_object_content_disposition( - self, s3_object_content_disposition: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.s3_object_content_disposition = s3_object_content_disposition - - @property - def s3_object_version_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.s3_object_version_id - - @s3_object_version_id.setter - def s3_object_version_id(self, s3_object_version_id: Optional[str]): + @api_path_is_ingress_exposed.setter + def api_path_is_ingress_exposed(self, api_path_is_ingress_exposed: Optional[bool]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.s3_object_version_id = s3_object_version_id + self.attributes.api_path_is_ingress_exposed = api_path_is_ingress_exposed @property - def bucket(self) -> Optional[S3Bucket]: - return None if self.attributes is None else self.attributes.bucket + def api_spec(self) -> Optional[APISpec]: + return None if self.attributes is None else self.attributes.api_spec - @bucket.setter - def bucket(self, bucket: Optional[S3Bucket]): + @api_spec.setter + def api_spec(self, api_spec: Optional[APISpec]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.bucket = bucket + self.attributes.api_spec = api_spec - class Attributes(S3.Attributes): - s3_object_last_modified_time: Optional[datetime] = Field( - None, description="", alias="s3ObjectLastModifiedTime" - ) - s3_bucket_name: Optional[str] = Field( - None, description="", alias="s3BucketName" + class Attributes(API.Attributes): + api_path_summary: Optional[str] = Field( + None, description="", alias="apiPathSummary" ) - s3_bucket_qualified_name: Optional[str] = Field( - None, description="", alias="s3BucketQualifiedName" + api_path_raw_u_r_i: Optional[str] = Field( + 
None, description="", alias="apiPathRawURI" ) - s3_object_size: Optional[int] = Field( - None, description="", alias="s3ObjectSize" + api_path_is_templated: Optional[bool] = Field( + None, description="", alias="apiPathIsTemplated" ) - s3_object_storage_class: Optional[str] = Field( - None, description="", alias="s3ObjectStorageClass" + api_path_available_operations: Optional[set[str]] = Field( + None, description="", alias="apiPathAvailableOperations" ) - s3_object_key: Optional[str] = Field(None, description="", alias="s3ObjectKey") - s3_object_content_type: Optional[str] = Field( - None, description="", alias="s3ObjectContentType" + api_path_available_response_codes: Optional[dict[str, str]] = Field( + None, description="", alias="apiPathAvailableResponseCodes" ) - s3_object_content_disposition: Optional[str] = Field( - None, description="", alias="s3ObjectContentDisposition" + api_path_is_ingress_exposed: Optional[bool] = Field( + None, description="", alias="apiPathIsIngressExposed" ) - s3_object_version_id: Optional[str] = Field( - None, description="", alias="s3ObjectVersionId" - ) - bucket: Optional[S3Bucket] = Field( - None, description="", alias="bucket" + api_spec: Optional[APISpec] = Field( + None, description="", alias="apiSpec" ) # relationship @classmethod # @validate_arguments() @init_guid def create( - cls, - *, - name: str, - connection_qualified_name: str, - aws_arn: str, - s3_bucket_qualified_name: str, - ) -> S3Object.Attributes: + cls, *, path_raw_uri: str, spec_qualified_name: str + ) -> APIPath.Attributes: validate_required_fields( - [ - "name", - "connection_qualified_name", - "aws_arn", - "s3_bucket_qualified_name", - ], - [name, connection_qualified_name, aws_arn, s3_bucket_qualified_name], - ) - fields = connection_qualified_name.split("/") - if len(fields) != 3: - raise ValueError("Invalid connection_qualified_name") - try: - if fields[0].replace(" ", "") == "" or fields[2].replace(" ", "") == "": - raise ValueError("Invalid connection_qualified_name") - connector_type = AtlanConnectorType(fields[1]) # type:ignore - if connector_type != AtlanConnectorType.S3: - raise ValueError("Connector type must be s3") - except ValueError as e: - raise ValueError("Invalid connection_qualified_name") from e - return S3Object.Attributes( - aws_arn=aws_arn, - name=name, - connection_qualified_name=connection_qualified_name, - qualified_name=f"{connection_qualified_name}/{aws_arn}", - connector_name=connector_type.value, - s3_bucket_qualified_name=s3_bucket_qualified_name, - bucket=S3Bucket.ref_by_qualified_name(s3_bucket_qualified_name), + ["path_raw_uri", "spec_qualified_name"], + [path_raw_uri, spec_qualified_name], ) - @classmethod - # @validate_arguments() - @init_guid - def create_with_prefix( - cls, - *, - name: str, - connection_qualified_name: str, - prefix: str, - s3_bucket_qualified_name: str, - ) -> S3Object.Attributes: - validate_required_fields( - [ - "name", - "connection_qualified_name", - "prefix", - "s3_bucket_qualified_name", - ], - [name, connection_qualified_name, prefix, s3_bucket_qualified_name], - ) - fields = connection_qualified_name.split("/") - if len(fields) != 3: - raise ValueError("Invalid connection_qualified_name") + # Split the spec_qualified_name to extract necessary information + fields = spec_qualified_name.split("/") + if len(fields) != 4: + raise ValueError("Invalid spec_qualified_name") + try: - if fields[0].replace(" ", "") == "" or fields[2].replace(" ", "") == "": - raise ValueError("Invalid connection_qualified_name") connector_type = 
AtlanConnectorType(fields[1]) # type:ignore - if connector_type != AtlanConnectorType.S3: - raise ValueError("Connector type must be s3") except ValueError as e: - raise ValueError("Invalid connection_qualified_name") from e - object_key = f"{prefix}/{name}" - return S3Object.Attributes( - name=name, - s3_object_key=object_key, - connection_qualified_name=connection_qualified_name, - qualified_name=f"{connection_qualified_name}/{object_key}", + raise ValueError("Invalid spec_qualified_name") from e + + return APIPath.Attributes( + api_path_raw_u_r_i=path_raw_uri, + name=path_raw_uri, + api_spec_qualified_name=spec_qualified_name, + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + qualified_name=f"{spec_qualified_name}{path_raw_uri}", connector_name=connector_type.value, - s3_bucket_qualified_name=s3_bucket_qualified_name, - bucket=S3Bucket.ref_by_qualified_name(s3_bucket_qualified_name), + apiSpec=APISpec.ref_by_qualified_name(spec_qualified_name), ) - attributes: "S3Object.Attributes" = Field( - default_factory=lambda: S3Object.Attributes(), + attributes: "APIPath.Attributes" = Field( + default_factory=lambda: APIPath.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -S3Bucket.Attributes.update_forward_refs() +APISpec.Attributes.update_forward_refs() -S3Object.Attributes.update_forward_refs() +APIPath.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset61.py b/pyatlan/model/assets/asset61.py index c1adff4ea..3b7b5d790 100644 --- a/pyatlan/model/assets/asset61.py +++ b/pyatlan/model/assets/asset61.py @@ -4,623 +4,26 @@ from __future__ import annotations -from datetime import datetime +import uuid from typing import ClassVar, Optional from pydantic import Field, validator -from pyatlan.model.enums import ( - ADLSAccessTier, - ADLSAccountStatus, - ADLSEncryptionTypes, - ADLSLeaseState, - ADLSLeaseStatus, - ADLSObjectArchiveStatus, - ADLSObjectType, - ADLSPerformance, - ADLSProvisionState, - ADLSReplicationType, - ADLSStorageKind, - AtlanConnectorType, -) +from pyatlan.model.enums import AtlanConnectorType, GoogleDatastudioAssetType from pyatlan.model.fields.atlan_fields import ( BooleanField, KeywordField, KeywordTextField, + KeywordTextStemmedField, NumericField, - RelationField, - TextField, ) -from pyatlan.utils import get_parent_qualified_name, init_guid, validate_required_fields - -from .asset35 import ADLS - - -class ADLSAccount(ADLS): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str, connection_qualified_name: str) -> ADLSAccount: - validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] - ) - attributes = ADLSAccount.Attributes.create( - name=name, connection_qualified_name=connection_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("ADLSAccount", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "ADLSAccount": - raise ValueError("must be ADLSAccount") - return v - - def __setattr__(self, name, value): - if name in ADLSAccount._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - ADLS_E_TAG: ClassVar[KeywordField] = KeywordField("adlsETag", "adlsETag") - """ - TBC - """ - ADLS_ENCRYPTION_TYPE: ClassVar[KeywordField] = KeywordField( - "adlsEncryptionType", "adlsEncryptionType" - 
) - """ - TBC - """ - ADLS_ACCOUNT_RESOURCE_GROUP: ClassVar[KeywordTextField] = KeywordTextField( - "adlsAccountResourceGroup", - "adlsAccountResourceGroup.keyword", - "adlsAccountResourceGroup", - ) - """ - TBC - """ - ADLS_ACCOUNT_SUBSCRIPTION: ClassVar[KeywordTextField] = KeywordTextField( - "adlsAccountSubscription", - "adlsAccountSubscription.keyword", - "adlsAccountSubscription", - ) - """ - TBC - """ - ADLS_ACCOUNT_PERFORMANCE: ClassVar[KeywordField] = KeywordField( - "adlsAccountPerformance", "adlsAccountPerformance" - ) - """ - TBC - """ - ADLS_ACCOUNT_REPLICATION: ClassVar[KeywordField] = KeywordField( - "adlsAccountReplication", "adlsAccountReplication" - ) - """ - TBC - """ - ADLS_ACCOUNT_KIND: ClassVar[KeywordField] = KeywordField( - "adlsAccountKind", "adlsAccountKind" - ) - """ - TBC - """ - ADLS_PRIMARY_DISK_STATE: ClassVar[KeywordField] = KeywordField( - "adlsPrimaryDiskState", "adlsPrimaryDiskState" - ) - """ - TBC - """ - ADLS_ACCOUNT_PROVISION_STATE: ClassVar[KeywordField] = KeywordField( - "adlsAccountProvisionState", "adlsAccountProvisionState" - ) - """ - TBC - """ - ADLS_ACCOUNT_ACCESS_TIER: ClassVar[KeywordField] = KeywordField( - "adlsAccountAccessTier", "adlsAccountAccessTier" - ) - """ - TBC - """ - - ADLS_CONTAINERS: ClassVar[RelationField] = RelationField("adlsContainers") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "adls_e_tag", - "adls_encryption_type", - "adls_account_resource_group", - "adls_account_subscription", - "adls_account_performance", - "adls_account_replication", - "adls_account_kind", - "adls_primary_disk_state", - "adls_account_provision_state", - "adls_account_access_tier", - "adls_containers", - ] - - @property - def adls_e_tag(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.adls_e_tag - - @adls_e_tag.setter - def adls_e_tag(self, adls_e_tag: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_e_tag = adls_e_tag - - @property - def adls_encryption_type(self) -> Optional[ADLSEncryptionTypes]: - return None if self.attributes is None else self.attributes.adls_encryption_type - - @adls_encryption_type.setter - def adls_encryption_type(self, adls_encryption_type: Optional[ADLSEncryptionTypes]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_encryption_type = adls_encryption_type - - @property - def adls_account_resource_group(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_account_resource_group - ) - - @adls_account_resource_group.setter - def adls_account_resource_group(self, adls_account_resource_group: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_account_resource_group = adls_account_resource_group - - @property - def adls_account_subscription(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_account_subscription - ) - - @adls_account_subscription.setter - def adls_account_subscription(self, adls_account_subscription: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_account_subscription = adls_account_subscription - - @property - def adls_account_performance(self) -> Optional[ADLSPerformance]: - return ( - None - if self.attributes is None - else self.attributes.adls_account_performance - ) - - @adls_account_performance.setter - def adls_account_performance( - 
self, adls_account_performance: Optional[ADLSPerformance] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_account_performance = adls_account_performance - - @property - def adls_account_replication(self) -> Optional[ADLSReplicationType]: - return ( - None - if self.attributes is None - else self.attributes.adls_account_replication - ) - - @adls_account_replication.setter - def adls_account_replication( - self, adls_account_replication: Optional[ADLSReplicationType] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_account_replication = adls_account_replication - - @property - def adls_account_kind(self) -> Optional[ADLSStorageKind]: - return None if self.attributes is None else self.attributes.adls_account_kind - - @adls_account_kind.setter - def adls_account_kind(self, adls_account_kind: Optional[ADLSStorageKind]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_account_kind = adls_account_kind - - @property - def adls_primary_disk_state(self) -> Optional[ADLSAccountStatus]: - return ( - None if self.attributes is None else self.attributes.adls_primary_disk_state - ) - - @adls_primary_disk_state.setter - def adls_primary_disk_state( - self, adls_primary_disk_state: Optional[ADLSAccountStatus] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_primary_disk_state = adls_primary_disk_state - - @property - def adls_account_provision_state(self) -> Optional[ADLSProvisionState]: - return ( - None - if self.attributes is None - else self.attributes.adls_account_provision_state - ) - - @adls_account_provision_state.setter - def adls_account_provision_state( - self, adls_account_provision_state: Optional[ADLSProvisionState] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_account_provision_state = adls_account_provision_state - - @property - def adls_account_access_tier(self) -> Optional[ADLSAccessTier]: - return ( - None - if self.attributes is None - else self.attributes.adls_account_access_tier - ) - - @adls_account_access_tier.setter - def adls_account_access_tier( - self, adls_account_access_tier: Optional[ADLSAccessTier] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_account_access_tier = adls_account_access_tier - - @property - def adls_containers(self) -> Optional[list[ADLSContainer]]: - return None if self.attributes is None else self.attributes.adls_containers - - @adls_containers.setter - def adls_containers(self, adls_containers: Optional[list[ADLSContainer]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_containers = adls_containers - - class Attributes(ADLS.Attributes): - adls_e_tag: Optional[str] = Field(None, description="", alias="adlsETag") - adls_encryption_type: Optional[ADLSEncryptionTypes] = Field( - None, description="", alias="adlsEncryptionType" - ) - adls_account_resource_group: Optional[str] = Field( - None, description="", alias="adlsAccountResourceGroup" - ) - adls_account_subscription: Optional[str] = Field( - None, description="", alias="adlsAccountSubscription" - ) - adls_account_performance: Optional[ADLSPerformance] = Field( - None, description="", alias="adlsAccountPerformance" - ) - adls_account_replication: Optional[ADLSReplicationType] = Field( - None, description="", alias="adlsAccountReplication" - ) - adls_account_kind: 
Optional[ADLSStorageKind] = Field( - None, description="", alias="adlsAccountKind" - ) - adls_primary_disk_state: Optional[ADLSAccountStatus] = Field( - None, description="", alias="adlsPrimaryDiskState" - ) - adls_account_provision_state: Optional[ADLSProvisionState] = Field( - None, description="", alias="adlsAccountProvisionState" - ) - adls_account_access_tier: Optional[ADLSAccessTier] = Field( - None, description="", alias="adlsAccountAccessTier" - ) - adls_containers: Optional[list[ADLSContainer]] = Field( - None, description="", alias="adlsContainers" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, connection_qualified_name: str - ) -> ADLSAccount.Attributes: - validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] - ) - - # Split the connection_qualified_name to extract necessary information - fields = connection_qualified_name.split("/") - if len(fields) != 3: - raise ValueError("Invalid connection_qualified_name") - - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid connection_qualified_name") from e - - return ADLSAccount.Attributes( - name=name, - qualified_name=f"{connection_qualified_name}/{name}", - connection_qualified_name=connection_qualified_name, - connector_name=connector_type.value, - ) - - attributes: "ADLSAccount.Attributes" = Field( - default_factory=lambda: ADLSAccount.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class ADLSContainer(ADLS): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str, adls_account_qualified_name: str) -> ADLSContainer: - validate_required_fields( - ["name", "adls_account_qualified_name"], [name, adls_account_qualified_name] - ) - attributes = ADLSContainer.Attributes.create( - name=name, adls_account_qualified_name=adls_account_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("ADLSContainer", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "ADLSContainer": - raise ValueError("must be ADLSContainer") - return v - - def __setattr__(self, name, value): - if name in ADLSContainer._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - ADLS_CONTAINER_URL: ClassVar[KeywordTextField] = KeywordTextField( - "adlsContainerUrl", "adlsContainerUrl.keyword", "adlsContainerUrl" - ) - """ - TBC - """ - ADLS_CONTAINER_LEASE_STATE: ClassVar[KeywordField] = KeywordField( - "adlsContainerLeaseState", "adlsContainerLeaseState" - ) - """ - TBC - """ - ADLS_CONTAINER_LEASE_STATUS: ClassVar[KeywordField] = KeywordField( - "adlsContainerLeaseStatus", "adlsContainerLeaseStatus" - ) - """ - TBC - """ - ADLS_CONTAINER_ENCRYPTION_SCOPE: ClassVar[KeywordField] = KeywordField( - "adlsContainerEncryptionScope", "adlsContainerEncryptionScope" - ) - """ - TBC - """ - ADLS_CONTAINER_VERSION_LEVEL_IMMUTABILITY_SUPPORT: ClassVar[ - BooleanField - ] = BooleanField( - "adlsContainerVersionLevelImmutabilitySupport", - "adlsContainerVersionLevelImmutabilitySupport", - ) - """ - TBC - """ - ADLS_OBJECT_COUNT: ClassVar[NumericField] = NumericField( - "adlsObjectCount", "adlsObjectCount" - ) - """ - TBC - """ - - ADLS_OBJECTS: ClassVar[RelationField] = 
RelationField("adlsObjects") - """ - TBC - """ - ADLS_ACCOUNT: ClassVar[RelationField] = RelationField("adlsAccount") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "adls_container_url", - "adls_container_lease_state", - "adls_container_lease_status", - "adls_container_encryption_scope", - "adls_container_version_level_immutability_support", - "adls_object_count", - "adls_objects", - "adls_account", - ] - - @property - def adls_container_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.adls_container_url - - @adls_container_url.setter - def adls_container_url(self, adls_container_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_container_url = adls_container_url - - @property - def adls_container_lease_state(self) -> Optional[ADLSLeaseState]: - return ( - None - if self.attributes is None - else self.attributes.adls_container_lease_state - ) - - @adls_container_lease_state.setter - def adls_container_lease_state( - self, adls_container_lease_state: Optional[ADLSLeaseState] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_container_lease_state = adls_container_lease_state - - @property - def adls_container_lease_status(self) -> Optional[ADLSLeaseStatus]: - return ( - None - if self.attributes is None - else self.attributes.adls_container_lease_status - ) - - @adls_container_lease_status.setter - def adls_container_lease_status( - self, adls_container_lease_status: Optional[ADLSLeaseStatus] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_container_lease_status = adls_container_lease_status - - @property - def adls_container_encryption_scope(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_container_encryption_scope - ) - - @adls_container_encryption_scope.setter - def adls_container_encryption_scope( - self, adls_container_encryption_scope: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_container_encryption_scope = ( - adls_container_encryption_scope - ) - - @property - def adls_container_version_level_immutability_support(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.adls_container_version_level_immutability_support - ) - - @adls_container_version_level_immutability_support.setter - def adls_container_version_level_immutability_support( - self, adls_container_version_level_immutability_support: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_container_version_level_immutability_support = ( - adls_container_version_level_immutability_support - ) +from pyatlan.model.structs import GoogleLabel, GoogleTag +from pyatlan.utils import init_guid, validate_required_fields - @property - def adls_object_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.adls_object_count +from .asset47 import DataStudio - @adls_object_count.setter - def adls_object_count(self, adls_object_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_count = adls_object_count - - @property - def adls_objects(self) -> Optional[list[ADLSObject]]: - return None if self.attributes is None else self.attributes.adls_objects - @adls_objects.setter - def adls_objects(self, adls_objects: 
Optional[list[ADLSObject]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_objects = adls_objects - - @property - def adls_account(self) -> Optional[ADLSAccount]: - return None if self.attributes is None else self.attributes.adls_account - - @adls_account.setter - def adls_account(self, adls_account: Optional[ADLSAccount]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_account = adls_account - - class Attributes(ADLS.Attributes): - adls_container_url: Optional[str] = Field( - None, description="", alias="adlsContainerUrl" - ) - adls_container_lease_state: Optional[ADLSLeaseState] = Field( - None, description="", alias="adlsContainerLeaseState" - ) - adls_container_lease_status: Optional[ADLSLeaseStatus] = Field( - None, description="", alias="adlsContainerLeaseStatus" - ) - adls_container_encryption_scope: Optional[str] = Field( - None, description="", alias="adlsContainerEncryptionScope" - ) - adls_container_version_level_immutability_support: Optional[bool] = Field( - None, description="", alias="adlsContainerVersionLevelImmutabilitySupport" - ) - adls_object_count: Optional[int] = Field( - None, description="", alias="adlsObjectCount" - ) - adls_objects: Optional[list[ADLSObject]] = Field( - None, description="", alias="adlsObjects" - ) # relationship - adls_account: Optional[ADLSAccount] = Field( - None, description="", alias="adlsAccount" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, adls_account_qualified_name: str - ) -> ADLSContainer.Attributes: - validate_required_fields( - ["name", "adls_account_qualified_name"], - [name, adls_account_qualified_name], - ) - - # Split the adls_account_qualified_name to extract necessary information - fields = adls_account_qualified_name.split("/") - if len(fields) != 4: - raise ValueError("Invalid adls_account_qualified_name") - - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid adls_account_qualified_name") from e - - return ADLSContainer.Attributes( - name=name, - adls_account_qualified_name=adls_account_qualified_name, - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - qualified_name=f"{adls_account_qualified_name}/{name}", - connector_name=connector_type.value, - adls_account=ADLSAccount.ref_by_qualified_name( - adls_account_qualified_name - ), - ) - - attributes: "ADLSContainer.Attributes" = Field( - default_factory=lambda: ADLSContainer.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class ADLSObject(ADLS): +class DataStudioAsset(DataStudio): """Description""" @classmethod @@ -630,517 +33,339 @@ def create( cls, *, name: str, - adls_container_qualified_name: str, - ) -> ADLSObject: + connection_qualified_name: str, + data_studio_asset_type: GoogleDatastudioAssetType, + gdsid: Optional[str] = None, + ) -> DataStudioAsset: validate_required_fields( - ["name", "adls_container_qualified_name"], - [name, adls_container_qualified_name], + ["name", "connection_qualified_name", "data_studio_asset_type"], + [name, connection_qualified_name, data_studio_asset_type], ) - attributes = ADLSObject.Attributes.create( - name=name, adls_container_qualified_name=adls_container_qualified_name + if gdsid is None: + gdsid = str(uuid.uuid4()) + attributes = DataStudioAsset.Attributes.create( + name=name, + connection_qualified_name=connection_qualified_name, + data_studio_asset_type=data_studio_asset_type, + gdsid=gdsid, ) return cls(attributes=attributes) - type_name: str = Field("ADLSObject", allow_mutation=False) + type_name: str = Field("DataStudioAsset", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "ADLSObject": - raise ValueError("must be ADLSObject") + if v != "DataStudioAsset": + raise ValueError("must be DataStudioAsset") return v def __setattr__(self, name, value): - if name in ADLSObject._convenience_properties: + if name in DataStudioAsset._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - ADLS_OBJECT_URL: ClassVar[KeywordTextField] = KeywordTextField( - "adlsObjectUrl", "adlsObjectUrl.keyword", "adlsObjectUrl" - ) - """ - TBC - """ - ADLS_OBJECT_VERSION_ID: ClassVar[KeywordField] = KeywordField( - "adlsObjectVersionId", "adlsObjectVersionId" + DATA_STUDIO_ASSET_TYPE: ClassVar[KeywordField] = KeywordField( + "dataStudioAssetType", "dataStudioAssetType" ) """ - TBC + Type of the Google Data Studio asset, for example: REPORT or DATA_SOURCE. """ - ADLS_OBJECT_TYPE: ClassVar[KeywordField] = KeywordField( - "adlsObjectType", "adlsObjectType" + DATA_STUDIO_ASSET_TITLE: ClassVar[ + KeywordTextStemmedField + ] = KeywordTextStemmedField( + "dataStudioAssetTitle", + "dataStudioAssetTitle.keyword", + "dataStudioAssetTitle", + "dataStudioAssetTitle.stemmed", ) """ - TBC + Title of the Google Data Studio asset. """ - ADLS_OBJECT_SIZE: ClassVar[NumericField] = NumericField( - "adlsObjectSize", "adlsObjectSize" + DATA_STUDIO_ASSET_OWNER: ClassVar[KeywordField] = KeywordField( + "dataStudioAssetOwner", "dataStudioAssetOwner" ) """ - TBC + Owner of the asset, from Google Data Studio. """ - ADLS_OBJECT_ACCESS_TIER: ClassVar[KeywordField] = KeywordField( - "adlsObjectAccessTier", "adlsObjectAccessTier" + IS_TRASHED_DATA_STUDIO_ASSET: ClassVar[BooleanField] = BooleanField( + "isTrashedDataStudioAsset", "isTrashedDataStudioAsset" ) """ - TBC + Whether the Google Data Studio asset has been trashed (true) or not (false). """ - ADLS_OBJECT_ACCESS_TIER_LAST_MODIFIED_TIME: ClassVar[NumericField] = NumericField( - "adlsObjectAccessTierLastModifiedTime", "adlsObjectAccessTierLastModifiedTime" + GOOGLE_SERVICE: ClassVar[KeywordField] = KeywordField( + "googleService", "googleService" ) """ - TBC + Service in Google in which the asset exists. 
""" - ADLS_OBJECT_ARCHIVE_STATUS: ClassVar[KeywordField] = KeywordField( - "adlsObjectArchiveStatus", "adlsObjectArchiveStatus" + GOOGLE_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "googleProjectName", "googleProjectName", "googleProjectName.text" ) """ - TBC + Name of the project in which the asset exists. """ - ADLS_OBJECT_SERVER_ENCRYPTED: ClassVar[BooleanField] = BooleanField( - "adlsObjectServerEncrypted", "adlsObjectServerEncrypted" + GOOGLE_PROJECT_ID: ClassVar[KeywordTextField] = KeywordTextField( + "googleProjectId", "googleProjectId", "googleProjectId.text" ) """ - TBC + ID of the project in which the asset exists. """ - ADLS_OBJECT_VERSION_LEVEL_IMMUTABILITY_SUPPORT: ClassVar[ - BooleanField - ] = BooleanField( - "adlsObjectVersionLevelImmutabilitySupport", - "adlsObjectVersionLevelImmutabilitySupport", + GOOGLE_PROJECT_NUMBER: ClassVar[NumericField] = NumericField( + "googleProjectNumber", "googleProjectNumber" ) """ - TBC + Number of the project in which the asset exists. """ - ADLS_OBJECT_CACHE_CONTROL: ClassVar[TextField] = TextField( - "adlsObjectCacheControl", "adlsObjectCacheControl" + GOOGLE_LOCATION: ClassVar[KeywordField] = KeywordField( + "googleLocation", "googleLocation" ) """ - TBC + Location of this asset in Google. """ - ADLS_OBJECT_CONTENT_TYPE: ClassVar[TextField] = TextField( - "adlsObjectContentType", "adlsObjectContentType" + GOOGLE_LOCATION_TYPE: ClassVar[KeywordField] = KeywordField( + "googleLocationType", "googleLocationType" ) """ - TBC + Type of location of this asset in Google. """ - ADLS_OBJECT_CONTENT_MD5HASH: ClassVar[KeywordField] = KeywordField( - "adlsObjectContentMD5Hash", "adlsObjectContentMD5Hash" - ) - """ - TBC - """ - ADLS_OBJECT_CONTENT_LANGUAGE: ClassVar[KeywordTextField] = KeywordTextField( - "adlsObjectContentLanguage", - "adlsObjectContentLanguage.keyword", - "adlsObjectContentLanguage", - ) - """ - TBC - """ - ADLS_OBJECT_LEASE_STATUS: ClassVar[KeywordField] = KeywordField( - "adlsObjectLeaseStatus", "adlsObjectLeaseStatus" - ) + GOOGLE_LABELS: ClassVar[KeywordField] = KeywordField("googleLabels", "googleLabels") """ - TBC + List of labels that have been applied to the asset in Google. """ - ADLS_OBJECT_LEASE_STATE: ClassVar[KeywordField] = KeywordField( - "adlsObjectLeaseState", "adlsObjectLeaseState" - ) - """ - TBC - """ - ADLS_OBJECT_METADATA: ClassVar[KeywordField] = KeywordField( - "adlsObjectMetadata", "adlsObjectMetadata" - ) - """ - TBC - """ - ADLS_CONTAINER_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "adlsContainerQualifiedName", - "adlsContainerQualifiedName", - "adlsContainerQualifiedName.text", - ) + GOOGLE_TAGS: ClassVar[KeywordField] = KeywordField("googleTags", "googleTags") """ - TBC - """ - - ADLS_CONTAINER: ClassVar[RelationField] = RelationField("adlsContainer") - """ - TBC + List of tags that have been applied to the asset in Google. 
""" _convenience_properties: ClassVar[list[str]] = [ - "adls_object_url", - "adls_object_version_id", - "adls_object_type", - "adls_object_size", - "adls_object_access_tier", - "adls_object_access_tier_last_modified_time", - "adls_object_archive_status", - "adls_object_server_encrypted", - "adls_object_version_level_immutability_support", - "adls_object_cache_control", - "adls_object_content_type", - "adls_object_content_m_d5_hash", - "adls_object_content_language", - "adls_object_lease_status", - "adls_object_lease_state", - "adls_object_metadata", - "adls_container_qualified_name", - "adls_container", + "data_studio_asset_type", + "data_studio_asset_title", + "data_studio_asset_owner", + "is_trashed_data_studio_asset", + "google_service", + "google_project_name", + "google_project_id", + "google_project_number", + "google_location", + "google_location_type", + "google_labels", + "google_tags", ] @property - def adls_object_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.adls_object_url - - @adls_object_url.setter - def adls_object_url(self, adls_object_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_url = adls_object_url - - @property - def adls_object_version_id(self) -> Optional[str]: + def data_studio_asset_type(self) -> Optional[GoogleDatastudioAssetType]: return ( - None if self.attributes is None else self.attributes.adls_object_version_id + None if self.attributes is None else self.attributes.data_studio_asset_type ) - @adls_object_version_id.setter - def adls_object_version_id(self, adls_object_version_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_version_id = adls_object_version_id - - @property - def adls_object_type(self) -> Optional[ADLSObjectType]: - return None if self.attributes is None else self.attributes.adls_object_type - - @adls_object_type.setter - def adls_object_type(self, adls_object_type: Optional[ADLSObjectType]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_type = adls_object_type - - @property - def adls_object_size(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.adls_object_size - - @adls_object_size.setter - def adls_object_size(self, adls_object_size: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_size = adls_object_size - - @property - def adls_object_access_tier(self) -> Optional[ADLSAccessTier]: - return ( - None if self.attributes is None else self.attributes.adls_object_access_tier - ) - - @adls_object_access_tier.setter - def adls_object_access_tier( - self, adls_object_access_tier: Optional[ADLSAccessTier] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_access_tier = adls_object_access_tier - - @property - def adls_object_access_tier_last_modified_time(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.adls_object_access_tier_last_modified_time - ) - - @adls_object_access_tier_last_modified_time.setter - def adls_object_access_tier_last_modified_time( - self, adls_object_access_tier_last_modified_time: Optional[datetime] + @data_studio_asset_type.setter + def data_studio_asset_type( + self, data_studio_asset_type: Optional[GoogleDatastudioAssetType] ): if self.attributes is None: self.attributes = 
self.Attributes() - self.attributes.adls_object_access_tier_last_modified_time = ( - adls_object_access_tier_last_modified_time - ) + self.attributes.data_studio_asset_type = data_studio_asset_type @property - def adls_object_archive_status(self) -> Optional[ADLSObjectArchiveStatus]: + def data_studio_asset_title(self) -> Optional[str]: return ( - None - if self.attributes is None - else self.attributes.adls_object_archive_status + None if self.attributes is None else self.attributes.data_studio_asset_title ) - @adls_object_archive_status.setter - def adls_object_archive_status( - self, adls_object_archive_status: Optional[ADLSObjectArchiveStatus] - ): + @data_studio_asset_title.setter + def data_studio_asset_title(self, data_studio_asset_title: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.adls_object_archive_status = adls_object_archive_status + self.attributes.data_studio_asset_title = data_studio_asset_title @property - def adls_object_server_encrypted(self) -> Optional[bool]: + def data_studio_asset_owner(self) -> Optional[str]: return ( - None - if self.attributes is None - else self.attributes.adls_object_server_encrypted + None if self.attributes is None else self.attributes.data_studio_asset_owner ) - @adls_object_server_encrypted.setter - def adls_object_server_encrypted( - self, adls_object_server_encrypted: Optional[bool] - ): + @data_studio_asset_owner.setter + def data_studio_asset_owner(self, data_studio_asset_owner: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.adls_object_server_encrypted = adls_object_server_encrypted + self.attributes.data_studio_asset_owner = data_studio_asset_owner @property - def adls_object_version_level_immutability_support(self) -> Optional[bool]: + def is_trashed_data_studio_asset(self) -> Optional[bool]: return ( None if self.attributes is None - else self.attributes.adls_object_version_level_immutability_support + else self.attributes.is_trashed_data_studio_asset ) - @adls_object_version_level_immutability_support.setter - def adls_object_version_level_immutability_support( - self, adls_object_version_level_immutability_support: Optional[bool] + @is_trashed_data_studio_asset.setter + def is_trashed_data_studio_asset( + self, is_trashed_data_studio_asset: Optional[bool] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.adls_object_version_level_immutability_support = ( - adls_object_version_level_immutability_support - ) + self.attributes.is_trashed_data_studio_asset = is_trashed_data_studio_asset @property - def adls_object_cache_control(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_object_cache_control - ) + def google_service(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_service - @adls_object_cache_control.setter - def adls_object_cache_control(self, adls_object_cache_control: Optional[str]): + @google_service.setter + def google_service(self, google_service: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.adls_object_cache_control = adls_object_cache_control + self.attributes.google_service = google_service @property - def adls_object_content_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_object_content_type - ) + def google_project_name(self) -> Optional[str]: + return None if self.attributes is None 
else self.attributes.google_project_name - @adls_object_content_type.setter - def adls_object_content_type(self, adls_object_content_type: Optional[str]): + @google_project_name.setter + def google_project_name(self, google_project_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.adls_object_content_type = adls_object_content_type + self.attributes.google_project_name = google_project_name @property - def adls_object_content_m_d5_hash(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_object_content_m_d5_hash - ) + def google_project_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_project_id - @adls_object_content_m_d5_hash.setter - def adls_object_content_m_d5_hash( - self, adls_object_content_m_d5_hash: Optional[str] - ): + @google_project_id.setter + def google_project_id(self, google_project_id: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.adls_object_content_m_d5_hash = adls_object_content_m_d5_hash + self.attributes.google_project_id = google_project_id @property - def adls_object_content_language(self) -> Optional[str]: + def google_project_number(self) -> Optional[int]: return ( - None - if self.attributes is None - else self.attributes.adls_object_content_language + None if self.attributes is None else self.attributes.google_project_number ) - @adls_object_content_language.setter - def adls_object_content_language(self, adls_object_content_language: Optional[str]): + @google_project_number.setter + def google_project_number(self, google_project_number: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.adls_object_content_language = adls_object_content_language + self.attributes.google_project_number = google_project_number @property - def adls_object_lease_status(self) -> Optional[ADLSLeaseStatus]: - return ( - None - if self.attributes is None - else self.attributes.adls_object_lease_status - ) + def google_location(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_location - @adls_object_lease_status.setter - def adls_object_lease_status( - self, adls_object_lease_status: Optional[ADLSLeaseStatus] - ): + @google_location.setter + def google_location(self, google_location: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.adls_object_lease_status = adls_object_lease_status + self.attributes.google_location = google_location @property - def adls_object_lease_state(self) -> Optional[ADLSLeaseState]: - return ( - None if self.attributes is None else self.attributes.adls_object_lease_state - ) + def google_location_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.google_location_type - @adls_object_lease_state.setter - def adls_object_lease_state( - self, adls_object_lease_state: Optional[ADLSLeaseState] - ): + @google_location_type.setter + def google_location_type(self, google_location_type: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.adls_object_lease_state = adls_object_lease_state + self.attributes.google_location_type = google_location_type @property - def adls_object_metadata(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.adls_object_metadata + def google_labels(self) -> 
Optional[list[GoogleLabel]]: + return None if self.attributes is None else self.attributes.google_labels - @adls_object_metadata.setter - def adls_object_metadata(self, adls_object_metadata: Optional[dict[str, str]]): + @google_labels.setter + def google_labels(self, google_labels: Optional[list[GoogleLabel]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.adls_object_metadata = adls_object_metadata + self.attributes.google_labels = google_labels @property - def adls_container_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_container_qualified_name - ) + def google_tags(self) -> Optional[list[GoogleTag]]: + return None if self.attributes is None else self.attributes.google_tags - @adls_container_qualified_name.setter - def adls_container_qualified_name( - self, adls_container_qualified_name: Optional[str] - ): + @google_tags.setter + def google_tags(self, google_tags: Optional[list[GoogleTag]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.adls_container_qualified_name = adls_container_qualified_name - - @property - def adls_container(self) -> Optional[ADLSContainer]: - return None if self.attributes is None else self.attributes.adls_container + self.attributes.google_tags = google_tags - @adls_container.setter - def adls_container(self, adls_container: Optional[ADLSContainer]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_container = adls_container - - class Attributes(ADLS.Attributes): - adls_object_url: Optional[str] = Field( - None, description="", alias="adlsObjectUrl" - ) - adls_object_version_id: Optional[str] = Field( - None, description="", alias="adlsObjectVersionId" - ) - adls_object_type: Optional[ADLSObjectType] = Field( - None, description="", alias="adlsObjectType" - ) - adls_object_size: Optional[int] = Field( - None, description="", alias="adlsObjectSize" + class Attributes(DataStudio.Attributes): + data_studio_asset_type: Optional[GoogleDatastudioAssetType] = Field( + None, description="", alias="dataStudioAssetType" ) - adls_object_access_tier: Optional[ADLSAccessTier] = Field( - None, description="", alias="adlsObjectAccessTier" + data_studio_asset_title: Optional[str] = Field( + None, description="", alias="dataStudioAssetTitle" ) - adls_object_access_tier_last_modified_time: Optional[datetime] = Field( - None, description="", alias="adlsObjectAccessTierLastModifiedTime" + data_studio_asset_owner: Optional[str] = Field( + None, description="", alias="dataStudioAssetOwner" ) - adls_object_archive_status: Optional[ADLSObjectArchiveStatus] = Field( - None, description="", alias="adlsObjectArchiveStatus" + is_trashed_data_studio_asset: Optional[bool] = Field( + None, description="", alias="isTrashedDataStudioAsset" ) - adls_object_server_encrypted: Optional[bool] = Field( - None, description="", alias="adlsObjectServerEncrypted" + google_service: Optional[str] = Field( + None, description="", alias="googleService" ) - adls_object_version_level_immutability_support: Optional[bool] = Field( - None, description="", alias="adlsObjectVersionLevelImmutabilitySupport" + google_project_name: Optional[str] = Field( + None, description="", alias="googleProjectName" ) - adls_object_cache_control: Optional[str] = Field( - None, description="", alias="adlsObjectCacheControl" + google_project_id: Optional[str] = Field( + None, description="", alias="googleProjectId" ) - adls_object_content_type: 
Optional[str] = Field( - None, description="", alias="adlsObjectContentType" + google_project_number: Optional[int] = Field( + None, description="", alias="googleProjectNumber" ) - adls_object_content_m_d5_hash: Optional[str] = Field( - None, description="", alias="adlsObjectContentMD5Hash" + google_location: Optional[str] = Field( + None, description="", alias="googleLocation" ) - adls_object_content_language: Optional[str] = Field( - None, description="", alias="adlsObjectContentLanguage" + google_location_type: Optional[str] = Field( + None, description="", alias="googleLocationType" ) - adls_object_lease_status: Optional[ADLSLeaseStatus] = Field( - None, description="", alias="adlsObjectLeaseStatus" + google_labels: Optional[list[GoogleLabel]] = Field( + None, description="", alias="googleLabels" ) - adls_object_lease_state: Optional[ADLSLeaseState] = Field( - None, description="", alias="adlsObjectLeaseState" + google_tags: Optional[list[GoogleTag]] = Field( + None, description="", alias="googleTags" ) - adls_object_metadata: Optional[dict[str, str]] = Field( - None, description="", alias="adlsObjectMetadata" - ) - adls_container_qualified_name: Optional[str] = Field( - None, description="", alias="adlsContainerQualifiedName" - ) - adls_container: Optional[ADLSContainer] = Field( - None, description="", alias="adlsContainer" - ) # relationship @classmethod # @validate_arguments() @init_guid def create( - cls, *, name: str, adls_container_qualified_name: str - ) -> ADLSObject.Attributes: + cls, + *, + name: str, + connection_qualified_name: str, + data_studio_asset_type: GoogleDatastudioAssetType, + gdsid: str, + ) -> DataStudioAsset.Attributes: validate_required_fields( - ["name", "adls_container_qualified_name"], - [name, adls_container_qualified_name], + ["name", "connection_qualified_name", "data_studio_asset_type"], + [name, connection_qualified_name, data_studio_asset_type], ) - # Split the qualified_name to extract necessary information - fields = adls_container_qualified_name.split("/") - if len(fields) != 5: - raise ValueError("Invalid qualified_name") + # Split the connection_qualified_name to extract necessary information + fields = connection_qualified_name.split("/") + if len(fields) != 3: + raise ValueError("Invalid connection_qualified_name") try: connector_type = AtlanConnectorType(fields[1]) # type:ignore except ValueError as e: - raise ValueError("Invalid qualified_name") from e - adls_account_qualified_name = get_parent_qualified_name( - adls_container_qualified_name - ) + raise ValueError("Invalid connection_qualified_name") from e - return ADLSObject.Attributes( + return DataStudioAsset.Attributes( name=name, - adls_container_qualified_name=adls_container_qualified_name, - qualified_name=f"{adls_container_qualified_name}/{name}", - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + qualified_name=f"{connection_qualified_name}/{gdsid}", + connection_qualified_name=connection_qualified_name, connector_name=connector_type.value, - adls_container=ADLSContainer.ref_by_qualified_name( - adls_container_qualified_name - ), - adls_account_qualified_name=adls_account_qualified_name, + data_studio_asset_type=data_studio_asset_type, ) - attributes: "ADLSObject.Attributes" = Field( - default_factory=lambda: ADLSObject.Attributes(), + attributes: "DataStudioAsset.Attributes" = Field( + default_factory=lambda: DataStudioAsset.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -ADLSAccount.Attributes.update_forward_refs() - - -ADLSContainer.Attributes.update_forward_refs() - - -ADLSObject.Attributes.update_forward_refs() +DataStudioAsset.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset62.py b/pyatlan/model/assets/asset62.py index 6bb01e952..8b13c3a1b 100644 --- a/pyatlan/model/assets/asset62.py +++ b/pyatlan/model/assets/asset62.py @@ -5,7 +5,7 @@ from __future__ import annotations from datetime import datetime -from typing import ClassVar, Optional +from typing import ClassVar, Optional, overload from pydantic import Field, validator @@ -16,708 +16,564 @@ KeywordTextField, NumericField, RelationField, - TextField, ) from pyatlan.utils import init_guid, validate_required_fields -from .asset36 import GCS +from .asset35 import S3 -class GCSObject(GCS): +class S3Bucket(S3): """Description""" + @overload + @classmethod + @init_guid + def create( + cls, + *, + name: str, + connection_qualified_name: str, + aws_arn: str, + ) -> S3Bucket: + ... + + @overload + @classmethod + @init_guid + def create( + cls, + *, + name: str, + connection_qualified_name: str, + aws_arn: Optional[str] = None, + ) -> S3Bucket: + ... + @classmethod # @validate_arguments() @init_guid - def create(cls, *, name: str, gcs_bucket_qualified_name: str) -> GCSObject: + def create( + cls, *, name: str, connection_qualified_name: str, aws_arn: Optional[str] = None + ) -> S3Bucket: validate_required_fields( - ["name", "gcs_bucket_qualified_name"], [name, gcs_bucket_qualified_name] + ["name", "connection_qualified_name"], + [name, connection_qualified_name], ) - attributes = GCSObject.Attributes.create( - name=name, gcs_bucket_qualified_name=gcs_bucket_qualified_name + attributes = S3Bucket.Attributes.create( + name=name, + connection_qualified_name=connection_qualified_name, + aws_arn=aws_arn, ) return cls(attributes=attributes) - type_name: str = Field("GCSObject", allow_mutation=False) + type_name: str = Field("S3Bucket", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "GCSObject": - raise ValueError("must be GCSObject") + if v != "S3Bucket": + raise ValueError("must be S3Bucket") return v def __setattr__(self, name, value): - if name in GCSObject._convenience_properties: + if name in S3Bucket._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - GCS_BUCKET_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "gcsBucketName", "gcsBucketName.keyword", "gcsBucketName" - ) - """ - TBC - """ - GCS_BUCKET_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "gcsBucketQualifiedName", - "gcsBucketQualifiedName", - "gcsBucketQualifiedName.text", - ) - """ - TBC - """ - GCS_OBJECT_SIZE: ClassVar[NumericField] = NumericField( - "gcsObjectSize", "gcsObjectSize" - ) - """ - TBC - """ - GCS_OBJECT_KEY: ClassVar[KeywordTextField] = KeywordTextField( - "gcsObjectKey", "gcsObjectKey", "gcsObjectKey.text" - ) - """ - TBC - """ - GCS_OBJECT_MEDIA_LINK: ClassVar[KeywordTextField] = KeywordTextField( - "gcsObjectMediaLink", "gcsObjectMediaLink", "gcsObjectMediaLink.text" - ) - """ - TBC - """ - GCS_OBJECT_HOLD_TYPE: ClassVar[KeywordField] = KeywordField( - "gcsObjectHoldType", "gcsObjectHoldType" - ) - """ - TBC - """ - GCS_OBJECT_GENERATION_ID: ClassVar[NumericField] = NumericField( - "gcsObjectGenerationId", "gcsObjectGenerationId" + S3OBJECT_COUNT: ClassVar[NumericField] = 
NumericField( + "s3ObjectCount", "s3ObjectCount" ) """ - TBC + Number of objects within the bucket. """ - GCS_OBJECT_CRC32C_HASH: ClassVar[KeywordField] = KeywordField( - "gcsObjectCRC32CHash", "gcsObjectCRC32CHash" + S3BUCKET_VERSIONING_ENABLED: ClassVar[BooleanField] = BooleanField( + "s3BucketVersioningEnabled", "s3BucketVersioningEnabled" ) """ - TBC - """ - GCS_OBJECT_MD5HASH: ClassVar[KeywordField] = KeywordField( - "gcsObjectMD5Hash", "gcsObjectMD5Hash" - ) - """ - TBC - """ - GCS_OBJECT_DATA_LAST_MODIFIED_TIME: ClassVar[NumericField] = NumericField( - "gcsObjectDataLastModifiedTime", "gcsObjectDataLastModifiedTime" - ) - """ - TBC - """ - GCS_OBJECT_CONTENT_TYPE: ClassVar[KeywordField] = KeywordField( - "gcsObjectContentType", "gcsObjectContentType" - ) - """ - TBC - """ - GCS_OBJECT_CONTENT_ENCODING: ClassVar[KeywordField] = KeywordField( - "gcsObjectContentEncoding", "gcsObjectContentEncoding" - ) - """ - TBC - """ - GCS_OBJECT_CONTENT_DISPOSITION: ClassVar[KeywordField] = KeywordField( - "gcsObjectContentDisposition", "gcsObjectContentDisposition" - ) - """ - TBC - """ - GCS_OBJECT_CONTENT_LANGUAGE: ClassVar[KeywordField] = KeywordField( - "gcsObjectContentLanguage", "gcsObjectContentLanguage" - ) - """ - TBC - """ - GCS_OBJECT_RETENTION_EXPIRATION_DATE: ClassVar[NumericField] = NumericField( - "gcsObjectRetentionExpirationDate", "gcsObjectRetentionExpirationDate" - ) - """ - TBC + Whether versioning is enabled for the bucket (true) or not (false). """ - GCS_BUCKET: ClassVar[RelationField] = RelationField("gcsBucket") + OBJECTS: ClassVar[RelationField] = RelationField("objects") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "gcs_bucket_name", - "gcs_bucket_qualified_name", - "gcs_object_size", - "gcs_object_key", - "gcs_object_media_link", - "gcs_object_hold_type", - "gcs_object_generation_id", - "gcs_object_c_r_c32_c_hash", - "gcs_object_m_d5_hash", - "gcs_object_data_last_modified_time", - "gcs_object_content_type", - "gcs_object_content_encoding", - "gcs_object_content_disposition", - "gcs_object_content_language", - "gcs_object_retention_expiration_date", - "gcs_bucket", + "s3_object_count", + "s3_bucket_versioning_enabled", + "objects", ] @property - def gcs_bucket_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.gcs_bucket_name - - @gcs_bucket_name.setter - def gcs_bucket_name(self, gcs_bucket_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_bucket_name = gcs_bucket_name - - @property - def gcs_bucket_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.gcs_bucket_qualified_name - ) - - @gcs_bucket_qualified_name.setter - def gcs_bucket_qualified_name(self, gcs_bucket_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_bucket_qualified_name = gcs_bucket_qualified_name - - @property - def gcs_object_size(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.gcs_object_size - - @gcs_object_size.setter - def gcs_object_size(self, gcs_object_size: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_object_size = gcs_object_size - - @property - def gcs_object_key(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.gcs_object_key - - @gcs_object_key.setter - def gcs_object_key(self, gcs_object_key: Optional[str]): - 
if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_object_key = gcs_object_key - - @property - def gcs_object_media_link(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.gcs_object_media_link - ) - - @gcs_object_media_link.setter - def gcs_object_media_link(self, gcs_object_media_link: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_object_media_link = gcs_object_media_link - - @property - def gcs_object_hold_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.gcs_object_hold_type - - @gcs_object_hold_type.setter - def gcs_object_hold_type(self, gcs_object_hold_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_object_hold_type = gcs_object_hold_type - - @property - def gcs_object_generation_id(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.gcs_object_generation_id - ) - - @gcs_object_generation_id.setter - def gcs_object_generation_id(self, gcs_object_generation_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_object_generation_id = gcs_object_generation_id - - @property - def gcs_object_c_r_c32_c_hash(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.gcs_object_c_r_c32_c_hash - ) - - @gcs_object_c_r_c32_c_hash.setter - def gcs_object_c_r_c32_c_hash(self, gcs_object_c_r_c32_c_hash: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_object_c_r_c32_c_hash = gcs_object_c_r_c32_c_hash - - @property - def gcs_object_m_d5_hash(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.gcs_object_m_d5_hash - - @gcs_object_m_d5_hash.setter - def gcs_object_m_d5_hash(self, gcs_object_m_d5_hash: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_object_m_d5_hash = gcs_object_m_d5_hash - - @property - def gcs_object_data_last_modified_time(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.gcs_object_data_last_modified_time - ) - - @gcs_object_data_last_modified_time.setter - def gcs_object_data_last_modified_time( - self, gcs_object_data_last_modified_time: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_object_data_last_modified_time = ( - gcs_object_data_last_modified_time - ) - - @property - def gcs_object_content_type(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.gcs_object_content_type - ) - - @gcs_object_content_type.setter - def gcs_object_content_type(self, gcs_object_content_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_object_content_type = gcs_object_content_type - - @property - def gcs_object_content_encoding(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.gcs_object_content_encoding - ) + def s3_object_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.s3_object_count - @gcs_object_content_encoding.setter - def gcs_object_content_encoding(self, gcs_object_content_encoding: Optional[str]): + @s3_object_count.setter + def s3_object_count(self, s3_object_count: 
Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.gcs_object_content_encoding = gcs_object_content_encoding + self.attributes.s3_object_count = s3_object_count @property - def gcs_object_content_disposition(self) -> Optional[str]: + def s3_bucket_versioning_enabled(self) -> Optional[bool]: return ( None if self.attributes is None - else self.attributes.gcs_object_content_disposition + else self.attributes.s3_bucket_versioning_enabled ) - @gcs_object_content_disposition.setter - def gcs_object_content_disposition( - self, gcs_object_content_disposition: Optional[str] + @s3_bucket_versioning_enabled.setter + def s3_bucket_versioning_enabled( + self, s3_bucket_versioning_enabled: Optional[bool] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.gcs_object_content_disposition = gcs_object_content_disposition + self.attributes.s3_bucket_versioning_enabled = s3_bucket_versioning_enabled @property - def gcs_object_content_language(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.gcs_object_content_language - ) + def objects(self) -> Optional[list[S3Object]]: + return None if self.attributes is None else self.attributes.objects - @gcs_object_content_language.setter - def gcs_object_content_language(self, gcs_object_content_language: Optional[str]): + @objects.setter + def objects(self, objects: Optional[list[S3Object]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.gcs_object_content_language = gcs_object_content_language + self.attributes.objects = objects - @property - def gcs_object_retention_expiration_date(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.gcs_object_retention_expiration_date + class Attributes(S3.Attributes): + s3_object_count: Optional[int] = Field( + None, description="", alias="s3ObjectCount" ) - - @gcs_object_retention_expiration_date.setter - def gcs_object_retention_expiration_date( - self, gcs_object_retention_expiration_date: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_object_retention_expiration_date = ( - gcs_object_retention_expiration_date + s3_bucket_versioning_enabled: Optional[bool] = Field( + None, description="", alias="s3BucketVersioningEnabled" ) - - @property - def gcs_bucket(self) -> Optional[GCSBucket]: - return None if self.attributes is None else self.attributes.gcs_bucket - - @gcs_bucket.setter - def gcs_bucket(self, gcs_bucket: Optional[GCSBucket]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_bucket = gcs_bucket - - class Attributes(GCS.Attributes): - gcs_bucket_name: Optional[str] = Field( - None, description="", alias="gcsBucketName" - ) - gcs_bucket_qualified_name: Optional[str] = Field( - None, description="", alias="gcsBucketQualifiedName" - ) - gcs_object_size: Optional[int] = Field( - None, description="", alias="gcsObjectSize" - ) - gcs_object_key: Optional[str] = Field( - None, description="", alias="gcsObjectKey" - ) - gcs_object_media_link: Optional[str] = Field( - None, description="", alias="gcsObjectMediaLink" - ) - gcs_object_hold_type: Optional[str] = Field( - None, description="", alias="gcsObjectHoldType" - ) - gcs_object_generation_id: Optional[int] = Field( - None, description="", alias="gcsObjectGenerationId" - ) - gcs_object_c_r_c32_c_hash: Optional[str] = Field( - None, description="", 
alias="gcsObjectCRC32CHash" - ) - gcs_object_m_d5_hash: Optional[str] = Field( - None, description="", alias="gcsObjectMD5Hash" - ) - gcs_object_data_last_modified_time: Optional[datetime] = Field( - None, description="", alias="gcsObjectDataLastModifiedTime" - ) - gcs_object_content_type: Optional[str] = Field( - None, description="", alias="gcsObjectContentType" - ) - gcs_object_content_encoding: Optional[str] = Field( - None, description="", alias="gcsObjectContentEncoding" - ) - gcs_object_content_disposition: Optional[str] = Field( - None, description="", alias="gcsObjectContentDisposition" - ) - gcs_object_content_language: Optional[str] = Field( - None, description="", alias="gcsObjectContentLanguage" - ) - gcs_object_retention_expiration_date: Optional[datetime] = Field( - None, description="", alias="gcsObjectRetentionExpirationDate" - ) - gcs_bucket: Optional[GCSBucket] = Field( - None, description="", alias="gcsBucket" + objects: Optional[list[S3Object]] = Field( + None, description="", alias="objects" ) # relationship @classmethod # @validate_arguments() @init_guid def create( - cls, *, name: str, gcs_bucket_qualified_name: str - ) -> GCSObject.Attributes: + cls, + *, + name: str, + connection_qualified_name: str, + aws_arn: Optional[str] = None, + ) -> S3Bucket.Attributes: validate_required_fields( - ["name", "gcs_bucket_qualified_name"], [name, gcs_bucket_qualified_name] + ["name", "connection_qualified_name"], + [name, connection_qualified_name], ) - - # Split the gcs_bucket_qualified_name to extract necessary information - fields = gcs_bucket_qualified_name.split("/") - if len(fields) != 4: - raise ValueError("Invalid gcs_bucket_qualified_name") - + fields = connection_qualified_name.split("/") + if len(fields) != 3: + raise ValueError("Invalid connection_qualified_name") try: + if fields[0].replace(" ", "") == "" or fields[2].replace(" ", "") == "": + raise ValueError("Invalid connection_qualified_name") connector_type = AtlanConnectorType(fields[1]) # type:ignore + if connector_type != AtlanConnectorType.S3: + raise ValueError("Connector type must be s3") except ValueError as e: - raise ValueError("Invalid gcs_bucket_qualified_name") from e - - return GCSObject.Attributes( + raise ValueError("Invalid connection_qualified_name") from e + return S3Bucket.Attributes( + aws_arn=aws_arn, name=name, - gcs_bucket_qualified_name=gcs_bucket_qualified_name, - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - qualified_name=f"{gcs_bucket_qualified_name}/{name}", + connection_qualified_name=connection_qualified_name, + qualified_name=f"{connection_qualified_name}/{aws_arn if aws_arn else name}", connector_name=connector_type.value, - gcs_bucket=GCSBucket.ref_by_qualified_name(gcs_bucket_qualified_name), ) - attributes: "GCSObject.Attributes" = Field( - default_factory=lambda: GCSObject.Attributes(), + attributes: "S3Bucket.Attributes" = Field( + default_factory=lambda: S3Bucket.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class GCSBucket(GCS): +class S3Object(S3): """Description""" @classmethod # @validate_arguments() @init_guid - def create(cls, *, name: str, connection_qualified_name: str) -> GCSBucket: + def create( + cls, + *, + name: str, + connection_qualified_name: str, + aws_arn: str, + s3_bucket_qualified_name: str, + ) -> S3Object: validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] + [ + "name", + "connection_qualified_name", + "aws_arn", + "s3_bucket_qualified_name", + ], + [name, connection_qualified_name, aws_arn, s3_bucket_qualified_name], + ) + attributes = S3Object.Attributes.create( + name=name, + connection_qualified_name=connection_qualified_name, + aws_arn=aws_arn, + s3_bucket_qualified_name=s3_bucket_qualified_name, ) - attributes = GCSBucket.Attributes.create( - name=name, connection_qualified_name=connection_qualified_name + return cls(attributes=attributes) + + @classmethod + # @validate_arguments() + @init_guid + def create_with_prefix( + cls, + *, + name: str, + connection_qualified_name: str, + prefix: str, + s3_bucket_qualified_name: str, + ) -> S3Object: + validate_required_fields( + [ + "name", + "connection_qualified_name", + "prefix", + "s3_bucket_qualified_name", + ], + [name, connection_qualified_name, prefix, s3_bucket_qualified_name], + ) + attributes = S3Object.Attributes.create_with_prefix( + name=name, + connection_qualified_name=connection_qualified_name, + prefix=prefix, + s3_bucket_qualified_name=s3_bucket_qualified_name, ) return cls(attributes=attributes) - type_name: str = Field("GCSBucket", allow_mutation=False) + type_name: str = Field("S3Object", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "GCSBucket": - raise ValueError("must be GCSBucket") + if v != "S3Object": + raise ValueError("must be S3Object") return v def __setattr__(self, name, value): - if name in GCSBucket._convenience_properties: + if name in S3Object._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - GCS_OBJECT_COUNT: ClassVar[NumericField] = NumericField( - "gcsObjectCount", "gcsObjectCount" + S3OBJECT_LAST_MODIFIED_TIME: ClassVar[NumericField] = NumericField( + "s3ObjectLastModifiedTime", "s3ObjectLastModifiedTime" ) """ - TBC - """ - GCS_BUCKET_VERSIONING_ENABLED: ClassVar[BooleanField] = BooleanField( - "gcsBucketVersioningEnabled", "gcsBucketVersioningEnabled" + Time (epoch) at which this object was last updated, in milliseconds, or when it was created if it has never been modified. + """ # noqa: E501 + S3BUCKET_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "s3BucketName", "s3BucketName", "s3BucketName.text" ) """ - TBC + Simple name of the bucket in which this object exists. """ - GCS_BUCKET_RETENTION_LOCKED: ClassVar[BooleanField] = BooleanField( - "gcsBucketRetentionLocked", "gcsBucketRetentionLocked" + S3BUCKET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "s3BucketQualifiedName", "s3BucketQualifiedName" ) """ - TBC + Unique name of the bucket in which this object exists. """ - GCS_BUCKET_RETENTION_PERIOD: ClassVar[NumericField] = NumericField( - "gcsBucketRetentionPeriod", "gcsBucketRetentionPeriod" - ) + S3OBJECT_SIZE: ClassVar[NumericField] = NumericField("s3ObjectSize", "s3ObjectSize") """ - TBC + Object size in bytes. 
""" - GCS_BUCKET_RETENTION_EFFECTIVE_TIME: ClassVar[NumericField] = NumericField( - "gcsBucketRetentionEffectiveTime", "gcsBucketRetentionEffectiveTime" + S3OBJECT_STORAGE_CLASS: ClassVar[KeywordField] = KeywordField( + "s3ObjectStorageClass", "s3ObjectStorageClass" ) """ - TBC + Storage class used for storing this object, for example: standard, intelligent-tiering, glacier, etc. """ - GCS_BUCKET_LIFECYCLE_RULES: ClassVar[TextField] = TextField( - "gcsBucketLifecycleRules", "gcsBucketLifecycleRules" + S3OBJECT_KEY: ClassVar[KeywordTextField] = KeywordTextField( + "s3ObjectKey", "s3ObjectKey", "s3ObjectKey.text" ) """ - TBC + Unique identity of this object in an S3 bucket. This is usually the concatenation of any prefix (folder) in the S3 bucket with the name of the object (file) itself. + """ # noqa: E501 + S3OBJECT_CONTENT_TYPE: ClassVar[KeywordField] = KeywordField( + "s3ObjectContentType", "s3ObjectContentType" + ) """ - GCS_BUCKET_RETENTION_POLICY: ClassVar[TextField] = TextField( - "gcsBucketRetentionPolicy", "gcsBucketRetentionPolicy" + Type of content in this object, for example: text/plain, application/json, etc. + """ + S3OBJECT_CONTENT_DISPOSITION: ClassVar[KeywordField] = KeywordField( + "s3ObjectContentDisposition", "s3ObjectContentDisposition" ) """ - TBC + Information about how this object's content should be presented. + """ + S3OBJECT_VERSION_ID: ClassVar[KeywordField] = KeywordField( + "s3ObjectVersionId", "s3ObjectVersionId" + ) """ + Version of this object. This is only applicable when versioning is enabled on the bucket in which this object exists. + """ # noqa: E501 - GCS_OBJECTS: ClassVar[RelationField] = RelationField("gcsObjects") + BUCKET: ClassVar[RelationField] = RelationField("bucket") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "gcs_object_count", - "gcs_bucket_versioning_enabled", - "gcs_bucket_retention_locked", - "gcs_bucket_retention_period", - "gcs_bucket_retention_effective_time", - "gcs_bucket_lifecycle_rules", - "gcs_bucket_retention_policy", - "gcs_objects", + "s3_object_last_modified_time", + "s3_bucket_name", + "s3_bucket_qualified_name", + "s3_object_size", + "s3_object_storage_class", + "s3_object_key", + "s3_object_content_type", + "s3_object_content_disposition", + "s3_object_version_id", + "bucket", ] @property - def gcs_object_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.gcs_object_count - - @gcs_object_count.setter - def gcs_object_count(self, gcs_object_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_object_count = gcs_object_count - - @property - def gcs_bucket_versioning_enabled(self) -> Optional[bool]: + def s3_object_last_modified_time(self) -> Optional[datetime]: return ( None if self.attributes is None - else self.attributes.gcs_bucket_versioning_enabled + else self.attributes.s3_object_last_modified_time ) - @gcs_bucket_versioning_enabled.setter - def gcs_bucket_versioning_enabled( - self, gcs_bucket_versioning_enabled: Optional[bool] + @s3_object_last_modified_time.setter + def s3_object_last_modified_time( + self, s3_object_last_modified_time: Optional[datetime] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.gcs_bucket_versioning_enabled = gcs_bucket_versioning_enabled + self.attributes.s3_object_last_modified_time = s3_object_last_modified_time @property - def gcs_bucket_retention_locked(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else 
self.attributes.gcs_bucket_retention_locked - ) + def s3_bucket_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.s3_bucket_name - @gcs_bucket_retention_locked.setter - def gcs_bucket_retention_locked(self, gcs_bucket_retention_locked: Optional[bool]): + @s3_bucket_name.setter + def s3_bucket_name(self, s3_bucket_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.gcs_bucket_retention_locked = gcs_bucket_retention_locked + self.attributes.s3_bucket_name = s3_bucket_name @property - def gcs_bucket_retention_period(self) -> Optional[int]: + def s3_bucket_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.gcs_bucket_retention_period + else self.attributes.s3_bucket_qualified_name ) - @gcs_bucket_retention_period.setter - def gcs_bucket_retention_period(self, gcs_bucket_retention_period: Optional[int]): + @s3_bucket_qualified_name.setter + def s3_bucket_qualified_name(self, s3_bucket_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.gcs_bucket_retention_period = gcs_bucket_retention_period + self.attributes.s3_bucket_qualified_name = s3_bucket_qualified_name @property - def gcs_bucket_retention_effective_time(self) -> Optional[datetime]: + def s3_object_size(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.s3_object_size + + @s3_object_size.setter + def s3_object_size(self, s3_object_size: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.s3_object_size = s3_object_size + + @property + def s3_object_storage_class(self) -> Optional[str]: return ( - None - if self.attributes is None - else self.attributes.gcs_bucket_retention_effective_time + None if self.attributes is None else self.attributes.s3_object_storage_class ) - @gcs_bucket_retention_effective_time.setter - def gcs_bucket_retention_effective_time( - self, gcs_bucket_retention_effective_time: Optional[datetime] - ): + @s3_object_storage_class.setter + def s3_object_storage_class(self, s3_object_storage_class: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.gcs_bucket_retention_effective_time = ( - gcs_bucket_retention_effective_time - ) + self.attributes.s3_object_storage_class = s3_object_storage_class + + @property + def s3_object_key(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.s3_object_key + + @s3_object_key.setter + def s3_object_key(self, s3_object_key: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.s3_object_key = s3_object_key @property - def gcs_bucket_lifecycle_rules(self) -> Optional[str]: + def s3_object_content_type(self) -> Optional[str]: return ( - None - if self.attributes is None - else self.attributes.gcs_bucket_lifecycle_rules + None if self.attributes is None else self.attributes.s3_object_content_type ) - @gcs_bucket_lifecycle_rules.setter - def gcs_bucket_lifecycle_rules(self, gcs_bucket_lifecycle_rules: Optional[str]): + @s3_object_content_type.setter + def s3_object_content_type(self, s3_object_content_type: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.gcs_bucket_lifecycle_rules = gcs_bucket_lifecycle_rules + self.attributes.s3_object_content_type = s3_object_content_type @property - def gcs_bucket_retention_policy(self) -> 
Optional[str]: + def s3_object_content_disposition(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.gcs_bucket_retention_policy + else self.attributes.s3_object_content_disposition ) - @gcs_bucket_retention_policy.setter - def gcs_bucket_retention_policy(self, gcs_bucket_retention_policy: Optional[str]): + @s3_object_content_disposition.setter + def s3_object_content_disposition( + self, s3_object_content_disposition: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.s3_object_content_disposition = s3_object_content_disposition + + @property + def s3_object_version_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.s3_object_version_id + + @s3_object_version_id.setter + def s3_object_version_id(self, s3_object_version_id: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.gcs_bucket_retention_policy = gcs_bucket_retention_policy + self.attributes.s3_object_version_id = s3_object_version_id @property - def gcs_objects(self) -> Optional[list[GCSObject]]: - return None if self.attributes is None else self.attributes.gcs_objects + def bucket(self) -> Optional[S3Bucket]: + return None if self.attributes is None else self.attributes.bucket - @gcs_objects.setter - def gcs_objects(self, gcs_objects: Optional[list[GCSObject]]): + @bucket.setter + def bucket(self, bucket: Optional[S3Bucket]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.gcs_objects = gcs_objects + self.attributes.bucket = bucket - class Attributes(GCS.Attributes): - gcs_object_count: Optional[int] = Field( - None, description="", alias="gcsObjectCount" + class Attributes(S3.Attributes): + s3_object_last_modified_time: Optional[datetime] = Field( + None, description="", alias="s3ObjectLastModifiedTime" ) - gcs_bucket_versioning_enabled: Optional[bool] = Field( - None, description="", alias="gcsBucketVersioningEnabled" + s3_bucket_name: Optional[str] = Field( + None, description="", alias="s3BucketName" ) - gcs_bucket_retention_locked: Optional[bool] = Field( - None, description="", alias="gcsBucketRetentionLocked" + s3_bucket_qualified_name: Optional[str] = Field( + None, description="", alias="s3BucketQualifiedName" ) - gcs_bucket_retention_period: Optional[int] = Field( - None, description="", alias="gcsBucketRetentionPeriod" + s3_object_size: Optional[int] = Field( + None, description="", alias="s3ObjectSize" ) - gcs_bucket_retention_effective_time: Optional[datetime] = Field( - None, description="", alias="gcsBucketRetentionEffectiveTime" + s3_object_storage_class: Optional[str] = Field( + None, description="", alias="s3ObjectStorageClass" ) - gcs_bucket_lifecycle_rules: Optional[str] = Field( - None, description="", alias="gcsBucketLifecycleRules" + s3_object_key: Optional[str] = Field(None, description="", alias="s3ObjectKey") + s3_object_content_type: Optional[str] = Field( + None, description="", alias="s3ObjectContentType" ) - gcs_bucket_retention_policy: Optional[str] = Field( - None, description="", alias="gcsBucketRetentionPolicy" + s3_object_content_disposition: Optional[str] = Field( + None, description="", alias="s3ObjectContentDisposition" ) - gcs_objects: Optional[list[GCSObject]] = Field( - None, description="", alias="gcsObjects" + s3_object_version_id: Optional[str] = Field( + None, description="", alias="s3ObjectVersionId" + ) + bucket: Optional[S3Bucket] = Field( + None, description="", 
alias="bucket" ) # relationship @classmethod # @validate_arguments() @init_guid def create( - cls, *, name: str, connection_qualified_name: str - ) -> GCSBucket.Attributes: + cls, + *, + name: str, + connection_qualified_name: str, + aws_arn: str, + s3_bucket_qualified_name: str, + ) -> S3Object.Attributes: validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] + [ + "name", + "connection_qualified_name", + "aws_arn", + "s3_bucket_qualified_name", + ], + [name, connection_qualified_name, aws_arn, s3_bucket_qualified_name], ) - - # Split the connection_qualified_name to extract necessary information fields = connection_qualified_name.split("/") if len(fields) != 3: raise ValueError("Invalid connection_qualified_name") - try: + if fields[0].replace(" ", "") == "" or fields[2].replace(" ", "") == "": + raise ValueError("Invalid connection_qualified_name") connector_type = AtlanConnectorType(fields[1]) # type:ignore + if connector_type != AtlanConnectorType.S3: + raise ValueError("Connector type must be s3") except ValueError as e: raise ValueError("Invalid connection_qualified_name") from e + return S3Object.Attributes( + aws_arn=aws_arn, + name=name, + connection_qualified_name=connection_qualified_name, + qualified_name=f"{connection_qualified_name}/{aws_arn}", + connector_name=connector_type.value, + s3_bucket_qualified_name=s3_bucket_qualified_name, + bucket=S3Bucket.ref_by_qualified_name(s3_bucket_qualified_name), + ) - return GCSBucket.Attributes( + @classmethod + # @validate_arguments() + @init_guid + def create_with_prefix( + cls, + *, + name: str, + connection_qualified_name: str, + prefix: str, + s3_bucket_qualified_name: str, + ) -> S3Object.Attributes: + validate_required_fields( + [ + "name", + "connection_qualified_name", + "prefix", + "s3_bucket_qualified_name", + ], + [name, connection_qualified_name, prefix, s3_bucket_qualified_name], + ) + fields = connection_qualified_name.split("/") + if len(fields) != 3: + raise ValueError("Invalid connection_qualified_name") + try: + if fields[0].replace(" ", "") == "" or fields[2].replace(" ", "") == "": + raise ValueError("Invalid connection_qualified_name") + connector_type = AtlanConnectorType(fields[1]) # type:ignore + if connector_type != AtlanConnectorType.S3: + raise ValueError("Connector type must be s3") + except ValueError as e: + raise ValueError("Invalid connection_qualified_name") from e + object_key = f"{prefix}/{name}" + return S3Object.Attributes( name=name, - qualified_name=f"{connection_qualified_name}/{name}", + s3_object_key=object_key, connection_qualified_name=connection_qualified_name, + qualified_name=f"{connection_qualified_name}/{object_key}", connector_name=connector_type.value, + s3_bucket_qualified_name=s3_bucket_qualified_name, + bucket=S3Bucket.ref_by_qualified_name(s3_bucket_qualified_name), ) - attributes: "GCSBucket.Attributes" = Field( - default_factory=lambda: GCSBucket.Attributes(), + attributes: "S3Object.Attributes" = Field( + default_factory=lambda: S3Object.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -GCSObject.Attributes.update_forward_refs() +S3Bucket.Attributes.update_forward_refs() -GCSBucket.Attributes.update_forward_refs() +S3Object.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset63.py b/pyatlan/model/assets/asset63.py index 71b235a46..e842fe12d 100644 --- a/pyatlan/model/assets/asset63.py +++ b/pyatlan/model/assets/asset63.py @@ -4,955 +4,1143 @@ from __future__ import annotations +from datetime import datetime from typing import ClassVar, Optional from pydantic import Field, validator -from pyatlan.model.enums import AtlanConnectorType +from pyatlan.model.enums import ( + ADLSAccessTier, + ADLSAccountStatus, + ADLSEncryptionTypes, + ADLSLeaseState, + ADLSLeaseStatus, + ADLSObjectArchiveStatus, + ADLSObjectType, + ADLSPerformance, + ADLSProvisionState, + ADLSReplicationType, + ADLSStorageKind, + AtlanConnectorType, +) from pyatlan.model.fields.atlan_fields import ( BooleanField, KeywordField, KeywordTextField, - KeywordTextStemmedField, NumericField, RelationField, TextField, ) -from pyatlan.utils import init_guid, validate_required_fields +from pyatlan.utils import get_parent_qualified_name, init_guid, validate_required_fields -from .asset39 import Preset +from .asset36 import ADLS -class PresetChart(Preset): +class ADLSAccount(ADLS): """Description""" @classmethod # @validate_arguments() @init_guid - def create(cls, *, name: str, preset_dashboard_qualified_name: str) -> PresetChart: + def create(cls, *, name: str, connection_qualified_name: str) -> ADLSAccount: validate_required_fields( - ["name", "preset_dashboard_qualified_name"], - [name, preset_dashboard_qualified_name], + ["name", "connection_qualified_name"], [name, connection_qualified_name] ) - attributes = PresetChart.Attributes.create( - name=name, preset_dashboard_qualified_name=preset_dashboard_qualified_name + attributes = ADLSAccount.Attributes.create( + name=name, connection_qualified_name=connection_qualified_name ) return cls(attributes=attributes) - type_name: str = Field("PresetChart", allow_mutation=False) + type_name: str = Field("ADLSAccount", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "PresetChart": - raise ValueError("must be PresetChart") + if v != "ADLSAccount": + raise ValueError("must be ADLSAccount") return v def __setattr__(self, name, value): - if name in PresetChart._convenience_properties: + if name in ADLSAccount._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - PRESET_CHART_DESCRIPTION_MARKDOWN: ClassVar[TextField] = TextField( - "presetChartDescriptionMarkdown", "presetChartDescriptionMarkdown" + ADLS_E_TAG: ClassVar[KeywordField] = KeywordField("adlsETag", "adlsETag") + """ + Entity tag for the asset. An entity tag is a hash of the object and represents changes to the contents of an object only, not its metadata. + """ # noqa: E501 + ADLS_ENCRYPTION_TYPE: ClassVar[KeywordField] = KeywordField( + "adlsEncryptionType", "adlsEncryptionType" ) """ - TBC + Type of encryption for this account. """ - PRESET_CHART_FORM_DATA: ClassVar[KeywordField] = KeywordField( - "presetChartFormData", "presetChartFormData" + ADLS_ACCOUNT_RESOURCE_GROUP: ClassVar[KeywordTextField] = KeywordTextField( + "adlsAccountResourceGroup", + "adlsAccountResourceGroup.keyword", + "adlsAccountResourceGroup", ) """ - TBC + Resource group for this account. 
+ """ + ADLS_ACCOUNT_SUBSCRIPTION: ClassVar[KeywordTextField] = KeywordTextField( + "adlsAccountSubscription", + "adlsAccountSubscription.keyword", + "adlsAccountSubscription", + ) + """ + Subscription for this account. + """ + ADLS_ACCOUNT_PERFORMANCE: ClassVar[KeywordField] = KeywordField( + "adlsAccountPerformance", "adlsAccountPerformance" + ) + """ + Performance of this account. + """ + ADLS_ACCOUNT_REPLICATION: ClassVar[KeywordField] = KeywordField( + "adlsAccountReplication", "adlsAccountReplication" + ) + """ + Replication of this account. + """ + ADLS_ACCOUNT_KIND: ClassVar[KeywordField] = KeywordField( + "adlsAccountKind", "adlsAccountKind" + ) + """ + Kind of this account. + """ + ADLS_PRIMARY_DISK_STATE: ClassVar[KeywordField] = KeywordField( + "adlsPrimaryDiskState", "adlsPrimaryDiskState" + ) + """ + Primary disk state of this account. + """ + ADLS_ACCOUNT_PROVISION_STATE: ClassVar[KeywordField] = KeywordField( + "adlsAccountProvisionState", "adlsAccountProvisionState" + ) + """ + Provision state of this account. + """ + ADLS_ACCOUNT_ACCESS_TIER: ClassVar[KeywordField] = KeywordField( + "adlsAccountAccessTier", "adlsAccountAccessTier" + ) + """ + Access tier of this account. """ - PRESET_DASHBOARD: ClassVar[RelationField] = RelationField("presetDashboard") + ADLS_CONTAINERS: ClassVar[RelationField] = RelationField("adlsContainers") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "preset_chart_description_markdown", - "preset_chart_form_data", - "preset_dashboard", + "adls_e_tag", + "adls_encryption_type", + "adls_account_resource_group", + "adls_account_subscription", + "adls_account_performance", + "adls_account_replication", + "adls_account_kind", + "adls_primary_disk_state", + "adls_account_provision_state", + "adls_account_access_tier", + "adls_containers", ] @property - def preset_chart_description_markdown(self) -> Optional[str]: + def adls_e_tag(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.adls_e_tag + + @adls_e_tag.setter + def adls_e_tag(self, adls_e_tag: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_e_tag = adls_e_tag + + @property + def adls_encryption_type(self) -> Optional[ADLSEncryptionTypes]: + return None if self.attributes is None else self.attributes.adls_encryption_type + + @adls_encryption_type.setter + def adls_encryption_type(self, adls_encryption_type: Optional[ADLSEncryptionTypes]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_encryption_type = adls_encryption_type + + @property + def adls_account_resource_group(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.preset_chart_description_markdown + else self.attributes.adls_account_resource_group ) - @preset_chart_description_markdown.setter - def preset_chart_description_markdown( - self, preset_chart_description_markdown: Optional[str] - ): + @adls_account_resource_group.setter + def adls_account_resource_group(self, adls_account_resource_group: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_chart_description_markdown = ( - preset_chart_description_markdown - ) + self.attributes.adls_account_resource_group = adls_account_resource_group @property - def preset_chart_form_data(self) -> Optional[dict[str, str]]: + def adls_account_subscription(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.preset_chart_form_data 
+ None + if self.attributes is None + else self.attributes.adls_account_subscription ) - @preset_chart_form_data.setter - def preset_chart_form_data(self, preset_chart_form_data: Optional[dict[str, str]]): + @adls_account_subscription.setter + def adls_account_subscription(self, adls_account_subscription: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_chart_form_data = preset_chart_form_data + self.attributes.adls_account_subscription = adls_account_subscription @property - def preset_dashboard(self) -> Optional[PresetDashboard]: - return None if self.attributes is None else self.attributes.preset_dashboard + def adls_account_performance(self) -> Optional[ADLSPerformance]: + return ( + None + if self.attributes is None + else self.attributes.adls_account_performance + ) - @preset_dashboard.setter - def preset_dashboard(self, preset_dashboard: Optional[PresetDashboard]): + @adls_account_performance.setter + def adls_account_performance( + self, adls_account_performance: Optional[ADLSPerformance] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_dashboard = preset_dashboard - - class Attributes(Preset.Attributes): - preset_chart_description_markdown: Optional[str] = Field( - None, description="", alias="presetChartDescriptionMarkdown" - ) - preset_chart_form_data: Optional[dict[str, str]] = Field( - None, description="", alias="presetChartFormData" - ) - preset_dashboard: Optional[PresetDashboard] = Field( - None, description="", alias="presetDashboard" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, preset_dashboard_qualified_name: str - ) -> PresetChart.Attributes: - validate_required_fields( - ["name", "preset_dashboard_qualified_name"], - [name, preset_dashboard_qualified_name], - ) - - # Split the preset_dashboard_qualified_name to extract necessary information - fields = preset_dashboard_qualified_name.split("/") - if len(fields) != 5: - raise ValueError("Invalid preset_dashboard_qualified_name") - - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid preset_dashboard_qualified_name") from e - - return PresetChart.Attributes( - name=name, - preset_dashboard_qualified_name=preset_dashboard_qualified_name, - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - qualified_name=f"{preset_dashboard_qualified_name}/{name}", - connector_name=connector_type.value, - preset_dashboard=PresetDashboard.ref_by_qualified_name( - preset_dashboard_qualified_name - ), - ) - - attributes: "PresetChart.Attributes" = Field( - default_factory=lambda: PresetChart.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - + self.attributes.adls_account_performance = adls_account_performance -class PresetDataset(Preset): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, preset_dashboard_qualified_name: str - ) -> PresetDataset: - validate_required_fields( - ["name", "preset_dashboard_qualified_name"], - [name, preset_dashboard_qualified_name], - ) - attributes = PresetDataset.Attributes.create( - name=name, preset_dashboard_qualified_name=preset_dashboard_qualified_name + @property + def adls_account_replication(self) -> Optional[ADLSReplicationType]: + return ( + None + if self.attributes is None + else self.attributes.adls_account_replication ) - return cls(attributes=attributes) - type_name: str = Field("PresetDataset", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PresetDataset": - raise ValueError("must be PresetDataset") - return v - - def __setattr__(self, name, value): - if name in PresetDataset._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - PRESET_DATASET_DATASOURCE_NAME: ClassVar[ - KeywordTextStemmedField - ] = KeywordTextStemmedField( - "presetDatasetDatasourceName", - "presetDatasetDatasourceName.keyword", - "presetDatasetDatasourceName", - "presetDatasetDatasourceName.stemmed", - ) - """ - TBC - """ - PRESET_DATASET_ID: ClassVar[NumericField] = NumericField( - "presetDatasetId", "presetDatasetId" - ) - """ - TBC - """ - PRESET_DATASET_TYPE: ClassVar[KeywordField] = KeywordField( - "presetDatasetType", "presetDatasetType" - ) - """ - TBC - """ + @adls_account_replication.setter + def adls_account_replication( + self, adls_account_replication: Optional[ADLSReplicationType] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_account_replication = adls_account_replication - PRESET_DASHBOARD: ClassVar[RelationField] = RelationField("presetDashboard") - """ - TBC - """ + @property + def adls_account_kind(self) -> Optional[ADLSStorageKind]: + return None if self.attributes is None else self.attributes.adls_account_kind - _convenience_properties: ClassVar[list[str]] = [ - "preset_dataset_datasource_name", - "preset_dataset_id", - "preset_dataset_type", - "preset_dashboard", - ] + @adls_account_kind.setter + def adls_account_kind(self, adls_account_kind: Optional[ADLSStorageKind]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_account_kind = adls_account_kind @property - def preset_dataset_datasource_name(self) -> Optional[str]: + def adls_primary_disk_state(self) -> Optional[ADLSAccountStatus]: return ( - None - if self.attributes is None - else self.attributes.preset_dataset_datasource_name + None if self.attributes is None else self.attributes.adls_primary_disk_state ) - @preset_dataset_datasource_name.setter - def preset_dataset_datasource_name( - self, preset_dataset_datasource_name: Optional[str] + @adls_primary_disk_state.setter + def adls_primary_disk_state( + self, adls_primary_disk_state: Optional[ADLSAccountStatus] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_dataset_datasource_name = preset_dataset_datasource_name + self.attributes.adls_primary_disk_state = adls_primary_disk_state @property - def preset_dataset_id(self) -> Optional[int]: - return None if self.attributes is 
None else self.attributes.preset_dataset_id + def adls_account_provision_state(self) -> Optional[ADLSProvisionState]: + return ( + None + if self.attributes is None + else self.attributes.adls_account_provision_state + ) - @preset_dataset_id.setter - def preset_dataset_id(self, preset_dataset_id: Optional[int]): + @adls_account_provision_state.setter + def adls_account_provision_state( + self, adls_account_provision_state: Optional[ADLSProvisionState] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_dataset_id = preset_dataset_id + self.attributes.adls_account_provision_state = adls_account_provision_state @property - def preset_dataset_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.preset_dataset_type + def adls_account_access_tier(self) -> Optional[ADLSAccessTier]: + return ( + None + if self.attributes is None + else self.attributes.adls_account_access_tier + ) - @preset_dataset_type.setter - def preset_dataset_type(self, preset_dataset_type: Optional[str]): + @adls_account_access_tier.setter + def adls_account_access_tier( + self, adls_account_access_tier: Optional[ADLSAccessTier] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_dataset_type = preset_dataset_type + self.attributes.adls_account_access_tier = adls_account_access_tier @property - def preset_dashboard(self) -> Optional[PresetDashboard]: - return None if self.attributes is None else self.attributes.preset_dashboard + def adls_containers(self) -> Optional[list[ADLSContainer]]: + return None if self.attributes is None else self.attributes.adls_containers - @preset_dashboard.setter - def preset_dashboard(self, preset_dashboard: Optional[PresetDashboard]): + @adls_containers.setter + def adls_containers(self, adls_containers: Optional[list[ADLSContainer]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_dashboard = preset_dashboard + self.attributes.adls_containers = adls_containers - class Attributes(Preset.Attributes): - preset_dataset_datasource_name: Optional[str] = Field( - None, description="", alias="presetDatasetDatasourceName" + class Attributes(ADLS.Attributes): + adls_e_tag: Optional[str] = Field(None, description="", alias="adlsETag") + adls_encryption_type: Optional[ADLSEncryptionTypes] = Field( + None, description="", alias="adlsEncryptionType" + ) + adls_account_resource_group: Optional[str] = Field( + None, description="", alias="adlsAccountResourceGroup" + ) + adls_account_subscription: Optional[str] = Field( + None, description="", alias="adlsAccountSubscription" + ) + adls_account_performance: Optional[ADLSPerformance] = Field( + None, description="", alias="adlsAccountPerformance" ) - preset_dataset_id: Optional[int] = Field( - None, description="", alias="presetDatasetId" + adls_account_replication: Optional[ADLSReplicationType] = Field( + None, description="", alias="adlsAccountReplication" ) - preset_dataset_type: Optional[str] = Field( - None, description="", alias="presetDatasetType" + adls_account_kind: Optional[ADLSStorageKind] = Field( + None, description="", alias="adlsAccountKind" ) - preset_dashboard: Optional[PresetDashboard] = Field( - None, description="", alias="presetDashboard" + adls_primary_disk_state: Optional[ADLSAccountStatus] = Field( + None, description="", alias="adlsPrimaryDiskState" + ) + adls_account_provision_state: Optional[ADLSProvisionState] = Field( + None, description="", 
alias="adlsAccountProvisionState" + ) + adls_account_access_tier: Optional[ADLSAccessTier] = Field( + None, description="", alias="adlsAccountAccessTier" + ) + adls_containers: Optional[list[ADLSContainer]] = Field( + None, description="", alias="adlsContainers" ) # relationship @classmethod # @validate_arguments() @init_guid def create( - cls, *, name: str, preset_dashboard_qualified_name: str - ) -> PresetDataset.Attributes: + cls, *, name: str, connection_qualified_name: str + ) -> ADLSAccount.Attributes: validate_required_fields( - ["name", "preset_dashboard_qualified_name"], - [name, preset_dashboard_qualified_name], + ["name", "connection_qualified_name"], [name, connection_qualified_name] ) - # Split the preset_dashboard_qualified_name to extract necessary information - fields = preset_dashboard_qualified_name.split("/") - if len(fields) != 5: - raise ValueError("Invalid preset_dashboard_qualified_name") + # Split the connection_qualified_name to extract necessary information + fields = connection_qualified_name.split("/") + if len(fields) != 3: + raise ValueError("Invalid connection_qualified_name") try: connector_type = AtlanConnectorType(fields[1]) # type:ignore except ValueError as e: - raise ValueError("Invalid preset_dashboard_qualified_name") from e + raise ValueError("Invalid connection_qualified_name") from e - return PresetDataset.Attributes( + return ADLSAccount.Attributes( name=name, - preset_dashboard_qualified_name=preset_dashboard_qualified_name, - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - qualified_name=f"{preset_dashboard_qualified_name}/{name}", + qualified_name=f"{connection_qualified_name}/{name}", + connection_qualified_name=connection_qualified_name, connector_name=connector_type.value, - preset_dashboard=PresetDashboard.ref_by_qualified_name( - preset_dashboard_qualified_name - ), ) - attributes: "PresetDataset.Attributes" = Field( - default_factory=lambda: PresetDataset.Attributes(), + attributes: "ADLSAccount.Attributes" = Field( + default_factory=lambda: ADLSAccount.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class PresetDashboard(Preset): +class ADLSContainer(ADLS): """Description""" @classmethod # @validate_arguments() @init_guid - def create( - cls, *, name: str, preset_workspace_qualified_name: str - ) -> PresetDashboard: + def create(cls, *, name: str, adls_account_qualified_name: str) -> ADLSContainer: validate_required_fields( - ["name", "preset_workspace_qualified_name"], - [name, preset_workspace_qualified_name], + ["name", "adls_account_qualified_name"], [name, adls_account_qualified_name] ) - attributes = PresetDashboard.Attributes.create( - name=name, preset_workspace_qualified_name=preset_workspace_qualified_name + attributes = ADLSContainer.Attributes.create( + name=name, adls_account_qualified_name=adls_account_qualified_name ) return cls(attributes=attributes) - type_name: str = Field("PresetDashboard", allow_mutation=False) + type_name: str = Field("ADLSContainer", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "PresetDashboard": - raise ValueError("must be PresetDashboard") + if v != "ADLSContainer": + raise ValueError("must be ADLSContainer") return v def __setattr__(self, name, value): - if name in PresetDashboard._convenience_properties: + if name in ADLSContainer._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - PRESET_DASHBOARD_CHANGED_BY_NAME: ClassVar[ - KeywordTextStemmedField - ] = KeywordTextStemmedField( - "presetDashboardChangedByName", - "presetDashboardChangedByName.keyword", - "presetDashboardChangedByName", - "presetDashboardChangedByName.stemmed", + ADLS_CONTAINER_URL: ClassVar[KeywordTextField] = KeywordTextField( + "adlsContainerUrl", "adlsContainerUrl.keyword", "adlsContainerUrl" ) """ - TBC + URL of this container. """ - PRESET_DASHBOARD_CHANGED_BY_URL: ClassVar[KeywordField] = KeywordField( - "presetDashboardChangedByURL", "presetDashboardChangedByURL" + ADLS_CONTAINER_LEASE_STATE: ClassVar[KeywordField] = KeywordField( + "adlsContainerLeaseState", "adlsContainerLeaseState" ) """ - TBC + Lease state of this container. """ - PRESET_DASHBOARD_IS_MANAGED_EXTERNALLY: ClassVar[BooleanField] = BooleanField( - "presetDashboardIsManagedExternally", "presetDashboardIsManagedExternally" + ADLS_CONTAINER_LEASE_STATUS: ClassVar[KeywordField] = KeywordField( + "adlsContainerLeaseStatus", "adlsContainerLeaseStatus" ) """ - TBC + Lease status of this container. """ - PRESET_DASHBOARD_IS_PUBLISHED: ClassVar[BooleanField] = BooleanField( - "presetDashboardIsPublished", "presetDashboardIsPublished" + ADLS_CONTAINER_ENCRYPTION_SCOPE: ClassVar[KeywordField] = KeywordField( + "adlsContainerEncryptionScope", "adlsContainerEncryptionScope" ) """ - TBC + Encryption scope of this container. """ - PRESET_DASHBOARD_THUMBNAIL_URL: ClassVar[KeywordField] = KeywordField( - "presetDashboardThumbnailURL", "presetDashboardThumbnailURL" + ADLS_CONTAINER_VERSION_LEVEL_IMMUTABILITY_SUPPORT: ClassVar[ + BooleanField + ] = BooleanField( + "adlsContainerVersionLevelImmutabilitySupport", + "adlsContainerVersionLevelImmutabilitySupport", ) """ - TBC + Whether this container supports version-level immutability (true) or not (false). 
""" - PRESET_DASHBOARD_CHART_COUNT: ClassVar[NumericField] = NumericField( - "presetDashboardChartCount", "presetDashboardChartCount" + ADLS_OBJECT_COUNT: ClassVar[NumericField] = NumericField( + "adlsObjectCount", "adlsObjectCount" ) """ - TBC + Number of objects that exist within this container. """ - PRESET_DATASETS: ClassVar[RelationField] = RelationField("presetDatasets") + ADLS_OBJECTS: ClassVar[RelationField] = RelationField("adlsObjects") """ TBC """ - PRESET_CHARTS: ClassVar[RelationField] = RelationField("presetCharts") - """ - TBC - """ - PRESET_WORKSPACE: ClassVar[RelationField] = RelationField("presetWorkspace") + ADLS_ACCOUNT: ClassVar[RelationField] = RelationField("adlsAccount") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "preset_dashboard_changed_by_name", - "preset_dashboard_changed_by_url", - "preset_dashboard_is_managed_externally", - "preset_dashboard_is_published", - "preset_dashboard_thumbnail_url", - "preset_dashboard_chart_count", - "preset_datasets", - "preset_charts", - "preset_workspace", + "adls_container_url", + "adls_container_lease_state", + "adls_container_lease_status", + "adls_container_encryption_scope", + "adls_container_version_level_immutability_support", + "adls_object_count", + "adls_objects", + "adls_account", ] @property - def preset_dashboard_changed_by_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.preset_dashboard_changed_by_name - ) + def adls_container_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.adls_container_url - @preset_dashboard_changed_by_name.setter - def preset_dashboard_changed_by_name( - self, preset_dashboard_changed_by_name: Optional[str] - ): + @adls_container_url.setter + def adls_container_url(self, adls_container_url: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_dashboard_changed_by_name = ( - preset_dashboard_changed_by_name - ) + self.attributes.adls_container_url = adls_container_url @property - def preset_dashboard_changed_by_url(self) -> Optional[str]: + def adls_container_lease_state(self) -> Optional[ADLSLeaseState]: return ( None if self.attributes is None - else self.attributes.preset_dashboard_changed_by_url + else self.attributes.adls_container_lease_state ) - @preset_dashboard_changed_by_url.setter - def preset_dashboard_changed_by_url( - self, preset_dashboard_changed_by_url: Optional[str] + @adls_container_lease_state.setter + def adls_container_lease_state( + self, adls_container_lease_state: Optional[ADLSLeaseState] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_dashboard_changed_by_url = ( - preset_dashboard_changed_by_url - ) + self.attributes.adls_container_lease_state = adls_container_lease_state @property - def preset_dashboard_is_managed_externally(self) -> Optional[bool]: + def adls_container_lease_status(self) -> Optional[ADLSLeaseStatus]: return ( None if self.attributes is None - else self.attributes.preset_dashboard_is_managed_externally + else self.attributes.adls_container_lease_status ) - @preset_dashboard_is_managed_externally.setter - def preset_dashboard_is_managed_externally( - self, preset_dashboard_is_managed_externally: Optional[bool] + @adls_container_lease_status.setter + def adls_container_lease_status( + self, adls_container_lease_status: Optional[ADLSLeaseStatus] ): if self.attributes is None: self.attributes = self.Attributes() - 
self.attributes.preset_dashboard_is_managed_externally = ( - preset_dashboard_is_managed_externally - ) + self.attributes.adls_container_lease_status = adls_container_lease_status @property - def preset_dashboard_is_published(self) -> Optional[bool]: + def adls_container_encryption_scope(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.preset_dashboard_is_published + else self.attributes.adls_container_encryption_scope ) - @preset_dashboard_is_published.setter - def preset_dashboard_is_published( - self, preset_dashboard_is_published: Optional[bool] + @adls_container_encryption_scope.setter + def adls_container_encryption_scope( + self, adls_container_encryption_scope: Optional[str] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_dashboard_is_published = preset_dashboard_is_published + self.attributes.adls_container_encryption_scope = ( + adls_container_encryption_scope + ) @property - def preset_dashboard_thumbnail_url(self) -> Optional[str]: + def adls_container_version_level_immutability_support(self) -> Optional[bool]: return ( None if self.attributes is None - else self.attributes.preset_dashboard_thumbnail_url + else self.attributes.adls_container_version_level_immutability_support ) - @preset_dashboard_thumbnail_url.setter - def preset_dashboard_thumbnail_url( - self, preset_dashboard_thumbnail_url: Optional[str] + @adls_container_version_level_immutability_support.setter + def adls_container_version_level_immutability_support( + self, adls_container_version_level_immutability_support: Optional[bool] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_dashboard_thumbnail_url = preset_dashboard_thumbnail_url - - @property - def preset_dashboard_chart_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.preset_dashboard_chart_count + self.attributes.adls_container_version_level_immutability_support = ( + adls_container_version_level_immutability_support ) - @preset_dashboard_chart_count.setter - def preset_dashboard_chart_count(self, preset_dashboard_chart_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_dashboard_chart_count = preset_dashboard_chart_count - @property - def preset_datasets(self) -> Optional[list[PresetDataset]]: - return None if self.attributes is None else self.attributes.preset_datasets + def adls_object_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.adls_object_count - @preset_datasets.setter - def preset_datasets(self, preset_datasets: Optional[list[PresetDataset]]): + @adls_object_count.setter + def adls_object_count(self, adls_object_count: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_datasets = preset_datasets + self.attributes.adls_object_count = adls_object_count @property - def preset_charts(self) -> Optional[list[PresetChart]]: - return None if self.attributes is None else self.attributes.preset_charts + def adls_objects(self) -> Optional[list[ADLSObject]]: + return None if self.attributes is None else self.attributes.adls_objects - @preset_charts.setter - def preset_charts(self, preset_charts: Optional[list[PresetChart]]): + @adls_objects.setter + def adls_objects(self, adls_objects: Optional[list[ADLSObject]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_charts = 
preset_charts + self.attributes.adls_objects = adls_objects @property - def preset_workspace(self) -> Optional[PresetWorkspace]: - return None if self.attributes is None else self.attributes.preset_workspace + def adls_account(self) -> Optional[ADLSAccount]: + return None if self.attributes is None else self.attributes.adls_account - @preset_workspace.setter - def preset_workspace(self, preset_workspace: Optional[PresetWorkspace]): + @adls_account.setter + def adls_account(self, adls_account: Optional[ADLSAccount]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_workspace = preset_workspace + self.attributes.adls_account = adls_account - class Attributes(Preset.Attributes): - preset_dashboard_changed_by_name: Optional[str] = Field( - None, description="", alias="presetDashboardChangedByName" + class Attributes(ADLS.Attributes): + adls_container_url: Optional[str] = Field( + None, description="", alias="adlsContainerUrl" ) - preset_dashboard_changed_by_url: Optional[str] = Field( - None, description="", alias="presetDashboardChangedByURL" + adls_container_lease_state: Optional[ADLSLeaseState] = Field( + None, description="", alias="adlsContainerLeaseState" ) - preset_dashboard_is_managed_externally: Optional[bool] = Field( - None, description="", alias="presetDashboardIsManagedExternally" + adls_container_lease_status: Optional[ADLSLeaseStatus] = Field( + None, description="", alias="adlsContainerLeaseStatus" ) - preset_dashboard_is_published: Optional[bool] = Field( - None, description="", alias="presetDashboardIsPublished" + adls_container_encryption_scope: Optional[str] = Field( + None, description="", alias="adlsContainerEncryptionScope" ) - preset_dashboard_thumbnail_url: Optional[str] = Field( - None, description="", alias="presetDashboardThumbnailURL" + adls_container_version_level_immutability_support: Optional[bool] = Field( + None, description="", alias="adlsContainerVersionLevelImmutabilitySupport" ) - preset_dashboard_chart_count: Optional[int] = Field( - None, description="", alias="presetDashboardChartCount" + adls_object_count: Optional[int] = Field( + None, description="", alias="adlsObjectCount" ) - preset_datasets: Optional[list[PresetDataset]] = Field( - None, description="", alias="presetDatasets" - ) # relationship - preset_charts: Optional[list[PresetChart]] = Field( - None, description="", alias="presetCharts" + adls_objects: Optional[list[ADLSObject]] = Field( + None, description="", alias="adlsObjects" ) # relationship - preset_workspace: Optional[PresetWorkspace] = Field( - None, description="", alias="presetWorkspace" + adls_account: Optional[ADLSAccount] = Field( + None, description="", alias="adlsAccount" ) # relationship @classmethod # @validate_arguments() @init_guid def create( - cls, *, name: str, preset_workspace_qualified_name: str - ) -> PresetDashboard.Attributes: + cls, *, name: str, adls_account_qualified_name: str + ) -> ADLSContainer.Attributes: validate_required_fields( - ["name", "preset_workspace_qualified_name"], - [name, preset_workspace_qualified_name], + ["name", "adls_account_qualified_name"], + [name, adls_account_qualified_name], ) - # Split the preset_workspace_qualified_name to extract necessary information - fields = preset_workspace_qualified_name.split("/") + # Split the adls_account_qualified_name to extract necessary information + fields = adls_account_qualified_name.split("/") if len(fields) != 4: - raise ValueError("Invalid preset_workspace_qualified_name") + raise ValueError("Invalid 
adls_account_qualified_name") try: connector_type = AtlanConnectorType(fields[1]) # type:ignore except ValueError as e: - raise ValueError("Invalid preset_workspace_qualified_name") from e + raise ValueError("Invalid adls_account_qualified_name") from e - return PresetDashboard.Attributes( + return ADLSContainer.Attributes( name=name, - preset_workspace_qualified_name=preset_workspace_qualified_name, + adls_account_qualified_name=adls_account_qualified_name, connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - qualified_name=f"{preset_workspace_qualified_name}/{name}", + qualified_name=f"{adls_account_qualified_name}/{name}", connector_name=connector_type.value, - preset_workspace=PresetWorkspace.ref_by_qualified_name( - preset_workspace_qualified_name + adls_account=ADLSAccount.ref_by_qualified_name( + adls_account_qualified_name ), ) - attributes: "PresetDashboard.Attributes" = Field( - default_factory=lambda: PresetDashboard.Attributes(), + attributes: "ADLSContainer.Attributes" = Field( + default_factory=lambda: ADLSContainer.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class PresetWorkspace(Preset): +class ADLSObject(ADLS): """Description""" @classmethod # @validate_arguments() @init_guid - def create(cls, *, name: str, connection_qualified_name: str) -> PresetWorkspace: + def create( + cls, + *, + name: str, + adls_container_qualified_name: str, + ) -> ADLSObject: validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] + ["name", "adls_container_qualified_name"], + [name, adls_container_qualified_name], ) - attributes = PresetWorkspace.Attributes.create( - name=name, connection_qualified_name=connection_qualified_name + attributes = ADLSObject.Attributes.create( + name=name, adls_container_qualified_name=adls_container_qualified_name ) return cls(attributes=attributes) - type_name: str = Field("PresetWorkspace", allow_mutation=False) + type_name: str = Field("ADLSObject", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "PresetWorkspace": - raise ValueError("must be PresetWorkspace") + if v != "ADLSObject": + raise ValueError("must be ADLSObject") return v def __setattr__(self, name, value): - if name in PresetWorkspace._convenience_properties: + if name in ADLSObject._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - PRESET_WORKSPACE_PUBLIC_DASHBOARDS_ALLOWED: ClassVar[BooleanField] = BooleanField( - "presetWorkspacePublicDashboardsAllowed", - "presetWorkspacePublicDashboardsAllowed", + ADLS_OBJECT_URL: ClassVar[KeywordTextField] = KeywordTextField( + "adlsObjectUrl", "adlsObjectUrl.keyword", "adlsObjectUrl" ) """ - TBC + URL of this object. """ - PRESET_WORKSPACE_CLUSTER_ID: ClassVar[NumericField] = NumericField( - "presetWorkspaceClusterId", "presetWorkspaceClusterId" + ADLS_OBJECT_VERSION_ID: ClassVar[KeywordField] = KeywordField( + "adlsObjectVersionId", "adlsObjectVersionId" ) """ - TBC + Identifier of the version of this object, from ADLS. """ - PRESET_WORKSPACE_HOSTNAME: ClassVar[KeywordTextField] = KeywordTextField( - "presetWorkspaceHostname", - "presetWorkspaceHostname", - "presetWorkspaceHostname.text", + ADLS_OBJECT_TYPE: ClassVar[KeywordField] = KeywordField( + "adlsObjectType", "adlsObjectType" ) """ - TBC + Type of this object. 
""" - PRESET_WORKSPACE_IS_IN_MAINTENANCE_MODE: ClassVar[BooleanField] = BooleanField( - "presetWorkspaceIsInMaintenanceMode", "presetWorkspaceIsInMaintenanceMode" + ADLS_OBJECT_SIZE: ClassVar[NumericField] = NumericField( + "adlsObjectSize", "adlsObjectSize" ) """ - TBC + Size of this object. """ - PRESET_WORKSPACE_REGION: ClassVar[KeywordTextField] = KeywordTextField( - "presetWorkspaceRegion", "presetWorkspaceRegion", "presetWorkspaceRegion.text" + ADLS_OBJECT_ACCESS_TIER: ClassVar[KeywordField] = KeywordField( + "adlsObjectAccessTier", "adlsObjectAccessTier" ) """ - TBC + Access tier of this object. """ - PRESET_WORKSPACE_STATUS: ClassVar[KeywordField] = KeywordField( - "presetWorkspaceStatus", "presetWorkspaceStatus" + ADLS_OBJECT_ACCESS_TIER_LAST_MODIFIED_TIME: ClassVar[NumericField] = NumericField( + "adlsObjectAccessTierLastModifiedTime", "adlsObjectAccessTierLastModifiedTime" ) """ - TBC + Time (epoch) when the acccess tier for this object was last modified, in milliseconds. """ - PRESET_WORKSPACE_DEPLOYMENT_ID: ClassVar[NumericField] = NumericField( - "presetWorkspaceDeploymentId", "presetWorkspaceDeploymentId" + ADLS_OBJECT_ARCHIVE_STATUS: ClassVar[KeywordField] = KeywordField( + "adlsObjectArchiveStatus", "adlsObjectArchiveStatus" ) """ - TBC + Archive status of this object. """ - PRESET_WORKSPACE_DASHBOARD_COUNT: ClassVar[NumericField] = NumericField( - "presetWorkspaceDashboardCount", "presetWorkspaceDashboardCount" + ADLS_OBJECT_SERVER_ENCRYPTED: ClassVar[BooleanField] = BooleanField( + "adlsObjectServerEncrypted", "adlsObjectServerEncrypted" ) """ - TBC + Whether this object is server encrypted (true) or not (false). """ - PRESET_WORKSPACE_DATASET_COUNT: ClassVar[NumericField] = NumericField( - "presetWorkspaceDatasetCount", "presetWorkspaceDatasetCount" + ADLS_OBJECT_VERSION_LEVEL_IMMUTABILITY_SUPPORT: ClassVar[ + BooleanField + ] = BooleanField( + "adlsObjectVersionLevelImmutabilitySupport", + "adlsObjectVersionLevelImmutabilitySupport", ) """ - TBC + Whether this object supports version-level immutability (true) or not (false). + """ + ADLS_OBJECT_CACHE_CONTROL: ClassVar[TextField] = TextField( + "adlsObjectCacheControl", "adlsObjectCacheControl" + ) + """ + Cache control of this object. + """ + ADLS_OBJECT_CONTENT_TYPE: ClassVar[TextField] = TextField( + "adlsObjectContentType", "adlsObjectContentType" + ) + """ + Content type of this object. + """ + ADLS_OBJECT_CONTENT_MD5HASH: ClassVar[KeywordField] = KeywordField( + "adlsObjectContentMD5Hash", "adlsObjectContentMD5Hash" + ) + """ + MD5 hash of this object's contents. + """ + ADLS_OBJECT_CONTENT_LANGUAGE: ClassVar[KeywordTextField] = KeywordTextField( + "adlsObjectContentLanguage", + "adlsObjectContentLanguage.keyword", + "adlsObjectContentLanguage", + ) + """ + Language of this object's contents. + """ + ADLS_OBJECT_LEASE_STATUS: ClassVar[KeywordField] = KeywordField( + "adlsObjectLeaseStatus", "adlsObjectLeaseStatus" + ) + """ + Status of this object's lease. + """ + ADLS_OBJECT_LEASE_STATE: ClassVar[KeywordField] = KeywordField( + "adlsObjectLeaseState", "adlsObjectLeaseState" + ) + """ + State of this object's lease. + """ + ADLS_OBJECT_METADATA: ClassVar[KeywordField] = KeywordField( + "adlsObjectMetadata", "adlsObjectMetadata" + ) + """ + Metadata associated with this object, from ADLS. 
+ """ + ADLS_CONTAINER_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "adlsContainerQualifiedName", + "adlsContainerQualifiedName", + "adlsContainerQualifiedName.text", + ) + """ + Unique name of the container this object exists within. """ - PRESET_DASHBOARDS: ClassVar[RelationField] = RelationField("presetDashboards") + ADLS_CONTAINER: ClassVar[RelationField] = RelationField("adlsContainer") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "preset_workspace_public_dashboards_allowed", - "preset_workspace_cluster_id", - "preset_workspace_hostname", - "preset_workspace_is_in_maintenance_mode", - "preset_workspace_region", - "preset_workspace_status", - "preset_workspace_deployment_id", - "preset_workspace_dashboard_count", - "preset_workspace_dataset_count", - "preset_dashboards", + "adls_object_url", + "adls_object_version_id", + "adls_object_type", + "adls_object_size", + "adls_object_access_tier", + "adls_object_access_tier_last_modified_time", + "adls_object_archive_status", + "adls_object_server_encrypted", + "adls_object_version_level_immutability_support", + "adls_object_cache_control", + "adls_object_content_type", + "adls_object_content_m_d5_hash", + "adls_object_content_language", + "adls_object_lease_status", + "adls_object_lease_state", + "adls_object_metadata", + "adls_container_qualified_name", + "adls_container", ] @property - def preset_workspace_public_dashboards_allowed(self) -> Optional[bool]: + def adls_object_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.adls_object_url + + @adls_object_url.setter + def adls_object_url(self, adls_object_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_url = adls_object_url + + @property + def adls_object_version_id(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.adls_object_version_id + ) + + @adls_object_version_id.setter + def adls_object_version_id(self, adls_object_version_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_version_id = adls_object_version_id + + @property + def adls_object_type(self) -> Optional[ADLSObjectType]: + return None if self.attributes is None else self.attributes.adls_object_type + + @adls_object_type.setter + def adls_object_type(self, adls_object_type: Optional[ADLSObjectType]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_type = adls_object_type + + @property + def adls_object_size(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.adls_object_size + + @adls_object_size.setter + def adls_object_size(self, adls_object_size: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_size = adls_object_size + + @property + def adls_object_access_tier(self) -> Optional[ADLSAccessTier]: + return ( + None if self.attributes is None else self.attributes.adls_object_access_tier + ) + + @adls_object_access_tier.setter + def adls_object_access_tier( + self, adls_object_access_tier: Optional[ADLSAccessTier] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_access_tier = adls_object_access_tier + + @property + def adls_object_access_tier_last_modified_time(self) -> Optional[datetime]: return ( None if self.attributes is None - else 
self.attributes.preset_workspace_public_dashboards_allowed + else self.attributes.adls_object_access_tier_last_modified_time ) - @preset_workspace_public_dashboards_allowed.setter - def preset_workspace_public_dashboards_allowed( - self, preset_workspace_public_dashboards_allowed: Optional[bool] + @adls_object_access_tier_last_modified_time.setter + def adls_object_access_tier_last_modified_time( + self, adls_object_access_tier_last_modified_time: Optional[datetime] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_workspace_public_dashboards_allowed = ( - preset_workspace_public_dashboards_allowed + self.attributes.adls_object_access_tier_last_modified_time = ( + adls_object_access_tier_last_modified_time ) @property - def preset_workspace_cluster_id(self) -> Optional[int]: + def adls_object_archive_status(self) -> Optional[ADLSObjectArchiveStatus]: return ( None if self.attributes is None - else self.attributes.preset_workspace_cluster_id + else self.attributes.adls_object_archive_status ) - @preset_workspace_cluster_id.setter - def preset_workspace_cluster_id(self, preset_workspace_cluster_id: Optional[int]): + @adls_object_archive_status.setter + def adls_object_archive_status( + self, adls_object_archive_status: Optional[ADLSObjectArchiveStatus] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_workspace_cluster_id = preset_workspace_cluster_id + self.attributes.adls_object_archive_status = adls_object_archive_status @property - def preset_workspace_hostname(self) -> Optional[str]: + def adls_object_server_encrypted(self) -> Optional[bool]: return ( None if self.attributes is None - else self.attributes.preset_workspace_hostname + else self.attributes.adls_object_server_encrypted ) - @preset_workspace_hostname.setter - def preset_workspace_hostname(self, preset_workspace_hostname: Optional[str]): + @adls_object_server_encrypted.setter + def adls_object_server_encrypted( + self, adls_object_server_encrypted: Optional[bool] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_workspace_hostname = preset_workspace_hostname + self.attributes.adls_object_server_encrypted = adls_object_server_encrypted @property - def preset_workspace_is_in_maintenance_mode(self) -> Optional[bool]: + def adls_object_version_level_immutability_support(self) -> Optional[bool]: return ( None if self.attributes is None - else self.attributes.preset_workspace_is_in_maintenance_mode + else self.attributes.adls_object_version_level_immutability_support ) - @preset_workspace_is_in_maintenance_mode.setter - def preset_workspace_is_in_maintenance_mode( - self, preset_workspace_is_in_maintenance_mode: Optional[bool] + @adls_object_version_level_immutability_support.setter + def adls_object_version_level_immutability_support( + self, adls_object_version_level_immutability_support: Optional[bool] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_workspace_is_in_maintenance_mode = ( - preset_workspace_is_in_maintenance_mode + self.attributes.adls_object_version_level_immutability_support = ( + adls_object_version_level_immutability_support ) @property - def preset_workspace_region(self) -> Optional[str]: + def adls_object_cache_control(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.preset_workspace_region + None + if self.attributes is None + else self.attributes.adls_object_cache_control ) - 
@preset_workspace_region.setter - def preset_workspace_region(self, preset_workspace_region: Optional[str]): + @adls_object_cache_control.setter + def adls_object_cache_control(self, adls_object_cache_control: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_workspace_region = preset_workspace_region + self.attributes.adls_object_cache_control = adls_object_cache_control @property - def preset_workspace_status(self) -> Optional[str]: + def adls_object_content_type(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.preset_workspace_status + None + if self.attributes is None + else self.attributes.adls_object_content_type ) - @preset_workspace_status.setter - def preset_workspace_status(self, preset_workspace_status: Optional[str]): + @adls_object_content_type.setter + def adls_object_content_type(self, adls_object_content_type: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_workspace_status = preset_workspace_status + self.attributes.adls_object_content_type = adls_object_content_type @property - def preset_workspace_deployment_id(self) -> Optional[int]: + def adls_object_content_m_d5_hash(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.preset_workspace_deployment_id + else self.attributes.adls_object_content_m_d5_hash ) - @preset_workspace_deployment_id.setter - def preset_workspace_deployment_id( - self, preset_workspace_deployment_id: Optional[int] + @adls_object_content_m_d5_hash.setter + def adls_object_content_m_d5_hash( + self, adls_object_content_m_d5_hash: Optional[str] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_workspace_deployment_id = preset_workspace_deployment_id + self.attributes.adls_object_content_m_d5_hash = adls_object_content_m_d5_hash @property - def preset_workspace_dashboard_count(self) -> Optional[int]: + def adls_object_content_language(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.preset_workspace_dashboard_count + else self.attributes.adls_object_content_language ) - @preset_workspace_dashboard_count.setter - def preset_workspace_dashboard_count( - self, preset_workspace_dashboard_count: Optional[int] + @adls_object_content_language.setter + def adls_object_content_language(self, adls_object_content_language: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_content_language = adls_object_content_language + + @property + def adls_object_lease_status(self) -> Optional[ADLSLeaseStatus]: + return ( + None + if self.attributes is None + else self.attributes.adls_object_lease_status + ) + + @adls_object_lease_status.setter + def adls_object_lease_status( + self, adls_object_lease_status: Optional[ADLSLeaseStatus] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_workspace_dashboard_count = ( - preset_workspace_dashboard_count + self.attributes.adls_object_lease_status = adls_object_lease_status + + @property + def adls_object_lease_state(self) -> Optional[ADLSLeaseState]: + return ( + None if self.attributes is None else self.attributes.adls_object_lease_state ) + @adls_object_lease_state.setter + def adls_object_lease_state( + self, adls_object_lease_state: Optional[ADLSLeaseState] + ): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.adls_object_lease_state = adls_object_lease_state + + @property + def adls_object_metadata(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.adls_object_metadata + + @adls_object_metadata.setter + def adls_object_metadata(self, adls_object_metadata: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_metadata = adls_object_metadata + @property - def preset_workspace_dataset_count(self) -> Optional[int]: + def adls_container_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.preset_workspace_dataset_count + else self.attributes.adls_container_qualified_name ) - @preset_workspace_dataset_count.setter - def preset_workspace_dataset_count( - self, preset_workspace_dataset_count: Optional[int] + @adls_container_qualified_name.setter + def adls_container_qualified_name( + self, adls_container_qualified_name: Optional[str] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_workspace_dataset_count = preset_workspace_dataset_count + self.attributes.adls_container_qualified_name = adls_container_qualified_name @property - def preset_dashboards(self) -> Optional[list[PresetDashboard]]: - return None if self.attributes is None else self.attributes.preset_dashboards + def adls_container(self) -> Optional[ADLSContainer]: + return None if self.attributes is None else self.attributes.adls_container - @preset_dashboards.setter - def preset_dashboards(self, preset_dashboards: Optional[list[PresetDashboard]]): + @adls_container.setter + def adls_container(self, adls_container: Optional[ADLSContainer]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.preset_dashboards = preset_dashboards + self.attributes.adls_container = adls_container - class Attributes(Preset.Attributes): - preset_workspace_public_dashboards_allowed: Optional[bool] = Field( - None, description="", alias="presetWorkspacePublicDashboardsAllowed" + class Attributes(ADLS.Attributes): + adls_object_url: Optional[str] = Field( + None, description="", alias="adlsObjectUrl" + ) + adls_object_version_id: Optional[str] = Field( + None, description="", alias="adlsObjectVersionId" + ) + adls_object_type: Optional[ADLSObjectType] = Field( + None, description="", alias="adlsObjectType" + ) + adls_object_size: Optional[int] = Field( + None, description="", alias="adlsObjectSize" + ) + adls_object_access_tier: Optional[ADLSAccessTier] = Field( + None, description="", alias="adlsObjectAccessTier" + ) + adls_object_access_tier_last_modified_time: Optional[datetime] = Field( + None, description="", alias="adlsObjectAccessTierLastModifiedTime" ) - preset_workspace_cluster_id: Optional[int] = Field( - None, description="", alias="presetWorkspaceClusterId" + adls_object_archive_status: Optional[ADLSObjectArchiveStatus] = Field( + None, description="", alias="adlsObjectArchiveStatus" ) - preset_workspace_hostname: Optional[str] = Field( - None, description="", alias="presetWorkspaceHostname" + adls_object_server_encrypted: Optional[bool] = Field( + None, description="", alias="adlsObjectServerEncrypted" ) - preset_workspace_is_in_maintenance_mode: Optional[bool] = Field( - None, description="", alias="presetWorkspaceIsInMaintenanceMode" + adls_object_version_level_immutability_support: Optional[bool] = Field( + None, description="", alias="adlsObjectVersionLevelImmutabilitySupport" ) - 
preset_workspace_region: Optional[str] = Field( - None, description="", alias="presetWorkspaceRegion" + adls_object_cache_control: Optional[str] = Field( + None, description="", alias="adlsObjectCacheControl" ) - preset_workspace_status: Optional[str] = Field( - None, description="", alias="presetWorkspaceStatus" + adls_object_content_type: Optional[str] = Field( + None, description="", alias="adlsObjectContentType" ) - preset_workspace_deployment_id: Optional[int] = Field( - None, description="", alias="presetWorkspaceDeploymentId" + adls_object_content_m_d5_hash: Optional[str] = Field( + None, description="", alias="adlsObjectContentMD5Hash" ) - preset_workspace_dashboard_count: Optional[int] = Field( - None, description="", alias="presetWorkspaceDashboardCount" + adls_object_content_language: Optional[str] = Field( + None, description="", alias="adlsObjectContentLanguage" ) - preset_workspace_dataset_count: Optional[int] = Field( - None, description="", alias="presetWorkspaceDatasetCount" + adls_object_lease_status: Optional[ADLSLeaseStatus] = Field( + None, description="", alias="adlsObjectLeaseStatus" ) - preset_dashboards: Optional[list[PresetDashboard]] = Field( - None, description="", alias="presetDashboards" + adls_object_lease_state: Optional[ADLSLeaseState] = Field( + None, description="", alias="adlsObjectLeaseState" + ) + adls_object_metadata: Optional[dict[str, str]] = Field( + None, description="", alias="adlsObjectMetadata" + ) + adls_container_qualified_name: Optional[str] = Field( + None, description="", alias="adlsContainerQualifiedName" + ) + adls_container: Optional[ADLSContainer] = Field( + None, description="", alias="adlsContainer" ) # relationship @classmethod # @validate_arguments() @init_guid def create( - cls, *, name: str, connection_qualified_name: str - ) -> PresetWorkspace.Attributes: + cls, *, name: str, adls_container_qualified_name: str + ) -> ADLSObject.Attributes: validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] + ["name", "adls_container_qualified_name"], + [name, adls_container_qualified_name], ) - # Split the connection_qualified_name to extract necessary information - fields = connection_qualified_name.split("/") - if len(fields) != 3: - raise ValueError("Invalid connection_qualified_name") + # Split the qualified_name to extract necessary information + fields = adls_container_qualified_name.split("/") + if len(fields) != 5: + raise ValueError("Invalid qualified_name") try: connector_type = AtlanConnectorType(fields[1]) # type:ignore except ValueError as e: - raise ValueError("Invalid connection_qualified_name") from e + raise ValueError("Invalid qualified_name") from e + adls_account_qualified_name = get_parent_qualified_name( + adls_container_qualified_name + ) - return PresetWorkspace.Attributes( + return ADLSObject.Attributes( name=name, - qualified_name=f"{connection_qualified_name}/{name}", - connection_qualified_name=connection_qualified_name, + adls_container_qualified_name=adls_container_qualified_name, + qualified_name=f"{adls_container_qualified_name}/{name}", + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", connector_name=connector_type.value, + adls_container=ADLSContainer.ref_by_qualified_name( + adls_container_qualified_name + ), + adls_account_qualified_name=adls_account_qualified_name, ) - attributes: "PresetWorkspace.Attributes" = Field( - default_factory=lambda: PresetWorkspace.Attributes(), + attributes: "ADLSObject.Attributes" = Field( + 
default_factory=lambda: ADLSObject.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -PresetChart.Attributes.update_forward_refs() - - -PresetDataset.Attributes.update_forward_refs() +ADLSAccount.Attributes.update_forward_refs() -PresetDashboard.Attributes.update_forward_refs() +ADLSContainer.Attributes.update_forward_refs() -PresetWorkspace.Attributes.update_forward_refs() +ADLSObject.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset64.py b/pyatlan/model/assets/asset64.py index eb559a7a6..742711873 100644 --- a/pyatlan/model/assets/asset64.py +++ b/pyatlan/model/assets/asset64.py @@ -9,593 +9,715 @@ from pydantic import Field, validator +from pyatlan.model.enums import AtlanConnectorType from pyatlan.model.fields.atlan_fields import ( BooleanField, KeywordField, + KeywordTextField, NumericField, RelationField, TextField, ) +from pyatlan.utils import init_guid, validate_required_fields -from .asset40 import Mode +from .asset37 import GCS -class ModeReport(Mode): +class GCSObject(GCS): """Description""" - type_name: str = Field("ModeReport", allow_mutation=False) + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, gcs_bucket_qualified_name: str) -> GCSObject: + validate_required_fields( + ["name", "gcs_bucket_qualified_name"], [name, gcs_bucket_qualified_name] + ) + attributes = GCSObject.Attributes.create( + name=name, gcs_bucket_qualified_name=gcs_bucket_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field("GCSObject", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "ModeReport": - raise ValueError("must be ModeReport") + if v != "GCSObject": + raise ValueError("must be GCSObject") return v def __setattr__(self, name, value): - if name in ModeReport._convenience_properties: + if name in GCSObject._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - MODE_COLLECTION_TOKEN: ClassVar[KeywordField] = KeywordField( - "modeCollectionToken", "modeCollectionToken" + GCS_BUCKET_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "gcsBucketName", "gcsBucketName.keyword", "gcsBucketName" ) """ - TBC + Simple name of the bucket in which this object exists. """ - MODE_REPORT_PUBLISHED_AT: ClassVar[NumericField] = NumericField( - "modeReportPublishedAt", "modeReportPublishedAt" + GCS_BUCKET_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "gcsBucketQualifiedName", + "gcsBucketQualifiedName", + "gcsBucketQualifiedName.text", ) """ - TBC + Unique name of the bucket in which this object exists. """ - MODE_QUERY_COUNT: ClassVar[NumericField] = NumericField( - "modeQueryCount", "modeQueryCount" + GCS_OBJECT_SIZE: ClassVar[NumericField] = NumericField( + "gcsObjectSize", "gcsObjectSize" ) """ - TBC + Object size in bytes. """ - MODE_CHART_COUNT: ClassVar[NumericField] = NumericField( - "modeChartCount", "modeChartCount" + GCS_OBJECT_KEY: ClassVar[KeywordTextField] = KeywordTextField( + "gcsObjectKey", "gcsObjectKey", "gcsObjectKey.text" ) """ - TBC + Key of this object, in GCS. """ - MODE_QUERY_PREVIEW: ClassVar[TextField] = TextField( - "modeQueryPreview", "modeQueryPreview" + GCS_OBJECT_MEDIA_LINK: ClassVar[KeywordTextField] = KeywordTextField( + "gcsObjectMediaLink", "gcsObjectMediaLink", "gcsObjectMediaLink.text" ) """ - TBC + Media link to this object. 
""" - MODE_IS_PUBLIC: ClassVar[BooleanField] = BooleanField( - "modeIsPublic", "modeIsPublic" + GCS_OBJECT_HOLD_TYPE: ClassVar[KeywordField] = KeywordField( + "gcsObjectHoldType", "gcsObjectHoldType" ) """ - TBC + Type of hold on this object. """ - MODE_IS_SHARED: ClassVar[BooleanField] = BooleanField( - "modeIsShared", "modeIsShared" + GCS_OBJECT_GENERATION_ID: ClassVar[NumericField] = NumericField( + "gcsObjectGenerationId", "gcsObjectGenerationId" ) """ - TBC + Generation ID of this object. """ - - MODE_QUERIES: ClassVar[RelationField] = RelationField("modeQueries") + GCS_OBJECT_CRC32C_HASH: ClassVar[KeywordField] = KeywordField( + "gcsObjectCRC32CHash", "gcsObjectCRC32CHash" + ) """ - TBC + CRC32C hash of this object. + """ + GCS_OBJECT_MD5HASH: ClassVar[KeywordField] = KeywordField( + "gcsObjectMD5Hash", "gcsObjectMD5Hash" + ) + """ + MD5 hash of this object. """ - MODE_COLLECTIONS: ClassVar[RelationField] = RelationField("modeCollections") + GCS_OBJECT_DATA_LAST_MODIFIED_TIME: ClassVar[NumericField] = NumericField( + "gcsObjectDataLastModifiedTime", "gcsObjectDataLastModifiedTime" + ) + """ + Time (epoch) at which this object's data was last modified, in milliseconds. + """ + GCS_OBJECT_CONTENT_TYPE: ClassVar[KeywordField] = KeywordField( + "gcsObjectContentType", "gcsObjectContentType" + ) + """ + Type of content in this object. + """ + GCS_OBJECT_CONTENT_ENCODING: ClassVar[KeywordField] = KeywordField( + "gcsObjectContentEncoding", "gcsObjectContentEncoding" + ) + """ + Content encoding of this object. + """ + GCS_OBJECT_CONTENT_DISPOSITION: ClassVar[KeywordField] = KeywordField( + "gcsObjectContentDisposition", "gcsObjectContentDisposition" + ) + """ + Information about how this object's content should be presented. + """ + GCS_OBJECT_CONTENT_LANGUAGE: ClassVar[KeywordField] = KeywordField( + "gcsObjectContentLanguage", "gcsObjectContentLanguage" + ) + """ + Language of this object's contents. + """ + GCS_OBJECT_RETENTION_EXPIRATION_DATE: ClassVar[NumericField] = NumericField( + "gcsObjectRetentionExpirationDate", "gcsObjectRetentionExpirationDate" + ) + """ + Retention expiration date of this object. 
+ """ + + GCS_BUCKET: ClassVar[RelationField] = RelationField("gcsBucket") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "mode_collection_token", - "mode_report_published_at", - "mode_query_count", - "mode_chart_count", - "mode_query_preview", - "mode_is_public", - "mode_is_shared", - "mode_queries", - "mode_collections", + "gcs_bucket_name", + "gcs_bucket_qualified_name", + "gcs_object_size", + "gcs_object_key", + "gcs_object_media_link", + "gcs_object_hold_type", + "gcs_object_generation_id", + "gcs_object_c_r_c32_c_hash", + "gcs_object_m_d5_hash", + "gcs_object_data_last_modified_time", + "gcs_object_content_type", + "gcs_object_content_encoding", + "gcs_object_content_disposition", + "gcs_object_content_language", + "gcs_object_retention_expiration_date", + "gcs_bucket", ] @property - def mode_collection_token(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.mode_collection_token - ) + def gcs_bucket_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.gcs_bucket_name - @mode_collection_token.setter - def mode_collection_token(self, mode_collection_token: Optional[str]): + @gcs_bucket_name.setter + def gcs_bucket_name(self, gcs_bucket_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_collection_token = mode_collection_token + self.attributes.gcs_bucket_name = gcs_bucket_name @property - def mode_report_published_at(self) -> Optional[datetime]: + def gcs_bucket_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.mode_report_published_at + else self.attributes.gcs_bucket_qualified_name ) - @mode_report_published_at.setter - def mode_report_published_at(self, mode_report_published_at: Optional[datetime]): + @gcs_bucket_qualified_name.setter + def gcs_bucket_qualified_name(self, gcs_bucket_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_report_published_at = mode_report_published_at + self.attributes.gcs_bucket_qualified_name = gcs_bucket_qualified_name @property - def mode_query_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.mode_query_count + def gcs_object_size(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.gcs_object_size - @mode_query_count.setter - def mode_query_count(self, mode_query_count: Optional[int]): + @gcs_object_size.setter + def gcs_object_size(self, gcs_object_size: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_query_count = mode_query_count + self.attributes.gcs_object_size = gcs_object_size @property - def mode_chart_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.mode_chart_count + def gcs_object_key(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.gcs_object_key - @mode_chart_count.setter - def mode_chart_count(self, mode_chart_count: Optional[int]): + @gcs_object_key.setter + def gcs_object_key(self, gcs_object_key: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_chart_count = mode_chart_count + self.attributes.gcs_object_key = gcs_object_key @property - def mode_query_preview(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mode_query_preview + def gcs_object_media_link(self) -> 
Optional[str]: + return ( + None if self.attributes is None else self.attributes.gcs_object_media_link + ) - @mode_query_preview.setter - def mode_query_preview(self, mode_query_preview: Optional[str]): + @gcs_object_media_link.setter + def gcs_object_media_link(self, gcs_object_media_link: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_query_preview = mode_query_preview + self.attributes.gcs_object_media_link = gcs_object_media_link @property - def mode_is_public(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.mode_is_public + def gcs_object_hold_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.gcs_object_hold_type - @mode_is_public.setter - def mode_is_public(self, mode_is_public: Optional[bool]): + @gcs_object_hold_type.setter + def gcs_object_hold_type(self, gcs_object_hold_type: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_is_public = mode_is_public + self.attributes.gcs_object_hold_type = gcs_object_hold_type @property - def mode_is_shared(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.mode_is_shared + def gcs_object_generation_id(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.gcs_object_generation_id + ) - @mode_is_shared.setter - def mode_is_shared(self, mode_is_shared: Optional[bool]): + @gcs_object_generation_id.setter + def gcs_object_generation_id(self, gcs_object_generation_id: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_is_shared = mode_is_shared + self.attributes.gcs_object_generation_id = gcs_object_generation_id @property - def mode_queries(self) -> Optional[list[ModeQuery]]: - return None if self.attributes is None else self.attributes.mode_queries + def gcs_object_c_r_c32_c_hash(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.gcs_object_c_r_c32_c_hash + ) - @mode_queries.setter - def mode_queries(self, mode_queries: Optional[list[ModeQuery]]): + @gcs_object_c_r_c32_c_hash.setter + def gcs_object_c_r_c32_c_hash(self, gcs_object_c_r_c32_c_hash: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_queries = mode_queries + self.attributes.gcs_object_c_r_c32_c_hash = gcs_object_c_r_c32_c_hash @property - def mode_collections(self) -> Optional[list[ModeCollection]]: - return None if self.attributes is None else self.attributes.mode_collections + def gcs_object_m_d5_hash(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.gcs_object_m_d5_hash - @mode_collections.setter - def mode_collections(self, mode_collections: Optional[list[ModeCollection]]): + @gcs_object_m_d5_hash.setter + def gcs_object_m_d5_hash(self, gcs_object_m_d5_hash: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_collections = mode_collections + self.attributes.gcs_object_m_d5_hash = gcs_object_m_d5_hash - class Attributes(Mode.Attributes): - mode_collection_token: Optional[str] = Field( - None, description="", alias="modeCollectionToken" - ) - mode_report_published_at: Optional[datetime] = Field( - None, description="", alias="modeReportPublishedAt" - ) - mode_query_count: Optional[int] = Field( - None, description="", alias="modeQueryCount" - ) - mode_chart_count: Optional[int] = Field( - None, 
description="", alias="modeChartCount" - ) - mode_query_preview: Optional[str] = Field( - None, description="", alias="modeQueryPreview" - ) - mode_is_public: Optional[bool] = Field( - None, description="", alias="modeIsPublic" - ) - mode_is_shared: Optional[bool] = Field( - None, description="", alias="modeIsShared" + @property + def gcs_object_data_last_modified_time(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.gcs_object_data_last_modified_time ) - mode_queries: Optional[list[ModeQuery]] = Field( - None, description="", alias="modeQueries" - ) # relationship - mode_collections: Optional[list[ModeCollection]] = Field( - None, description="", alias="modeCollections" - ) # relationship - - attributes: "ModeReport.Attributes" = Field( - default_factory=lambda: ModeReport.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) + @gcs_object_data_last_modified_time.setter + def gcs_object_data_last_modified_time( + self, gcs_object_data_last_modified_time: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_object_data_last_modified_time = ( + gcs_object_data_last_modified_time + ) -class ModeQuery(Mode): - """Description""" - - type_name: str = Field("ModeQuery", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "ModeQuery": - raise ValueError("must be ModeQuery") - return v - - def __setattr__(self, name, value): - if name in ModeQuery._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) + @property + def gcs_object_content_type(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.gcs_object_content_type + ) - MODE_RAW_QUERY: ClassVar[TextField] = TextField("modeRawQuery", "modeRawQuery") - """ - TBC - """ - MODE_REPORT_IMPORT_COUNT: ClassVar[NumericField] = NumericField( - "modeReportImportCount", "modeReportImportCount" - ) - """ - TBC - """ + @gcs_object_content_type.setter + def gcs_object_content_type(self, gcs_object_content_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_object_content_type = gcs_object_content_type - MODE_CHARTS: ClassVar[RelationField] = RelationField("modeCharts") - """ - TBC - """ - MODE_REPORT: ClassVar[RelationField] = RelationField("modeReport") - """ - TBC - """ + @property + def gcs_object_content_encoding(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.gcs_object_content_encoding + ) - _convenience_properties: ClassVar[list[str]] = [ - "mode_raw_query", - "mode_report_import_count", - "mode_charts", - "mode_report", - ] + @gcs_object_content_encoding.setter + def gcs_object_content_encoding(self, gcs_object_content_encoding: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_object_content_encoding = gcs_object_content_encoding @property - def mode_raw_query(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mode_raw_query + def gcs_object_content_disposition(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.gcs_object_content_disposition + ) - @mode_raw_query.setter - def mode_raw_query(self, mode_raw_query: Optional[str]): + 
@gcs_object_content_disposition.setter + def gcs_object_content_disposition( + self, gcs_object_content_disposition: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_raw_query = mode_raw_query + self.attributes.gcs_object_content_disposition = gcs_object_content_disposition @property - def mode_report_import_count(self) -> Optional[int]: + def gcs_object_content_language(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.mode_report_import_count + else self.attributes.gcs_object_content_language ) - @mode_report_import_count.setter - def mode_report_import_count(self, mode_report_import_count: Optional[int]): + @gcs_object_content_language.setter + def gcs_object_content_language(self, gcs_object_content_language: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_report_import_count = mode_report_import_count + self.attributes.gcs_object_content_language = gcs_object_content_language @property - def mode_charts(self) -> Optional[list[ModeChart]]: - return None if self.attributes is None else self.attributes.mode_charts + def gcs_object_retention_expiration_date(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.gcs_object_retention_expiration_date + ) - @mode_charts.setter - def mode_charts(self, mode_charts: Optional[list[ModeChart]]): + @gcs_object_retention_expiration_date.setter + def gcs_object_retention_expiration_date( + self, gcs_object_retention_expiration_date: Optional[datetime] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_charts = mode_charts + self.attributes.gcs_object_retention_expiration_date = ( + gcs_object_retention_expiration_date + ) @property - def mode_report(self) -> Optional[ModeReport]: - return None if self.attributes is None else self.attributes.mode_report + def gcs_bucket(self) -> Optional[GCSBucket]: + return None if self.attributes is None else self.attributes.gcs_bucket - @mode_report.setter - def mode_report(self, mode_report: Optional[ModeReport]): + @gcs_bucket.setter + def gcs_bucket(self, gcs_bucket: Optional[GCSBucket]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_report = mode_report + self.attributes.gcs_bucket = gcs_bucket - class Attributes(Mode.Attributes): - mode_raw_query: Optional[str] = Field( - None, description="", alias="modeRawQuery" + class Attributes(GCS.Attributes): + gcs_bucket_name: Optional[str] = Field( + None, description="", alias="gcsBucketName" ) - mode_report_import_count: Optional[int] = Field( - None, description="", alias="modeReportImportCount" + gcs_bucket_qualified_name: Optional[str] = Field( + None, description="", alias="gcsBucketQualifiedName" ) - mode_charts: Optional[list[ModeChart]] = Field( - None, description="", alias="modeCharts" - ) # relationship - mode_report: Optional[ModeReport] = Field( - None, description="", alias="modeReport" + gcs_object_size: Optional[int] = Field( + None, description="", alias="gcsObjectSize" + ) + gcs_object_key: Optional[str] = Field( + None, description="", alias="gcsObjectKey" + ) + gcs_object_media_link: Optional[str] = Field( + None, description="", alias="gcsObjectMediaLink" + ) + gcs_object_hold_type: Optional[str] = Field( + None, description="", alias="gcsObjectHoldType" + ) + gcs_object_generation_id: Optional[int] = Field( + None, description="", alias="gcsObjectGenerationId" + ) + 
gcs_object_c_r_c32_c_hash: Optional[str] = Field( + None, description="", alias="gcsObjectCRC32CHash" + ) + gcs_object_m_d5_hash: Optional[str] = Field( + None, description="", alias="gcsObjectMD5Hash" + ) + gcs_object_data_last_modified_time: Optional[datetime] = Field( + None, description="", alias="gcsObjectDataLastModifiedTime" + ) + gcs_object_content_type: Optional[str] = Field( + None, description="", alias="gcsObjectContentType" + ) + gcs_object_content_encoding: Optional[str] = Field( + None, description="", alias="gcsObjectContentEncoding" + ) + gcs_object_content_disposition: Optional[str] = Field( + None, description="", alias="gcsObjectContentDisposition" + ) + gcs_object_content_language: Optional[str] = Field( + None, description="", alias="gcsObjectContentLanguage" + ) + gcs_object_retention_expiration_date: Optional[datetime] = Field( + None, description="", alias="gcsObjectRetentionExpirationDate" + ) + gcs_bucket: Optional[GCSBucket] = Field( + None, description="", alias="gcsBucket" ) # relationship - attributes: "ModeQuery.Attributes" = Field( - default_factory=lambda: ModeQuery.Attributes(), + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, gcs_bucket_qualified_name: str + ) -> GCSObject.Attributes: + validate_required_fields( + ["name", "gcs_bucket_qualified_name"], [name, gcs_bucket_qualified_name] + ) + + # Split the gcs_bucket_qualified_name to extract necessary information + fields = gcs_bucket_qualified_name.split("/") + if len(fields) != 4: + raise ValueError("Invalid gcs_bucket_qualified_name") + + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid gcs_bucket_qualified_name") from e + + return GCSObject.Attributes( + name=name, + gcs_bucket_qualified_name=gcs_bucket_qualified_name, + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + qualified_name=f"{gcs_bucket_qualified_name}/{name}", + connector_name=connector_type.value, + gcs_bucket=GCSBucket.ref_by_qualified_name(gcs_bucket_qualified_name), + ) + + attributes: "GCSObject.Attributes" = Field( + default_factory=lambda: GCSObject.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class ModeChart(Mode): +class GCSBucket(GCS): """Description""" - type_name: str = Field("ModeChart", allow_mutation=False) + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, connection_qualified_name: str) -> GCSBucket: + validate_required_fields( + ["name", "connection_qualified_name"], [name, connection_qualified_name] + ) + attributes = GCSBucket.Attributes.create( + name=name, connection_qualified_name=connection_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field("GCSBucket", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "ModeChart": - raise ValueError("must be ModeChart") + if v != "GCSBucket": + raise ValueError("must be GCSBucket") return v def __setattr__(self, name, value): - if name in ModeChart._convenience_properties: + if name in GCSBucket._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - MODE_CHART_TYPE: ClassVar[KeywordField] = KeywordField( - "modeChartType", "modeChartType" + GCS_OBJECT_COUNT: ClassVar[NumericField] = NumericField( + "gcsObjectCount", "gcsObjectCount" ) """ - TBC + Number of objects within the bucket. + """ + GCS_BUCKET_VERSIONING_ENABLED: ClassVar[BooleanField] = BooleanField( + "gcsBucketVersioningEnabled", "gcsBucketVersioningEnabled" + ) + """ + Whether versioning is enabled on the bucket (true) or not (false). + """ + GCS_BUCKET_RETENTION_LOCKED: ClassVar[BooleanField] = BooleanField( + "gcsBucketRetentionLocked", "gcsBucketRetentionLocked" + ) + """ + Whether retention is locked for this bucket (true) or not (false). + """ + GCS_BUCKET_RETENTION_PERIOD: ClassVar[NumericField] = NumericField( + "gcsBucketRetentionPeriod", "gcsBucketRetentionPeriod" + ) + """ + Retention period for objects in this bucket. + """ + GCS_BUCKET_RETENTION_EFFECTIVE_TIME: ClassVar[NumericField] = NumericField( + "gcsBucketRetentionEffectiveTime", "gcsBucketRetentionEffectiveTime" + ) + """ + Effective time for retention of objects in this bucket. + """ + GCS_BUCKET_LIFECYCLE_RULES: ClassVar[TextField] = TextField( + "gcsBucketLifecycleRules", "gcsBucketLifecycleRules" + ) + """ + Lifecycle rules for this bucket. + """ + GCS_BUCKET_RETENTION_POLICY: ClassVar[TextField] = TextField( + "gcsBucketRetentionPolicy", "gcsBucketRetentionPolicy" + ) + """ + Retention policy for this bucket. 
""" - MODE_QUERY: ClassVar[RelationField] = RelationField("modeQuery") + GCS_OBJECTS: ClassVar[RelationField] = RelationField("gcsObjects") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "mode_chart_type", - "mode_query", + "gcs_object_count", + "gcs_bucket_versioning_enabled", + "gcs_bucket_retention_locked", + "gcs_bucket_retention_period", + "gcs_bucket_retention_effective_time", + "gcs_bucket_lifecycle_rules", + "gcs_bucket_retention_policy", + "gcs_objects", ] @property - def mode_chart_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mode_chart_type + def gcs_object_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.gcs_object_count - @mode_chart_type.setter - def mode_chart_type(self, mode_chart_type: Optional[str]): + @gcs_object_count.setter + def gcs_object_count(self, gcs_object_count: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_chart_type = mode_chart_type + self.attributes.gcs_object_count = gcs_object_count @property - def mode_query(self) -> Optional[ModeQuery]: - return None if self.attributes is None else self.attributes.mode_query + def gcs_bucket_versioning_enabled(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.gcs_bucket_versioning_enabled + ) - @mode_query.setter - def mode_query(self, mode_query: Optional[ModeQuery]): + @gcs_bucket_versioning_enabled.setter + def gcs_bucket_versioning_enabled( + self, gcs_bucket_versioning_enabled: Optional[bool] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_query = mode_query - - class Attributes(Mode.Attributes): - mode_chart_type: Optional[str] = Field( - None, description="", alias="modeChartType" - ) - mode_query: Optional[ModeQuery] = Field( - None, description="", alias="modeQuery" - ) # relationship - - attributes: "ModeChart.Attributes" = Field( - default_factory=lambda: ModeChart.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class ModeWorkspace(Mode): - """Description""" - - type_name: str = Field("ModeWorkspace", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "ModeWorkspace": - raise ValueError("must be ModeWorkspace") - return v - - def __setattr__(self, name, value): - if name in ModeWorkspace._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MODE_COLLECTION_COUNT: ClassVar[NumericField] = NumericField( - "modeCollectionCount", "modeCollectionCount" - ) - """ - TBC - """ - - MODE_COLLECTIONS: ClassVar[RelationField] = RelationField("modeCollections") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "mode_collection_count", - "mode_collections", - ] + self.attributes.gcs_bucket_versioning_enabled = gcs_bucket_versioning_enabled @property - def mode_collection_count(self) -> Optional[int]: + def gcs_bucket_retention_locked(self) -> Optional[bool]: return ( - None if self.attributes is None else self.attributes.mode_collection_count + None + if self.attributes is None + else self.attributes.gcs_bucket_retention_locked ) - @mode_collection_count.setter - def mode_collection_count(self, mode_collection_count: Optional[int]): + @gcs_bucket_retention_locked.setter + def gcs_bucket_retention_locked(self, gcs_bucket_retention_locked: Optional[bool]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_collection_count = mode_collection_count + self.attributes.gcs_bucket_retention_locked = gcs_bucket_retention_locked @property - def mode_collections(self) -> Optional[list[ModeCollection]]: - return None if self.attributes is None else self.attributes.mode_collections + def gcs_bucket_retention_period(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.gcs_bucket_retention_period + ) - @mode_collections.setter - def mode_collections(self, mode_collections: Optional[list[ModeCollection]]): + @gcs_bucket_retention_period.setter + def gcs_bucket_retention_period(self, gcs_bucket_retention_period: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_collections = mode_collections - - class Attributes(Mode.Attributes): - mode_collection_count: Optional[int] = Field( - None, description="", alias="modeCollectionCount" - ) - mode_collections: Optional[list[ModeCollection]] = Field( - None, description="", alias="modeCollections" - ) # relationship - - attributes: "ModeWorkspace.Attributes" = Field( - default_factory=lambda: ModeWorkspace.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class ModeCollection(Mode): - """Description""" - - type_name: str = Field("ModeCollection", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "ModeCollection": - raise ValueError("must be ModeCollection") - return v - - def __setattr__(self, name, value): - if name in ModeCollection._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MODE_COLLECTION_TYPE: ClassVar[KeywordField] = KeywordField( - "modeCollectionType", "modeCollectionType" - ) - """ - TBC - """ - MODE_COLLECTION_STATE: ClassVar[KeywordField] = KeywordField( - "modeCollectionState", "modeCollectionState" - ) - """ - TBC - """ - - MODE_WORKSPACE: ClassVar[RelationField] = RelationField("modeWorkspace") - """ - TBC - """ - MODE_REPORTS: ClassVar[RelationField] = RelationField("modeReports") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "mode_collection_type", - "mode_collection_state", - "mode_workspace", - "mode_reports", - ] + self.attributes.gcs_bucket_retention_period = gcs_bucket_retention_period @property - def mode_collection_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mode_collection_type + def gcs_bucket_retention_effective_time(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.gcs_bucket_retention_effective_time + ) - @mode_collection_type.setter - def mode_collection_type(self, mode_collection_type: Optional[str]): + @gcs_bucket_retention_effective_time.setter + def gcs_bucket_retention_effective_time( + self, gcs_bucket_retention_effective_time: Optional[datetime] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_collection_type = mode_collection_type + self.attributes.gcs_bucket_retention_effective_time = ( + gcs_bucket_retention_effective_time + ) @property - def mode_collection_state(self) -> Optional[str]: + def gcs_bucket_lifecycle_rules(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.mode_collection_state + None + if self.attributes is None + else self.attributes.gcs_bucket_lifecycle_rules ) - @mode_collection_state.setter - def mode_collection_state(self, mode_collection_state: Optional[str]): + @gcs_bucket_lifecycle_rules.setter + def gcs_bucket_lifecycle_rules(self, gcs_bucket_lifecycle_rules: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_collection_state = mode_collection_state + self.attributes.gcs_bucket_lifecycle_rules = gcs_bucket_lifecycle_rules @property - def mode_workspace(self) -> Optional[ModeWorkspace]: - return None if self.attributes is None else self.attributes.mode_workspace + def gcs_bucket_retention_policy(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.gcs_bucket_retention_policy + ) - @mode_workspace.setter - def mode_workspace(self, mode_workspace: Optional[ModeWorkspace]): + @gcs_bucket_retention_policy.setter + def gcs_bucket_retention_policy(self, gcs_bucket_retention_policy: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_workspace = mode_workspace + self.attributes.gcs_bucket_retention_policy = gcs_bucket_retention_policy @property - def mode_reports(self) -> Optional[list[ModeReport]]: - return None if 
self.attributes is None else self.attributes.mode_reports + def gcs_objects(self) -> Optional[list[GCSObject]]: + return None if self.attributes is None else self.attributes.gcs_objects - @mode_reports.setter - def mode_reports(self, mode_reports: Optional[list[ModeReport]]): + @gcs_objects.setter + def gcs_objects(self, gcs_objects: Optional[list[GCSObject]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mode_reports = mode_reports + self.attributes.gcs_objects = gcs_objects - class Attributes(Mode.Attributes): - mode_collection_type: Optional[str] = Field( - None, description="", alias="modeCollectionType" + class Attributes(GCS.Attributes): + gcs_object_count: Optional[int] = Field( + None, description="", alias="gcsObjectCount" ) - mode_collection_state: Optional[str] = Field( - None, description="", alias="modeCollectionState" + gcs_bucket_versioning_enabled: Optional[bool] = Field( + None, description="", alias="gcsBucketVersioningEnabled" ) - mode_workspace: Optional[ModeWorkspace] = Field( - None, description="", alias="modeWorkspace" - ) # relationship - mode_reports: Optional[list[ModeReport]] = Field( - None, description="", alias="modeReports" + gcs_bucket_retention_locked: Optional[bool] = Field( + None, description="", alias="gcsBucketRetentionLocked" + ) + gcs_bucket_retention_period: Optional[int] = Field( + None, description="", alias="gcsBucketRetentionPeriod" + ) + gcs_bucket_retention_effective_time: Optional[datetime] = Field( + None, description="", alias="gcsBucketRetentionEffectiveTime" + ) + gcs_bucket_lifecycle_rules: Optional[str] = Field( + None, description="", alias="gcsBucketLifecycleRules" + ) + gcs_bucket_retention_policy: Optional[str] = Field( + None, description="", alias="gcsBucketRetentionPolicy" + ) + gcs_objects: Optional[list[GCSObject]] = Field( + None, description="", alias="gcsObjects" ) # relationship - attributes: "ModeCollection.Attributes" = Field( - default_factory=lambda: ModeCollection.Attributes(), + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, connection_qualified_name: str + ) -> GCSBucket.Attributes: + validate_required_fields( + ["name", "connection_qualified_name"], [name, connection_qualified_name] + ) + + # Split the connection_qualified_name to extract necessary information + fields = connection_qualified_name.split("/") + if len(fields) != 3: + raise ValueError("Invalid connection_qualified_name") + + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid connection_qualified_name") from e + + return GCSBucket.Attributes( + name=name, + qualified_name=f"{connection_qualified_name}/{name}", + connection_qualified_name=connection_qualified_name, + connector_name=connector_type.value, + ) + + attributes: "GCSBucket.Attributes" = Field( + default_factory=lambda: GCSBucket.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -ModeReport.Attributes.update_forward_refs() - - -ModeQuery.Attributes.update_forward_refs() - - -ModeChart.Attributes.update_forward_refs() - - -ModeWorkspace.Attributes.update_forward_refs() +GCSObject.Attributes.update_forward_refs() -ModeCollection.Attributes.update_forward_refs() +GCSBucket.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset65.py b/pyatlan/model/assets/asset65.py index ae494faab..c5893b0f6 100644 --- a/pyatlan/model/assets/asset65.py +++ b/pyatlan/model/assets/asset65.py @@ -8,188 +8,951 @@ from pydantic import Field, validator +from pyatlan.model.enums import AtlanConnectorType from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, KeywordTextField, + KeywordTextStemmedField, NumericField, RelationField, + TextField, ) +from pyatlan.utils import init_guid, validate_required_fields -from .asset41 import Sigma +from .asset40 import Preset -class SigmaDatasetColumn(Sigma): +class PresetChart(Preset): """Description""" - type_name: str = Field("SigmaDatasetColumn", allow_mutation=False) + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, preset_dashboard_qualified_name: str) -> PresetChart: + validate_required_fields( + ["name", "preset_dashboard_qualified_name"], + [name, preset_dashboard_qualified_name], + ) + attributes = PresetChart.Attributes.create( + name=name, preset_dashboard_qualified_name=preset_dashboard_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field("PresetChart", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "SigmaDatasetColumn": - raise ValueError("must be SigmaDatasetColumn") + if v != "PresetChart": + raise ValueError("must be PresetChart") return v def __setattr__(self, name, value): - if name in SigmaDatasetColumn._convenience_properties: + if name in PresetChart._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - SIGMA_DATASET_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "sigmaDatasetQualifiedName", - "sigmaDatasetQualifiedName", - "sigmaDatasetQualifiedName.text", + PRESET_CHART_DESCRIPTION_MARKDOWN: ClassVar[TextField] = TextField( + "presetChartDescriptionMarkdown", "presetChartDescriptionMarkdown" ) """ - TBC + """ - SIGMA_DATASET_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "sigmaDatasetName", "sigmaDatasetName.keyword", "sigmaDatasetName" + PRESET_CHART_FORM_DATA: ClassVar[KeywordField] = KeywordField( + "presetChartFormData", "presetChartFormData" ) """ + + """ + + PRESET_DASHBOARD: ClassVar[RelationField] = RelationField("presetDashboard") + """ TBC """ - SIGMA_DATASET: ClassVar[RelationField] = RelationField("sigmaDataset") + _convenience_properties: ClassVar[list[str]] = [ + "preset_chart_description_markdown", + "preset_chart_form_data", + "preset_dashboard", + ] + + @property + def preset_chart_description_markdown(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.preset_chart_description_markdown + ) + + @preset_chart_description_markdown.setter + def preset_chart_description_markdown( + self, preset_chart_description_markdown: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_chart_description_markdown = ( + preset_chart_description_markdown + ) + + @property + def 
preset_chart_form_data(self) -> Optional[dict[str, str]]: + return ( + None if self.attributes is None else self.attributes.preset_chart_form_data + ) + + @preset_chart_form_data.setter + def preset_chart_form_data(self, preset_chart_form_data: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_chart_form_data = preset_chart_form_data + + @property + def preset_dashboard(self) -> Optional[PresetDashboard]: + return None if self.attributes is None else self.attributes.preset_dashboard + + @preset_dashboard.setter + def preset_dashboard(self, preset_dashboard: Optional[PresetDashboard]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboard = preset_dashboard + + class Attributes(Preset.Attributes): + preset_chart_description_markdown: Optional[str] = Field( + None, description="", alias="presetChartDescriptionMarkdown" + ) + preset_chart_form_data: Optional[dict[str, str]] = Field( + None, description="", alias="presetChartFormData" + ) + preset_dashboard: Optional[PresetDashboard] = Field( + None, description="", alias="presetDashboard" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, preset_dashboard_qualified_name: str + ) -> PresetChart.Attributes: + validate_required_fields( + ["name", "preset_dashboard_qualified_name"], + [name, preset_dashboard_qualified_name], + ) + + # Split the preset_dashboard_qualified_name to extract necessary information + fields = preset_dashboard_qualified_name.split("/") + if len(fields) != 5: + raise ValueError("Invalid preset_dashboard_qualified_name") + + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid preset_dashboard_qualified_name") from e + + return PresetChart.Attributes( + name=name, + preset_dashboard_qualified_name=preset_dashboard_qualified_name, + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + qualified_name=f"{preset_dashboard_qualified_name}/{name}", + connector_name=connector_type.value, + preset_dashboard=PresetDashboard.ref_by_qualified_name( + preset_dashboard_qualified_name + ), + ) + + attributes: "PresetChart.Attributes" = Field( + default_factory=lambda: PresetChart.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class PresetDataset(Preset): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, preset_dashboard_qualified_name: str + ) -> PresetDataset: + validate_required_fields( + ["name", "preset_dashboard_qualified_name"], + [name, preset_dashboard_qualified_name], + ) + attributes = PresetDataset.Attributes.create( + name=name, preset_dashboard_qualified_name=preset_dashboard_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field("PresetDataset", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PresetDataset": + raise ValueError("must be PresetDataset") + return v + + def __setattr__(self, name, value): + if name in PresetDataset._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PRESET_DATASET_DATASOURCE_NAME: ClassVar[ + KeywordTextStemmedField + ] = KeywordTextStemmedField( + "presetDatasetDatasourceName", + "presetDatasetDatasourceName.keyword", + "presetDatasetDatasourceName", + "presetDatasetDatasourceName.stemmed", + ) + """ + + """ + PRESET_DATASET_ID: ClassVar[NumericField] = NumericField( + "presetDatasetId", "presetDatasetId" + ) + """ + + """ + PRESET_DATASET_TYPE: ClassVar[KeywordField] = KeywordField( + "presetDatasetType", "presetDatasetType" + ) + """ + + """ + + PRESET_DASHBOARD: ClassVar[RelationField] = RelationField("presetDashboard") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "sigma_dataset_qualified_name", - "sigma_dataset_name", - "sigma_dataset", + "preset_dataset_datasource_name", + "preset_dataset_id", + "preset_dataset_type", + "preset_dashboard", ] @property - def sigma_dataset_qualified_name(self) -> Optional[str]: + def preset_dataset_datasource_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.sigma_dataset_qualified_name + else self.attributes.preset_dataset_datasource_name ) - @sigma_dataset_qualified_name.setter - def sigma_dataset_qualified_name(self, sigma_dataset_qualified_name: Optional[str]): + @preset_dataset_datasource_name.setter + def preset_dataset_datasource_name( + self, preset_dataset_datasource_name: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_dataset_qualified_name = sigma_dataset_qualified_name + self.attributes.preset_dataset_datasource_name = preset_dataset_datasource_name @property - def sigma_dataset_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.sigma_dataset_name + def preset_dataset_id(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.preset_dataset_id - @sigma_dataset_name.setter - def sigma_dataset_name(self, sigma_dataset_name: Optional[str]): + @preset_dataset_id.setter + def preset_dataset_id(self, preset_dataset_id: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_dataset_name = sigma_dataset_name + self.attributes.preset_dataset_id = preset_dataset_id @property - def sigma_dataset(self) -> Optional[SigmaDataset]: - return None if self.attributes is None else self.attributes.sigma_dataset + def preset_dataset_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.preset_dataset_type - @sigma_dataset.setter - def sigma_dataset(self, 
sigma_dataset: Optional[SigmaDataset]): + @preset_dataset_type.setter + def preset_dataset_type(self, preset_dataset_type: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_dataset = sigma_dataset + self.attributes.preset_dataset_type = preset_dataset_type + + @property + def preset_dashboard(self) -> Optional[PresetDashboard]: + return None if self.attributes is None else self.attributes.preset_dashboard - class Attributes(Sigma.Attributes): - sigma_dataset_qualified_name: Optional[str] = Field( - None, description="", alias="sigmaDatasetQualifiedName" + @preset_dashboard.setter + def preset_dashboard(self, preset_dashboard: Optional[PresetDashboard]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboard = preset_dashboard + + class Attributes(Preset.Attributes): + preset_dataset_datasource_name: Optional[str] = Field( + None, description="", alias="presetDatasetDatasourceName" + ) + preset_dataset_id: Optional[int] = Field( + None, description="", alias="presetDatasetId" ) - sigma_dataset_name: Optional[str] = Field( - None, description="", alias="sigmaDatasetName" + preset_dataset_type: Optional[str] = Field( + None, description="", alias="presetDatasetType" ) - sigma_dataset: Optional[SigmaDataset] = Field( - None, description="", alias="sigmaDataset" + preset_dashboard: Optional[PresetDashboard] = Field( + None, description="", alias="presetDashboard" ) # relationship - attributes: "SigmaDatasetColumn.Attributes" = Field( - default_factory=lambda: SigmaDatasetColumn.Attributes(), + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, preset_dashboard_qualified_name: str + ) -> PresetDataset.Attributes: + validate_required_fields( + ["name", "preset_dashboard_qualified_name"], + [name, preset_dashboard_qualified_name], + ) + + # Split the preset_dashboard_qualified_name to extract necessary information + fields = preset_dashboard_qualified_name.split("/") + if len(fields) != 5: + raise ValueError("Invalid preset_dashboard_qualified_name") + + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid preset_dashboard_qualified_name") from e + + return PresetDataset.Attributes( + name=name, + preset_dashboard_qualified_name=preset_dashboard_qualified_name, + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + qualified_name=f"{preset_dashboard_qualified_name}/{name}", + connector_name=connector_type.value, + preset_dashboard=PresetDashboard.ref_by_qualified_name( + preset_dashboard_qualified_name + ), + ) + + attributes: "PresetDataset.Attributes" = Field( + default_factory=lambda: PresetDataset.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class SigmaDataset(Sigma): +class PresetDashboard(Preset): """Description""" - type_name: str = Field("SigmaDataset", allow_mutation=False) + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, preset_workspace_qualified_name: str + ) -> PresetDashboard: + validate_required_fields( + ["name", "preset_workspace_qualified_name"], + [name, preset_workspace_qualified_name], + ) + attributes = PresetDashboard.Attributes.create( + name=name, preset_workspace_qualified_name=preset_workspace_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field("PresetDashboard", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "SigmaDataset": - raise ValueError("must be SigmaDataset") + if v != "PresetDashboard": + raise ValueError("must be PresetDashboard") return v def __setattr__(self, name, value): - if name in SigmaDataset._convenience_properties: + if name in PresetDashboard._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - SIGMA_DATASET_COLUMN_COUNT: ClassVar[NumericField] = NumericField( - "sigmaDatasetColumnCount", "sigmaDatasetColumnCount" + PRESET_DASHBOARD_CHANGED_BY_NAME: ClassVar[ + KeywordTextStemmedField + ] = KeywordTextStemmedField( + "presetDashboardChangedByName", + "presetDashboardChangedByName.keyword", + "presetDashboardChangedByName", + "presetDashboardChangedByName.stemmed", ) """ + + """ + PRESET_DASHBOARD_CHANGED_BY_URL: ClassVar[KeywordField] = KeywordField( + "presetDashboardChangedByURL", "presetDashboardChangedByURL" + ) + """ + + """ + PRESET_DASHBOARD_IS_MANAGED_EXTERNALLY: ClassVar[BooleanField] = BooleanField( + "presetDashboardIsManagedExternally", "presetDashboardIsManagedExternally" + ) + """ + + """ + PRESET_DASHBOARD_IS_PUBLISHED: ClassVar[BooleanField] = BooleanField( + "presetDashboardIsPublished", "presetDashboardIsPublished" + ) + """ + + """ + PRESET_DASHBOARD_THUMBNAIL_URL: ClassVar[KeywordField] = KeywordField( + "presetDashboardThumbnailURL", "presetDashboardThumbnailURL" + ) + """ + + """ + PRESET_DASHBOARD_CHART_COUNT: ClassVar[NumericField] = NumericField( + "presetDashboardChartCount", "presetDashboardChartCount" + ) + """ + + """ + + PRESET_DATASETS: ClassVar[RelationField] = RelationField("presetDatasets") + """ + TBC + """ + PRESET_CHARTS: ClassVar[RelationField] = RelationField("presetCharts") + """ TBC """ + PRESET_WORKSPACE: ClassVar[RelationField] = RelationField("presetWorkspace") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "preset_dashboard_changed_by_name", + "preset_dashboard_changed_by_url", + "preset_dashboard_is_managed_externally", + "preset_dashboard_is_published", + "preset_dashboard_thumbnail_url", + "preset_dashboard_chart_count", + "preset_datasets", + "preset_charts", + "preset_workspace", + ] - SIGMA_DATASET_COLUMNS: ClassVar[RelationField] = RelationField( - "sigmaDatasetColumns" + @property + def preset_dashboard_changed_by_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.preset_dashboard_changed_by_name + ) + + @preset_dashboard_changed_by_name.setter + def preset_dashboard_changed_by_name( + self, preset_dashboard_changed_by_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboard_changed_by_name = ( + 
preset_dashboard_changed_by_name + ) + + @property + def preset_dashboard_changed_by_url(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.preset_dashboard_changed_by_url + ) + + @preset_dashboard_changed_by_url.setter + def preset_dashboard_changed_by_url( + self, preset_dashboard_changed_by_url: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboard_changed_by_url = ( + preset_dashboard_changed_by_url + ) + + @property + def preset_dashboard_is_managed_externally(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.preset_dashboard_is_managed_externally + ) + + @preset_dashboard_is_managed_externally.setter + def preset_dashboard_is_managed_externally( + self, preset_dashboard_is_managed_externally: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboard_is_managed_externally = ( + preset_dashboard_is_managed_externally + ) + + @property + def preset_dashboard_is_published(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.preset_dashboard_is_published + ) + + @preset_dashboard_is_published.setter + def preset_dashboard_is_published( + self, preset_dashboard_is_published: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboard_is_published = preset_dashboard_is_published + + @property + def preset_dashboard_thumbnail_url(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.preset_dashboard_thumbnail_url + ) + + @preset_dashboard_thumbnail_url.setter + def preset_dashboard_thumbnail_url( + self, preset_dashboard_thumbnail_url: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboard_thumbnail_url = preset_dashboard_thumbnail_url + + @property + def preset_dashboard_chart_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.preset_dashboard_chart_count + ) + + @preset_dashboard_chart_count.setter + def preset_dashboard_chart_count(self, preset_dashboard_chart_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboard_chart_count = preset_dashboard_chart_count + + @property + def preset_datasets(self) -> Optional[list[PresetDataset]]: + return None if self.attributes is None else self.attributes.preset_datasets + + @preset_datasets.setter + def preset_datasets(self, preset_datasets: Optional[list[PresetDataset]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_datasets = preset_datasets + + @property + def preset_charts(self) -> Optional[list[PresetChart]]: + return None if self.attributes is None else self.attributes.preset_charts + + @preset_charts.setter + def preset_charts(self, preset_charts: Optional[list[PresetChart]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_charts = preset_charts + + @property + def preset_workspace(self) -> Optional[PresetWorkspace]: + return None if self.attributes is None else self.attributes.preset_workspace + + @preset_workspace.setter + def preset_workspace(self, preset_workspace: Optional[PresetWorkspace]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace = 
preset_workspace + + class Attributes(Preset.Attributes): + preset_dashboard_changed_by_name: Optional[str] = Field( + None, description="", alias="presetDashboardChangedByName" + ) + preset_dashboard_changed_by_url: Optional[str] = Field( + None, description="", alias="presetDashboardChangedByURL" + ) + preset_dashboard_is_managed_externally: Optional[bool] = Field( + None, description="", alias="presetDashboardIsManagedExternally" + ) + preset_dashboard_is_published: Optional[bool] = Field( + None, description="", alias="presetDashboardIsPublished" + ) + preset_dashboard_thumbnail_url: Optional[str] = Field( + None, description="", alias="presetDashboardThumbnailURL" + ) + preset_dashboard_chart_count: Optional[int] = Field( + None, description="", alias="presetDashboardChartCount" + ) + preset_datasets: Optional[list[PresetDataset]] = Field( + None, description="", alias="presetDatasets" + ) # relationship + preset_charts: Optional[list[PresetChart]] = Field( + None, description="", alias="presetCharts" + ) # relationship + preset_workspace: Optional[PresetWorkspace] = Field( + None, description="", alias="presetWorkspace" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, preset_workspace_qualified_name: str + ) -> PresetDashboard.Attributes: + validate_required_fields( + ["name", "preset_workspace_qualified_name"], + [name, preset_workspace_qualified_name], + ) + + # Split the preset_workspace_qualified_name to extract necessary information + fields = preset_workspace_qualified_name.split("/") + if len(fields) != 4: + raise ValueError("Invalid preset_workspace_qualified_name") + + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid preset_workspace_qualified_name") from e + + return PresetDashboard.Attributes( + name=name, + preset_workspace_qualified_name=preset_workspace_qualified_name, + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + qualified_name=f"{preset_workspace_qualified_name}/{name}", + connector_name=connector_type.value, + preset_workspace=PresetWorkspace.ref_by_qualified_name( + preset_workspace_qualified_name + ), + ) + + attributes: "PresetDashboard.Attributes" = Field( + default_factory=lambda: PresetDashboard.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", ) + + +class PresetWorkspace(Preset): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, connection_qualified_name: str) -> PresetWorkspace: + validate_required_fields( + ["name", "connection_qualified_name"], [name, connection_qualified_name] + ) + attributes = PresetWorkspace.Attributes.create( + name=name, connection_qualified_name=connection_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field("PresetWorkspace", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PresetWorkspace": + raise ValueError("must be PresetWorkspace") + return v + + def __setattr__(self, name, value): + if name in PresetWorkspace._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PRESET_WORKSPACE_PUBLIC_DASHBOARDS_ALLOWED: ClassVar[BooleanField] = BooleanField( + "presetWorkspacePublicDashboardsAllowed", + "presetWorkspacePublicDashboardsAllowed", + ) + """ + + """ + PRESET_WORKSPACE_CLUSTER_ID: ClassVar[NumericField] = NumericField( + "presetWorkspaceClusterId", "presetWorkspaceClusterId" + ) + """ + + """ + PRESET_WORKSPACE_HOSTNAME: ClassVar[KeywordTextField] = KeywordTextField( + "presetWorkspaceHostname", + "presetWorkspaceHostname", + "presetWorkspaceHostname.text", + ) + """ + + """ + PRESET_WORKSPACE_IS_IN_MAINTENANCE_MODE: ClassVar[BooleanField] = BooleanField( + "presetWorkspaceIsInMaintenanceMode", "presetWorkspaceIsInMaintenanceMode" + ) + """ + + """ + PRESET_WORKSPACE_REGION: ClassVar[KeywordTextField] = KeywordTextField( + "presetWorkspaceRegion", "presetWorkspaceRegion", "presetWorkspaceRegion.text" + ) + """ + + """ + PRESET_WORKSPACE_STATUS: ClassVar[KeywordField] = KeywordField( + "presetWorkspaceStatus", "presetWorkspaceStatus" + ) + """ + + """ + PRESET_WORKSPACE_DEPLOYMENT_ID: ClassVar[NumericField] = NumericField( + "presetWorkspaceDeploymentId", "presetWorkspaceDeploymentId" + ) + """ + + """ + PRESET_WORKSPACE_DASHBOARD_COUNT: ClassVar[NumericField] = NumericField( + "presetWorkspaceDashboardCount", "presetWorkspaceDashboardCount" + ) + """ + + """ + PRESET_WORKSPACE_DATASET_COUNT: ClassVar[NumericField] = NumericField( + "presetWorkspaceDatasetCount", "presetWorkspaceDatasetCount" + ) + """ + + """ + + PRESET_DASHBOARDS: ClassVar[RelationField] = RelationField("presetDashboards") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "sigma_dataset_column_count", - "sigma_dataset_columns", + "preset_workspace_public_dashboards_allowed", + "preset_workspace_cluster_id", + "preset_workspace_hostname", + "preset_workspace_is_in_maintenance_mode", + "preset_workspace_region", + "preset_workspace_status", + "preset_workspace_deployment_id", + "preset_workspace_dashboard_count", + "preset_workspace_dataset_count", + "preset_dashboards", ] @property - def sigma_dataset_column_count(self) -> Optional[int]: + def preset_workspace_public_dashboards_allowed(self) -> Optional[bool]: return ( None if self.attributes is None - else self.attributes.sigma_dataset_column_count + else self.attributes.preset_workspace_public_dashboards_allowed ) - @sigma_dataset_column_count.setter - def sigma_dataset_column_count(self, sigma_dataset_column_count: Optional[int]): + @preset_workspace_public_dashboards_allowed.setter + def preset_workspace_public_dashboards_allowed( + self, 
preset_workspace_public_dashboards_allowed: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace_public_dashboards_allowed = ( + preset_workspace_public_dashboards_allowed + ) + + @property + def preset_workspace_cluster_id(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.preset_workspace_cluster_id + ) + + @preset_workspace_cluster_id.setter + def preset_workspace_cluster_id(self, preset_workspace_cluster_id: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_dataset_column_count = sigma_dataset_column_count + self.attributes.preset_workspace_cluster_id = preset_workspace_cluster_id @property - def sigma_dataset_columns(self) -> Optional[list[SigmaDatasetColumn]]: + def preset_workspace_hostname(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.sigma_dataset_columns + None + if self.attributes is None + else self.attributes.preset_workspace_hostname + ) + + @preset_workspace_hostname.setter + def preset_workspace_hostname(self, preset_workspace_hostname: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace_hostname = preset_workspace_hostname + + @property + def preset_workspace_is_in_maintenance_mode(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.preset_workspace_is_in_maintenance_mode ) - @sigma_dataset_columns.setter - def sigma_dataset_columns( - self, sigma_dataset_columns: Optional[list[SigmaDatasetColumn]] + @preset_workspace_is_in_maintenance_mode.setter + def preset_workspace_is_in_maintenance_mode( + self, preset_workspace_is_in_maintenance_mode: Optional[bool] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_dataset_columns = sigma_dataset_columns + self.attributes.preset_workspace_is_in_maintenance_mode = ( + preset_workspace_is_in_maintenance_mode + ) - class Attributes(Sigma.Attributes): - sigma_dataset_column_count: Optional[int] = Field( - None, description="", alias="sigmaDatasetColumnCount" + @property + def preset_workspace_region(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.preset_workspace_region ) - sigma_dataset_columns: Optional[list[SigmaDatasetColumn]] = Field( - None, description="", alias="sigmaDatasetColumns" + + @preset_workspace_region.setter + def preset_workspace_region(self, preset_workspace_region: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace_region = preset_workspace_region + + @property + def preset_workspace_status(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.preset_workspace_status + ) + + @preset_workspace_status.setter + def preset_workspace_status(self, preset_workspace_status: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace_status = preset_workspace_status + + @property + def preset_workspace_deployment_id(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.preset_workspace_deployment_id + ) + + @preset_workspace_deployment_id.setter + def preset_workspace_deployment_id( + self, preset_workspace_deployment_id: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + 
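A minimal usage sketch of the create() factories added for the Preset hierarchy in this file. The connection qualified name and asset names below are hypothetical; the structural assumptions are only the ones the new code itself checks: the connection qualified name has three "/"-separated parts, its second part must be a valid AtlanConnectorType token (assumed here to be "preset"), and each child nests its qualified name under its parent's:

    from pyatlan.model.assets import PresetChart, PresetDashboard, PresetWorkspace

    workspace = PresetWorkspace.create(
        name="my-workspace",
        connection_qualified_name="default/preset/1234567890",
    )
    dashboard = PresetDashboard.create(
        name="my-dashboard",
        preset_workspace_qualified_name="default/preset/1234567890/my-workspace",
    )
    chart = PresetChart.create(
        name="my-chart",
        preset_dashboard_qualified_name="default/preset/1234567890/my-workspace/my-dashboard",
    )
    # qualified_name is built by appending the new asset's name to its parent's qualified name.
    assert dashboard.qualified_name == "default/preset/1234567890/my-workspace/my-dashboard"
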
self.attributes.preset_workspace_deployment_id = preset_workspace_deployment_id + + @property + def preset_workspace_dashboard_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.preset_workspace_dashboard_count + ) + + @preset_workspace_dashboard_count.setter + def preset_workspace_dashboard_count( + self, preset_workspace_dashboard_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace_dashboard_count = ( + preset_workspace_dashboard_count + ) + + @property + def preset_workspace_dataset_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.preset_workspace_dataset_count + ) + + @preset_workspace_dataset_count.setter + def preset_workspace_dataset_count( + self, preset_workspace_dataset_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace_dataset_count = preset_workspace_dataset_count + + @property + def preset_dashboards(self) -> Optional[list[PresetDashboard]]: + return None if self.attributes is None else self.attributes.preset_dashboards + + @preset_dashboards.setter + def preset_dashboards(self, preset_dashboards: Optional[list[PresetDashboard]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboards = preset_dashboards + + class Attributes(Preset.Attributes): + preset_workspace_public_dashboards_allowed: Optional[bool] = Field( + None, description="", alias="presetWorkspacePublicDashboardsAllowed" + ) + preset_workspace_cluster_id: Optional[int] = Field( + None, description="", alias="presetWorkspaceClusterId" + ) + preset_workspace_hostname: Optional[str] = Field( + None, description="", alias="presetWorkspaceHostname" + ) + preset_workspace_is_in_maintenance_mode: Optional[bool] = Field( + None, description="", alias="presetWorkspaceIsInMaintenanceMode" + ) + preset_workspace_region: Optional[str] = Field( + None, description="", alias="presetWorkspaceRegion" + ) + preset_workspace_status: Optional[str] = Field( + None, description="", alias="presetWorkspaceStatus" + ) + preset_workspace_deployment_id: Optional[int] = Field( + None, description="", alias="presetWorkspaceDeploymentId" + ) + preset_workspace_dashboard_count: Optional[int] = Field( + None, description="", alias="presetWorkspaceDashboardCount" + ) + preset_workspace_dataset_count: Optional[int] = Field( + None, description="", alias="presetWorkspaceDatasetCount" + ) + preset_dashboards: Optional[list[PresetDashboard]] = Field( + None, description="", alias="presetDashboards" ) # relationship - attributes: "SigmaDataset.Attributes" = Field( - default_factory=lambda: SigmaDataset.Attributes(), + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, connection_qualified_name: str + ) -> PresetWorkspace.Attributes: + validate_required_fields( + ["name", "connection_qualified_name"], [name, connection_qualified_name] + ) + + # Split the connection_qualified_name to extract necessary information + fields = connection_qualified_name.split("/") + if len(fields) != 3: + raise ValueError("Invalid connection_qualified_name") + + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid connection_qualified_name") from e + + return PresetWorkspace.Attributes( + name=name, + qualified_name=f"{connection_qualified_name}/{name}", + 
connection_qualified_name=connection_qualified_name, + connector_name=connector_type.value, + ) + + attributes: "PresetWorkspace.Attributes" = Field( + default_factory=lambda: PresetWorkspace.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -SigmaDatasetColumn.Attributes.update_forward_refs() +PresetChart.Attributes.update_forward_refs() + + +PresetDataset.Attributes.update_forward_refs() + + +PresetDashboard.Attributes.update_forward_refs() -SigmaDataset.Attributes.update_forward_refs() +PresetWorkspace.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset66.py b/pyatlan/model/assets/asset66.py index 08f71ffd1..c995201ba 100644 --- a/pyatlan/model/assets/asset66.py +++ b/pyatlan/model/assets/asset66.py @@ -4,6 +4,7 @@ from __future__ import annotations +from datetime import datetime from typing import ClassVar, Optional from pydantic import Field, validator @@ -16,426 +17,585 @@ TextField, ) -from .asset41 import Sigma +from .asset41 import Mode -class SigmaWorkbook(Sigma): +class ModeReport(Mode): """Description""" - type_name: str = Field("SigmaWorkbook", allow_mutation=False) + type_name: str = Field("ModeReport", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "SigmaWorkbook": - raise ValueError("must be SigmaWorkbook") + if v != "ModeReport": + raise ValueError("must be ModeReport") return v def __setattr__(self, name, value): - if name in SigmaWorkbook._convenience_properties: + if name in ModeReport._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - SIGMA_PAGE_COUNT: ClassVar[NumericField] = NumericField( - "sigmaPageCount", "sigmaPageCount" + MODE_COLLECTION_TOKEN: ClassVar[KeywordField] = KeywordField( + "modeCollectionToken", "modeCollectionToken" ) """ - TBC + + """ + MODE_REPORT_PUBLISHED_AT: ClassVar[NumericField] = NumericField( + "modeReportPublishedAt", "modeReportPublishedAt" + ) + """ + + """ + MODE_QUERY_COUNT: ClassVar[NumericField] = NumericField( + "modeQueryCount", "modeQueryCount" + ) + """ + + """ + MODE_CHART_COUNT: ClassVar[NumericField] = NumericField( + "modeChartCount", "modeChartCount" + ) + """ + + """ + MODE_QUERY_PREVIEW: ClassVar[TextField] = TextField( + "modeQueryPreview", "modeQueryPreview" + ) """ - SIGMA_PAGES: ClassVar[RelationField] = RelationField("sigmaPages") + """ + MODE_IS_PUBLIC: ClassVar[BooleanField] = BooleanField( + "modeIsPublic", "modeIsPublic" + ) + """ + + """ + MODE_IS_SHARED: ClassVar[BooleanField] = BooleanField( + "modeIsShared", "modeIsShared" + ) + """ + + """ + + MODE_QUERIES: ClassVar[RelationField] = RelationField("modeQueries") + """ + TBC + """ + MODE_COLLECTIONS: ClassVar[RelationField] = RelationField("modeCollections") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "sigma_page_count", - "sigma_pages", + "mode_collection_token", + "mode_report_published_at", + "mode_query_count", + "mode_chart_count", + "mode_query_preview", + "mode_is_public", + "mode_is_shared", + "mode_queries", + "mode_collections", ] @property - def sigma_page_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.sigma_page_count + def mode_collection_token(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.mode_collection_token + ) - @sigma_page_count.setter - def sigma_page_count(self, sigma_page_count: 
Optional[int]): + @mode_collection_token.setter + def mode_collection_token(self, mode_collection_token: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_page_count = sigma_page_count + self.attributes.mode_collection_token = mode_collection_token @property - def sigma_pages(self) -> Optional[list[SigmaPage]]: - return None if self.attributes is None else self.attributes.sigma_pages + def mode_report_published_at(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.mode_report_published_at + ) + + @mode_report_published_at.setter + def mode_report_published_at(self, mode_report_published_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_report_published_at = mode_report_published_at - @sigma_pages.setter - def sigma_pages(self, sigma_pages: Optional[list[SigmaPage]]): + @property + def mode_query_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.mode_query_count + + @mode_query_count.setter + def mode_query_count(self, mode_query_count: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_pages = sigma_pages + self.attributes.mode_query_count = mode_query_count - class Attributes(Sigma.Attributes): - sigma_page_count: Optional[int] = Field( - None, description="", alias="sigmaPageCount" + @property + def mode_chart_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.mode_chart_count + + @mode_chart_count.setter + def mode_chart_count(self, mode_chart_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_chart_count = mode_chart_count + + @property + def mode_query_preview(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mode_query_preview + + @mode_query_preview.setter + def mode_query_preview(self, mode_query_preview: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_query_preview = mode_query_preview + + @property + def mode_is_public(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.mode_is_public + + @mode_is_public.setter + def mode_is_public(self, mode_is_public: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_is_public = mode_is_public + + @property + def mode_is_shared(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.mode_is_shared + + @mode_is_shared.setter + def mode_is_shared(self, mode_is_shared: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_is_shared = mode_is_shared + + @property + def mode_queries(self) -> Optional[list[ModeQuery]]: + return None if self.attributes is None else self.attributes.mode_queries + + @mode_queries.setter + def mode_queries(self, mode_queries: Optional[list[ModeQuery]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_queries = mode_queries + + @property + def mode_collections(self) -> Optional[list[ModeCollection]]: + return None if self.attributes is None else self.attributes.mode_collections + + @mode_collections.setter + def mode_collections(self, mode_collections: Optional[list[ModeCollection]]): + if self.attributes is None: + self.attributes = self.Attributes() 
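The Mode report/query/chart relationship attributes added in this hunk can be wired directly through the generated properties. A rough sketch with hypothetical asset names (local object wiring only; nothing is sent to Atlan here):

    from pyatlan.model.assets import ModeChart, ModeQuery, ModeReport

    report = ModeReport(attributes=ModeReport.Attributes(name="Weekly KPIs"))
    query = ModeQuery(attributes=ModeQuery.Attributes(name="kpi_rollup"))
    chart = ModeChart(attributes=ModeChart.Attributes(name="KPI trend"))

    report.mode_queries = [query]   # ModeReport -> its ModeQuery assets
    query.mode_report = report      # ModeQuery -> owning ModeReport
    query.mode_charts = [chart]     # ModeQuery -> its ModeChart assets
    chart.mode_query = query        # ModeChart -> owning ModeQuery
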
+ self.attributes.mode_collections = mode_collections + + class Attributes(Mode.Attributes): + mode_collection_token: Optional[str] = Field( + None, description="", alias="modeCollectionToken" + ) + mode_report_published_at: Optional[datetime] = Field( + None, description="", alias="modeReportPublishedAt" + ) + mode_query_count: Optional[int] = Field( + None, description="", alias="modeQueryCount" + ) + mode_chart_count: Optional[int] = Field( + None, description="", alias="modeChartCount" + ) + mode_query_preview: Optional[str] = Field( + None, description="", alias="modeQueryPreview" + ) + mode_is_public: Optional[bool] = Field( + None, description="", alias="modeIsPublic" ) - sigma_pages: Optional[list[SigmaPage]] = Field( - None, description="", alias="sigmaPages" + mode_is_shared: Optional[bool] = Field( + None, description="", alias="modeIsShared" + ) + mode_queries: Optional[list[ModeQuery]] = Field( + None, description="", alias="modeQueries" + ) # relationship + mode_collections: Optional[list[ModeCollection]] = Field( + None, description="", alias="modeCollections" ) # relationship - attributes: "SigmaWorkbook.Attributes" = Field( - default_factory=lambda: SigmaWorkbook.Attributes(), + attributes: "ModeReport.Attributes" = Field( + default_factory=lambda: ModeReport.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class SigmaDataElementField(Sigma): +class ModeQuery(Mode): """Description""" - type_name: str = Field("SigmaDataElementField", allow_mutation=False) + type_name: str = Field("ModeQuery", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "SigmaDataElementField": - raise ValueError("must be SigmaDataElementField") + if v != "ModeQuery": + raise ValueError("must be ModeQuery") return v def __setattr__(self, name, value): - if name in SigmaDataElementField._convenience_properties: + if name in ModeQuery._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - SIGMA_DATA_ELEMENT_FIELD_IS_HIDDEN: ClassVar[BooleanField] = BooleanField( - "sigmaDataElementFieldIsHidden", "sigmaDataElementFieldIsHidden" - ) + MODE_RAW_QUERY: ClassVar[TextField] = TextField("modeRawQuery", "modeRawQuery") """ - TBC + """ - SIGMA_DATA_ELEMENT_FIELD_FORMULA: ClassVar[TextField] = TextField( - "sigmaDataElementFieldFormula", "sigmaDataElementFieldFormula" + MODE_REPORT_IMPORT_COUNT: ClassVar[NumericField] = NumericField( + "modeReportImportCount", "modeReportImportCount" ) """ - TBC + """ - SIGMA_DATA_ELEMENT: ClassVar[RelationField] = RelationField("sigmaDataElement") + MODE_CHARTS: ClassVar[RelationField] = RelationField("modeCharts") + """ + TBC + """ + MODE_REPORT: ClassVar[RelationField] = RelationField("modeReport") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "sigma_data_element_field_is_hidden", - "sigma_data_element_field_formula", - "sigma_data_element", + "mode_raw_query", + "mode_report_import_count", + "mode_charts", + "mode_report", ] @property - def sigma_data_element_field_is_hidden(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.sigma_data_element_field_is_hidden - ) + def mode_raw_query(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mode_raw_query - @sigma_data_element_field_is_hidden.setter - def sigma_data_element_field_is_hidden( - self, 
sigma_data_element_field_is_hidden: Optional[bool] - ): + @mode_raw_query.setter + def mode_raw_query(self, mode_raw_query: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_data_element_field_is_hidden = ( - sigma_data_element_field_is_hidden - ) + self.attributes.mode_raw_query = mode_raw_query @property - def sigma_data_element_field_formula(self) -> Optional[str]: + def mode_report_import_count(self) -> Optional[int]: return ( None if self.attributes is None - else self.attributes.sigma_data_element_field_formula + else self.attributes.mode_report_import_count ) - @sigma_data_element_field_formula.setter - def sigma_data_element_field_formula( - self, sigma_data_element_field_formula: Optional[str] - ): + @mode_report_import_count.setter + def mode_report_import_count(self, mode_report_import_count: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_data_element_field_formula = ( - sigma_data_element_field_formula - ) + self.attributes.mode_report_import_count = mode_report_import_count + + @property + def mode_charts(self) -> Optional[list[ModeChart]]: + return None if self.attributes is None else self.attributes.mode_charts + + @mode_charts.setter + def mode_charts(self, mode_charts: Optional[list[ModeChart]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_charts = mode_charts @property - def sigma_data_element(self) -> Optional[SigmaDataElement]: - return None if self.attributes is None else self.attributes.sigma_data_element + def mode_report(self) -> Optional[ModeReport]: + return None if self.attributes is None else self.attributes.mode_report - @sigma_data_element.setter - def sigma_data_element(self, sigma_data_element: Optional[SigmaDataElement]): + @mode_report.setter + def mode_report(self, mode_report: Optional[ModeReport]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_data_element = sigma_data_element + self.attributes.mode_report = mode_report - class Attributes(Sigma.Attributes): - sigma_data_element_field_is_hidden: Optional[bool] = Field( - None, description="", alias="sigmaDataElementFieldIsHidden" + class Attributes(Mode.Attributes): + mode_raw_query: Optional[str] = Field( + None, description="", alias="modeRawQuery" ) - sigma_data_element_field_formula: Optional[str] = Field( - None, description="", alias="sigmaDataElementFieldFormula" + mode_report_import_count: Optional[int] = Field( + None, description="", alias="modeReportImportCount" ) - sigma_data_element: Optional[SigmaDataElement] = Field( - None, description="", alias="sigmaDataElement" + mode_charts: Optional[list[ModeChart]] = Field( + None, description="", alias="modeCharts" + ) # relationship + mode_report: Optional[ModeReport] = Field( + None, description="", alias="modeReport" ) # relationship - attributes: "SigmaDataElementField.Attributes" = Field( - default_factory=lambda: SigmaDataElementField.Attributes(), + attributes: "ModeQuery.Attributes" = Field( + default_factory=lambda: ModeQuery.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class SigmaPage(Sigma): +class ModeChart(Mode): """Description""" - type_name: str = Field("SigmaPage", allow_mutation=False) + type_name: str = Field("ModeChart", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "SigmaPage": - raise ValueError("must be SigmaPage") + if v != "ModeChart": + raise ValueError("must be ModeChart") return v def __setattr__(self, name, value): - if name in SigmaPage._convenience_properties: + if name in ModeChart._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - SIGMA_DATA_ELEMENT_COUNT: ClassVar[NumericField] = NumericField( - "sigmaDataElementCount", "sigmaDataElementCount" + MODE_CHART_TYPE: ClassVar[KeywordField] = KeywordField( + "modeChartType", "modeChartType" ) """ - TBC + Type of chart. """ - SIGMA_DATA_ELEMENTS: ClassVar[RelationField] = RelationField("sigmaDataElements") - """ - TBC - """ - SIGMA_WORKBOOK: ClassVar[RelationField] = RelationField("sigmaWorkbook") + MODE_QUERY: ClassVar[RelationField] = RelationField("modeQuery") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "sigma_data_element_count", - "sigma_data_elements", - "sigma_workbook", + "mode_chart_type", + "mode_query", ] @property - def sigma_data_element_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.sigma_data_element_count - ) + def mode_chart_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mode_chart_type - @sigma_data_element_count.setter - def sigma_data_element_count(self, sigma_data_element_count: Optional[int]): + @mode_chart_type.setter + def mode_chart_type(self, mode_chart_type: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_data_element_count = sigma_data_element_count + self.attributes.mode_chart_type = mode_chart_type @property - def sigma_data_elements(self) -> Optional[list[SigmaDataElement]]: - return None if self.attributes is None else self.attributes.sigma_data_elements + def mode_query(self) -> Optional[ModeQuery]: + return None if self.attributes is None else self.attributes.mode_query - @sigma_data_elements.setter - def sigma_data_elements( - self, sigma_data_elements: Optional[list[SigmaDataElement]] - ): + @mode_query.setter + def mode_query(self, mode_query: Optional[ModeQuery]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_data_elements = sigma_data_elements + self.attributes.mode_query = mode_query + + class Attributes(Mode.Attributes): + mode_chart_type: Optional[str] = Field( + None, description="", alias="modeChartType" + ) + mode_query: Optional[ModeQuery] = Field( + None, description="", alias="modeQuery" + ) # relationship + + attributes: "ModeChart.Attributes" = Field( + default_factory=lambda: ModeChart.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class ModeWorkspace(Mode): + """Description""" + + type_name: str = Field("ModeWorkspace", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "ModeWorkspace": + raise ValueError("must be ModeWorkspace") + return v + + def __setattr__(self, name, value): + if name in ModeWorkspace._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MODE_COLLECTION_COUNT: ClassVar[NumericField] = NumericField( + "modeCollectionCount", "modeCollectionCount" + ) + """ + Number of collections in this workspace. + """ + + MODE_COLLECTIONS: ClassVar[RelationField] = RelationField("modeCollections") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "mode_collection_count", + "mode_collections", + ] @property - def sigma_workbook(self) -> Optional[SigmaWorkbook]: - return None if self.attributes is None else self.attributes.sigma_workbook + def mode_collection_count(self) -> Optional[int]: + return ( + None if self.attributes is None else self.attributes.mode_collection_count + ) - @sigma_workbook.setter - def sigma_workbook(self, sigma_workbook: Optional[SigmaWorkbook]): + @mode_collection_count.setter + def mode_collection_count(self, mode_collection_count: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_workbook = sigma_workbook + self.attributes.mode_collection_count = mode_collection_count + + @property + def mode_collections(self) -> Optional[list[ModeCollection]]: + return None if self.attributes is None else self.attributes.mode_collections - class Attributes(Sigma.Attributes): - sigma_data_element_count: Optional[int] = Field( - None, description="", alias="sigmaDataElementCount" + @mode_collections.setter + def mode_collections(self, mode_collections: Optional[list[ModeCollection]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_collections = mode_collections + + class Attributes(Mode.Attributes): + mode_collection_count: Optional[int] = Field( + None, description="", alias="modeCollectionCount" ) - sigma_data_elements: Optional[list[SigmaDataElement]] = Field( - None, description="", alias="sigmaDataElements" - ) # relationship - sigma_workbook: Optional[SigmaWorkbook] = Field( - None, description="", alias="sigmaWorkbook" + mode_collections: Optional[list[ModeCollection]] = Field( + None, description="", alias="modeCollections" ) # relationship - attributes: "SigmaPage.Attributes" = Field( - default_factory=lambda: SigmaPage.Attributes(), + attributes: "ModeWorkspace.Attributes" = Field( + default_factory=lambda: ModeWorkspace.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class SigmaDataElement(Sigma): +class ModeCollection(Mode): """Description""" - type_name: str = Field("SigmaDataElement", allow_mutation=False) + type_name: str = Field("ModeCollection", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "SigmaDataElement": - raise ValueError("must be SigmaDataElement") + if v != "ModeCollection": + raise ValueError("must be ModeCollection") return v def __setattr__(self, name, value): - if name in SigmaDataElement._convenience_properties: + if name in ModeCollection._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - SIGMA_DATA_ELEMENT_QUERY: ClassVar[KeywordField] = KeywordField( - "sigmaDataElementQuery", "sigmaDataElementQuery" - ) - """ - TBC - """ - SIGMA_DATA_ELEMENT_TYPE: ClassVar[KeywordField] = KeywordField( - "sigmaDataElementType", "sigmaDataElementType" + MODE_COLLECTION_TYPE: ClassVar[KeywordField] = KeywordField( + "modeCollectionType", "modeCollectionType" ) """ - TBC + Type of this collection. """ - SIGMA_DATA_ELEMENT_FIELD_COUNT: ClassVar[NumericField] = NumericField( - "sigmaDataElementFieldCount", "sigmaDataElementFieldCount" + MODE_COLLECTION_STATE: ClassVar[KeywordField] = KeywordField( + "modeCollectionState", "modeCollectionState" ) """ - TBC + State of this collection. """ - SIGMA_PAGE: ClassVar[RelationField] = RelationField("sigmaPage") + MODE_WORKSPACE: ClassVar[RelationField] = RelationField("modeWorkspace") """ TBC """ - SIGMA_DATA_ELEMENT_FIELDS: ClassVar[RelationField] = RelationField( - "sigmaDataElementFields" - ) + MODE_REPORTS: ClassVar[RelationField] = RelationField("modeReports") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "sigma_data_element_query", - "sigma_data_element_type", - "sigma_data_element_field_count", - "sigma_page", - "sigma_data_element_fields", + "mode_collection_type", + "mode_collection_state", + "mode_workspace", + "mode_reports", ] @property - def sigma_data_element_query(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sigma_data_element_query - ) + def mode_collection_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mode_collection_type - @sigma_data_element_query.setter - def sigma_data_element_query(self, sigma_data_element_query: Optional[str]): + @mode_collection_type.setter + def mode_collection_type(self, mode_collection_type: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_data_element_query = sigma_data_element_query + self.attributes.mode_collection_type = mode_collection_type @property - def sigma_data_element_type(self) -> Optional[str]: + def mode_collection_state(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.sigma_data_element_type + None if self.attributes is None else self.attributes.mode_collection_state ) - @sigma_data_element_type.setter - def sigma_data_element_type(self, sigma_data_element_type: Optional[str]): + @mode_collection_state.setter + def mode_collection_state(self, mode_collection_state: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_data_element_type = sigma_data_element_type + self.attributes.mode_collection_state = mode_collection_state @property - def sigma_data_element_field_count(self) -> Optional[int]: - 
return ( - None - if self.attributes is None - else self.attributes.sigma_data_element_field_count - ) + def mode_workspace(self) -> Optional[ModeWorkspace]: + return None if self.attributes is None else self.attributes.mode_workspace - @sigma_data_element_field_count.setter - def sigma_data_element_field_count( - self, sigma_data_element_field_count: Optional[int] - ): + @mode_workspace.setter + def mode_workspace(self, mode_workspace: Optional[ModeWorkspace]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_data_element_field_count = sigma_data_element_field_count + self.attributes.mode_workspace = mode_workspace @property - def sigma_page(self) -> Optional[SigmaPage]: - return None if self.attributes is None else self.attributes.sigma_page + def mode_reports(self) -> Optional[list[ModeReport]]: + return None if self.attributes is None else self.attributes.mode_reports - @sigma_page.setter - def sigma_page(self, sigma_page: Optional[SigmaPage]): + @mode_reports.setter + def mode_reports(self, mode_reports: Optional[list[ModeReport]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sigma_page = sigma_page + self.attributes.mode_reports = mode_reports - @property - def sigma_data_element_fields(self) -> Optional[list[SigmaDataElementField]]: - return ( - None - if self.attributes is None - else self.attributes.sigma_data_element_fields - ) - - @sigma_data_element_fields.setter - def sigma_data_element_fields( - self, sigma_data_element_fields: Optional[list[SigmaDataElementField]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_data_element_fields = sigma_data_element_fields - - class Attributes(Sigma.Attributes): - sigma_data_element_query: Optional[str] = Field( - None, description="", alias="sigmaDataElementQuery" - ) - sigma_data_element_type: Optional[str] = Field( - None, description="", alias="sigmaDataElementType" + class Attributes(Mode.Attributes): + mode_collection_type: Optional[str] = Field( + None, description="", alias="modeCollectionType" ) - sigma_data_element_field_count: Optional[int] = Field( - None, description="", alias="sigmaDataElementFieldCount" + mode_collection_state: Optional[str] = Field( + None, description="", alias="modeCollectionState" ) - sigma_page: Optional[SigmaPage] = Field( - None, description="", alias="sigmaPage" + mode_workspace: Optional[ModeWorkspace] = Field( + None, description="", alias="modeWorkspace" ) # relationship - sigma_data_element_fields: Optional[list[SigmaDataElementField]] = Field( - None, description="", alias="sigmaDataElementFields" + mode_reports: Optional[list[ModeReport]] = Field( + None, description="", alias="modeReports" ) # relationship - attributes: "SigmaDataElement.Attributes" = Field( - default_factory=lambda: SigmaDataElement.Attributes(), + attributes: "ModeCollection.Attributes" = Field( + default_factory=lambda: ModeCollection.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -SigmaWorkbook.Attributes.update_forward_refs() +ModeReport.Attributes.update_forward_refs() + + +ModeQuery.Attributes.update_forward_refs() -SigmaDataElementField.Attributes.update_forward_refs() +ModeChart.Attributes.update_forward_refs() -SigmaPage.Attributes.update_forward_refs() +ModeWorkspace.Attributes.update_forward_refs() -SigmaDataElement.Attributes.update_forward_refs() +ModeCollection.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset67.py b/pyatlan/model/assets/asset67.py index cb29ddcb4..fd0cbfcdf 100644 --- a/pyatlan/model/assets/asset67.py +++ b/pyatlan/model/assets/asset67.py @@ -9,2192 +9,187 @@ from pydantic import Field, validator from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, KeywordTextField, + NumericField, RelationField, ) -from .asset42 import Tableau +from .asset42 import Sigma -class TableauWorkbook(Tableau): +class SigmaDatasetColumn(Sigma): """Description""" - type_name: str = Field("TableauWorkbook", allow_mutation=False) + type_name: str = Field("SigmaDatasetColumn", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "TableauWorkbook": - raise ValueError("must be TableauWorkbook") + if v != "SigmaDatasetColumn": + raise ValueError("must be SigmaDatasetColumn") return v def __setattr__(self, name, value): - if name in TableauWorkbook._convenience_properties: + if name in SigmaDatasetColumn._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" + SIGMA_DATASET_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "sigmaDatasetQualifiedName", + "sigmaDatasetQualifiedName", + "sigmaDatasetQualifiedName.text", ) """ - TBC - """ - PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "projectQualifiedName", "projectQualifiedName" - ) - """ - TBC - """ - TOP_LEVEL_PROJECT_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectName", "topLevelProjectName" - ) - """ - TBC - """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" - ) - """ - TBC - """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" - ) - """ - TBC - """ - - PROJECT: ClassVar[RelationField] = RelationField("project") - """ - TBC - """ - DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") - """ - TBC - """ - WORKSHEETS: ClassVar[RelationField] = RelationField("worksheets") - """ - TBC - """ - DATASOURCES: ClassVar[RelationField] = RelationField("datasources") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "project_qualified_name", - "top_level_project_name", - "top_level_project_qualified_name", - "project_hierarchy", - "project", - "dashboards", - "worksheets", - "datasources", - ] - - @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name - - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name - - @property - def project_qualified_name(self) -> Optional[str]: - return ( 
- None if self.attributes is None else self.attributes.project_qualified_name - ) - - @project_qualified_name.setter - def project_qualified_name(self, project_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_qualified_name = project_qualified_name - - @property - def top_level_project_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.top_level_project_name - ) - - @top_level_project_name.setter - def top_level_project_name(self, top_level_project_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_name = top_level_project_name - - @property - def top_level_project_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.top_level_project_qualified_name - ) - - @top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name - ) - - @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy - - @project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_hierarchy = project_hierarchy - - @property - def project(self) -> Optional[TableauProject]: - return None if self.attributes is None else self.attributes.project - - @project.setter - def project(self, project: Optional[TableauProject]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project = project - - @property - def dashboards(self) -> Optional[list[TableauDashboard]]: - return None if self.attributes is None else self.attributes.dashboards - - @dashboards.setter - def dashboards(self, dashboards: Optional[list[TableauDashboard]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboards = dashboards - - @property - def worksheets(self) -> Optional[list[TableauWorksheet]]: - return None if self.attributes is None else self.attributes.worksheets - - @worksheets.setter - def worksheets(self, worksheets: Optional[list[TableauWorksheet]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.worksheets = worksheets - - @property - def datasources(self) -> Optional[list[TableauDatasource]]: - return None if self.attributes is None else self.attributes.datasources - - @datasources.setter - def datasources(self, datasources: Optional[list[TableauDatasource]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasources = datasources - - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - project_qualified_name: Optional[str] = Field( - None, description="", alias="projectQualifiedName" - ) - top_level_project_name: Optional[str] = Field( - None, description="", alias="topLevelProjectName" - ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" - ) - project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", 
alias="projectHierarchy" - ) - project: Optional[TableauProject] = Field( - None, description="", alias="project" - ) # relationship - dashboards: Optional[list[TableauDashboard]] = Field( - None, description="", alias="dashboards" - ) # relationship - worksheets: Optional[list[TableauWorksheet]] = Field( - None, description="", alias="worksheets" - ) # relationship - datasources: Optional[list[TableauDatasource]] = Field( - None, description="", alias="datasources" - ) # relationship - - attributes: "TableauWorkbook.Attributes" = Field( - default_factory=lambda: TableauWorkbook.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class TableauDatasourceField(Tableau): - """Description""" - - type_name: str = Field("TableauDatasourceField", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TableauDatasourceField": - raise ValueError("must be TableauDatasourceField") - return v - - def __setattr__(self, name, value): - if name in TableauDatasourceField._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" - ) - """ - TBC - """ - PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "projectQualifiedName", "projectQualifiedName" - ) - """ - TBC - """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" - ) - """ - TBC - """ - WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workbookQualifiedName", "workbookQualifiedName" - ) - """ - TBC - """ - DATASOURCE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "datasourceQualifiedName", "datasourceQualifiedName" - ) - """ - TBC - """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" - ) - """ - TBC - """ - FULLY_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "fullyQualifiedName", "fullyQualifiedName" - ) - """ - TBC - """ - TABLEAU_DATASOURCE_FIELD_DATA_CATEGORY: ClassVar[KeywordField] = KeywordField( - "tableauDatasourceFieldDataCategory", "tableauDatasourceFieldDataCategory" - ) - """ - TBC - """ - TABLEAU_DATASOURCE_FIELD_ROLE: ClassVar[KeywordField] = KeywordField( - "tableauDatasourceFieldRole", "tableauDatasourceFieldRole" - ) - """ - TBC - """ - TABLEAU_DATASOURCE_FIELD_DATA_TYPE: ClassVar[KeywordTextField] = KeywordTextField( - "tableauDatasourceFieldDataType", - "tableauDatasourceFieldDataType", - "tableauDatasourceFieldDataType.text", - ) - """ - TBC - """ - UPSTREAM_TABLES: ClassVar[KeywordField] = KeywordField( - "upstreamTables", "upstreamTables" - ) - """ - TBC - """ - TABLEAU_DATASOURCE_FIELD_FORMULA: ClassVar[KeywordField] = KeywordField( - "tableauDatasourceFieldFormula", "tableauDatasourceFieldFormula" - ) - """ - TBC - """ - TABLEAU_DATASOURCE_FIELD_BIN_SIZE: ClassVar[KeywordField] = KeywordField( - "tableauDatasourceFieldBinSize", "tableauDatasourceFieldBinSize" - ) - """ - TBC - """ - UPSTREAM_COLUMNS: ClassVar[KeywordField] = KeywordField( - "upstreamColumns", "upstreamColumns" - ) - """ - TBC - """ - UPSTREAM_FIELDS: ClassVar[KeywordField] = KeywordField( - "upstreamFields", "upstreamFields" - ) - """ - TBC - """ - DATASOURCE_FIELD_TYPE: ClassVar[KeywordField] = KeywordField( - "datasourceFieldType", 
"datasourceFieldType" - ) - """ - TBC - """ - - WORKSHEETS: ClassVar[RelationField] = RelationField("worksheets") - """ - TBC - """ - DATASOURCE: ClassVar[RelationField] = RelationField("datasource") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "project_qualified_name", - "top_level_project_qualified_name", - "workbook_qualified_name", - "datasource_qualified_name", - "project_hierarchy", - "fully_qualified_name", - "tableau_datasource_field_data_category", - "tableau_datasource_field_role", - "tableau_datasource_field_data_type", - "upstream_tables", - "tableau_datasource_field_formula", - "tableau_datasource_field_bin_size", - "upstream_columns", - "upstream_fields", - "datasource_field_type", - "worksheets", - "datasource", - ] - - @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name - - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name - - @property - def project_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.project_qualified_name - ) - - @project_qualified_name.setter - def project_qualified_name(self, project_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_qualified_name = project_qualified_name - - @property - def top_level_project_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.top_level_project_qualified_name - ) - - @top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name - ) - - @property - def workbook_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.workbook_qualified_name - ) - - @workbook_qualified_name.setter - def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbook_qualified_name = workbook_qualified_name - - @property - def datasource_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.datasource_qualified_name - ) - - @datasource_qualified_name.setter - def datasource_qualified_name(self, datasource_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasource_qualified_name = datasource_qualified_name - - @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy - - @project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_hierarchy = project_hierarchy - - @property - def fully_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.fully_qualified_name - - @fully_qualified_name.setter - def fully_qualified_name(self, fully_qualified_name: Optional[str]): - 
if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.fully_qualified_name = fully_qualified_name - - @property - def tableau_datasource_field_data_category(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.tableau_datasource_field_data_category - ) - - @tableau_datasource_field_data_category.setter - def tableau_datasource_field_data_category( - self, tableau_datasource_field_data_category: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tableau_datasource_field_data_category = ( - tableau_datasource_field_data_category - ) - - @property - def tableau_datasource_field_role(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.tableau_datasource_field_role - ) - - @tableau_datasource_field_role.setter - def tableau_datasource_field_role( - self, tableau_datasource_field_role: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tableau_datasource_field_role = tableau_datasource_field_role - - @property - def tableau_datasource_field_data_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.tableau_datasource_field_data_type - ) - - @tableau_datasource_field_data_type.setter - def tableau_datasource_field_data_type( - self, tableau_datasource_field_data_type: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tableau_datasource_field_data_type = ( - tableau_datasource_field_data_type - ) - - @property - def upstream_tables(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.upstream_tables - - @upstream_tables.setter - def upstream_tables(self, upstream_tables: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.upstream_tables = upstream_tables - - @property - def tableau_datasource_field_formula(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.tableau_datasource_field_formula - ) - - @tableau_datasource_field_formula.setter - def tableau_datasource_field_formula( - self, tableau_datasource_field_formula: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tableau_datasource_field_formula = ( - tableau_datasource_field_formula - ) - - @property - def tableau_datasource_field_bin_size(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.tableau_datasource_field_bin_size - ) - - @tableau_datasource_field_bin_size.setter - def tableau_datasource_field_bin_size( - self, tableau_datasource_field_bin_size: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tableau_datasource_field_bin_size = ( - tableau_datasource_field_bin_size - ) - - @property - def upstream_columns(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.upstream_columns - - @upstream_columns.setter - def upstream_columns(self, upstream_columns: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.upstream_columns = upstream_columns - - @property - def upstream_fields(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.upstream_fields - - 
@upstream_fields.setter - def upstream_fields(self, upstream_fields: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.upstream_fields = upstream_fields - - @property - def datasource_field_type(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.datasource_field_type - ) - - @datasource_field_type.setter - def datasource_field_type(self, datasource_field_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasource_field_type = datasource_field_type - - @property - def worksheets(self) -> Optional[list[TableauWorksheet]]: - return None if self.attributes is None else self.attributes.worksheets - - @worksheets.setter - def worksheets(self, worksheets: Optional[list[TableauWorksheet]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.worksheets = worksheets - - @property - def datasource(self) -> Optional[TableauDatasource]: - return None if self.attributes is None else self.attributes.datasource - - @datasource.setter - def datasource(self, datasource: Optional[TableauDatasource]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasource = datasource - - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - project_qualified_name: Optional[str] = Field( - None, description="", alias="projectQualifiedName" - ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" - ) - workbook_qualified_name: Optional[str] = Field( - None, description="", alias="workbookQualifiedName" - ) - datasource_qualified_name: Optional[str] = Field( - None, description="", alias="datasourceQualifiedName" - ) - project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="projectHierarchy" - ) - fully_qualified_name: Optional[str] = Field( - None, description="", alias="fullyQualifiedName" - ) - tableau_datasource_field_data_category: Optional[str] = Field( - None, description="", alias="tableauDatasourceFieldDataCategory" - ) - tableau_datasource_field_role: Optional[str] = Field( - None, description="", alias="tableauDatasourceFieldRole" - ) - tableau_datasource_field_data_type: Optional[str] = Field( - None, description="", alias="tableauDatasourceFieldDataType" - ) - upstream_tables: Optional[list[dict[str, str]]] = Field( - None, description="", alias="upstreamTables" - ) - tableau_datasource_field_formula: Optional[str] = Field( - None, description="", alias="tableauDatasourceFieldFormula" - ) - tableau_datasource_field_bin_size: Optional[str] = Field( - None, description="", alias="tableauDatasourceFieldBinSize" - ) - upstream_columns: Optional[list[dict[str, str]]] = Field( - None, description="", alias="upstreamColumns" - ) - upstream_fields: Optional[list[dict[str, str]]] = Field( - None, description="", alias="upstreamFields" - ) - datasource_field_type: Optional[str] = Field( - None, description="", alias="datasourceFieldType" - ) - worksheets: Optional[list[TableauWorksheet]] = Field( - None, description="", alias="worksheets" - ) # relationship - datasource: Optional[TableauDatasource] = Field( - None, description="", alias="datasource" - ) # relationship - - attributes: "TableauDatasourceField.Attributes" = Field( - default_factory=lambda: TableauDatasourceField.Attributes(), - 
description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class TableauCalculatedField(Tableau): - """Description""" - - type_name: str = Field("TableauCalculatedField", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TableauCalculatedField": - raise ValueError("must be TableauCalculatedField") - return v - - def __setattr__(self, name, value): - if name in TableauCalculatedField._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" - ) - """ - TBC - """ - PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "projectQualifiedName", "projectQualifiedName" - ) - """ - TBC - """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" - ) - """ - TBC - """ - WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workbookQualifiedName", "workbookQualifiedName" - ) - """ - TBC - """ - DATASOURCE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "datasourceQualifiedName", "datasourceQualifiedName" - ) - """ - TBC - """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" - ) - """ - TBC - """ - DATA_CATEGORY: ClassVar[KeywordField] = KeywordField("dataCategory", "dataCategory") - """ - TBC - """ - ROLE: ClassVar[KeywordField] = KeywordField("role", "role") - """ - TBC - """ - TABLEAU_DATA_TYPE: ClassVar[KeywordTextField] = KeywordTextField( - "tableauDataType", "tableauDataType", "tableauDataType.text" - ) - """ - TBC - """ - FORMULA: ClassVar[KeywordField] = KeywordField("formula", "formula") - """ - TBC - """ - UPSTREAM_FIELDS: ClassVar[KeywordField] = KeywordField( - "upstreamFields", "upstreamFields" - ) - """ - TBC - """ - - WORKSHEETS: ClassVar[RelationField] = RelationField("worksheets") - """ - TBC - """ - DATASOURCE: ClassVar[RelationField] = RelationField("datasource") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "project_qualified_name", - "top_level_project_qualified_name", - "workbook_qualified_name", - "datasource_qualified_name", - "project_hierarchy", - "data_category", - "role", - "tableau_data_type", - "formula", - "upstream_fields", - "worksheets", - "datasource", - ] - - @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name - - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name - - @property - def project_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.project_qualified_name - ) - - @project_qualified_name.setter - def project_qualified_name(self, project_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_qualified_name = project_qualified_name - - @property - def top_level_project_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.top_level_project_qualified_name - ) - - 
@top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name - ) - - @property - def workbook_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.workbook_qualified_name - ) - - @workbook_qualified_name.setter - def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbook_qualified_name = workbook_qualified_name - - @property - def datasource_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.datasource_qualified_name - ) - - @datasource_qualified_name.setter - def datasource_qualified_name(self, datasource_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasource_qualified_name = datasource_qualified_name - - @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy - - @project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_hierarchy = project_hierarchy - - @property - def data_category(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.data_category - - @data_category.setter - def data_category(self, data_category: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.data_category = data_category - - @property - def role(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.role - - @role.setter - def role(self, role: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.role = role - - @property - def tableau_data_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.tableau_data_type - - @tableau_data_type.setter - def tableau_data_type(self, tableau_data_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tableau_data_type = tableau_data_type - - @property - def formula(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.formula - - @formula.setter - def formula(self, formula: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.formula = formula - - @property - def upstream_fields(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.upstream_fields - - @upstream_fields.setter - def upstream_fields(self, upstream_fields: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.upstream_fields = upstream_fields - - @property - def worksheets(self) -> Optional[list[TableauWorksheet]]: - return None if self.attributes is None else self.attributes.worksheets - - @worksheets.setter - def worksheets(self, worksheets: Optional[list[TableauWorksheet]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.worksheets = worksheets - - @property - def 
datasource(self) -> Optional[TableauDatasource]: - return None if self.attributes is None else self.attributes.datasource - - @datasource.setter - def datasource(self, datasource: Optional[TableauDatasource]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasource = datasource - - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - project_qualified_name: Optional[str] = Field( - None, description="", alias="projectQualifiedName" - ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" - ) - workbook_qualified_name: Optional[str] = Field( - None, description="", alias="workbookQualifiedName" - ) - datasource_qualified_name: Optional[str] = Field( - None, description="", alias="datasourceQualifiedName" - ) - project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="projectHierarchy" - ) - data_category: Optional[str] = Field(None, description="", alias="dataCategory") - role: Optional[str] = Field(None, description="", alias="role") - tableau_data_type: Optional[str] = Field( - None, description="", alias="tableauDataType" - ) - formula: Optional[str] = Field(None, description="", alias="formula") - upstream_fields: Optional[list[dict[str, str]]] = Field( - None, description="", alias="upstreamFields" - ) - worksheets: Optional[list[TableauWorksheet]] = Field( - None, description="", alias="worksheets" - ) # relationship - datasource: Optional[TableauDatasource] = Field( - None, description="", alias="datasource" - ) # relationship - - attributes: "TableauCalculatedField.Attributes" = Field( - default_factory=lambda: TableauCalculatedField.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class TableauProject(Tableau): - """Description""" - - type_name: str = Field("TableauProject", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TableauProject": - raise ValueError("must be TableauProject") - return v - - def __setattr__(self, name, value): - if name in TableauProject._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" - ) - """ - TBC - """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" - ) - """ - TBC - """ - IS_TOP_LEVEL_PROJECT: ClassVar[BooleanField] = BooleanField( - "isTopLevelProject", "isTopLevelProject" - ) - """ - TBC - """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" - ) - """ - TBC - """ - - PARENT_PROJECT: ClassVar[RelationField] = RelationField("parentProject") - """ - TBC - """ - WORKBOOKS: ClassVar[RelationField] = RelationField("workbooks") - """ - TBC - """ - SITE: ClassVar[RelationField] = RelationField("site") - """ - TBC - """ - DATASOURCES: ClassVar[RelationField] = RelationField("datasources") - """ - TBC - """ - FLOWS: ClassVar[RelationField] = RelationField("flows") - """ - TBC - """ - CHILD_PROJECTS: ClassVar[RelationField] = RelationField("childProjects") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "top_level_project_qualified_name", - "is_top_level_project", - "project_hierarchy", - "parent_project", - "workbooks", - "site", - "datasources", - "flows", - "child_projects", - ] - - @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name - - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name - - @property - def top_level_project_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.top_level_project_qualified_name - ) - - @top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name - ) - - @property - def is_top_level_project(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_top_level_project - - @is_top_level_project.setter - def is_top_level_project(self, is_top_level_project: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_top_level_project = is_top_level_project - - @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy - - @project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_hierarchy = project_hierarchy - - @property - def 
parent_project(self) -> Optional[TableauProject]: - return None if self.attributes is None else self.attributes.parent_project - - @parent_project.setter - def parent_project(self, parent_project: Optional[TableauProject]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.parent_project = parent_project - - @property - def workbooks(self) -> Optional[list[TableauWorkbook]]: - return None if self.attributes is None else self.attributes.workbooks - - @workbooks.setter - def workbooks(self, workbooks: Optional[list[TableauWorkbook]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbooks = workbooks - - @property - def site(self) -> Optional[TableauSite]: - return None if self.attributes is None else self.attributes.site - - @site.setter - def site(self, site: Optional[TableauSite]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site = site - - @property - def datasources(self) -> Optional[list[TableauDatasource]]: - return None if self.attributes is None else self.attributes.datasources - - @datasources.setter - def datasources(self, datasources: Optional[list[TableauDatasource]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasources = datasources - - @property - def flows(self) -> Optional[list[TableauFlow]]: - return None if self.attributes is None else self.attributes.flows - - @flows.setter - def flows(self, flows: Optional[list[TableauFlow]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.flows = flows - - @property - def child_projects(self) -> Optional[list[TableauProject]]: - return None if self.attributes is None else self.attributes.child_projects - - @child_projects.setter - def child_projects(self, child_projects: Optional[list[TableauProject]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.child_projects = child_projects - - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" - ) - is_top_level_project: Optional[bool] = Field( - None, description="", alias="isTopLevelProject" - ) - project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="projectHierarchy" - ) - parent_project: Optional[TableauProject] = Field( - None, description="", alias="parentProject" - ) # relationship - workbooks: Optional[list[TableauWorkbook]] = Field( - None, description="", alias="workbooks" - ) # relationship - site: Optional[TableauSite] = Field( - None, description="", alias="site" - ) # relationship - datasources: Optional[list[TableauDatasource]] = Field( - None, description="", alias="datasources" - ) # relationship - flows: Optional[list[TableauFlow]] = Field( - None, description="", alias="flows" - ) # relationship - child_projects: Optional[list[TableauProject]] = Field( - None, description="", alias="childProjects" - ) # relationship - - attributes: "TableauProject.Attributes" = Field( - default_factory=lambda: TableauProject.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class TableauSite(Tableau): - """Description""" - - type_name: str = Field("TableauSite", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TableauSite": - raise ValueError("must be TableauSite") - return v - - def __setattr__(self, name, value): - if name in TableauSite._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - PROJECTS: ClassVar[RelationField] = RelationField("projects") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "projects", - ] - - @property - def projects(self) -> Optional[list[TableauProject]]: - return None if self.attributes is None else self.attributes.projects - - @projects.setter - def projects(self, projects: Optional[list[TableauProject]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.projects = projects - - class Attributes(Tableau.Attributes): - projects: Optional[list[TableauProject]] = Field( - None, description="", alias="projects" - ) # relationship - - attributes: "TableauSite.Attributes" = Field( - default_factory=lambda: TableauSite.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class TableauDatasource(Tableau): - """Description""" - - type_name: str = Field("TableauDatasource", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TableauDatasource": - raise ValueError("must be TableauDatasource") - return v - - def __setattr__(self, name, value): - if name in TableauDatasource._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" - ) - """ - TBC - """ - PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "projectQualifiedName", "projectQualifiedName" - ) - """ - TBC - """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" - ) - """ - TBC - """ - WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workbookQualifiedName", "workbookQualifiedName" - ) - """ - TBC - """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" - ) - """ - TBC - """ - IS_PUBLISHED: ClassVar[BooleanField] = BooleanField("isPublished", "isPublished") - """ - TBC - """ - HAS_EXTRACTS: ClassVar[BooleanField] = BooleanField("hasExtracts", "hasExtracts") - """ - TBC - """ - IS_CERTIFIED: ClassVar[BooleanField] = BooleanField("isCertified", "isCertified") - """ - TBC - """ - CERTIFIER: ClassVar[KeywordField] = KeywordField("certifier", "certifier") - """ - TBC - """ - CERTIFICATION_NOTE: ClassVar[KeywordField] = KeywordField( - "certificationNote", "certificationNote" - ) - """ - TBC - """ - CERTIFIER_DISPLAY_NAME: ClassVar[KeywordField] = KeywordField( - "certifierDisplayName", "certifierDisplayName" - ) - """ - TBC - """ - UPSTREAM_TABLES: ClassVar[KeywordField] = KeywordField( - "upstreamTables", "upstreamTables" - ) - """ - TBC - """ - UPSTREAM_DATASOURCES: ClassVar[KeywordField] = KeywordField( - "upstreamDatasources", "upstreamDatasources" - ) - """ - TBC - """ - - WORKBOOK: 
ClassVar[RelationField] = RelationField("workbook") - """ - TBC - """ - PROJECT: ClassVar[RelationField] = RelationField("project") - """ - TBC - """ - FIELDS: ClassVar[RelationField] = RelationField("fields") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "project_qualified_name", - "top_level_project_qualified_name", - "workbook_qualified_name", - "project_hierarchy", - "is_published", - "has_extracts", - "is_certified", - "certifier", - "certification_note", - "certifier_display_name", - "upstream_tables", - "upstream_datasources", - "workbook", - "project", - "fields", - ] - - @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name - - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name - - @property - def project_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.project_qualified_name - ) - - @project_qualified_name.setter - def project_qualified_name(self, project_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_qualified_name = project_qualified_name - - @property - def top_level_project_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.top_level_project_qualified_name - ) - - @top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name - ) - - @property - def workbook_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.workbook_qualified_name - ) - - @workbook_qualified_name.setter - def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbook_qualified_name = workbook_qualified_name - - @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy - - @project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_hierarchy = project_hierarchy - - @property - def is_published(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_published - - @is_published.setter - def is_published(self, is_published: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_published = is_published - - @property - def has_extracts(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.has_extracts - - @has_extracts.setter - def has_extracts(self, has_extracts: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.has_extracts = has_extracts - - @property - def is_certified(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_certified - - @is_certified.setter - def is_certified(self, 
is_certified: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_certified = is_certified - - @property - def certifier(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.certifier - - @certifier.setter - def certifier(self, certifier: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.certifier = certifier - - @property - def certification_note(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.certification_note - - @certification_note.setter - def certification_note(self, certification_note: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.certification_note = certification_note - - @property - def certifier_display_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.certifier_display_name - ) - - @certifier_display_name.setter - def certifier_display_name(self, certifier_display_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.certifier_display_name = certifier_display_name - - @property - def upstream_tables(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.upstream_tables - - @upstream_tables.setter - def upstream_tables(self, upstream_tables: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.upstream_tables = upstream_tables - - @property - def upstream_datasources(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.upstream_datasources - - @upstream_datasources.setter - def upstream_datasources( - self, upstream_datasources: Optional[list[dict[str, str]]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.upstream_datasources = upstream_datasources - - @property - def workbook(self) -> Optional[TableauWorkbook]: - return None if self.attributes is None else self.attributes.workbook - - @workbook.setter - def workbook(self, workbook: Optional[TableauWorkbook]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbook = workbook - - @property - def project(self) -> Optional[TableauProject]: - return None if self.attributes is None else self.attributes.project - - @project.setter - def project(self, project: Optional[TableauProject]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project = project - - @property - def fields(self) -> Optional[list[TableauDatasourceField]]: - return None if self.attributes is None else self.attributes.fields - - @fields.setter - def fields(self, fields: Optional[list[TableauDatasourceField]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.fields = fields - - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - project_qualified_name: Optional[str] = Field( - None, description="", alias="projectQualifiedName" - ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" - ) - workbook_qualified_name: Optional[str] = Field( - None, description="", alias="workbookQualifiedName" - ) - project_hierarchy: Optional[list[dict[str, str]]] = 
Field( - None, description="", alias="projectHierarchy" - ) - is_published: Optional[bool] = Field(None, description="", alias="isPublished") - has_extracts: Optional[bool] = Field(None, description="", alias="hasExtracts") - is_certified: Optional[bool] = Field(None, description="", alias="isCertified") - certifier: Optional[dict[str, str]] = Field( - None, description="", alias="certifier" - ) - certification_note: Optional[str] = Field( - None, description="", alias="certificationNote" - ) - certifier_display_name: Optional[str] = Field( - None, description="", alias="certifierDisplayName" - ) - upstream_tables: Optional[list[dict[str, str]]] = Field( - None, description="", alias="upstreamTables" - ) - upstream_datasources: Optional[list[dict[str, str]]] = Field( - None, description="", alias="upstreamDatasources" - ) - workbook: Optional[TableauWorkbook] = Field( - None, description="", alias="workbook" - ) # relationship - project: Optional[TableauProject] = Field( - None, description="", alias="project" - ) # relationship - fields: Optional[list[TableauDatasourceField]] = Field( - None, description="", alias="fields" - ) # relationship - - attributes: "TableauDatasource.Attributes" = Field( - default_factory=lambda: TableauDatasource.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class TableauDashboard(Tableau): - """Description""" - - type_name: str = Field("TableauDashboard", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TableauDashboard": - raise ValueError("must be TableauDashboard") - return v - - def __setattr__(self, name, value): - if name in TableauDashboard._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" - ) + Unique name of the dataset in which this column exists. """ - TBC - """ - PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "projectQualifiedName", "projectQualifiedName" - ) - """ - TBC - """ - WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workbookQualifiedName", "workbookQualifiedName" - ) - """ - TBC - """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" - ) - """ - TBC - """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" + SIGMA_DATASET_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "sigmaDatasetName", "sigmaDatasetName.keyword", "sigmaDatasetName" ) """ - TBC + Simple name of the dataset in which this column exists. 
""" - WORKBOOK: ClassVar[RelationField] = RelationField("workbook") - """ - TBC - """ - WORKSHEETS: ClassVar[RelationField] = RelationField("worksheets") + SIGMA_DATASET: ClassVar[RelationField] = RelationField("sigmaDataset") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "project_qualified_name", - "workbook_qualified_name", - "top_level_project_qualified_name", - "project_hierarchy", - "workbook", - "worksheets", + "sigma_dataset_qualified_name", + "sigma_dataset_name", + "sigma_dataset", ] @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name - - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name - - @property - def project_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.project_qualified_name - ) - - @project_qualified_name.setter - def project_qualified_name(self, project_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_qualified_name = project_qualified_name - - @property - def workbook_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.workbook_qualified_name - ) - - @workbook_qualified_name.setter - def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbook_qualified_name = workbook_qualified_name - - @property - def top_level_project_qualified_name(self) -> Optional[str]: + def sigma_dataset_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.top_level_project_qualified_name - ) - - @top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name + else self.attributes.sigma_dataset_qualified_name ) - @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy - - @project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): + @sigma_dataset_qualified_name.setter + def sigma_dataset_qualified_name(self, sigma_dataset_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.project_hierarchy = project_hierarchy + self.attributes.sigma_dataset_qualified_name = sigma_dataset_qualified_name @property - def workbook(self) -> Optional[TableauWorkbook]: - return None if self.attributes is None else self.attributes.workbook + def sigma_dataset_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.sigma_dataset_name - @workbook.setter - def workbook(self, workbook: Optional[TableauWorkbook]): + @sigma_dataset_name.setter + def sigma_dataset_name(self, sigma_dataset_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.workbook = workbook + self.attributes.sigma_dataset_name = sigma_dataset_name @property - def worksheets(self) -> 
Optional[list[TableauWorksheet]]: - return None if self.attributes is None else self.attributes.worksheets + def sigma_dataset(self) -> Optional[SigmaDataset]: + return None if self.attributes is None else self.attributes.sigma_dataset - @worksheets.setter - def worksheets(self, worksheets: Optional[list[TableauWorksheet]]): + @sigma_dataset.setter + def sigma_dataset(self, sigma_dataset: Optional[SigmaDataset]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.worksheets = worksheets + self.attributes.sigma_dataset = sigma_dataset - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" + class Attributes(Sigma.Attributes): + sigma_dataset_qualified_name: Optional[str] = Field( + None, description="", alias="sigmaDatasetQualifiedName" ) - project_qualified_name: Optional[str] = Field( - None, description="", alias="projectQualifiedName" + sigma_dataset_name: Optional[str] = Field( + None, description="", alias="sigmaDatasetName" ) - workbook_qualified_name: Optional[str] = Field( - None, description="", alias="workbookQualifiedName" - ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" - ) - project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="projectHierarchy" - ) - workbook: Optional[TableauWorkbook] = Field( - None, description="", alias="workbook" - ) # relationship - worksheets: Optional[list[TableauWorksheet]] = Field( - None, description="", alias="worksheets" + sigma_dataset: Optional[SigmaDataset] = Field( + None, description="", alias="sigmaDataset" ) # relationship - attributes: "TableauDashboard.Attributes" = Field( - default_factory=lambda: TableauDashboard.Attributes(), + attributes: "SigmaDatasetColumn.Attributes" = Field( + default_factory=lambda: SigmaDatasetColumn.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class TableauFlow(Tableau): +class SigmaDataset(Sigma): """Description""" - type_name: str = Field("TableauFlow", allow_mutation=False) + type_name: str = Field("SigmaDataset", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "TableauFlow": - raise ValueError("must be TableauFlow") + if v != "SigmaDataset": + raise ValueError("must be SigmaDataset") return v def __setattr__(self, name, value): - if name in TableauFlow._convenience_properties: + if name in SigmaDataset._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" + SIGMA_DATASET_COLUMN_COUNT: ClassVar[NumericField] = NumericField( + "sigmaDatasetColumnCount", "sigmaDatasetColumnCount" ) """ - TBC - """ - PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "projectQualifiedName", "projectQualifiedName" - ) - """ - TBC - """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" - ) - """ - TBC - """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" - ) - """ - TBC - """ - INPUT_FIELDS: ClassVar[KeywordField] = KeywordField("inputFields", "inputFields") - """ - TBC + Number of columns in this dataset. 
""" - OUTPUT_FIELDS: ClassVar[KeywordField] = KeywordField("outputFields", "outputFields") - """ - TBC - """ - OUTPUT_STEPS: ClassVar[KeywordField] = KeywordField("outputSteps", "outputSteps") - """ - TBC - """ - - PROJECT: ClassVar[RelationField] = RelationField("project") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "project_qualified_name", - "top_level_project_qualified_name", - "project_hierarchy", - "input_fields", - "output_fields", - "output_steps", - "project", - ] - - @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name - - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name - - @property - def project_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.project_qualified_name - ) - - @project_qualified_name.setter - def project_qualified_name(self, project_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_qualified_name = project_qualified_name - - @property - def top_level_project_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.top_level_project_qualified_name - ) - - @top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name - ) - - @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy - - @project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_hierarchy = project_hierarchy - - @property - def input_fields(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.input_fields - - @input_fields.setter - def input_fields(self, input_fields: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.input_fields = input_fields - - @property - def output_fields(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.output_fields - @output_fields.setter - def output_fields(self, output_fields: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.output_fields = output_fields - - @property - def output_steps(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.output_steps - - @output_steps.setter - def output_steps(self, output_steps: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.output_steps = output_steps - - @property - def project(self) -> Optional[TableauProject]: - return None if self.attributes is None else self.attributes.project - - @project.setter - def project(self, project: Optional[TableauProject]): - if self.attributes is None: - self.attributes = 
self.Attributes() - self.attributes.project = project - - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - project_qualified_name: Optional[str] = Field( - None, description="", alias="projectQualifiedName" - ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" - ) - project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="projectHierarchy" - ) - input_fields: Optional[list[dict[str, str]]] = Field( - None, description="", alias="inputFields" - ) - output_fields: Optional[list[dict[str, str]]] = Field( - None, description="", alias="outputFields" - ) - output_steps: Optional[list[dict[str, str]]] = Field( - None, description="", alias="outputSteps" - ) - project: Optional[TableauProject] = Field( - None, description="", alias="project" - ) # relationship - - attributes: "TableauFlow.Attributes" = Field( - default_factory=lambda: TableauFlow.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class TableauWorksheet(Tableau): - """Description""" - - type_name: str = Field("TableauWorksheet", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TableauWorksheet": - raise ValueError("must be TableauWorksheet") - return v - - def __setattr__(self, name, value): - if name in TableauWorksheet._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" - ) - """ - TBC - """ - PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "projectQualifiedName", "projectQualifiedName" - ) - """ - TBC - """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" - ) - """ - TBC - """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" - ) - """ - TBC - """ - WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workbookQualifiedName", "workbookQualifiedName" + SIGMA_DATASET_COLUMNS: ClassVar[RelationField] = RelationField( + "sigmaDatasetColumns" ) """ TBC """ - WORKBOOK: ClassVar[RelationField] = RelationField("workbook") - """ - TBC - """ - DATASOURCE_FIELDS: ClassVar[RelationField] = RelationField("datasourceFields") - """ - TBC - """ - CALCULATED_FIELDS: ClassVar[RelationField] = RelationField("calculatedFields") - """ - TBC - """ - DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") - """ - TBC - """ - _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "project_qualified_name", - "top_level_project_qualified_name", - "project_hierarchy", - "workbook_qualified_name", - "workbook", - "datasource_fields", - "calculated_fields", - "dashboards", + "sigma_dataset_column_count", + "sigma_dataset_columns", ] @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name - - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name - - @property - def 
project_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.project_qualified_name - ) - - @project_qualified_name.setter - def project_qualified_name(self, project_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_qualified_name = project_qualified_name - - @property - def top_level_project_qualified_name(self) -> Optional[str]: + def sigma_dataset_column_count(self) -> Optional[int]: return ( None if self.attributes is None - else self.attributes.top_level_project_qualified_name - ) - - @top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name + else self.attributes.sigma_dataset_column_count ) - @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy - - @project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): + @sigma_dataset_column_count.setter + def sigma_dataset_column_count(self, sigma_dataset_column_count: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.project_hierarchy = project_hierarchy + self.attributes.sigma_dataset_column_count = sigma_dataset_column_count @property - def workbook_qualified_name(self) -> Optional[str]: + def sigma_dataset_columns(self) -> Optional[list[SigmaDatasetColumn]]: return ( - None if self.attributes is None else self.attributes.workbook_qualified_name + None if self.attributes is None else self.attributes.sigma_dataset_columns ) - @workbook_qualified_name.setter - def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbook_qualified_name = workbook_qualified_name - - @property - def workbook(self) -> Optional[TableauWorkbook]: - return None if self.attributes is None else self.attributes.workbook - - @workbook.setter - def workbook(self, workbook: Optional[TableauWorkbook]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbook = workbook - - @property - def datasource_fields(self) -> Optional[list[TableauDatasourceField]]: - return None if self.attributes is None else self.attributes.datasource_fields - - @datasource_fields.setter - def datasource_fields( - self, datasource_fields: Optional[list[TableauDatasourceField]] + @sigma_dataset_columns.setter + def sigma_dataset_columns( + self, sigma_dataset_columns: Optional[list[SigmaDatasetColumn]] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.datasource_fields = datasource_fields + self.attributes.sigma_dataset_columns = sigma_dataset_columns - @property - def calculated_fields(self) -> Optional[list[TableauCalculatedField]]: - return None if self.attributes is None else self.attributes.calculated_fields - - @calculated_fields.setter - def calculated_fields( - self, calculated_fields: Optional[list[TableauCalculatedField]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.calculated_fields = calculated_fields - - @property - def dashboards(self) -> Optional[list[TableauDashboard]]: - return None if self.attributes 
is None else self.attributes.dashboards - - @dashboards.setter - def dashboards(self, dashboards: Optional[list[TableauDashboard]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboards = dashboards - - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - project_qualified_name: Optional[str] = Field( - None, description="", alias="projectQualifiedName" + class Attributes(Sigma.Attributes): + sigma_dataset_column_count: Optional[int] = Field( + None, description="", alias="sigmaDatasetColumnCount" ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" - ) - project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="projectHierarchy" - ) - workbook_qualified_name: Optional[str] = Field( - None, description="", alias="workbookQualifiedName" - ) - workbook: Optional[TableauWorkbook] = Field( - None, description="", alias="workbook" - ) # relationship - datasource_fields: Optional[list[TableauDatasourceField]] = Field( - None, description="", alias="datasourceFields" - ) # relationship - calculated_fields: Optional[list[TableauCalculatedField]] = Field( - None, description="", alias="calculatedFields" - ) # relationship - dashboards: Optional[list[TableauDashboard]] = Field( - None, description="", alias="dashboards" + sigma_dataset_columns: Optional[list[SigmaDatasetColumn]] = Field( + None, description="", alias="sigmaDatasetColumns" ) # relationship - attributes: "TableauWorksheet.Attributes" = Field( - default_factory=lambda: TableauWorksheet.Attributes(), + attributes: "SigmaDataset.Attributes" = Field( + default_factory=lambda: SigmaDataset.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -TableauWorkbook.Attributes.update_forward_refs() - - -TableauDatasourceField.Attributes.update_forward_refs() - - -TableauCalculatedField.Attributes.update_forward_refs() - - -TableauProject.Attributes.update_forward_refs() - - -TableauSite.Attributes.update_forward_refs() - - -TableauDatasource.Attributes.update_forward_refs() - - -TableauDashboard.Attributes.update_forward_refs() - - -TableauFlow.Attributes.update_forward_refs() +SigmaDatasetColumn.Attributes.update_forward_refs() -TableauWorksheet.Attributes.update_forward_refs() +SigmaDataset.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset68.py b/pyatlan/model/assets/asset68.py index a8de84cc9..cddf6b8ed 100644 --- a/pyatlan/model/assets/asset68.py +++ b/pyatlan/model/assets/asset68.py @@ -8,148 +8,434 @@ from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import KeywordField, RelationField +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + NumericField, + RelationField, + TextField, +) -from .asset42 import Tableau -from .asset67 import TableauProject +from .asset42 import Sigma -class TableauMetric(Tableau): +class SigmaWorkbook(Sigma): """Description""" - type_name: str = Field("TableauMetric", allow_mutation=False) + type_name: str = Field("SigmaWorkbook", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "TableauMetric": - raise ValueError("must be TableauMetric") + if v != "SigmaWorkbook": + raise ValueError("must be SigmaWorkbook") return v def __setattr__(self, name, value): - if name in TableauMetric._convenience_properties: + if name in SigmaWorkbook._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" + SIGMA_PAGE_COUNT: ClassVar[NumericField] = NumericField( + "sigmaPageCount", "sigmaPageCount" ) """ + Number of pages in this workbook. + """ + + SIGMA_PAGES: ClassVar[RelationField] = RelationField("sigmaPages") + """ TBC """ - PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "projectQualifiedName", "projectQualifiedName" + + _convenience_properties: ClassVar[list[str]] = [ + "sigma_page_count", + "sigma_pages", + ] + + @property + def sigma_page_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.sigma_page_count + + @sigma_page_count.setter + def sigma_page_count(self, sigma_page_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_page_count = sigma_page_count + + @property + def sigma_pages(self) -> Optional[list[SigmaPage]]: + return None if self.attributes is None else self.attributes.sigma_pages + + @sigma_pages.setter + def sigma_pages(self, sigma_pages: Optional[list[SigmaPage]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_pages = sigma_pages + + class Attributes(Sigma.Attributes): + sigma_page_count: Optional[int] = Field( + None, description="", alias="sigmaPageCount" + ) + sigma_pages: Optional[list[SigmaPage]] = Field( + None, description="", alias="sigmaPages" + ) # relationship + + attributes: "SigmaWorkbook.Attributes" = Field( + default_factory=lambda: SigmaWorkbook.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class SigmaDataElementField(Sigma): + """Description""" + + type_name: str = Field("SigmaDataElementField", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SigmaDataElementField": + raise ValueError("must be SigmaDataElementField") + return v + + def __setattr__(self, name, value): + if name in SigmaDataElementField._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SIGMA_DATA_ELEMENT_FIELD_IS_HIDDEN: ClassVar[BooleanField] = BooleanField( + "sigmaDataElementFieldIsHidden", "sigmaDataElementFieldIsHidden" ) """ - TBC + Whether this field is hidden (true) or not (false). """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" + SIGMA_DATA_ELEMENT_FIELD_FORMULA: ClassVar[TextField] = TextField( + "sigmaDataElementFieldFormula", "sigmaDataElementFieldFormula" ) """ + + """ + + SIGMA_DATA_ELEMENT: ClassVar[RelationField] = RelationField("sigmaDataElement") + """ TBC """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" + + _convenience_properties: ClassVar[list[str]] = [ + "sigma_data_element_field_is_hidden", + "sigma_data_element_field_formula", + "sigma_data_element", + ] + + @property + def sigma_data_element_field_is_hidden(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.sigma_data_element_field_is_hidden + ) + + @sigma_data_element_field_is_hidden.setter + def sigma_data_element_field_is_hidden( + self, sigma_data_element_field_is_hidden: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_data_element_field_is_hidden = ( + sigma_data_element_field_is_hidden + ) + + @property + def sigma_data_element_field_formula(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sigma_data_element_field_formula + ) + + @sigma_data_element_field_formula.setter + def sigma_data_element_field_formula( + self, sigma_data_element_field_formula: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_data_element_field_formula = ( + sigma_data_element_field_formula + ) + + @property + def sigma_data_element(self) -> Optional[SigmaDataElement]: + return None if self.attributes is None else self.attributes.sigma_data_element + + @sigma_data_element.setter + def sigma_data_element(self, sigma_data_element: Optional[SigmaDataElement]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_data_element = sigma_data_element + + class Attributes(Sigma.Attributes): + sigma_data_element_field_is_hidden: Optional[bool] = Field( + None, description="", alias="sigmaDataElementFieldIsHidden" + ) + sigma_data_element_field_formula: Optional[str] = Field( + None, description="", alias="sigmaDataElementFieldFormula" + ) + sigma_data_element: Optional[SigmaDataElement] = Field( + None, description="", alias="sigmaDataElement" + ) # relationship + + attributes: "SigmaDataElementField.Attributes" = Field( + default_factory=lambda: SigmaDataElementField.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by "
+        "type, so are described in the sub-types of this schema.\n",
+    )
+
+
+class SigmaPage(Sigma):
+    """Description"""
+
+    type_name: str = Field("SigmaPage", allow_mutation=False)
+
+    @validator("type_name")
+    def validate_type_name(cls, v):
+        if v != "SigmaPage":
+            raise ValueError("must be SigmaPage")
+        return v
+
+    def __setattr__(self, name, value):
+        if name in SigmaPage._convenience_properties:
+            return object.__setattr__(self, name, value)
+        super().__setattr__(name, value)
+
+    SIGMA_DATA_ELEMENT_COUNT: ClassVar[NumericField] = NumericField(
+        "sigmaDataElementCount", "sigmaDataElementCount"
+    )
+    """
+    Number of data elements on this page.
+    """
+
+    SIGMA_DATA_ELEMENTS: ClassVar[RelationField] = RelationField("sigmaDataElements")
     """
     TBC
     """
+    SIGMA_WORKBOOK: ClassVar[RelationField] = RelationField("sigmaWorkbook")
+    """
+    TBC
+    """
+
+    _convenience_properties: ClassVar[list[str]] = [
+        "sigma_data_element_count",
+        "sigma_data_elements",
+        "sigma_workbook",
+    ]
+
+    @property
+    def sigma_data_element_count(self) -> Optional[int]:
+        return (
+            None
+            if self.attributes is None
+            else self.attributes.sigma_data_element_count
+        )
+
+    @sigma_data_element_count.setter
+    def sigma_data_element_count(self, sigma_data_element_count: Optional[int]):
+        if self.attributes is None:
+            self.attributes = self.Attributes()
+        self.attributes.sigma_data_element_count = sigma_data_element_count
+
+    @property
+    def sigma_data_elements(self) -> Optional[list[SigmaDataElement]]:
+        return None if self.attributes is None else self.attributes.sigma_data_elements
+
+    @sigma_data_elements.setter
+    def sigma_data_elements(
+        self, sigma_data_elements: Optional[list[SigmaDataElement]]
+    ):
+        if self.attributes is None:
+            self.attributes = self.Attributes()
+        self.attributes.sigma_data_elements = sigma_data_elements
+
+    @property
+    def sigma_workbook(self) -> Optional[SigmaWorkbook]:
+        return None if self.attributes is None else self.attributes.sigma_workbook
+
+    @sigma_workbook.setter
+    def sigma_workbook(self, sigma_workbook: Optional[SigmaWorkbook]):
+        if self.attributes is None:
+            self.attributes = self.Attributes()
+        self.attributes.sigma_workbook = sigma_workbook
+
+    class Attributes(Sigma.Attributes):
+        sigma_data_element_count: Optional[int] = Field(
+            None, description="", alias="sigmaDataElementCount"
+        )
+        sigma_data_elements: Optional[list[SigmaDataElement]] = Field(
+            None, description="", alias="sigmaDataElements"
+        )  # relationship
+        sigma_workbook: Optional[SigmaWorkbook] = Field(
+            None, description="", alias="sigmaWorkbook"
+        )  # relationship
+
+    attributes: "SigmaPage.Attributes" = Field(
+        default_factory=lambda: SigmaPage.Attributes(),
+        description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class SigmaDataElement(Sigma): + """Description""" - PROJECT: ClassVar[RelationField] = RelationField("project") + type_name: str = Field("SigmaDataElement", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SigmaDataElement": + raise ValueError("must be SigmaDataElement") + return v + + def __setattr__(self, name, value): + if name in SigmaDataElement._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SIGMA_DATA_ELEMENT_QUERY: ClassVar[KeywordField] = KeywordField( + "sigmaDataElementQuery", "sigmaDataElementQuery" + ) + """ + + """ + SIGMA_DATA_ELEMENT_TYPE: ClassVar[KeywordField] = KeywordField( + "sigmaDataElementType", "sigmaDataElementType" + ) + """ + + """ + SIGMA_DATA_ELEMENT_FIELD_COUNT: ClassVar[NumericField] = NumericField( + "sigmaDataElementFieldCount", "sigmaDataElementFieldCount" + ) + """ + Number of fields in this data element. + """ + + SIGMA_PAGE: ClassVar[RelationField] = RelationField("sigmaPage") + """ + TBC + """ + SIGMA_DATA_ELEMENT_FIELDS: ClassVar[RelationField] = RelationField( + "sigmaDataElementFields" + ) """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "project_qualified_name", - "top_level_project_qualified_name", - "project_hierarchy", - "project", + "sigma_data_element_query", + "sigma_data_element_type", + "sigma_data_element_field_count", + "sigma_page", + "sigma_data_element_fields", ] @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name + def sigma_data_element_query(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sigma_data_element_query + ) - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): + @sigma_data_element_query.setter + def sigma_data_element_query(self, sigma_data_element_query: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name + self.attributes.sigma_data_element_query = sigma_data_element_query @property - def project_qualified_name(self) -> Optional[str]: + def sigma_data_element_type(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.project_qualified_name + None if self.attributes is None else self.attributes.sigma_data_element_type ) - @project_qualified_name.setter - def project_qualified_name(self, project_qualified_name: Optional[str]): + @sigma_data_element_type.setter + def sigma_data_element_type(self, sigma_data_element_type: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.project_qualified_name = project_qualified_name + self.attributes.sigma_data_element_type = sigma_data_element_type @property - def top_level_project_qualified_name(self) -> Optional[str]: + def sigma_data_element_field_count(self) -> Optional[int]: return ( None if self.attributes is None - else self.attributes.top_level_project_qualified_name + else self.attributes.sigma_data_element_field_count ) - @top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] + @sigma_data_element_field_count.setter + def sigma_data_element_field_count( + self, 
sigma_data_element_field_count: Optional[int] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name - ) + self.attributes.sigma_data_element_field_count = sigma_data_element_field_count @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy + def sigma_page(self) -> Optional[SigmaPage]: + return None if self.attributes is None else self.attributes.sigma_page - @project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): + @sigma_page.setter + def sigma_page(self, sigma_page: Optional[SigmaPage]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.project_hierarchy = project_hierarchy + self.attributes.sigma_page = sigma_page @property - def project(self) -> Optional[TableauProject]: - return None if self.attributes is None else self.attributes.project + def sigma_data_element_fields(self) -> Optional[list[SigmaDataElementField]]: + return ( + None + if self.attributes is None + else self.attributes.sigma_data_element_fields + ) - @project.setter - def project(self, project: Optional[TableauProject]): + @sigma_data_element_fields.setter + def sigma_data_element_fields( + self, sigma_data_element_fields: Optional[list[SigmaDataElementField]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.project = project + self.attributes.sigma_data_element_fields = sigma_data_element_fields - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - project_qualified_name: Optional[str] = Field( - None, description="", alias="projectQualifiedName" + class Attributes(Sigma.Attributes): + sigma_data_element_query: Optional[str] = Field( + None, description="", alias="sigmaDataElementQuery" ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" + sigma_data_element_type: Optional[str] = Field( + None, description="", alias="sigmaDataElementType" ) - project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="projectHierarchy" + sigma_data_element_field_count: Optional[int] = Field( + None, description="", alias="sigmaDataElementFieldCount" ) - project: Optional[TableauProject] = Field( - None, description="", alias="project" + sigma_page: Optional[SigmaPage] = Field( + None, description="", alias="sigmaPage" + ) # relationship + sigma_data_element_fields: Optional[list[SigmaDataElementField]] = Field( + None, description="", alias="sigmaDataElementFields" ) # relationship - attributes: "TableauMetric.Attributes" = Field( - default_factory=lambda: TableauMetric.Attributes(), + attributes: "SigmaDataElement.Attributes" = Field( + default_factory=lambda: SigmaDataElement.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -TableauMetric.Attributes.update_forward_refs() +SigmaWorkbook.Attributes.update_forward_refs() + + +SigmaDataElementField.Attributes.update_forward_refs() + + +SigmaPage.Attributes.update_forward_refs() + + +SigmaDataElement.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset69.py b/pyatlan/model/assets/asset69.py index 48df54bce..4c13614a1 100644 --- a/pyatlan/model/assets/asset69.py +++ b/pyatlan/model/assets/asset69.py @@ -4,1127 +4,1333 @@ from __future__ import annotations -from datetime import datetime from typing import ClassVar, Optional from pydantic import Field, validator from pyatlan.model.fields.atlan_fields import ( + BooleanField, KeywordField, KeywordTextField, - NumericField, RelationField, ) -from .asset43 import Looker +from .asset43 import Tableau -class LookerLook(Looker): +class TableauWorkbook(Tableau): """Description""" - type_name: str = Field("LookerLook", allow_mutation=False) + type_name: str = Field("TableauWorkbook", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "LookerLook": - raise ValueError("must be LookerLook") + if v != "TableauWorkbook": + raise ValueError("must be TableauWorkbook") return v def __setattr__(self, name, value): - if name in LookerLook._convenience_properties: + if name in TableauWorkbook._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - FOLDER_NAME: ClassVar[KeywordField] = KeywordField("folderName", "folderName") - """ - TBC - """ - SOURCE_USER_ID: ClassVar[NumericField] = NumericField( - "sourceUserId", "sourceUserId" - ) - """ - TBC - """ - SOURCE_VIEW_COUNT: ClassVar[NumericField] = NumericField( - "sourceViewCount", "sourceViewCount" - ) - """ - TBC - """ - SOURCELAST_UPDATER_ID: ClassVar[NumericField] = NumericField( - "sourcelastUpdaterId", "sourcelastUpdaterId" + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" ) """ - TBC + Unique name of the site in which this workbook exists. """ - SOURCE_LAST_ACCESSED_AT: ClassVar[NumericField] = NumericField( - "sourceLastAccessedAt", "sourceLastAccessedAt" + PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "projectQualifiedName", "projectQualifiedName" ) """ - TBC + Unique name of the project in which this workbook exists. """ - SOURCE_LAST_VIEWED_AT: ClassVar[NumericField] = NumericField( - "sourceLastViewedAt", "sourceLastViewedAt" + TOP_LEVEL_PROJECT_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectName", "topLevelProjectName" ) """ - TBC + Simple name of the top-level project in which this workbook exists. """ - SOURCE_CONTENT_METADATA_ID: ClassVar[NumericField] = NumericField( - "sourceContentMetadataId", "sourceContentMetadataId" + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" ) """ - TBC + Unique name of the top-level project in which this workbook exists. """ - SOURCE_QUERY_ID: ClassVar[NumericField] = NumericField( - "sourceQueryId", "sourceQueryId" + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" ) """ - TBC - """ - MODEL_NAME: ClassVar[KeywordField] = KeywordField("modelName", "modelName") - """ - TBC + List of top-level projects with their nested child projects. 
""" - QUERY: ClassVar[RelationField] = RelationField("query") - """ - TBC - """ - FOLDER: ClassVar[RelationField] = RelationField("folder") + PROJECT: ClassVar[RelationField] = RelationField("project") """ TBC """ - TILE: ClassVar[RelationField] = RelationField("tile") + DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") """ TBC """ - MODEL: ClassVar[RelationField] = RelationField("model") + WORKSHEETS: ClassVar[RelationField] = RelationField("worksheets") """ TBC """ - DASHBOARD: ClassVar[RelationField] = RelationField("dashboard") + DATASOURCES: ClassVar[RelationField] = RelationField("datasources") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "folder_name", - "source_user_id", - "source_view_count", - "sourcelast_updater_id", - "source_last_accessed_at", - "source_last_viewed_at", - "source_content_metadata_id", - "source_query_id", - "model_name", - "query", - "folder", - "tile", - "model", - "dashboard", + "site_qualified_name", + "project_qualified_name", + "top_level_project_name", + "top_level_project_qualified_name", + "project_hierarchy", + "project", + "dashboards", + "worksheets", + "datasources", ] @property - def folder_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.folder_name - - @folder_name.setter - def folder_name(self, folder_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.folder_name = folder_name - - @property - def source_user_id(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_user_id - - @source_user_id.setter - def source_user_id(self, source_user_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_user_id = source_user_id - - @property - def source_view_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_view_count - - @source_view_count.setter - def source_view_count(self, source_view_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_view_count = source_view_count - - @property - def sourcelast_updater_id(self) -> Optional[int]: - return ( - None if self.attributes is None else self.attributes.sourcelast_updater_id - ) + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name - @sourcelast_updater_id.setter - def sourcelast_updater_id(self, sourcelast_updater_id: Optional[int]): + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sourcelast_updater_id = sourcelast_updater_id + self.attributes.site_qualified_name = site_qualified_name @property - def source_last_accessed_at(self) -> Optional[datetime]: + def project_qualified_name(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.source_last_accessed_at + None if self.attributes is None else self.attributes.project_qualified_name ) - @source_last_accessed_at.setter - def source_last_accessed_at(self, source_last_accessed_at: Optional[datetime]): + @project_qualified_name.setter + def project_qualified_name(self, project_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_last_accessed_at = source_last_accessed_at + self.attributes.project_qualified_name = 
project_qualified_name @property - def source_last_viewed_at(self) -> Optional[datetime]: + def top_level_project_name(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.source_last_viewed_at + None if self.attributes is None else self.attributes.top_level_project_name ) - @source_last_viewed_at.setter - def source_last_viewed_at(self, source_last_viewed_at: Optional[datetime]): + @top_level_project_name.setter + def top_level_project_name(self, top_level_project_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_last_viewed_at = source_last_viewed_at + self.attributes.top_level_project_name = top_level_project_name @property - def source_content_metadata_id(self) -> Optional[int]: + def top_level_project_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.source_content_metadata_id + else self.attributes.top_level_project_qualified_name ) - @source_content_metadata_id.setter - def source_content_metadata_id(self, source_content_metadata_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_content_metadata_id = source_content_metadata_id - - @property - def source_query_id(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_query_id - - @source_query_id.setter - def source_query_id(self, source_query_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_query_id = source_query_id - - @property - def model_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.model_name - - @model_name.setter - def model_name(self, model_name: Optional[str]): + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.model_name = model_name + self.attributes.top_level_project_qualified_name = ( + top_level_project_qualified_name + ) @property - def query(self) -> Optional[LookerQuery]: - return None if self.attributes is None else self.attributes.query + def project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.project_hierarchy - @query.setter - def query(self, query: Optional[LookerQuery]): + @project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.query = query + self.attributes.project_hierarchy = project_hierarchy @property - def folder(self) -> Optional[LookerFolder]: - return None if self.attributes is None else self.attributes.folder + def project(self) -> Optional[TableauProject]: + return None if self.attributes is None else self.attributes.project - @folder.setter - def folder(self, folder: Optional[LookerFolder]): + @project.setter + def project(self, project: Optional[TableauProject]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.folder = folder + self.attributes.project = project @property - def tile(self) -> Optional[LookerTile]: - return None if self.attributes is None else self.attributes.tile + def dashboards(self) -> Optional[list[TableauDashboard]]: + return None if self.attributes is None else self.attributes.dashboards - @tile.setter - def tile(self, 
tile: Optional[LookerTile]): + @dashboards.setter + def dashboards(self, dashboards: Optional[list[TableauDashboard]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.tile = tile + self.attributes.dashboards = dashboards @property - def model(self) -> Optional[LookerModel]: - return None if self.attributes is None else self.attributes.model + def worksheets(self) -> Optional[list[TableauWorksheet]]: + return None if self.attributes is None else self.attributes.worksheets - @model.setter - def model(self, model: Optional[LookerModel]): + @worksheets.setter + def worksheets(self, worksheets: Optional[list[TableauWorksheet]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.model = model + self.attributes.worksheets = worksheets @property - def dashboard(self) -> Optional[LookerDashboard]: - return None if self.attributes is None else self.attributes.dashboard + def datasources(self) -> Optional[list[TableauDatasource]]: + return None if self.attributes is None else self.attributes.datasources - @dashboard.setter - def dashboard(self, dashboard: Optional[LookerDashboard]): + @datasources.setter + def datasources(self, datasources: Optional[list[TableauDatasource]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dashboard = dashboard + self.attributes.datasources = datasources - class Attributes(Looker.Attributes): - folder_name: Optional[str] = Field(None, description="", alias="folderName") - source_user_id: Optional[int] = Field( - None, description="", alias="sourceUserId" - ) - source_view_count: Optional[int] = Field( - None, description="", alias="sourceViewCount" + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field( + None, description="", alias="siteQualifiedName" ) - sourcelast_updater_id: Optional[int] = Field( - None, description="", alias="sourcelastUpdaterId" + project_qualified_name: Optional[str] = Field( + None, description="", alias="projectQualifiedName" ) - source_last_accessed_at: Optional[datetime] = Field( - None, description="", alias="sourceLastAccessedAt" + top_level_project_name: Optional[str] = Field( + None, description="", alias="topLevelProjectName" ) - source_last_viewed_at: Optional[datetime] = Field( - None, description="", alias="sourceLastViewedAt" + top_level_project_qualified_name: Optional[str] = Field( + None, description="", alias="topLevelProjectQualifiedName" ) - source_content_metadata_id: Optional[int] = Field( - None, description="", alias="sourceContentMetadataId" + project_hierarchy: Optional[list[dict[str, str]]] = Field( + None, description="", alias="projectHierarchy" ) - source_query_id: Optional[int] = Field( - None, description="", alias="sourceQueryId" - ) - model_name: Optional[str] = Field(None, description="", alias="modelName") - query: Optional[LookerQuery] = Field( - None, description="", alias="query" - ) # relationship - folder: Optional[LookerFolder] = Field( - None, description="", alias="folder" + project: Optional[TableauProject] = Field( + None, description="", alias="project" ) # relationship - tile: Optional[LookerTile] = Field( - None, description="", alias="tile" + dashboards: Optional[list[TableauDashboard]] = Field( + None, description="", alias="dashboards" ) # relationship - model: Optional[LookerModel] = Field( - None, description="", alias="model" + worksheets: Optional[list[TableauWorksheet]] = Field( + None, description="", alias="worksheets" ) # relationship - dashboard: 
Optional[LookerDashboard] = Field( - None, description="", alias="dashboard" + datasources: Optional[list[TableauDatasource]] = Field( + None, description="", alias="datasources" ) # relationship - attributes: "LookerLook.Attributes" = Field( - default_factory=lambda: LookerLook.Attributes(), + attributes: "TableauWorkbook.Attributes" = Field( + default_factory=lambda: TableauWorkbook.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class LookerDashboard(Looker): +class TableauDatasourceField(Tableau): """Description""" - type_name: str = Field("LookerDashboard", allow_mutation=False) + type_name: str = Field("TableauDatasourceField", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "LookerDashboard": - raise ValueError("must be LookerDashboard") + if v != "TableauDatasourceField": + raise ValueError("must be TableauDatasourceField") return v def __setattr__(self, name, value): - if name in LookerDashboard._convenience_properties: + if name in TableauDatasourceField._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - FOLDER_NAME: ClassVar[KeywordField] = KeywordField("folderName", "folderName") + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" + ) """ - TBC + Unique name of the site in which this datasource field exists. """ - SOURCE_USER_ID: ClassVar[NumericField] = NumericField( - "sourceUserId", "sourceUserId" + PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "projectQualifiedName", "projectQualifiedName" ) """ - TBC + Unique name of the project in which this datasource field exists. """ - SOURCE_VIEW_COUNT: ClassVar[NumericField] = NumericField( - "sourceViewCount", "sourceViewCount" + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" ) """ - TBC + Unique name of the top-level project in which this datasource field exists. """ - SOURCE_METADATA_ID: ClassVar[NumericField] = NumericField( - "sourceMetadataId", "sourceMetadataId" + WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workbookQualifiedName", "workbookQualifiedName" ) """ - TBC + Unique name of the workbook in which this datasource field exists. """ - SOURCELAST_UPDATER_ID: ClassVar[NumericField] = NumericField( - "sourcelastUpdaterId", "sourcelastUpdaterId" + DATASOURCE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "datasourceQualifiedName", "datasourceQualifiedName" ) """ - TBC + Unique name of the datasource in which this datasource field exists. """ - SOURCE_LAST_ACCESSED_AT: ClassVar[NumericField] = NumericField( - "sourceLastAccessedAt", "sourceLastAccessedAt" + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" ) """ - TBC + List of top-level projects and their nested child projects. """ - SOURCE_LAST_VIEWED_AT: ClassVar[NumericField] = NumericField( - "sourceLastViewedAt", "sourceLastViewedAt" + FULLY_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "fullyQualifiedName", "fullyQualifiedName" ) """ - TBC + Name used internally in Tableau to uniquely identify this field. 
""" - - TILES: ClassVar[RelationField] = RelationField("tiles") + TABLEAU_DATASOURCE_FIELD_DATA_CATEGORY: ClassVar[KeywordField] = KeywordField( + "tableauDatasourceFieldDataCategory", "tableauDatasourceFieldDataCategory" + ) """ - TBC + Data category of this field. + """ + TABLEAU_DATASOURCE_FIELD_ROLE: ClassVar[KeywordField] = KeywordField( + "tableauDatasourceFieldRole", "tableauDatasourceFieldRole" + ) + """ + Role of this field, for example: 'dimension', 'measure', or 'unknown'. + """ + TABLEAU_DATASOURCE_FIELD_DATA_TYPE: ClassVar[KeywordTextField] = KeywordTextField( + "tableauDatasourceFieldDataType", + "tableauDatasourceFieldDataType", + "tableauDatasourceFieldDataType.text", + ) + """ + Data type of this field. + """ + UPSTREAM_TABLES: ClassVar[KeywordField] = KeywordField( + "upstreamTables", "upstreamTables" + ) """ - LOOKS: ClassVar[RelationField] = RelationField("looks") + Tables upstream to this datasource field. + """ + TABLEAU_DATASOURCE_FIELD_FORMULA: ClassVar[KeywordField] = KeywordField( + "tableauDatasourceFieldFormula", "tableauDatasourceFieldFormula" + ) + """ + Formula for this field. + """ + TABLEAU_DATASOURCE_FIELD_BIN_SIZE: ClassVar[KeywordField] = KeywordField( + "tableauDatasourceFieldBinSize", "tableauDatasourceFieldBinSize" + ) + """ + Bin size of this field. + """ + UPSTREAM_COLUMNS: ClassVar[KeywordField] = KeywordField( + "upstreamColumns", "upstreamColumns" + ) + """ + Columns upstream to this field. + """ + UPSTREAM_FIELDS: ClassVar[KeywordField] = KeywordField( + "upstreamFields", "upstreamFields" + ) + """ + Fields upstream to this field. + """ + DATASOURCE_FIELD_TYPE: ClassVar[KeywordField] = KeywordField( + "datasourceFieldType", "datasourceFieldType" + ) + """ + Type of this datasource field. + """ + + WORKSHEETS: ClassVar[RelationField] = RelationField("worksheets") """ TBC """ - FOLDER: ClassVar[RelationField] = RelationField("folder") + DATASOURCE: ClassVar[RelationField] = RelationField("datasource") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "folder_name", - "source_user_id", - "source_view_count", - "source_metadata_id", - "sourcelast_updater_id", - "source_last_accessed_at", - "source_last_viewed_at", - "tiles", - "looks", - "folder", + "site_qualified_name", + "project_qualified_name", + "top_level_project_qualified_name", + "workbook_qualified_name", + "datasource_qualified_name", + "project_hierarchy", + "fully_qualified_name", + "tableau_datasource_field_data_category", + "tableau_datasource_field_role", + "tableau_datasource_field_data_type", + "upstream_tables", + "tableau_datasource_field_formula", + "tableau_datasource_field_bin_size", + "upstream_columns", + "upstream_fields", + "datasource_field_type", + "worksheets", + "datasource", ] @property - def folder_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.folder_name + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name - @folder_name.setter - def folder_name(self, folder_name: Optional[str]): + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.folder_name = folder_name + self.attributes.site_qualified_name = site_qualified_name @property - def source_user_id(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_user_id + def project_qualified_name(self) -> Optional[str]: + return 
( + None if self.attributes is None else self.attributes.project_qualified_name + ) - @source_user_id.setter - def source_user_id(self, source_user_id: Optional[int]): + @project_qualified_name.setter + def project_qualified_name(self, project_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_user_id = source_user_id + self.attributes.project_qualified_name = project_qualified_name @property - def source_view_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_view_count + def top_level_project_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.top_level_project_qualified_name + ) - @source_view_count.setter - def source_view_count(self, source_view_count: Optional[int]): + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_view_count = source_view_count + self.attributes.top_level_project_qualified_name = ( + top_level_project_qualified_name + ) @property - def source_metadata_id(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_metadata_id + def workbook_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.workbook_qualified_name + ) - @source_metadata_id.setter - def source_metadata_id(self, source_metadata_id: Optional[int]): + @workbook_qualified_name.setter + def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_metadata_id = source_metadata_id + self.attributes.workbook_qualified_name = workbook_qualified_name @property - def sourcelast_updater_id(self) -> Optional[int]: + def datasource_qualified_name(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.sourcelast_updater_id + None + if self.attributes is None + else self.attributes.datasource_qualified_name ) - @sourcelast_updater_id.setter - def sourcelast_updater_id(self, sourcelast_updater_id: Optional[int]): + @datasource_qualified_name.setter + def datasource_qualified_name(self, datasource_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sourcelast_updater_id = sourcelast_updater_id + self.attributes.datasource_qualified_name = datasource_qualified_name @property - def source_last_accessed_at(self) -> Optional[datetime]: + def project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.project_hierarchy + + @project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_hierarchy = project_hierarchy + + @property + def fully_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.fully_qualified_name + + @fully_qualified_name.setter + def fully_qualified_name(self, fully_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.fully_qualified_name = fully_qualified_name + + @property + def tableau_datasource_field_data_category(self) -> Optional[str]: return ( - None if 
self.attributes is None else self.attributes.source_last_accessed_at + None + if self.attributes is None + else self.attributes.tableau_datasource_field_data_category ) - @source_last_accessed_at.setter - def source_last_accessed_at(self, source_last_accessed_at: Optional[datetime]): + @tableau_datasource_field_data_category.setter + def tableau_datasource_field_data_category( + self, tableau_datasource_field_data_category: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_last_accessed_at = source_last_accessed_at + self.attributes.tableau_datasource_field_data_category = ( + tableau_datasource_field_data_category + ) @property - def source_last_viewed_at(self) -> Optional[datetime]: + def tableau_datasource_field_role(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.source_last_viewed_at + None + if self.attributes is None + else self.attributes.tableau_datasource_field_role ) - @source_last_viewed_at.setter - def source_last_viewed_at(self, source_last_viewed_at: Optional[datetime]): + @tableau_datasource_field_role.setter + def tableau_datasource_field_role( + self, tableau_datasource_field_role: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_last_viewed_at = source_last_viewed_at + self.attributes.tableau_datasource_field_role = tableau_datasource_field_role @property - def tiles(self) -> Optional[list[LookerTile]]: - return None if self.attributes is None else self.attributes.tiles + def tableau_datasource_field_data_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.tableau_datasource_field_data_type + ) - @tiles.setter - def tiles(self, tiles: Optional[list[LookerTile]]): + @tableau_datasource_field_data_type.setter + def tableau_datasource_field_data_type( + self, tableau_datasource_field_data_type: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.tiles = tiles + self.attributes.tableau_datasource_field_data_type = ( + tableau_datasource_field_data_type + ) @property - def looks(self) -> Optional[list[LookerLook]]: - return None if self.attributes is None else self.attributes.looks + def upstream_tables(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.upstream_tables - @looks.setter - def looks(self, looks: Optional[list[LookerLook]]): + @upstream_tables.setter + def upstream_tables(self, upstream_tables: Optional[list[dict[str, str]]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.looks = looks + self.attributes.upstream_tables = upstream_tables @property - def folder(self) -> Optional[LookerFolder]: - return None if self.attributes is None else self.attributes.folder + def tableau_datasource_field_formula(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.tableau_datasource_field_formula + ) - @folder.setter - def folder(self, folder: Optional[LookerFolder]): + @tableau_datasource_field_formula.setter + def tableau_datasource_field_formula( + self, tableau_datasource_field_formula: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.folder = folder + self.attributes.tableau_datasource_field_formula = ( + tableau_datasource_field_formula + ) - class Attributes(Looker.Attributes): - folder_name: Optional[str] = Field(None, description="", 
alias="folderName") - source_user_id: Optional[int] = Field( - None, description="", alias="sourceUserId" + @property + def tableau_datasource_field_bin_size(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.tableau_datasource_field_bin_size ) - source_view_count: Optional[int] = Field( - None, description="", alias="sourceViewCount" + + @tableau_datasource_field_bin_size.setter + def tableau_datasource_field_bin_size( + self, tableau_datasource_field_bin_size: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tableau_datasource_field_bin_size = ( + tableau_datasource_field_bin_size ) - source_metadata_id: Optional[int] = Field( - None, description="", alias="sourceMetadataId" + + @property + def upstream_columns(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.upstream_columns + + @upstream_columns.setter + def upstream_columns(self, upstream_columns: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.upstream_columns = upstream_columns + + @property + def upstream_fields(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.upstream_fields + + @upstream_fields.setter + def upstream_fields(self, upstream_fields: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.upstream_fields = upstream_fields + + @property + def datasource_field_type(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.datasource_field_type ) - sourcelast_updater_id: Optional[int] = Field( - None, description="", alias="sourcelastUpdaterId" + + @datasource_field_type.setter + def datasource_field_type(self, datasource_field_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasource_field_type = datasource_field_type + + @property + def worksheets(self) -> Optional[list[TableauWorksheet]]: + return None if self.attributes is None else self.attributes.worksheets + + @worksheets.setter + def worksheets(self, worksheets: Optional[list[TableauWorksheet]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.worksheets = worksheets + + @property + def datasource(self) -> Optional[TableauDatasource]: + return None if self.attributes is None else self.attributes.datasource + + @datasource.setter + def datasource(self, datasource: Optional[TableauDatasource]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasource = datasource + + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field( + None, description="", alias="siteQualifiedName" ) - source_last_accessed_at: Optional[datetime] = Field( - None, description="", alias="sourceLastAccessedAt" + project_qualified_name: Optional[str] = Field( + None, description="", alias="projectQualifiedName" ) - source_last_viewed_at: Optional[datetime] = Field( - None, description="", alias="sourceLastViewedAt" + top_level_project_qualified_name: Optional[str] = Field( + None, description="", alias="topLevelProjectQualifiedName" ) - tiles: Optional[list[LookerTile]] = Field( - None, description="", alias="tiles" - ) # relationship - looks: Optional[list[LookerLook]] = Field( - None, description="", alias="looks" + workbook_qualified_name: Optional[str] = Field( + 
None, description="", alias="workbookQualifiedName" + ) + datasource_qualified_name: Optional[str] = Field( + None, description="", alias="datasourceQualifiedName" + ) + project_hierarchy: Optional[list[dict[str, str]]] = Field( + None, description="", alias="projectHierarchy" + ) + fully_qualified_name: Optional[str] = Field( + None, description="", alias="fullyQualifiedName" + ) + tableau_datasource_field_data_category: Optional[str] = Field( + None, description="", alias="tableauDatasourceFieldDataCategory" + ) + tableau_datasource_field_role: Optional[str] = Field( + None, description="", alias="tableauDatasourceFieldRole" + ) + tableau_datasource_field_data_type: Optional[str] = Field( + None, description="", alias="tableauDatasourceFieldDataType" + ) + upstream_tables: Optional[list[dict[str, str]]] = Field( + None, description="", alias="upstreamTables" + ) + tableau_datasource_field_formula: Optional[str] = Field( + None, description="", alias="tableauDatasourceFieldFormula" + ) + tableau_datasource_field_bin_size: Optional[str] = Field( + None, description="", alias="tableauDatasourceFieldBinSize" + ) + upstream_columns: Optional[list[dict[str, str]]] = Field( + None, description="", alias="upstreamColumns" + ) + upstream_fields: Optional[list[dict[str, str]]] = Field( + None, description="", alias="upstreamFields" + ) + datasource_field_type: Optional[str] = Field( + None, description="", alias="datasourceFieldType" + ) + worksheets: Optional[list[TableauWorksheet]] = Field( + None, description="", alias="worksheets" ) # relationship - folder: Optional[LookerFolder] = Field( - None, description="", alias="folder" + datasource: Optional[TableauDatasource] = Field( + None, description="", alias="datasource" ) # relationship - attributes: "LookerDashboard.Attributes" = Field( - default_factory=lambda: LookerDashboard.Attributes(), + attributes: "TableauDatasourceField.Attributes" = Field( + default_factory=lambda: TableauDatasourceField.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class LookerFolder(Looker): +class TableauCalculatedField(Tableau): """Description""" - type_name: str = Field("LookerFolder", allow_mutation=False) + type_name: str = Field("TableauCalculatedField", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "LookerFolder": - raise ValueError("must be LookerFolder") + if v != "TableauCalculatedField": + raise ValueError("must be TableauCalculatedField") return v def __setattr__(self, name, value): - if name in LookerFolder._convenience_properties: + if name in TableauCalculatedField._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - SOURCE_CONTENT_METADATA_ID: ClassVar[NumericField] = NumericField( - "sourceContentMetadataId", "sourceContentMetadataId" + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" ) """ - TBC + Unique name of the site in which this calculated field exists. """ - SOURCE_CREATOR_ID: ClassVar[NumericField] = NumericField( - "sourceCreatorId", "sourceCreatorId" + PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "projectQualifiedName", "projectQualifiedName" ) """ - TBC + Unique name of the project in which this calculated field exists. 
""" - SOURCE_CHILD_COUNT: ClassVar[NumericField] = NumericField( - "sourceChildCount", "sourceChildCount" + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" ) """ - TBC + Unique name of the top-level project in which this calculated field exists. """ - SOURCE_PARENT_ID: ClassVar[NumericField] = NumericField( - "sourceParentID", "sourceParentID" + WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workbookQualifiedName", "workbookQualifiedName" ) """ - TBC + Unique name of the workbook in which this calculated field exists. """ - - LOOKER_SUB_FOLDERS: ClassVar[RelationField] = RelationField("lookerSubFolders") + DATASOURCE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "datasourceQualifiedName", "datasourceQualifiedName" + ) """ - TBC + Unique name of the datasource in which this calculated field exists. """ - DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" + ) """ - TBC + List of top-level projects and their nested projects. + """ + DATA_CATEGORY: ClassVar[KeywordField] = KeywordField("dataCategory", "dataCategory") + """ + Data category of this field. + """ + ROLE: ClassVar[KeywordField] = KeywordField("role", "role") + """ + Role of this field, for example: 'dimension', 'measure', or 'unknown'. + """ + TABLEAU_DATA_TYPE: ClassVar[KeywordTextField] = KeywordTextField( + "tableauDataType", "tableauDataType", "tableauDataType.text" + ) + """ + Data type of the field, from Tableau. + """ + FORMULA: ClassVar[KeywordField] = KeywordField("formula", "formula") + """ + Formula for this calculated field. + """ + UPSTREAM_FIELDS: ClassVar[KeywordField] = KeywordField( + "upstreamFields", "upstreamFields" + ) """ - LOOKS: ClassVar[RelationField] = RelationField("looks") + List of fields that are upstream to this calculated field. 
+ """ + + WORKSHEETS: ClassVar[RelationField] = RelationField("worksheets") """ TBC """ - LOOKER_PARENT_FOLDER: ClassVar[RelationField] = RelationField("lookerParentFolder") + DATASOURCE: ClassVar[RelationField] = RelationField("datasource") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "source_content_metadata_id", - "source_creator_id", - "source_child_count", - "source_parent_i_d", - "looker_sub_folders", - "dashboards", - "looks", - "looker_parent_folder", + "site_qualified_name", + "project_qualified_name", + "top_level_project_qualified_name", + "workbook_qualified_name", + "datasource_qualified_name", + "project_hierarchy", + "data_category", + "role", + "tableau_data_type", + "formula", + "upstream_fields", + "worksheets", + "datasource", ] @property - def source_content_metadata_id(self) -> Optional[int]: + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name + + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.site_qualified_name = site_qualified_name + + @property + def project_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.project_qualified_name + ) + + @project_qualified_name.setter + def project_qualified_name(self, project_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_qualified_name = project_qualified_name + + @property + def top_level_project_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.top_level_project_qualified_name + ) + + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.top_level_project_qualified_name = ( + top_level_project_qualified_name + ) + + @property + def workbook_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.workbook_qualified_name + ) + + @workbook_qualified_name.setter + def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workbook_qualified_name = workbook_qualified_name + + @property + def datasource_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.source_content_metadata_id + else self.attributes.datasource_qualified_name ) - @source_content_metadata_id.setter - def source_content_metadata_id(self, source_content_metadata_id: Optional[int]): + @datasource_qualified_name.setter + def datasource_qualified_name(self, datasource_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_content_metadata_id = source_content_metadata_id + self.attributes.datasource_qualified_name = datasource_qualified_name @property - def source_creator_id(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_creator_id + def project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.project_hierarchy - @source_creator_id.setter - def source_creator_id(self, source_creator_id: Optional[int]): + 
@project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_creator_id = source_creator_id + self.attributes.project_hierarchy = project_hierarchy @property - def source_child_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_child_count + def data_category(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.data_category - @source_child_count.setter - def source_child_count(self, source_child_count: Optional[int]): + @data_category.setter + def data_category(self, data_category: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_child_count = source_child_count + self.attributes.data_category = data_category @property - def source_parent_i_d(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_parent_i_d + def role(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.role - @source_parent_i_d.setter - def source_parent_i_d(self, source_parent_i_d: Optional[int]): + @role.setter + def role(self, role: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_parent_i_d = source_parent_i_d + self.attributes.role = role @property - def looker_sub_folders(self) -> Optional[list[LookerFolder]]: - return None if self.attributes is None else self.attributes.looker_sub_folders + def tableau_data_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.tableau_data_type - @looker_sub_folders.setter - def looker_sub_folders(self, looker_sub_folders: Optional[list[LookerFolder]]): + @tableau_data_type.setter + def tableau_data_type(self, tableau_data_type: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.looker_sub_folders = looker_sub_folders + self.attributes.tableau_data_type = tableau_data_type @property - def dashboards(self) -> Optional[list[LookerDashboard]]: - return None if self.attributes is None else self.attributes.dashboards + def formula(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.formula - @dashboards.setter - def dashboards(self, dashboards: Optional[list[LookerDashboard]]): + @formula.setter + def formula(self, formula: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dashboards = dashboards + self.attributes.formula = formula + + @property + def upstream_fields(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.upstream_fields + + @upstream_fields.setter + def upstream_fields(self, upstream_fields: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.upstream_fields = upstream_fields @property - def looks(self) -> Optional[list[LookerLook]]: - return None if self.attributes is None else self.attributes.looks + def worksheets(self) -> Optional[list[TableauWorksheet]]: + return None if self.attributes is None else self.attributes.worksheets - @looks.setter - def looks(self, looks: Optional[list[LookerLook]]): + @worksheets.setter + def worksheets(self, worksheets: Optional[list[TableauWorksheet]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.looks = looks + 
self.attributes.worksheets = worksheets @property - def looker_parent_folder(self) -> Optional[LookerFolder]: - return None if self.attributes is None else self.attributes.looker_parent_folder + def datasource(self) -> Optional[TableauDatasource]: + return None if self.attributes is None else self.attributes.datasource - @looker_parent_folder.setter - def looker_parent_folder(self, looker_parent_folder: Optional[LookerFolder]): + @datasource.setter + def datasource(self, datasource: Optional[TableauDatasource]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.looker_parent_folder = looker_parent_folder + self.attributes.datasource = datasource - class Attributes(Looker.Attributes): - source_content_metadata_id: Optional[int] = Field( - None, description="", alias="sourceContentMetadataId" + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field( + None, description="", alias="siteQualifiedName" ) - source_creator_id: Optional[int] = Field( - None, description="", alias="sourceCreatorId" + project_qualified_name: Optional[str] = Field( + None, description="", alias="projectQualifiedName" ) - source_child_count: Optional[int] = Field( - None, description="", alias="sourceChildCount" + top_level_project_qualified_name: Optional[str] = Field( + None, description="", alias="topLevelProjectQualifiedName" ) - source_parent_i_d: Optional[int] = Field( - None, description="", alias="sourceParentID" + workbook_qualified_name: Optional[str] = Field( + None, description="", alias="workbookQualifiedName" ) - looker_sub_folders: Optional[list[LookerFolder]] = Field( - None, description="", alias="lookerSubFolders" - ) # relationship - dashboards: Optional[list[LookerDashboard]] = Field( - None, description="", alias="dashboards" - ) # relationship - looks: Optional[list[LookerLook]] = Field( - None, description="", alias="looks" + datasource_qualified_name: Optional[str] = Field( + None, description="", alias="datasourceQualifiedName" + ) + project_hierarchy: Optional[list[dict[str, str]]] = Field( + None, description="", alias="projectHierarchy" + ) + data_category: Optional[str] = Field(None, description="", alias="dataCategory") + role: Optional[str] = Field(None, description="", alias="role") + tableau_data_type: Optional[str] = Field( + None, description="", alias="tableauDataType" + ) + formula: Optional[str] = Field(None, description="", alias="formula") + upstream_fields: Optional[list[dict[str, str]]] = Field( + None, description="", alias="upstreamFields" + ) + worksheets: Optional[list[TableauWorksheet]] = Field( + None, description="", alias="worksheets" ) # relationship - looker_parent_folder: Optional[LookerFolder] = Field( - None, description="", alias="lookerParentFolder" + datasource: Optional[TableauDatasource] = Field( + None, description="", alias="datasource" ) # relationship - attributes: "LookerFolder.Attributes" = Field( - default_factory=lambda: LookerFolder.Attributes(), + attributes: "TableauCalculatedField.Attributes" = Field( + default_factory=lambda: TableauCalculatedField.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class LookerTile(Looker): +class TableauProject(Tableau): """Description""" - type_name: str = Field("LookerTile", allow_mutation=False) + type_name: str = Field("TableauProject", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "LookerTile": - raise ValueError("must be LookerTile") + if v != "TableauProject": + raise ValueError("must be TableauProject") return v def __setattr__(self, name, value): - if name in LookerTile._convenience_properties: + if name in TableauProject._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - LOOKML_LINK_ID: ClassVar[KeywordField] = KeywordField( - "lookmlLinkId", "lookmlLinkId" + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" ) """ - TBC + Unique name of the site in which this project exists. """ - MERGE_RESULT_ID: ClassVar[KeywordField] = KeywordField( - "mergeResultId", "mergeResultId" + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" ) """ - TBC + Unique name of the top-level project in which this project exists, if this is a nested project. """ - NOTE_TEXT: ClassVar[KeywordField] = KeywordField("noteText", "noteText") + IS_TOP_LEVEL_PROJECT: ClassVar[BooleanField] = BooleanField( + "isTopLevelProject", "isTopLevelProject" + ) """ - TBC + Whether this project is a top-level project (true) or not (false). """ - QUERY_ID: ClassVar[NumericField] = NumericField("queryID", "queryID") + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" + ) """ - TBC + List of top-level projects with their nested child projects. 
""" - RESULT_MAKER_ID: ClassVar[NumericField] = NumericField( - "resultMakerID", "resultMakerID" - ) + + PARENT_PROJECT: ClassVar[RelationField] = RelationField("parentProject") """ TBC """ - SUBTITLE_TEXT: ClassVar[KeywordField] = KeywordField("subtitleText", "subtitleText") + WORKBOOKS: ClassVar[RelationField] = RelationField("workbooks") """ TBC """ - LOOK_ID: ClassVar[NumericField] = NumericField("lookId", "lookId") + SITE: ClassVar[RelationField] = RelationField("site") """ TBC """ - - QUERY: ClassVar[RelationField] = RelationField("query") + DATASOURCES: ClassVar[RelationField] = RelationField("datasources") """ TBC """ - LOOK: ClassVar[RelationField] = RelationField("look") + FLOWS: ClassVar[RelationField] = RelationField("flows") """ TBC """ - DASHBOARD: ClassVar[RelationField] = RelationField("dashboard") + CHILD_PROJECTS: ClassVar[RelationField] = RelationField("childProjects") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "lookml_link_id", - "merge_result_id", - "note_text", - "query_i_d", - "result_maker_i_d", - "subtitle_text", - "look_id", - "query", - "look", - "dashboard", + "site_qualified_name", + "top_level_project_qualified_name", + "is_top_level_project", + "project_hierarchy", + "parent_project", + "workbooks", + "site", + "datasources", + "flows", + "child_projects", ] @property - def lookml_link_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.lookml_link_id + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name - @lookml_link_id.setter - def lookml_link_id(self, lookml_link_id: Optional[str]): + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.lookml_link_id = lookml_link_id + self.attributes.site_qualified_name = site_qualified_name @property - def merge_result_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.merge_result_id + def top_level_project_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.top_level_project_qualified_name + ) - @merge_result_id.setter - def merge_result_id(self, merge_result_id: Optional[str]): + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.merge_result_id = merge_result_id + self.attributes.top_level_project_qualified_name = ( + top_level_project_qualified_name + ) @property - def note_text(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.note_text + def is_top_level_project(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_top_level_project - @note_text.setter - def note_text(self, note_text: Optional[str]): + @is_top_level_project.setter + def is_top_level_project(self, is_top_level_project: Optional[bool]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.note_text = note_text + self.attributes.is_top_level_project = is_top_level_project @property - def query_i_d(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_i_d + def project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else 
self.attributes.project_hierarchy - @query_i_d.setter - def query_i_d(self, query_i_d: Optional[int]): + @project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.query_i_d = query_i_d + self.attributes.project_hierarchy = project_hierarchy @property - def result_maker_i_d(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.result_maker_i_d + def parent_project(self) -> Optional[TableauProject]: + return None if self.attributes is None else self.attributes.parent_project - @result_maker_i_d.setter - def result_maker_i_d(self, result_maker_i_d: Optional[int]): + @parent_project.setter + def parent_project(self, parent_project: Optional[TableauProject]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.result_maker_i_d = result_maker_i_d + self.attributes.parent_project = parent_project @property - def subtitle_text(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.subtitle_text + def workbooks(self) -> Optional[list[TableauWorkbook]]: + return None if self.attributes is None else self.attributes.workbooks - @subtitle_text.setter - def subtitle_text(self, subtitle_text: Optional[str]): + @workbooks.setter + def workbooks(self, workbooks: Optional[list[TableauWorkbook]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.subtitle_text = subtitle_text + self.attributes.workbooks = workbooks @property - def look_id(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.look_id + def site(self) -> Optional[TableauSite]: + return None if self.attributes is None else self.attributes.site - @look_id.setter - def look_id(self, look_id: Optional[int]): + @site.setter + def site(self, site: Optional[TableauSite]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.look_id = look_id + self.attributes.site = site @property - def query(self) -> Optional[LookerQuery]: - return None if self.attributes is None else self.attributes.query + def datasources(self) -> Optional[list[TableauDatasource]]: + return None if self.attributes is None else self.attributes.datasources - @query.setter - def query(self, query: Optional[LookerQuery]): + @datasources.setter + def datasources(self, datasources: Optional[list[TableauDatasource]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.query = query + self.attributes.datasources = datasources @property - def look(self) -> Optional[LookerLook]: - return None if self.attributes is None else self.attributes.look + def flows(self) -> Optional[list[TableauFlow]]: + return None if self.attributes is None else self.attributes.flows - @look.setter - def look(self, look: Optional[LookerLook]): + @flows.setter + def flows(self, flows: Optional[list[TableauFlow]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.look = look + self.attributes.flows = flows @property - def dashboard(self) -> Optional[LookerDashboard]: - return None if self.attributes is None else self.attributes.dashboard + def child_projects(self) -> Optional[list[TableauProject]]: + return None if self.attributes is None else self.attributes.child_projects - @dashboard.setter - def dashboard(self, dashboard: Optional[LookerDashboard]): + @child_projects.setter + def child_projects(self, child_projects: 
Optional[list[TableauProject]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dashboard = dashboard + self.attributes.child_projects = child_projects - class Attributes(Looker.Attributes): - lookml_link_id: Optional[str] = Field( - None, description="", alias="lookmlLinkId" + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field( + None, description="", alias="siteQualifiedName" ) - merge_result_id: Optional[str] = Field( - None, description="", alias="mergeResultId" + top_level_project_qualified_name: Optional[str] = Field( + None, description="", alias="topLevelProjectQualifiedName" ) - note_text: Optional[str] = Field(None, description="", alias="noteText") - query_i_d: Optional[int] = Field(None, description="", alias="queryID") - result_maker_i_d: Optional[int] = Field( - None, description="", alias="resultMakerID" + is_top_level_project: Optional[bool] = Field( + None, description="", alias="isTopLevelProject" ) - subtitle_text: Optional[str] = Field(None, description="", alias="subtitleText") - look_id: Optional[int] = Field(None, description="", alias="lookId") - query: Optional[LookerQuery] = Field( - None, description="", alias="query" + project_hierarchy: Optional[list[dict[str, str]]] = Field( + None, description="", alias="projectHierarchy" + ) + parent_project: Optional[TableauProject] = Field( + None, description="", alias="parentProject" + ) # relationship + workbooks: Optional[list[TableauWorkbook]] = Field( + None, description="", alias="workbooks" + ) # relationship + site: Optional[TableauSite] = Field( + None, description="", alias="site" ) # relationship - look: Optional[LookerLook] = Field( - None, description="", alias="look" + datasources: Optional[list[TableauDatasource]] = Field( + None, description="", alias="datasources" ) # relationship - dashboard: Optional[LookerDashboard] = Field( - None, description="", alias="dashboard" + flows: Optional[list[TableauFlow]] = Field( + None, description="", alias="flows" + ) # relationship + child_projects: Optional[list[TableauProject]] = Field( + None, description="", alias="childProjects" ) # relationship - attributes: "LookerTile.Attributes" = Field( - default_factory=lambda: LookerTile.Attributes(), + attributes: "TableauProject.Attributes" = Field( + default_factory=lambda: TableauProject.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class LookerModel(Looker): +class TableauSite(Tableau): """Description""" - type_name: str = Field("LookerModel", allow_mutation=False) + type_name: str = Field("TableauSite", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "LookerModel": - raise ValueError("must be LookerModel") + if v != "TableauSite": + raise ValueError("must be TableauSite") return v def __setattr__(self, name, value): - if name in LookerModel._convenience_properties: + if name in TableauSite._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - PROJECT_NAME: ClassVar[KeywordField] = KeywordField("projectName", "projectName") - """ - TBC - """ - - EXPLORES: ClassVar[RelationField] = RelationField("explores") - """ - TBC - """ - PROJECT: ClassVar[RelationField] = RelationField("project") - """ - TBC - """ - LOOK: ClassVar[RelationField] = RelationField("look") - """ - TBC - """ - QUERIES: ClassVar[RelationField] = RelationField("queries") - """ - TBC - """ - FIELDS: ClassVar[RelationField] = RelationField("fields") + PROJECTS: ClassVar[RelationField] = RelationField("projects") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "project_name", - "explores", - "project", - "look", - "queries", - "fields", + "projects", ] @property - def project_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.project_name - - @project_name.setter - def project_name(self, project_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_name = project_name - - @property - def explores(self) -> Optional[list[LookerExplore]]: - return None if self.attributes is None else self.attributes.explores - - @explores.setter - def explores(self, explores: Optional[list[LookerExplore]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.explores = explores - - @property - def project(self) -> Optional[LookerProject]: - return None if self.attributes is None else self.attributes.project - - @project.setter - def project(self, project: Optional[LookerProject]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project = project - - @property - def look(self) -> Optional[LookerLook]: - return None if self.attributes is None else self.attributes.look + def projects(self) -> Optional[list[TableauProject]]: + return None if self.attributes is None else self.attributes.projects - @look.setter - def look(self, look: Optional[LookerLook]): + @projects.setter + def projects(self, projects: Optional[list[TableauProject]]): if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.look = look - - @property - def queries(self) -> Optional[list[LookerQuery]]: - return None if self.attributes is None else self.attributes.queries - - @queries.setter - def queries(self, queries: Optional[list[LookerQuery]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.queries = queries - - @property - def fields(self) -> Optional[list[LookerField]]: - return None if self.attributes is None else self.attributes.fields - - @fields.setter - def fields(self, fields: Optional[list[LookerField]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.fields = fields - - class Attributes(Looker.Attributes): - 
project_name: Optional[str] = Field(None, description="", alias="projectName") - explores: Optional[list[LookerExplore]] = Field( - None, description="", alias="explores" - ) # relationship - project: Optional[LookerProject] = Field( - None, description="", alias="project" - ) # relationship - look: Optional[LookerLook] = Field( - None, description="", alias="look" - ) # relationship - queries: Optional[list[LookerQuery]] = Field( - None, description="", alias="queries" - ) # relationship - fields: Optional[list[LookerField]] = Field( - None, description="", alias="fields" + self.attributes = self.Attributes() + self.attributes.projects = projects + + class Attributes(Tableau.Attributes): + projects: Optional[list[TableauProject]] = Field( + None, description="", alias="projects" ) # relationship - attributes: "LookerModel.Attributes" = Field( - default_factory=lambda: LookerModel.Attributes(), + attributes: "TableauSite.Attributes" = Field( + default_factory=lambda: TableauSite.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class LookerExplore(Looker): +class TableauDatasource(Tableau): """Description""" - type_name: str = Field("LookerExplore", allow_mutation=False) + type_name: str = Field("TableauDatasource", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "LookerExplore": - raise ValueError("must be LookerExplore") + if v != "TableauDatasource": + raise ValueError("must be TableauDatasource") return v def __setattr__(self, name, value): - if name in LookerExplore._convenience_properties: + if name in TableauDatasource._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - PROJECT_NAME: ClassVar[KeywordField] = KeywordField("projectName", "projectName") + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" + ) """ - TBC + Unique name of the site in which this datasource exists. """ - MODEL_NAME: ClassVar[KeywordField] = KeywordField("modelName", "modelName") + PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "projectQualifiedName", "projectQualifiedName" + ) """ - TBC + Unique name of the project in which this datasource exists. """ - SOURCE_CONNECTION_NAME: ClassVar[KeywordField] = KeywordField( - "sourceConnectionName", "sourceConnectionName" + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" ) """ - TBC + Unique name of the top-level project in which this datasource exists. """ - VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "viewName", "viewName.keyword", "viewName" + WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workbookQualifiedName", "workbookQualifiedName" ) """ - TBC + Unique name of the workbook in which this datasource exists. """ - SQL_TABLE_NAME: ClassVar[KeywordField] = KeywordField( - "sqlTableName", "sqlTableName" + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" ) """ - TBC + List of top-level projects with their nested child projects. + """ + IS_PUBLISHED: ClassVar[BooleanField] = BooleanField("isPublished", "isPublished") + """ + Whether this datasource is published (true) or embedded (false). 
+ """ + HAS_EXTRACTS: ClassVar[BooleanField] = BooleanField("hasExtracts", "hasExtracts") + """ + Whether this datasource has extracts (true) or not (false). + """ + IS_CERTIFIED: ClassVar[BooleanField] = BooleanField("isCertified", "isCertified") + """ + Whether this datasource is certified in Tableau (true) or not (false). + """ + CERTIFIER: ClassVar[KeywordField] = KeywordField("certifier", "certifier") + """ + Users that have marked this datasource as cerified, in Tableau. + """ + CERTIFICATION_NOTE: ClassVar[KeywordField] = KeywordField( + "certificationNote", "certificationNote" + ) + """ + Notes related to this datasource being cerfified, in Tableau. + """ + CERTIFIER_DISPLAY_NAME: ClassVar[KeywordField] = KeywordField( + "certifierDisplayName", "certifierDisplayName" + ) + """ + Name of the user who cerified this datasource, in Tableau. + """ + UPSTREAM_TABLES: ClassVar[KeywordField] = KeywordField( + "upstreamTables", "upstreamTables" + ) + """ + List of tables that are upstream of this datasource. + """ + UPSTREAM_DATASOURCES: ClassVar[KeywordField] = KeywordField( + "upstreamDatasources", "upstreamDatasources" + ) + """ + List of datasources that are upstream of this datasource. """ - PROJECT: ClassVar[RelationField] = RelationField("project") + WORKBOOK: ClassVar[RelationField] = RelationField("workbook") """ TBC """ - MODEL: ClassVar[RelationField] = RelationField("model") + PROJECT: ClassVar[RelationField] = RelationField("project") """ TBC """ @@ -1134,795 +1340,861 @@ def __setattr__(self, name, value): """ _convenience_properties: ClassVar[list[str]] = [ - "project_name", - "model_name", - "source_connection_name", - "view_name", - "sql_table_name", + "site_qualified_name", + "project_qualified_name", + "top_level_project_qualified_name", + "workbook_qualified_name", + "project_hierarchy", + "is_published", + "has_extracts", + "is_certified", + "certifier", + "certification_note", + "certifier_display_name", + "upstream_tables", + "upstream_datasources", + "workbook", "project", - "model", "fields", ] @property - def project_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.project_name + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name - @project_name.setter - def project_name(self, project_name: Optional[str]): + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.project_name = project_name + self.attributes.site_qualified_name = site_qualified_name @property - def model_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.model_name + def project_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.project_qualified_name + ) - @model_name.setter - def model_name(self, model_name: Optional[str]): + @project_qualified_name.setter + def project_qualified_name(self, project_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.model_name = model_name + self.attributes.project_qualified_name = project_qualified_name @property - def source_connection_name(self) -> Optional[str]: + def top_level_project_qualified_name(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.source_connection_name + None + if self.attributes is None + else 
self.attributes.top_level_project_qualified_name ) - @source_connection_name.setter - def source_connection_name(self, source_connection_name: Optional[str]): + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_connection_name = source_connection_name + self.attributes.top_level_project_qualified_name = ( + top_level_project_qualified_name + ) @property - def view_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.view_name + def workbook_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.workbook_qualified_name + ) - @view_name.setter - def view_name(self, view_name: Optional[str]): + @workbook_qualified_name.setter + def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.view_name = view_name + self.attributes.workbook_qualified_name = workbook_qualified_name @property - def sql_table_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.sql_table_name + def project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.project_hierarchy - @sql_table_name.setter - def sql_table_name(self, sql_table_name: Optional[str]): + @project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sql_table_name = sql_table_name + self.attributes.project_hierarchy = project_hierarchy @property - def project(self) -> Optional[LookerProject]: - return None if self.attributes is None else self.attributes.project + def is_published(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_published - @project.setter - def project(self, project: Optional[LookerProject]): + @is_published.setter + def is_published(self, is_published: Optional[bool]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.project = project + self.attributes.is_published = is_published @property - def model(self) -> Optional[LookerModel]: - return None if self.attributes is None else self.attributes.model + def has_extracts(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.has_extracts - @model.setter - def model(self, model: Optional[LookerModel]): + @has_extracts.setter + def has_extracts(self, has_extracts: Optional[bool]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.model = model + self.attributes.has_extracts = has_extracts @property - def fields(self) -> Optional[list[LookerField]]: - return None if self.attributes is None else self.attributes.fields + def is_certified(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_certified - @fields.setter - def fields(self, fields: Optional[list[LookerField]]): + @is_certified.setter + def is_certified(self, is_certified: Optional[bool]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.fields = fields - - class Attributes(Looker.Attributes): - project_name: Optional[str] = Field(None, description="", alias="projectName") - model_name: Optional[str] = Field(None, description="", 
alias="modelName") - source_connection_name: Optional[str] = Field( - None, description="", alias="sourceConnectionName" - ) - view_name: Optional[str] = Field(None, description="", alias="viewName") - sql_table_name: Optional[str] = Field( - None, description="", alias="sqlTableName" - ) - project: Optional[LookerProject] = Field( - None, description="", alias="project" - ) # relationship - model: Optional[LookerModel] = Field( - None, description="", alias="model" - ) # relationship - fields: Optional[list[LookerField]] = Field( - None, description="", alias="fields" - ) # relationship + self.attributes.is_certified = is_certified - attributes: "LookerExplore.Attributes" = Field( - default_factory=lambda: LookerExplore.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) + @property + def certifier(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.certifier + @certifier.setter + def certifier(self, certifier: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.certifier = certifier -class LookerProject(Looker): - """Description""" + @property + def certification_note(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.certification_note - type_name: str = Field("LookerProject", allow_mutation=False) + @certification_note.setter + def certification_note(self, certification_note: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.certification_note = certification_note - @validator("type_name") - def validate_type_name(cls, v): - if v != "LookerProject": - raise ValueError("must be LookerProject") - return v + @property + def certifier_display_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.certifier_display_name + ) - def __setattr__(self, name, value): - if name in LookerProject._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) + @certifier_display_name.setter + def certifier_display_name(self, certifier_display_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.certifier_display_name = certifier_display_name - MODELS: ClassVar[RelationField] = RelationField("models") - """ - TBC - """ - EXPLORES: ClassVar[RelationField] = RelationField("explores") - """ - TBC - """ - FIELDS: ClassVar[RelationField] = RelationField("fields") - """ - TBC - """ - VIEWS: ClassVar[RelationField] = RelationField("views") - """ - TBC - """ + @property + def upstream_tables(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.upstream_tables - _convenience_properties: ClassVar[list[str]] = [ - "models", - "explores", - "fields", - "views", - ] + @upstream_tables.setter + def upstream_tables(self, upstream_tables: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.upstream_tables = upstream_tables @property - def models(self) -> Optional[list[LookerModel]]: - return None if self.attributes is None else self.attributes.models + def upstream_datasources(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.upstream_datasources - @models.setter - def models(self, 
models: Optional[list[LookerModel]]): + @upstream_datasources.setter + def upstream_datasources( + self, upstream_datasources: Optional[list[dict[str, str]]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.models = models + self.attributes.upstream_datasources = upstream_datasources @property - def explores(self) -> Optional[list[LookerExplore]]: - return None if self.attributes is None else self.attributes.explores + def workbook(self) -> Optional[TableauWorkbook]: + return None if self.attributes is None else self.attributes.workbook - @explores.setter - def explores(self, explores: Optional[list[LookerExplore]]): + @workbook.setter + def workbook(self, workbook: Optional[TableauWorkbook]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.explores = explores + self.attributes.workbook = workbook @property - def fields(self) -> Optional[list[LookerField]]: - return None if self.attributes is None else self.attributes.fields + def project(self) -> Optional[TableauProject]: + return None if self.attributes is None else self.attributes.project - @fields.setter - def fields(self, fields: Optional[list[LookerField]]): + @project.setter + def project(self, project: Optional[TableauProject]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.fields = fields + self.attributes.project = project @property - def views(self) -> Optional[list[LookerView]]: - return None if self.attributes is None else self.attributes.views + def fields(self) -> Optional[list[TableauDatasourceField]]: + return None if self.attributes is None else self.attributes.fields - @views.setter - def views(self, views: Optional[list[LookerView]]): + @fields.setter + def fields(self, fields: Optional[list[TableauDatasourceField]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.views = views + self.attributes.fields = fields - class Attributes(Looker.Attributes): - models: Optional[list[LookerModel]] = Field( - None, description="", alias="models" + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field( + None, description="", alias="siteQualifiedName" + ) + project_qualified_name: Optional[str] = Field( + None, description="", alias="projectQualifiedName" + ) + top_level_project_qualified_name: Optional[str] = Field( + None, description="", alias="topLevelProjectQualifiedName" + ) + workbook_qualified_name: Optional[str] = Field( + None, description="", alias="workbookQualifiedName" + ) + project_hierarchy: Optional[list[dict[str, str]]] = Field( + None, description="", alias="projectHierarchy" + ) + is_published: Optional[bool] = Field(None, description="", alias="isPublished") + has_extracts: Optional[bool] = Field(None, description="", alias="hasExtracts") + is_certified: Optional[bool] = Field(None, description="", alias="isCertified") + certifier: Optional[dict[str, str]] = Field( + None, description="", alias="certifier" + ) + certification_note: Optional[str] = Field( + None, description="", alias="certificationNote" + ) + certifier_display_name: Optional[str] = Field( + None, description="", alias="certifierDisplayName" + ) + upstream_tables: Optional[list[dict[str, str]]] = Field( + None, description="", alias="upstreamTables" + ) + upstream_datasources: Optional[list[dict[str, str]]] = Field( + None, description="", alias="upstreamDatasources" + ) + workbook: Optional[TableauWorkbook] = Field( + None, description="", alias="workbook" ) # relationship - 
explores: Optional[list[LookerExplore]] = Field( - None, description="", alias="explores" + project: Optional[TableauProject] = Field( + None, description="", alias="project" ) # relationship - fields: Optional[list[LookerField]] = Field( + fields: Optional[list[TableauDatasourceField]] = Field( None, description="", alias="fields" ) # relationship - views: Optional[list[LookerView]] = Field( - None, description="", alias="views" - ) # relationship - attributes: "LookerProject.Attributes" = Field( - default_factory=lambda: LookerProject.Attributes(), + attributes: "TableauDatasource.Attributes" = Field( + default_factory=lambda: TableauDatasource.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class LookerQuery(Looker): +class TableauDashboard(Tableau): """Description""" - type_name: str = Field("LookerQuery", allow_mutation=False) + type_name: str = Field("TableauDashboard", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "LookerQuery": - raise ValueError("must be LookerQuery") + if v != "TableauDashboard": + raise ValueError("must be TableauDashboard") return v def __setattr__(self, name, value): - if name in LookerQuery._convenience_properties: + if name in TableauDashboard._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - SOURCE_DEFINITION: ClassVar[KeywordField] = KeywordField( - "sourceDefinition", "sourceDefinition" + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" ) """ - TBC + Unique name of the site in which this dashboard exists. """ - SOURCE_DEFINITION_DATABASE: ClassVar[KeywordField] = KeywordField( - "sourceDefinitionDatabase", "sourceDefinitionDatabase" + PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "projectQualifiedName", "projectQualifiedName" ) """ - TBC + Unique name of the project in which this dashboard exists. """ - SOURCE_DEFINITION_SCHEMA: ClassVar[KeywordField] = KeywordField( - "sourceDefinitionSchema", "sourceDefinitionSchema" + WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workbookQualifiedName", "workbookQualifiedName" ) """ - TBC + Unique name of the workbook in which this dashboard exists. """ - FIELDS: ClassVar[KeywordField] = KeywordField("fields", "fields") + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" + ) """ - TBC + Unique name of the top-level project in which this dashboard exists. """ - - TILES: ClassVar[RelationField] = RelationField("tiles") + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" + ) """ - TBC + List of top-level projects and their nested child projects. 
""" - LOOKS: ClassVar[RelationField] = RelationField("looks") + + WORKBOOK: ClassVar[RelationField] = RelationField("workbook") """ TBC """ - MODEL: ClassVar[RelationField] = RelationField("model") + WORKSHEETS: ClassVar[RelationField] = RelationField("worksheets") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "source_definition", - "source_definition_database", - "source_definition_schema", - "fields", - "tiles", - "looks", - "model", + "site_qualified_name", + "project_qualified_name", + "workbook_qualified_name", + "top_level_project_qualified_name", + "project_hierarchy", + "workbook", + "worksheets", ] @property - def source_definition(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.source_definition + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name - @source_definition.setter - def source_definition(self, source_definition: Optional[str]): + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_definition = source_definition + self.attributes.site_qualified_name = site_qualified_name @property - def source_definition_database(self) -> Optional[str]: + def project_qualified_name(self) -> Optional[str]: return ( - None - if self.attributes is None - else self.attributes.source_definition_database + None if self.attributes is None else self.attributes.project_qualified_name ) - @source_definition_database.setter - def source_definition_database(self, source_definition_database: Optional[str]): + @project_qualified_name.setter + def project_qualified_name(self, project_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_definition_database = source_definition_database + self.attributes.project_qualified_name = project_qualified_name @property - def source_definition_schema(self) -> Optional[str]: + def workbook_qualified_name(self) -> Optional[str]: return ( - None - if self.attributes is None - else self.attributes.source_definition_schema + None if self.attributes is None else self.attributes.workbook_qualified_name ) - @source_definition_schema.setter - def source_definition_schema(self, source_definition_schema: Optional[str]): + @workbook_qualified_name.setter + def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_definition_schema = source_definition_schema + self.attributes.workbook_qualified_name = workbook_qualified_name @property - def fields(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.fields + def top_level_project_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.top_level_project_qualified_name + ) - @fields.setter - def fields(self, fields: Optional[set[str]]): + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.fields = fields + self.attributes.top_level_project_qualified_name = ( + top_level_project_qualified_name + ) @property - def tiles(self) -> Optional[list[LookerTile]]: - return None if self.attributes is None else self.attributes.tiles + def 
project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.project_hierarchy - @tiles.setter - def tiles(self, tiles: Optional[list[LookerTile]]): + @project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.tiles = tiles + self.attributes.project_hierarchy = project_hierarchy @property - def looks(self) -> Optional[list[LookerLook]]: - return None if self.attributes is None else self.attributes.looks + def workbook(self) -> Optional[TableauWorkbook]: + return None if self.attributes is None else self.attributes.workbook - @looks.setter - def looks(self, looks: Optional[list[LookerLook]]): + @workbook.setter + def workbook(self, workbook: Optional[TableauWorkbook]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.looks = looks + self.attributes.workbook = workbook @property - def model(self) -> Optional[LookerModel]: - return None if self.attributes is None else self.attributes.model + def worksheets(self) -> Optional[list[TableauWorksheet]]: + return None if self.attributes is None else self.attributes.worksheets - @model.setter - def model(self, model: Optional[LookerModel]): + @worksheets.setter + def worksheets(self, worksheets: Optional[list[TableauWorksheet]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.model = model + self.attributes.worksheets = worksheets - class Attributes(Looker.Attributes): - source_definition: Optional[str] = Field( - None, description="", alias="sourceDefinition" + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field( + None, description="", alias="siteQualifiedName" ) - source_definition_database: Optional[str] = Field( - None, description="", alias="sourceDefinitionDatabase" + project_qualified_name: Optional[str] = Field( + None, description="", alias="projectQualifiedName" ) - source_definition_schema: Optional[str] = Field( - None, description="", alias="sourceDefinitionSchema" + workbook_qualified_name: Optional[str] = Field( + None, description="", alias="workbookQualifiedName" ) - fields: Optional[set[str]] = Field(None, description="", alias="fields") - tiles: Optional[list[LookerTile]] = Field( - None, description="", alias="tiles" - ) # relationship - looks: Optional[list[LookerLook]] = Field( - None, description="", alias="looks" + top_level_project_qualified_name: Optional[str] = Field( + None, description="", alias="topLevelProjectQualifiedName" + ) + project_hierarchy: Optional[list[dict[str, str]]] = Field( + None, description="", alias="projectHierarchy" + ) + workbook: Optional[TableauWorkbook] = Field( + None, description="", alias="workbook" ) # relationship - model: Optional[LookerModel] = Field( - None, description="", alias="model" + worksheets: Optional[list[TableauWorksheet]] = Field( + None, description="", alias="worksheets" ) # relationship - attributes: "LookerQuery.Attributes" = Field( - default_factory=lambda: LookerQuery.Attributes(), + attributes: "TableauDashboard.Attributes" = Field( + default_factory=lambda: TableauDashboard.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class LookerField(Looker): +class TableauFlow(Tableau): """Description""" - type_name: str = Field("LookerField", allow_mutation=False) + type_name: str = Field("TableauFlow", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "LookerField": - raise ValueError("must be LookerField") + if v != "TableauFlow": + raise ValueError("must be TableauFlow") return v def __setattr__(self, name, value): - if name in LookerField._convenience_properties: + if name in TableauFlow._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - PROJECT_NAME: ClassVar[KeywordField] = KeywordField("projectName", "projectName") - """ - TBC - """ - LOOKER_EXPLORE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "lookerExploreQualifiedName", - "lookerExploreQualifiedName", - "lookerExploreQualifiedName.text", + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" ) """ - TBC - """ - LOOKER_VIEW_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "lookerViewQualifiedName", - "lookerViewQualifiedName", - "lookerViewQualifiedName.text", - ) - """ - TBC - """ - MODEL_NAME: ClassVar[KeywordField] = KeywordField("modelName", "modelName") - """ - TBC + Unique name of the site in which this flow exists. """ - SOURCE_DEFINITION: ClassVar[KeywordField] = KeywordField( - "sourceDefinition", "sourceDefinition" + PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "projectQualifiedName", "projectQualifiedName" ) """ - TBC + Unique name of the project in which this flow exists. """ - LOOKER_FIELD_DATA_TYPE: ClassVar[KeywordField] = KeywordField( - "lookerFieldDataType", "lookerFieldDataType" + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" ) """ - TBC + Unique name of the top-level project in which this flow exists. """ - LOOKER_TIMES_USED: ClassVar[NumericField] = NumericField( - "lookerTimesUsed", "lookerTimesUsed" + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" ) """ - TBC + List of top-level projects with their nested child projects. """ - - EXPLORE: ClassVar[RelationField] = RelationField("explore") + INPUT_FIELDS: ClassVar[KeywordField] = KeywordField("inputFields", "inputFields") """ - TBC + List of fields that are inputs to this flow. """ - PROJECT: ClassVar[RelationField] = RelationField("project") + OUTPUT_FIELDS: ClassVar[KeywordField] = KeywordField("outputFields", "outputFields") """ - TBC + List of fields that are outputs from this flow. """ - VIEW: ClassVar[RelationField] = RelationField("view") + OUTPUT_STEPS: ClassVar[KeywordField] = KeywordField("outputSteps", "outputSteps") """ - TBC + List of steps that are outputs from this flow. 
""" - MODEL: ClassVar[RelationField] = RelationField("model") + + PROJECT: ClassVar[RelationField] = RelationField("project") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "project_name", - "looker_explore_qualified_name", - "looker_view_qualified_name", - "model_name", - "source_definition", - "looker_field_data_type", - "looker_times_used", - "explore", + "site_qualified_name", + "project_qualified_name", + "top_level_project_qualified_name", + "project_hierarchy", + "input_fields", + "output_fields", + "output_steps", "project", - "view", - "model", ] @property - def project_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.project_name + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name - @project_name.setter - def project_name(self, project_name: Optional[str]): + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.project_name = project_name + self.attributes.site_qualified_name = site_qualified_name @property - def looker_explore_qualified_name(self) -> Optional[str]: + def project_qualified_name(self) -> Optional[str]: return ( - None - if self.attributes is None - else self.attributes.looker_explore_qualified_name + None if self.attributes is None else self.attributes.project_qualified_name ) - @looker_explore_qualified_name.setter - def looker_explore_qualified_name( - self, looker_explore_qualified_name: Optional[str] - ): + @project_qualified_name.setter + def project_qualified_name(self, project_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.looker_explore_qualified_name = looker_explore_qualified_name + self.attributes.project_qualified_name = project_qualified_name @property - def looker_view_qualified_name(self) -> Optional[str]: + def top_level_project_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.looker_view_qualified_name + else self.attributes.top_level_project_qualified_name ) - @looker_view_qualified_name.setter - def looker_view_qualified_name(self, looker_view_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.looker_view_qualified_name = looker_view_qualified_name - - @property - def model_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.model_name - - @model_name.setter - def model_name(self, model_name: Optional[str]): + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.model_name = model_name + self.attributes.top_level_project_qualified_name = ( + top_level_project_qualified_name + ) @property - def source_definition(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.source_definition + def project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.project_hierarchy - @source_definition.setter - def source_definition(self, source_definition: Optional[str]): + @project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): if self.attributes is None: 
self.attributes = self.Attributes() - self.attributes.source_definition = source_definition + self.attributes.project_hierarchy = project_hierarchy @property - def looker_field_data_type(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.looker_field_data_type - ) + def input_fields(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.input_fields - @looker_field_data_type.setter - def looker_field_data_type(self, looker_field_data_type: Optional[str]): + @input_fields.setter + def input_fields(self, input_fields: Optional[list[dict[str, str]]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.looker_field_data_type = looker_field_data_type + self.attributes.input_fields = input_fields @property - def looker_times_used(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.looker_times_used + def output_fields(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.output_fields - @looker_times_used.setter - def looker_times_used(self, looker_times_used: Optional[int]): + @output_fields.setter + def output_fields(self, output_fields: Optional[list[dict[str, str]]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.looker_times_used = looker_times_used + self.attributes.output_fields = output_fields @property - def explore(self) -> Optional[LookerExplore]: - return None if self.attributes is None else self.attributes.explore + def output_steps(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.output_steps - @explore.setter - def explore(self, explore: Optional[LookerExplore]): + @output_steps.setter + def output_steps(self, output_steps: Optional[list[dict[str, str]]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.explore = explore + self.attributes.output_steps = output_steps @property - def project(self) -> Optional[LookerProject]: + def project(self) -> Optional[TableauProject]: return None if self.attributes is None else self.attributes.project @project.setter - def project(self, project: Optional[LookerProject]): + def project(self, project: Optional[TableauProject]): if self.attributes is None: self.attributes = self.Attributes() self.attributes.project = project - @property - def view(self) -> Optional[LookerView]: - return None if self.attributes is None else self.attributes.view - - @view.setter - def view(self, view: Optional[LookerView]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.view = view - - @property - def model(self) -> Optional[LookerModel]: - return None if self.attributes is None else self.attributes.model - - @model.setter - def model(self, model: Optional[LookerModel]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.model = model - - class Attributes(Looker.Attributes): - project_name: Optional[str] = Field(None, description="", alias="projectName") - looker_explore_qualified_name: Optional[str] = Field( - None, description="", alias="lookerExploreQualifiedName" + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field( + None, description="", alias="siteQualifiedName" ) - looker_view_qualified_name: Optional[str] = Field( - None, description="", alias="lookerViewQualifiedName" + project_qualified_name: Optional[str] = Field( + None, 
description="", alias="projectQualifiedName" ) - model_name: Optional[str] = Field(None, description="", alias="modelName") - source_definition: Optional[str] = Field( - None, description="", alias="sourceDefinition" + top_level_project_qualified_name: Optional[str] = Field( + None, description="", alias="topLevelProjectQualifiedName" ) - looker_field_data_type: Optional[str] = Field( - None, description="", alias="lookerFieldDataType" + project_hierarchy: Optional[list[dict[str, str]]] = Field( + None, description="", alias="projectHierarchy" ) - looker_times_used: Optional[int] = Field( - None, description="", alias="lookerTimesUsed" + input_fields: Optional[list[dict[str, str]]] = Field( + None, description="", alias="inputFields" ) - explore: Optional[LookerExplore] = Field( - None, description="", alias="explore" - ) # relationship - project: Optional[LookerProject] = Field( + output_fields: Optional[list[dict[str, str]]] = Field( + None, description="", alias="outputFields" + ) + output_steps: Optional[list[dict[str, str]]] = Field( + None, description="", alias="outputSteps" + ) + project: Optional[TableauProject] = Field( None, description="", alias="project" ) # relationship - view: Optional[LookerView] = Field( - None, description="", alias="view" - ) # relationship - model: Optional[LookerModel] = Field( - None, description="", alias="model" - ) # relationship - attributes: "LookerField.Attributes" = Field( - default_factory=lambda: LookerField.Attributes(), + attributes: "TableauFlow.Attributes" = Field( + default_factory=lambda: TableauFlow.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class LookerView(Looker): +class TableauWorksheet(Tableau): """Description""" - type_name: str = Field("LookerView", allow_mutation=False) + type_name: str = Field("TableauWorksheet", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "LookerView": - raise ValueError("must be LookerView") + if v != "TableauWorksheet": + raise ValueError("must be TableauWorksheet") return v def __setattr__(self, name, value): - if name in LookerView._convenience_properties: + if name in TableauWorksheet._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - PROJECT_NAME: ClassVar[KeywordField] = KeywordField("projectName", "projectName") + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" + ) """ - TBC + Unique name of the site in which this worksheet exists. + """ + PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "projectQualifiedName", "projectQualifiedName" + ) + """ + Unique name of the project in which this worksheet exists. + """ + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" + ) """ - LOOKER_VIEW_FILE_PATH: ClassVar[KeywordField] = KeywordField( - "lookerViewFilePath", "lookerViewFilePath" + Unique name of the top-level project in which this worksheet exists. + """ + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" ) """ - File path of the looker view in the project + List of top-level projects with their nested child projects. 
""" - LOOKER_VIEW_FILE_NAME: ClassVar[KeywordField] = KeywordField( - "lookerViewFileName", "lookerViewFileName" + WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workbookQualifiedName", "workbookQualifiedName" ) """ - File name of the looker view in the project + Unique name of the workbook in which this worksheet exists. """ - PROJECT: ClassVar[RelationField] = RelationField("project") + WORKBOOK: ClassVar[RelationField] = RelationField("workbook") """ TBC """ - FIELDS: ClassVar[RelationField] = RelationField("fields") + DATASOURCE_FIELDS: ClassVar[RelationField] = RelationField("datasourceFields") + """ + TBC + """ + CALCULATED_FIELDS: ClassVar[RelationField] = RelationField("calculatedFields") + """ + TBC + """ + DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "project_name", - "looker_view_file_path", - "looker_view_file_name", - "project", - "fields", + "site_qualified_name", + "project_qualified_name", + "top_level_project_qualified_name", + "project_hierarchy", + "workbook_qualified_name", + "workbook", + "datasource_fields", + "calculated_fields", + "dashboards", ] @property - def project_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.project_name + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name - @project_name.setter - def project_name(self, project_name: Optional[str]): + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.project_name = project_name + self.attributes.site_qualified_name = site_qualified_name @property - def looker_view_file_path(self) -> Optional[str]: + def project_qualified_name(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.looker_view_file_path + None if self.attributes is None else self.attributes.project_qualified_name ) - @looker_view_file_path.setter - def looker_view_file_path(self, looker_view_file_path: Optional[str]): + @project_qualified_name.setter + def project_qualified_name(self, project_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.looker_view_file_path = looker_view_file_path + self.attributes.project_qualified_name = project_qualified_name @property - def looker_view_file_name(self) -> Optional[str]: + def top_level_project_qualified_name(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.looker_view_file_name + None + if self.attributes is None + else self.attributes.top_level_project_qualified_name ) - @looker_view_file_name.setter - def looker_view_file_name(self, looker_view_file_name: Optional[str]): + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.looker_view_file_name = looker_view_file_name + self.attributes.top_level_project_qualified_name = ( + top_level_project_qualified_name + ) @property - def project(self) -> Optional[LookerProject]: - return None if self.attributes is None else self.attributes.project + def project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.project_hierarchy - @project.setter - 
def project(self, project: Optional[LookerProject]): + @project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.project = project + self.attributes.project_hierarchy = project_hierarchy @property - def fields(self) -> Optional[list[LookerField]]: - return None if self.attributes is None else self.attributes.fields + def workbook_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.workbook_qualified_name + ) - @fields.setter - def fields(self, fields: Optional[list[LookerField]]): + @workbook_qualified_name.setter + def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.fields = fields + self.attributes.workbook_qualified_name = workbook_qualified_name + + @property + def workbook(self) -> Optional[TableauWorkbook]: + return None if self.attributes is None else self.attributes.workbook + + @workbook.setter + def workbook(self, workbook: Optional[TableauWorkbook]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workbook = workbook + + @property + def datasource_fields(self) -> Optional[list[TableauDatasourceField]]: + return None if self.attributes is None else self.attributes.datasource_fields + + @datasource_fields.setter + def datasource_fields( + self, datasource_fields: Optional[list[TableauDatasourceField]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasource_fields = datasource_fields + + @property + def calculated_fields(self) -> Optional[list[TableauCalculatedField]]: + return None if self.attributes is None else self.attributes.calculated_fields + + @calculated_fields.setter + def calculated_fields( + self, calculated_fields: Optional[list[TableauCalculatedField]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.calculated_fields = calculated_fields + + @property + def dashboards(self) -> Optional[list[TableauDashboard]]: + return None if self.attributes is None else self.attributes.dashboards + + @dashboards.setter + def dashboards(self, dashboards: Optional[list[TableauDashboard]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboards = dashboards - class Attributes(Looker.Attributes): - project_name: Optional[str] = Field(None, description="", alias="projectName") - looker_view_file_path: Optional[str] = Field( - None, description="", alias="lookerViewFilePath" + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field( + None, description="", alias="siteQualifiedName" ) - looker_view_file_name: Optional[str] = Field( - None, description="", alias="lookerViewFileName" + project_qualified_name: Optional[str] = Field( + None, description="", alias="projectQualifiedName" ) - project: Optional[LookerProject] = Field( - None, description="", alias="project" + top_level_project_qualified_name: Optional[str] = Field( + None, description="", alias="topLevelProjectQualifiedName" + ) + project_hierarchy: Optional[list[dict[str, str]]] = Field( + None, description="", alias="projectHierarchy" + ) + workbook_qualified_name: Optional[str] = Field( + None, description="", alias="workbookQualifiedName" + ) + workbook: Optional[TableauWorkbook] = Field( + None, description="", alias="workbook" ) # relationship - 
fields: Optional[list[LookerField]] = Field( - None, description="", alias="fields" + datasource_fields: Optional[list[TableauDatasourceField]] = Field( + None, description="", alias="datasourceFields" + ) # relationship + calculated_fields: Optional[list[TableauCalculatedField]] = Field( + None, description="", alias="calculatedFields" + ) # relationship + dashboards: Optional[list[TableauDashboard]] = Field( + None, description="", alias="dashboards" ) # relationship - attributes: "LookerView.Attributes" = Field( - default_factory=lambda: LookerView.Attributes(), + attributes: "TableauWorksheet.Attributes" = Field( + default_factory=lambda: TableauWorksheet.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -LookerLook.Attributes.update_forward_refs() - - -LookerDashboard.Attributes.update_forward_refs() +TableauWorkbook.Attributes.update_forward_refs() -LookerFolder.Attributes.update_forward_refs() +TableauDatasourceField.Attributes.update_forward_refs() -LookerTile.Attributes.update_forward_refs() +TableauCalculatedField.Attributes.update_forward_refs() -LookerModel.Attributes.update_forward_refs() +TableauProject.Attributes.update_forward_refs() -LookerExplore.Attributes.update_forward_refs() +TableauSite.Attributes.update_forward_refs() -LookerProject.Attributes.update_forward_refs() +TableauDatasource.Attributes.update_forward_refs() -LookerQuery.Attributes.update_forward_refs() +TableauDashboard.Attributes.update_forward_refs() -LookerField.Attributes.update_forward_refs() +TableauFlow.Attributes.update_forward_refs() -LookerView.Attributes.update_forward_refs() +TableauWorksheet.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset70.py b/pyatlan/model/assets/asset70.py index 57d0a67a2..4481c59ad 100644 --- a/pyatlan/model/assets/asset70.py +++ b/pyatlan/model/assets/asset70.py @@ -8,64 +8,148 @@ from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import NumericField +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField -from .asset44 import Redash +from .asset43 import Tableau +from .asset69 import TableauProject -class RedashDashboard(Redash): +class TableauMetric(Tableau): """Description""" - type_name: str = Field("RedashDashboard", allow_mutation=False) + type_name: str = Field("TableauMetric", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "RedashDashboard": - raise ValueError("must be RedashDashboard") + if v != "TableauMetric": + raise ValueError("must be TableauMetric") return v def __setattr__(self, name, value): - if name in RedashDashboard._convenience_properties: + if name in TableauMetric._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - REDASH_DASHBOARD_WIDGET_COUNT: ClassVar[NumericField] = NumericField( - "redashDashboardWidgetCount", "redashDashboardWidgetCount" + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" ) """ - Number of widgets in the Redash Dashboard + Unique name of the site in which this metric exists. + """ + PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "projectQualifiedName", "projectQualifiedName" + ) + """ + Unique name of the project in which this metric exists. 
+ """ + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" + ) + """ + Unique name of the top-level project in which this metric exists. + """ + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" + ) + """ + List of top-level projects with their nested child projects. + """ + + PROJECT: ClassVar[RelationField] = RelationField("project") + """ + TBC """ _convenience_properties: ClassVar[list[str]] = [ - "redash_dashboard_widget_count", + "site_qualified_name", + "project_qualified_name", + "top_level_project_qualified_name", + "project_hierarchy", + "project", ] @property - def redash_dashboard_widget_count(self) -> Optional[int]: + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name + + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.site_qualified_name = site_qualified_name + + @property + def project_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.project_qualified_name + ) + + @project_qualified_name.setter + def project_qualified_name(self, project_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_qualified_name = project_qualified_name + + @property + def top_level_project_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.redash_dashboard_widget_count + else self.attributes.top_level_project_qualified_name ) - @redash_dashboard_widget_count.setter - def redash_dashboard_widget_count( - self, redash_dashboard_widget_count: Optional[int] + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.redash_dashboard_widget_count = redash_dashboard_widget_count + self.attributes.top_level_project_qualified_name = ( + top_level_project_qualified_name + ) + + @property + def project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.project_hierarchy - class Attributes(Redash.Attributes): - redash_dashboard_widget_count: Optional[int] = Field( - None, description="", alias="redashDashboardWidgetCount" + @project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_hierarchy = project_hierarchy + + @property + def project(self) -> Optional[TableauProject]: + return None if self.attributes is None else self.attributes.project + + @project.setter + def project(self, project: Optional[TableauProject]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project = project + + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field( + None, description="", alias="siteQualifiedName" + ) + project_qualified_name: Optional[str] = Field( + None, description="", alias="projectQualifiedName" + ) + top_level_project_qualified_name: Optional[str] = Field( + None, description="", alias="topLevelProjectQualifiedName" + ) + project_hierarchy: Optional[list[dict[str, 
str]]] = Field( + None, description="", alias="projectHierarchy" ) + project: Optional[TableauProject] = Field( + None, description="", alias="project" ) # relationship - attributes: "RedashDashboard.Attributes" = Field( - default_factory=lambda: RedashDashboard.Attributes(), + attributes: "TableauMetric.Attributes" = Field( + default_factory=lambda: TableauMetric.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -RedashDashboard.Attributes.update_forward_refs() +TableauMetric.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset71.py b/pyatlan/model/assets/asset71.py index de15840d4..30bb4cc45 100644 --- a/pyatlan/model/assets/asset71.py +++ b/pyatlan/model/assets/asset71.py @@ -16,330 +16,1913 @@ RelationField, ) -from .asset44 import Redash +from .asset44 import Looker -class RedashQuery(Redash): +class LookerLook(Looker): """Description""" - type_name: str = Field("RedashQuery", allow_mutation=False) + type_name: str = Field("LookerLook", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "RedashQuery": - raise ValueError("must be RedashQuery") + if v != "LookerLook": + raise ValueError("must be LookerLook") return v def __setattr__(self, name, value): - if name in RedashQuery._convenience_properties: + if name in LookerLook._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - REDASH_QUERY_SQL: ClassVar[KeywordField] = KeywordField( - "redashQuerySQL", "redashQuerySQL" + FOLDER_NAME: ClassVar[KeywordField] = KeywordField("folderName", "folderName") + """ + Name of the folder in which the Look is organized. + """ + SOURCE_USER_ID: ClassVar[NumericField] = NumericField( + "sourceUserId", "sourceUserId" ) """ - SQL code of Redash Query + Identifier of the user who created the Look, from Looker. """ - REDASH_QUERY_PARAMETERS: ClassVar[KeywordField] = KeywordField( - "redashQueryParameters", "redashQueryParameters" + SOURCE_VIEW_COUNT: ClassVar[NumericField] = NumericField( + "sourceViewCount", "sourceViewCount" ) """ - Parameters of Redash Query + Number of times the Look has been viewed in the Looker web UI. """ - REDASH_QUERY_SCHEDULE: ClassVar[KeywordField] = KeywordField( - "redashQuerySchedule", "redashQuerySchedule" + SOURCELAST_UPDATER_ID: ClassVar[NumericField] = NumericField( + "sourcelastUpdaterId", "sourcelastUpdaterId" ) """ - Schedule of Redash Query + Identifier of the user who last updated the Look, from Looker. """ - REDASH_QUERY_LAST_EXECUTION_RUNTIME: ClassVar[NumericField] = NumericField( - "redashQueryLastExecutionRuntime", "redashQueryLastExecutionRuntime" + SOURCE_LAST_ACCESSED_AT: ClassVar[NumericField] = NumericField( + "sourceLastAccessedAt", "sourceLastAccessedAt" ) """ - Runtime of Redash Query + Time (epoch) when the Look was last accessed by a user, in milliseconds. """ - REDASH_QUERY_LAST_EXECUTED_AT: ClassVar[NumericField] = NumericField( - "redashQueryLastExecutedAt", "redashQueryLastExecutedAt" + SOURCE_LAST_VIEWED_AT: ClassVar[NumericField] = NumericField( + "sourceLastViewedAt", "sourceLastViewedAt" ) """ - Time when the Redash Query was last executed + Time (epoch) when the Look was last viewed by a user, in milliseconds.
""" - REDASH_QUERY_SCHEDULE_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( - "redashQueryScheduleHumanized", - "redashQueryScheduleHumanized", - "redashQueryScheduleHumanized.text", + SOURCE_CONTENT_METADATA_ID: ClassVar[NumericField] = NumericField( + "sourceContentMetadataId", "sourceContentMetadataId" ) """ - Query schedule for overview tab and filtering. + Identifier of the Look's content metadata, from Looker. """ - - REDASH_VISUALIZATIONS: ClassVar[RelationField] = RelationField( - "redashVisualizations" + SOURCE_QUERY_ID: ClassVar[NumericField] = NumericField( + "sourceQueryId", "sourceQueryId" ) """ + Identifier of the query for the Look, from Looker. + """ + MODEL_NAME: ClassVar[KeywordField] = KeywordField("modelName", "modelName") + """ + Name of the model in which this Look exists. + """ + + QUERY: ClassVar[RelationField] = RelationField("query") + """ + TBC + """ + FOLDER: ClassVar[RelationField] = RelationField("folder") + """ + TBC + """ + TILE: ClassVar[RelationField] = RelationField("tile") + """ + TBC + """ + MODEL: ClassVar[RelationField] = RelationField("model") + """ + TBC + """ + DASHBOARD: ClassVar[RelationField] = RelationField("dashboard") + """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "redash_query_s_q_l", - "redash_query_parameters", - "redash_query_schedule", - "redash_query_last_execution_runtime", - "redash_query_last_executed_at", - "redash_query_schedule_humanized", - "redash_visualizations", + "folder_name", + "source_user_id", + "source_view_count", + "sourcelast_updater_id", + "source_last_accessed_at", + "source_last_viewed_at", + "source_content_metadata_id", + "source_query_id", + "model_name", + "query", + "folder", + "tile", + "model", + "dashboard", ] @property - def redash_query_s_q_l(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.redash_query_s_q_l + def folder_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.folder_name + + @folder_name.setter + def folder_name(self, folder_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.folder_name = folder_name + + @property + def source_user_id(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_user_id + + @source_user_id.setter + def source_user_id(self, source_user_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_user_id = source_user_id + + @property + def source_view_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_view_count - @redash_query_s_q_l.setter - def redash_query_s_q_l(self, redash_query_s_q_l: Optional[str]): + @source_view_count.setter + def source_view_count(self, source_view_count: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.redash_query_s_q_l = redash_query_s_q_l + self.attributes.source_view_count = source_view_count @property - def redash_query_parameters(self) -> Optional[str]: + def sourcelast_updater_id(self) -> Optional[int]: return ( - None if self.attributes is None else self.attributes.redash_query_parameters + None if self.attributes is None else self.attributes.sourcelast_updater_id ) - @redash_query_parameters.setter - def redash_query_parameters(self, redash_query_parameters: Optional[str]): + @sourcelast_updater_id.setter + def sourcelast_updater_id(self, sourcelast_updater_id: Optional[int]): if 
self.attributes is None: self.attributes = self.Attributes() - self.attributes.redash_query_parameters = redash_query_parameters + self.attributes.sourcelast_updater_id = sourcelast_updater_id @property - def redash_query_schedule(self) -> Optional[dict[str, str]]: + def source_last_accessed_at(self) -> Optional[datetime]: return ( - None if self.attributes is None else self.attributes.redash_query_schedule + None if self.attributes is None else self.attributes.source_last_accessed_at ) - @redash_query_schedule.setter - def redash_query_schedule(self, redash_query_schedule: Optional[dict[str, str]]): + @source_last_accessed_at.setter + def source_last_accessed_at(self, source_last_accessed_at: Optional[datetime]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.redash_query_schedule = redash_query_schedule + self.attributes.source_last_accessed_at = source_last_accessed_at @property - def redash_query_last_execution_runtime(self) -> Optional[float]: + def source_last_viewed_at(self) -> Optional[datetime]: return ( - None - if self.attributes is None - else self.attributes.redash_query_last_execution_runtime + None if self.attributes is None else self.attributes.source_last_viewed_at ) - @redash_query_last_execution_runtime.setter - def redash_query_last_execution_runtime( - self, redash_query_last_execution_runtime: Optional[float] - ): + @source_last_viewed_at.setter + def source_last_viewed_at(self, source_last_viewed_at: Optional[datetime]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.redash_query_last_execution_runtime = ( - redash_query_last_execution_runtime - ) + self.attributes.source_last_viewed_at = source_last_viewed_at @property - def redash_query_last_executed_at(self) -> Optional[datetime]: + def source_content_metadata_id(self) -> Optional[int]: return ( None if self.attributes is None - else self.attributes.redash_query_last_executed_at + else self.attributes.source_content_metadata_id ) - @redash_query_last_executed_at.setter - def redash_query_last_executed_at( - self, redash_query_last_executed_at: Optional[datetime] - ): + @source_content_metadata_id.setter + def source_content_metadata_id(self, source_content_metadata_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_content_metadata_id = source_content_metadata_id + + @property + def source_query_id(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_query_id + + @source_query_id.setter + def source_query_id(self, source_query_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_query_id = source_query_id + + @property + def model_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.model_name + + @model_name.setter + def model_name(self, model_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model_name = model_name + + @property + def query(self) -> Optional[LookerQuery]: + return None if self.attributes is None else self.attributes.query + + @query.setter + def query(self, query: Optional[LookerQuery]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query = query + + @property + def folder(self) -> Optional[LookerFolder]: + return None if self.attributes is None else self.attributes.folder + + @folder.setter + def folder(self, folder: 
Optional[LookerFolder]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.folder = folder + + @property + def tile(self) -> Optional[LookerTile]: + return None if self.attributes is None else self.attributes.tile + + @tile.setter + def tile(self, tile: Optional[LookerTile]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tile = tile + + @property + def model(self) -> Optional[LookerModel]: + return None if self.attributes is None else self.attributes.model + + @model.setter + def model(self, model: Optional[LookerModel]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model = model + + @property + def dashboard(self) -> Optional[LookerDashboard]: + return None if self.attributes is None else self.attributes.dashboard + + @dashboard.setter + def dashboard(self, dashboard: Optional[LookerDashboard]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboard = dashboard + + class Attributes(Looker.Attributes): + folder_name: Optional[str] = Field(None, description="", alias="folderName") + source_user_id: Optional[int] = Field( + None, description="", alias="sourceUserId" + ) + source_view_count: Optional[int] = Field( + None, description="", alias="sourceViewCount" + ) + sourcelast_updater_id: Optional[int] = Field( + None, description="", alias="sourcelastUpdaterId" + ) + source_last_accessed_at: Optional[datetime] = Field( + None, description="", alias="sourceLastAccessedAt" + ) + source_last_viewed_at: Optional[datetime] = Field( + None, description="", alias="sourceLastViewedAt" + ) + source_content_metadata_id: Optional[int] = Field( + None, description="", alias="sourceContentMetadataId" + ) + source_query_id: Optional[int] = Field( + None, description="", alias="sourceQueryId" + ) + model_name: Optional[str] = Field(None, description="", alias="modelName") + query: Optional[LookerQuery] = Field( + None, description="", alias="query" + ) # relationship + folder: Optional[LookerFolder] = Field( + None, description="", alias="folder" + ) # relationship + tile: Optional[LookerTile] = Field( + None, description="", alias="tile" + ) # relationship + model: Optional[LookerModel] = Field( + None, description="", alias="model" + ) # relationship + dashboard: Optional[LookerDashboard] = Field( + None, description="", alias="dashboard" + ) # relationship + + attributes: "LookerLook.Attributes" = Field( + default_factory=lambda: LookerLook.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class LookerDashboard(Looker): + """Description""" + + type_name: str = Field("LookerDashboard", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerDashboard": + raise ValueError("must be LookerDashboard") + return v + + def __setattr__(self, name, value): + if name in LookerDashboard._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + FOLDER_NAME: ClassVar[KeywordField] = KeywordField("folderName", "folderName") + """ + Name of the parent folder in Looker that contains this dashboard. + """ + SOURCE_USER_ID: ClassVar[NumericField] = NumericField( + "sourceUserId", "sourceUserId" + ) + """ + Identifier of the user who created this dashboard, from Looker. 
+ """ + SOURCE_VIEW_COUNT: ClassVar[NumericField] = NumericField( + "sourceViewCount", "sourceViewCount" + ) + """ + Number of times the dashboard has been viewed through the Looker web UI. + """ + SOURCE_METADATA_ID: ClassVar[NumericField] = NumericField( + "sourceMetadataId", "sourceMetadataId" + ) + """ + Identifier of the dashboard's content metadata, from Looker. + """ + SOURCELAST_UPDATER_ID: ClassVar[NumericField] = NumericField( + "sourcelastUpdaterId", "sourcelastUpdaterId" + ) + """ + Identifier of the user who last updated the dashboard, from Looker. + """ + SOURCE_LAST_ACCESSED_AT: ClassVar[NumericField] = NumericField( + "sourceLastAccessedAt", "sourceLastAccessedAt" + ) + """ + Timestamp (epoch) when the dashboard was last accessed by a user, in milliseconds. + """ + SOURCE_LAST_VIEWED_AT: ClassVar[NumericField] = NumericField( + "sourceLastViewedAt", "sourceLastViewedAt" + ) + """ + Timestamp (epoch) when the dashboard was last viewed by a user. + """ + + TILES: ClassVar[RelationField] = RelationField("tiles") + """ + TBC + """ + LOOKS: ClassVar[RelationField] = RelationField("looks") + """ + TBC + """ + FOLDER: ClassVar[RelationField] = RelationField("folder") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "folder_name", + "source_user_id", + "source_view_count", + "source_metadata_id", + "sourcelast_updater_id", + "source_last_accessed_at", + "source_last_viewed_at", + "tiles", + "looks", + "folder", + ] + + @property + def folder_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.folder_name + + @folder_name.setter + def folder_name(self, folder_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.folder_name = folder_name + + @property + def source_user_id(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_user_id + + @source_user_id.setter + def source_user_id(self, source_user_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_user_id = source_user_id + + @property + def source_view_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_view_count + + @source_view_count.setter + def source_view_count(self, source_view_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_view_count = source_view_count + + @property + def source_metadata_id(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_metadata_id + + @source_metadata_id.setter + def source_metadata_id(self, source_metadata_id: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.redash_query_last_executed_at = redash_query_last_executed_at + self.attributes.source_metadata_id = source_metadata_id @property - def redash_query_schedule_humanized(self) -> Optional[str]: + def sourcelast_updater_id(self) -> Optional[int]: return ( - None - if self.attributes is None - else self.attributes.redash_query_schedule_humanized + None if self.attributes is None else self.attributes.sourcelast_updater_id ) - @redash_query_schedule_humanized.setter - def redash_query_schedule_humanized( - self, redash_query_schedule_humanized: Optional[str] - ): + @sourcelast_updater_id.setter + def sourcelast_updater_id(self, sourcelast_updater_id: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - 
self.attributes.redash_query_schedule_humanized = ( - redash_query_schedule_humanized + self.attributes.sourcelast_updater_id = sourcelast_updater_id + + @property + def source_last_accessed_at(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.source_last_accessed_at ) + @source_last_accessed_at.setter + def source_last_accessed_at(self, source_last_accessed_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_last_accessed_at = source_last_accessed_at + @property - def redash_visualizations(self) -> Optional[list[RedashVisualization]]: + def source_last_viewed_at(self) -> Optional[datetime]: return ( - None if self.attributes is None else self.attributes.redash_visualizations + None if self.attributes is None else self.attributes.source_last_viewed_at ) - @redash_visualizations.setter - def redash_visualizations( - self, redash_visualizations: Optional[list[RedashVisualization]] - ): + @source_last_viewed_at.setter + def source_last_viewed_at(self, source_last_viewed_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_last_viewed_at = source_last_viewed_at + + @property + def tiles(self) -> Optional[list[LookerTile]]: + return None if self.attributes is None else self.attributes.tiles + + @tiles.setter + def tiles(self, tiles: Optional[list[LookerTile]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tiles = tiles + + @property + def looks(self) -> Optional[list[LookerLook]]: + return None if self.attributes is None else self.attributes.looks + + @looks.setter + def looks(self, looks: Optional[list[LookerLook]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looks = looks + + @property + def folder(self) -> Optional[LookerFolder]: + return None if self.attributes is None else self.attributes.folder + + @folder.setter + def folder(self, folder: Optional[LookerFolder]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.redash_visualizations = redash_visualizations + self.attributes.folder = folder - class Attributes(Redash.Attributes): - redash_query_s_q_l: Optional[str] = Field( - None, description="", alias="redashQuerySQL" + class Attributes(Looker.Attributes): + folder_name: Optional[str] = Field(None, description="", alias="folderName") + source_user_id: Optional[int] = Field( + None, description="", alias="sourceUserId" ) - redash_query_parameters: Optional[str] = Field( - None, description="", alias="redashQueryParameters" + source_view_count: Optional[int] = Field( + None, description="", alias="sourceViewCount" ) - redash_query_schedule: Optional[dict[str, str]] = Field( - None, description="", alias="redashQuerySchedule" + source_metadata_id: Optional[int] = Field( + None, description="", alias="sourceMetadataId" ) - redash_query_last_execution_runtime: Optional[float] = Field( - None, description="", alias="redashQueryLastExecutionRuntime" + sourcelast_updater_id: Optional[int] = Field( + None, description="", alias="sourcelastUpdaterId" ) - redash_query_last_executed_at: Optional[datetime] = Field( - None, description="", alias="redashQueryLastExecutedAt" + source_last_accessed_at: Optional[datetime] = Field( + None, description="", alias="sourceLastAccessedAt" ) - redash_query_schedule_humanized: Optional[str] = Field( - None, description="", alias="redashQueryScheduleHumanized" + 
source_last_viewed_at: Optional[datetime] = Field( + None, description="", alias="sourceLastViewedAt" ) - redash_visualizations: Optional[list[RedashVisualization]] = Field( - None, description="", alias="redashVisualizations" + tiles: Optional[list[LookerTile]] = Field( + None, description="", alias="tiles" + ) # relationship + looks: Optional[list[LookerLook]] = Field( + None, description="", alias="looks" + ) # relationship + folder: Optional[LookerFolder] = Field( + None, description="", alias="folder" ) # relationship - attributes: "RedashQuery.Attributes" = Field( - default_factory=lambda: RedashQuery.Attributes(), + attributes: "LookerDashboard.Attributes" = Field( + default_factory=lambda: LookerDashboard.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class RedashVisualization(Redash): +class LookerFolder(Looker): """Description""" - type_name: str = Field("RedashVisualization", allow_mutation=False) + type_name: str = Field("LookerFolder", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "RedashVisualization": - raise ValueError("must be RedashVisualization") + if v != "LookerFolder": + raise ValueError("must be LookerFolder") return v def __setattr__(self, name, value): - if name in RedashVisualization._convenience_properties: + if name in LookerFolder._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - REDASH_VISUALIZATION_TYPE: ClassVar[KeywordField] = KeywordField( - "redashVisualizationType", "redashVisualizationType" + SOURCE_CONTENT_METADATA_ID: ClassVar[NumericField] = NumericField( + "sourceContentMetadataId", "sourceContentMetadataId" ) """ - Redash Visualization Type + Identifier for the folder's content metadata in Looker. """ - REDASH_QUERY_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "redashQueryName", "redashQueryName.keyword", "redashQueryName" + SOURCE_CREATOR_ID: ClassVar[NumericField] = NumericField( + "sourceCreatorId", "sourceCreatorId" ) """ - Redash Query from which visualization is created + Identifier of the user who created the folder, from Looker. """ - REDASH_QUERY_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "redashQueryQualifiedName", - "redashQueryQualifiedName", - "redashQueryQualifiedName.text", + SOURCE_CHILD_COUNT: ClassVar[NumericField] = NumericField( + "sourceChildCount", "sourceChildCount" ) """ - Qualified name of the Redash Query from which visualization is created + Number of subfolders in this folder. + """ + SOURCE_PARENT_ID: ClassVar[NumericField] = NumericField( + "sourceParentID", "sourceParentID" + ) + """ + Identifier of the parent folder of this folder, from Looker. 
""" - REDASH_QUERY: ClassVar[RelationField] = RelationField("redashQuery") + LOOKER_SUB_FOLDERS: ClassVar[RelationField] = RelationField("lookerSubFolders") + """ + TBC + """ + DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") + """ + TBC + """ + LOOKS: ClassVar[RelationField] = RelationField("looks") + """ + TBC + """ + LOOKER_PARENT_FOLDER: ClassVar[RelationField] = RelationField("lookerParentFolder") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "redash_visualization_type", - "redash_query_name", - "redash_query_qualified_name", - "redash_query", + "source_content_metadata_id", + "source_creator_id", + "source_child_count", + "source_parent_i_d", + "looker_sub_folders", + "dashboards", + "looks", + "looker_parent_folder", ] @property - def redash_visualization_type(self) -> Optional[str]: + def source_content_metadata_id(self) -> Optional[int]: return ( None if self.attributes is None - else self.attributes.redash_visualization_type + else self.attributes.source_content_metadata_id ) - @redash_visualization_type.setter - def redash_visualization_type(self, redash_visualization_type: Optional[str]): + @source_content_metadata_id.setter + def source_content_metadata_id(self, source_content_metadata_id: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.redash_visualization_type = redash_visualization_type + self.attributes.source_content_metadata_id = source_content_metadata_id @property - def redash_query_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.redash_query_name + def source_creator_id(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_creator_id - @redash_query_name.setter - def redash_query_name(self, redash_query_name: Optional[str]): + @source_creator_id.setter + def source_creator_id(self, source_creator_id: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.redash_query_name = redash_query_name + self.attributes.source_creator_id = source_creator_id @property - def redash_query_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.redash_query_qualified_name - ) + def source_child_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_child_count + + @source_child_count.setter + def source_child_count(self, source_child_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_child_count = source_child_count + + @property + def source_parent_i_d(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_parent_i_d + + @source_parent_i_d.setter + def source_parent_i_d(self, source_parent_i_d: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_parent_i_d = source_parent_i_d + + @property + def looker_sub_folders(self) -> Optional[list[LookerFolder]]: + return None if self.attributes is None else self.attributes.looker_sub_folders + + @looker_sub_folders.setter + def looker_sub_folders(self, looker_sub_folders: Optional[list[LookerFolder]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looker_sub_folders = looker_sub_folders + + @property + def dashboards(self) -> Optional[list[LookerDashboard]]: + return None if self.attributes is None else self.attributes.dashboards + + 
@dashboards.setter + def dashboards(self, dashboards: Optional[list[LookerDashboard]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboards = dashboards + + @property + def looks(self) -> Optional[list[LookerLook]]: + return None if self.attributes is None else self.attributes.looks - @redash_query_qualified_name.setter - def redash_query_qualified_name(self, redash_query_qualified_name: Optional[str]): + @looks.setter + def looks(self, looks: Optional[list[LookerLook]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.redash_query_qualified_name = redash_query_qualified_name + self.attributes.looks = looks @property - def redash_query(self) -> Optional[RedashQuery]: - return None if self.attributes is None else self.attributes.redash_query + def looker_parent_folder(self) -> Optional[LookerFolder]: + return None if self.attributes is None else self.attributes.looker_parent_folder - @redash_query.setter - def redash_query(self, redash_query: Optional[RedashQuery]): + @looker_parent_folder.setter + def looker_parent_folder(self, looker_parent_folder: Optional[LookerFolder]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.redash_query = redash_query + self.attributes.looker_parent_folder = looker_parent_folder - class Attributes(Redash.Attributes): - redash_visualization_type: Optional[str] = Field( - None, description="", alias="redashVisualizationType" + class Attributes(Looker.Attributes): + source_content_metadata_id: Optional[int] = Field( + None, description="", alias="sourceContentMetadataId" ) - redash_query_name: Optional[str] = Field( - None, description="", alias="redashQueryName" + source_creator_id: Optional[int] = Field( + None, description="", alias="sourceCreatorId" ) - redash_query_qualified_name: Optional[str] = Field( - None, description="", alias="redashQueryQualifiedName" + source_child_count: Optional[int] = Field( + None, description="", alias="sourceChildCount" ) - redash_query: Optional[RedashQuery] = Field( - None, description="", alias="redashQuery" + source_parent_i_d: Optional[int] = Field( + None, description="", alias="sourceParentID" + ) + looker_sub_folders: Optional[list[LookerFolder]] = Field( + None, description="", alias="lookerSubFolders" + ) # relationship + dashboards: Optional[list[LookerDashboard]] = Field( + None, description="", alias="dashboards" + ) # relationship + looks: Optional[list[LookerLook]] = Field( + None, description="", alias="looks" + ) # relationship + looker_parent_folder: Optional[LookerFolder] = Field( + None, description="", alias="lookerParentFolder" ) # relationship - attributes: "RedashVisualization.Attributes" = Field( - default_factory=lambda: RedashVisualization.Attributes(), + attributes: "LookerFolder.Attributes" = Field( + default_factory=lambda: LookerFolder.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -RedashQuery.Attributes.update_forward_refs() +class LookerTile(Looker): + """Description""" + + type_name: str = Field("LookerTile", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerTile": + raise ValueError("must be LookerTile") + return v + + def __setattr__(self, name, value): + if name in LookerTile._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + LOOKML_LINK_ID: ClassVar[KeywordField] = KeywordField( + "lookmlLinkId", "lookmlLinkId" + ) + """ + Identifier for the LookML link. + """ + MERGE_RESULT_ID: ClassVar[KeywordField] = KeywordField( + "mergeResultId", "mergeResultId" + ) + """ + Identifier for the merge result. + """ + NOTE_TEXT: ClassVar[KeywordField] = KeywordField("noteText", "noteText") + """ + Text of notes added to the tile. + """ + QUERY_ID: ClassVar[NumericField] = NumericField("queryID", "queryID") + """ + Identifier for the query used to build this tile, from Looker. + """ + RESULT_MAKER_ID: ClassVar[NumericField] = NumericField( + "resultMakerID", "resultMakerID" + ) + """ + Identifier of the ResultMakerLookup entry, from Looker. + """ + SUBTITLE_TEXT: ClassVar[KeywordField] = KeywordField("subtitleText", "subtitleText") + """ + Text for the subtitle for text tiles. + """ + LOOK_ID: ClassVar[NumericField] = NumericField("lookId", "lookId") + """ + Identifier of the Look used to create this tile, from Looker. + """ + + QUERY: ClassVar[RelationField] = RelationField("query") + """ + TBC + """ + LOOK: ClassVar[RelationField] = RelationField("look") + """ + TBC + """ + DASHBOARD: ClassVar[RelationField] = RelationField("dashboard") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "lookml_link_id", + "merge_result_id", + "note_text", + "query_i_d", + "result_maker_i_d", + "subtitle_text", + "look_id", + "query", + "look", + "dashboard", + ] + + @property + def lookml_link_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.lookml_link_id + + @lookml_link_id.setter + def lookml_link_id(self, lookml_link_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.lookml_link_id = lookml_link_id + + @property + def merge_result_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.merge_result_id + + @merge_result_id.setter + def merge_result_id(self, merge_result_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.merge_result_id = merge_result_id + + @property + def note_text(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.note_text + + @note_text.setter + def note_text(self, note_text: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.note_text = note_text + + @property + def query_i_d(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_i_d + + @query_i_d.setter + def query_i_d(self, query_i_d: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_i_d = query_i_d + + @property + def result_maker_i_d(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.result_maker_i_d + + @result_maker_i_d.setter + def result_maker_i_d(self, result_maker_i_d: 
Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.result_maker_i_d = result_maker_i_d + + @property + def subtitle_text(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.subtitle_text + + @subtitle_text.setter + def subtitle_text(self, subtitle_text: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.subtitle_text = subtitle_text + + @property + def look_id(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.look_id + + @look_id.setter + def look_id(self, look_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.look_id = look_id + + @property + def query(self) -> Optional[LookerQuery]: + return None if self.attributes is None else self.attributes.query + + @query.setter + def query(self, query: Optional[LookerQuery]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query = query + + @property + def look(self) -> Optional[LookerLook]: + return None if self.attributes is None else self.attributes.look + + @look.setter + def look(self, look: Optional[LookerLook]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.look = look + + @property + def dashboard(self) -> Optional[LookerDashboard]: + return None if self.attributes is None else self.attributes.dashboard + + @dashboard.setter + def dashboard(self, dashboard: Optional[LookerDashboard]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboard = dashboard + + class Attributes(Looker.Attributes): + lookml_link_id: Optional[str] = Field( + None, description="", alias="lookmlLinkId" + ) + merge_result_id: Optional[str] = Field( + None, description="", alias="mergeResultId" + ) + note_text: Optional[str] = Field(None, description="", alias="noteText") + query_i_d: Optional[int] = Field(None, description="", alias="queryID") + result_maker_i_d: Optional[int] = Field( + None, description="", alias="resultMakerID" + ) + subtitle_text: Optional[str] = Field(None, description="", alias="subtitleText") + look_id: Optional[int] = Field(None, description="", alias="lookId") + query: Optional[LookerQuery] = Field( + None, description="", alias="query" + ) # relationship + look: Optional[LookerLook] = Field( + None, description="", alias="look" + ) # relationship + dashboard: Optional[LookerDashboard] = Field( + None, description="", alias="dashboard" + ) # relationship + + attributes: "LookerTile.Attributes" = Field( + default_factory=lambda: LookerTile.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class LookerModel(Looker): + """Description""" + + type_name: str = Field("LookerModel", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerModel": + raise ValueError("must be LookerModel") + return v + + def __setattr__(self, name, value): + if name in LookerModel._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PROJECT_NAME: ClassVar[KeywordField] = KeywordField("projectName", "projectName") + """ + Name of the project in which the model exists. 
+ """ + + EXPLORES: ClassVar[RelationField] = RelationField("explores") + """ + TBC + """ + PROJECT: ClassVar[RelationField] = RelationField("project") + """ + TBC + """ + LOOK: ClassVar[RelationField] = RelationField("look") + """ + TBC + """ + QUERIES: ClassVar[RelationField] = RelationField("queries") + """ + TBC + """ + FIELDS: ClassVar[RelationField] = RelationField("fields") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "project_name", + "explores", + "project", + "look", + "queries", + "fields", + ] + + @property + def project_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.project_name + + @project_name.setter + def project_name(self, project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_name = project_name + + @property + def explores(self) -> Optional[list[LookerExplore]]: + return None if self.attributes is None else self.attributes.explores + + @explores.setter + def explores(self, explores: Optional[list[LookerExplore]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.explores = explores + + @property + def project(self) -> Optional[LookerProject]: + return None if self.attributes is None else self.attributes.project + + @project.setter + def project(self, project: Optional[LookerProject]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project = project + + @property + def look(self) -> Optional[LookerLook]: + return None if self.attributes is None else self.attributes.look + + @look.setter + def look(self, look: Optional[LookerLook]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.look = look + + @property + def queries(self) -> Optional[list[LookerQuery]]: + return None if self.attributes is None else self.attributes.queries + + @queries.setter + def queries(self, queries: Optional[list[LookerQuery]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.queries = queries + + @property + def fields(self) -> Optional[list[LookerField]]: + return None if self.attributes is None else self.attributes.fields + + @fields.setter + def fields(self, fields: Optional[list[LookerField]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.fields = fields + + class Attributes(Looker.Attributes): + project_name: Optional[str] = Field(None, description="", alias="projectName") + explores: Optional[list[LookerExplore]] = Field( + None, description="", alias="explores" + ) # relationship + project: Optional[LookerProject] = Field( + None, description="", alias="project" + ) # relationship + look: Optional[LookerLook] = Field( + None, description="", alias="look" + ) # relationship + queries: Optional[list[LookerQuery]] = Field( + None, description="", alias="queries" + ) # relationship + fields: Optional[list[LookerField]] = Field( + None, description="", alias="fields" + ) # relationship + + attributes: "LookerModel.Attributes" = Field( + default_factory=lambda: LookerModel.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class LookerExplore(Looker): + """Description""" + + type_name: str = Field("LookerExplore", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerExplore": + raise ValueError("must be LookerExplore") + return v + + def __setattr__(self, name, value): + if name in LookerExplore._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PROJECT_NAME: ClassVar[KeywordField] = KeywordField("projectName", "projectName") + """ + Name of the parent project of this Explore. + """ + MODEL_NAME: ClassVar[KeywordField] = KeywordField("modelName", "modelName") + """ + Name of the parent model of this Explore. + """ + SOURCE_CONNECTION_NAME: ClassVar[KeywordField] = KeywordField( + "sourceConnectionName", "sourceConnectionName" + ) + """ + Connection name for the Explore, from Looker. + """ + VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "viewName", "viewName.keyword", "viewName" + ) + """ + Name of the view for the Explore. + """ + SQL_TABLE_NAME: ClassVar[KeywordField] = KeywordField( + "sqlTableName", "sqlTableName" + ) + """ + Name of the SQL table used to declare the Explore. + """ + + PROJECT: ClassVar[RelationField] = RelationField("project") + """ + TBC + """ + MODEL: ClassVar[RelationField] = RelationField("model") + """ + TBC + """ + FIELDS: ClassVar[RelationField] = RelationField("fields") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "project_name", + "model_name", + "source_connection_name", + "view_name", + "sql_table_name", + "project", + "model", + "fields", + ] + + @property + def project_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.project_name + + @project_name.setter + def project_name(self, project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_name = project_name + + @property + def model_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.model_name + + @model_name.setter + def model_name(self, model_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model_name = model_name + + @property + def source_connection_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.source_connection_name + ) + + @source_connection_name.setter + def source_connection_name(self, source_connection_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_connection_name = source_connection_name + + @property + def view_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.view_name + + @view_name.setter + def view_name(self, view_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_name = view_name + + @property + def sql_table_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.sql_table_name + + @sql_table_name.setter + def sql_table_name(self, sql_table_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sql_table_name = sql_table_name + + @property + def project(self) -> Optional[LookerProject]: + return None if self.attributes is None else 
self.attributes.project + + @project.setter + def project(self, project: Optional[LookerProject]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project = project + + @property + def model(self) -> Optional[LookerModel]: + return None if self.attributes is None else self.attributes.model + + @model.setter + def model(self, model: Optional[LookerModel]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model = model + + @property + def fields(self) -> Optional[list[LookerField]]: + return None if self.attributes is None else self.attributes.fields + + @fields.setter + def fields(self, fields: Optional[list[LookerField]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.fields = fields + + class Attributes(Looker.Attributes): + project_name: Optional[str] = Field(None, description="", alias="projectName") + model_name: Optional[str] = Field(None, description="", alias="modelName") + source_connection_name: Optional[str] = Field( + None, description="", alias="sourceConnectionName" + ) + view_name: Optional[str] = Field(None, description="", alias="viewName") + sql_table_name: Optional[str] = Field( + None, description="", alias="sqlTableName" + ) + project: Optional[LookerProject] = Field( + None, description="", alias="project" + ) # relationship + model: Optional[LookerModel] = Field( + None, description="", alias="model" + ) # relationship + fields: Optional[list[LookerField]] = Field( + None, description="", alias="fields" + ) # relationship + + attributes: "LookerExplore.Attributes" = Field( + default_factory=lambda: LookerExplore.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class LookerProject(Looker): + """Description""" + + type_name: str = Field("LookerProject", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerProject": + raise ValueError("must be LookerProject") + return v + + def __setattr__(self, name, value): + if name in LookerProject._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MODELS: ClassVar[RelationField] = RelationField("models") + """ + TBC + """ + EXPLORES: ClassVar[RelationField] = RelationField("explores") + """ + TBC + """ + FIELDS: ClassVar[RelationField] = RelationField("fields") + """ + TBC + """ + VIEWS: ClassVar[RelationField] = RelationField("views") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "models", + "explores", + "fields", + "views", + ] + + @property + def models(self) -> Optional[list[LookerModel]]: + return None if self.attributes is None else self.attributes.models + + @models.setter + def models(self, models: Optional[list[LookerModel]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.models = models + + @property + def explores(self) -> Optional[list[LookerExplore]]: + return None if self.attributes is None else self.attributes.explores + + @explores.setter + def explores(self, explores: Optional[list[LookerExplore]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.explores = explores + + @property + def fields(self) -> Optional[list[LookerField]]: + return None if self.attributes is None else self.attributes.fields + + @fields.setter + def fields(self, fields: 
Optional[list[LookerField]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.fields = fields + + @property + def views(self) -> Optional[list[LookerView]]: + return None if self.attributes is None else self.attributes.views + + @views.setter + def views(self, views: Optional[list[LookerView]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.views = views + + class Attributes(Looker.Attributes): + models: Optional[list[LookerModel]] = Field( + None, description="", alias="models" + ) # relationship + explores: Optional[list[LookerExplore]] = Field( + None, description="", alias="explores" + ) # relationship + fields: Optional[list[LookerField]] = Field( + None, description="", alias="fields" + ) # relationship + views: Optional[list[LookerView]] = Field( + None, description="", alias="views" + ) # relationship + + attributes: "LookerProject.Attributes" = Field( + default_factory=lambda: LookerProject.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class LookerQuery(Looker): + """Description""" + + type_name: str = Field("LookerQuery", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerQuery": + raise ValueError("must be LookerQuery") + return v + + def __setattr__(self, name, value): + if name in LookerQuery._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SOURCE_DEFINITION: ClassVar[KeywordField] = KeywordField( + "sourceDefinition", "sourceDefinition" + ) + """ + Deprecated. + """ + SOURCE_DEFINITION_DATABASE: ClassVar[KeywordField] = KeywordField( + "sourceDefinitionDatabase", "sourceDefinitionDatabase" + ) + """ + Deprecated. + """ + SOURCE_DEFINITION_SCHEMA: ClassVar[KeywordField] = KeywordField( + "sourceDefinitionSchema", "sourceDefinitionSchema" + ) + """ + Deprecated. + """ + FIELDS: ClassVar[KeywordField] = KeywordField("fields", "fields") + """ + Deprecated. 
+ """ + + TILES: ClassVar[RelationField] = RelationField("tiles") + """ + TBC + """ + LOOKS: ClassVar[RelationField] = RelationField("looks") + """ + TBC + """ + MODEL: ClassVar[RelationField] = RelationField("model") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "source_definition", + "source_definition_database", + "source_definition_schema", + "fields", + "tiles", + "looks", + "model", + ] + + @property + def source_definition(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.source_definition + + @source_definition.setter + def source_definition(self, source_definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_definition = source_definition + + @property + def source_definition_database(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.source_definition_database + ) + + @source_definition_database.setter + def source_definition_database(self, source_definition_database: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_definition_database = source_definition_database + + @property + def source_definition_schema(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.source_definition_schema + ) + + @source_definition_schema.setter + def source_definition_schema(self, source_definition_schema: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_definition_schema = source_definition_schema + + @property + def fields(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.fields + + @fields.setter + def fields(self, fields: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.fields = fields + + @property + def tiles(self) -> Optional[list[LookerTile]]: + return None if self.attributes is None else self.attributes.tiles + + @tiles.setter + def tiles(self, tiles: Optional[list[LookerTile]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tiles = tiles + + @property + def looks(self) -> Optional[list[LookerLook]]: + return None if self.attributes is None else self.attributes.looks + + @looks.setter + def looks(self, looks: Optional[list[LookerLook]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looks = looks + + @property + def model(self) -> Optional[LookerModel]: + return None if self.attributes is None else self.attributes.model + + @model.setter + def model(self, model: Optional[LookerModel]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model = model + + class Attributes(Looker.Attributes): + source_definition: Optional[str] = Field( + None, description="", alias="sourceDefinition" + ) + source_definition_database: Optional[str] = Field( + None, description="", alias="sourceDefinitionDatabase" + ) + source_definition_schema: Optional[str] = Field( + None, description="", alias="sourceDefinitionSchema" + ) + fields: Optional[set[str]] = Field(None, description="", alias="fields") + tiles: Optional[list[LookerTile]] = Field( + None, description="", alias="tiles" + ) # relationship + looks: Optional[list[LookerLook]] = Field( + None, description="", alias="looks" + ) # relationship + model: Optional[LookerModel] = Field( + None, description="", alias="model" 
+ ) # relationship + + attributes: "LookerQuery.Attributes" = Field( + default_factory=lambda: LookerQuery.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class LookerField(Looker): + """Description""" + + type_name: str = Field("LookerField", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerField": + raise ValueError("must be LookerField") + return v + + def __setattr__(self, name, value): + if name in LookerField._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PROJECT_NAME: ClassVar[KeywordField] = KeywordField("projectName", "projectName") + """ + Name of the project in which this field exists. + """ + LOOKER_EXPLORE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "lookerExploreQualifiedName", + "lookerExploreQualifiedName", + "lookerExploreQualifiedName.text", + ) + """ + Unique name of the Explore in which this field exists. + """ + LOOKER_VIEW_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "lookerViewQualifiedName", + "lookerViewQualifiedName", + "lookerViewQualifiedName.text", + ) + """ + Unique name of the view in which this field exists. + """ + MODEL_NAME: ClassVar[KeywordField] = KeywordField("modelName", "modelName") + """ + Name of the model in which this field exists. + """ + SOURCE_DEFINITION: ClassVar[KeywordField] = KeywordField( + "sourceDefinition", "sourceDefinition" + ) + """ + Deprecated. + """ + LOOKER_FIELD_DATA_TYPE: ClassVar[KeywordField] = KeywordField( + "lookerFieldDataType", "lookerFieldDataType" + ) + """ + Deprecated. + """ + LOOKER_TIMES_USED: ClassVar[NumericField] = NumericField( + "lookerTimesUsed", "lookerTimesUsed" + ) + """ + Deprecated. 
+ """ + + EXPLORE: ClassVar[RelationField] = RelationField("explore") + """ + TBC + """ + PROJECT: ClassVar[RelationField] = RelationField("project") + """ + TBC + """ + VIEW: ClassVar[RelationField] = RelationField("view") + """ + TBC + """ + MODEL: ClassVar[RelationField] = RelationField("model") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "project_name", + "looker_explore_qualified_name", + "looker_view_qualified_name", + "model_name", + "source_definition", + "looker_field_data_type", + "looker_times_used", + "explore", + "project", + "view", + "model", + ] + + @property + def project_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.project_name + + @project_name.setter + def project_name(self, project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_name = project_name + + @property + def looker_explore_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.looker_explore_qualified_name + ) + + @looker_explore_qualified_name.setter + def looker_explore_qualified_name( + self, looker_explore_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looker_explore_qualified_name = looker_explore_qualified_name + + @property + def looker_view_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.looker_view_qualified_name + ) + + @looker_view_qualified_name.setter + def looker_view_qualified_name(self, looker_view_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looker_view_qualified_name = looker_view_qualified_name + + @property + def model_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.model_name + + @model_name.setter + def model_name(self, model_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model_name = model_name + + @property + def source_definition(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.source_definition + + @source_definition.setter + def source_definition(self, source_definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_definition = source_definition + + @property + def looker_field_data_type(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.looker_field_data_type + ) + + @looker_field_data_type.setter + def looker_field_data_type(self, looker_field_data_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looker_field_data_type = looker_field_data_type + + @property + def looker_times_used(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.looker_times_used + + @looker_times_used.setter + def looker_times_used(self, looker_times_used: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looker_times_used = looker_times_used + + @property + def explore(self) -> Optional[LookerExplore]: + return None if self.attributes is None else self.attributes.explore + + @explore.setter + def explore(self, explore: Optional[LookerExplore]): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.explore = explore + + @property + def project(self) -> Optional[LookerProject]: + return None if self.attributes is None else self.attributes.project + + @project.setter + def project(self, project: Optional[LookerProject]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project = project + + @property + def view(self) -> Optional[LookerView]: + return None if self.attributes is None else self.attributes.view + + @view.setter + def view(self, view: Optional[LookerView]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view = view + + @property + def model(self) -> Optional[LookerModel]: + return None if self.attributes is None else self.attributes.model + + @model.setter + def model(self, model: Optional[LookerModel]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model = model + + class Attributes(Looker.Attributes): + project_name: Optional[str] = Field(None, description="", alias="projectName") + looker_explore_qualified_name: Optional[str] = Field( + None, description="", alias="lookerExploreQualifiedName" + ) + looker_view_qualified_name: Optional[str] = Field( + None, description="", alias="lookerViewQualifiedName" + ) + model_name: Optional[str] = Field(None, description="", alias="modelName") + source_definition: Optional[str] = Field( + None, description="", alias="sourceDefinition" + ) + looker_field_data_type: Optional[str] = Field( + None, description="", alias="lookerFieldDataType" + ) + looker_times_used: Optional[int] = Field( + None, description="", alias="lookerTimesUsed" + ) + explore: Optional[LookerExplore] = Field( + None, description="", alias="explore" + ) # relationship + project: Optional[LookerProject] = Field( + None, description="", alias="project" + ) # relationship + view: Optional[LookerView] = Field( + None, description="", alias="view" + ) # relationship + model: Optional[LookerModel] = Field( + None, description="", alias="model" + ) # relationship + + attributes: "LookerField.Attributes" = Field( + default_factory=lambda: LookerField.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class LookerView(Looker): + """Description""" + + type_name: str = Field("LookerView", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerView": + raise ValueError("must be LookerView") + return v + + def __setattr__(self, name, value): + if name in LookerView._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PROJECT_NAME: ClassVar[KeywordField] = KeywordField("projectName", "projectName") + """ + Name of the project in which this view exists. + """ + LOOKER_VIEW_FILE_PATH: ClassVar[KeywordField] = KeywordField( + "lookerViewFilePath", "lookerViewFilePath" + ) + """ + File path of this view within the project. + """ + LOOKER_VIEW_FILE_NAME: ClassVar[KeywordField] = KeywordField( + "lookerViewFileName", "lookerViewFileName" + ) + """ + File name of this view. 
+ """ + + PROJECT: ClassVar[RelationField] = RelationField("project") + """ + TBC + """ + FIELDS: ClassVar[RelationField] = RelationField("fields") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "project_name", + "looker_view_file_path", + "looker_view_file_name", + "project", + "fields", + ] + + @property + def project_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.project_name + + @project_name.setter + def project_name(self, project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_name = project_name + + @property + def looker_view_file_path(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.looker_view_file_path + ) + + @looker_view_file_path.setter + def looker_view_file_path(self, looker_view_file_path: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looker_view_file_path = looker_view_file_path + + @property + def looker_view_file_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.looker_view_file_name + ) + + @looker_view_file_name.setter + def looker_view_file_name(self, looker_view_file_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looker_view_file_name = looker_view_file_name + + @property + def project(self) -> Optional[LookerProject]: + return None if self.attributes is None else self.attributes.project + + @project.setter + def project(self, project: Optional[LookerProject]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project = project + + @property + def fields(self) -> Optional[list[LookerField]]: + return None if self.attributes is None else self.attributes.fields + + @fields.setter + def fields(self, fields: Optional[list[LookerField]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.fields = fields + + class Attributes(Looker.Attributes): + project_name: Optional[str] = Field(None, description="", alias="projectName") + looker_view_file_path: Optional[str] = Field( + None, description="", alias="lookerViewFilePath" + ) + looker_view_file_name: Optional[str] = Field( + None, description="", alias="lookerViewFileName" + ) + project: Optional[LookerProject] = Field( + None, description="", alias="project" + ) # relationship + fields: Optional[list[LookerField]] = Field( + None, description="", alias="fields" + ) # relationship + + attributes: "LookerView.Attributes" = Field( + default_factory=lambda: LookerView.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +LookerLook.Attributes.update_forward_refs() + + +LookerDashboard.Attributes.update_forward_refs() + + +LookerFolder.Attributes.update_forward_refs() + + +LookerTile.Attributes.update_forward_refs() + + +LookerModel.Attributes.update_forward_refs() + + +LookerExplore.Attributes.update_forward_refs() + + +LookerProject.Attributes.update_forward_refs() + + +LookerQuery.Attributes.update_forward_refs() + + +LookerField.Attributes.update_forward_refs() -RedashVisualization.Attributes.update_forward_refs() +LookerView.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset72.py b/pyatlan/model/assets/asset72.py index 0cecd9851..d86d1c283 100644 --- a/pyatlan/model/assets/asset72.py +++ b/pyatlan/model/assets/asset72.py @@ -4,1110 +4,68 @@ from __future__ import annotations -from datetime import datetime from typing import ClassVar, Optional from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - KeywordTextField, - NumericField, - RelationField, -) +from pyatlan.model.fields.atlan_fields import NumericField -from .asset45 import Sisense +from .asset45 import Redash -class SisenseFolder(Sisense): +class RedashDashboard(Redash): """Description""" - type_name: str = Field("SisenseFolder", allow_mutation=False) + type_name: str = Field("RedashDashboard", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "SisenseFolder": - raise ValueError("must be SisenseFolder") + if v != "RedashDashboard": + raise ValueError("must be RedashDashboard") return v def __setattr__(self, name, value): - if name in SisenseFolder._convenience_properties: + if name in RedashDashboard._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - SISENSE_FOLDER_PARENT_FOLDER_QUALIFIED_NAME: ClassVar[ - KeywordTextField - ] = KeywordTextField( - "sisenseFolderParentFolderQualifiedName", - "sisenseFolderParentFolderQualifiedName", - "sisenseFolderParentFolderQualifiedName.text", + REDASH_DASHBOARD_WIDGET_COUNT: ClassVar[NumericField] = NumericField( + "redashDashboardWidgetCount", "redashDashboardWidgetCount" ) """ - Qualified name of the parent folder - """ - - SISENSE_CHILD_FOLDERS: ClassVar[RelationField] = RelationField( - "sisenseChildFolders" - ) - """ - TBC - """ - SISENSE_WIDGETS: ClassVar[RelationField] = RelationField("sisenseWidgets") - """ - TBC - """ - SISENSE_DASHBOARDS: ClassVar[RelationField] = RelationField("sisenseDashboards") - """ - TBC - """ - SISENSE_PARENT_FOLDER: ClassVar[RelationField] = RelationField( - "sisenseParentFolder" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "sisense_folder_parent_folder_qualified_name", - "sisense_child_folders", - "sisense_widgets", - "sisense_dashboards", - "sisense_parent_folder", - ] - - @property - def sisense_folder_parent_folder_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_folder_parent_folder_qualified_name - ) - - @sisense_folder_parent_folder_qualified_name.setter - def sisense_folder_parent_folder_qualified_name( - self, sisense_folder_parent_folder_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_folder_parent_folder_qualified_name = ( - sisense_folder_parent_folder_qualified_name - ) - - @property - def 
sisense_child_folders(self) -> Optional[list[SisenseFolder]]: - return ( - None if self.attributes is None else self.attributes.sisense_child_folders - ) - - @sisense_child_folders.setter - def sisense_child_folders( - self, sisense_child_folders: Optional[list[SisenseFolder]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_child_folders = sisense_child_folders - - @property - def sisense_widgets(self) -> Optional[list[SisenseWidget]]: - return None if self.attributes is None else self.attributes.sisense_widgets - - @sisense_widgets.setter - def sisense_widgets(self, sisense_widgets: Optional[list[SisenseWidget]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_widgets = sisense_widgets - - @property - def sisense_dashboards(self) -> Optional[list[SisenseDashboard]]: - return None if self.attributes is None else self.attributes.sisense_dashboards - - @sisense_dashboards.setter - def sisense_dashboards(self, sisense_dashboards: Optional[list[SisenseDashboard]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_dashboards = sisense_dashboards - - @property - def sisense_parent_folder(self) -> Optional[SisenseFolder]: - return ( - None if self.attributes is None else self.attributes.sisense_parent_folder - ) - - @sisense_parent_folder.setter - def sisense_parent_folder(self, sisense_parent_folder: Optional[SisenseFolder]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_parent_folder = sisense_parent_folder - - class Attributes(Sisense.Attributes): - sisense_folder_parent_folder_qualified_name: Optional[str] = Field( - None, description="", alias="sisenseFolderParentFolderQualifiedName" - ) - sisense_child_folders: Optional[list[SisenseFolder]] = Field( - None, description="", alias="sisenseChildFolders" - ) # relationship - sisense_widgets: Optional[list[SisenseWidget]] = Field( - None, description="", alias="sisenseWidgets" - ) # relationship - sisense_dashboards: Optional[list[SisenseDashboard]] = Field( - None, description="", alias="sisenseDashboards" - ) # relationship - sisense_parent_folder: Optional[SisenseFolder] = Field( - None, description="", alias="sisenseParentFolder" - ) # relationship - - attributes: "SisenseFolder.Attributes" = Field( - default_factory=lambda: SisenseFolder.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SisenseWidget(Sisense): - """Description""" - - type_name: str = Field("SisenseWidget", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SisenseWidget": - raise ValueError("must be SisenseWidget") - return v - - def __setattr__(self, name, value): - if name in SisenseWidget._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SISENSE_WIDGET_COLUMN_COUNT: ClassVar[NumericField] = NumericField( - "sisenseWidgetColumnCount", "sisenseWidgetColumnCount" - ) - """ - Numbers of columns used in the widget - """ - SISENSE_WIDGET_SUB_TYPE: ClassVar[KeywordField] = KeywordField( - "sisenseWidgetSubType", "sisenseWidgetSubType" - ) - """ - Sub type of the widget - """ - SISENSE_WIDGET_SIZE: ClassVar[KeywordField] = KeywordField( - "sisenseWidgetSize", "sisenseWidgetSize" - ) - """ - Size of the widget - """ - SISENSE_WIDGET_DASHBOARD_QUALIFIED_NAME: ClassVar[ - KeywordTextField - ] = KeywordTextField( - "sisenseWidgetDashboardQualifiedName", - "sisenseWidgetDashboardQualifiedName", - "sisenseWidgetDashboardQualifiedName.text", - ) - """ - Qualified name of the dashboard under which this widget is present - """ - SISENSE_WIDGET_FOLDER_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "sisenseWidgetFolderQualifiedName", - "sisenseWidgetFolderQualifiedName", - "sisenseWidgetFolderQualifiedName.text", - ) - """ - Qualified name of the folder under which this widget is present - """ - - SISENSE_DATAMODEL_TABLES: ClassVar[RelationField] = RelationField( - "sisenseDatamodelTables" - ) - """ - TBC - """ - SISENSE_FOLDER: ClassVar[RelationField] = RelationField("sisenseFolder") - """ - TBC - """ - SISENSE_DASHBOARD: ClassVar[RelationField] = RelationField("sisenseDashboard") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "sisense_widget_column_count", - "sisense_widget_sub_type", - "sisense_widget_size", - "sisense_widget_dashboard_qualified_name", - "sisense_widget_folder_qualified_name", - "sisense_datamodel_tables", - "sisense_folder", - "sisense_dashboard", - ] - - @property - def sisense_widget_column_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.sisense_widget_column_count - ) - - @sisense_widget_column_count.setter - def sisense_widget_column_count(self, sisense_widget_column_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_widget_column_count = sisense_widget_column_count - - @property - def sisense_widget_sub_type(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.sisense_widget_sub_type - ) - - @sisense_widget_sub_type.setter - def sisense_widget_sub_type(self, sisense_widget_sub_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_widget_sub_type = sisense_widget_sub_type - - @property - def sisense_widget_size(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.sisense_widget_size - - @sisense_widget_size.setter - def sisense_widget_size(self, sisense_widget_size: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_widget_size = sisense_widget_size - - @property - def sisense_widget_dashboard_qualified_name(self) -> 
Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_widget_dashboard_qualified_name - ) - - @sisense_widget_dashboard_qualified_name.setter - def sisense_widget_dashboard_qualified_name( - self, sisense_widget_dashboard_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_widget_dashboard_qualified_name = ( - sisense_widget_dashboard_qualified_name - ) - - @property - def sisense_widget_folder_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_widget_folder_qualified_name - ) - - @sisense_widget_folder_qualified_name.setter - def sisense_widget_folder_qualified_name( - self, sisense_widget_folder_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_widget_folder_qualified_name = ( - sisense_widget_folder_qualified_name - ) - - @property - def sisense_datamodel_tables(self) -> Optional[list[SisenseDatamodelTable]]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_tables - ) - - @sisense_datamodel_tables.setter - def sisense_datamodel_tables( - self, sisense_datamodel_tables: Optional[list[SisenseDatamodelTable]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_tables = sisense_datamodel_tables - - @property - def sisense_folder(self) -> Optional[SisenseFolder]: - return None if self.attributes is None else self.attributes.sisense_folder - - @sisense_folder.setter - def sisense_folder(self, sisense_folder: Optional[SisenseFolder]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_folder = sisense_folder - - @property - def sisense_dashboard(self) -> Optional[SisenseDashboard]: - return None if self.attributes is None else self.attributes.sisense_dashboard - - @sisense_dashboard.setter - def sisense_dashboard(self, sisense_dashboard: Optional[SisenseDashboard]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_dashboard = sisense_dashboard - - class Attributes(Sisense.Attributes): - sisense_widget_column_count: Optional[int] = Field( - None, description="", alias="sisenseWidgetColumnCount" - ) - sisense_widget_sub_type: Optional[str] = Field( - None, description="", alias="sisenseWidgetSubType" - ) - sisense_widget_size: Optional[str] = Field( - None, description="", alias="sisenseWidgetSize" - ) - sisense_widget_dashboard_qualified_name: Optional[str] = Field( - None, description="", alias="sisenseWidgetDashboardQualifiedName" - ) - sisense_widget_folder_qualified_name: Optional[str] = Field( - None, description="", alias="sisenseWidgetFolderQualifiedName" - ) - sisense_datamodel_tables: Optional[list[SisenseDatamodelTable]] = Field( - None, description="", alias="sisenseDatamodelTables" - ) # relationship - sisense_folder: Optional[SisenseFolder] = Field( - None, description="", alias="sisenseFolder" - ) # relationship - sisense_dashboard: Optional[SisenseDashboard] = Field( - None, description="", alias="sisenseDashboard" - ) # relationship - - attributes: "SisenseWidget.Attributes" = Field( - default_factory=lambda: SisenseWidget.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SisenseDatamodel(Sisense): - """Description""" - - type_name: str = Field("SisenseDatamodel", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SisenseDatamodel": - raise ValueError("must be SisenseDatamodel") - return v - - def __setattr__(self, name, value): - if name in SisenseDatamodel._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SISENSE_DATAMODEL_TABLE_COUNT: ClassVar[NumericField] = NumericField( - "sisenseDatamodelTableCount", "sisenseDatamodelTableCount" - ) - """ - Number of tables present in the datamodel - """ - SISENSE_DATAMODEL_SERVER: ClassVar[KeywordField] = KeywordField( - "sisenseDatamodelServer", "sisenseDatamodelServer" - ) - """ - Hostname of server on which the Datamodel is created - """ - SISENSE_DATAMODEL_REVISION: ClassVar[KeywordField] = KeywordField( - "sisenseDatamodelRevision", "sisenseDatamodelRevision" - ) - """ - Revision of the datamodel - """ - SISENSE_DATAMODEL_LAST_BUILD_TIME: ClassVar[NumericField] = NumericField( - "sisenseDatamodelLastBuildTime", "sisenseDatamodelLastBuildTime" - ) - """ - Timestamp of when the datamodel was last built - """ - SISENSE_DATAMODEL_LAST_SUCCESSFUL_BUILD_TIME: ClassVar[NumericField] = NumericField( - "sisenseDatamodelLastSuccessfulBuildTime", - "sisenseDatamodelLastSuccessfulBuildTime", - ) - """ - Timestamp of when the datamodel was last built successfully - """ - SISENSE_DATAMODEL_LAST_PUBLISH_TIME: ClassVar[NumericField] = NumericField( - "sisenseDatamodelLastPublishTime", "sisenseDatamodelLastPublishTime" - ) - """ - Timestamp of when the datamodel was last published - """ - SISENSE_DATAMODEL_TYPE: ClassVar[KeywordField] = KeywordField( - "sisenseDatamodelType", "sisenseDatamodelType" - ) - """ - Type of the datamodel. It has values 'extract' or 'custom' - """ - SISENSE_DATAMODEL_RELATION_TYPE: ClassVar[KeywordField] = KeywordField( - "sisenseDatamodelRelationType", "sisenseDatamodelRelationType" - ) - """ - Default relation type for the Datamodel. 'extract' type Datamodels have regular relations by default. 'live' type Datamodels have direct relations by default. 
- """ # noqa: E501 - - SISENSE_DATAMODEL_TABLES: ClassVar[RelationField] = RelationField( - "sisenseDatamodelTables" - ) - """ - TBC - """ - SISENSE_DASHBOARDS: ClassVar[RelationField] = RelationField("sisenseDashboards") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "sisense_datamodel_table_count", - "sisense_datamodel_server", - "sisense_datamodel_revision", - "sisense_datamodel_last_build_time", - "sisense_datamodel_last_successful_build_time", - "sisense_datamodel_last_publish_time", - "sisense_datamodel_type", - "sisense_datamodel_relation_type", - "sisense_datamodel_tables", - "sisense_dashboards", - ] - - @property - def sisense_datamodel_table_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_table_count - ) - - @sisense_datamodel_table_count.setter - def sisense_datamodel_table_count( - self, sisense_datamodel_table_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_table_count = sisense_datamodel_table_count - - @property - def sisense_datamodel_server(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_server - ) - - @sisense_datamodel_server.setter - def sisense_datamodel_server(self, sisense_datamodel_server: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_server = sisense_datamodel_server - - @property - def sisense_datamodel_revision(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_revision - ) - - @sisense_datamodel_revision.setter - def sisense_datamodel_revision(self, sisense_datamodel_revision: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_revision = sisense_datamodel_revision - - @property - def sisense_datamodel_last_build_time(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_last_build_time - ) - - @sisense_datamodel_last_build_time.setter - def sisense_datamodel_last_build_time( - self, sisense_datamodel_last_build_time: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_last_build_time = ( - sisense_datamodel_last_build_time - ) - - @property - def sisense_datamodel_last_successful_build_time(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_last_successful_build_time - ) - - @sisense_datamodel_last_successful_build_time.setter - def sisense_datamodel_last_successful_build_time( - self, sisense_datamodel_last_successful_build_time: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_last_successful_build_time = ( - sisense_datamodel_last_successful_build_time - ) - - @property - def sisense_datamodel_last_publish_time(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_last_publish_time - ) - - @sisense_datamodel_last_publish_time.setter - def sisense_datamodel_last_publish_time( - self, sisense_datamodel_last_publish_time: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_last_publish_time = ( - 
sisense_datamodel_last_publish_time - ) - - @property - def sisense_datamodel_type(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.sisense_datamodel_type - ) - - @sisense_datamodel_type.setter - def sisense_datamodel_type(self, sisense_datamodel_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_type = sisense_datamodel_type - - @property - def sisense_datamodel_relation_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_relation_type - ) - - @sisense_datamodel_relation_type.setter - def sisense_datamodel_relation_type( - self, sisense_datamodel_relation_type: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_relation_type = ( - sisense_datamodel_relation_type - ) - - @property - def sisense_datamodel_tables(self) -> Optional[list[SisenseDatamodelTable]]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_tables - ) - - @sisense_datamodel_tables.setter - def sisense_datamodel_tables( - self, sisense_datamodel_tables: Optional[list[SisenseDatamodelTable]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_tables = sisense_datamodel_tables - - @property - def sisense_dashboards(self) -> Optional[list[SisenseDashboard]]: - return None if self.attributes is None else self.attributes.sisense_dashboards - - @sisense_dashboards.setter - def sisense_dashboards(self, sisense_dashboards: Optional[list[SisenseDashboard]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_dashboards = sisense_dashboards - - class Attributes(Sisense.Attributes): - sisense_datamodel_table_count: Optional[int] = Field( - None, description="", alias="sisenseDatamodelTableCount" - ) - sisense_datamodel_server: Optional[str] = Field( - None, description="", alias="sisenseDatamodelServer" - ) - sisense_datamodel_revision: Optional[str] = Field( - None, description="", alias="sisenseDatamodelRevision" - ) - sisense_datamodel_last_build_time: Optional[datetime] = Field( - None, description="", alias="sisenseDatamodelLastBuildTime" - ) - sisense_datamodel_last_successful_build_time: Optional[datetime] = Field( - None, description="", alias="sisenseDatamodelLastSuccessfulBuildTime" - ) - sisense_datamodel_last_publish_time: Optional[datetime] = Field( - None, description="", alias="sisenseDatamodelLastPublishTime" - ) - sisense_datamodel_type: Optional[str] = Field( - None, description="", alias="sisenseDatamodelType" - ) - sisense_datamodel_relation_type: Optional[str] = Field( - None, description="", alias="sisenseDatamodelRelationType" - ) - sisense_datamodel_tables: Optional[list[SisenseDatamodelTable]] = Field( - None, description="", alias="sisenseDatamodelTables" - ) # relationship - sisense_dashboards: Optional[list[SisenseDashboard]] = Field( - None, description="", alias="sisenseDashboards" - ) # relationship - - attributes: "SisenseDatamodel.Attributes" = Field( - default_factory=lambda: SisenseDatamodel.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SisenseDatamodelTable(Sisense): - """Description""" - - type_name: str = Field("SisenseDatamodelTable", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SisenseDatamodelTable": - raise ValueError("must be SisenseDatamodelTable") - return v - - def __setattr__(self, name, value): - if name in SisenseDatamodelTable._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SISENSE_DATAMODEL_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "sisenseDatamodelQualifiedName", - "sisenseDatamodelQualifiedName", - "sisenseDatamodelQualifiedName.text", - ) - """ - Qualified name of the datamodel in which this datamodel table is present - """ - SISENSE_DATAMODEL_TABLE_COLUMN_COUNT: ClassVar[NumericField] = NumericField( - "sisenseDatamodelTableColumnCount", "sisenseDatamodelTableColumnCount" - ) - """ - Number of columns present in the datamodel table - """ - SISENSE_DATAMODEL_TABLE_TYPE: ClassVar[KeywordField] = KeywordField( - "sisenseDatamodelTableType", "sisenseDatamodelTableType" - ) - """ - Type of the datamodel table. 'base' for regular tables, 'custom' for SQL expression-based tables - """ - SISENSE_DATAMODEL_TABLE_EXPRESSION: ClassVar[KeywordField] = KeywordField( - "sisenseDatamodelTableExpression", "sisenseDatamodelTableExpression" - ) - """ - Actual SQL expression of the datamodel table - """ - SISENSE_DATAMODEL_TABLE_IS_MATERIALIZED: ClassVar[BooleanField] = BooleanField( - "sisenseDatamodelTableIsMaterialized", "sisenseDatamodelTableIsMaterialized" - ) - """ - Boolean indicating if the datamodel table is materialised - """ - SISENSE_DATAMODEL_TABLE_IS_HIDDEN: ClassVar[BooleanField] = BooleanField( - "sisenseDatamodelTableIsHidden", "sisenseDatamodelTableIsHidden" - ) - """ - Boolean indicating if the datamodel table is hidden on sisense - """ - SISENSE_DATAMODEL_TABLE_SCHEDULE: ClassVar[KeywordField] = KeywordField( - "sisenseDatamodelTableSchedule", "sisenseDatamodelTableSchedule" - ) - """ - Refresh schedule of the datamodel table (JSON Object) - """ - SISENSE_DATAMODEL_TABLE_LIVE_QUERY_SETTINGS: ClassVar[KeywordField] = KeywordField( - "sisenseDatamodelTableLiveQuerySettings", - "sisenseDatamodelTableLiveQuerySettings", - ) - """ - LiveQuery settings of the datamodel table (JSON Object) - """ - - SISENSE_DATAMODEL: ClassVar[RelationField] = RelationField("sisenseDatamodel") - """ - TBC - """ - SISENSE_WIDGETS: ClassVar[RelationField] = RelationField("sisenseWidgets") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "sisense_datamodel_qualified_name", - "sisense_datamodel_table_column_count", - "sisense_datamodel_table_type", - "sisense_datamodel_table_expression", - "sisense_datamodel_table_is_materialized", - "sisense_datamodel_table_is_hidden", - "sisense_datamodel_table_schedule", - "sisense_datamodel_table_live_query_settings", - "sisense_datamodel", - "sisense_widgets", - ] - - @property - def sisense_datamodel_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_qualified_name - ) - - @sisense_datamodel_qualified_name.setter - def sisense_datamodel_qualified_name( - self, sisense_datamodel_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_qualified_name = ( - 
sisense_datamodel_qualified_name - ) - - @property - def sisense_datamodel_table_column_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_table_column_count - ) - - @sisense_datamodel_table_column_count.setter - def sisense_datamodel_table_column_count( - self, sisense_datamodel_table_column_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_table_column_count = ( - sisense_datamodel_table_column_count - ) - - @property - def sisense_datamodel_table_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_table_type - ) - - @sisense_datamodel_table_type.setter - def sisense_datamodel_table_type(self, sisense_datamodel_table_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_table_type = sisense_datamodel_table_type - - @property - def sisense_datamodel_table_expression(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_table_expression - ) - - @sisense_datamodel_table_expression.setter - def sisense_datamodel_table_expression( - self, sisense_datamodel_table_expression: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_table_expression = ( - sisense_datamodel_table_expression - ) - - @property - def sisense_datamodel_table_is_materialized(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_table_is_materialized - ) - - @sisense_datamodel_table_is_materialized.setter - def sisense_datamodel_table_is_materialized( - self, sisense_datamodel_table_is_materialized: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_table_is_materialized = ( - sisense_datamodel_table_is_materialized - ) - - @property - def sisense_datamodel_table_is_hidden(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_table_is_hidden - ) - - @sisense_datamodel_table_is_hidden.setter - def sisense_datamodel_table_is_hidden( - self, sisense_datamodel_table_is_hidden: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_table_is_hidden = ( - sisense_datamodel_table_is_hidden - ) - - @property - def sisense_datamodel_table_schedule(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_table_schedule - ) - - @sisense_datamodel_table_schedule.setter - def sisense_datamodel_table_schedule( - self, sisense_datamodel_table_schedule: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_table_schedule = ( - sisense_datamodel_table_schedule - ) - - @property - def sisense_datamodel_table_live_query_settings(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_table_live_query_settings - ) - - @sisense_datamodel_table_live_query_settings.setter - def sisense_datamodel_table_live_query_settings( - self, sisense_datamodel_table_live_query_settings: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.sisense_datamodel_table_live_query_settings = ( - sisense_datamodel_table_live_query_settings - ) - - @property - def sisense_datamodel(self) -> Optional[SisenseDatamodel]: - return None if self.attributes is None else self.attributes.sisense_datamodel - - @sisense_datamodel.setter - def sisense_datamodel(self, sisense_datamodel: Optional[SisenseDatamodel]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel = sisense_datamodel - - @property - def sisense_widgets(self) -> Optional[list[SisenseWidget]]: - return None if self.attributes is None else self.attributes.sisense_widgets - - @sisense_widgets.setter - def sisense_widgets(self, sisense_widgets: Optional[list[SisenseWidget]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_widgets = sisense_widgets - - class Attributes(Sisense.Attributes): - sisense_datamodel_qualified_name: Optional[str] = Field( - None, description="", alias="sisenseDatamodelQualifiedName" - ) - sisense_datamodel_table_column_count: Optional[int] = Field( - None, description="", alias="sisenseDatamodelTableColumnCount" - ) - sisense_datamodel_table_type: Optional[str] = Field( - None, description="", alias="sisenseDatamodelTableType" - ) - sisense_datamodel_table_expression: Optional[str] = Field( - None, description="", alias="sisenseDatamodelTableExpression" - ) - sisense_datamodel_table_is_materialized: Optional[bool] = Field( - None, description="", alias="sisenseDatamodelTableIsMaterialized" - ) - sisense_datamodel_table_is_hidden: Optional[bool] = Field( - None, description="", alias="sisenseDatamodelTableIsHidden" - ) - sisense_datamodel_table_schedule: Optional[str] = Field( - None, description="", alias="sisenseDatamodelTableSchedule" - ) - sisense_datamodel_table_live_query_settings: Optional[str] = Field( - None, description="", alias="sisenseDatamodelTableLiveQuerySettings" - ) - sisense_datamodel: Optional[SisenseDatamodel] = Field( - None, description="", alias="sisenseDatamodel" - ) # relationship - sisense_widgets: Optional[list[SisenseWidget]] = Field( - None, description="", alias="sisenseWidgets" - ) # relationship - - attributes: "SisenseDatamodelTable.Attributes" = Field( - default_factory=lambda: SisenseDatamodelTable.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SisenseDashboard(Sisense): - """Description""" - - type_name: str = Field("SisenseDashboard", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SisenseDashboard": - raise ValueError("must be SisenseDashboard") - return v - - def __setattr__(self, name, value): - if name in SisenseDashboard._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SISENSE_DASHBOARD_FOLDER_QUALIFIED_NAME: ClassVar[ - KeywordTextField - ] = KeywordTextField( - "sisenseDashboardFolderQualifiedName", - "sisenseDashboardFolderQualifiedName", - "sisenseDashboardFolderQualifiedName.text", - ) - """ - Qualified name of the folder in which this dashboard exists - """ - SISENSE_DASHBOARD_WIDGET_COUNT: ClassVar[NumericField] = NumericField( - "sisenseDashboardWidgetCount", "sisenseDashboardWidgetCount" - ) - """ - Number of widgets that are present in this dashboard - """ - - SISENSE_DATAMODELS: ClassVar[RelationField] = RelationField("sisenseDatamodels") - """ - TBC - """ - SISENSE_WIDGETS: ClassVar[RelationField] = RelationField("sisenseWidgets") - """ - TBC - """ - SISENSE_FOLDER: ClassVar[RelationField] = RelationField("sisenseFolder") - """ - TBC + Number of widgets in this dashboard. """ _convenience_properties: ClassVar[list[str]] = [ - "sisense_dashboard_folder_qualified_name", - "sisense_dashboard_widget_count", - "sisense_datamodels", - "sisense_widgets", - "sisense_folder", + "redash_dashboard_widget_count", ] @property - def sisense_dashboard_folder_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_dashboard_folder_qualified_name - ) - - @sisense_dashboard_folder_qualified_name.setter - def sisense_dashboard_folder_qualified_name( - self, sisense_dashboard_folder_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_dashboard_folder_qualified_name = ( - sisense_dashboard_folder_qualified_name - ) - - @property - def sisense_dashboard_widget_count(self) -> Optional[int]: + def redash_dashboard_widget_count(self) -> Optional[int]: return ( None if self.attributes is None - else self.attributes.sisense_dashboard_widget_count + else self.attributes.redash_dashboard_widget_count ) - @sisense_dashboard_widget_count.setter - def sisense_dashboard_widget_count( - self, sisense_dashboard_widget_count: Optional[int] + @redash_dashboard_widget_count.setter + def redash_dashboard_widget_count( + self, redash_dashboard_widget_count: Optional[int] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.sisense_dashboard_widget_count = sisense_dashboard_widget_count - - @property - def sisense_datamodels(self) -> Optional[list[SisenseDatamodel]]: - return None if self.attributes is None else self.attributes.sisense_datamodels - - @sisense_datamodels.setter - def sisense_datamodels(self, sisense_datamodels: Optional[list[SisenseDatamodel]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodels = sisense_datamodels - - @property - def sisense_widgets(self) -> Optional[list[SisenseWidget]]: - return None if self.attributes is None else self.attributes.sisense_widgets - - @sisense_widgets.setter - def sisense_widgets(self, sisense_widgets: Optional[list[SisenseWidget]]): - if 
self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_widgets = sisense_widgets - - @property - def sisense_folder(self) -> Optional[SisenseFolder]: - return None if self.attributes is None else self.attributes.sisense_folder - - @sisense_folder.setter - def sisense_folder(self, sisense_folder: Optional[SisenseFolder]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_folder = sisense_folder + self.attributes.redash_dashboard_widget_count = redash_dashboard_widget_count - class Attributes(Sisense.Attributes): - sisense_dashboard_folder_qualified_name: Optional[str] = Field( - None, description="", alias="sisenseDashboardFolderQualifiedName" - ) - sisense_dashboard_widget_count: Optional[int] = Field( - None, description="", alias="sisenseDashboardWidgetCount" + class Attributes(Redash.Attributes): + redash_dashboard_widget_count: Optional[int] = Field( + None, description="", alias="redashDashboardWidgetCount" ) - sisense_datamodels: Optional[list[SisenseDatamodel]] = Field( - None, description="", alias="sisenseDatamodels" - ) # relationship - sisense_widgets: Optional[list[SisenseWidget]] = Field( - None, description="", alias="sisenseWidgets" - ) # relationship - sisense_folder: Optional[SisenseFolder] = Field( - None, description="", alias="sisenseFolder" - ) # relationship - attributes: "SisenseDashboard.Attributes" = Field( - default_factory=lambda: SisenseDashboard.Attributes(), + attributes: "RedashDashboard.Attributes" = Field( + default_factory=lambda: RedashDashboard.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -SisenseFolder.Attributes.update_forward_refs() - - -SisenseWidget.Attributes.update_forward_refs() - - -SisenseDatamodel.Attributes.update_forward_refs() - - -SisenseDatamodelTable.Attributes.update_forward_refs() - - -SisenseDashboard.Attributes.update_forward_refs() +RedashDashboard.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset73.py b/pyatlan/model/assets/asset73.py index 3cc60fd3c..10c355279 100644 --- a/pyatlan/model/assets/asset73.py +++ b/pyatlan/model/assets/asset73.py @@ -4,399 +4,342 @@ from __future__ import annotations +from datetime import datetime from typing import ClassVar, Optional from pydantic import Field, validator from pyatlan.model.fields.atlan_fields import ( - BooleanField, KeywordField, KeywordTextField, NumericField, RelationField, ) -from .asset47 import Metabase +from .asset45 import Redash -class MetabaseQuestion(Metabase): +class RedashQuery(Redash): """Description""" - type_name: str = Field("MetabaseQuestion", allow_mutation=False) + type_name: str = Field("RedashQuery", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "MetabaseQuestion": - raise ValueError("must be MetabaseQuestion") + if v != "RedashQuery": + raise ValueError("must be RedashQuery") return v def __setattr__(self, name, value): - if name in MetabaseQuestion._convenience_properties: + if name in RedashQuery._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - METABASE_DASHBOARD_COUNT: ClassVar[NumericField] = NumericField( - "metabaseDashboardCount", "metabaseDashboardCount" + REDASH_QUERY_SQL: ClassVar[KeywordField] = KeywordField( + "redashQuerySQL", "redashQuerySQL" ) """ - TBC + SQL code of this query. 
""" - METABASE_QUERY_TYPE: ClassVar[KeywordTextField] = KeywordTextField( - "metabaseQueryType", "metabaseQueryType", "metabaseQueryType.text" + REDASH_QUERY_PARAMETERS: ClassVar[KeywordField] = KeywordField( + "redashQueryParameters", "redashQueryParameters" ) """ - TBC + Parameters of this query. """ - METABASE_QUERY: ClassVar[KeywordTextField] = KeywordTextField( - "metabaseQuery", "metabaseQuery.keyword", "metabaseQuery" + REDASH_QUERY_SCHEDULE: ClassVar[KeywordField] = KeywordField( + "redashQuerySchedule", "redashQuerySchedule" ) """ - TBC - """ - - METABASE_DASHBOARDS: ClassVar[RelationField] = RelationField("metabaseDashboards") - """ - TBC - """ - METABASE_COLLECTION: ClassVar[RelationField] = RelationField("metabaseCollection") + Schedule for this query. """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "metabase_dashboard_count", - "metabase_query_type", - "metabase_query", - "metabase_dashboards", - "metabase_collection", - ] - - @property - def metabase_dashboard_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.metabase_dashboard_count - ) - - @metabase_dashboard_count.setter - def metabase_dashboard_count(self, metabase_dashboard_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_dashboard_count = metabase_dashboard_count - - @property - def metabase_query_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.metabase_query_type - - @metabase_query_type.setter - def metabase_query_type(self, metabase_query_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_query_type = metabase_query_type - - @property - def metabase_query(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.metabase_query - - @metabase_query.setter - def metabase_query(self, metabase_query: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_query = metabase_query - - @property - def metabase_dashboards(self) -> Optional[list[MetabaseDashboard]]: - return None if self.attributes is None else self.attributes.metabase_dashboards - - @metabase_dashboards.setter - def metabase_dashboards( - self, metabase_dashboards: Optional[list[MetabaseDashboard]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_dashboards = metabase_dashboards - - @property - def metabase_collection(self) -> Optional[MetabaseCollection]: - return None if self.attributes is None else self.attributes.metabase_collection - - @metabase_collection.setter - def metabase_collection(self, metabase_collection: Optional[MetabaseCollection]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_collection = metabase_collection - - class Attributes(Metabase.Attributes): - metabase_dashboard_count: Optional[int] = Field( - None, description="", alias="metabaseDashboardCount" - ) - metabase_query_type: Optional[str] = Field( - None, description="", alias="metabaseQueryType" - ) - metabase_query: Optional[str] = Field( - None, description="", alias="metabaseQuery" - ) - metabase_dashboards: Optional[list[MetabaseDashboard]] = Field( - None, description="", alias="metabaseDashboards" - ) # relationship - metabase_collection: Optional[MetabaseCollection] = Field( - None, description="", alias="metabaseCollection" - ) # relationship - - 
attributes: "MetabaseQuestion.Attributes" = Field( - default_factory=lambda: MetabaseQuestion.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", + REDASH_QUERY_LAST_EXECUTION_RUNTIME: ClassVar[NumericField] = NumericField( + "redashQueryLastExecutionRuntime", "redashQueryLastExecutionRuntime" ) - - -class MetabaseCollection(Metabase): - """Description""" - - type_name: str = Field("MetabaseCollection", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MetabaseCollection": - raise ValueError("must be MetabaseCollection") - return v - - def __setattr__(self, name, value): - if name in MetabaseCollection._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - METABASE_SLUG: ClassVar[KeywordTextField] = KeywordTextField( - "metabaseSlug", "metabaseSlug", "metabaseSlug.text" - ) - """ - TBC """ - METABASE_COLOR: ClassVar[KeywordField] = KeywordField( - "metabaseColor", "metabaseColor" - ) - """ - TBC + Elapsed time of the last execution of this query. """ - METABASE_NAMESPACE: ClassVar[KeywordTextField] = KeywordTextField( - "metabaseNamespace", "metabaseNamespace", "metabaseNamespace.text" + REDASH_QUERY_LAST_EXECUTED_AT: ClassVar[NumericField] = NumericField( + "redashQueryLastExecutedAt", "redashQueryLastExecutedAt" ) """ - TBC + Time (epoch) when this query was last executed, in milliseconds. """ - METABASE_IS_PERSONAL_COLLECTION: ClassVar[BooleanField] = BooleanField( - "metabaseIsPersonalCollection", "metabaseIsPersonalCollection" + REDASH_QUERY_SCHEDULE_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( + "redashQueryScheduleHumanized", + "redashQueryScheduleHumanized", + "redashQueryScheduleHumanized.text", ) """ - TBC + Schdule for this query in readable text for overview tab and filtering. 
""" - METABASE_DASHBOARDS: ClassVar[RelationField] = RelationField("metabaseDashboards") - """ - TBC - """ - METABASE_QUESTIONS: ClassVar[RelationField] = RelationField("metabaseQuestions") + REDASH_VISUALIZATIONS: ClassVar[RelationField] = RelationField( + "redashVisualizations" + ) """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "metabase_slug", - "metabase_color", - "metabase_namespace", - "metabase_is_personal_collection", - "metabase_dashboards", - "metabase_questions", + "redash_query_s_q_l", + "redash_query_parameters", + "redash_query_schedule", + "redash_query_last_execution_runtime", + "redash_query_last_executed_at", + "redash_query_schedule_humanized", + "redash_visualizations", ] @property - def metabase_slug(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.metabase_slug + def redash_query_s_q_l(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.redash_query_s_q_l - @metabase_slug.setter - def metabase_slug(self, metabase_slug: Optional[str]): + @redash_query_s_q_l.setter + def redash_query_s_q_l(self, redash_query_s_q_l: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.metabase_slug = metabase_slug + self.attributes.redash_query_s_q_l = redash_query_s_q_l @property - def metabase_color(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.metabase_color + def redash_query_parameters(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.redash_query_parameters + ) - @metabase_color.setter - def metabase_color(self, metabase_color: Optional[str]): + @redash_query_parameters.setter + def redash_query_parameters(self, redash_query_parameters: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.metabase_color = metabase_color + self.attributes.redash_query_parameters = redash_query_parameters @property - def metabase_namespace(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.metabase_namespace + def redash_query_schedule(self) -> Optional[dict[str, str]]: + return ( + None if self.attributes is None else self.attributes.redash_query_schedule + ) - @metabase_namespace.setter - def metabase_namespace(self, metabase_namespace: Optional[str]): + @redash_query_schedule.setter + def redash_query_schedule(self, redash_query_schedule: Optional[dict[str, str]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.metabase_namespace = metabase_namespace + self.attributes.redash_query_schedule = redash_query_schedule @property - def metabase_is_personal_collection(self) -> Optional[bool]: + def redash_query_last_execution_runtime(self) -> Optional[float]: return ( None if self.attributes is None - else self.attributes.metabase_is_personal_collection + else self.attributes.redash_query_last_execution_runtime ) - @metabase_is_personal_collection.setter - def metabase_is_personal_collection( - self, metabase_is_personal_collection: Optional[bool] + @redash_query_last_execution_runtime.setter + def redash_query_last_execution_runtime( + self, redash_query_last_execution_runtime: Optional[float] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.metabase_is_personal_collection = ( - metabase_is_personal_collection + self.attributes.redash_query_last_execution_runtime = ( + redash_query_last_execution_runtime ) @property - def metabase_dashboards(self) 
-> Optional[list[MetabaseDashboard]]: - return None if self.attributes is None else self.attributes.metabase_dashboards + def redash_query_last_executed_at(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.redash_query_last_executed_at + ) - @metabase_dashboards.setter - def metabase_dashboards( - self, metabase_dashboards: Optional[list[MetabaseDashboard]] + @redash_query_last_executed_at.setter + def redash_query_last_executed_at( + self, redash_query_last_executed_at: Optional[datetime] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.metabase_dashboards = metabase_dashboards + self.attributes.redash_query_last_executed_at = redash_query_last_executed_at @property - def metabase_questions(self) -> Optional[list[MetabaseQuestion]]: - return None if self.attributes is None else self.attributes.metabase_questions + def redash_query_schedule_humanized(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.redash_query_schedule_humanized + ) - @metabase_questions.setter - def metabase_questions(self, metabase_questions: Optional[list[MetabaseQuestion]]): + @redash_query_schedule_humanized.setter + def redash_query_schedule_humanized( + self, redash_query_schedule_humanized: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.metabase_questions = metabase_questions + self.attributes.redash_query_schedule_humanized = ( + redash_query_schedule_humanized + ) - class Attributes(Metabase.Attributes): - metabase_slug: Optional[str] = Field(None, description="", alias="metabaseSlug") - metabase_color: Optional[str] = Field( - None, description="", alias="metabaseColor" + @property + def redash_visualizations(self) -> Optional[list[RedashVisualization]]: + return ( + None if self.attributes is None else self.attributes.redash_visualizations ) - metabase_namespace: Optional[str] = Field( - None, description="", alias="metabaseNamespace" + + @redash_visualizations.setter + def redash_visualizations( + self, redash_visualizations: Optional[list[RedashVisualization]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.redash_visualizations = redash_visualizations + + class Attributes(Redash.Attributes): + redash_query_s_q_l: Optional[str] = Field( + None, description="", alias="redashQuerySQL" ) - metabase_is_personal_collection: Optional[bool] = Field( - None, description="", alias="metabaseIsPersonalCollection" + redash_query_parameters: Optional[str] = Field( + None, description="", alias="redashQueryParameters" ) - metabase_dashboards: Optional[list[MetabaseDashboard]] = Field( - None, description="", alias="metabaseDashboards" - ) # relationship - metabase_questions: Optional[list[MetabaseQuestion]] = Field( - None, description="", alias="metabaseQuestions" + redash_query_schedule: Optional[dict[str, str]] = Field( + None, description="", alias="redashQuerySchedule" + ) + redash_query_last_execution_runtime: Optional[float] = Field( + None, description="", alias="redashQueryLastExecutionRuntime" + ) + redash_query_last_executed_at: Optional[datetime] = Field( + None, description="", alias="redashQueryLastExecutedAt" + ) + redash_query_schedule_humanized: Optional[str] = Field( + None, description="", alias="redashQueryScheduleHumanized" + ) + redash_visualizations: Optional[list[RedashVisualization]] = Field( + None, description="", alias="redashVisualizations" ) # relationship - attributes: 
"MetabaseCollection.Attributes" = Field( - default_factory=lambda: MetabaseCollection.Attributes(), + attributes: "RedashQuery.Attributes" = Field( + default_factory=lambda: RedashQuery.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class MetabaseDashboard(Metabase): +class RedashVisualization(Redash): """Description""" - type_name: str = Field("MetabaseDashboard", allow_mutation=False) + type_name: str = Field("RedashVisualization", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "MetabaseDashboard": - raise ValueError("must be MetabaseDashboard") + if v != "RedashVisualization": + raise ValueError("must be RedashVisualization") return v def __setattr__(self, name, value): - if name in MetabaseDashboard._convenience_properties: + if name in RedashVisualization._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - METABASE_QUESTION_COUNT: ClassVar[NumericField] = NumericField( - "metabaseQuestionCount", "metabaseQuestionCount" + REDASH_VISUALIZATION_TYPE: ClassVar[KeywordField] = KeywordField( + "redashVisualizationType", "redashVisualizationType" ) """ - TBC + Type of this visualization. """ - - METABASE_QUESTIONS: ClassVar[RelationField] = RelationField("metabaseQuestions") + REDASH_QUERY_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "redashQueryName", "redashQueryName.keyword", "redashQueryName" + ) """ - TBC + Simple name of the query from which this visualization is created. + """ + REDASH_QUERY_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "redashQueryQualifiedName", + "redashQueryQualifiedName", + "redashQueryQualifiedName.text", + ) + """ + Unique name of the query from which this visualization is created. 
""" - METABASE_COLLECTION: ClassVar[RelationField] = RelationField("metabaseCollection") + + REDASH_QUERY: ClassVar[RelationField] = RelationField("redashQuery") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "metabase_question_count", - "metabase_questions", - "metabase_collection", + "redash_visualization_type", + "redash_query_name", + "redash_query_qualified_name", + "redash_query", ] @property - def metabase_question_count(self) -> Optional[int]: + def redash_visualization_type(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.metabase_question_count + None + if self.attributes is None + else self.attributes.redash_visualization_type ) - @metabase_question_count.setter - def metabase_question_count(self, metabase_question_count: Optional[int]): + @redash_visualization_type.setter + def redash_visualization_type(self, redash_visualization_type: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.metabase_question_count = metabase_question_count + self.attributes.redash_visualization_type = redash_visualization_type @property - def metabase_questions(self) -> Optional[list[MetabaseQuestion]]: - return None if self.attributes is None else self.attributes.metabase_questions + def redash_query_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.redash_query_name - @metabase_questions.setter - def metabase_questions(self, metabase_questions: Optional[list[MetabaseQuestion]]): + @redash_query_name.setter + def redash_query_name(self, redash_query_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.metabase_questions = metabase_questions + self.attributes.redash_query_name = redash_query_name @property - def metabase_collection(self) -> Optional[MetabaseCollection]: - return None if self.attributes is None else self.attributes.metabase_collection + def redash_query_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.redash_query_qualified_name + ) - @metabase_collection.setter - def metabase_collection(self, metabase_collection: Optional[MetabaseCollection]): + @redash_query_qualified_name.setter + def redash_query_qualified_name(self, redash_query_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.metabase_collection = metabase_collection + self.attributes.redash_query_qualified_name = redash_query_qualified_name + + @property + def redash_query(self) -> Optional[RedashQuery]: + return None if self.attributes is None else self.attributes.redash_query - class Attributes(Metabase.Attributes): - metabase_question_count: Optional[int] = Field( - None, description="", alias="metabaseQuestionCount" + @redash_query.setter + def redash_query(self, redash_query: Optional[RedashQuery]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.redash_query = redash_query + + class Attributes(Redash.Attributes): + redash_visualization_type: Optional[str] = Field( + None, description="", alias="redashVisualizationType" ) - metabase_questions: Optional[list[MetabaseQuestion]] = Field( - None, description="", alias="metabaseQuestions" - ) # relationship - metabase_collection: Optional[MetabaseCollection] = Field( - None, description="", alias="metabaseCollection" + redash_query_name: Optional[str] = Field( + None, description="", alias="redashQueryName" + ) + 
redash_query_qualified_name: Optional[str] = Field( + None, description="", alias="redashQueryQualifiedName" + ) + redash_query: Optional[RedashQuery] = Field( + None, description="", alias="redashQuery" ) # relationship - attributes: "MetabaseDashboard.Attributes" = Field( - default_factory=lambda: MetabaseDashboard.Attributes(), + attributes: "RedashVisualization.Attributes" = Field( + default_factory=lambda: RedashVisualization.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -MetabaseQuestion.Attributes.update_forward_refs() - - -MetabaseCollection.Attributes.update_forward_refs() +RedashQuery.Attributes.update_forward_refs() -MetabaseDashboard.Attributes.update_forward_refs() +RedashVisualization.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset74.py b/pyatlan/model/assets/asset74.py index 6c3485961..b0d1600f1 100644 --- a/pyatlan/model/assets/asset74.py +++ b/pyatlan/model/assets/asset74.py @@ -9,924 +9,1105 @@ from pydantic import Field, validator -from pyatlan.model.enums import ( - QuickSightAnalysisStatus, - QuickSightDatasetFieldType, - QuickSightDatasetImportMode, - QuickSightFolderType, -) from pyatlan.model.fields.atlan_fields import ( + BooleanField, KeywordField, KeywordTextField, NumericField, RelationField, ) -from .asset48 import QuickSight +from .asset46 import Sisense -class QuickSightFolder(QuickSight): +class SisenseFolder(Sisense): """Description""" - type_name: str = Field("QuickSightFolder", allow_mutation=False) + type_name: str = Field("SisenseFolder", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "QuickSightFolder": - raise ValueError("must be QuickSightFolder") + if v != "SisenseFolder": + raise ValueError("must be SisenseFolder") return v def __setattr__(self, name, value): - if name in QuickSightFolder._convenience_properties: + if name in SisenseFolder._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - QUICK_SIGHT_FOLDER_TYPE: ClassVar[KeywordField] = KeywordField( - "quickSightFolderType", "quickSightFolderType" + SISENSE_FOLDER_PARENT_FOLDER_QUALIFIED_NAME: ClassVar[ + KeywordTextField + ] = KeywordTextField( + "sisenseFolderParentFolderQualifiedName", + "sisenseFolderParentFolderQualifiedName", + "sisenseFolderParentFolderQualifiedName.text", ) """ - Shared or private type of folder + Unique name of the parent folder in which this folder exists. 
""" - QUICK_SIGHT_FOLDER_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "quickSightFolderHierarchy", "quickSightFolderHierarchy" + + SISENSE_CHILD_FOLDERS: ClassVar[RelationField] = RelationField( + "sisenseChildFolders" ) """ - Detailed path of the folder + TBC """ - - QUICK_SIGHT_DASHBOARDS: ClassVar[RelationField] = RelationField( - "quickSightDashboards" - ) + SISENSE_WIDGETS: ClassVar[RelationField] = RelationField("sisenseWidgets") """ TBC """ - QUICK_SIGHT_DATASETS: ClassVar[RelationField] = RelationField("quickSightDatasets") + SISENSE_DASHBOARDS: ClassVar[RelationField] = RelationField("sisenseDashboards") """ TBC """ - QUICK_SIGHT_ANALYSES: ClassVar[RelationField] = RelationField("quickSightAnalyses") + SISENSE_PARENT_FOLDER: ClassVar[RelationField] = RelationField( + "sisenseParentFolder" + ) """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "quick_sight_folder_type", - "quick_sight_folder_hierarchy", - "quick_sight_dashboards", - "quick_sight_datasets", - "quick_sight_analyses", + "sisense_folder_parent_folder_qualified_name", + "sisense_child_folders", + "sisense_widgets", + "sisense_dashboards", + "sisense_parent_folder", ] @property - def quick_sight_folder_type(self) -> Optional[QuickSightFolderType]: + def sisense_folder_parent_folder_qualified_name(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.quick_sight_folder_type + None + if self.attributes is None + else self.attributes.sisense_folder_parent_folder_qualified_name ) - @quick_sight_folder_type.setter - def quick_sight_folder_type( - self, quick_sight_folder_type: Optional[QuickSightFolderType] + @sisense_folder_parent_folder_qualified_name.setter + def sisense_folder_parent_folder_qualified_name( + self, sisense_folder_parent_folder_qualified_name: Optional[str] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_folder_type = quick_sight_folder_type + self.attributes.sisense_folder_parent_folder_qualified_name = ( + sisense_folder_parent_folder_qualified_name + ) @property - def quick_sight_folder_hierarchy(self) -> Optional[list[dict[str, str]]]: + def sisense_child_folders(self) -> Optional[list[SisenseFolder]]: return ( - None - if self.attributes is None - else self.attributes.quick_sight_folder_hierarchy + None if self.attributes is None else self.attributes.sisense_child_folders ) - @quick_sight_folder_hierarchy.setter - def quick_sight_folder_hierarchy( - self, quick_sight_folder_hierarchy: Optional[list[dict[str, str]]] + @sisense_child_folders.setter + def sisense_child_folders( + self, sisense_child_folders: Optional[list[SisenseFolder]] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_folder_hierarchy = quick_sight_folder_hierarchy + self.attributes.sisense_child_folders = sisense_child_folders @property - def quick_sight_dashboards(self) -> Optional[list[QuickSightDashboard]]: - return ( - None if self.attributes is None else self.attributes.quick_sight_dashboards - ) + def sisense_widgets(self) -> Optional[list[SisenseWidget]]: + return None if self.attributes is None else self.attributes.sisense_widgets - @quick_sight_dashboards.setter - def quick_sight_dashboards( - self, quick_sight_dashboards: Optional[list[QuickSightDashboard]] - ): + @sisense_widgets.setter + def sisense_widgets(self, sisense_widgets: Optional[list[SisenseWidget]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_dashboards = 
quick_sight_dashboards + self.attributes.sisense_widgets = sisense_widgets @property - def quick_sight_datasets(self) -> Optional[list[QuickSightDataset]]: - return None if self.attributes is None else self.attributes.quick_sight_datasets + def sisense_dashboards(self) -> Optional[list[SisenseDashboard]]: + return None if self.attributes is None else self.attributes.sisense_dashboards - @quick_sight_datasets.setter - def quick_sight_datasets( - self, quick_sight_datasets: Optional[list[QuickSightDataset]] - ): + @sisense_dashboards.setter + def sisense_dashboards(self, sisense_dashboards: Optional[list[SisenseDashboard]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_datasets = quick_sight_datasets + self.attributes.sisense_dashboards = sisense_dashboards @property - def quick_sight_analyses(self) -> Optional[list[QuickSightAnalysis]]: - return None if self.attributes is None else self.attributes.quick_sight_analyses + def sisense_parent_folder(self) -> Optional[SisenseFolder]: + return ( + None if self.attributes is None else self.attributes.sisense_parent_folder + ) - @quick_sight_analyses.setter - def quick_sight_analyses( - self, quick_sight_analyses: Optional[list[QuickSightAnalysis]] - ): + @sisense_parent_folder.setter + def sisense_parent_folder(self, sisense_parent_folder: Optional[SisenseFolder]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_analyses = quick_sight_analyses + self.attributes.sisense_parent_folder = sisense_parent_folder - class Attributes(QuickSight.Attributes): - quick_sight_folder_type: Optional[QuickSightFolderType] = Field( - None, description="", alias="quickSightFolderType" + class Attributes(Sisense.Attributes): + sisense_folder_parent_folder_qualified_name: Optional[str] = Field( + None, description="", alias="sisenseFolderParentFolderQualifiedName" ) - quick_sight_folder_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="quickSightFolderHierarchy" - ) - quick_sight_dashboards: Optional[list[QuickSightDashboard]] = Field( - None, description="", alias="quickSightDashboards" + sisense_child_folders: Optional[list[SisenseFolder]] = Field( + None, description="", alias="sisenseChildFolders" + ) # relationship + sisense_widgets: Optional[list[SisenseWidget]] = Field( + None, description="", alias="sisenseWidgets" ) # relationship - quick_sight_datasets: Optional[list[QuickSightDataset]] = Field( - None, description="", alias="quickSightDatasets" + sisense_dashboards: Optional[list[SisenseDashboard]] = Field( + None, description="", alias="sisenseDashboards" ) # relationship - quick_sight_analyses: Optional[list[QuickSightAnalysis]] = Field( - None, description="", alias="quickSightAnalyses" + sisense_parent_folder: Optional[SisenseFolder] = Field( + None, description="", alias="sisenseParentFolder" ) # relationship - attributes: "QuickSightFolder.Attributes" = Field( - default_factory=lambda: QuickSightFolder.Attributes(), + attributes: "SisenseFolder.Attributes" = Field( + default_factory=lambda: SisenseFolder.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class QuickSightDashboardVisual(QuickSight): +class SisenseWidget(Sisense): """Description""" - type_name: str = Field("QuickSightDashboardVisual", allow_mutation=False) + type_name: str = Field("SisenseWidget", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "QuickSightDashboardVisual": - raise ValueError("must be QuickSightDashboardVisual") + if v != "SisenseWidget": + raise ValueError("must be SisenseWidget") return v def __setattr__(self, name, value): - if name in QuickSightDashboardVisual._convenience_properties: + if name in SisenseWidget._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - QUICK_SIGHT_DASHBOARD_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "quickSightDashboardQualifiedName", - "quickSightDashboardQualifiedName", - "quickSightDashboardQualifiedName.text", + SISENSE_WIDGET_COLUMN_COUNT: ClassVar[NumericField] = NumericField( + "sisenseWidgetColumnCount", "sisenseWidgetColumnCount" ) """ - TBC + Number of columns used in this widget. + """ + SISENSE_WIDGET_SUB_TYPE: ClassVar[KeywordField] = KeywordField( + "sisenseWidgetSubType", "sisenseWidgetSubType" + ) + """ + Subtype of this widget. + """ + SISENSE_WIDGET_SIZE: ClassVar[KeywordField] = KeywordField( + "sisenseWidgetSize", "sisenseWidgetSize" + ) + """ + Size of this widget. + """ + SISENSE_WIDGET_DASHBOARD_QUALIFIED_NAME: ClassVar[ + KeywordTextField + ] = KeywordTextField( + "sisenseWidgetDashboardQualifiedName", + "sisenseWidgetDashboardQualifiedName", + "sisenseWidgetDashboardQualifiedName.text", + ) + """ + Unique name of the dashboard in which this widget exists. + """ + SISENSE_WIDGET_FOLDER_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "sisenseWidgetFolderQualifiedName", + "sisenseWidgetFolderQualifiedName", + "sisenseWidgetFolderQualifiedName.text", + ) + """ + Unique name of the folder in which this widget exists. 
""" - QUICK_SIGHT_DASHBOARD: ClassVar[RelationField] = RelationField( - "quickSightDashboard" + SISENSE_DATAMODEL_TABLES: ClassVar[RelationField] = RelationField( + "sisenseDatamodelTables" ) """ TBC """ + SISENSE_FOLDER: ClassVar[RelationField] = RelationField("sisenseFolder") + """ + TBC + """ + SISENSE_DASHBOARD: ClassVar[RelationField] = RelationField("sisenseDashboard") + """ + TBC + """ _convenience_properties: ClassVar[list[str]] = [ - "quick_sight_dashboard_qualified_name", - "quick_sight_dashboard", + "sisense_widget_column_count", + "sisense_widget_sub_type", + "sisense_widget_size", + "sisense_widget_dashboard_qualified_name", + "sisense_widget_folder_qualified_name", + "sisense_datamodel_tables", + "sisense_folder", + "sisense_dashboard", ] @property - def quick_sight_dashboard_qualified_name(self) -> Optional[str]: + def sisense_widget_column_count(self) -> Optional[int]: return ( None if self.attributes is None - else self.attributes.quick_sight_dashboard_qualified_name + else self.attributes.sisense_widget_column_count ) - @quick_sight_dashboard_qualified_name.setter - def quick_sight_dashboard_qualified_name( - self, quick_sight_dashboard_qualified_name: Optional[str] - ): + @sisense_widget_column_count.setter + def sisense_widget_column_count(self, sisense_widget_column_count: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_dashboard_qualified_name = ( - quick_sight_dashboard_qualified_name - ) + self.attributes.sisense_widget_column_count = sisense_widget_column_count @property - def quick_sight_dashboard(self) -> Optional[QuickSightDashboard]: + def sisense_widget_sub_type(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.quick_sight_dashboard + None if self.attributes is None else self.attributes.sisense_widget_sub_type ) - @quick_sight_dashboard.setter - def quick_sight_dashboard( - self, quick_sight_dashboard: Optional[QuickSightDashboard] - ): + @sisense_widget_sub_type.setter + def sisense_widget_sub_type(self, sisense_widget_sub_type: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_dashboard = quick_sight_dashboard - - class Attributes(QuickSight.Attributes): - quick_sight_dashboard_qualified_name: Optional[str] = Field( - None, description="", alias="quickSightDashboardQualifiedName" - ) - quick_sight_dashboard: Optional[QuickSightDashboard] = Field( - None, description="", alias="quickSightDashboard" - ) # relationship - - attributes: "QuickSightDashboardVisual.Attributes" = Field( - default_factory=lambda: QuickSightDashboardVisual.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class QuickSightAnalysisVisual(QuickSight): - """Description""" + self.attributes.sisense_widget_sub_type = sisense_widget_sub_type - type_name: str = Field("QuickSightAnalysisVisual", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "QuickSightAnalysisVisual": - raise ValueError("must be QuickSightAnalysisVisual") - return v - - def __setattr__(self, name, value): - if name in QuickSightAnalysisVisual._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - QUICK_SIGHT_ANALYSIS_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "quickSightAnalysisQualifiedName", - "quickSightAnalysisQualifiedName", - "quickSightAnalysisQualifiedName.text", - ) - """ - Qualified name of the QuickSight Analysis - """ - - QUICK_SIGHT_ANALYSIS: ClassVar[RelationField] = RelationField("quickSightAnalysis") - """ - TBC - """ + @property + def sisense_widget_size(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.sisense_widget_size - _convenience_properties: ClassVar[list[str]] = [ - "quick_sight_analysis_qualified_name", - "quick_sight_analysis", - ] + @sisense_widget_size.setter + def sisense_widget_size(self, sisense_widget_size: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_widget_size = sisense_widget_size @property - def quick_sight_analysis_qualified_name(self) -> Optional[str]: + def sisense_widget_dashboard_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.quick_sight_analysis_qualified_name + else self.attributes.sisense_widget_dashboard_qualified_name ) - @quick_sight_analysis_qualified_name.setter - def quick_sight_analysis_qualified_name( - self, quick_sight_analysis_qualified_name: Optional[str] + @sisense_widget_dashboard_qualified_name.setter + def sisense_widget_dashboard_qualified_name( + self, sisense_widget_dashboard_qualified_name: Optional[str] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_analysis_qualified_name = ( - quick_sight_analysis_qualified_name + self.attributes.sisense_widget_dashboard_qualified_name = ( + sisense_widget_dashboard_qualified_name ) @property - def quick_sight_analysis(self) -> Optional[QuickSightAnalysis]: - return None if self.attributes is None else self.attributes.quick_sight_analysis + def sisense_widget_folder_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_widget_folder_qualified_name + ) - @quick_sight_analysis.setter - def quick_sight_analysis(self, quick_sight_analysis: Optional[QuickSightAnalysis]): + @sisense_widget_folder_qualified_name.setter + def sisense_widget_folder_qualified_name( + self, sisense_widget_folder_qualified_name: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_analysis = quick_sight_analysis - - class Attributes(QuickSight.Attributes): - quick_sight_analysis_qualified_name: Optional[str] = Field( - None, description="", alias="quickSightAnalysisQualifiedName" + self.attributes.sisense_widget_folder_qualified_name = ( + sisense_widget_folder_qualified_name ) - quick_sight_analysis: Optional[QuickSightAnalysis] = Field( - None, description="", alias="quickSightAnalysis" - ) # 
relationship - - attributes: "QuickSightAnalysisVisual.Attributes" = Field( - default_factory=lambda: QuickSightAnalysisVisual.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class QuickSightDatasetField(QuickSight): - """Description""" - - type_name: str = Field("QuickSightDatasetField", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "QuickSightDatasetField": - raise ValueError("must be QuickSightDatasetField") - return v - - def __setattr__(self, name, value): - if name in QuickSightDatasetField._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - QUICK_SIGHT_DATASET_FIELD_TYPE: ClassVar[KeywordField] = KeywordField( - "quickSightDatasetFieldType", "quickSightDatasetFieldType" - ) - """ - Datatype of column in the dataset - """ - QUICK_SIGHT_DATASET_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "quickSightDatasetQualifiedName", - "quickSightDatasetQualifiedName", - "quickSightDatasetQualifiedName.text", - ) - """ - Qualified name of the parent dataset - """ - - QUICK_SIGHT_DATASET: ClassVar[RelationField] = RelationField("quickSightDataset") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "quick_sight_dataset_field_type", - "quick_sight_dataset_qualified_name", - "quick_sight_dataset", - ] @property - def quick_sight_dataset_field_type(self) -> Optional[QuickSightDatasetFieldType]: + def sisense_datamodel_tables(self) -> Optional[list[SisenseDatamodelTable]]: return ( None if self.attributes is None - else self.attributes.quick_sight_dataset_field_type + else self.attributes.sisense_datamodel_tables ) - @quick_sight_dataset_field_type.setter - def quick_sight_dataset_field_type( - self, quick_sight_dataset_field_type: Optional[QuickSightDatasetFieldType] + @sisense_datamodel_tables.setter + def sisense_datamodel_tables( + self, sisense_datamodel_tables: Optional[list[SisenseDatamodelTable]] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_dataset_field_type = quick_sight_dataset_field_type + self.attributes.sisense_datamodel_tables = sisense_datamodel_tables @property - def quick_sight_dataset_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_dataset_qualified_name - ) + def sisense_folder(self) -> Optional[SisenseFolder]: + return None if self.attributes is None else self.attributes.sisense_folder - @quick_sight_dataset_qualified_name.setter - def quick_sight_dataset_qualified_name( - self, quick_sight_dataset_qualified_name: Optional[str] - ): + @sisense_folder.setter + def sisense_folder(self, sisense_folder: Optional[SisenseFolder]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_dataset_qualified_name = ( - quick_sight_dataset_qualified_name - ) + self.attributes.sisense_folder = sisense_folder @property - def quick_sight_dataset(self) -> Optional[QuickSightDataset]: - return None if self.attributes is None else self.attributes.quick_sight_dataset + def sisense_dashboard(self) -> Optional[SisenseDashboard]: + return None if self.attributes is None else self.attributes.sisense_dashboard - @quick_sight_dataset.setter - def quick_sight_dataset(self, quick_sight_dataset: Optional[QuickSightDataset]): + @sisense_dashboard.setter + 
def sisense_dashboard(self, sisense_dashboard: Optional[SisenseDashboard]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_dataset = quick_sight_dataset + self.attributes.sisense_dashboard = sisense_dashboard - class Attributes(QuickSight.Attributes): - quick_sight_dataset_field_type: Optional[QuickSightDatasetFieldType] = Field( - None, description="", alias="quickSightDatasetFieldType" + class Attributes(Sisense.Attributes): + sisense_widget_column_count: Optional[int] = Field( + None, description="", alias="sisenseWidgetColumnCount" ) - quick_sight_dataset_qualified_name: Optional[str] = Field( - None, description="", alias="quickSightDatasetQualifiedName" + sisense_widget_sub_type: Optional[str] = Field( + None, description="", alias="sisenseWidgetSubType" ) - quick_sight_dataset: Optional[QuickSightDataset] = Field( - None, description="", alias="quickSightDataset" + sisense_widget_size: Optional[str] = Field( + None, description="", alias="sisenseWidgetSize" + ) + sisense_widget_dashboard_qualified_name: Optional[str] = Field( + None, description="", alias="sisenseWidgetDashboardQualifiedName" + ) + sisense_widget_folder_qualified_name: Optional[str] = Field( + None, description="", alias="sisenseWidgetFolderQualifiedName" + ) + sisense_datamodel_tables: Optional[list[SisenseDatamodelTable]] = Field( + None, description="", alias="sisenseDatamodelTables" + ) # relationship + sisense_folder: Optional[SisenseFolder] = Field( + None, description="", alias="sisenseFolder" + ) # relationship + sisense_dashboard: Optional[SisenseDashboard] = Field( + None, description="", alias="sisenseDashboard" ) # relationship - attributes: "QuickSightDatasetField.Attributes" = Field( - default_factory=lambda: QuickSightDatasetField.Attributes(), + attributes: "SisenseWidget.Attributes" = Field( + default_factory=lambda: SisenseWidget.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class QuickSightAnalysis(QuickSight): +class SisenseDatamodel(Sisense): """Description""" - type_name: str = Field("QuickSightAnalysis", allow_mutation=False) + type_name: str = Field("SisenseDatamodel", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "QuickSightAnalysis": - raise ValueError("must be QuickSightAnalysis") + if v != "SisenseDatamodel": + raise ValueError("must be SisenseDatamodel") return v def __setattr__(self, name, value): - if name in QuickSightAnalysis._convenience_properties: + if name in SisenseDatamodel._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - QUICK_SIGHT_ANALYSIS_STATUS: ClassVar[KeywordField] = KeywordField( - "quickSightAnalysisStatus", "quickSightAnalysisStatus" + SISENSE_DATAMODEL_TABLE_COUNT: ClassVar[NumericField] = NumericField( + "sisenseDatamodelTableCount", "sisenseDatamodelTableCount" ) """ - Status of quicksight analysis + Number of tables in this datamodel. """ - QUICK_SIGHT_ANALYSIS_CALCULATED_FIELDS: ClassVar[KeywordField] = KeywordField( - "quickSightAnalysisCalculatedFields", "quickSightAnalysisCalculatedFields" + SISENSE_DATAMODEL_SERVER: ClassVar[KeywordField] = KeywordField( + "sisenseDatamodelServer", "sisenseDatamodelServer" ) """ - Calculated fields of quicksight analysis + Hostname of the server on which this datamodel was created. 
""" - QUICK_SIGHT_ANALYSIS_PARAMETER_DECLARATIONS: ClassVar[KeywordField] = KeywordField( - "quickSightAnalysisParameterDeclarations", - "quickSightAnalysisParameterDeclarations", + SISENSE_DATAMODEL_REVISION: ClassVar[KeywordField] = KeywordField( + "sisenseDatamodelRevision", "sisenseDatamodelRevision" ) """ - parameters used for quicksight analysis + Revision of this datamodel. """ - QUICK_SIGHT_ANALYSIS_FILTER_GROUPS: ClassVar[KeywordField] = KeywordField( - "quickSightAnalysisFilterGroups", "quickSightAnalysisFilterGroups" + SISENSE_DATAMODEL_LAST_BUILD_TIME: ClassVar[NumericField] = NumericField( + "sisenseDatamodelLastBuildTime", "sisenseDatamodelLastBuildTime" ) """ - Filter groups used for quicksight analysis + Time (epoch) when this datamodel was last built, in milliseconds. + """ + SISENSE_DATAMODEL_LAST_SUCCESSFUL_BUILD_TIME: ClassVar[NumericField] = NumericField( + "sisenseDatamodelLastSuccessfulBuildTime", + "sisenseDatamodelLastSuccessfulBuildTime", + ) + """ + Time (epoch) when this datamodel was last built successfully, in milliseconds. + """ + SISENSE_DATAMODEL_LAST_PUBLISH_TIME: ClassVar[NumericField] = NumericField( + "sisenseDatamodelLastPublishTime", "sisenseDatamodelLastPublishTime" + ) + """ + Time (epoch) when this datamodel was last published, in milliseconds. + """ + SISENSE_DATAMODEL_TYPE: ClassVar[KeywordField] = KeywordField( + "sisenseDatamodelType", "sisenseDatamodelType" + ) """ + Type of this datamodel, for example: 'extract' or 'custom'. + """ + SISENSE_DATAMODEL_RELATION_TYPE: ClassVar[KeywordField] = KeywordField( + "sisenseDatamodelRelationType", "sisenseDatamodelRelationType" + ) + """ + Default relation type for this datamodel. 'extract' type Datamodels have regular relations by default. 'live' type Datamodels have direct relations by default. 
+ """ # noqa: E501 - QUICK_SIGHT_ANALYSIS_VISUALS: ClassVar[RelationField] = RelationField( - "quickSightAnalysisVisuals" + SISENSE_DATAMODEL_TABLES: ClassVar[RelationField] = RelationField( + "sisenseDatamodelTables" ) """ TBC """ - QUICK_SIGHT_ANALYSIS_FOLDERS: ClassVar[RelationField] = RelationField( - "quickSightAnalysisFolders" - ) + SISENSE_DASHBOARDS: ClassVar[RelationField] = RelationField("sisenseDashboards") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "quick_sight_analysis_status", - "quick_sight_analysis_calculated_fields", - "quick_sight_analysis_parameter_declarations", - "quick_sight_analysis_filter_groups", - "quick_sight_analysis_visuals", - "quick_sight_analysis_folders", + "sisense_datamodel_table_count", + "sisense_datamodel_server", + "sisense_datamodel_revision", + "sisense_datamodel_last_build_time", + "sisense_datamodel_last_successful_build_time", + "sisense_datamodel_last_publish_time", + "sisense_datamodel_type", + "sisense_datamodel_relation_type", + "sisense_datamodel_tables", + "sisense_dashboards", ] @property - def quick_sight_analysis_status(self) -> Optional[QuickSightAnalysisStatus]: + def sisense_datamodel_table_count(self) -> Optional[int]: return ( None if self.attributes is None - else self.attributes.quick_sight_analysis_status + else self.attributes.sisense_datamodel_table_count ) - @quick_sight_analysis_status.setter - def quick_sight_analysis_status( - self, quick_sight_analysis_status: Optional[QuickSightAnalysisStatus] + @sisense_datamodel_table_count.setter + def sisense_datamodel_table_count( + self, sisense_datamodel_table_count: Optional[int] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_analysis_status = quick_sight_analysis_status + self.attributes.sisense_datamodel_table_count = sisense_datamodel_table_count + + @property + def sisense_datamodel_server(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_server + ) + + @sisense_datamodel_server.setter + def sisense_datamodel_server(self, sisense_datamodel_server: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_server = sisense_datamodel_server + + @property + def sisense_datamodel_revision(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_revision + ) + + @sisense_datamodel_revision.setter + def sisense_datamodel_revision(self, sisense_datamodel_revision: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_revision = sisense_datamodel_revision @property - def quick_sight_analysis_calculated_fields(self) -> Optional[set[str]]: + def sisense_datamodel_last_build_time(self) -> Optional[datetime]: return ( None if self.attributes is None - else self.attributes.quick_sight_analysis_calculated_fields + else self.attributes.sisense_datamodel_last_build_time ) - @quick_sight_analysis_calculated_fields.setter - def quick_sight_analysis_calculated_fields( - self, quick_sight_analysis_calculated_fields: Optional[set[str]] + @sisense_datamodel_last_build_time.setter + def sisense_datamodel_last_build_time( + self, sisense_datamodel_last_build_time: Optional[datetime] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_analysis_calculated_fields = ( - quick_sight_analysis_calculated_fields + 
self.attributes.sisense_datamodel_last_build_time = ( + sisense_datamodel_last_build_time ) @property - def quick_sight_analysis_parameter_declarations(self) -> Optional[set[str]]: + def sisense_datamodel_last_successful_build_time(self) -> Optional[datetime]: return ( None if self.attributes is None - else self.attributes.quick_sight_analysis_parameter_declarations + else self.attributes.sisense_datamodel_last_successful_build_time ) - @quick_sight_analysis_parameter_declarations.setter - def quick_sight_analysis_parameter_declarations( - self, quick_sight_analysis_parameter_declarations: Optional[set[str]] + @sisense_datamodel_last_successful_build_time.setter + def sisense_datamodel_last_successful_build_time( + self, sisense_datamodel_last_successful_build_time: Optional[datetime] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_analysis_parameter_declarations = ( - quick_sight_analysis_parameter_declarations + self.attributes.sisense_datamodel_last_successful_build_time = ( + sisense_datamodel_last_successful_build_time ) @property - def quick_sight_analysis_filter_groups(self) -> Optional[set[str]]: + def sisense_datamodel_last_publish_time(self) -> Optional[datetime]: return ( None if self.attributes is None - else self.attributes.quick_sight_analysis_filter_groups + else self.attributes.sisense_datamodel_last_publish_time ) - @quick_sight_analysis_filter_groups.setter - def quick_sight_analysis_filter_groups( - self, quick_sight_analysis_filter_groups: Optional[set[str]] + @sisense_datamodel_last_publish_time.setter + def sisense_datamodel_last_publish_time( + self, sisense_datamodel_last_publish_time: Optional[datetime] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_analysis_filter_groups = ( - quick_sight_analysis_filter_groups + self.attributes.sisense_datamodel_last_publish_time = ( + sisense_datamodel_last_publish_time ) @property - def quick_sight_analysis_visuals(self) -> Optional[list[QuickSightAnalysisVisual]]: + def sisense_datamodel_type(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.sisense_datamodel_type + ) + + @sisense_datamodel_type.setter + def sisense_datamodel_type(self, sisense_datamodel_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_type = sisense_datamodel_type + + @property + def sisense_datamodel_relation_type(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.quick_sight_analysis_visuals + else self.attributes.sisense_datamodel_relation_type ) - @quick_sight_analysis_visuals.setter - def quick_sight_analysis_visuals( - self, quick_sight_analysis_visuals: Optional[list[QuickSightAnalysisVisual]] + @sisense_datamodel_relation_type.setter + def sisense_datamodel_relation_type( + self, sisense_datamodel_relation_type: Optional[str] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_analysis_visuals = quick_sight_analysis_visuals + self.attributes.sisense_datamodel_relation_type = ( + sisense_datamodel_relation_type + ) @property - def quick_sight_analysis_folders(self) -> Optional[list[QuickSightFolder]]: + def sisense_datamodel_tables(self) -> Optional[list[SisenseDatamodelTable]]: return ( None if self.attributes is None - else self.attributes.quick_sight_analysis_folders + else self.attributes.sisense_datamodel_tables ) - @quick_sight_analysis_folders.setter - 
def quick_sight_analysis_folders( - self, quick_sight_analysis_folders: Optional[list[QuickSightFolder]] + @sisense_datamodel_tables.setter + def sisense_datamodel_tables( + self, sisense_datamodel_tables: Optional[list[SisenseDatamodelTable]] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_analysis_folders = quick_sight_analysis_folders + self.attributes.sisense_datamodel_tables = sisense_datamodel_tables + + @property + def sisense_dashboards(self) -> Optional[list[SisenseDashboard]]: + return None if self.attributes is None else self.attributes.sisense_dashboards + + @sisense_dashboards.setter + def sisense_dashboards(self, sisense_dashboards: Optional[list[SisenseDashboard]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_dashboards = sisense_dashboards - class Attributes(QuickSight.Attributes): - quick_sight_analysis_status: Optional[QuickSightAnalysisStatus] = Field( - None, description="", alias="quickSightAnalysisStatus" + class Attributes(Sisense.Attributes): + sisense_datamodel_table_count: Optional[int] = Field( + None, description="", alias="sisenseDatamodelTableCount" ) - quick_sight_analysis_calculated_fields: Optional[set[str]] = Field( - None, description="", alias="quickSightAnalysisCalculatedFields" + sisense_datamodel_server: Optional[str] = Field( + None, description="", alias="sisenseDatamodelServer" ) - quick_sight_analysis_parameter_declarations: Optional[set[str]] = Field( - None, description="", alias="quickSightAnalysisParameterDeclarations" + sisense_datamodel_revision: Optional[str] = Field( + None, description="", alias="sisenseDatamodelRevision" ) - quick_sight_analysis_filter_groups: Optional[set[str]] = Field( - None, description="", alias="quickSightAnalysisFilterGroups" + sisense_datamodel_last_build_time: Optional[datetime] = Field( + None, description="", alias="sisenseDatamodelLastBuildTime" ) - quick_sight_analysis_visuals: Optional[list[QuickSightAnalysisVisual]] = Field( - None, description="", alias="quickSightAnalysisVisuals" + sisense_datamodel_last_successful_build_time: Optional[datetime] = Field( + None, description="", alias="sisenseDatamodelLastSuccessfulBuildTime" + ) + sisense_datamodel_last_publish_time: Optional[datetime] = Field( + None, description="", alias="sisenseDatamodelLastPublishTime" + ) + sisense_datamodel_type: Optional[str] = Field( + None, description="", alias="sisenseDatamodelType" + ) + sisense_datamodel_relation_type: Optional[str] = Field( + None, description="", alias="sisenseDatamodelRelationType" + ) + sisense_datamodel_tables: Optional[list[SisenseDatamodelTable]] = Field( + None, description="", alias="sisenseDatamodelTables" ) # relationship - quick_sight_analysis_folders: Optional[list[QuickSightFolder]] = Field( - None, description="", alias="quickSightAnalysisFolders" + sisense_dashboards: Optional[list[SisenseDashboard]] = Field( + None, description="", alias="sisenseDashboards" ) # relationship - attributes: "QuickSightAnalysis.Attributes" = Field( - default_factory=lambda: QuickSightAnalysis.Attributes(), + attributes: "SisenseDatamodel.Attributes" = Field( + default_factory=lambda: SisenseDatamodel.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class QuickSightDashboard(QuickSight): +class SisenseDatamodelTable(Sisense): """Description""" - type_name: str = Field("QuickSightDashboard", allow_mutation=False) + type_name: str = Field("SisenseDatamodelTable", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "QuickSightDashboard": - raise ValueError("must be QuickSightDashboard") + if v != "SisenseDatamodelTable": + raise ValueError("must be SisenseDatamodelTable") return v def __setattr__(self, name, value): - if name in QuickSightDashboard._convenience_properties: + if name in SisenseDatamodelTable._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - QUICK_SIGHT_DASHBOARD_PUBLISHED_VERSION_NUMBER: ClassVar[ - NumericField - ] = NumericField( - "quickSightDashboardPublishedVersionNumber", - "quickSightDashboardPublishedVersionNumber", + SISENSE_DATAMODEL_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "sisenseDatamodelQualifiedName", + "sisenseDatamodelQualifiedName", + "sisenseDatamodelQualifiedName.text", ) """ - Version number of the dashboard published + Unique name of the datamodel in which this datamodel table exists. """ - QUICK_SIGHT_DASHBOARD_LAST_PUBLISHED_TIME: ClassVar[NumericField] = NumericField( - "quickSightDashboardLastPublishedTime", "quickSightDashboardLastPublishedTime" + SISENSE_DATAMODEL_TABLE_COLUMN_COUNT: ClassVar[NumericField] = NumericField( + "sisenseDatamodelTableColumnCount", "sisenseDatamodelTableColumnCount" ) """ - Last published time of dashboard + Number of columns present in this datamodel table. """ - - QUICK_SIGHT_DASHBOARD_FOLDERS: ClassVar[RelationField] = RelationField( - "quickSightDashboardFolders" + SISENSE_DATAMODEL_TABLE_TYPE: ClassVar[KeywordField] = KeywordField( + "sisenseDatamodelTableType", "sisenseDatamodelTableType" ) """ - TBC + Type of this datamodel table, for example: 'base' for regular tables, 'custom' for SQL expression-based tables. """ - QUICK_SIGHT_DASHBOARD_VISUALS: ClassVar[RelationField] = RelationField( - "quickSightDashboardVisuals" + SISENSE_DATAMODEL_TABLE_EXPRESSION: ClassVar[KeywordField] = KeywordField( + "sisenseDatamodelTableExpression", "sisenseDatamodelTableExpression" ) """ + SQL expression of this datamodel table. + """ + SISENSE_DATAMODEL_TABLE_IS_MATERIALIZED: ClassVar[BooleanField] = BooleanField( + "sisenseDatamodelTableIsMaterialized", "sisenseDatamodelTableIsMaterialized" + ) + """ + Whether this datamodel table is materialised (true) or not (false). + """ + SISENSE_DATAMODEL_TABLE_IS_HIDDEN: ClassVar[BooleanField] = BooleanField( + "sisenseDatamodelTableIsHidden", "sisenseDatamodelTableIsHidden" + ) + """ + Whether this datamodel table is hidden in Sisense (true) or not (false). + """ + SISENSE_DATAMODEL_TABLE_SCHEDULE: ClassVar[KeywordField] = KeywordField( + "sisenseDatamodelTableSchedule", "sisenseDatamodelTableSchedule" + ) + """ + JSON specifying the refresh schedule of this datamodel table. + """ + SISENSE_DATAMODEL_TABLE_LIVE_QUERY_SETTINGS: ClassVar[KeywordField] = KeywordField( + "sisenseDatamodelTableLiveQuerySettings", + "sisenseDatamodelTableLiveQuerySettings", + ) + """ + JSON specifying the LiveQuery settings of this datamodel table. 
+ """ + + SISENSE_DATAMODEL: ClassVar[RelationField] = RelationField("sisenseDatamodel") + """ + TBC + """ + SISENSE_WIDGETS: ClassVar[RelationField] = RelationField("sisenseWidgets") + """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "quick_sight_dashboard_published_version_number", - "quick_sight_dashboard_last_published_time", - "quick_sight_dashboard_folders", - "quick_sight_dashboard_visuals", + "sisense_datamodel_qualified_name", + "sisense_datamodel_table_column_count", + "sisense_datamodel_table_type", + "sisense_datamodel_table_expression", + "sisense_datamodel_table_is_materialized", + "sisense_datamodel_table_is_hidden", + "sisense_datamodel_table_schedule", + "sisense_datamodel_table_live_query_settings", + "sisense_datamodel", + "sisense_widgets", ] @property - def quick_sight_dashboard_published_version_number(self) -> Optional[int]: + def sisense_datamodel_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.quick_sight_dashboard_published_version_number + else self.attributes.sisense_datamodel_qualified_name ) - @quick_sight_dashboard_published_version_number.setter - def quick_sight_dashboard_published_version_number( - self, quick_sight_dashboard_published_version_number: Optional[int] + @sisense_datamodel_qualified_name.setter + def sisense_datamodel_qualified_name( + self, sisense_datamodel_qualified_name: Optional[str] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_dashboard_published_version_number = ( - quick_sight_dashboard_published_version_number + self.attributes.sisense_datamodel_qualified_name = ( + sisense_datamodel_qualified_name ) @property - def quick_sight_dashboard_last_published_time(self) -> Optional[datetime]: + def sisense_datamodel_table_column_count(self) -> Optional[int]: return ( None if self.attributes is None - else self.attributes.quick_sight_dashboard_last_published_time + else self.attributes.sisense_datamodel_table_column_count ) - @quick_sight_dashboard_last_published_time.setter - def quick_sight_dashboard_last_published_time( - self, quick_sight_dashboard_last_published_time: Optional[datetime] + @sisense_datamodel_table_column_count.setter + def sisense_datamodel_table_column_count( + self, sisense_datamodel_table_column_count: Optional[int] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_dashboard_last_published_time = ( - quick_sight_dashboard_last_published_time + self.attributes.sisense_datamodel_table_column_count = ( + sisense_datamodel_table_column_count + ) + + @property + def sisense_datamodel_table_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_table_type ) + @sisense_datamodel_table_type.setter + def sisense_datamodel_table_type(self, sisense_datamodel_table_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_table_type = sisense_datamodel_table_type + @property - def quick_sight_dashboard_folders(self) -> Optional[list[QuickSightFolder]]: + def sisense_datamodel_table_expression(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.quick_sight_dashboard_folders + else self.attributes.sisense_datamodel_table_expression ) - @quick_sight_dashboard_folders.setter - def quick_sight_dashboard_folders( - self, quick_sight_dashboard_folders: Optional[list[QuickSightFolder]] + 
@sisense_datamodel_table_expression.setter + def sisense_datamodel_table_expression( + self, sisense_datamodel_table_expression: Optional[str] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_dashboard_folders = quick_sight_dashboard_folders + self.attributes.sisense_datamodel_table_expression = ( + sisense_datamodel_table_expression + ) @property - def quick_sight_dashboard_visuals( - self, - ) -> Optional[list[QuickSightDashboardVisual]]: + def sisense_datamodel_table_is_materialized(self) -> Optional[bool]: return ( None if self.attributes is None - else self.attributes.quick_sight_dashboard_visuals + else self.attributes.sisense_datamodel_table_is_materialized ) - @quick_sight_dashboard_visuals.setter - def quick_sight_dashboard_visuals( - self, quick_sight_dashboard_visuals: Optional[list[QuickSightDashboardVisual]] + @sisense_datamodel_table_is_materialized.setter + def sisense_datamodel_table_is_materialized( + self, sisense_datamodel_table_is_materialized: Optional[bool] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_dashboard_visuals = quick_sight_dashboard_visuals + self.attributes.sisense_datamodel_table_is_materialized = ( + sisense_datamodel_table_is_materialized + ) - class Attributes(QuickSight.Attributes): - quick_sight_dashboard_published_version_number: Optional[int] = Field( - None, description="", alias="quickSightDashboardPublishedVersionNumber" + @property + def sisense_datamodel_table_is_hidden(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_table_is_hidden ) - quick_sight_dashboard_last_published_time: Optional[datetime] = Field( - None, description="", alias="quickSightDashboardLastPublishedTime" + + @sisense_datamodel_table_is_hidden.setter + def sisense_datamodel_table_is_hidden( + self, sisense_datamodel_table_is_hidden: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_table_is_hidden = ( + sisense_datamodel_table_is_hidden + ) + + @property + def sisense_datamodel_table_schedule(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_table_schedule + ) + + @sisense_datamodel_table_schedule.setter + def sisense_datamodel_table_schedule( + self, sisense_datamodel_table_schedule: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_table_schedule = ( + sisense_datamodel_table_schedule + ) + + @property + def sisense_datamodel_table_live_query_settings(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_table_live_query_settings + ) + + @sisense_datamodel_table_live_query_settings.setter + def sisense_datamodel_table_live_query_settings( + self, sisense_datamodel_table_live_query_settings: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_table_live_query_settings = ( + sisense_datamodel_table_live_query_settings + ) + + @property + def sisense_datamodel(self) -> Optional[SisenseDatamodel]: + return None if self.attributes is None else self.attributes.sisense_datamodel + + @sisense_datamodel.setter + def sisense_datamodel(self, sisense_datamodel: Optional[SisenseDatamodel]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel 
= sisense_datamodel + + @property + def sisense_widgets(self) -> Optional[list[SisenseWidget]]: + return None if self.attributes is None else self.attributes.sisense_widgets + + @sisense_widgets.setter + def sisense_widgets(self, sisense_widgets: Optional[list[SisenseWidget]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_widgets = sisense_widgets + + class Attributes(Sisense.Attributes): + sisense_datamodel_qualified_name: Optional[str] = Field( + None, description="", alias="sisenseDatamodelQualifiedName" + ) + sisense_datamodel_table_column_count: Optional[int] = Field( + None, description="", alias="sisenseDatamodelTableColumnCount" + ) + sisense_datamodel_table_type: Optional[str] = Field( + None, description="", alias="sisenseDatamodelTableType" + ) + sisense_datamodel_table_expression: Optional[str] = Field( + None, description="", alias="sisenseDatamodelTableExpression" + ) + sisense_datamodel_table_is_materialized: Optional[bool] = Field( + None, description="", alias="sisenseDatamodelTableIsMaterialized" ) - quick_sight_dashboard_folders: Optional[list[QuickSightFolder]] = Field( - None, description="", alias="quickSightDashboardFolders" + sisense_datamodel_table_is_hidden: Optional[bool] = Field( + None, description="", alias="sisenseDatamodelTableIsHidden" + ) + sisense_datamodel_table_schedule: Optional[str] = Field( + None, description="", alias="sisenseDatamodelTableSchedule" + ) + sisense_datamodel_table_live_query_settings: Optional[str] = Field( + None, description="", alias="sisenseDatamodelTableLiveQuerySettings" + ) + sisense_datamodel: Optional[SisenseDatamodel] = Field( + None, description="", alias="sisenseDatamodel" ) # relationship - quick_sight_dashboard_visuals: Optional[ - list[QuickSightDashboardVisual] - ] = Field( - None, description="", alias="quickSightDashboardVisuals" + sisense_widgets: Optional[list[SisenseWidget]] = Field( + None, description="", alias="sisenseWidgets" ) # relationship - attributes: "QuickSightDashboard.Attributes" = Field( - default_factory=lambda: QuickSightDashboard.Attributes(), + attributes: "SisenseDatamodelTable.Attributes" = Field( + default_factory=lambda: SisenseDatamodelTable.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class QuickSightDataset(QuickSight): +class SisenseDashboard(Sisense): """Description""" - type_name: str = Field("QuickSightDataset", allow_mutation=False) + type_name: str = Field("SisenseDashboard", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "QuickSightDataset": - raise ValueError("must be QuickSightDataset") + if v != "SisenseDashboard": + raise ValueError("must be SisenseDashboard") return v def __setattr__(self, name, value): - if name in QuickSightDataset._convenience_properties: + if name in SisenseDashboard._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - QUICK_SIGHT_DATASET_IMPORT_MODE: ClassVar[KeywordField] = KeywordField( - "quickSightDatasetImportMode", "quickSightDatasetImportMode" + SISENSE_DASHBOARD_FOLDER_QUALIFIED_NAME: ClassVar[ + KeywordTextField + ] = KeywordTextField( + "sisenseDashboardFolderQualifiedName", + "sisenseDashboardFolderQualifiedName", + "sisenseDashboardFolderQualifiedName.text", ) """ - Quicksight dataset importMode indicates a value that indicates whether you want to import the data into SPICE + Unique name of the folder in which this dashboard exists. """ - QUICK_SIGHT_DATASET_COLUMN_COUNT: ClassVar[NumericField] = NumericField( - "quickSightDatasetColumnCount", "quickSightDatasetColumnCount" + SISENSE_DASHBOARD_WIDGET_COUNT: ClassVar[NumericField] = NumericField( + "sisenseDashboardWidgetCount", "sisenseDashboardWidgetCount" ) """ - Quicksight dataset column count indicates number of columns present in the dataset + Number of widgets in this dashboard. """ - QUICK_SIGHT_DATASET_FOLDERS: ClassVar[RelationField] = RelationField( - "quickSightDatasetFolders" - ) + SISENSE_DATAMODELS: ClassVar[RelationField] = RelationField("sisenseDatamodels") """ TBC """ - QUICK_SIGHT_DATASET_FIELDS: ClassVar[RelationField] = RelationField( - "quickSightDatasetFields" - ) + SISENSE_WIDGETS: ClassVar[RelationField] = RelationField("sisenseWidgets") + """ + TBC + """ + SISENSE_FOLDER: ClassVar[RelationField] = RelationField("sisenseFolder") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "quick_sight_dataset_import_mode", - "quick_sight_dataset_column_count", - "quick_sight_dataset_folders", - "quick_sight_dataset_fields", + "sisense_dashboard_folder_qualified_name", + "sisense_dashboard_widget_count", + "sisense_datamodels", + "sisense_widgets", + "sisense_folder", ] @property - def quick_sight_dataset_import_mode(self) -> Optional[QuickSightDatasetImportMode]: + def sisense_dashboard_folder_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.quick_sight_dataset_import_mode + else self.attributes.sisense_dashboard_folder_qualified_name ) - @quick_sight_dataset_import_mode.setter - def quick_sight_dataset_import_mode( - self, quick_sight_dataset_import_mode: Optional[QuickSightDatasetImportMode] + @sisense_dashboard_folder_qualified_name.setter + def sisense_dashboard_folder_qualified_name( + self, sisense_dashboard_folder_qualified_name: Optional[str] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_dataset_import_mode = ( - quick_sight_dataset_import_mode + self.attributes.sisense_dashboard_folder_qualified_name = ( + sisense_dashboard_folder_qualified_name ) @property - def quick_sight_dataset_column_count(self) -> Optional[int]: + def 
sisense_dashboard_widget_count(self) -> Optional[int]: return ( None if self.attributes is None - else self.attributes.quick_sight_dataset_column_count + else self.attributes.sisense_dashboard_widget_count ) - @quick_sight_dataset_column_count.setter - def quick_sight_dataset_column_count( - self, quick_sight_dataset_column_count: Optional[int] + @sisense_dashboard_widget_count.setter + def sisense_dashboard_widget_count( + self, sisense_dashboard_widget_count: Optional[int] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_dataset_column_count = ( - quick_sight_dataset_column_count - ) + self.attributes.sisense_dashboard_widget_count = sisense_dashboard_widget_count @property - def quick_sight_dataset_folders(self) -> Optional[list[QuickSightFolder]]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_dataset_folders - ) + def sisense_datamodels(self) -> Optional[list[SisenseDatamodel]]: + return None if self.attributes is None else self.attributes.sisense_datamodels - @quick_sight_dataset_folders.setter - def quick_sight_dataset_folders( - self, quick_sight_dataset_folders: Optional[list[QuickSightFolder]] - ): + @sisense_datamodels.setter + def sisense_datamodels(self, sisense_datamodels: Optional[list[SisenseDatamodel]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_dataset_folders = quick_sight_dataset_folders + self.attributes.sisense_datamodels = sisense_datamodels @property - def quick_sight_dataset_fields(self) -> Optional[list[QuickSightDatasetField]]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_dataset_fields - ) + def sisense_widgets(self) -> Optional[list[SisenseWidget]]: + return None if self.attributes is None else self.attributes.sisense_widgets - @quick_sight_dataset_fields.setter - def quick_sight_dataset_fields( - self, quick_sight_dataset_fields: Optional[list[QuickSightDatasetField]] - ): + @sisense_widgets.setter + def sisense_widgets(self, sisense_widgets: Optional[list[SisenseWidget]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.quick_sight_dataset_fields = quick_sight_dataset_fields + self.attributes.sisense_widgets = sisense_widgets + + @property + def sisense_folder(self) -> Optional[SisenseFolder]: + return None if self.attributes is None else self.attributes.sisense_folder - class Attributes(QuickSight.Attributes): - quick_sight_dataset_import_mode: Optional[QuickSightDatasetImportMode] = Field( - None, description="", alias="quickSightDatasetImportMode" + @sisense_folder.setter + def sisense_folder(self, sisense_folder: Optional[SisenseFolder]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_folder = sisense_folder + + class Attributes(Sisense.Attributes): + sisense_dashboard_folder_qualified_name: Optional[str] = Field( + None, description="", alias="sisenseDashboardFolderQualifiedName" ) - quick_sight_dataset_column_count: Optional[int] = Field( - None, description="", alias="quickSightDatasetColumnCount" + sisense_dashboard_widget_count: Optional[int] = Field( + None, description="", alias="sisenseDashboardWidgetCount" ) - quick_sight_dataset_folders: Optional[list[QuickSightFolder]] = Field( - None, description="", alias="quickSightDatasetFolders" + sisense_datamodels: Optional[list[SisenseDatamodel]] = Field( + None, description="", alias="sisenseDatamodels" + ) # relationship + sisense_widgets: 
Optional[list[SisenseWidget]] = Field( + None, description="", alias="sisenseWidgets" ) # relationship - quick_sight_dataset_fields: Optional[list[QuickSightDatasetField]] = Field( - None, description="", alias="quickSightDatasetFields" + sisense_folder: Optional[SisenseFolder] = Field( + None, description="", alias="sisenseFolder" ) # relationship - attributes: "QuickSightDataset.Attributes" = Field( - default_factory=lambda: QuickSightDataset.Attributes(), + attributes: "SisenseDashboard.Attributes" = Field( + default_factory=lambda: SisenseDashboard.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -QuickSightFolder.Attributes.update_forward_refs() - - -QuickSightDashboardVisual.Attributes.update_forward_refs() - - -QuickSightAnalysisVisual.Attributes.update_forward_refs() +SisenseFolder.Attributes.update_forward_refs() -QuickSightDatasetField.Attributes.update_forward_refs() +SisenseWidget.Attributes.update_forward_refs() -QuickSightAnalysis.Attributes.update_forward_refs() +SisenseDatamodel.Attributes.update_forward_refs() -QuickSightDashboard.Attributes.update_forward_refs() +SisenseDatamodelTable.Attributes.update_forward_refs() -QuickSightDataset.Attributes.update_forward_refs() +SisenseDashboard.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset75.py b/pyatlan/model/assets/asset75.py index 42a85a095..31b12a387 100644 --- a/pyatlan/model/assets/asset75.py +++ b/pyatlan/model/assets/asset75.py @@ -8,171 +8,395 @@ from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import KeywordTextField, RelationField +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) -from .asset49 import Thoughtspot +from .asset48 import Metabase -class ThoughtspotLiveboard(Thoughtspot): +class MetabaseQuestion(Metabase): """Description""" - type_name: str = Field("ThoughtspotLiveboard", allow_mutation=False) + type_name: str = Field("MetabaseQuestion", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "ThoughtspotLiveboard": - raise ValueError("must be ThoughtspotLiveboard") + if v != "MetabaseQuestion": + raise ValueError("must be MetabaseQuestion") return v def __setattr__(self, name, value): - if name in ThoughtspotLiveboard._convenience_properties: + if name in MetabaseQuestion._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - THOUGHTSPOT_DASHLETS: ClassVar[RelationField] = RelationField("thoughtspotDashlets") + METABASE_DASHBOARD_COUNT: ClassVar[NumericField] = NumericField( + "metabaseDashboardCount", "metabaseDashboardCount" + ) + """ + + """ + METABASE_QUERY_TYPE: ClassVar[KeywordTextField] = KeywordTextField( + "metabaseQueryType", "metabaseQueryType", "metabaseQueryType.text" + ) + """ + + """ + METABASE_QUERY: ClassVar[KeywordTextField] = KeywordTextField( + "metabaseQuery", "metabaseQuery.keyword", "metabaseQuery" + ) + """ + + """ + + METABASE_DASHBOARDS: ClassVar[RelationField] = RelationField("metabaseDashboards") + """ + TBC + """ + METABASE_COLLECTION: ClassVar[RelationField] = RelationField("metabaseCollection") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "thoughtspot_dashlets", + "metabase_dashboard_count", + "metabase_query_type", + "metabase_query", + "metabase_dashboards", + "metabase_collection", ] @property - def 
thoughtspot_dashlets(self) -> Optional[list[ThoughtspotDashlet]]: - return None if self.attributes is None else self.attributes.thoughtspot_dashlets + def metabase_dashboard_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.metabase_dashboard_count + ) + + @metabase_dashboard_count.setter + def metabase_dashboard_count(self, metabase_dashboard_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_dashboard_count = metabase_dashboard_count + + @property + def metabase_query_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.metabase_query_type + + @metabase_query_type.setter + def metabase_query_type(self, metabase_query_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_query_type = metabase_query_type + + @property + def metabase_query(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.metabase_query + + @metabase_query.setter + def metabase_query(self, metabase_query: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_query = metabase_query + + @property + def metabase_dashboards(self) -> Optional[list[MetabaseDashboard]]: + return None if self.attributes is None else self.attributes.metabase_dashboards - @thoughtspot_dashlets.setter - def thoughtspot_dashlets( - self, thoughtspot_dashlets: Optional[list[ThoughtspotDashlet]] + @metabase_dashboards.setter + def metabase_dashboards( + self, metabase_dashboards: Optional[list[MetabaseDashboard]] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.thoughtspot_dashlets = thoughtspot_dashlets + self.attributes.metabase_dashboards = metabase_dashboards + + @property + def metabase_collection(self) -> Optional[MetabaseCollection]: + return None if self.attributes is None else self.attributes.metabase_collection - class Attributes(Thoughtspot.Attributes): - thoughtspot_dashlets: Optional[list[ThoughtspotDashlet]] = Field( - None, description="", alias="thoughtspotDashlets" + @metabase_collection.setter + def metabase_collection(self, metabase_collection: Optional[MetabaseCollection]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_collection = metabase_collection + + class Attributes(Metabase.Attributes): + metabase_dashboard_count: Optional[int] = Field( + None, description="", alias="metabaseDashboardCount" + ) + metabase_query_type: Optional[str] = Field( + None, description="", alias="metabaseQueryType" + ) + metabase_query: Optional[str] = Field( + None, description="", alias="metabaseQuery" + ) + metabase_dashboards: Optional[list[MetabaseDashboard]] = Field( + None, description="", alias="metabaseDashboards" + ) # relationship + metabase_collection: Optional[MetabaseCollection] = Field( + None, description="", alias="metabaseCollection" ) # relationship - attributes: "ThoughtspotLiveboard.Attributes" = Field( - default_factory=lambda: ThoughtspotLiveboard.Attributes(), + attributes: "MetabaseQuestion.Attributes" = Field( + default_factory=lambda: MetabaseQuestion.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class ThoughtspotDashlet(Thoughtspot): +class MetabaseCollection(Metabase): """Description""" - type_name: str = Field("ThoughtspotDashlet", allow_mutation=False) + type_name: str = Field("MetabaseCollection", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "ThoughtspotDashlet": - raise ValueError("must be ThoughtspotDashlet") + if v != "MetabaseCollection": + raise ValueError("must be MetabaseCollection") return v def __setattr__(self, name, value): - if name in ThoughtspotDashlet._convenience_properties: + if name in MetabaseCollection._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - THOUGHTSPOT_LIVEBOARD_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "thoughtspotLiveboardName", - "thoughtspotLiveboardName.keyword", - "thoughtspotLiveboardName", + METABASE_SLUG: ClassVar[KeywordTextField] = KeywordTextField( + "metabaseSlug", "metabaseSlug", "metabaseSlug.text" ) """ - TBC + """ - THOUGHTSPOT_LIVEBOARD_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "thoughtspotLiveboardQualifiedName", - "thoughtspotLiveboardQualifiedName", - "thoughtspotLiveboardQualifiedName.text", + METABASE_COLOR: ClassVar[KeywordField] = KeywordField( + "metabaseColor", "metabaseColor" ) """ - TBC + + """ + METABASE_NAMESPACE: ClassVar[KeywordTextField] = KeywordTextField( + "metabaseNamespace", "metabaseNamespace", "metabaseNamespace.text" + ) """ - THOUGHTSPOT_LIVEBOARD: ClassVar[RelationField] = RelationField( - "thoughtspotLiveboard" + """ + METABASE_IS_PERSONAL_COLLECTION: ClassVar[BooleanField] = BooleanField( + "metabaseIsPersonalCollection", "metabaseIsPersonalCollection" ) """ + + """ + + METABASE_DASHBOARDS: ClassVar[RelationField] = RelationField("metabaseDashboards") + """ + TBC + """ + METABASE_QUESTIONS: ClassVar[RelationField] = RelationField("metabaseQuestions") + """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "thoughtspot_liveboard_name", - "thoughtspot_liveboard_qualified_name", - "thoughtspot_liveboard", + "metabase_slug", + "metabase_color", + "metabase_namespace", + "metabase_is_personal_collection", + "metabase_dashboards", + "metabase_questions", ] @property - def thoughtspot_liveboard_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.thoughtspot_liveboard_name - ) + def metabase_slug(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.metabase_slug + + @metabase_slug.setter + def metabase_slug(self, metabase_slug: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_slug = metabase_slug + + @property + def metabase_color(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.metabase_color - @thoughtspot_liveboard_name.setter - def thoughtspot_liveboard_name(self, thoughtspot_liveboard_name: Optional[str]): + @metabase_color.setter + def metabase_color(self, metabase_color: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.thoughtspot_liveboard_name = thoughtspot_liveboard_name + self.attributes.metabase_color = metabase_color @property - def thoughtspot_liveboard_qualified_name(self) -> Optional[str]: + def metabase_namespace(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.metabase_namespace + + 
@metabase_namespace.setter + def metabase_namespace(self, metabase_namespace: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_namespace = metabase_namespace + + @property + def metabase_is_personal_collection(self) -> Optional[bool]: return ( None if self.attributes is None - else self.attributes.thoughtspot_liveboard_qualified_name + else self.attributes.metabase_is_personal_collection ) - @thoughtspot_liveboard_qualified_name.setter - def thoughtspot_liveboard_qualified_name( - self, thoughtspot_liveboard_qualified_name: Optional[str] + @metabase_is_personal_collection.setter + def metabase_is_personal_collection( + self, metabase_is_personal_collection: Optional[bool] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.thoughtspot_liveboard_qualified_name = ( - thoughtspot_liveboard_qualified_name + self.attributes.metabase_is_personal_collection = ( + metabase_is_personal_collection ) @property - def thoughtspot_liveboard(self) -> Optional[ThoughtspotLiveboard]: - return ( - None if self.attributes is None else self.attributes.thoughtspot_liveboard - ) + def metabase_dashboards(self) -> Optional[list[MetabaseDashboard]]: + return None if self.attributes is None else self.attributes.metabase_dashboards - @thoughtspot_liveboard.setter - def thoughtspot_liveboard( - self, thoughtspot_liveboard: Optional[ThoughtspotLiveboard] + @metabase_dashboards.setter + def metabase_dashboards( + self, metabase_dashboards: Optional[list[MetabaseDashboard]] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.thoughtspot_liveboard = thoughtspot_liveboard + self.attributes.metabase_dashboards = metabase_dashboards + + @property + def metabase_questions(self) -> Optional[list[MetabaseQuestion]]: + return None if self.attributes is None else self.attributes.metabase_questions - class Attributes(Thoughtspot.Attributes): - thoughtspot_liveboard_name: Optional[str] = Field( - None, description="", alias="thoughtspotLiveboardName" + @metabase_questions.setter + def metabase_questions(self, metabase_questions: Optional[list[MetabaseQuestion]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_questions = metabase_questions + + class Attributes(Metabase.Attributes): + metabase_slug: Optional[str] = Field(None, description="", alias="metabaseSlug") + metabase_color: Optional[str] = Field( + None, description="", alias="metabaseColor" ) - thoughtspot_liveboard_qualified_name: Optional[str] = Field( - None, description="", alias="thoughtspotLiveboardQualifiedName" + metabase_namespace: Optional[str] = Field( + None, description="", alias="metabaseNamespace" ) - thoughtspot_liveboard: Optional[ThoughtspotLiveboard] = Field( - None, description="", alias="thoughtspotLiveboard" + metabase_is_personal_collection: Optional[bool] = Field( + None, description="", alias="metabaseIsPersonalCollection" + ) + metabase_dashboards: Optional[list[MetabaseDashboard]] = Field( + None, description="", alias="metabaseDashboards" + ) # relationship + metabase_questions: Optional[list[MetabaseQuestion]] = Field( + None, description="", alias="metabaseQuestions" ) # relationship - attributes: "ThoughtspotDashlet.Attributes" = Field( - default_factory=lambda: ThoughtspotDashlet.Attributes(), + attributes: "MetabaseCollection.Attributes" = Field( + default_factory=lambda: MetabaseCollection.Attributes(), description="Map of attributes in the instance and their 
values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -ThoughtspotLiveboard.Attributes.update_forward_refs() +class MetabaseDashboard(Metabase): + """Description""" + + type_name: str = Field("MetabaseDashboard", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MetabaseDashboard": + raise ValueError("must be MetabaseDashboard") + return v + + def __setattr__(self, name, value): + if name in MetabaseDashboard._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + METABASE_QUESTION_COUNT: ClassVar[NumericField] = NumericField( + "metabaseQuestionCount", "metabaseQuestionCount" + ) + """ + + """ + + METABASE_QUESTIONS: ClassVar[RelationField] = RelationField("metabaseQuestions") + """ + TBC + """ + METABASE_COLLECTION: ClassVar[RelationField] = RelationField("metabaseCollection") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "metabase_question_count", + "metabase_questions", + "metabase_collection", + ] + + @property + def metabase_question_count(self) -> Optional[int]: + return ( + None if self.attributes is None else self.attributes.metabase_question_count + ) + + @metabase_question_count.setter + def metabase_question_count(self, metabase_question_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_question_count = metabase_question_count + + @property + def metabase_questions(self) -> Optional[list[MetabaseQuestion]]: + return None if self.attributes is None else self.attributes.metabase_questions + + @metabase_questions.setter + def metabase_questions(self, metabase_questions: Optional[list[MetabaseQuestion]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_questions = metabase_questions + + @property + def metabase_collection(self) -> Optional[MetabaseCollection]: + return None if self.attributes is None else self.attributes.metabase_collection + + @metabase_collection.setter + def metabase_collection(self, metabase_collection: Optional[MetabaseCollection]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_collection = metabase_collection + + class Attributes(Metabase.Attributes): + metabase_question_count: Optional[int] = Field( + None, description="", alias="metabaseQuestionCount" + ) + metabase_questions: Optional[list[MetabaseQuestion]] = Field( + None, description="", alias="metabaseQuestions" + ) # relationship + metabase_collection: Optional[MetabaseCollection] = Field( + None, description="", alias="metabaseCollection" + ) # relationship + + attributes: "MetabaseDashboard.Attributes" = Field( + default_factory=lambda: MetabaseDashboard.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +MetabaseQuestion.Attributes.update_forward_refs() + + +MetabaseCollection.Attributes.update_forward_refs() -ThoughtspotDashlet.Attributes.update_forward_refs() +MetabaseDashboard.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset76.py b/pyatlan/model/assets/asset76.py index 6a4aecd35..0afc5b5d6 100644 --- a/pyatlan/model/assets/asset76.py +++ b/pyatlan/model/assets/asset76.py @@ -4,30 +4,929 @@ from __future__ import annotations -from typing import ClassVar +from datetime import datetime +from typing import ClassVar, Optional from pydantic import Field, validator -from .asset49 import Thoughtspot +from pyatlan.model.enums import ( + QuickSightAnalysisStatus, + QuickSightDatasetFieldType, + QuickSightDatasetImportMode, + QuickSightFolderType, +) +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) +from .asset49 import QuickSight -class ThoughtspotAnswer(Thoughtspot): + +class QuickSightFolder(QuickSight): + """Description""" + + type_name: str = Field("QuickSightFolder", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QuickSightFolder": + raise ValueError("must be QuickSightFolder") + return v + + def __setattr__(self, name, value): + if name in QuickSightFolder._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QUICK_SIGHT_FOLDER_TYPE: ClassVar[KeywordField] = KeywordField( + "quickSightFolderType", "quickSightFolderType" + ) + """ + Type of this folder, for example: SHARED. + """ + QUICK_SIGHT_FOLDER_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "quickSightFolderHierarchy", "quickSightFolderHierarchy" + ) + """ + Detailed path of this folder. 
+ """ + + QUICK_SIGHT_DASHBOARDS: ClassVar[RelationField] = RelationField( + "quickSightDashboards" + ) + """ + TBC + """ + QUICK_SIGHT_DATASETS: ClassVar[RelationField] = RelationField("quickSightDatasets") + """ + TBC + """ + QUICK_SIGHT_ANALYSES: ClassVar[RelationField] = RelationField("quickSightAnalyses") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "quick_sight_folder_type", + "quick_sight_folder_hierarchy", + "quick_sight_dashboards", + "quick_sight_datasets", + "quick_sight_analyses", + ] + + @property + def quick_sight_folder_type(self) -> Optional[QuickSightFolderType]: + return ( + None if self.attributes is None else self.attributes.quick_sight_folder_type + ) + + @quick_sight_folder_type.setter + def quick_sight_folder_type( + self, quick_sight_folder_type: Optional[QuickSightFolderType] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_folder_type = quick_sight_folder_type + + @property + def quick_sight_folder_hierarchy(self) -> Optional[list[dict[str, str]]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_folder_hierarchy + ) + + @quick_sight_folder_hierarchy.setter + def quick_sight_folder_hierarchy( + self, quick_sight_folder_hierarchy: Optional[list[dict[str, str]]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_folder_hierarchy = quick_sight_folder_hierarchy + + @property + def quick_sight_dashboards(self) -> Optional[list[QuickSightDashboard]]: + return ( + None if self.attributes is None else self.attributes.quick_sight_dashboards + ) + + @quick_sight_dashboards.setter + def quick_sight_dashboards( + self, quick_sight_dashboards: Optional[list[QuickSightDashboard]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dashboards = quick_sight_dashboards + + @property + def quick_sight_datasets(self) -> Optional[list[QuickSightDataset]]: + return None if self.attributes is None else self.attributes.quick_sight_datasets + + @quick_sight_datasets.setter + def quick_sight_datasets( + self, quick_sight_datasets: Optional[list[QuickSightDataset]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_datasets = quick_sight_datasets + + @property + def quick_sight_analyses(self) -> Optional[list[QuickSightAnalysis]]: + return None if self.attributes is None else self.attributes.quick_sight_analyses + + @quick_sight_analyses.setter + def quick_sight_analyses( + self, quick_sight_analyses: Optional[list[QuickSightAnalysis]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analyses = quick_sight_analyses + + class Attributes(QuickSight.Attributes): + quick_sight_folder_type: Optional[QuickSightFolderType] = Field( + None, description="", alias="quickSightFolderType" + ) + quick_sight_folder_hierarchy: Optional[list[dict[str, str]]] = Field( + None, description="", alias="quickSightFolderHierarchy" + ) + quick_sight_dashboards: Optional[list[QuickSightDashboard]] = Field( + None, description="", alias="quickSightDashboards" + ) # relationship + quick_sight_datasets: Optional[list[QuickSightDataset]] = Field( + None, description="", alias="quickSightDatasets" + ) # relationship + quick_sight_analyses: Optional[list[QuickSightAnalysis]] = Field( + None, description="", alias="quickSightAnalyses" + ) # relationship + + attributes: "QuickSightFolder.Attributes" = 
Field( + default_factory=lambda: QuickSightFolder.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class QuickSightDashboardVisual(QuickSight): + """Description""" + + type_name: str = Field("QuickSightDashboardVisual", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QuickSightDashboardVisual": + raise ValueError("must be QuickSightDashboardVisual") + return v + + def __setattr__(self, name, value): + if name in QuickSightDashboardVisual._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QUICK_SIGHT_DASHBOARD_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "quickSightDashboardQualifiedName", + "quickSightDashboardQualifiedName", + "quickSightDashboardQualifiedName.text", + ) + """ + Unique name of the dashboard in which this visual exists. + """ + + QUICK_SIGHT_DASHBOARD: ClassVar[RelationField] = RelationField( + "quickSightDashboard" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "quick_sight_dashboard_qualified_name", + "quick_sight_dashboard", + ] + + @property + def quick_sight_dashboard_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dashboard_qualified_name + ) + + @quick_sight_dashboard_qualified_name.setter + def quick_sight_dashboard_qualified_name( + self, quick_sight_dashboard_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dashboard_qualified_name = ( + quick_sight_dashboard_qualified_name + ) + + @property + def quick_sight_dashboard(self) -> Optional[QuickSightDashboard]: + return ( + None if self.attributes is None else self.attributes.quick_sight_dashboard + ) + + @quick_sight_dashboard.setter + def quick_sight_dashboard( + self, quick_sight_dashboard: Optional[QuickSightDashboard] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dashboard = quick_sight_dashboard + + class Attributes(QuickSight.Attributes): + quick_sight_dashboard_qualified_name: Optional[str] = Field( + None, description="", alias="quickSightDashboardQualifiedName" + ) + quick_sight_dashboard: Optional[QuickSightDashboard] = Field( + None, description="", alias="quickSightDashboard" + ) # relationship + + attributes: "QuickSightDashboardVisual.Attributes" = Field( + default_factory=lambda: QuickSightDashboardVisual.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class QuickSightAnalysisVisual(QuickSight): """Description""" - type_name: str = Field("ThoughtspotAnswer", allow_mutation=False) + type_name: str = Field("QuickSightAnalysisVisual", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "ThoughtspotAnswer": - raise ValueError("must be ThoughtspotAnswer") + if v != "QuickSightAnalysisVisual": + raise ValueError("must be QuickSightAnalysisVisual") return v def __setattr__(self, name, value): - if name in ThoughtspotAnswer._convenience_properties: + if name in QuickSightAnalysisVisual._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - _convenience_properties: ClassVar[list[str]] = [] + QUICK_SIGHT_ANALYSIS_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "quickSightAnalysisQualifiedName", + "quickSightAnalysisQualifiedName", + "quickSightAnalysisQualifiedName.text", + ) + """ + Unique name of the QuickSight analysis in which this visual exists. + """ + + QUICK_SIGHT_ANALYSIS: ClassVar[RelationField] = RelationField("quickSightAnalysis") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "quick_sight_analysis_qualified_name", + "quick_sight_analysis", + ] + + @property + def quick_sight_analysis_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_analysis_qualified_name + ) + + @quick_sight_analysis_qualified_name.setter + def quick_sight_analysis_qualified_name( + self, quick_sight_analysis_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analysis_qualified_name = ( + quick_sight_analysis_qualified_name + ) + + @property + def quick_sight_analysis(self) -> Optional[QuickSightAnalysis]: + return None if self.attributes is None else self.attributes.quick_sight_analysis + + @quick_sight_analysis.setter + def quick_sight_analysis(self, quick_sight_analysis: Optional[QuickSightAnalysis]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analysis = quick_sight_analysis + + class Attributes(QuickSight.Attributes): + quick_sight_analysis_qualified_name: Optional[str] = Field( + None, description="", alias="quickSightAnalysisQualifiedName" + ) + quick_sight_analysis: Optional[QuickSightAnalysis] = Field( + None, description="", alias="quickSightAnalysis" + ) # relationship + + attributes: "QuickSightAnalysisVisual.Attributes" = Field( + default_factory=lambda: QuickSightAnalysisVisual.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class QuickSightDatasetField(QuickSight): + """Description""" + + type_name: str = Field("QuickSightDatasetField", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QuickSightDatasetField": + raise ValueError("must be QuickSightDatasetField") + return v + + def __setattr__(self, name, value): + if name in QuickSightDatasetField._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QUICK_SIGHT_DATASET_FIELD_TYPE: ClassVar[KeywordField] = KeywordField( + "quickSightDatasetFieldType", "quickSightDatasetFieldType" + ) + """ + Datatype of this field, for example: STRING, INTEGER, etc. + """ + QUICK_SIGHT_DATASET_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "quickSightDatasetQualifiedName", + "quickSightDatasetQualifiedName", + "quickSightDatasetQualifiedName.text", + ) + """ + Unique name of the dataset in which this field exists. + """ + + QUICK_SIGHT_DATASET: ClassVar[RelationField] = RelationField("quickSightDataset") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "quick_sight_dataset_field_type", + "quick_sight_dataset_qualified_name", + "quick_sight_dataset", + ] + + @property + def quick_sight_dataset_field_type(self) -> Optional[QuickSightDatasetFieldType]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dataset_field_type + ) + + @quick_sight_dataset_field_type.setter + def quick_sight_dataset_field_type( + self, quick_sight_dataset_field_type: Optional[QuickSightDatasetFieldType] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dataset_field_type = quick_sight_dataset_field_type + + @property + def quick_sight_dataset_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dataset_qualified_name + ) + + @quick_sight_dataset_qualified_name.setter + def quick_sight_dataset_qualified_name( + self, quick_sight_dataset_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dataset_qualified_name = ( + quick_sight_dataset_qualified_name + ) + + @property + def quick_sight_dataset(self) -> Optional[QuickSightDataset]: + return None if self.attributes is None else self.attributes.quick_sight_dataset + + @quick_sight_dataset.setter + def quick_sight_dataset(self, quick_sight_dataset: Optional[QuickSightDataset]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dataset = quick_sight_dataset + + class Attributes(QuickSight.Attributes): + quick_sight_dataset_field_type: Optional[QuickSightDatasetFieldType] = Field( + None, description="", alias="quickSightDatasetFieldType" + ) + quick_sight_dataset_qualified_name: Optional[str] = Field( + None, description="", alias="quickSightDatasetQualifiedName" + ) + quick_sight_dataset: Optional[QuickSightDataset] = Field( + None, description="", alias="quickSightDataset" + ) # relationship + + attributes: "QuickSightDatasetField.Attributes" = Field( + default_factory=lambda: QuickSightDatasetField.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class QuickSightAnalysis(QuickSight): + """Description""" + + type_name: str = Field("QuickSightAnalysis", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QuickSightAnalysis": + raise ValueError("must be QuickSightAnalysis") + return v + + def __setattr__(self, name, value): + if name in QuickSightAnalysis._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QUICK_SIGHT_ANALYSIS_STATUS: ClassVar[KeywordField] = KeywordField( + "quickSightAnalysisStatus", "quickSightAnalysisStatus" + ) + """ + Status of this analysis, for example: CREATION_IN_PROGRESS, UPDATE_SUCCESSFUL, etc. + """ + QUICK_SIGHT_ANALYSIS_CALCULATED_FIELDS: ClassVar[KeywordField] = KeywordField( + "quickSightAnalysisCalculatedFields", "quickSightAnalysisCalculatedFields" + ) + """ + List of field names calculated by this analysis. + """ + QUICK_SIGHT_ANALYSIS_PARAMETER_DECLARATIONS: ClassVar[KeywordField] = KeywordField( + "quickSightAnalysisParameterDeclarations", + "quickSightAnalysisParameterDeclarations", + ) + """ + List of parameters used for this analysis. + """ + QUICK_SIGHT_ANALYSIS_FILTER_GROUPS: ClassVar[KeywordField] = KeywordField( + "quickSightAnalysisFilterGroups", "quickSightAnalysisFilterGroups" + ) + """ + List of filter groups used for this analysis. + """ + + QUICK_SIGHT_ANALYSIS_VISUALS: ClassVar[RelationField] = RelationField( + "quickSightAnalysisVisuals" + ) + """ + TBC + """ + QUICK_SIGHT_ANALYSIS_FOLDERS: ClassVar[RelationField] = RelationField( + "quickSightAnalysisFolders" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "quick_sight_analysis_status", + "quick_sight_analysis_calculated_fields", + "quick_sight_analysis_parameter_declarations", + "quick_sight_analysis_filter_groups", + "quick_sight_analysis_visuals", + "quick_sight_analysis_folders", + ] + + @property + def quick_sight_analysis_status(self) -> Optional[QuickSightAnalysisStatus]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_analysis_status + ) + + @quick_sight_analysis_status.setter + def quick_sight_analysis_status( + self, quick_sight_analysis_status: Optional[QuickSightAnalysisStatus] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analysis_status = quick_sight_analysis_status + + @property + def quick_sight_analysis_calculated_fields(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_analysis_calculated_fields + ) + + @quick_sight_analysis_calculated_fields.setter + def quick_sight_analysis_calculated_fields( + self, quick_sight_analysis_calculated_fields: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analysis_calculated_fields = ( + quick_sight_analysis_calculated_fields + ) + + @property + def quick_sight_analysis_parameter_declarations(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_analysis_parameter_declarations + ) + + @quick_sight_analysis_parameter_declarations.setter + def quick_sight_analysis_parameter_declarations( + self, quick_sight_analysis_parameter_declarations: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.quick_sight_analysis_parameter_declarations = ( + quick_sight_analysis_parameter_declarations + ) + + @property + def quick_sight_analysis_filter_groups(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_analysis_filter_groups + ) + + @quick_sight_analysis_filter_groups.setter + def quick_sight_analysis_filter_groups( + self, quick_sight_analysis_filter_groups: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analysis_filter_groups = ( + quick_sight_analysis_filter_groups + ) + + @property + def quick_sight_analysis_visuals(self) -> Optional[list[QuickSightAnalysisVisual]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_analysis_visuals + ) + + @quick_sight_analysis_visuals.setter + def quick_sight_analysis_visuals( + self, quick_sight_analysis_visuals: Optional[list[QuickSightAnalysisVisual]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analysis_visuals = quick_sight_analysis_visuals + + @property + def quick_sight_analysis_folders(self) -> Optional[list[QuickSightFolder]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_analysis_folders + ) + + @quick_sight_analysis_folders.setter + def quick_sight_analysis_folders( + self, quick_sight_analysis_folders: Optional[list[QuickSightFolder]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analysis_folders = quick_sight_analysis_folders + + class Attributes(QuickSight.Attributes): + quick_sight_analysis_status: Optional[QuickSightAnalysisStatus] = Field( + None, description="", alias="quickSightAnalysisStatus" + ) + quick_sight_analysis_calculated_fields: Optional[set[str]] = Field( + None, description="", alias="quickSightAnalysisCalculatedFields" + ) + quick_sight_analysis_parameter_declarations: Optional[set[str]] = Field( + None, description="", alias="quickSightAnalysisParameterDeclarations" + ) + quick_sight_analysis_filter_groups: Optional[set[str]] = Field( + None, description="", alias="quickSightAnalysisFilterGroups" + ) + quick_sight_analysis_visuals: Optional[list[QuickSightAnalysisVisual]] = Field( + None, description="", alias="quickSightAnalysisVisuals" + ) # relationship + quick_sight_analysis_folders: Optional[list[QuickSightFolder]] = Field( + None, description="", alias="quickSightAnalysisFolders" + ) # relationship + + attributes: "QuickSightAnalysis.Attributes" = Field( + default_factory=lambda: QuickSightAnalysis.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class QuickSightDashboard(QuickSight): + """Description""" + + type_name: str = Field("QuickSightDashboard", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QuickSightDashboard": + raise ValueError("must be QuickSightDashboard") + return v + + def __setattr__(self, name, value): + if name in QuickSightDashboard._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QUICK_SIGHT_DASHBOARD_PUBLISHED_VERSION_NUMBER: ClassVar[ + NumericField + ] = NumericField( + "quickSightDashboardPublishedVersionNumber", + "quickSightDashboardPublishedVersionNumber", + ) + """ + Version number of the published dashboard. + """ + QUICK_SIGHT_DASHBOARD_LAST_PUBLISHED_TIME: ClassVar[NumericField] = NumericField( + "quickSightDashboardLastPublishedTime", "quickSightDashboardLastPublishedTime" + ) + """ + Time (epoch) at which this dashboard was last published, in milliseconds. + """ + + QUICK_SIGHT_DASHBOARD_FOLDERS: ClassVar[RelationField] = RelationField( + "quickSightDashboardFolders" + ) + """ + TBC + """ + QUICK_SIGHT_DASHBOARD_VISUALS: ClassVar[RelationField] = RelationField( + "quickSightDashboardVisuals" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "quick_sight_dashboard_published_version_number", + "quick_sight_dashboard_last_published_time", + "quick_sight_dashboard_folders", + "quick_sight_dashboard_visuals", + ] + + @property + def quick_sight_dashboard_published_version_number(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dashboard_published_version_number + ) + + @quick_sight_dashboard_published_version_number.setter + def quick_sight_dashboard_published_version_number( + self, quick_sight_dashboard_published_version_number: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dashboard_published_version_number = ( + quick_sight_dashboard_published_version_number + ) + + @property + def quick_sight_dashboard_last_published_time(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dashboard_last_published_time + ) + + @quick_sight_dashboard_last_published_time.setter + def quick_sight_dashboard_last_published_time( + self, quick_sight_dashboard_last_published_time: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dashboard_last_published_time = ( + quick_sight_dashboard_last_published_time + ) + + @property + def quick_sight_dashboard_folders(self) -> Optional[list[QuickSightFolder]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dashboard_folders + ) + + @quick_sight_dashboard_folders.setter + def quick_sight_dashboard_folders( + self, quick_sight_dashboard_folders: Optional[list[QuickSightFolder]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dashboard_folders = quick_sight_dashboard_folders + + @property + def quick_sight_dashboard_visuals( + self, + ) -> Optional[list[QuickSightDashboardVisual]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dashboard_visuals + ) + + @quick_sight_dashboard_visuals.setter + def quick_sight_dashboard_visuals( + self, 
quick_sight_dashboard_visuals: Optional[list[QuickSightDashboardVisual]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dashboard_visuals = quick_sight_dashboard_visuals + + class Attributes(QuickSight.Attributes): + quick_sight_dashboard_published_version_number: Optional[int] = Field( + None, description="", alias="quickSightDashboardPublishedVersionNumber" + ) + quick_sight_dashboard_last_published_time: Optional[datetime] = Field( + None, description="", alias="quickSightDashboardLastPublishedTime" + ) + quick_sight_dashboard_folders: Optional[list[QuickSightFolder]] = Field( + None, description="", alias="quickSightDashboardFolders" + ) # relationship + quick_sight_dashboard_visuals: Optional[ + list[QuickSightDashboardVisual] + ] = Field( + None, description="", alias="quickSightDashboardVisuals" + ) # relationship + + attributes: "QuickSightDashboard.Attributes" = Field( + default_factory=lambda: QuickSightDashboard.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class QuickSightDataset(QuickSight): + """Description""" + + type_name: str = Field("QuickSightDataset", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QuickSightDataset": + raise ValueError("must be QuickSightDataset") + return v + + def __setattr__(self, name, value): + if name in QuickSightDataset._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QUICK_SIGHT_DATASET_IMPORT_MODE: ClassVar[KeywordField] = KeywordField( + "quickSightDatasetImportMode", "quickSightDatasetImportMode" + ) + """ + Import mode for this dataset, for example: SPICE or DIRECT_QUERY. + """ + QUICK_SIGHT_DATASET_COLUMN_COUNT: ClassVar[NumericField] = NumericField( + "quickSightDatasetColumnCount", "quickSightDatasetColumnCount" + ) + """ + Number of columns present in this dataset. 
+ """ + + QUICK_SIGHT_DATASET_FOLDERS: ClassVar[RelationField] = RelationField( + "quickSightDatasetFolders" + ) + """ + TBC + """ + QUICK_SIGHT_DATASET_FIELDS: ClassVar[RelationField] = RelationField( + "quickSightDatasetFields" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "quick_sight_dataset_import_mode", + "quick_sight_dataset_column_count", + "quick_sight_dataset_folders", + "quick_sight_dataset_fields", + ] + + @property + def quick_sight_dataset_import_mode(self) -> Optional[QuickSightDatasetImportMode]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dataset_import_mode + ) + + @quick_sight_dataset_import_mode.setter + def quick_sight_dataset_import_mode( + self, quick_sight_dataset_import_mode: Optional[QuickSightDatasetImportMode] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dataset_import_mode = ( + quick_sight_dataset_import_mode + ) + + @property + def quick_sight_dataset_column_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dataset_column_count + ) + + @quick_sight_dataset_column_count.setter + def quick_sight_dataset_column_count( + self, quick_sight_dataset_column_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dataset_column_count = ( + quick_sight_dataset_column_count + ) + + @property + def quick_sight_dataset_folders(self) -> Optional[list[QuickSightFolder]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dataset_folders + ) + + @quick_sight_dataset_folders.setter + def quick_sight_dataset_folders( + self, quick_sight_dataset_folders: Optional[list[QuickSightFolder]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dataset_folders = quick_sight_dataset_folders + + @property + def quick_sight_dataset_fields(self) -> Optional[list[QuickSightDatasetField]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dataset_fields + ) + + @quick_sight_dataset_fields.setter + def quick_sight_dataset_fields( + self, quick_sight_dataset_fields: Optional[list[QuickSightDatasetField]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dataset_fields = quick_sight_dataset_fields + + class Attributes(QuickSight.Attributes): + quick_sight_dataset_import_mode: Optional[QuickSightDatasetImportMode] = Field( + None, description="", alias="quickSightDatasetImportMode" + ) + quick_sight_dataset_column_count: Optional[int] = Field( + None, description="", alias="quickSightDatasetColumnCount" + ) + quick_sight_dataset_folders: Optional[list[QuickSightFolder]] = Field( + None, description="", alias="quickSightDatasetFolders" + ) # relationship + quick_sight_dataset_fields: Optional[list[QuickSightDatasetField]] = Field( + None, description="", alias="quickSightDatasetFields" + ) # relationship + + attributes: "QuickSightDataset.Attributes" = Field( + default_factory=lambda: QuickSightDataset.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +QuickSightFolder.Attributes.update_forward_refs() + + +QuickSightDashboardVisual.Attributes.update_forward_refs() + + +QuickSightAnalysisVisual.Attributes.update_forward_refs() + + +QuickSightDatasetField.Attributes.update_forward_refs() + + +QuickSightAnalysis.Attributes.update_forward_refs() + + +QuickSightDashboard.Attributes.update_forward_refs() -ThoughtspotAnswer.Attributes.update_forward_refs() +QuickSightDataset.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset77.py b/pyatlan/model/assets/asset77.py index f701acc0e..168751be6 100644 --- a/pyatlan/model/assets/asset77.py +++ b/pyatlan/model/assets/asset77.py @@ -8,1661 +8,171 @@ from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - NumericField, - RelationField, - TextField, -) +from pyatlan.model.fields.atlan_fields import KeywordTextField, RelationField -from .asset50 import PowerBI +from .asset50 import Thoughtspot -class PowerBIReport(PowerBI): +class ThoughtspotLiveboard(Thoughtspot): """Description""" - type_name: str = Field("PowerBIReport", allow_mutation=False) + type_name: str = Field("ThoughtspotLiveboard", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "PowerBIReport": - raise ValueError("must be PowerBIReport") + if v != "ThoughtspotLiveboard": + raise ValueError("must be ThoughtspotLiveboard") return v def __setattr__(self, name, value): - if name in PowerBIReport._convenience_properties: + if name in ThoughtspotLiveboard._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - TBC - """ - DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "datasetQualifiedName", "datasetQualifiedName" - ) - """ - TBC - """ - WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") - """ - TBC - """ - PAGE_COUNT: ClassVar[NumericField] = NumericField("pageCount", "pageCount") - """ - TBC - """ - - WORKSPACE: ClassVar[RelationField] = RelationField("workspace") - """ - TBC - """ - TILES: ClassVar[RelationField] = RelationField("tiles") - """ - TBC - """ - PAGES: ClassVar[RelationField] = RelationField("pages") - """ - TBC - """ - DATASET: ClassVar[RelationField] = RelationField("dataset") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "dataset_qualified_name", - "web_url", - "page_count", - "workspace", - "tiles", - "pages", - "dataset", - ] - - @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def dataset_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dataset_qualified_name - ) - - @dataset_qualified_name.setter - def dataset_qualified_name(self, dataset_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataset_qualified_name = 
dataset_qualified_name - - @property - def web_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.web_url - - @web_url.setter - def web_url(self, web_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.web_url = web_url - - @property - def page_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.page_count - - @page_count.setter - def page_count(self, page_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.page_count = page_count - - @property - def workspace(self) -> Optional[PowerBIWorkspace]: - return None if self.attributes is None else self.attributes.workspace - - @workspace.setter - def workspace(self, workspace: Optional[PowerBIWorkspace]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace = workspace - - @property - def tiles(self) -> Optional[list[PowerBITile]]: - return None if self.attributes is None else self.attributes.tiles - - @tiles.setter - def tiles(self, tiles: Optional[list[PowerBITile]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tiles = tiles - - @property - def pages(self) -> Optional[list[PowerBIPage]]: - return None if self.attributes is None else self.attributes.pages - - @pages.setter - def pages(self, pages: Optional[list[PowerBIPage]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.pages = pages - - @property - def dataset(self) -> Optional[PowerBIDataset]: - return None if self.attributes is None else self.attributes.dataset - - @dataset.setter - def dataset(self, dataset: Optional[PowerBIDataset]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataset = dataset - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - dataset_qualified_name: Optional[str] = Field( - None, description="", alias="datasetQualifiedName" - ) - web_url: Optional[str] = Field(None, description="", alias="webUrl") - page_count: Optional[int] = Field(None, description="", alias="pageCount") - workspace: Optional[PowerBIWorkspace] = Field( - None, description="", alias="workspace" - ) # relationship - tiles: Optional[list[PowerBITile]] = Field( - None, description="", alias="tiles" - ) # relationship - pages: Optional[list[PowerBIPage]] = Field( - None, description="", alias="pages" - ) # relationship - dataset: Optional[PowerBIDataset] = Field( - None, description="", alias="dataset" - ) # relationship - - attributes: "PowerBIReport.Attributes" = Field( - default_factory=lambda: PowerBIReport.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBIMeasure(PowerBI): - """Description""" - - type_name: str = Field("PowerBIMeasure", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBIMeasure": - raise ValueError("must be PowerBIMeasure") - return v - - def __setattr__(self, name, value): - if name in PowerBIMeasure._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - TBC - """ - DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "datasetQualifiedName", "datasetQualifiedName" - ) - """ - TBC - """ - POWER_BI_MEASURE_EXPRESSION: ClassVar[TextField] = TextField( - "powerBIMeasureExpression", "powerBIMeasureExpression" - ) - """ - TBC - """ - POWER_BI_IS_EXTERNAL_MEASURE: ClassVar[BooleanField] = BooleanField( - "powerBIIsExternalMeasure", "powerBIIsExternalMeasure" - ) - """ - TBC - """ - - TABLE: ClassVar[RelationField] = RelationField("table") + THOUGHTSPOT_DASHLETS: ClassVar[RelationField] = RelationField("thoughtspotDashlets") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "dataset_qualified_name", - "power_b_i_measure_expression", - "power_b_i_is_external_measure", - "table", + "thoughtspot_dashlets", ] @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def dataset_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dataset_qualified_name - ) - - @dataset_qualified_name.setter - def dataset_qualified_name(self, dataset_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataset_qualified_name = dataset_qualified_name - - @property - def power_b_i_measure_expression(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.power_b_i_measure_expression - ) - - @power_b_i_measure_expression.setter - def power_b_i_measure_expression(self, power_b_i_measure_expression: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_measure_expression = power_b_i_measure_expression - - @property - def power_b_i_is_external_measure(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.power_b_i_is_external_measure - ) + def thoughtspot_dashlets(self) -> Optional[list[ThoughtspotDashlet]]: + return None if self.attributes is None else self.attributes.thoughtspot_dashlets - @power_b_i_is_external_measure.setter - def power_b_i_is_external_measure( - self, power_b_i_is_external_measure: Optional[bool] + @thoughtspot_dashlets.setter + def thoughtspot_dashlets( + self, thoughtspot_dashlets: Optional[list[ThoughtspotDashlet]] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.power_b_i_is_external_measure = power_b_i_is_external_measure - - @property - def table(self) -> 
Optional[PowerBITable]: - return None if self.attributes is None else self.attributes.table - - @table.setter - def table(self, table: Optional[PowerBITable]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table = table + self.attributes.thoughtspot_dashlets = thoughtspot_dashlets - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - dataset_qualified_name: Optional[str] = Field( - None, description="", alias="datasetQualifiedName" - ) - power_b_i_measure_expression: Optional[str] = Field( - None, description="", alias="powerBIMeasureExpression" - ) - power_b_i_is_external_measure: Optional[bool] = Field( - None, description="", alias="powerBIIsExternalMeasure" - ) - table: Optional[PowerBITable] = Field( - None, description="", alias="table" + class Attributes(Thoughtspot.Attributes): + thoughtspot_dashlets: Optional[list[ThoughtspotDashlet]] = Field( + None, description="", alias="thoughtspotDashlets" ) # relationship - attributes: "PowerBIMeasure.Attributes" = Field( - default_factory=lambda: PowerBIMeasure.Attributes(), + attributes: "ThoughtspotLiveboard.Attributes" = Field( + default_factory=lambda: ThoughtspotLiveboard.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class PowerBIColumn(PowerBI): +class ThoughtspotDashlet(Thoughtspot): """Description""" - type_name: str = Field("PowerBIColumn", allow_mutation=False) + type_name: str = Field("ThoughtspotDashlet", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "PowerBIColumn": - raise ValueError("must be PowerBIColumn") + if v != "ThoughtspotDashlet": + raise ValueError("must be ThoughtspotDashlet") return v def __setattr__(self, name, value): - if name in PowerBIColumn._convenience_properties: + if name in ThoughtspotDashlet._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - TBC - """ - DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "datasetQualifiedName", "datasetQualifiedName" - ) - """ - TBC - """ - POWER_BI_COLUMN_DATA_CATEGORY: ClassVar[KeywordField] = KeywordField( - "powerBIColumnDataCategory", "powerBIColumnDataCategory" - ) - """ - TBC - """ - POWER_BI_COLUMN_DATA_TYPE: ClassVar[KeywordField] = KeywordField( - "powerBIColumnDataType", "powerBIColumnDataType" - ) - """ - TBC - """ - POWER_BI_SORT_BY_COLUMN: ClassVar[KeywordField] = KeywordField( - "powerBISortByColumn", "powerBISortByColumn" + THOUGHTSPOT_LIVEBOARD_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "thoughtspotLiveboardName", + "thoughtspotLiveboardName.keyword", + "thoughtspotLiveboardName", ) """ - TBC + Simple name of the liveboard in which this dashlet exists. """ - POWER_BI_COLUMN_SUMMARIZE_BY: ClassVar[KeywordField] = KeywordField( - "powerBIColumnSummarizeBy", "powerBIColumnSummarizeBy" + THOUGHTSPOT_LIVEBOARD_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "thoughtspotLiveboardQualifiedName", + "thoughtspotLiveboardQualifiedName", + "thoughtspotLiveboardQualifiedName.text", ) """ - TBC + Unique name of the liveboard in which this dashlet exists. 
""" - TABLE: ClassVar[RelationField] = RelationField("table") + THOUGHTSPOT_LIVEBOARD: ClassVar[RelationField] = RelationField( + "thoughtspotLiveboard" + ) """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "dataset_qualified_name", - "power_b_i_column_data_category", - "power_b_i_column_data_type", - "power_b_i_sort_by_column", - "power_b_i_column_summarize_by", - "table", + "thoughtspot_liveboard_name", + "thoughtspot_liveboard_qualified_name", + "thoughtspot_liveboard", ] @property - def workspace_qualified_name(self) -> Optional[str]: + def thoughtspot_liveboard_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def dataset_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dataset_qualified_name + else self.attributes.thoughtspot_liveboard_name ) - @dataset_qualified_name.setter - def dataset_qualified_name(self, dataset_qualified_name: Optional[str]): + @thoughtspot_liveboard_name.setter + def thoughtspot_liveboard_name(self, thoughtspot_liveboard_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dataset_qualified_name = dataset_qualified_name + self.attributes.thoughtspot_liveboard_name = thoughtspot_liveboard_name @property - def power_b_i_column_data_category(self) -> Optional[str]: + def thoughtspot_liveboard_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.power_b_i_column_data_category + else self.attributes.thoughtspot_liveboard_qualified_name ) - @power_b_i_column_data_category.setter - def power_b_i_column_data_category( - self, power_b_i_column_data_category: Optional[str] + @thoughtspot_liveboard_qualified_name.setter + def thoughtspot_liveboard_qualified_name( + self, thoughtspot_liveboard_qualified_name: Optional[str] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.power_b_i_column_data_category = power_b_i_column_data_category - - @property - def power_b_i_column_data_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.power_b_i_column_data_type - ) - - @power_b_i_column_data_type.setter - def power_b_i_column_data_type(self, power_b_i_column_data_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_column_data_type = power_b_i_column_data_type - - @property - def power_b_i_sort_by_column(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.power_b_i_sort_by_column + self.attributes.thoughtspot_liveboard_qualified_name = ( + thoughtspot_liveboard_qualified_name ) - @power_b_i_sort_by_column.setter - def power_b_i_sort_by_column(self, power_b_i_sort_by_column: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_sort_by_column = power_b_i_sort_by_column - @property - def power_b_i_column_summarize_by(self) -> Optional[str]: + def thoughtspot_liveboard(self) -> Optional[ThoughtspotLiveboard]: return ( - None - if self.attributes is None - else self.attributes.power_b_i_column_summarize_by + 
None if self.attributes is None else self.attributes.thoughtspot_liveboard ) - @power_b_i_column_summarize_by.setter - def power_b_i_column_summarize_by( - self, power_b_i_column_summarize_by: Optional[str] + @thoughtspot_liveboard.setter + def thoughtspot_liveboard( + self, thoughtspot_liveboard: Optional[ThoughtspotLiveboard] ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.power_b_i_column_summarize_by = power_b_i_column_summarize_by + self.attributes.thoughtspot_liveboard = thoughtspot_liveboard - @property - def table(self) -> Optional[PowerBITable]: - return None if self.attributes is None else self.attributes.table - - @table.setter - def table(self, table: Optional[PowerBITable]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table = table - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - dataset_qualified_name: Optional[str] = Field( - None, description="", alias="datasetQualifiedName" - ) - power_b_i_column_data_category: Optional[str] = Field( - None, description="", alias="powerBIColumnDataCategory" - ) - power_b_i_column_data_type: Optional[str] = Field( - None, description="", alias="powerBIColumnDataType" + class Attributes(Thoughtspot.Attributes): + thoughtspot_liveboard_name: Optional[str] = Field( + None, description="", alias="thoughtspotLiveboardName" ) - power_b_i_sort_by_column: Optional[str] = Field( - None, description="", alias="powerBISortByColumn" + thoughtspot_liveboard_qualified_name: Optional[str] = Field( + None, description="", alias="thoughtspotLiveboardQualifiedName" ) - power_b_i_column_summarize_by: Optional[str] = Field( - None, description="", alias="powerBIColumnSummarizeBy" - ) - table: Optional[PowerBITable] = Field( - None, description="", alias="table" + thoughtspot_liveboard: Optional[ThoughtspotLiveboard] = Field( + None, description="", alias="thoughtspotLiveboard" ) # relationship - attributes: "PowerBIColumn.Attributes" = Field( - default_factory=lambda: PowerBIColumn.Attributes(), + attributes: "ThoughtspotDashlet.Attributes" = Field( + default_factory=lambda: ThoughtspotDashlet.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class PowerBITable(PowerBI): - """Description""" - - type_name: str = Field("PowerBITable", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBITable": - raise ValueError("must be PowerBITable") - return v - - def __setattr__(self, name, value): - if name in PowerBITable._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - TBC - """ - DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "datasetQualifiedName", "datasetQualifiedName" - ) - """ - TBC - """ - POWER_BI_TABLE_SOURCE_EXPRESSIONS: ClassVar[KeywordField] = KeywordField( - "powerBITableSourceExpressions", "powerBITableSourceExpressions" - ) - """ - TBC - """ - POWER_BI_TABLE_COLUMN_COUNT: ClassVar[NumericField] = NumericField( - "powerBITableColumnCount", "powerBITableColumnCount" - ) - """ - TBC - """ - POWER_BI_TABLE_MEASURE_COUNT: ClassVar[NumericField] = NumericField( - "powerBITableMeasureCount", "powerBITableMeasureCount" - ) - """ - TBC - """ - - COLUMNS: ClassVar[RelationField] = RelationField("columns") - """ - TBC - """ - MEASURES: ClassVar[RelationField] = RelationField("measures") - """ - TBC - """ - DATASET: ClassVar[RelationField] = RelationField("dataset") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "dataset_qualified_name", - "power_b_i_table_source_expressions", - "power_b_i_table_column_count", - "power_b_i_table_measure_count", - "columns", - "measures", - "dataset", - ] - - @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def dataset_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dataset_qualified_name - ) - - @dataset_qualified_name.setter - def dataset_qualified_name(self, dataset_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataset_qualified_name = dataset_qualified_name - - @property - def power_b_i_table_source_expressions(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.power_b_i_table_source_expressions - ) - - @power_b_i_table_source_expressions.setter - def power_b_i_table_source_expressions( - self, power_b_i_table_source_expressions: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_table_source_expressions = ( - power_b_i_table_source_expressions - ) - - @property - def power_b_i_table_column_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.power_b_i_table_column_count - ) - - @power_b_i_table_column_count.setter - def power_b_i_table_column_count(self, power_b_i_table_column_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_table_column_count = 
power_b_i_table_column_count - - @property - def power_b_i_table_measure_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.power_b_i_table_measure_count - ) - - @power_b_i_table_measure_count.setter - def power_b_i_table_measure_count( - self, power_b_i_table_measure_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_table_measure_count = power_b_i_table_measure_count - - @property - def columns(self) -> Optional[list[PowerBIColumn]]: - return None if self.attributes is None else self.attributes.columns - - @columns.setter - def columns(self, columns: Optional[list[PowerBIColumn]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.columns = columns - - @property - def measures(self) -> Optional[list[PowerBIMeasure]]: - return None if self.attributes is None else self.attributes.measures - - @measures.setter - def measures(self, measures: Optional[list[PowerBIMeasure]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.measures = measures - - @property - def dataset(self) -> Optional[PowerBIDataset]: - return None if self.attributes is None else self.attributes.dataset - - @dataset.setter - def dataset(self, dataset: Optional[PowerBIDataset]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataset = dataset - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - dataset_qualified_name: Optional[str] = Field( - None, description="", alias="datasetQualifiedName" - ) - power_b_i_table_source_expressions: Optional[set[str]] = Field( - None, description="", alias="powerBITableSourceExpressions" - ) - power_b_i_table_column_count: Optional[int] = Field( - None, description="", alias="powerBITableColumnCount" - ) - power_b_i_table_measure_count: Optional[int] = Field( - None, description="", alias="powerBITableMeasureCount" - ) - columns: Optional[list[PowerBIColumn]] = Field( - None, description="", alias="columns" - ) # relationship - measures: Optional[list[PowerBIMeasure]] = Field( - None, description="", alias="measures" - ) # relationship - dataset: Optional[PowerBIDataset] = Field( - None, description="", alias="dataset" - ) # relationship - - attributes: "PowerBITable.Attributes" = Field( - default_factory=lambda: PowerBITable.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBITile(PowerBI): - """Description""" - - type_name: str = Field("PowerBITile", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBITile": - raise ValueError("must be PowerBITile") - return v - - def __setattr__(self, name, value): - if name in PowerBITile._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - TBC - """ - DASHBOARD_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "dashboardQualifiedName", "dashboardQualifiedName" - ) - """ - TBC - """ - - REPORT: ClassVar[RelationField] = RelationField("report") - """ - TBC - """ - DATASET: ClassVar[RelationField] = RelationField("dataset") - """ - TBC - """ - DASHBOARD: ClassVar[RelationField] = RelationField("dashboard") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "dashboard_qualified_name", - "report", - "dataset", - "dashboard", - ] - - @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def dashboard_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dashboard_qualified_name - ) - - @dashboard_qualified_name.setter - def dashboard_qualified_name(self, dashboard_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboard_qualified_name = dashboard_qualified_name - - @property - def report(self) -> Optional[PowerBIReport]: - return None if self.attributes is None else self.attributes.report - - @report.setter - def report(self, report: Optional[PowerBIReport]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.report = report - - @property - def dataset(self) -> Optional[PowerBIDataset]: - return None if self.attributes is None else self.attributes.dataset - - @dataset.setter - def dataset(self, dataset: Optional[PowerBIDataset]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataset = dataset - - @property - def dashboard(self) -> Optional[PowerBIDashboard]: - return None if self.attributes is None else self.attributes.dashboard - - @dashboard.setter - def dashboard(self, dashboard: Optional[PowerBIDashboard]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboard = dashboard - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - dashboard_qualified_name: Optional[str] = Field( - None, description="", alias="dashboardQualifiedName" - ) - report: Optional[PowerBIReport] = Field( - None, description="", alias="report" - ) # relationship - dataset: Optional[PowerBIDataset] = Field( - None, description="", alias="dataset" - ) # relationship - dashboard: Optional[PowerBIDashboard] = Field( - None, 
description="", alias="dashboard" - ) # relationship - - attributes: "PowerBITile.Attributes" = Field( - default_factory=lambda: PowerBITile.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBIDatasource(PowerBI): - """Description""" - - type_name: str = Field("PowerBIDatasource", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBIDatasource": - raise ValueError("must be PowerBIDatasource") - return v - - def __setattr__(self, name, value): - if name in PowerBIDatasource._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - CONNECTION_DETAILS: ClassVar[KeywordField] = KeywordField( - "connectionDetails", "connectionDetails" - ) - """ - TBC - """ - - DATASETS: ClassVar[RelationField] = RelationField("datasets") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "connection_details", - "datasets", - ] - - @property - def connection_details(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.connection_details - - @connection_details.setter - def connection_details(self, connection_details: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.connection_details = connection_details - - @property - def datasets(self) -> Optional[list[PowerBIDataset]]: - return None if self.attributes is None else self.attributes.datasets - - @datasets.setter - def datasets(self, datasets: Optional[list[PowerBIDataset]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasets = datasets - - class Attributes(PowerBI.Attributes): - connection_details: Optional[dict[str, str]] = Field( - None, description="", alias="connectionDetails" - ) - datasets: Optional[list[PowerBIDataset]] = Field( - None, description="", alias="datasets" - ) # relationship - - attributes: "PowerBIDatasource.Attributes" = Field( - default_factory=lambda: PowerBIDatasource.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBIWorkspace(PowerBI): - """Description""" - - type_name: str = Field("PowerBIWorkspace", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBIWorkspace": - raise ValueError("must be PowerBIWorkspace") - return v - - def __setattr__(self, name, value): - if name in PowerBIWorkspace._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") - """ - TBC - """ - REPORT_COUNT: ClassVar[NumericField] = NumericField("reportCount", "reportCount") - """ - TBC - """ - DASHBOARD_COUNT: ClassVar[NumericField] = NumericField( - "dashboardCount", "dashboardCount" - ) - """ - TBC - """ - DATASET_COUNT: ClassVar[NumericField] = NumericField("datasetCount", "datasetCount") - """ - TBC - """ - DATAFLOW_COUNT: ClassVar[NumericField] = NumericField( - "dataflowCount", "dataflowCount" - ) - """ - TBC - """ - - REPORTS: ClassVar[RelationField] = RelationField("reports") - """ - TBC - """ - DATASETS: ClassVar[RelationField] = RelationField("datasets") - """ - TBC - """ - DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") - """ - TBC - """ - DATAFLOWS: ClassVar[RelationField] = RelationField("dataflows") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "web_url", - "report_count", - "dashboard_count", - "dataset_count", - "dataflow_count", - "reports", - "datasets", - "dashboards", - "dataflows", - ] - - @property - def web_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.web_url - - @web_url.setter - def web_url(self, web_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.web_url = web_url - - @property - def report_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.report_count - - @report_count.setter - def report_count(self, report_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.report_count = report_count - - @property - def dashboard_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.dashboard_count - - @dashboard_count.setter - def dashboard_count(self, dashboard_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboard_count = dashboard_count - - @property - def dataset_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.dataset_count - - @dataset_count.setter - def dataset_count(self, dataset_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataset_count = dataset_count - - @property - def dataflow_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.dataflow_count - - @dataflow_count.setter - def dataflow_count(self, dataflow_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataflow_count = dataflow_count - - @property - def reports(self) -> Optional[list[PowerBIReport]]: - return None if self.attributes is None else self.attributes.reports - - @reports.setter - def reports(self, reports: Optional[list[PowerBIReport]]): - if self.attributes is None: - self.attributes = 
self.Attributes() - self.attributes.reports = reports - - @property - def datasets(self) -> Optional[list[PowerBIDataset]]: - return None if self.attributes is None else self.attributes.datasets - - @datasets.setter - def datasets(self, datasets: Optional[list[PowerBIDataset]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasets = datasets - - @property - def dashboards(self) -> Optional[list[PowerBIDashboard]]: - return None if self.attributes is None else self.attributes.dashboards - - @dashboards.setter - def dashboards(self, dashboards: Optional[list[PowerBIDashboard]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboards = dashboards - - @property - def dataflows(self) -> Optional[list[PowerBIDataflow]]: - return None if self.attributes is None else self.attributes.dataflows - - @dataflows.setter - def dataflows(self, dataflows: Optional[list[PowerBIDataflow]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataflows = dataflows - - class Attributes(PowerBI.Attributes): - web_url: Optional[str] = Field(None, description="", alias="webUrl") - report_count: Optional[int] = Field(None, description="", alias="reportCount") - dashboard_count: Optional[int] = Field( - None, description="", alias="dashboardCount" - ) - dataset_count: Optional[int] = Field(None, description="", alias="datasetCount") - dataflow_count: Optional[int] = Field( - None, description="", alias="dataflowCount" - ) - reports: Optional[list[PowerBIReport]] = Field( - None, description="", alias="reports" - ) # relationship - datasets: Optional[list[PowerBIDataset]] = Field( - None, description="", alias="datasets" - ) # relationship - dashboards: Optional[list[PowerBIDashboard]] = Field( - None, description="", alias="dashboards" - ) # relationship - dataflows: Optional[list[PowerBIDataflow]] = Field( - None, description="", alias="dataflows" - ) # relationship - - attributes: "PowerBIWorkspace.Attributes" = Field( - default_factory=lambda: PowerBIWorkspace.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBIDataset(PowerBI): - """Description""" - - type_name: str = Field("PowerBIDataset", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBIDataset": - raise ValueError("must be PowerBIDataset") - return v - - def __setattr__(self, name, value): - if name in PowerBIDataset._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - TBC - """ - WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") - """ - TBC - """ - - REPORTS: ClassVar[RelationField] = RelationField("reports") - """ - TBC - """ - WORKSPACE: ClassVar[RelationField] = RelationField("workspace") - """ - TBC - """ - DATAFLOWS: ClassVar[RelationField] = RelationField("dataflows") - """ - TBC - """ - TILES: ClassVar[RelationField] = RelationField("tiles") - """ - TBC - """ - TABLES: ClassVar[RelationField] = RelationField("tables") - """ - TBC - """ - DATASOURCES: ClassVar[RelationField] = RelationField("datasources") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "web_url", - "reports", - "workspace", - "dataflows", - "tiles", - "tables", - "datasources", - ] - - @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def web_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.web_url - - @web_url.setter - def web_url(self, web_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.web_url = web_url - - @property - def reports(self) -> Optional[list[PowerBIReport]]: - return None if self.attributes is None else self.attributes.reports - - @reports.setter - def reports(self, reports: Optional[list[PowerBIReport]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.reports = reports - - @property - def workspace(self) -> Optional[PowerBIWorkspace]: - return None if self.attributes is None else self.attributes.workspace - - @workspace.setter - def workspace(self, workspace: Optional[PowerBIWorkspace]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace = workspace - - @property - def dataflows(self) -> Optional[list[PowerBIDataflow]]: - return None if self.attributes is None else self.attributes.dataflows - - @dataflows.setter - def dataflows(self, dataflows: Optional[list[PowerBIDataflow]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataflows = dataflows - - @property - def tiles(self) -> Optional[list[PowerBITile]]: - return None if self.attributes is None else self.attributes.tiles - - @tiles.setter - def tiles(self, tiles: Optional[list[PowerBITile]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tiles = tiles - - @property - def tables(self) -> Optional[list[PowerBITable]]: - return None 
if self.attributes is None else self.attributes.tables - - @tables.setter - def tables(self, tables: Optional[list[PowerBITable]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tables = tables - - @property - def datasources(self) -> Optional[list[PowerBIDatasource]]: - return None if self.attributes is None else self.attributes.datasources - - @datasources.setter - def datasources(self, datasources: Optional[list[PowerBIDatasource]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasources = datasources - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - web_url: Optional[str] = Field(None, description="", alias="webUrl") - reports: Optional[list[PowerBIReport]] = Field( - None, description="", alias="reports" - ) # relationship - workspace: Optional[PowerBIWorkspace] = Field( - None, description="", alias="workspace" - ) # relationship - dataflows: Optional[list[PowerBIDataflow]] = Field( - None, description="", alias="dataflows" - ) # relationship - tiles: Optional[list[PowerBITile]] = Field( - None, description="", alias="tiles" - ) # relationship - tables: Optional[list[PowerBITable]] = Field( - None, description="", alias="tables" - ) # relationship - datasources: Optional[list[PowerBIDatasource]] = Field( - None, description="", alias="datasources" - ) # relationship - - attributes: "PowerBIDataset.Attributes" = Field( - default_factory=lambda: PowerBIDataset.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBIDashboard(PowerBI): - """Description""" - - type_name: str = Field("PowerBIDashboard", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBIDashboard": - raise ValueError("must be PowerBIDashboard") - return v - - def __setattr__(self, name, value): - if name in PowerBIDashboard._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - TBC - """ - WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") - """ - TBC - """ - TILE_COUNT: ClassVar[NumericField] = NumericField("tileCount", "tileCount") - """ - TBC - """ - - WORKSPACE: ClassVar[RelationField] = RelationField("workspace") - """ - TBC - """ - TILES: ClassVar[RelationField] = RelationField("tiles") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "web_url", - "tile_count", - "workspace", - "tiles", - ] - - @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def web_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.web_url - - @web_url.setter - def web_url(self, web_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.web_url 
= web_url - - @property - def tile_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.tile_count - - @tile_count.setter - def tile_count(self, tile_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tile_count = tile_count - - @property - def workspace(self) -> Optional[PowerBIWorkspace]: - return None if self.attributes is None else self.attributes.workspace - - @workspace.setter - def workspace(self, workspace: Optional[PowerBIWorkspace]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace = workspace - - @property - def tiles(self) -> Optional[list[PowerBITile]]: - return None if self.attributes is None else self.attributes.tiles - - @tiles.setter - def tiles(self, tiles: Optional[list[PowerBITile]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tiles = tiles - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - web_url: Optional[str] = Field(None, description="", alias="webUrl") - tile_count: Optional[int] = Field(None, description="", alias="tileCount") - workspace: Optional[PowerBIWorkspace] = Field( - None, description="", alias="workspace" - ) # relationship - tiles: Optional[list[PowerBITile]] = Field( - None, description="", alias="tiles" - ) # relationship - - attributes: "PowerBIDashboard.Attributes" = Field( - default_factory=lambda: PowerBIDashboard.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBIDataflow(PowerBI): - """Description""" - - type_name: str = Field("PowerBIDataflow", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBIDataflow": - raise ValueError("must be PowerBIDataflow") - return v - - def __setattr__(self, name, value): - if name in PowerBIDataflow._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - TBC - """ - WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") - """ - TBC - """ - - WORKSPACE: ClassVar[RelationField] = RelationField("workspace") - """ - TBC - """ - DATASETS: ClassVar[RelationField] = RelationField("datasets") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "web_url", - "workspace", - "datasets", - ] - - @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def web_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.web_url - - @web_url.setter - def web_url(self, web_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.web_url = web_url - - @property - def workspace(self) -> Optional[PowerBIWorkspace]: - return None if self.attributes is 
None else self.attributes.workspace - - @workspace.setter - def workspace(self, workspace: Optional[PowerBIWorkspace]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace = workspace - - @property - def datasets(self) -> Optional[list[PowerBIDataset]]: - return None if self.attributes is None else self.attributes.datasets - - @datasets.setter - def datasets(self, datasets: Optional[list[PowerBIDataset]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasets = datasets - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - web_url: Optional[str] = Field(None, description="", alias="webUrl") - workspace: Optional[PowerBIWorkspace] = Field( - None, description="", alias="workspace" - ) # relationship - datasets: Optional[list[PowerBIDataset]] = Field( - None, description="", alias="datasets" - ) # relationship - - attributes: "PowerBIDataflow.Attributes" = Field( - default_factory=lambda: PowerBIDataflow.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBIPage(PowerBI): - """Description""" - - type_name: str = Field("PowerBIPage", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBIPage": - raise ValueError("must be PowerBIPage") - return v - - def __setattr__(self, name, value): - if name in PowerBIPage._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - TBC - """ - REPORT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "reportQualifiedName", "reportQualifiedName" - ) - """ - TBC - """ - - REPORT: ClassVar[RelationField] = RelationField("report") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "report_qualified_name", - "report", - ] - - @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def report_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.report_qualified_name - ) - - @report_qualified_name.setter - def report_qualified_name(self, report_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.report_qualified_name = report_qualified_name - - @property - def report(self) -> Optional[PowerBIReport]: - return None if self.attributes is None else self.attributes.report - - @report.setter - def report(self, report: Optional[PowerBIReport]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.report = report - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - report_qualified_name: Optional[str] = Field( - None, description="", 
alias="reportQualifiedName" - ) - report: Optional[PowerBIReport] = Field( - None, description="", alias="report" - ) # relationship - - attributes: "PowerBIPage.Attributes" = Field( - default_factory=lambda: PowerBIPage.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -PowerBIReport.Attributes.update_forward_refs() - - -PowerBIMeasure.Attributes.update_forward_refs() - - -PowerBIColumn.Attributes.update_forward_refs() - - -PowerBITable.Attributes.update_forward_refs() - - -PowerBITile.Attributes.update_forward_refs() - - -PowerBIDatasource.Attributes.update_forward_refs() - - -PowerBIWorkspace.Attributes.update_forward_refs() - - -PowerBIDataset.Attributes.update_forward_refs() - - -PowerBIDashboard.Attributes.update_forward_refs() - - -PowerBIDataflow.Attributes.update_forward_refs() +ThoughtspotLiveboard.Attributes.update_forward_refs() -PowerBIPage.Attributes.update_forward_refs() +ThoughtspotDashlet.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset78.py b/pyatlan/model/assets/asset78.py index f8003ec8e..a87cca0af 100644 --- a/pyatlan/model/assets/asset78.py +++ b/pyatlan/model/assets/asset78.py @@ -4,1529 +4,30 @@ from __future__ import annotations -from typing import ClassVar, Optional +from typing import ClassVar from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import ( - KeywordField, - KeywordTextField, - RelationField, -) +from .asset50 import Thoughtspot -from .asset51 import MicroStrategy - -class MicroStrategyReport(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyReport", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyReport": - raise ValueError("must be MicroStrategyReport") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyReport._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_REPORT_TYPE: ClassVar[KeywordField] = KeywordField( - "microStrategyReportType", "microStrategyReportType" - ) - """ - Whether the report is a Grid or Chart report - """ - - MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( - "microStrategyMetrics" - ) - """ - TBC - """ - MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( - "microStrategyProject" - ) - """ - TBC - """ - MICRO_STRATEGY_ATTRIBUTES: ClassVar[RelationField] = RelationField( - "microStrategyAttributes" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_report_type", - "micro_strategy_metrics", - "micro_strategy_project", - "micro_strategy_attributes", - ] - - @property - def micro_strategy_report_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_report_type - ) - - @micro_strategy_report_type.setter - def micro_strategy_report_type(self, micro_strategy_report_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_report_type = micro_strategy_report_type - - @property - def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_metrics - ) - - @micro_strategy_metrics.setter - def micro_strategy_metrics( - self, micro_strategy_metrics: 
Optional[list[MicroStrategyMetric]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metrics = micro_strategy_metrics - - @property - def micro_strategy_project(self) -> Optional[MicroStrategyProject]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_project - ) - - @micro_strategy_project.setter - def micro_strategy_project( - self, micro_strategy_project: Optional[MicroStrategyProject] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_project = micro_strategy_project - - @property - def micro_strategy_attributes(self) -> Optional[list[MicroStrategyAttribute]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_attributes - ) - - @micro_strategy_attributes.setter - def micro_strategy_attributes( - self, micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_attributes = micro_strategy_attributes - - class Attributes(MicroStrategy.Attributes): - micro_strategy_report_type: Optional[str] = Field( - None, description="", alias="microStrategyReportType" - ) - micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( - None, description="", alias="microStrategyMetrics" - ) # relationship - micro_strategy_project: Optional[MicroStrategyProject] = Field( - None, description="", alias="microStrategyProject" - ) # relationship - micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] = Field( - None, description="", alias="microStrategyAttributes" - ) # relationship - - attributes: "MicroStrategyReport.Attributes" = Field( - default_factory=lambda: MicroStrategyReport.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MicroStrategyProject(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyProject", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyProject": - raise ValueError("must be MicroStrategyProject") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyProject._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_REPORTS: ClassVar[RelationField] = RelationField( - "microStrategyReports" - ) - """ - TBC - """ - MICRO_STRATEGY_FACTS: ClassVar[RelationField] = RelationField("microStrategyFacts") - """ - TBC - """ - MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( - "microStrategyMetrics" - ) - """ - TBC - """ - MICRO_STRATEGY_VISUALIZATIONS: ClassVar[RelationField] = RelationField( - "microStrategyVisualizations" - ) - """ - TBC - """ - MICRO_STRATEGY_DOCUMENTS: ClassVar[RelationField] = RelationField( - "microStrategyDocuments" - ) - """ - TBC - """ - MICRO_STRATEGY_CUBES: ClassVar[RelationField] = RelationField("microStrategyCubes") - """ - TBC - """ - MICRO_STRATEGY_DOSSIERS: ClassVar[RelationField] = RelationField( - "microStrategyDossiers" - ) - """ - TBC - """ - MICRO_STRATEGY_ATTRIBUTES: ClassVar[RelationField] = RelationField( - "microStrategyAttributes" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_reports", - "micro_strategy_facts", - "micro_strategy_metrics", - "micro_strategy_visualizations", - "micro_strategy_documents", - "micro_strategy_cubes", - "micro_strategy_dossiers", - "micro_strategy_attributes", - ] - - @property - def micro_strategy_reports(self) -> Optional[list[MicroStrategyReport]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_reports - ) - - @micro_strategy_reports.setter - def micro_strategy_reports( - self, micro_strategy_reports: Optional[list[MicroStrategyReport]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_reports = micro_strategy_reports - - @property - def micro_strategy_facts(self) -> Optional[list[MicroStrategyFact]]: - return None if self.attributes is None else self.attributes.micro_strategy_facts - - @micro_strategy_facts.setter - def micro_strategy_facts( - self, micro_strategy_facts: Optional[list[MicroStrategyFact]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_facts = micro_strategy_facts - - @property - def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_metrics - ) - - @micro_strategy_metrics.setter - def micro_strategy_metrics( - self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metrics = micro_strategy_metrics - - @property - def micro_strategy_visualizations( - self, - ) -> Optional[list[MicroStrategyVisualization]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_visualizations - ) - - @micro_strategy_visualizations.setter - def micro_strategy_visualizations( - self, micro_strategy_visualizations: Optional[list[MicroStrategyVisualization]] - ): - if self.attributes is None: - 
self.attributes = self.Attributes() - self.attributes.micro_strategy_visualizations = micro_strategy_visualizations - - @property - def micro_strategy_documents(self) -> Optional[list[MicroStrategyDocument]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_documents - ) - - @micro_strategy_documents.setter - def micro_strategy_documents( - self, micro_strategy_documents: Optional[list[MicroStrategyDocument]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_documents = micro_strategy_documents - - @property - def micro_strategy_cubes(self) -> Optional[list[MicroStrategyCube]]: - return None if self.attributes is None else self.attributes.micro_strategy_cubes - - @micro_strategy_cubes.setter - def micro_strategy_cubes( - self, micro_strategy_cubes: Optional[list[MicroStrategyCube]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_cubes = micro_strategy_cubes - - @property - def micro_strategy_dossiers(self) -> Optional[list[MicroStrategyDossier]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_dossiers - ) - - @micro_strategy_dossiers.setter - def micro_strategy_dossiers( - self, micro_strategy_dossiers: Optional[list[MicroStrategyDossier]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_dossiers = micro_strategy_dossiers - - @property - def micro_strategy_attributes(self) -> Optional[list[MicroStrategyAttribute]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_attributes - ) - - @micro_strategy_attributes.setter - def micro_strategy_attributes( - self, micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_attributes = micro_strategy_attributes - - class Attributes(MicroStrategy.Attributes): - micro_strategy_reports: Optional[list[MicroStrategyReport]] = Field( - None, description="", alias="microStrategyReports" - ) # relationship - micro_strategy_facts: Optional[list[MicroStrategyFact]] = Field( - None, description="", alias="microStrategyFacts" - ) # relationship - micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( - None, description="", alias="microStrategyMetrics" - ) # relationship - micro_strategy_visualizations: Optional[ - list[MicroStrategyVisualization] - ] = Field( - None, description="", alias="microStrategyVisualizations" - ) # relationship - micro_strategy_documents: Optional[list[MicroStrategyDocument]] = Field( - None, description="", alias="microStrategyDocuments" - ) # relationship - micro_strategy_cubes: Optional[list[MicroStrategyCube]] = Field( - None, description="", alias="microStrategyCubes" - ) # relationship - micro_strategy_dossiers: Optional[list[MicroStrategyDossier]] = Field( - None, description="", alias="microStrategyDossiers" - ) # relationship - micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] = Field( - None, description="", alias="microStrategyAttributes" - ) # relationship - - attributes: "MicroStrategyProject.Attributes" = Field( - default_factory=lambda: MicroStrategyProject.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MicroStrategyMetric(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyMetric", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyMetric": - raise ValueError("must be MicroStrategyMetric") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyMetric._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_METRIC_EXPRESSION: ClassVar[KeywordField] = KeywordField( - "microStrategyMetricExpression", "microStrategyMetricExpression" - ) - """ - Metric expression text - """ - MICRO_STRATEGY_ATTRIBUTE_QUALIFIED_NAMES: ClassVar[ - KeywordTextField - ] = KeywordTextField( - "microStrategyAttributeQualifiedNames", - "microStrategyAttributeQualifiedNames", - "microStrategyAttributeQualifiedNames.text", - ) - """ - Related attribute qualified name list - """ - MICRO_STRATEGY_ATTRIBUTE_NAMES: ClassVar[KeywordTextField] = KeywordTextField( - "microStrategyAttributeNames", - "microStrategyAttributeNames.keyword", - "microStrategyAttributeNames", - ) - """ - Related attribute name list - """ - MICRO_STRATEGY_FACT_QUALIFIED_NAMES: ClassVar[KeywordTextField] = KeywordTextField( - "microStrategyFactQualifiedNames", - "microStrategyFactQualifiedNames", - "microStrategyFactQualifiedNames.text", - ) - """ - Related fact qualified name list - """ - MICRO_STRATEGY_FACT_NAMES: ClassVar[KeywordTextField] = KeywordTextField( - "microStrategyFactNames", - "microStrategyFactNames.keyword", - "microStrategyFactNames", - ) - """ - Related fact name list - """ - MICRO_STRATEGY_METRIC_PARENT_QUALIFIED_NAMES: ClassVar[ - KeywordTextField - ] = KeywordTextField( - "microStrategyMetricParentQualifiedNames", - "microStrategyMetricParentQualifiedNames", - "microStrategyMetricParentQualifiedNames.text", - ) - """ - Related parent metric qualified name list - """ - MICRO_STRATEGY_METRIC_PARENT_NAMES: ClassVar[KeywordTextField] = KeywordTextField( - "microStrategyMetricParentNames", - "microStrategyMetricParentNames.keyword", - "microStrategyMetricParentNames", - ) - """ - Related parent metric name list - """ - - MICRO_STRATEGY_METRIC_PARENTS: ClassVar[RelationField] = RelationField( - "microStrategyMetricParents" - ) - """ - TBC - """ - MICRO_STRATEGY_FACTS: ClassVar[RelationField] = RelationField("microStrategyFacts") - """ - TBC - """ - MICRO_STRATEGY_REPORTS: ClassVar[RelationField] = RelationField( - "microStrategyReports" - ) - """ - TBC - """ - MICRO_STRATEGY_CUBES: ClassVar[RelationField] = RelationField("microStrategyCubes") - """ - TBC - """ - MICRO_STRATEGY_METRIC_CHILDREN: ClassVar[RelationField] = RelationField( - "microStrategyMetricChildren" - ) - """ - TBC - """ - MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( - "microStrategyProject" - ) - """ - TBC - """ - MICRO_STRATEGY_ATTRIBUTES: ClassVar[RelationField] = RelationField( - "microStrategyAttributes" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_metric_expression", - "micro_strategy_attribute_qualified_names", - "micro_strategy_attribute_names", - "micro_strategy_fact_qualified_names", - "micro_strategy_fact_names", - "micro_strategy_metric_parent_qualified_names", - "micro_strategy_metric_parent_names", - "micro_strategy_metric_parents", - "micro_strategy_facts", - "micro_strategy_reports", - 
"micro_strategy_cubes", - "micro_strategy_metric_children", - "micro_strategy_project", - "micro_strategy_attributes", - ] - - @property - def micro_strategy_metric_expression(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_metric_expression - ) - - @micro_strategy_metric_expression.setter - def micro_strategy_metric_expression( - self, micro_strategy_metric_expression: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metric_expression = ( - micro_strategy_metric_expression - ) - - @property - def micro_strategy_attribute_qualified_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_attribute_qualified_names - ) - - @micro_strategy_attribute_qualified_names.setter - def micro_strategy_attribute_qualified_names( - self, micro_strategy_attribute_qualified_names: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_attribute_qualified_names = ( - micro_strategy_attribute_qualified_names - ) - - @property - def micro_strategy_attribute_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_attribute_names - ) - - @micro_strategy_attribute_names.setter - def micro_strategy_attribute_names( - self, micro_strategy_attribute_names: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_attribute_names = micro_strategy_attribute_names - - @property - def micro_strategy_fact_qualified_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_fact_qualified_names - ) - - @micro_strategy_fact_qualified_names.setter - def micro_strategy_fact_qualified_names( - self, micro_strategy_fact_qualified_names: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_fact_qualified_names = ( - micro_strategy_fact_qualified_names - ) - - @property - def micro_strategy_fact_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_fact_names - ) - - @micro_strategy_fact_names.setter - def micro_strategy_fact_names(self, micro_strategy_fact_names: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_fact_names = micro_strategy_fact_names - - @property - def micro_strategy_metric_parent_qualified_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_metric_parent_qualified_names - ) - - @micro_strategy_metric_parent_qualified_names.setter - def micro_strategy_metric_parent_qualified_names( - self, micro_strategy_metric_parent_qualified_names: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metric_parent_qualified_names = ( - micro_strategy_metric_parent_qualified_names - ) - - @property - def micro_strategy_metric_parent_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_metric_parent_names - ) - - @micro_strategy_metric_parent_names.setter - def micro_strategy_metric_parent_names( - self, micro_strategy_metric_parent_names: Optional[set[str]] - ): - 
if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metric_parent_names = ( - micro_strategy_metric_parent_names - ) - - @property - def micro_strategy_metric_parents(self) -> Optional[list[MicroStrategyMetric]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_metric_parents - ) - - @micro_strategy_metric_parents.setter - def micro_strategy_metric_parents( - self, micro_strategy_metric_parents: Optional[list[MicroStrategyMetric]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metric_parents = micro_strategy_metric_parents - - @property - def micro_strategy_facts(self) -> Optional[list[MicroStrategyFact]]: - return None if self.attributes is None else self.attributes.micro_strategy_facts - - @micro_strategy_facts.setter - def micro_strategy_facts( - self, micro_strategy_facts: Optional[list[MicroStrategyFact]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_facts = micro_strategy_facts - - @property - def micro_strategy_reports(self) -> Optional[list[MicroStrategyReport]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_reports - ) - - @micro_strategy_reports.setter - def micro_strategy_reports( - self, micro_strategy_reports: Optional[list[MicroStrategyReport]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_reports = micro_strategy_reports - - @property - def micro_strategy_cubes(self) -> Optional[list[MicroStrategyCube]]: - return None if self.attributes is None else self.attributes.micro_strategy_cubes - - @micro_strategy_cubes.setter - def micro_strategy_cubes( - self, micro_strategy_cubes: Optional[list[MicroStrategyCube]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_cubes = micro_strategy_cubes - - @property - def micro_strategy_metric_children(self) -> Optional[list[MicroStrategyMetric]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_metric_children - ) - - @micro_strategy_metric_children.setter - def micro_strategy_metric_children( - self, micro_strategy_metric_children: Optional[list[MicroStrategyMetric]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metric_children = micro_strategy_metric_children - - @property - def micro_strategy_project(self) -> Optional[MicroStrategyProject]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_project - ) - - @micro_strategy_project.setter - def micro_strategy_project( - self, micro_strategy_project: Optional[MicroStrategyProject] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_project = micro_strategy_project - - @property - def micro_strategy_attributes(self) -> Optional[list[MicroStrategyAttribute]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_attributes - ) - - @micro_strategy_attributes.setter - def micro_strategy_attributes( - self, micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_attributes = micro_strategy_attributes - - class Attributes(MicroStrategy.Attributes): - micro_strategy_metric_expression: Optional[str] = Field( - None, description="", 
alias="microStrategyMetricExpression" - ) - micro_strategy_attribute_qualified_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyAttributeQualifiedNames" - ) - micro_strategy_attribute_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyAttributeNames" - ) - micro_strategy_fact_qualified_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyFactQualifiedNames" - ) - micro_strategy_fact_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyFactNames" - ) - micro_strategy_metric_parent_qualified_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyMetricParentQualifiedNames" - ) - micro_strategy_metric_parent_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyMetricParentNames" - ) - micro_strategy_metric_parents: Optional[list[MicroStrategyMetric]] = Field( - None, description="", alias="microStrategyMetricParents" - ) # relationship - micro_strategy_facts: Optional[list[MicroStrategyFact]] = Field( - None, description="", alias="microStrategyFacts" - ) # relationship - micro_strategy_reports: Optional[list[MicroStrategyReport]] = Field( - None, description="", alias="microStrategyReports" - ) # relationship - micro_strategy_cubes: Optional[list[MicroStrategyCube]] = Field( - None, description="", alias="microStrategyCubes" - ) # relationship - micro_strategy_metric_children: Optional[list[MicroStrategyMetric]] = Field( - None, description="", alias="microStrategyMetricChildren" - ) # relationship - micro_strategy_project: Optional[MicroStrategyProject] = Field( - None, description="", alias="microStrategyProject" - ) # relationship - micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] = Field( - None, description="", alias="microStrategyAttributes" - ) # relationship - - attributes: "MicroStrategyMetric.Attributes" = Field( - default_factory=lambda: MicroStrategyMetric.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MicroStrategyCube(MicroStrategy): +class ThoughtspotAnswer(Thoughtspot): """Description""" - type_name: str = Field("MicroStrategyCube", allow_mutation=False) + type_name: str = Field("ThoughtspotAnswer", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "MicroStrategyCube": - raise ValueError("must be MicroStrategyCube") + if v != "ThoughtspotAnswer": + raise ValueError("must be ThoughtspotAnswer") return v def __setattr__(self, name, value): - if name in MicroStrategyCube._convenience_properties: + if name in ThoughtspotAnswer._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - MICRO_STRATEGY_CUBE_TYPE: ClassVar[KeywordField] = KeywordField( - "microStrategyCubeType", "microStrategyCubeType" - ) - """ - Whether the cube is an OLAP or MTDI cube - """ - MICRO_STRATEGY_CUBE_QUERY: ClassVar[KeywordField] = KeywordField( - "microStrategyCubeQuery", "microStrategyCubeQuery" - ) - """ - The query used to create the cube - """ - - MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( - "microStrategyMetrics" - ) - """ - TBC - """ - MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( - "microStrategyProject" - ) - """ - TBC - """ - MICRO_STRATEGY_ATTRIBUTES: ClassVar[RelationField] = RelationField( - "microStrategyAttributes" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_cube_type", - "micro_strategy_cube_query", - "micro_strategy_metrics", - "micro_strategy_project", - "micro_strategy_attributes", - ] - - @property - def micro_strategy_cube_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_cube_type - ) - - @micro_strategy_cube_type.setter - def micro_strategy_cube_type(self, micro_strategy_cube_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_cube_type = micro_strategy_cube_type - - @property - def micro_strategy_cube_query(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_cube_query - ) - - @micro_strategy_cube_query.setter - def micro_strategy_cube_query(self, micro_strategy_cube_query: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_cube_query = micro_strategy_cube_query - - @property - def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_metrics - ) - - @micro_strategy_metrics.setter - def micro_strategy_metrics( - self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metrics = micro_strategy_metrics - - @property - def micro_strategy_project(self) -> Optional[MicroStrategyProject]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_project - ) - - @micro_strategy_project.setter - def micro_strategy_project( - self, micro_strategy_project: Optional[MicroStrategyProject] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_project = micro_strategy_project - - @property - def micro_strategy_attributes(self) -> Optional[list[MicroStrategyAttribute]]: - 
return ( - None - if self.attributes is None - else self.attributes.micro_strategy_attributes - ) - - @micro_strategy_attributes.setter - def micro_strategy_attributes( - self, micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_attributes = micro_strategy_attributes - - class Attributes(MicroStrategy.Attributes): - micro_strategy_cube_type: Optional[str] = Field( - None, description="", alias="microStrategyCubeType" - ) - micro_strategy_cube_query: Optional[str] = Field( - None, description="", alias="microStrategyCubeQuery" - ) - micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( - None, description="", alias="microStrategyMetrics" - ) # relationship - micro_strategy_project: Optional[MicroStrategyProject] = Field( - None, description="", alias="microStrategyProject" - ) # relationship - micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] = Field( - None, description="", alias="microStrategyAttributes" - ) # relationship - - attributes: "MicroStrategyCube.Attributes" = Field( - default_factory=lambda: MicroStrategyCube.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MicroStrategyDossier(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyDossier", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyDossier": - raise ValueError("must be MicroStrategyDossier") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyDossier._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_DOSSIER_CHAPTER_NAMES: ClassVar[KeywordField] = KeywordField( - "microStrategyDossierChapterNames", "microStrategyDossierChapterNames" - ) - """ - Dossier chapter name list - """ - - MICRO_STRATEGY_VISUALIZATIONS: ClassVar[RelationField] = RelationField( - "microStrategyVisualizations" - ) - """ - TBC - """ - MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( - "microStrategyProject" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_dossier_chapter_names", - "micro_strategy_visualizations", - "micro_strategy_project", - ] - - @property - def micro_strategy_dossier_chapter_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_dossier_chapter_names - ) - - @micro_strategy_dossier_chapter_names.setter - def micro_strategy_dossier_chapter_names( - self, micro_strategy_dossier_chapter_names: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_dossier_chapter_names = ( - micro_strategy_dossier_chapter_names - ) - - @property - def micro_strategy_visualizations( - self, - ) -> Optional[list[MicroStrategyVisualization]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_visualizations - ) - - @micro_strategy_visualizations.setter - def micro_strategy_visualizations( - self, micro_strategy_visualizations: Optional[list[MicroStrategyVisualization]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_visualizations = micro_strategy_visualizations - - @property - def 
micro_strategy_project(self) -> Optional[MicroStrategyProject]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_project - ) - - @micro_strategy_project.setter - def micro_strategy_project( - self, micro_strategy_project: Optional[MicroStrategyProject] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_project = micro_strategy_project - - class Attributes(MicroStrategy.Attributes): - micro_strategy_dossier_chapter_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyDossierChapterNames" - ) - micro_strategy_visualizations: Optional[ - list[MicroStrategyVisualization] - ] = Field( - None, description="", alias="microStrategyVisualizations" - ) # relationship - micro_strategy_project: Optional[MicroStrategyProject] = Field( - None, description="", alias="microStrategyProject" - ) # relationship - - attributes: "MicroStrategyDossier.Attributes" = Field( - default_factory=lambda: MicroStrategyDossier.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MicroStrategyFact(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyFact", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyFact": - raise ValueError("must be MicroStrategyFact") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyFact._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_FACT_EXPRESSIONS: ClassVar[KeywordField] = KeywordField( - "microStrategyFactExpressions", "microStrategyFactExpressions" - ) - """ - Fact expression list - """ - - MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( - "microStrategyMetrics" - ) - """ - TBC - """ - MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( - "microStrategyProject" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_fact_expressions", - "micro_strategy_metrics", - "micro_strategy_project", - ] - - @property - def micro_strategy_fact_expressions(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_fact_expressions - ) - - @micro_strategy_fact_expressions.setter - def micro_strategy_fact_expressions( - self, micro_strategy_fact_expressions: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_fact_expressions = ( - micro_strategy_fact_expressions - ) - - @property - def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_metrics - ) - - @micro_strategy_metrics.setter - def micro_strategy_metrics( - self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metrics = micro_strategy_metrics - - @property - def micro_strategy_project(self) -> Optional[MicroStrategyProject]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_project - ) - - @micro_strategy_project.setter - def micro_strategy_project( - self, micro_strategy_project: Optional[MicroStrategyProject] - ): - if self.attributes is None: - self.attributes = 
self.Attributes() - self.attributes.micro_strategy_project = micro_strategy_project - - class Attributes(MicroStrategy.Attributes): - micro_strategy_fact_expressions: Optional[set[str]] = Field( - None, description="", alias="microStrategyFactExpressions" - ) - micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( - None, description="", alias="microStrategyMetrics" - ) # relationship - micro_strategy_project: Optional[MicroStrategyProject] = Field( - None, description="", alias="microStrategyProject" - ) # relationship - - attributes: "MicroStrategyFact.Attributes" = Field( - default_factory=lambda: MicroStrategyFact.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MicroStrategyDocument(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyDocument", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyDocument": - raise ValueError("must be MicroStrategyDocument") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyDocument._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( - "microStrategyProject" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_project", - ] - - @property - def micro_strategy_project(self) -> Optional[MicroStrategyProject]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_project - ) - - @micro_strategy_project.setter - def micro_strategy_project( - self, micro_strategy_project: Optional[MicroStrategyProject] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_project = micro_strategy_project - - class Attributes(MicroStrategy.Attributes): - micro_strategy_project: Optional[MicroStrategyProject] = Field( - None, description="", alias="microStrategyProject" - ) # relationship - - attributes: "MicroStrategyDocument.Attributes" = Field( - default_factory=lambda: MicroStrategyDocument.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MicroStrategyAttribute(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyAttribute", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyAttribute": - raise ValueError("must be MicroStrategyAttribute") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyAttribute._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_ATTRIBUTE_FORMS: ClassVar[KeywordField] = KeywordField( - "microStrategyAttributeForms", "microStrategyAttributeForms" - ) - """ - Attribute form name, description, displayFormat and expression as JSON string - """ - - MICRO_STRATEGY_REPORTS: ClassVar[RelationField] = RelationField( - "microStrategyReports" - ) - """ - TBC - """ - MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( - "microStrategyMetrics" - ) - """ - TBC - """ - MICRO_STRATEGY_CUBES: ClassVar[RelationField] = RelationField("microStrategyCubes") - """ - TBC - """ - MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( - "microStrategyProject" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_attribute_forms", - "micro_strategy_reports", - "micro_strategy_metrics", - "micro_strategy_cubes", - "micro_strategy_project", - ] - - @property - def micro_strategy_attribute_forms(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_attribute_forms - ) - - @micro_strategy_attribute_forms.setter - def micro_strategy_attribute_forms( - self, micro_strategy_attribute_forms: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_attribute_forms = micro_strategy_attribute_forms - - @property - def micro_strategy_reports(self) -> Optional[list[MicroStrategyReport]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_reports - ) - - @micro_strategy_reports.setter - def micro_strategy_reports( - self, micro_strategy_reports: Optional[list[MicroStrategyReport]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_reports = micro_strategy_reports - - @property - def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_metrics - ) - - @micro_strategy_metrics.setter - def micro_strategy_metrics( - self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metrics = micro_strategy_metrics - - @property - def micro_strategy_cubes(self) -> Optional[list[MicroStrategyCube]]: - return None if self.attributes is None else self.attributes.micro_strategy_cubes - - @micro_strategy_cubes.setter - def micro_strategy_cubes( - self, micro_strategy_cubes: Optional[list[MicroStrategyCube]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_cubes = micro_strategy_cubes - - @property - def micro_strategy_project(self) -> Optional[MicroStrategyProject]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_project - ) - - @micro_strategy_project.setter - def micro_strategy_project( - self, 
micro_strategy_project: Optional[MicroStrategyProject] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_project = micro_strategy_project - - class Attributes(MicroStrategy.Attributes): - micro_strategy_attribute_forms: Optional[str] = Field( - None, description="", alias="microStrategyAttributeForms" - ) - micro_strategy_reports: Optional[list[MicroStrategyReport]] = Field( - None, description="", alias="microStrategyReports" - ) # relationship - micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( - None, description="", alias="microStrategyMetrics" - ) # relationship - micro_strategy_cubes: Optional[list[MicroStrategyCube]] = Field( - None, description="", alias="microStrategyCubes" - ) # relationship - micro_strategy_project: Optional[MicroStrategyProject] = Field( - None, description="", alias="microStrategyProject" - ) # relationship - - attributes: "MicroStrategyAttribute.Attributes" = Field( - default_factory=lambda: MicroStrategyAttribute.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MicroStrategyVisualization(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyVisualization", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyVisualization": - raise ValueError("must be MicroStrategyVisualization") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyVisualization._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_VISUALIZATION_TYPE: ClassVar[KeywordField] = KeywordField( - "microStrategyVisualizationType", "microStrategyVisualizationType" - ) - """ - Visualization type name - """ - MICRO_STRATEGY_DOSSIER_QUALIFIED_NAME: ClassVar[ - KeywordTextField - ] = KeywordTextField( - "microStrategyDossierQualifiedName", - "microStrategyDossierQualifiedName", - "microStrategyDossierQualifiedName.text", - ) - """ - Parent dossier qualified name - """ - MICRO_STRATEGY_DOSSIER_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "microStrategyDossierName", - "microStrategyDossierName.keyword", - "microStrategyDossierName", - ) - """ - Parent dossier name - """ - - MICRO_STRATEGY_DOSSIER: ClassVar[RelationField] = RelationField( - "microStrategyDossier" - ) - """ - TBC - """ - MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( - "microStrategyProject" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_visualization_type", - "micro_strategy_dossier_qualified_name", - "micro_strategy_dossier_name", - "micro_strategy_dossier", - "micro_strategy_project", - ] - - @property - def micro_strategy_visualization_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_visualization_type - ) - - @micro_strategy_visualization_type.setter - def micro_strategy_visualization_type( - self, micro_strategy_visualization_type: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_visualization_type = ( - micro_strategy_visualization_type - ) - - @property - def micro_strategy_dossier_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_dossier_qualified_name - ) 
- - @micro_strategy_dossier_qualified_name.setter - def micro_strategy_dossier_qualified_name( - self, micro_strategy_dossier_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_dossier_qualified_name = ( - micro_strategy_dossier_qualified_name - ) - - @property - def micro_strategy_dossier_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_dossier_name - ) - - @micro_strategy_dossier_name.setter - def micro_strategy_dossier_name(self, micro_strategy_dossier_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_dossier_name = micro_strategy_dossier_name - - @property - def micro_strategy_dossier(self) -> Optional[MicroStrategyDossier]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_dossier - ) - - @micro_strategy_dossier.setter - def micro_strategy_dossier( - self, micro_strategy_dossier: Optional[MicroStrategyDossier] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_dossier = micro_strategy_dossier - - @property - def micro_strategy_project(self) -> Optional[MicroStrategyProject]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_project - ) - - @micro_strategy_project.setter - def micro_strategy_project( - self, micro_strategy_project: Optional[MicroStrategyProject] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_project = micro_strategy_project - - class Attributes(MicroStrategy.Attributes): - micro_strategy_visualization_type: Optional[str] = Field( - None, description="", alias="microStrategyVisualizationType" - ) - micro_strategy_dossier_qualified_name: Optional[str] = Field( - None, description="", alias="microStrategyDossierQualifiedName" - ) - micro_strategy_dossier_name: Optional[str] = Field( - None, description="", alias="microStrategyDossierName" - ) - micro_strategy_dossier: Optional[MicroStrategyDossier] = Field( - None, description="", alias="microStrategyDossier" - ) # relationship - micro_strategy_project: Optional[MicroStrategyProject] = Field( - None, description="", alias="microStrategyProject" - ) # relationship - - attributes: "MicroStrategyVisualization.Attributes" = Field( - default_factory=lambda: MicroStrategyVisualization.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -MicroStrategyReport.Attributes.update_forward_refs() - - -MicroStrategyProject.Attributes.update_forward_refs() - - -MicroStrategyMetric.Attributes.update_forward_refs() - - -MicroStrategyCube.Attributes.update_forward_refs() - - -MicroStrategyDossier.Attributes.update_forward_refs() - - -MicroStrategyFact.Attributes.update_forward_refs() - - -MicroStrategyDocument.Attributes.update_forward_refs() - - -MicroStrategyAttribute.Attributes.update_forward_refs() + _convenience_properties: ClassVar[list[str]] = [] -MicroStrategyVisualization.Attributes.update_forward_refs() +ThoughtspotAnswer.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset79.py b/pyatlan/model/assets/asset79.py index 2a120965d..6efeca4e8 100644 --- a/pyatlan/model/assets/asset79.py +++ b/pyatlan/model/assets/asset79.py @@ -11,639 +11,1658 @@ from pyatlan.model.fields.atlan_fields import ( BooleanField, KeywordField, - KeywordTextField, NumericField, RelationField, TextField, ) -from .asset52 import Qlik +from .asset51 import PowerBI -class QlikApp(Qlik): +class PowerBIReport(PowerBI): """Description""" - type_name: str = Field("QlikApp", allow_mutation=False) + type_name: str = Field("PowerBIReport", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "QlikApp": - raise ValueError("must be QlikApp") + if v != "PowerBIReport": + raise ValueError("must be PowerBIReport") return v def __setattr__(self, name, value): - if name in QlikApp._convenience_properties: + if name in PowerBIReport._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - QLIK_HAS_SECTION_ACCESS: ClassVar[BooleanField] = BooleanField( - "qlikHasSectionAccess", "qlikHasSectionAccess" + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" ) """ - Whether section access/data masking is enabled on source + Unique name of the workspace in which this report exists. """ - QLIK_ORIGIN_APP_ID: ClassVar[KeywordField] = KeywordField( - "qlikOriginAppId", "qlikOriginAppId" + DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "datasetQualifiedName", "datasetQualifiedName" ) """ - originAppId value for a qlik app + Unique name of the dataset used to build this report. """ - QLIK_IS_ENCRYPTED: ClassVar[BooleanField] = BooleanField( - "qlikIsEncrypted", "qlikIsEncrypted" - ) + WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") """ - Whether a qlik app is encrypted + Deprecated. See 'sourceUrl' instead. """ - QLIK_IS_DIRECT_QUERY_MODE: ClassVar[BooleanField] = BooleanField( - "qlikIsDirectQueryMode", "qlikIsDirectQueryMode" - ) + PAGE_COUNT: ClassVar[NumericField] = NumericField("pageCount", "pageCount") """ - Whether a qlik app is in direct query mode + Number of pages in this report. 
""" - QLIK_APP_STATIC_BYTE_SIZE: ClassVar[NumericField] = NumericField( - "qlikAppStaticByteSize", "qlikAppStaticByteSize" - ) + + WORKSPACE: ClassVar[RelationField] = RelationField("workspace") """ - Static space taken by a qlik app + TBC """ - - QLIK_SPACE: ClassVar[RelationField] = RelationField("qlikSpace") + TILES: ClassVar[RelationField] = RelationField("tiles") + """ + TBC + """ + PAGES: ClassVar[RelationField] = RelationField("pages") """ TBC """ - QLIK_SHEETS: ClassVar[RelationField] = RelationField("qlikSheets") + DATASET: ClassVar[RelationField] = RelationField("dataset") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "qlik_has_section_access", - "qlik_origin_app_id", - "qlik_is_encrypted", - "qlik_is_direct_query_mode", - "qlik_app_static_byte_size", - "qlik_space", - "qlik_sheets", + "workspace_qualified_name", + "dataset_qualified_name", + "web_url", + "page_count", + "workspace", + "tiles", + "pages", + "dataset", ] @property - def qlik_has_section_access(self) -> Optional[bool]: + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def dataset_qualified_name(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.qlik_has_section_access + None if self.attributes is None else self.attributes.dataset_qualified_name ) - @qlik_has_section_access.setter - def qlik_has_section_access(self, qlik_has_section_access: Optional[bool]): + @dataset_qualified_name.setter + def dataset_qualified_name(self, dataset_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataset_qualified_name = dataset_qualified_name + + @property + def web_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.web_url + + @web_url.setter + def web_url(self, web_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.web_url = web_url + + @property + def page_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.page_count + + @page_count.setter + def page_count(self, page_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.page_count = page_count + + @property + def workspace(self) -> Optional[PowerBIWorkspace]: + return None if self.attributes is None else self.attributes.workspace + + @workspace.setter + def workspace(self, workspace: Optional[PowerBIWorkspace]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace = workspace + + @property + def tiles(self) -> Optional[list[PowerBITile]]: + return None if self.attributes is None else self.attributes.tiles + + @tiles.setter + def tiles(self, tiles: Optional[list[PowerBITile]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_has_section_access = qlik_has_section_access + self.attributes.tiles = tiles @property - def qlik_origin_app_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_origin_app_id + def pages(self) -> Optional[list[PowerBIPage]]: + return None if self.attributes is 
None else self.attributes.pages - @qlik_origin_app_id.setter - def qlik_origin_app_id(self, qlik_origin_app_id: Optional[str]): + @pages.setter + def pages(self, pages: Optional[list[PowerBIPage]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_origin_app_id = qlik_origin_app_id + self.attributes.pages = pages @property - def qlik_is_encrypted(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.qlik_is_encrypted + def dataset(self) -> Optional[PowerBIDataset]: + return None if self.attributes is None else self.attributes.dataset - @qlik_is_encrypted.setter - def qlik_is_encrypted(self, qlik_is_encrypted: Optional[bool]): + @dataset.setter + def dataset(self, dataset: Optional[PowerBIDataset]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_is_encrypted = qlik_is_encrypted + self.attributes.dataset = dataset + + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field( + None, description="", alias="workspaceQualifiedName" + ) + dataset_qualified_name: Optional[str] = Field( + None, description="", alias="datasetQualifiedName" + ) + web_url: Optional[str] = Field(None, description="", alias="webUrl") + page_count: Optional[int] = Field(None, description="", alias="pageCount") + workspace: Optional[PowerBIWorkspace] = Field( + None, description="", alias="workspace" + ) # relationship + tiles: Optional[list[PowerBITile]] = Field( + None, description="", alias="tiles" + ) # relationship + pages: Optional[list[PowerBIPage]] = Field( + None, description="", alias="pages" + ) # relationship + dataset: Optional[PowerBIDataset] = Field( + None, description="", alias="dataset" + ) # relationship + + attributes: "PowerBIReport.Attributes" = Field( + default_factory=lambda: PowerBIReport.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class PowerBIMeasure(PowerBI): + """Description""" + + type_name: str = Field("PowerBIMeasure", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBIMeasure": + raise ValueError("must be PowerBIMeasure") + return v + + def __setattr__(self, name, value): + if name in PowerBIMeasure._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" + ) + """ + Unique name of the workspace in which this measure exists. + """ + DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "datasetQualifiedName", "datasetQualifiedName" + ) + """ + Unique name of the dataset in which this measure exists. + """ + POWER_BI_MEASURE_EXPRESSION: ClassVar[TextField] = TextField( + "powerBIMeasureExpression", "powerBIMeasureExpression" + ) + """ + DAX expression for this measure. + """ + POWER_BI_IS_EXTERNAL_MEASURE: ClassVar[BooleanField] = BooleanField( + "powerBIIsExternalMeasure", "powerBIIsExternalMeasure" + ) + """ + Whether this measure is external (true) or internal (false). 
+ """ + + TABLE: ClassVar[RelationField] = RelationField("table") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "workspace_qualified_name", + "dataset_qualified_name", + "power_b_i_measure_expression", + "power_b_i_is_external_measure", + "table", + ] @property - def qlik_is_direct_query_mode(self) -> Optional[bool]: + def workspace_qualified_name(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.qlik_is_direct_query_mode + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def dataset_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.dataset_qualified_name ) - @qlik_is_direct_query_mode.setter - def qlik_is_direct_query_mode(self, qlik_is_direct_query_mode: Optional[bool]): + @dataset_qualified_name.setter + def dataset_qualified_name(self, dataset_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_is_direct_query_mode = qlik_is_direct_query_mode + self.attributes.dataset_qualified_name = dataset_qualified_name @property - def qlik_app_static_byte_size(self) -> Optional[int]: + def power_b_i_measure_expression(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.qlik_app_static_byte_size + else self.attributes.power_b_i_measure_expression ) - @qlik_app_static_byte_size.setter - def qlik_app_static_byte_size(self, qlik_app_static_byte_size: Optional[int]): + @power_b_i_measure_expression.setter + def power_b_i_measure_expression(self, power_b_i_measure_expression: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_app_static_byte_size = qlik_app_static_byte_size + self.attributes.power_b_i_measure_expression = power_b_i_measure_expression @property - def qlik_space(self) -> Optional[QlikSpace]: - return None if self.attributes is None else self.attributes.qlik_space + def power_b_i_is_external_measure(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.power_b_i_is_external_measure + ) - @qlik_space.setter - def qlik_space(self, qlik_space: Optional[QlikSpace]): + @power_b_i_is_external_measure.setter + def power_b_i_is_external_measure( + self, power_b_i_is_external_measure: Optional[bool] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_space = qlik_space + self.attributes.power_b_i_is_external_measure = power_b_i_is_external_measure @property - def qlik_sheets(self) -> Optional[list[QlikSheet]]: - return None if self.attributes is None else self.attributes.qlik_sheets + def table(self) -> Optional[PowerBITable]: + return None if self.attributes is None else self.attributes.table - @qlik_sheets.setter - def qlik_sheets(self, qlik_sheets: Optional[list[QlikSheet]]): + @table.setter + def table(self, table: Optional[PowerBITable]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_sheets = qlik_sheets + self.attributes.table = table - class Attributes(Qlik.Attributes): - qlik_has_section_access: Optional[bool] = Field( - None, description="", alias="qlikHasSectionAccess" + class Attributes(PowerBI.Attributes): + 
workspace_qualified_name: Optional[str] = Field( + None, description="", alias="workspaceQualifiedName" ) - qlik_origin_app_id: Optional[str] = Field( - None, description="", alias="qlikOriginAppId" + dataset_qualified_name: Optional[str] = Field( + None, description="", alias="datasetQualifiedName" ) - qlik_is_encrypted: Optional[bool] = Field( - None, description="", alias="qlikIsEncrypted" + power_b_i_measure_expression: Optional[str] = Field( + None, description="", alias="powerBIMeasureExpression" ) - qlik_is_direct_query_mode: Optional[bool] = Field( - None, description="", alias="qlikIsDirectQueryMode" + power_b_i_is_external_measure: Optional[bool] = Field( + None, description="", alias="powerBIIsExternalMeasure" ) - qlik_app_static_byte_size: Optional[int] = Field( - None, description="", alias="qlikAppStaticByteSize" - ) - qlik_space: Optional[QlikSpace] = Field( - None, description="", alias="qlikSpace" - ) # relationship - qlik_sheets: Optional[list[QlikSheet]] = Field( - None, description="", alias="qlikSheets" + table: Optional[PowerBITable] = Field( + None, description="", alias="table" ) # relationship - attributes: "QlikApp.Attributes" = Field( - default_factory=lambda: QlikApp.Attributes(), + attributes: "PowerBIMeasure.Attributes" = Field( + default_factory=lambda: PowerBIMeasure.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class QlikChart(Qlik): +class PowerBIColumn(PowerBI): """Description""" - type_name: str = Field("QlikChart", allow_mutation=False) + type_name: str = Field("PowerBIColumn", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "QlikChart": - raise ValueError("must be QlikChart") + if v != "PowerBIColumn": + raise ValueError("must be PowerBIColumn") return v def __setattr__(self, name, value): - if name in QlikChart._convenience_properties: + if name in PowerBIColumn._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - QLIK_CHART_SUBTITLE: ClassVar[TextField] = TextField( - "qlikChartSubtitle", "qlikChartSubtitle" + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" + ) + """ + Unique name of the workspace in which this column exists. + """ + DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "datasetQualifiedName", "datasetQualifiedName" + ) + """ + Unique name of the dataset in which this column exists. + """ + POWER_BI_COLUMN_DATA_CATEGORY: ClassVar[KeywordField] = KeywordField( + "powerBIColumnDataCategory", "powerBIColumnDataCategory" ) """ - Subtitle of a qlik chart + Data category that describes the data in this column. """ - QLIK_CHART_FOOTNOTE: ClassVar[TextField] = TextField( - "qlikChartFootnote", "qlikChartFootnote" + POWER_BI_COLUMN_DATA_TYPE: ClassVar[KeywordField] = KeywordField( + "powerBIColumnDataType", "powerBIColumnDataType" ) """ - Footnote of a qlik chart + Data type of this column. """ - QLIK_CHART_ORIENTATION: ClassVar[KeywordField] = KeywordField( - "qlikChartOrientation", "qlikChartOrientation" + POWER_BI_SORT_BY_COLUMN: ClassVar[KeywordField] = KeywordField( + "powerBISortByColumn", "powerBISortByColumn" ) """ - Orientation of a qlik chart + Name of a column in the same table to use to order this column. 
""" - QLIK_CHART_TYPE: ClassVar[KeywordField] = KeywordField( - "qlikChartType", "qlikChartType" + POWER_BI_COLUMN_SUMMARIZE_BY: ClassVar[KeywordField] = KeywordField( + "powerBIColumnSummarizeBy", "powerBIColumnSummarizeBy" ) """ - Subtype of an qlik chart. E.g. bar, graph, pie etc + Aggregate function to use for summarizing this column. """ - QLIK_SHEET: ClassVar[RelationField] = RelationField("qlikSheet") + TABLE: ClassVar[RelationField] = RelationField("table") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "qlik_chart_subtitle", - "qlik_chart_footnote", - "qlik_chart_orientation", - "qlik_chart_type", - "qlik_sheet", + "workspace_qualified_name", + "dataset_qualified_name", + "power_b_i_column_data_category", + "power_b_i_column_data_type", + "power_b_i_sort_by_column", + "power_b_i_column_summarize_by", + "table", ] @property - def qlik_chart_subtitle(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_chart_subtitle + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def dataset_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.dataset_qualified_name + ) + + @dataset_qualified_name.setter + def dataset_qualified_name(self, dataset_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataset_qualified_name = dataset_qualified_name + + @property + def power_b_i_column_data_category(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.power_b_i_column_data_category + ) - @qlik_chart_subtitle.setter - def qlik_chart_subtitle(self, qlik_chart_subtitle: Optional[str]): + @power_b_i_column_data_category.setter + def power_b_i_column_data_category( + self, power_b_i_column_data_category: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_chart_subtitle = qlik_chart_subtitle + self.attributes.power_b_i_column_data_category = power_b_i_column_data_category @property - def qlik_chart_footnote(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_chart_footnote + def power_b_i_column_data_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.power_b_i_column_data_type + ) - @qlik_chart_footnote.setter - def qlik_chart_footnote(self, qlik_chart_footnote: Optional[str]): + @power_b_i_column_data_type.setter + def power_b_i_column_data_type(self, power_b_i_column_data_type: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_chart_footnote = qlik_chart_footnote + self.attributes.power_b_i_column_data_type = power_b_i_column_data_type @property - def qlik_chart_orientation(self) -> Optional[str]: + def power_b_i_sort_by_column(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.qlik_chart_orientation + None + if self.attributes is None + else self.attributes.power_b_i_sort_by_column ) - @qlik_chart_orientation.setter - def qlik_chart_orientation(self, qlik_chart_orientation: Optional[str]): + 
@power_b_i_sort_by_column.setter + def power_b_i_sort_by_column(self, power_b_i_sort_by_column: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_chart_orientation = qlik_chart_orientation + self.attributes.power_b_i_sort_by_column = power_b_i_sort_by_column @property - def qlik_chart_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_chart_type + def power_b_i_column_summarize_by(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.power_b_i_column_summarize_by + ) - @qlik_chart_type.setter - def qlik_chart_type(self, qlik_chart_type: Optional[str]): + @power_b_i_column_summarize_by.setter + def power_b_i_column_summarize_by( + self, power_b_i_column_summarize_by: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_chart_type = qlik_chart_type + self.attributes.power_b_i_column_summarize_by = power_b_i_column_summarize_by @property - def qlik_sheet(self) -> Optional[QlikSheet]: - return None if self.attributes is None else self.attributes.qlik_sheet + def table(self) -> Optional[PowerBITable]: + return None if self.attributes is None else self.attributes.table - @qlik_sheet.setter - def qlik_sheet(self, qlik_sheet: Optional[QlikSheet]): + @table.setter + def table(self, table: Optional[PowerBITable]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_sheet = qlik_sheet + self.attributes.table = table - class Attributes(Qlik.Attributes): - qlik_chart_subtitle: Optional[str] = Field( - None, description="", alias="qlikChartSubtitle" + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field( + None, description="", alias="workspaceQualifiedName" + ) + dataset_qualified_name: Optional[str] = Field( + None, description="", alias="datasetQualifiedName" + ) + power_b_i_column_data_category: Optional[str] = Field( + None, description="", alias="powerBIColumnDataCategory" ) - qlik_chart_footnote: Optional[str] = Field( - None, description="", alias="qlikChartFootnote" + power_b_i_column_data_type: Optional[str] = Field( + None, description="", alias="powerBIColumnDataType" ) - qlik_chart_orientation: Optional[str] = Field( - None, description="", alias="qlikChartOrientation" + power_b_i_sort_by_column: Optional[str] = Field( + None, description="", alias="powerBISortByColumn" ) - qlik_chart_type: Optional[str] = Field( - None, description="", alias="qlikChartType" + power_b_i_column_summarize_by: Optional[str] = Field( + None, description="", alias="powerBIColumnSummarizeBy" ) - qlik_sheet: Optional[QlikSheet] = Field( - None, description="", alias="qlikSheet" + table: Optional[PowerBITable] = Field( + None, description="", alias="table" ) # relationship - attributes: "QlikChart.Attributes" = Field( - default_factory=lambda: QlikChart.Attributes(), + attributes: "PowerBIColumn.Attributes" = Field( + default_factory=lambda: PowerBIColumn.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class QlikDataset(Qlik): +class PowerBITable(PowerBI): """Description""" - type_name: str = Field("QlikDataset", allow_mutation=False) + type_name: str = Field("PowerBITable", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "QlikDataset": - raise ValueError("must be QlikDataset") + if v != "PowerBITable": + raise ValueError("must be PowerBITable") return v def __setattr__(self, name, value): - if name in QlikDataset._convenience_properties: + if name in PowerBITable._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - QLIK_DATASET_TECHNICAL_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "qlikDatasetTechnicalName", - "qlikDatasetTechnicalName.keyword", - "qlikDatasetTechnicalName", + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" ) """ - Technical name of a qlik data asset + Unique name of the workspace in which this table exists. """ - QLIK_DATASET_TYPE: ClassVar[KeywordField] = KeywordField( - "qlikDatasetType", "qlikDatasetType" + DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "datasetQualifiedName", "datasetQualifiedName" ) """ - Type of an qlik data asset. E.g. qix-df, snowflake etc + Unique name of the dataset in which this table exists. """ - QLIK_DATASET_URI: ClassVar[KeywordTextField] = KeywordTextField( - "qlikDatasetUri", "qlikDatasetUri", "qlikDatasetUri.text" + POWER_BI_TABLE_SOURCE_EXPRESSIONS: ClassVar[KeywordField] = KeywordField( + "powerBITableSourceExpressions", "powerBITableSourceExpressions" ) """ - URI of a qlik dataset + Power Query M expressions for the table. """ - QLIK_DATASET_SUBTYPE: ClassVar[KeywordField] = KeywordField( - "qlikDatasetSubtype", "qlikDatasetSubtype" + POWER_BI_TABLE_COLUMN_COUNT: ClassVar[NumericField] = NumericField( + "powerBITableColumnCount", "powerBITableColumnCount" ) """ - Subtype of an qlik dataset asset + Number of columns in this table. + """ + POWER_BI_TABLE_MEASURE_COUNT: ClassVar[NumericField] = NumericField( + "powerBITableMeasureCount", "powerBITableMeasureCount" + ) + """ + Number of measures in this table. 
""" - QLIK_SPACE: ClassVar[RelationField] = RelationField("qlikSpace") + COLUMNS: ClassVar[RelationField] = RelationField("columns") + """ + TBC + """ + MEASURES: ClassVar[RelationField] = RelationField("measures") + """ + TBC + """ + DATASET: ClassVar[RelationField] = RelationField("dataset") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "qlik_dataset_technical_name", - "qlik_dataset_type", - "qlik_dataset_uri", - "qlik_dataset_subtype", - "qlik_space", + "workspace_qualified_name", + "dataset_qualified_name", + "power_b_i_table_source_expressions", + "power_b_i_table_column_count", + "power_b_i_table_measure_count", + "columns", + "measures", + "dataset", ] @property - def qlik_dataset_technical_name(self) -> Optional[str]: + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def dataset_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.dataset_qualified_name + ) + + @dataset_qualified_name.setter + def dataset_qualified_name(self, dataset_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataset_qualified_name = dataset_qualified_name + + @property + def power_b_i_table_source_expressions(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.power_b_i_table_source_expressions + ) + + @power_b_i_table_source_expressions.setter + def power_b_i_table_source_expressions( + self, power_b_i_table_source_expressions: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.power_b_i_table_source_expressions = ( + power_b_i_table_source_expressions + ) + + @property + def power_b_i_table_column_count(self) -> Optional[int]: return ( None if self.attributes is None - else self.attributes.qlik_dataset_technical_name + else self.attributes.power_b_i_table_column_count ) - @qlik_dataset_technical_name.setter - def qlik_dataset_technical_name(self, qlik_dataset_technical_name: Optional[str]): + @power_b_i_table_column_count.setter + def power_b_i_table_column_count(self, power_b_i_table_column_count: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_dataset_technical_name = qlik_dataset_technical_name + self.attributes.power_b_i_table_column_count = power_b_i_table_column_count @property - def qlik_dataset_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_dataset_type + def power_b_i_table_measure_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.power_b_i_table_measure_count + ) - @qlik_dataset_type.setter - def qlik_dataset_type(self, qlik_dataset_type: Optional[str]): + @power_b_i_table_measure_count.setter + def power_b_i_table_measure_count( + self, power_b_i_table_measure_count: Optional[int] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_dataset_type = qlik_dataset_type + self.attributes.power_b_i_table_measure_count = power_b_i_table_measure_count @property - def qlik_dataset_uri(self) -> Optional[str]: 
- return None if self.attributes is None else self.attributes.qlik_dataset_uri + def columns(self) -> Optional[list[PowerBIColumn]]: + return None if self.attributes is None else self.attributes.columns - @qlik_dataset_uri.setter - def qlik_dataset_uri(self, qlik_dataset_uri: Optional[str]): + @columns.setter + def columns(self, columns: Optional[list[PowerBIColumn]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_dataset_uri = qlik_dataset_uri + self.attributes.columns = columns @property - def qlik_dataset_subtype(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_dataset_subtype + def measures(self) -> Optional[list[PowerBIMeasure]]: + return None if self.attributes is None else self.attributes.measures - @qlik_dataset_subtype.setter - def qlik_dataset_subtype(self, qlik_dataset_subtype: Optional[str]): + @measures.setter + def measures(self, measures: Optional[list[PowerBIMeasure]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_dataset_subtype = qlik_dataset_subtype + self.attributes.measures = measures @property - def qlik_space(self) -> Optional[QlikSpace]: - return None if self.attributes is None else self.attributes.qlik_space + def dataset(self) -> Optional[PowerBIDataset]: + return None if self.attributes is None else self.attributes.dataset - @qlik_space.setter - def qlik_space(self, qlik_space: Optional[QlikSpace]): + @dataset.setter + def dataset(self, dataset: Optional[PowerBIDataset]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_space = qlik_space + self.attributes.dataset = dataset - class Attributes(Qlik.Attributes): - qlik_dataset_technical_name: Optional[str] = Field( - None, description="", alias="qlikDatasetTechnicalName" + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field( + None, description="", alias="workspaceQualifiedName" + ) + dataset_qualified_name: Optional[str] = Field( + None, description="", alias="datasetQualifiedName" ) - qlik_dataset_type: Optional[str] = Field( - None, description="", alias="qlikDatasetType" + power_b_i_table_source_expressions: Optional[set[str]] = Field( + None, description="", alias="powerBITableSourceExpressions" ) - qlik_dataset_uri: Optional[str] = Field( - None, description="", alias="qlikDatasetUri" + power_b_i_table_column_count: Optional[int] = Field( + None, description="", alias="powerBITableColumnCount" ) - qlik_dataset_subtype: Optional[str] = Field( - None, description="", alias="qlikDatasetSubtype" + power_b_i_table_measure_count: Optional[int] = Field( + None, description="", alias="powerBITableMeasureCount" ) - qlik_space: Optional[QlikSpace] = Field( - None, description="", alias="qlikSpace" + columns: Optional[list[PowerBIColumn]] = Field( + None, description="", alias="columns" + ) # relationship + measures: Optional[list[PowerBIMeasure]] = Field( + None, description="", alias="measures" + ) # relationship + dataset: Optional[PowerBIDataset] = Field( + None, description="", alias="dataset" ) # relationship - attributes: "QlikDataset.Attributes" = Field( - default_factory=lambda: QlikDataset.Attributes(), + attributes: "PowerBITable.Attributes" = Field( + default_factory=lambda: PowerBITable.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class QlikSheet(Qlik): +class PowerBITile(PowerBI): """Description""" - type_name: str = Field("QlikSheet", allow_mutation=False) + type_name: str = Field("PowerBITile", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "QlikSheet": - raise ValueError("must be QlikSheet") + if v != "PowerBITile": + raise ValueError("must be PowerBITile") return v def __setattr__(self, name, value): - if name in QlikSheet._convenience_properties: + if name in PowerBITile._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - QLIK_SHEET_IS_APPROVED: ClassVar[BooleanField] = BooleanField( - "qlikSheetIsApproved", "qlikSheetIsApproved" + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" + ) + """ + Unique name of the workspace in which this tile exists. + """ + DASHBOARD_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "dashboardQualifiedName", "dashboardQualifiedName" ) """ - Whether a qlik sheet is approved + Unique name of the dashboard in which this tile is pinned. """ - QLIK_APP: ClassVar[RelationField] = RelationField("qlikApp") + REPORT: ClassVar[RelationField] = RelationField("report") + """ + TBC + """ + DATASET: ClassVar[RelationField] = RelationField("dataset") """ TBC """ - QLIK_CHARTS: ClassVar[RelationField] = RelationField("qlikCharts") + DASHBOARD: ClassVar[RelationField] = RelationField("dashboard") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "qlik_sheet_is_approved", - "qlik_app", - "qlik_charts", + "workspace_qualified_name", + "dashboard_qualified_name", + "report", + "dataset", + "dashboard", ] @property - def qlik_sheet_is_approved(self) -> Optional[bool]: + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def dashboard_qualified_name(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.qlik_sheet_is_approved + None + if self.attributes is None + else self.attributes.dashboard_qualified_name ) - @qlik_sheet_is_approved.setter - def qlik_sheet_is_approved(self, qlik_sheet_is_approved: Optional[bool]): + @dashboard_qualified_name.setter + def dashboard_qualified_name(self, dashboard_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboard_qualified_name = dashboard_qualified_name + + @property + def report(self) -> Optional[PowerBIReport]: + return None if self.attributes is None else self.attributes.report + + @report.setter + def report(self, report: Optional[PowerBIReport]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_sheet_is_approved = qlik_sheet_is_approved + self.attributes.report = report @property - def qlik_app(self) -> Optional[QlikApp]: - return None if self.attributes is None else self.attributes.qlik_app + def dataset(self) -> Optional[PowerBIDataset]: + return None if self.attributes is None else self.attributes.dataset - @qlik_app.setter - def qlik_app(self, qlik_app: 
Optional[QlikApp]): + @dataset.setter + def dataset(self, dataset: Optional[PowerBIDataset]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_app = qlik_app + self.attributes.dataset = dataset @property - def qlik_charts(self) -> Optional[list[QlikChart]]: - return None if self.attributes is None else self.attributes.qlik_charts + def dashboard(self) -> Optional[PowerBIDashboard]: + return None if self.attributes is None else self.attributes.dashboard - @qlik_charts.setter - def qlik_charts(self, qlik_charts: Optional[list[QlikChart]]): + @dashboard.setter + def dashboard(self, dashboard: Optional[PowerBIDashboard]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_charts = qlik_charts + self.attributes.dashboard = dashboard - class Attributes(Qlik.Attributes): - qlik_sheet_is_approved: Optional[bool] = Field( - None, description="", alias="qlikSheetIsApproved" + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field( + None, description="", alias="workspaceQualifiedName" ) - qlik_app: Optional[QlikApp] = Field( - None, description="", alias="qlikApp" + dashboard_qualified_name: Optional[str] = Field( + None, description="", alias="dashboardQualifiedName" + ) + report: Optional[PowerBIReport] = Field( + None, description="", alias="report" + ) # relationship + dataset: Optional[PowerBIDataset] = Field( + None, description="", alias="dataset" ) # relationship - qlik_charts: Optional[list[QlikChart]] = Field( - None, description="", alias="qlikCharts" + dashboard: Optional[PowerBIDashboard] = Field( + None, description="", alias="dashboard" + ) # relationship + + attributes: "PowerBITile.Attributes" = Field( + default_factory=lambda: PowerBITile.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class PowerBIDatasource(PowerBI): + """Description""" + + type_name: str = Field("PowerBIDatasource", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBIDatasource": + raise ValueError("must be PowerBIDatasource") + return v + + def __setattr__(self, name, value): + if name in PowerBIDatasource._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + CONNECTION_DETAILS: ClassVar[KeywordField] = KeywordField( + "connectionDetails", "connectionDetails" + ) + """ + Connection details of the datasource. 
+ """ + + DATASETS: ClassVar[RelationField] = RelationField("datasets") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "connection_details", + "datasets", + ] + + @property + def connection_details(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.connection_details + + @connection_details.setter + def connection_details(self, connection_details: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.connection_details = connection_details + + @property + def datasets(self) -> Optional[list[PowerBIDataset]]: + return None if self.attributes is None else self.attributes.datasets + + @datasets.setter + def datasets(self, datasets: Optional[list[PowerBIDataset]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasets = datasets + + class Attributes(PowerBI.Attributes): + connection_details: Optional[dict[str, str]] = Field( + None, description="", alias="connectionDetails" + ) + datasets: Optional[list[PowerBIDataset]] = Field( + None, description="", alias="datasets" ) # relationship - attributes: "QlikSheet.Attributes" = Field( - default_factory=lambda: QlikSheet.Attributes(), + attributes: "PowerBIDatasource.Attributes" = Field( + default_factory=lambda: PowerBIDatasource.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class QlikSpace(Qlik): +class PowerBIWorkspace(PowerBI): """Description""" - type_name: str = Field("QlikSpace", allow_mutation=False) + type_name: str = Field("PowerBIWorkspace", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "QlikSpace": - raise ValueError("must be QlikSpace") + if v != "PowerBIWorkspace": + raise ValueError("must be PowerBIWorkspace") return v def __setattr__(self, name, value): - if name in QlikSpace._convenience_properties: + if name in PowerBIWorkspace._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - QLIK_SPACE_TYPE: ClassVar[KeywordField] = KeywordField( - "qlikSpaceType", "qlikSpaceType" + WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") + """ + Deprecated. + """ + REPORT_COUNT: ClassVar[NumericField] = NumericField("reportCount", "reportCount") + """ + Number of reports in this workspace. + """ + DASHBOARD_COUNT: ClassVar[NumericField] = NumericField( + "dashboardCount", "dashboardCount" + ) + """ + Number of dashboards in this workspace. + """ + DATASET_COUNT: ClassVar[NumericField] = NumericField("datasetCount", "datasetCount") + """ + Number of datasets in this workspace. + """ + DATAFLOW_COUNT: ClassVar[NumericField] = NumericField( + "dataflowCount", "dataflowCount" ) """ - Type of a qlik space. E.g. Private, Shared etc + Number of dataflows in this workspace. 
""" - QLIK_DATASETS: ClassVar[RelationField] = RelationField("qlikDatasets") + REPORTS: ClassVar[RelationField] = RelationField("reports") + """ + TBC + """ + DATASETS: ClassVar[RelationField] = RelationField("datasets") + """ + TBC + """ + DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") """ TBC """ - QLIK_APPS: ClassVar[RelationField] = RelationField("qlikApps") + DATAFLOWS: ClassVar[RelationField] = RelationField("dataflows") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "qlik_space_type", - "qlik_datasets", - "qlik_apps", + "web_url", + "report_count", + "dashboard_count", + "dataset_count", + "dataflow_count", + "reports", + "datasets", + "dashboards", + "dataflows", ] @property - def qlik_space_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_space_type + def web_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.web_url - @qlik_space_type.setter - def qlik_space_type(self, qlik_space_type: Optional[str]): + @web_url.setter + def web_url(self, web_url: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_space_type = qlik_space_type + self.attributes.web_url = web_url @property - def qlik_datasets(self) -> Optional[list[QlikDataset]]: - return None if self.attributes is None else self.attributes.qlik_datasets + def report_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.report_count - @qlik_datasets.setter - def qlik_datasets(self, qlik_datasets: Optional[list[QlikDataset]]): + @report_count.setter + def report_count(self, report_count: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_datasets = qlik_datasets + self.attributes.report_count = report_count @property - def qlik_apps(self) -> Optional[list[QlikApp]]: - return None if self.attributes is None else self.attributes.qlik_apps + def dashboard_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.dashboard_count - @qlik_apps.setter - def qlik_apps(self, qlik_apps: Optional[list[QlikApp]]): + @dashboard_count.setter + def dashboard_count(self, dashboard_count: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.qlik_apps = qlik_apps + self.attributes.dashboard_count = dashboard_count - class Attributes(Qlik.Attributes): - qlik_space_type: Optional[str] = Field( - None, description="", alias="qlikSpaceType" - ) - qlik_datasets: Optional[list[QlikDataset]] = Field( - None, description="", alias="qlikDatasets" - ) # relationship - qlik_apps: Optional[list[QlikApp]] = Field( - None, description="", alias="qlikApps" - ) # relationship + @property + def dataset_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.dataset_count + + @dataset_count.setter + def dataset_count(self, dataset_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataset_count = dataset_count + + @property + def dataflow_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.dataflow_count + + @dataflow_count.setter + def dataflow_count(self, dataflow_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataflow_count = dataflow_count + + @property + def reports(self) -> Optional[list[PowerBIReport]]: + return None if 
self.attributes is None else self.attributes.reports + + @reports.setter + def reports(self, reports: Optional[list[PowerBIReport]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.reports = reports + + @property + def datasets(self) -> Optional[list[PowerBIDataset]]: + return None if self.attributes is None else self.attributes.datasets + + @datasets.setter + def datasets(self, datasets: Optional[list[PowerBIDataset]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasets = datasets + + @property + def dashboards(self) -> Optional[list[PowerBIDashboard]]: + return None if self.attributes is None else self.attributes.dashboards + + @dashboards.setter + def dashboards(self, dashboards: Optional[list[PowerBIDashboard]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboards = dashboards + + @property + def dataflows(self) -> Optional[list[PowerBIDataflow]]: + return None if self.attributes is None else self.attributes.dataflows + + @dataflows.setter + def dataflows(self, dataflows: Optional[list[PowerBIDataflow]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataflows = dataflows + + class Attributes(PowerBI.Attributes): + web_url: Optional[str] = Field(None, description="", alias="webUrl") + report_count: Optional[int] = Field(None, description="", alias="reportCount") + dashboard_count: Optional[int] = Field( + None, description="", alias="dashboardCount" + ) + dataset_count: Optional[int] = Field(None, description="", alias="datasetCount") + dataflow_count: Optional[int] = Field( + None, description="", alias="dataflowCount" + ) + reports: Optional[list[PowerBIReport]] = Field( + None, description="", alias="reports" + ) # relationship + datasets: Optional[list[PowerBIDataset]] = Field( + None, description="", alias="datasets" + ) # relationship + dashboards: Optional[list[PowerBIDashboard]] = Field( + None, description="", alias="dashboards" + ) # relationship + dataflows: Optional[list[PowerBIDataflow]] = Field( + None, description="", alias="dataflows" + ) # relationship + + attributes: "PowerBIWorkspace.Attributes" = Field( + default_factory=lambda: PowerBIWorkspace.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class PowerBIDataset(PowerBI): + """Description""" + + type_name: str = Field("PowerBIDataset", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBIDataset": + raise ValueError("must be PowerBIDataset") + return v + + def __setattr__(self, name, value): + if name in PowerBIDataset._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" + ) + """ + Unique name of the workspace in which this dataset exists. + """ + WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") + """ + Deprecated. See 'sourceUrl' instead. 
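
The count properties on PowerBIWorkspace are plain optional integers, and every generated setter lazily creates the nested Attributes instance before writing through to it, so a sketch like the following (values invented) ends with the convenience property and the underlying attribute in agreement:

from pyatlan.model.assets import PowerBIWorkspace

workspace = PowerBIWorkspace.create_for_modification(
    qualified_name="default/powerbi/1700000000/workspace",  # invented
    name="Finance workspace",
)
workspace.report_count = 12
workspace.dashboard_count = 4
# The setters wrote through to the nested Attributes object.
assert workspace.attributes.report_count == 12
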
+ """ + + REPORTS: ClassVar[RelationField] = RelationField("reports") + """ + TBC + """ + WORKSPACE: ClassVar[RelationField] = RelationField("workspace") + """ + TBC + """ + DATAFLOWS: ClassVar[RelationField] = RelationField("dataflows") + """ + TBC + """ + TILES: ClassVar[RelationField] = RelationField("tiles") + """ + TBC + """ + TABLES: ClassVar[RelationField] = RelationField("tables") + """ + TBC + """ + DATASOURCES: ClassVar[RelationField] = RelationField("datasources") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "workspace_qualified_name", + "web_url", + "reports", + "workspace", + "dataflows", + "tiles", + "tables", + "datasources", + ] + + @property + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def web_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.web_url + + @web_url.setter + def web_url(self, web_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.web_url = web_url + + @property + def reports(self) -> Optional[list[PowerBIReport]]: + return None if self.attributes is None else self.attributes.reports + + @reports.setter + def reports(self, reports: Optional[list[PowerBIReport]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.reports = reports + + @property + def workspace(self) -> Optional[PowerBIWorkspace]: + return None if self.attributes is None else self.attributes.workspace + + @workspace.setter + def workspace(self, workspace: Optional[PowerBIWorkspace]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace = workspace + + @property + def dataflows(self) -> Optional[list[PowerBIDataflow]]: + return None if self.attributes is None else self.attributes.dataflows + + @dataflows.setter + def dataflows(self, dataflows: Optional[list[PowerBIDataflow]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataflows = dataflows + + @property + def tiles(self) -> Optional[list[PowerBITile]]: + return None if self.attributes is None else self.attributes.tiles + + @tiles.setter + def tiles(self, tiles: Optional[list[PowerBITile]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tiles = tiles + + @property + def tables(self) -> Optional[list[PowerBITable]]: + return None if self.attributes is None else self.attributes.tables + + @tables.setter + def tables(self, tables: Optional[list[PowerBITable]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tables = tables + + @property + def datasources(self) -> Optional[list[PowerBIDatasource]]: + return None if self.attributes is None else self.attributes.datasources + + @datasources.setter + def datasources(self, datasources: Optional[list[PowerBIDatasource]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasources = datasources + + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field( + None, description="", alias="workspaceQualifiedName" + ) + web_url: Optional[str] = 
Field(None, description="", alias="webUrl") + reports: Optional[list[PowerBIReport]] = Field( + None, description="", alias="reports" + ) # relationship + workspace: Optional[PowerBIWorkspace] = Field( + None, description="", alias="workspace" + ) # relationship + dataflows: Optional[list[PowerBIDataflow]] = Field( + None, description="", alias="dataflows" + ) # relationship + tiles: Optional[list[PowerBITile]] = Field( + None, description="", alias="tiles" + ) # relationship + tables: Optional[list[PowerBITable]] = Field( + None, description="", alias="tables" + ) # relationship + datasources: Optional[list[PowerBIDatasource]] = Field( + None, description="", alias="datasources" + ) # relationship + + attributes: "PowerBIDataset.Attributes" = Field( + default_factory=lambda: PowerBIDataset.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class PowerBIDashboard(PowerBI): + """Description""" + + type_name: str = Field("PowerBIDashboard", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBIDashboard": + raise ValueError("must be PowerBIDashboard") + return v + + def __setattr__(self, name, value): + if name in PowerBIDashboard._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" + ) + """ + Unique name of the workspace in which this dashboard exists. + """ + WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") + """ + Deprecated. See 'sourceUrl' instead. + """ + TILE_COUNT: ClassVar[NumericField] = NumericField("tileCount", "tileCount") + """ + Number of tiles in this table. 
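
A comparable sketch for PowerBIDataset, tying it back to its workspace both through the denormalised workspaceQualifiedName string and through the workspace relationship; all identifiers below are invented:

from pyatlan.model.assets import PowerBIDataset, PowerBITable, PowerBIWorkspace

dataset = PowerBIDataset.create_for_modification(
    qualified_name="default/powerbi/1700000000/workspace/dataset",  # invented
    name="Sales model",
)
dataset.workspace_qualified_name = "default/powerbi/1700000000/workspace"
dataset.workspace = PowerBIWorkspace.ref_by_qualified_name("default/powerbi/1700000000/workspace")
dataset.tables = [PowerBITable.ref_by_guid("11111111-2222-3333-4444-555555555555")]
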
+ """ + + WORKSPACE: ClassVar[RelationField] = RelationField("workspace") + """ + TBC + """ + TILES: ClassVar[RelationField] = RelationField("tiles") + """ + TBC + """ - attributes: "QlikSpace.Attributes" = Field( - default_factory=lambda: QlikSpace.Attributes(), + _convenience_properties: ClassVar[list[str]] = [ + "workspace_qualified_name", + "web_url", + "tile_count", + "workspace", + "tiles", + ] + + @property + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def web_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.web_url + + @web_url.setter + def web_url(self, web_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.web_url = web_url + + @property + def tile_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.tile_count + + @tile_count.setter + def tile_count(self, tile_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tile_count = tile_count + + @property + def workspace(self) -> Optional[PowerBIWorkspace]: + return None if self.attributes is None else self.attributes.workspace + + @workspace.setter + def workspace(self, workspace: Optional[PowerBIWorkspace]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace = workspace + + @property + def tiles(self) -> Optional[list[PowerBITile]]: + return None if self.attributes is None else self.attributes.tiles + + @tiles.setter + def tiles(self, tiles: Optional[list[PowerBITile]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tiles = tiles + + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field( + None, description="", alias="workspaceQualifiedName" + ) + web_url: Optional[str] = Field(None, description="", alias="webUrl") + tile_count: Optional[int] = Field(None, description="", alias="tileCount") + workspace: Optional[PowerBIWorkspace] = Field( + None, description="", alias="workspace" + ) # relationship + tiles: Optional[list[PowerBITile]] = Field( + None, description="", alias="tiles" + ) # relationship + + attributes: "PowerBIDashboard.Attributes" = Field( + default_factory=lambda: PowerBIDashboard.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -QlikApp.Attributes.update_forward_refs() +class PowerBIDataflow(PowerBI): + """Description""" + + type_name: str = Field("PowerBIDataflow", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBIDataflow": + raise ValueError("must be PowerBIDataflow") + return v + + def __setattr__(self, name, value): + if name in PowerBIDataflow._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" + ) + """ + Unique name of the workspace in which this dataflow exists. 
+ """ + WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") + """ + Deprecated. See 'sourceUrl' instead. + """ + + WORKSPACE: ClassVar[RelationField] = RelationField("workspace") + """ + TBC + """ + DATASETS: ClassVar[RelationField] = RelationField("datasets") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "workspace_qualified_name", + "web_url", + "workspace", + "datasets", + ] + + @property + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def web_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.web_url + + @web_url.setter + def web_url(self, web_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.web_url = web_url + + @property + def workspace(self) -> Optional[PowerBIWorkspace]: + return None if self.attributes is None else self.attributes.workspace + + @workspace.setter + def workspace(self, workspace: Optional[PowerBIWorkspace]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace = workspace + + @property + def datasets(self) -> Optional[list[PowerBIDataset]]: + return None if self.attributes is None else self.attributes.datasets + + @datasets.setter + def datasets(self, datasets: Optional[list[PowerBIDataset]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasets = datasets + + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field( + None, description="", alias="workspaceQualifiedName" + ) + web_url: Optional[str] = Field(None, description="", alias="webUrl") + workspace: Optional[PowerBIWorkspace] = Field( + None, description="", alias="workspace" + ) # relationship + datasets: Optional[list[PowerBIDataset]] = Field( + None, description="", alias="datasets" + ) # relationship + + attributes: "PowerBIDataflow.Attributes" = Field( + default_factory=lambda: PowerBIDataflow.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class PowerBIPage(PowerBI): + """Description""" + + type_name: str = Field("PowerBIPage", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBIPage": + raise ValueError("must be PowerBIPage") + return v + + def __setattr__(self, name, value): + if name in PowerBIPage._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" + ) + """ + Unique name of the workspace in which this page exists. + """ + REPORT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "reportQualifiedName", "reportQualifiedName" + ) + """ + Unique name of the report in which this page exists. 
+ """ + + REPORT: ClassVar[RelationField] = RelationField("report") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "workspace_qualified_name", + "report_qualified_name", + "report", + ] + + @property + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def report_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.report_qualified_name + ) + + @report_qualified_name.setter + def report_qualified_name(self, report_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.report_qualified_name = report_qualified_name + + @property + def report(self) -> Optional[PowerBIReport]: + return None if self.attributes is None else self.attributes.report + + @report.setter + def report(self, report: Optional[PowerBIReport]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.report = report + + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field( + None, description="", alias="workspaceQualifiedName" + ) + report_qualified_name: Optional[str] = Field( + None, description="", alias="reportQualifiedName" + ) + report: Optional[PowerBIReport] = Field( + None, description="", alias="report" + ) # relationship + + attributes: "PowerBIPage.Attributes" = Field( + default_factory=lambda: PowerBIPage.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +PowerBIReport.Attributes.update_forward_refs() + + +PowerBIMeasure.Attributes.update_forward_refs() + + +PowerBIColumn.Attributes.update_forward_refs() + + +PowerBITable.Attributes.update_forward_refs() + + +PowerBITile.Attributes.update_forward_refs() + + +PowerBIDatasource.Attributes.update_forward_refs() + + +PowerBIWorkspace.Attributes.update_forward_refs() -QlikChart.Attributes.update_forward_refs() +PowerBIDataset.Attributes.update_forward_refs() -QlikDataset.Attributes.update_forward_refs() +PowerBIDashboard.Attributes.update_forward_refs() -QlikSheet.Attributes.update_forward_refs() +PowerBIDataflow.Attributes.update_forward_refs() -QlikSpace.Attributes.update_forward_refs() +PowerBIPage.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset80.py b/pyatlan/model/assets/asset80.py index c3a0e3f05..ebfbb3602 100644 --- a/pyatlan/model/assets/asset80.py +++ b/pyatlan/model/assets/asset80.py @@ -9,873 +9,1524 @@ from pydantic import Field, validator from pyatlan.model.fields.atlan_fields import ( - BooleanField, KeywordField, KeywordTextField, - NumericField, RelationField, - TextField, ) -from .asset53 import Salesforce +from .asset52 import MicroStrategy -class SalesforceObject(Salesforce): +class MicroStrategyReport(MicroStrategy): """Description""" - type_name: str = Field("SalesforceObject", allow_mutation=False) + type_name: str = Field("MicroStrategyReport", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "SalesforceObject": - raise ValueError("must be SalesforceObject") + if v != "MicroStrategyReport": + raise ValueError("must be MicroStrategyReport") return v def __setattr__(self, name, value): - if name in SalesforceObject._convenience_properties: + if name in MicroStrategyReport._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - IS_CUSTOM: ClassVar[BooleanField] = BooleanField("isCustom", "isCustom") + MICRO_STRATEGY_REPORT_TYPE: ClassVar[KeywordField] = KeywordField( + "microStrategyReportType", "microStrategyReportType" + ) """ - isCustom captures whether the object is a custom object or not + Type of report, for example: Grid or Chart. 
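
Both of these generated modules end with a run of Attributes.update_forward_refs() calls because the relationship annotations refer to classes that are only defined later in the same file. A stripped-down illustration of that pydantic v1 pattern, with invented class names unrelated to the real asset model:

from typing import List, Optional

from pydantic import BaseModel


class Workspace(BaseModel):
    # "Dataset" does not exist yet, so it is declared as a string forward reference.
    datasets: Optional[List["Dataset"]] = None


class Dataset(BaseModel):
    workspace: Optional["Workspace"] = None


# Resolve the string annotations now that both classes exist.
Workspace.update_forward_refs()
Dataset.update_forward_refs()

w = Workspace(datasets=[Dataset()])
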
""" - IS_MERGABLE: ClassVar[BooleanField] = BooleanField("isMergable", "isMergable") + + MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( + "microStrategyMetrics" + ) """ TBC """ - IS_QUERYABLE: ClassVar[BooleanField] = BooleanField("isQueryable", "isQueryable") + MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( + "microStrategyProject" + ) """ TBC """ - FIELD_COUNT: ClassVar[NumericField] = NumericField("fieldCount", "fieldCount") + MICRO_STRATEGY_ATTRIBUTES: ClassVar[RelationField] = RelationField( + "microStrategyAttributes" + ) """ - fieldCount is the number of fields in the object entity + TBC """ - LOOKUP_FIELDS: ClassVar[RelationField] = RelationField("lookupFields") + _convenience_properties: ClassVar[list[str]] = [ + "micro_strategy_report_type", + "micro_strategy_metrics", + "micro_strategy_project", + "micro_strategy_attributes", + ] + + @property + def micro_strategy_report_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_report_type + ) + + @micro_strategy_report_type.setter + def micro_strategy_report_type(self, micro_strategy_report_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_report_type = micro_strategy_report_type + + @property + def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_metrics + ) + + @micro_strategy_metrics.setter + def micro_strategy_metrics( + self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_metrics = micro_strategy_metrics + + @property + def micro_strategy_project(self) -> Optional[MicroStrategyProject]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_project + ) + + @micro_strategy_project.setter + def micro_strategy_project( + self, micro_strategy_project: Optional[MicroStrategyProject] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_project = micro_strategy_project + + @property + def micro_strategy_attributes(self) -> Optional[list[MicroStrategyAttribute]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_attributes + ) + + @micro_strategy_attributes.setter + def micro_strategy_attributes( + self, micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_attributes = micro_strategy_attributes + + class Attributes(MicroStrategy.Attributes): + micro_strategy_report_type: Optional[str] = Field( + None, description="", alias="microStrategyReportType" + ) + micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( + None, description="", alias="microStrategyMetrics" + ) # relationship + micro_strategy_project: Optional[MicroStrategyProject] = Field( + None, description="", alias="microStrategyProject" + ) # relationship + micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] = Field( + None, description="", alias="microStrategyAttributes" + ) # relationship + + attributes: "MicroStrategyReport.Attributes" = Field( + default_factory=lambda: MicroStrategyReport.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class MicroStrategyProject(MicroStrategy): + """Description""" + + type_name: str = Field("MicroStrategyProject", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MicroStrategyProject": + raise ValueError("must be MicroStrategyProject") + return v + + def __setattr__(self, name, value): + if name in MicroStrategyProject._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MICRO_STRATEGY_REPORTS: ClassVar[RelationField] = RelationField( + "microStrategyReports" + ) + """ + TBC + """ + MICRO_STRATEGY_FACTS: ClassVar[RelationField] = RelationField("microStrategyFacts") """ TBC """ - ORGANIZATION: ClassVar[RelationField] = RelationField("organization") + MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( + "microStrategyMetrics" + ) """ TBC """ - FIELDS: ClassVar[RelationField] = RelationField("fields") + MICRO_STRATEGY_VISUALIZATIONS: ClassVar[RelationField] = RelationField( + "microStrategyVisualizations" + ) + """ + TBC + """ + MICRO_STRATEGY_DOCUMENTS: ClassVar[RelationField] = RelationField( + "microStrategyDocuments" + ) + """ + TBC + """ + MICRO_STRATEGY_CUBES: ClassVar[RelationField] = RelationField("microStrategyCubes") + """ + TBC + """ + MICRO_STRATEGY_DOSSIERS: ClassVar[RelationField] = RelationField( + "microStrategyDossiers" + ) + """ + TBC + """ + MICRO_STRATEGY_ATTRIBUTES: ClassVar[RelationField] = RelationField( + "microStrategyAttributes" + ) """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "is_custom", - "is_mergable", - "is_queryable", - "field_count", - "lookup_fields", - "organization", - "fields", + "micro_strategy_reports", + "micro_strategy_facts", + "micro_strategy_metrics", + "micro_strategy_visualizations", + "micro_strategy_documents", + "micro_strategy_cubes", + "micro_strategy_dossiers", + "micro_strategy_attributes", ] @property - def is_custom(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_custom + def micro_strategy_reports(self) -> Optional[list[MicroStrategyReport]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_reports + ) - @is_custom.setter - def is_custom(self, is_custom: Optional[bool]): + @micro_strategy_reports.setter + def micro_strategy_reports( + self, micro_strategy_reports: Optional[list[MicroStrategyReport]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.is_custom = is_custom + self.attributes.micro_strategy_reports = micro_strategy_reports @property - def is_mergable(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_mergable + def micro_strategy_facts(self) -> Optional[list[MicroStrategyFact]]: + return None if self.attributes is None else self.attributes.micro_strategy_facts - @is_mergable.setter - def is_mergable(self, is_mergable: Optional[bool]): + @micro_strategy_facts.setter + def micro_strategy_facts( + self, micro_strategy_facts: Optional[list[MicroStrategyFact]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.is_mergable = is_mergable + self.attributes.micro_strategy_facts = micro_strategy_facts @property - def is_queryable(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_queryable + def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: + 
return ( + None if self.attributes is None else self.attributes.micro_strategy_metrics + ) - @is_queryable.setter - def is_queryable(self, is_queryable: Optional[bool]): + @micro_strategy_metrics.setter + def micro_strategy_metrics( + self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.is_queryable = is_queryable + self.attributes.micro_strategy_metrics = micro_strategy_metrics @property - def field_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.field_count + def micro_strategy_visualizations( + self, + ) -> Optional[list[MicroStrategyVisualization]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_visualizations + ) + + @micro_strategy_visualizations.setter + def micro_strategy_visualizations( + self, micro_strategy_visualizations: Optional[list[MicroStrategyVisualization]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_visualizations = micro_strategy_visualizations + + @property + def micro_strategy_documents(self) -> Optional[list[MicroStrategyDocument]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_documents + ) - @field_count.setter - def field_count(self, field_count: Optional[int]): + @micro_strategy_documents.setter + def micro_strategy_documents( + self, micro_strategy_documents: Optional[list[MicroStrategyDocument]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.field_count = field_count + self.attributes.micro_strategy_documents = micro_strategy_documents @property - def lookup_fields(self) -> Optional[list[SalesforceField]]: - return None if self.attributes is None else self.attributes.lookup_fields + def micro_strategy_cubes(self) -> Optional[list[MicroStrategyCube]]: + return None if self.attributes is None else self.attributes.micro_strategy_cubes - @lookup_fields.setter - def lookup_fields(self, lookup_fields: Optional[list[SalesforceField]]): + @micro_strategy_cubes.setter + def micro_strategy_cubes( + self, micro_strategy_cubes: Optional[list[MicroStrategyCube]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.lookup_fields = lookup_fields + self.attributes.micro_strategy_cubes = micro_strategy_cubes @property - def organization(self) -> Optional[SalesforceOrganization]: - return None if self.attributes is None else self.attributes.organization + def micro_strategy_dossiers(self) -> Optional[list[MicroStrategyDossier]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_dossiers + ) - @organization.setter - def organization(self, organization: Optional[SalesforceOrganization]): + @micro_strategy_dossiers.setter + def micro_strategy_dossiers( + self, micro_strategy_dossiers: Optional[list[MicroStrategyDossier]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.organization = organization + self.attributes.micro_strategy_dossiers = micro_strategy_dossiers @property - def fields(self) -> Optional[list[SalesforceField]]: - return None if self.attributes is None else self.attributes.fields + def micro_strategy_attributes(self) -> Optional[list[MicroStrategyAttribute]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_attributes + ) - @fields.setter - def fields(self, fields: Optional[list[SalesforceField]]): + 
@micro_strategy_attributes.setter + def micro_strategy_attributes( + self, micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.fields = fields + self.attributes.micro_strategy_attributes = micro_strategy_attributes - class Attributes(Salesforce.Attributes): - is_custom: Optional[bool] = Field(None, description="", alias="isCustom") - is_mergable: Optional[bool] = Field(None, description="", alias="isMergable") - is_queryable: Optional[bool] = Field(None, description="", alias="isQueryable") - field_count: Optional[int] = Field(None, description="", alias="fieldCount") - lookup_fields: Optional[list[SalesforceField]] = Field( - None, description="", alias="lookupFields" + class Attributes(MicroStrategy.Attributes): + micro_strategy_reports: Optional[list[MicroStrategyReport]] = Field( + None, description="", alias="microStrategyReports" + ) # relationship + micro_strategy_facts: Optional[list[MicroStrategyFact]] = Field( + None, description="", alias="microStrategyFacts" + ) # relationship + micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( + None, description="", alias="microStrategyMetrics" + ) # relationship + micro_strategy_visualizations: Optional[ + list[MicroStrategyVisualization] + ] = Field( + None, description="", alias="microStrategyVisualizations" ) # relationship - organization: Optional[SalesforceOrganization] = Field( - None, description="", alias="organization" + micro_strategy_documents: Optional[list[MicroStrategyDocument]] = Field( + None, description="", alias="microStrategyDocuments" ) # relationship - fields: Optional[list[SalesforceField]] = Field( - None, description="", alias="fields" + micro_strategy_cubes: Optional[list[MicroStrategyCube]] = Field( + None, description="", alias="microStrategyCubes" + ) # relationship + micro_strategy_dossiers: Optional[list[MicroStrategyDossier]] = Field( + None, description="", alias="microStrategyDossiers" + ) # relationship + micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] = Field( + None, description="", alias="microStrategyAttributes" ) # relationship - attributes: "SalesforceObject.Attributes" = Field( - default_factory=lambda: SalesforceObject.Attributes(), + attributes: "MicroStrategyProject.Attributes" = Field( + default_factory=lambda: MicroStrategyProject.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class SalesforceField(Salesforce): +class MicroStrategyMetric(MicroStrategy): """Description""" - type_name: str = Field("SalesforceField", allow_mutation=False) + type_name: str = Field("MicroStrategyMetric", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "SalesforceField": - raise ValueError("must be SalesforceField") + if v != "MicroStrategyMetric": + raise ValueError("must be MicroStrategyMetric") return v def __setattr__(self, name, value): - if name in SalesforceField._convenience_properties: + if name in MicroStrategyMetric._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - DATA_TYPE: ClassVar[KeywordTextField] = KeywordTextField( - "dataType", "dataType", "dataType.text" + MICRO_STRATEGY_METRIC_EXPRESSION: ClassVar[KeywordField] = KeywordField( + "microStrategyMetricExpression", "microStrategyMetricExpression" ) """ - data type of the field + Text specifiying this metric's expression. """ - OBJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "objectQualifiedName", "objectQualifiedName" + MICRO_STRATEGY_ATTRIBUTE_QUALIFIED_NAMES: ClassVar[ + KeywordTextField + ] = KeywordTextField( + "microStrategyAttributeQualifiedNames", + "microStrategyAttributeQualifiedNames", + "microStrategyAttributeQualifiedNames.text", ) """ - TBC - """ - ORDER: ClassVar[NumericField] = NumericField("order", "order") - """ - TBC + List of unique names of attributes related to this metric. """ - INLINE_HELP_TEXT: ClassVar[TextField] = TextField( - "inlineHelpText", "inlineHelpText.text" + MICRO_STRATEGY_ATTRIBUTE_NAMES: ClassVar[KeywordTextField] = KeywordTextField( + "microStrategyAttributeNames", + "microStrategyAttributeNames.keyword", + "microStrategyAttributeNames", ) """ - TBC + List of simple names of attributes related to this metric. """ - IS_CALCULATED: ClassVar[BooleanField] = BooleanField("isCalculated", "isCalculated") + MICRO_STRATEGY_FACT_QUALIFIED_NAMES: ClassVar[KeywordTextField] = KeywordTextField( + "microStrategyFactQualifiedNames", + "microStrategyFactQualifiedNames", + "microStrategyFactQualifiedNames.text", + ) """ - TBC + List of unique names of facts related to this metric. """ - FORMULA: ClassVar[KeywordField] = KeywordField("formula", "formula") + MICRO_STRATEGY_FACT_NAMES: ClassVar[KeywordTextField] = KeywordTextField( + "microStrategyFactNames", + "microStrategyFactNames.keyword", + "microStrategyFactNames", + ) """ - TBC + List of simple names of facts related to this metric. """ - IS_CASE_SENSITIVE: ClassVar[BooleanField] = BooleanField( - "isCaseSensitive", "isCaseSensitive" + MICRO_STRATEGY_METRIC_PARENT_QUALIFIED_NAMES: ClassVar[ + KeywordTextField + ] = KeywordTextField( + "microStrategyMetricParentQualifiedNames", + "microStrategyMetricParentQualifiedNames", + "microStrategyMetricParentQualifiedNames.text", ) """ - TBC + List of unique names of parent metrics of this metric. """ - IS_ENCRYPTED: ClassVar[BooleanField] = BooleanField("isEncrypted", "isEncrypted") + MICRO_STRATEGY_METRIC_PARENT_NAMES: ClassVar[KeywordTextField] = KeywordTextField( + "microStrategyMetricParentNames", + "microStrategyMetricParentNames.keyword", + "microStrategyMetricParentNames", + ) """ - TBC + List of simple names of parent metrics of this metric. 
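
The denormalised attribute, fact and parent-metric name lists above are stored as plain string sets; a short sketch (every value invented) of populating a few of them through the snake_case convenience properties defined just below:

from pyatlan.model.assets import MicroStrategyMetric

metric = MicroStrategyMetric.create_for_modification(
    qualified_name="default/microstrategy/1700000000/project/metric",  # invented
    name="Net revenue",
)
metric.micro_strategy_metric_expression = "Sum(Revenue) - Sum(Returns)"  # invented expression
metric.micro_strategy_attribute_names = {"Customer", "Region"}
metric.micro_strategy_fact_names = {"Revenue", "Returns"}
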
""" - MAX_LENGTH: ClassVar[NumericField] = NumericField("maxLength", "maxLength") + + MICRO_STRATEGY_METRIC_PARENTS: ClassVar[RelationField] = RelationField( + "microStrategyMetricParents" + ) """ TBC """ - IS_NULLABLE: ClassVar[BooleanField] = BooleanField("isNullable", "isNullable") + MICRO_STRATEGY_FACTS: ClassVar[RelationField] = RelationField("microStrategyFacts") """ TBC """ - PRECISION: ClassVar[NumericField] = NumericField("precision", "precision") - """ - Total number of digits allowed - """ - NUMERIC_SCALE: ClassVar[NumericField] = NumericField("numericScale", "numericScale") + MICRO_STRATEGY_REPORTS: ClassVar[RelationField] = RelationField( + "microStrategyReports" + ) """ TBC """ - IS_UNIQUE: ClassVar[BooleanField] = BooleanField("isUnique", "isUnique") + MICRO_STRATEGY_CUBES: ClassVar[RelationField] = RelationField("microStrategyCubes") """ TBC """ - PICKLIST_VALUES: ClassVar[KeywordField] = KeywordField( - "picklistValues", "picklistValues" - ) - """ - picklistValues is a list of values from which a user can pick from while adding a record - """ - IS_POLYMORPHIC_FOREIGN_KEY: ClassVar[BooleanField] = BooleanField( - "isPolymorphicForeignKey", "isPolymorphicForeignKey" - ) - """ - isPolymorphicForeignKey captures whether the field references to record of multiple objects - """ - DEFAULT_VALUE_FORMULA: ClassVar[KeywordField] = KeywordField( - "defaultValueFormula", "defaultValueFormula" + MICRO_STRATEGY_METRIC_CHILDREN: ClassVar[RelationField] = RelationField( + "microStrategyMetricChildren" ) """ TBC """ - - LOOKUP_OBJECTS: ClassVar[RelationField] = RelationField("lookupObjects") + MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( + "microStrategyProject" + ) """ TBC """ - OBJECT: ClassVar[RelationField] = RelationField("object") + MICRO_STRATEGY_ATTRIBUTES: ClassVar[RelationField] = RelationField( + "microStrategyAttributes" + ) """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "data_type", - "object_qualified_name", - "order", - "inline_help_text", - "is_calculated", - "formula", - "is_case_sensitive", - "is_encrypted", - "max_length", - "is_nullable", - "precision", - "numeric_scale", - "is_unique", - "picklist_values", - "is_polymorphic_foreign_key", - "default_value_formula", - "lookup_objects", - "object", + "micro_strategy_metric_expression", + "micro_strategy_attribute_qualified_names", + "micro_strategy_attribute_names", + "micro_strategy_fact_qualified_names", + "micro_strategy_fact_names", + "micro_strategy_metric_parent_qualified_names", + "micro_strategy_metric_parent_names", + "micro_strategy_metric_parents", + "micro_strategy_facts", + "micro_strategy_reports", + "micro_strategy_cubes", + "micro_strategy_metric_children", + "micro_strategy_project", + "micro_strategy_attributes", ] @property - def data_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.data_type + def micro_strategy_metric_expression(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_metric_expression + ) - @data_type.setter - def data_type(self, data_type: Optional[str]): + @micro_strategy_metric_expression.setter + def micro_strategy_metric_expression( + self, micro_strategy_metric_expression: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.data_type = data_type + self.attributes.micro_strategy_metric_expression = ( + micro_strategy_metric_expression + ) @property - def object_qualified_name(self) -> Optional[str]: + 
def micro_strategy_attribute_qualified_names(self) -> Optional[set[str]]: return ( - None if self.attributes is None else self.attributes.object_qualified_name + None + if self.attributes is None + else self.attributes.micro_strategy_attribute_qualified_names ) - @object_qualified_name.setter - def object_qualified_name(self, object_qualified_name: Optional[str]): + @micro_strategy_attribute_qualified_names.setter + def micro_strategy_attribute_qualified_names( + self, micro_strategy_attribute_qualified_names: Optional[set[str]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.object_qualified_name = object_qualified_name + self.attributes.micro_strategy_attribute_qualified_names = ( + micro_strategy_attribute_qualified_names + ) @property - def order(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.order + def micro_strategy_attribute_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_attribute_names + ) - @order.setter - def order(self, order: Optional[int]): + @micro_strategy_attribute_names.setter + def micro_strategy_attribute_names( + self, micro_strategy_attribute_names: Optional[set[str]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.order = order + self.attributes.micro_strategy_attribute_names = micro_strategy_attribute_names @property - def inline_help_text(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.inline_help_text + def micro_strategy_fact_qualified_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_fact_qualified_names + ) - @inline_help_text.setter - def inline_help_text(self, inline_help_text: Optional[str]): + @micro_strategy_fact_qualified_names.setter + def micro_strategy_fact_qualified_names( + self, micro_strategy_fact_qualified_names: Optional[set[str]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.inline_help_text = inline_help_text + self.attributes.micro_strategy_fact_qualified_names = ( + micro_strategy_fact_qualified_names + ) @property - def is_calculated(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_calculated + def micro_strategy_fact_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_fact_names + ) - @is_calculated.setter - def is_calculated(self, is_calculated: Optional[bool]): + @micro_strategy_fact_names.setter + def micro_strategy_fact_names(self, micro_strategy_fact_names: Optional[set[str]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.is_calculated = is_calculated + self.attributes.micro_strategy_fact_names = micro_strategy_fact_names @property - def formula(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.formula + def micro_strategy_metric_parent_qualified_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_metric_parent_qualified_names + ) - @formula.setter - def formula(self, formula: Optional[str]): + @micro_strategy_metric_parent_qualified_names.setter + def micro_strategy_metric_parent_qualified_names( + self, micro_strategy_metric_parent_qualified_names: Optional[set[str]] + ): if self.attributes is None: self.attributes = self.Attributes() - 
self.attributes.formula = formula + self.attributes.micro_strategy_metric_parent_qualified_names = ( + micro_strategy_metric_parent_qualified_names + ) @property - def is_case_sensitive(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_case_sensitive + def micro_strategy_metric_parent_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_metric_parent_names + ) - @is_case_sensitive.setter - def is_case_sensitive(self, is_case_sensitive: Optional[bool]): + @micro_strategy_metric_parent_names.setter + def micro_strategy_metric_parent_names( + self, micro_strategy_metric_parent_names: Optional[set[str]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.is_case_sensitive = is_case_sensitive + self.attributes.micro_strategy_metric_parent_names = ( + micro_strategy_metric_parent_names + ) @property - def is_encrypted(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_encrypted + def micro_strategy_metric_parents(self) -> Optional[list[MicroStrategyMetric]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_metric_parents + ) - @is_encrypted.setter - def is_encrypted(self, is_encrypted: Optional[bool]): + @micro_strategy_metric_parents.setter + def micro_strategy_metric_parents( + self, micro_strategy_metric_parents: Optional[list[MicroStrategyMetric]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.is_encrypted = is_encrypted + self.attributes.micro_strategy_metric_parents = micro_strategy_metric_parents @property - def max_length(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.max_length + def micro_strategy_facts(self) -> Optional[list[MicroStrategyFact]]: + return None if self.attributes is None else self.attributes.micro_strategy_facts - @max_length.setter - def max_length(self, max_length: Optional[int]): + @micro_strategy_facts.setter + def micro_strategy_facts( + self, micro_strategy_facts: Optional[list[MicroStrategyFact]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.max_length = max_length + self.attributes.micro_strategy_facts = micro_strategy_facts @property - def is_nullable(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_nullable + def micro_strategy_reports(self) -> Optional[list[MicroStrategyReport]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_reports + ) - @is_nullable.setter - def is_nullable(self, is_nullable: Optional[bool]): + @micro_strategy_reports.setter + def micro_strategy_reports( + self, micro_strategy_reports: Optional[list[MicroStrategyReport]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.is_nullable = is_nullable + self.attributes.micro_strategy_reports = micro_strategy_reports @property - def precision(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.precision + def micro_strategy_cubes(self) -> Optional[list[MicroStrategyCube]]: + return None if self.attributes is None else self.attributes.micro_strategy_cubes - @precision.setter - def precision(self, precision: Optional[int]): + @micro_strategy_cubes.setter + def micro_strategy_cubes( + self, micro_strategy_cubes: Optional[list[MicroStrategyCube]] + ): if self.attributes is None: self.attributes = self.Attributes() - 
self.attributes.precision = precision + self.attributes.micro_strategy_cubes = micro_strategy_cubes @property - def numeric_scale(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.numeric_scale + def micro_strategy_metric_children(self) -> Optional[list[MicroStrategyMetric]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_metric_children + ) - @numeric_scale.setter - def numeric_scale(self, numeric_scale: Optional[float]): + @micro_strategy_metric_children.setter + def micro_strategy_metric_children( + self, micro_strategy_metric_children: Optional[list[MicroStrategyMetric]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.numeric_scale = numeric_scale + self.attributes.micro_strategy_metric_children = micro_strategy_metric_children @property - def is_unique(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_unique + def micro_strategy_project(self) -> Optional[MicroStrategyProject]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_project + ) - @is_unique.setter - def is_unique(self, is_unique: Optional[bool]): + @micro_strategy_project.setter + def micro_strategy_project( + self, micro_strategy_project: Optional[MicroStrategyProject] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.is_unique = is_unique + self.attributes.micro_strategy_project = micro_strategy_project @property - def picklist_values(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.picklist_values + def micro_strategy_attributes(self) -> Optional[list[MicroStrategyAttribute]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_attributes + ) - @picklist_values.setter - def picklist_values(self, picklist_values: Optional[set[str]]): + @micro_strategy_attributes.setter + def micro_strategy_attributes( + self, micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.picklist_values = picklist_values + self.attributes.micro_strategy_attributes = micro_strategy_attributes + + class Attributes(MicroStrategy.Attributes): + micro_strategy_metric_expression: Optional[str] = Field( + None, description="", alias="microStrategyMetricExpression" + ) + micro_strategy_attribute_qualified_names: Optional[set[str]] = Field( + None, description="", alias="microStrategyAttributeQualifiedNames" + ) + micro_strategy_attribute_names: Optional[set[str]] = Field( + None, description="", alias="microStrategyAttributeNames" + ) + micro_strategy_fact_qualified_names: Optional[set[str]] = Field( + None, description="", alias="microStrategyFactQualifiedNames" + ) + micro_strategy_fact_names: Optional[set[str]] = Field( + None, description="", alias="microStrategyFactNames" + ) + micro_strategy_metric_parent_qualified_names: Optional[set[str]] = Field( + None, description="", alias="microStrategyMetricParentQualifiedNames" + ) + micro_strategy_metric_parent_names: Optional[set[str]] = Field( + None, description="", alias="microStrategyMetricParentNames" + ) + micro_strategy_metric_parents: Optional[list[MicroStrategyMetric]] = Field( + None, description="", alias="microStrategyMetricParents" + ) # relationship + micro_strategy_facts: Optional[list[MicroStrategyFact]] = Field( + None, description="", alias="microStrategyFacts" + ) # relationship + 
micro_strategy_reports: Optional[list[MicroStrategyReport]] = Field( + None, description="", alias="microStrategyReports" + ) # relationship + micro_strategy_cubes: Optional[list[MicroStrategyCube]] = Field( + None, description="", alias="microStrategyCubes" + ) # relationship + micro_strategy_metric_children: Optional[list[MicroStrategyMetric]] = Field( + None, description="", alias="microStrategyMetricChildren" + ) # relationship + micro_strategy_project: Optional[MicroStrategyProject] = Field( + None, description="", alias="microStrategyProject" + ) # relationship + micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] = Field( + None, description="", alias="microStrategyAttributes" + ) # relationship + + attributes: "MicroStrategyMetric.Attributes" = Field( + default_factory=lambda: MicroStrategyMetric.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class MicroStrategyCube(MicroStrategy): + """Description""" + + type_name: str = Field("MicroStrategyCube", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MicroStrategyCube": + raise ValueError("must be MicroStrategyCube") + return v + + def __setattr__(self, name, value): + if name in MicroStrategyCube._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MICRO_STRATEGY_CUBE_TYPE: ClassVar[KeywordField] = KeywordField( + "microStrategyCubeType", "microStrategyCubeType" + ) + """ + Type of cube, for example: OLAP or MTDI. + """ + MICRO_STRATEGY_CUBE_QUERY: ClassVar[KeywordField] = KeywordField( + "microStrategyCubeQuery", "microStrategyCubeQuery" + ) + """ + Query used to create the cube. 
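
Because MicroStrategyMetric also relates to other metrics of its own type through microStrategyMetricParents and microStrategyMetricChildren, a metric hierarchy can be expressed with plain references; the GUID below is invented:

from pyatlan.model.assets import MicroStrategyMetric

child = MicroStrategyMetric.create_for_modification(
    qualified_name="default/microstrategy/1700000000/project/metric/emea",  # invented
    name="Net revenue (EMEA)",
)
child.micro_strategy_metric_parents = [
    MicroStrategyMetric.ref_by_guid("11111111-2222-3333-4444-555555555555")
]
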
+ """ + + MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( + "microStrategyMetrics" + ) + """ + TBC + """ + MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( + "microStrategyProject" + ) + """ + TBC + """ + MICRO_STRATEGY_ATTRIBUTES: ClassVar[RelationField] = RelationField( + "microStrategyAttributes" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "micro_strategy_cube_type", + "micro_strategy_cube_query", + "micro_strategy_metrics", + "micro_strategy_project", + "micro_strategy_attributes", + ] @property - def is_polymorphic_foreign_key(self) -> Optional[bool]: + def micro_strategy_cube_type(self) -> Optional[str]: return ( None if self.attributes is None - else self.attributes.is_polymorphic_foreign_key + else self.attributes.micro_strategy_cube_type ) - @is_polymorphic_foreign_key.setter - def is_polymorphic_foreign_key(self, is_polymorphic_foreign_key: Optional[bool]): + @micro_strategy_cube_type.setter + def micro_strategy_cube_type(self, micro_strategy_cube_type: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.is_polymorphic_foreign_key = is_polymorphic_foreign_key + self.attributes.micro_strategy_cube_type = micro_strategy_cube_type @property - def default_value_formula(self) -> Optional[str]: + def micro_strategy_cube_query(self) -> Optional[str]: return ( - None if self.attributes is None else self.attributes.default_value_formula + None + if self.attributes is None + else self.attributes.micro_strategy_cube_query ) - @default_value_formula.setter - def default_value_formula(self, default_value_formula: Optional[str]): + @micro_strategy_cube_query.setter + def micro_strategy_cube_query(self, micro_strategy_cube_query: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.default_value_formula = default_value_formula + self.attributes.micro_strategy_cube_query = micro_strategy_cube_query @property - def lookup_objects(self) -> Optional[list[SalesforceObject]]: - return None if self.attributes is None else self.attributes.lookup_objects + def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_metrics + ) - @lookup_objects.setter - def lookup_objects(self, lookup_objects: Optional[list[SalesforceObject]]): + @micro_strategy_metrics.setter + def micro_strategy_metrics( + self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.lookup_objects = lookup_objects + self.attributes.micro_strategy_metrics = micro_strategy_metrics @property - def object(self) -> Optional[SalesforceObject]: - return None if self.attributes is None else self.attributes.object + def micro_strategy_project(self) -> Optional[MicroStrategyProject]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_project + ) - @object.setter - def object(self, object: Optional[SalesforceObject]): + @micro_strategy_project.setter + def micro_strategy_project( + self, micro_strategy_project: Optional[MicroStrategyProject] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.object = object + self.attributes.micro_strategy_project = micro_strategy_project - class Attributes(Salesforce.Attributes): - data_type: Optional[str] = Field(None, description="", alias="dataType") - object_qualified_name: Optional[str] = Field( - None, 
description="", alias="objectQualifiedName" + @property + def micro_strategy_attributes(self) -> Optional[list[MicroStrategyAttribute]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_attributes ) - order: Optional[int] = Field(None, description="", alias="order") - inline_help_text: Optional[str] = Field( - None, description="", alias="inlineHelpText" + + @micro_strategy_attributes.setter + def micro_strategy_attributes( + self, micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_attributes = micro_strategy_attributes + + class Attributes(MicroStrategy.Attributes): + micro_strategy_cube_type: Optional[str] = Field( + None, description="", alias="microStrategyCubeType" ) - is_calculated: Optional[bool] = Field( - None, description="", alias="isCalculated" + micro_strategy_cube_query: Optional[str] = Field( + None, description="", alias="microStrategyCubeQuery" ) - formula: Optional[str] = Field(None, description="", alias="formula") - is_case_sensitive: Optional[bool] = Field( - None, description="", alias="isCaseSensitive" + micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( + None, description="", alias="microStrategyMetrics" + ) # relationship + micro_strategy_project: Optional[MicroStrategyProject] = Field( + None, description="", alias="microStrategyProject" + ) # relationship + micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] = Field( + None, description="", alias="microStrategyAttributes" + ) # relationship + + attributes: "MicroStrategyCube.Attributes" = Field( + default_factory=lambda: MicroStrategyCube.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class MicroStrategyDossier(MicroStrategy): + """Description""" + + type_name: str = Field("MicroStrategyDossier", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MicroStrategyDossier": + raise ValueError("must be MicroStrategyDossier") + return v + + def __setattr__(self, name, value): + if name in MicroStrategyDossier._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MICRO_STRATEGY_DOSSIER_CHAPTER_NAMES: ClassVar[KeywordField] = KeywordField( + "microStrategyDossierChapterNames", "microStrategyDossierChapterNames" + ) + """ + List of chapter names in this dossier. 
+ """ + + MICRO_STRATEGY_VISUALIZATIONS: ClassVar[RelationField] = RelationField( + "microStrategyVisualizations" + ) + """ + TBC + """ + MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( + "microStrategyProject" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "micro_strategy_dossier_chapter_names", + "micro_strategy_visualizations", + "micro_strategy_project", + ] + + @property + def micro_strategy_dossier_chapter_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_dossier_chapter_names ) - is_encrypted: Optional[bool] = Field(None, description="", alias="isEncrypted") - max_length: Optional[int] = Field(None, description="", alias="maxLength") - is_nullable: Optional[bool] = Field(None, description="", alias="isNullable") - precision: Optional[int] = Field(None, description="", alias="precision") - numeric_scale: Optional[float] = Field( - None, description="", alias="numericScale" + + @micro_strategy_dossier_chapter_names.setter + def micro_strategy_dossier_chapter_names( + self, micro_strategy_dossier_chapter_names: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_dossier_chapter_names = ( + micro_strategy_dossier_chapter_names ) - is_unique: Optional[bool] = Field(None, description="", alias="isUnique") - picklist_values: Optional[set[str]] = Field( - None, description="", alias="picklistValues" + + @property + def micro_strategy_visualizations( + self, + ) -> Optional[list[MicroStrategyVisualization]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_visualizations ) - is_polymorphic_foreign_key: Optional[bool] = Field( - None, description="", alias="isPolymorphicForeignKey" + + @micro_strategy_visualizations.setter + def micro_strategy_visualizations( + self, micro_strategy_visualizations: Optional[list[MicroStrategyVisualization]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_visualizations = micro_strategy_visualizations + + @property + def micro_strategy_project(self) -> Optional[MicroStrategyProject]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_project ) - default_value_formula: Optional[str] = Field( - None, description="", alias="defaultValueFormula" + + @micro_strategy_project.setter + def micro_strategy_project( + self, micro_strategy_project: Optional[MicroStrategyProject] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_project = micro_strategy_project + + class Attributes(MicroStrategy.Attributes): + micro_strategy_dossier_chapter_names: Optional[set[str]] = Field( + None, description="", alias="microStrategyDossierChapterNames" ) - lookup_objects: Optional[list[SalesforceObject]] = Field( - None, description="", alias="lookupObjects" + micro_strategy_visualizations: Optional[ + list[MicroStrategyVisualization] + ] = Field( + None, description="", alias="microStrategyVisualizations" ) # relationship - object: Optional[SalesforceObject] = Field( - None, description="", alias="object" + micro_strategy_project: Optional[MicroStrategyProject] = Field( + None, description="", alias="microStrategyProject" ) # relationship - attributes: "SalesforceField.Attributes" = Field( - default_factory=lambda: SalesforceField.Attributes(), + attributes: "MicroStrategyDossier.Attributes" = Field( + 
default_factory=lambda: MicroStrategyDossier.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class SalesforceOrganization(Salesforce): +class MicroStrategyFact(MicroStrategy): """Description""" - type_name: str = Field("SalesforceOrganization", allow_mutation=False) + type_name: str = Field("MicroStrategyFact", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "SalesforceOrganization": - raise ValueError("must be SalesforceOrganization") + if v != "MicroStrategyFact": + raise ValueError("must be MicroStrategyFact") return v def __setattr__(self, name, value): - if name in SalesforceOrganization._convenience_properties: + if name in MicroStrategyFact._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - SOURCE_ID: ClassVar[KeywordField] = KeywordField("sourceId", "sourceId") + MICRO_STRATEGY_FACT_EXPRESSIONS: ClassVar[KeywordField] = KeywordField( + "microStrategyFactExpressions", "microStrategyFactExpressions" + ) """ - sourceId is the Id of the organization entity on salesforce + List of expressions for this fact. """ - REPORTS: ClassVar[RelationField] = RelationField("reports") - """ - TBC - """ - OBJECTS: ClassVar[RelationField] = RelationField("objects") + MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( + "microStrategyMetrics" + ) """ TBC """ - DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") + MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( + "microStrategyProject" + ) """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "source_id", - "reports", - "objects", - "dashboards", + "micro_strategy_fact_expressions", + "micro_strategy_metrics", + "micro_strategy_project", ] @property - def source_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.source_id + def micro_strategy_fact_expressions(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_fact_expressions + ) - @source_id.setter - def source_id(self, source_id: Optional[str]): + @micro_strategy_fact_expressions.setter + def micro_strategy_fact_expressions( + self, micro_strategy_fact_expressions: Optional[set[str]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_id = source_id + self.attributes.micro_strategy_fact_expressions = ( + micro_strategy_fact_expressions + ) @property - def reports(self) -> Optional[list[SalesforceReport]]: - return None if self.attributes is None else self.attributes.reports + def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_metrics + ) - @reports.setter - def reports(self, reports: Optional[list[SalesforceReport]]): + @micro_strategy_metrics.setter + def micro_strategy_metrics( + self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.reports = reports + self.attributes.micro_strategy_metrics = micro_strategy_metrics @property - def objects(self) -> Optional[list[SalesforceObject]]: - return None if self.attributes is None else self.attributes.objects + def micro_strategy_project(self) -> Optional[MicroStrategyProject]: + return ( + None if self.attributes is None else 
self.attributes.micro_strategy_project + ) - @objects.setter - def objects(self, objects: Optional[list[SalesforceObject]]): + @micro_strategy_project.setter + def micro_strategy_project( + self, micro_strategy_project: Optional[MicroStrategyProject] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.objects = objects + self.attributes.micro_strategy_project = micro_strategy_project + + class Attributes(MicroStrategy.Attributes): + micro_strategy_fact_expressions: Optional[set[str]] = Field( + None, description="", alias="microStrategyFactExpressions" + ) + micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( + None, description="", alias="microStrategyMetrics" + ) # relationship + micro_strategy_project: Optional[MicroStrategyProject] = Field( + None, description="", alias="microStrategyProject" + ) # relationship + + attributes: "MicroStrategyFact.Attributes" = Field( + default_factory=lambda: MicroStrategyFact.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class MicroStrategyDocument(MicroStrategy): + """Description""" + + type_name: str = Field("MicroStrategyDocument", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MicroStrategyDocument": + raise ValueError("must be MicroStrategyDocument") + return v + + def __setattr__(self, name, value): + if name in MicroStrategyDocument._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( + "microStrategyProject" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "micro_strategy_project", + ] @property - def dashboards(self) -> Optional[list[SalesforceDashboard]]: - return None if self.attributes is None else self.attributes.dashboards + def micro_strategy_project(self) -> Optional[MicroStrategyProject]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_project + ) - @dashboards.setter - def dashboards(self, dashboards: Optional[list[SalesforceDashboard]]): + @micro_strategy_project.setter + def micro_strategy_project( + self, micro_strategy_project: Optional[MicroStrategyProject] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dashboards = dashboards + self.attributes.micro_strategy_project = micro_strategy_project - class Attributes(Salesforce.Attributes): - source_id: Optional[str] = Field(None, description="", alias="sourceId") - reports: Optional[list[SalesforceReport]] = Field( - None, description="", alias="reports" - ) # relationship - objects: Optional[list[SalesforceObject]] = Field( - None, description="", alias="objects" - ) # relationship - dashboards: Optional[list[SalesforceDashboard]] = Field( - None, description="", alias="dashboards" + class Attributes(MicroStrategy.Attributes): + micro_strategy_project: Optional[MicroStrategyProject] = Field( + None, description="", alias="microStrategyProject" ) # relationship - attributes: "SalesforceOrganization.Attributes" = Field( - default_factory=lambda: SalesforceOrganization.Attributes(), + attributes: "MicroStrategyDocument.Attributes" = Field( + default_factory=lambda: MicroStrategyDocument.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class SalesforceDashboard(Salesforce): +class MicroStrategyAttribute(MicroStrategy): """Description""" - type_name: str = Field("SalesforceDashboard", allow_mutation=False) + type_name: str = Field("MicroStrategyAttribute", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "SalesforceDashboard": - raise ValueError("must be SalesforceDashboard") + if v != "MicroStrategyAttribute": + raise ValueError("must be MicroStrategyAttribute") return v def __setattr__(self, name, value): - if name in SalesforceDashboard._convenience_properties: + if name in MicroStrategyAttribute._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - SOURCE_ID: ClassVar[KeywordField] = KeywordField("sourceId", "sourceId") + MICRO_STRATEGY_ATTRIBUTE_FORMS: ClassVar[KeywordField] = KeywordField( + "microStrategyAttributeForms", "microStrategyAttributeForms" + ) """ - sourceId is the Id of the dashboard entity on salesforce + JSON string specifying the attribute's name, description, displayFormat, etc. """ - DASHBOARD_TYPE: ClassVar[KeywordField] = KeywordField( - "dashboardType", "dashboardType" + + MICRO_STRATEGY_REPORTS: ClassVar[RelationField] = RelationField( + "microStrategyReports" ) """ - dashboardType is the type of dashboard in salesforce + TBC """ - REPORT_COUNT: ClassVar[NumericField] = NumericField("reportCount", "reportCount") + MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( + "microStrategyMetrics" + ) """ - reportCount is the number of reports linked to the dashboard entity on salesforce + TBC """ - - REPORTS: ClassVar[RelationField] = RelationField("reports") + MICRO_STRATEGY_CUBES: ClassVar[RelationField] = RelationField("microStrategyCubes") """ TBC """ - ORGANIZATION: ClassVar[RelationField] = RelationField("organization") + MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( + "microStrategyProject" + ) """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "source_id", - "dashboard_type", - "report_count", - "reports", - "organization", + "micro_strategy_attribute_forms", + "micro_strategy_reports", + "micro_strategy_metrics", + "micro_strategy_cubes", + "micro_strategy_project", ] @property - def source_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.source_id + def micro_strategy_attribute_forms(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_attribute_forms + ) - @source_id.setter - def source_id(self, source_id: Optional[str]): + @micro_strategy_attribute_forms.setter + def micro_strategy_attribute_forms( + self, micro_strategy_attribute_forms: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_id = source_id + self.attributes.micro_strategy_attribute_forms = micro_strategy_attribute_forms @property - def dashboard_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dashboard_type + def micro_strategy_reports(self) -> Optional[list[MicroStrategyReport]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_reports + ) - @dashboard_type.setter - def dashboard_type(self, dashboard_type: Optional[str]): + @micro_strategy_reports.setter + def micro_strategy_reports( + self, micro_strategy_reports: Optional[list[MicroStrategyReport]] + ): if 
self.attributes is None: self.attributes = self.Attributes() - self.attributes.dashboard_type = dashboard_type + self.attributes.micro_strategy_reports = micro_strategy_reports @property - def report_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.report_count + def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_metrics + ) - @report_count.setter - def report_count(self, report_count: Optional[int]): + @micro_strategy_metrics.setter + def micro_strategy_metrics( + self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.report_count = report_count + self.attributes.micro_strategy_metrics = micro_strategy_metrics @property - def reports(self) -> Optional[list[SalesforceReport]]: - return None if self.attributes is None else self.attributes.reports + def micro_strategy_cubes(self) -> Optional[list[MicroStrategyCube]]: + return None if self.attributes is None else self.attributes.micro_strategy_cubes - @reports.setter - def reports(self, reports: Optional[list[SalesforceReport]]): + @micro_strategy_cubes.setter + def micro_strategy_cubes( + self, micro_strategy_cubes: Optional[list[MicroStrategyCube]] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.reports = reports + self.attributes.micro_strategy_cubes = micro_strategy_cubes @property - def organization(self) -> Optional[SalesforceOrganization]: - return None if self.attributes is None else self.attributes.organization + def micro_strategy_project(self) -> Optional[MicroStrategyProject]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_project + ) - @organization.setter - def organization(self, organization: Optional[SalesforceOrganization]): + @micro_strategy_project.setter + def micro_strategy_project( + self, micro_strategy_project: Optional[MicroStrategyProject] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.organization = organization + self.attributes.micro_strategy_project = micro_strategy_project - class Attributes(Salesforce.Attributes): - source_id: Optional[str] = Field(None, description="", alias="sourceId") - dashboard_type: Optional[str] = Field( - None, description="", alias="dashboardType" + class Attributes(MicroStrategy.Attributes): + micro_strategy_attribute_forms: Optional[str] = Field( + None, description="", alias="microStrategyAttributeForms" ) - report_count: Optional[int] = Field(None, description="", alias="reportCount") - reports: Optional[list[SalesforceReport]] = Field( - None, description="", alias="reports" + micro_strategy_reports: Optional[list[MicroStrategyReport]] = Field( + None, description="", alias="microStrategyReports" + ) # relationship + micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( + None, description="", alias="microStrategyMetrics" + ) # relationship + micro_strategy_cubes: Optional[list[MicroStrategyCube]] = Field( + None, description="", alias="microStrategyCubes" ) # relationship - organization: Optional[SalesforceOrganization] = Field( - None, description="", alias="organization" + micro_strategy_project: Optional[MicroStrategyProject] = Field( + None, description="", alias="microStrategyProject" ) # relationship - attributes: "SalesforceDashboard.Attributes" = Field( - default_factory=lambda: 
SalesforceDashboard.Attributes(), + attributes: "MicroStrategyAttribute.Attributes" = Field( + default_factory=lambda: MicroStrategyAttribute.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class SalesforceReport(Salesforce): +class MicroStrategyVisualization(MicroStrategy): """Description""" - type_name: str = Field("SalesforceReport", allow_mutation=False) + type_name: str = Field("MicroStrategyVisualization", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "SalesforceReport": - raise ValueError("must be SalesforceReport") + if v != "MicroStrategyVisualization": + raise ValueError("must be MicroStrategyVisualization") return v def __setattr__(self, name, value): - if name in SalesforceReport._convenience_properties: + if name in MicroStrategyVisualization._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - SOURCE_ID: ClassVar[KeywordField] = KeywordField("sourceId", "sourceId") + MICRO_STRATEGY_VISUALIZATION_TYPE: ClassVar[KeywordField] = KeywordField( + "microStrategyVisualizationType", "microStrategyVisualizationType" + ) """ - sourceId is the Id of the report entity on salesforce + Type of visualization. """ - REPORT_TYPE: ClassVar[KeywordField] = KeywordField("reportType", "reportType") + MICRO_STRATEGY_DOSSIER_QUALIFIED_NAME: ClassVar[ + KeywordTextField + ] = KeywordTextField( + "microStrategyDossierQualifiedName", + "microStrategyDossierQualifiedName", + "microStrategyDossierQualifiedName.text", + ) """ - reportType is the type of report in salesforce + Unique name of the dossier in which this visualization exists. """ - DETAIL_COLUMNS: ClassVar[KeywordField] = KeywordField( - "detailColumns", "detailColumns" + MICRO_STRATEGY_DOSSIER_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "microStrategyDossierName", + "microStrategyDossierName.keyword", + "microStrategyDossierName", ) """ - detailColumns is a list of column names on the report + Simple name of the dossier in which this visualization exists. 
""" - DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") + MICRO_STRATEGY_DOSSIER: ClassVar[RelationField] = RelationField( + "microStrategyDossier" + ) """ TBC """ - ORGANIZATION: ClassVar[RelationField] = RelationField("organization") + MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( + "microStrategyProject" + ) """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "source_id", - "report_type", - "detail_columns", - "dashboards", - "organization", + "micro_strategy_visualization_type", + "micro_strategy_dossier_qualified_name", + "micro_strategy_dossier_name", + "micro_strategy_dossier", + "micro_strategy_project", ] @property - def source_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.source_id + def micro_strategy_visualization_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_visualization_type + ) - @source_id.setter - def source_id(self, source_id: Optional[str]): + @micro_strategy_visualization_type.setter + def micro_strategy_visualization_type( + self, micro_strategy_visualization_type: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.source_id = source_id + self.attributes.micro_strategy_visualization_type = ( + micro_strategy_visualization_type + ) @property - def report_type(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.report_type + def micro_strategy_dossier_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_dossier_qualified_name + ) - @report_type.setter - def report_type(self, report_type: Optional[dict[str, str]]): + @micro_strategy_dossier_qualified_name.setter + def micro_strategy_dossier_qualified_name( + self, micro_strategy_dossier_qualified_name: Optional[str] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.report_type = report_type + self.attributes.micro_strategy_dossier_qualified_name = ( + micro_strategy_dossier_qualified_name + ) @property - def detail_columns(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.detail_columns + def micro_strategy_dossier_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_dossier_name + ) - @detail_columns.setter - def detail_columns(self, detail_columns: Optional[set[str]]): + @micro_strategy_dossier_name.setter + def micro_strategy_dossier_name(self, micro_strategy_dossier_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.detail_columns = detail_columns + self.attributes.micro_strategy_dossier_name = micro_strategy_dossier_name @property - def dashboards(self) -> Optional[list[SalesforceDashboard]]: - return None if self.attributes is None else self.attributes.dashboards + def micro_strategy_dossier(self) -> Optional[MicroStrategyDossier]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_dossier + ) - @dashboards.setter - def dashboards(self, dashboards: Optional[list[SalesforceDashboard]]): + @micro_strategy_dossier.setter + def micro_strategy_dossier( + self, micro_strategy_dossier: Optional[MicroStrategyDossier] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.dashboards = dashboards + self.attributes.micro_strategy_dossier = 
micro_strategy_dossier @property - def organization(self) -> Optional[SalesforceOrganization]: - return None if self.attributes is None else self.attributes.organization + def micro_strategy_project(self) -> Optional[MicroStrategyProject]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_project + ) - @organization.setter - def organization(self, organization: Optional[SalesforceOrganization]): + @micro_strategy_project.setter + def micro_strategy_project( + self, micro_strategy_project: Optional[MicroStrategyProject] + ): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.organization = organization + self.attributes.micro_strategy_project = micro_strategy_project - class Attributes(Salesforce.Attributes): - source_id: Optional[str] = Field(None, description="", alias="sourceId") - report_type: Optional[dict[str, str]] = Field( - None, description="", alias="reportType" + class Attributes(MicroStrategy.Attributes): + micro_strategy_visualization_type: Optional[str] = Field( + None, description="", alias="microStrategyVisualizationType" ) - detail_columns: Optional[set[str]] = Field( - None, description="", alias="detailColumns" + micro_strategy_dossier_qualified_name: Optional[str] = Field( + None, description="", alias="microStrategyDossierQualifiedName" ) - dashboards: Optional[list[SalesforceDashboard]] = Field( - None, description="", alias="dashboards" + micro_strategy_dossier_name: Optional[str] = Field( + None, description="", alias="microStrategyDossierName" + ) + micro_strategy_dossier: Optional[MicroStrategyDossier] = Field( + None, description="", alias="microStrategyDossier" ) # relationship - organization: Optional[SalesforceOrganization] = Field( - None, description="", alias="organization" + micro_strategy_project: Optional[MicroStrategyProject] = Field( + None, description="", alias="microStrategyProject" ) # relationship - attributes: "SalesforceReport.Attributes" = Field( - default_factory=lambda: SalesforceReport.Attributes(), + attributes: "MicroStrategyVisualization.Attributes" = Field( + default_factory=lambda: MicroStrategyVisualization.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -SalesforceObject.Attributes.update_forward_refs() +MicroStrategyReport.Attributes.update_forward_refs() + + +MicroStrategyProject.Attributes.update_forward_refs() + + +MicroStrategyMetric.Attributes.update_forward_refs() + + +MicroStrategyCube.Attributes.update_forward_refs() + + +MicroStrategyDossier.Attributes.update_forward_refs() -SalesforceField.Attributes.update_forward_refs() +MicroStrategyFact.Attributes.update_forward_refs() -SalesforceOrganization.Attributes.update_forward_refs() +MicroStrategyDocument.Attributes.update_forward_refs() -SalesforceDashboard.Attributes.update_forward_refs() +MicroStrategyAttribute.Attributes.update_forward_refs() -SalesforceReport.Attributes.update_forward_refs() +MicroStrategyVisualization.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset81.py b/pyatlan/model/assets/asset81.py new file mode 100644 index 000000000..433b22264 --- /dev/null +++ b/pyatlan/model/assets/asset81.py @@ -0,0 +1,649 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + NumericField, + RelationField, + TextField, +) + +from .asset53 import Qlik + + +class QlikApp(Qlik): + """Description""" + + type_name: str = Field("QlikApp", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QlikApp": + raise ValueError("must be QlikApp") + return v + + def __setattr__(self, name, value): + if name in QlikApp._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QLIK_HAS_SECTION_ACCESS: ClassVar[BooleanField] = BooleanField( + "qlikHasSectionAccess", "qlikHasSectionAccess" + ) + """ + Whether section access or data masking is enabled on the source (true) or not (false). + """ + QLIK_ORIGIN_APP_ID: ClassVar[KeywordField] = KeywordField( + "qlikOriginAppId", "qlikOriginAppId" + ) + """ + Value of originAppId for this app. + """ + QLIK_IS_ENCRYPTED: ClassVar[BooleanField] = BooleanField( + "qlikIsEncrypted", "qlikIsEncrypted" + ) + """ + Whether this app is encrypted (true) or not (false). + """ + QLIK_IS_DIRECT_QUERY_MODE: ClassVar[BooleanField] = BooleanField( + "qlikIsDirectQueryMode", "qlikIsDirectQueryMode" + ) + """ + Whether this app is in direct query mode (true) or not (false). + """ + QLIK_APP_STATIC_BYTE_SIZE: ClassVar[NumericField] = NumericField( + "qlikAppStaticByteSize", "qlikAppStaticByteSize" + ) + """ + Static space used by this app, in bytes. + """ + + QLIK_SPACE: ClassVar[RelationField] = RelationField("qlikSpace") + """ + TBC + """ + QLIK_SHEETS: ClassVar[RelationField] = RelationField("qlikSheets") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "qlik_has_section_access", + "qlik_origin_app_id", + "qlik_is_encrypted", + "qlik_is_direct_query_mode", + "qlik_app_static_byte_size", + "qlik_space", + "qlik_sheets", + ] + + @property + def qlik_has_section_access(self) -> Optional[bool]: + return ( + None if self.attributes is None else self.attributes.qlik_has_section_access + ) + + @qlik_has_section_access.setter + def qlik_has_section_access(self, qlik_has_section_access: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_has_section_access = qlik_has_section_access + + @property + def qlik_origin_app_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_origin_app_id + + @qlik_origin_app_id.setter + def qlik_origin_app_id(self, qlik_origin_app_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_origin_app_id = qlik_origin_app_id + + @property + def qlik_is_encrypted(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.qlik_is_encrypted + + @qlik_is_encrypted.setter + def qlik_is_encrypted(self, qlik_is_encrypted: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_is_encrypted = qlik_is_encrypted + + @property + def qlik_is_direct_query_mode(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.qlik_is_direct_query_mode + ) + + @qlik_is_direct_query_mode.setter + def qlik_is_direct_query_mode(self, qlik_is_direct_query_mode: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.qlik_is_direct_query_mode = qlik_is_direct_query_mode + + @property + def qlik_app_static_byte_size(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.qlik_app_static_byte_size + ) + + @qlik_app_static_byte_size.setter + def qlik_app_static_byte_size(self, qlik_app_static_byte_size: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_app_static_byte_size = qlik_app_static_byte_size + + @property + def qlik_space(self) -> Optional[QlikSpace]: + return None if self.attributes is None else self.attributes.qlik_space + + @qlik_space.setter + def qlik_space(self, qlik_space: Optional[QlikSpace]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_space = qlik_space + + @property + def qlik_sheets(self) -> Optional[list[QlikSheet]]: + return None if self.attributes is None else self.attributes.qlik_sheets + + @qlik_sheets.setter + def qlik_sheets(self, qlik_sheets: Optional[list[QlikSheet]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_sheets = qlik_sheets + + class Attributes(Qlik.Attributes): + qlik_has_section_access: Optional[bool] = Field( + None, description="", alias="qlikHasSectionAccess" + ) + qlik_origin_app_id: Optional[str] = Field( + None, description="", alias="qlikOriginAppId" + ) + qlik_is_encrypted: Optional[bool] = Field( + None, description="", alias="qlikIsEncrypted" + ) + qlik_is_direct_query_mode: Optional[bool] = Field( + None, description="", alias="qlikIsDirectQueryMode" + ) + qlik_app_static_byte_size: Optional[int] = Field( + None, description="", alias="qlikAppStaticByteSize" + ) + qlik_space: Optional[QlikSpace] = Field( + None, description="", alias="qlikSpace" + ) # relationship + qlik_sheets: Optional[list[QlikSheet]] = Field( + None, description="", alias="qlikSheets" + ) # relationship + + attributes: "QlikApp.Attributes" = Field( + default_factory=lambda: QlikApp.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class QlikChart(Qlik): + """Description""" + + type_name: str = Field("QlikChart", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QlikChart": + raise ValueError("must be QlikChart") + return v + + def __setattr__(self, name, value): + if name in QlikChart._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QLIK_CHART_SUBTITLE: ClassVar[TextField] = TextField( + "qlikChartSubtitle", "qlikChartSubtitle" + ) + """ + Subtitle of this chart. + """ + QLIK_CHART_FOOTNOTE: ClassVar[TextField] = TextField( + "qlikChartFootnote", "qlikChartFootnote" + ) + """ + Footnote of this chart. + """ + QLIK_CHART_ORIENTATION: ClassVar[KeywordField] = KeywordField( + "qlikChartOrientation", "qlikChartOrientation" + ) + """ + Orientation of this chart. + """ + QLIK_CHART_TYPE: ClassVar[KeywordField] = KeywordField( + "qlikChartType", "qlikChartType" + ) + """ + Subtype of this chart, for example: bar, graph, pie, etc. 
+ """ + + QLIK_SHEET: ClassVar[RelationField] = RelationField("qlikSheet") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "qlik_chart_subtitle", + "qlik_chart_footnote", + "qlik_chart_orientation", + "qlik_chart_type", + "qlik_sheet", + ] + + @property + def qlik_chart_subtitle(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_chart_subtitle + + @qlik_chart_subtitle.setter + def qlik_chart_subtitle(self, qlik_chart_subtitle: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_chart_subtitle = qlik_chart_subtitle + + @property + def qlik_chart_footnote(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_chart_footnote + + @qlik_chart_footnote.setter + def qlik_chart_footnote(self, qlik_chart_footnote: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_chart_footnote = qlik_chart_footnote + + @property + def qlik_chart_orientation(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.qlik_chart_orientation + ) + + @qlik_chart_orientation.setter + def qlik_chart_orientation(self, qlik_chart_orientation: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_chart_orientation = qlik_chart_orientation + + @property + def qlik_chart_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_chart_type + + @qlik_chart_type.setter + def qlik_chart_type(self, qlik_chart_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_chart_type = qlik_chart_type + + @property + def qlik_sheet(self) -> Optional[QlikSheet]: + return None if self.attributes is None else self.attributes.qlik_sheet + + @qlik_sheet.setter + def qlik_sheet(self, qlik_sheet: Optional[QlikSheet]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_sheet = qlik_sheet + + class Attributes(Qlik.Attributes): + qlik_chart_subtitle: Optional[str] = Field( + None, description="", alias="qlikChartSubtitle" + ) + qlik_chart_footnote: Optional[str] = Field( + None, description="", alias="qlikChartFootnote" + ) + qlik_chart_orientation: Optional[str] = Field( + None, description="", alias="qlikChartOrientation" + ) + qlik_chart_type: Optional[str] = Field( + None, description="", alias="qlikChartType" + ) + qlik_sheet: Optional[QlikSheet] = Field( + None, description="", alias="qlikSheet" + ) # relationship + + attributes: "QlikChart.Attributes" = Field( + default_factory=lambda: QlikChart.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class QlikDataset(Qlik): + """Description""" + + type_name: str = Field("QlikDataset", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QlikDataset": + raise ValueError("must be QlikDataset") + return v + + def __setattr__(self, name, value): + if name in QlikDataset._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QLIK_DATASET_TECHNICAL_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "qlikDatasetTechnicalName", + "qlikDatasetTechnicalName.keyword", + "qlikDatasetTechnicalName", + ) + """ + Technical name of this asset. 
+ """ + QLIK_DATASET_TYPE: ClassVar[KeywordField] = KeywordField( + "qlikDatasetType", "qlikDatasetType" + ) + """ + Type of this data asset, for example: qix-df, snowflake, etc. + """ + QLIK_DATASET_URI: ClassVar[KeywordTextField] = KeywordTextField( + "qlikDatasetUri", "qlikDatasetUri", "qlikDatasetUri.text" + ) + """ + URI of this dataset. + """ + QLIK_DATASET_SUBTYPE: ClassVar[KeywordField] = KeywordField( + "qlikDatasetSubtype", "qlikDatasetSubtype" + ) + """ + Subtype this dataset asset. + """ + + QLIK_SPACE: ClassVar[RelationField] = RelationField("qlikSpace") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "qlik_dataset_technical_name", + "qlik_dataset_type", + "qlik_dataset_uri", + "qlik_dataset_subtype", + "qlik_space", + ] + + @property + def qlik_dataset_technical_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.qlik_dataset_technical_name + ) + + @qlik_dataset_technical_name.setter + def qlik_dataset_technical_name(self, qlik_dataset_technical_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_dataset_technical_name = qlik_dataset_technical_name + + @property + def qlik_dataset_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_dataset_type + + @qlik_dataset_type.setter + def qlik_dataset_type(self, qlik_dataset_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_dataset_type = qlik_dataset_type + + @property + def qlik_dataset_uri(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_dataset_uri + + @qlik_dataset_uri.setter + def qlik_dataset_uri(self, qlik_dataset_uri: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_dataset_uri = qlik_dataset_uri + + @property + def qlik_dataset_subtype(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_dataset_subtype + + @qlik_dataset_subtype.setter + def qlik_dataset_subtype(self, qlik_dataset_subtype: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_dataset_subtype = qlik_dataset_subtype + + @property + def qlik_space(self) -> Optional[QlikSpace]: + return None if self.attributes is None else self.attributes.qlik_space + + @qlik_space.setter + def qlik_space(self, qlik_space: Optional[QlikSpace]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_space = qlik_space + + class Attributes(Qlik.Attributes): + qlik_dataset_technical_name: Optional[str] = Field( + None, description="", alias="qlikDatasetTechnicalName" + ) + qlik_dataset_type: Optional[str] = Field( + None, description="", alias="qlikDatasetType" + ) + qlik_dataset_uri: Optional[str] = Field( + None, description="", alias="qlikDatasetUri" + ) + qlik_dataset_subtype: Optional[str] = Field( + None, description="", alias="qlikDatasetSubtype" + ) + qlik_space: Optional[QlikSpace] = Field( + None, description="", alias="qlikSpace" + ) # relationship + + attributes: "QlikDataset.Attributes" = Field( + default_factory=lambda: QlikDataset.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class QlikSheet(Qlik): + """Description""" + + type_name: str = Field("QlikSheet", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QlikSheet": + raise ValueError("must be QlikSheet") + return v + + def __setattr__(self, name, value): + if name in QlikSheet._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QLIK_SHEET_IS_APPROVED: ClassVar[BooleanField] = BooleanField( + "qlikSheetIsApproved", "qlikSheetIsApproved" + ) + """ + Whether this sheet is approved (true) or not (false). + """ + + QLIK_APP: ClassVar[RelationField] = RelationField("qlikApp") + """ + TBC + """ + QLIK_CHARTS: ClassVar[RelationField] = RelationField("qlikCharts") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "qlik_sheet_is_approved", + "qlik_app", + "qlik_charts", + ] + + @property + def qlik_sheet_is_approved(self) -> Optional[bool]: + return ( + None if self.attributes is None else self.attributes.qlik_sheet_is_approved + ) + + @qlik_sheet_is_approved.setter + def qlik_sheet_is_approved(self, qlik_sheet_is_approved: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_sheet_is_approved = qlik_sheet_is_approved + + @property + def qlik_app(self) -> Optional[QlikApp]: + return None if self.attributes is None else self.attributes.qlik_app + + @qlik_app.setter + def qlik_app(self, qlik_app: Optional[QlikApp]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_app = qlik_app + + @property + def qlik_charts(self) -> Optional[list[QlikChart]]: + return None if self.attributes is None else self.attributes.qlik_charts + + @qlik_charts.setter + def qlik_charts(self, qlik_charts: Optional[list[QlikChart]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_charts = qlik_charts + + class Attributes(Qlik.Attributes): + qlik_sheet_is_approved: Optional[bool] = Field( + None, description="", alias="qlikSheetIsApproved" + ) + qlik_app: Optional[QlikApp] = Field( + None, description="", alias="qlikApp" + ) # relationship + qlik_charts: Optional[list[QlikChart]] = Field( + None, description="", alias="qlikCharts" + ) # relationship + + attributes: "QlikSheet.Attributes" = Field( + default_factory=lambda: QlikSheet.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class QlikSpace(Qlik): + """Description""" + + type_name: str = Field("QlikSpace", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QlikSpace": + raise ValueError("must be QlikSpace") + return v + + def __setattr__(self, name, value): + if name in QlikSpace._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QLIK_SPACE_TYPE: ClassVar[KeywordField] = KeywordField( + "qlikSpaceType", "qlikSpaceType" + ) + """ + Type of this space, for example: Private, Shared, etc.
+ """ + + QLIK_DATASETS: ClassVar[RelationField] = RelationField("qlikDatasets") + """ + TBC + """ + QLIK_APPS: ClassVar[RelationField] = RelationField("qlikApps") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "qlik_space_type", + "qlik_datasets", + "qlik_apps", + ] + + @property + def qlik_space_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_space_type + + @qlik_space_type.setter + def qlik_space_type(self, qlik_space_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_space_type = qlik_space_type + + @property + def qlik_datasets(self) -> Optional[list[QlikDataset]]: + return None if self.attributes is None else self.attributes.qlik_datasets + + @qlik_datasets.setter + def qlik_datasets(self, qlik_datasets: Optional[list[QlikDataset]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_datasets = qlik_datasets + + @property + def qlik_apps(self) -> Optional[list[QlikApp]]: + return None if self.attributes is None else self.attributes.qlik_apps + + @qlik_apps.setter + def qlik_apps(self, qlik_apps: Optional[list[QlikApp]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_apps = qlik_apps + + class Attributes(Qlik.Attributes): + qlik_space_type: Optional[str] = Field( + None, description="", alias="qlikSpaceType" + ) + qlik_datasets: Optional[list[QlikDataset]] = Field( + None, description="", alias="qlikDatasets" + ) # relationship + qlik_apps: Optional[list[QlikApp]] = Field( + None, description="", alias="qlikApps" + ) # relationship + + attributes: "QlikSpace.Attributes" = Field( + default_factory=lambda: QlikSpace.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +QlikApp.Attributes.update_forward_refs() + + +QlikChart.Attributes.update_forward_refs() + + +QlikDataset.Attributes.update_forward_refs() + + +QlikSheet.Attributes.update_forward_refs() + + +QlikSpace.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset82.py b/pyatlan/model/assets/asset82.py index a77b949a1..0cecb27eb 100644 --- a/pyatlan/model/assets/asset82.py +++ b/pyatlan/model/assets/asset82.py @@ -4,7 +4,6 @@ from __future__ import annotations -from datetime import datetime from typing import ClassVar, Optional from pydantic import Field, validator @@ -18,1292 +17,865 @@ TextField, ) -from .asset00 import Database, Table +from .asset54 import Salesforce -class MongoDBCollection(Table): +class SalesforceObject(Salesforce): """Description""" - type_name: str = Field("MongoDBCollection", allow_mutation=False) + type_name: str = Field("SalesforceObject", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "MongoDBCollection": - raise ValueError("must be MongoDBCollection") + if v != "SalesforceObject": + raise ValueError("must be SalesforceObject") return v def __setattr__(self, name, value): - if name in MongoDBCollection._convenience_properties: + if name in SalesforceObject._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - MONGO_DB_COLLECTION_SUBTYPE: ClassVar[KeywordTextField] = KeywordTextField( - "mongoDBCollectionSubtype", - "mongoDBCollectionSubtype", - "mongoDBCollectionSubtype.text", - ) - """ - Subtype of a MongoDB collection (e.g. 
Capped, Time Series etc.) - """ - MONGO_DB_COLLECTION_IS_CAPPED: ClassVar[BooleanField] = BooleanField( - "mongoDBCollectionIsCapped", "mongoDBCollectionIsCapped" - ) - """ - If the collection is a capped collection - """ - MONGO_DB_COLLECTION_TIME_FIELD: ClassVar[KeywordField] = KeywordField( - "mongoDBCollectionTimeField", "mongoDBCollectionTimeField" - ) - """ - The name of the field which contains the date in each time series document - """ - MONGO_DB_COLLECTION_TIME_GRANULARITY: ClassVar[KeywordField] = KeywordField( - "mongoDBCollectionTimeGranularity", "mongoDBCollectionTimeGranularity" - ) - """ - Set the granularity to the value that is the closest match to the time span between consecutive incoming measurements - """ # noqa: E501 - MONGO_DB_COLLECTION_EXPIRE_AFTER_SECONDS: ClassVar[NumericField] = NumericField( - "mongoDBCollectionExpireAfterSeconds", "mongoDBCollectionExpireAfterSeconds" - ) - """ - Specifies the seconds after which documents in a time series collection or clustered collection expire - """ - MONGO_DB_COLLECTION_MAXIMUM_DOCUMENT_COUNT: ClassVar[NumericField] = NumericField( - "mongoDBCollectionMaximumDocumentCount", "mongoDBCollectionMaximumDocumentCount" - ) - """ - The maximum number of documents allowed in the capped collection - """ - MONGO_DB_COLLECTION_MAX_SIZE: ClassVar[NumericField] = NumericField( - "mongoDBCollectionMaxSize", "mongoDBCollectionMaxSize" - ) - """ - The maximum size allowed in the capped collection - """ - MONGO_DB_COLLECTION_NUM_ORPHAN_DOCS: ClassVar[NumericField] = NumericField( - "mongoDBCollectionNumOrphanDocs", "mongoDBCollectionNumOrphanDocs" - ) - """ - The number of orphaned documents in the collection - """ - MONGO_DB_COLLECTION_NUM_INDEXES: ClassVar[NumericField] = NumericField( - "mongoDBCollectionNumIndexes", "mongoDBCollectionNumIndexes" - ) - """ - The number of indexes on the collection - """ - MONGO_DB_COLLECTION_TOTAL_INDEX_SIZE: ClassVar[NumericField] = NumericField( - "mongoDBCollectionTotalIndexSize", "mongoDBCollectionTotalIndexSize" - ) - """ - The total size of all indexes - """ - MONGO_DB_COLLECTION_AVERAGE_OBJECT_SIZE: ClassVar[NumericField] = NumericField( - "mongoDBCollectionAverageObjectSize", "mongoDBCollectionAverageObjectSize" - ) + IS_CUSTOM: ClassVar[BooleanField] = BooleanField("isCustom", "isCustom") """ - The average size of an object in the collection + Whether this object is a custom object (true) or not (false). """ - MONGO_DB_COLLECTION_SCHEMA_DEFINITION: ClassVar[TextField] = TextField( - "mongoDBCollectionSchemaDefinition", "mongoDBCollectionSchemaDefinition" - ) - """ - Definition of the schema applicable for the collection. - """ - COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") - """ - TBC - """ - ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") + IS_MERGABLE: ClassVar[BooleanField] = BooleanField("isMergable", "isMergable") """ - TBC + Whether this object is mergable (true) or not (false). """ - SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") + IS_QUERYABLE: ClassVar[BooleanField] = BooleanField("isQueryable", "isQueryable") """ - TBC + Whether this object is queryable (true) or not (false). 
""" - ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") - """ - TBC - """ - IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") - """ - TBC - """ - IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( - "isQueryPreview", "isQueryPreview" - ) + FIELD_COUNT: ClassVar[NumericField] = NumericField("fieldCount", "fieldCount") """ - TBC + Number of fields in this object. """ - QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( - "queryPreviewConfig", "queryPreviewConfig" - ) + + LOOKUP_FIELDS: ClassVar[RelationField] = RelationField("lookupFields") """ TBC """ - EXTERNAL_LOCATION: ClassVar[KeywordField] = KeywordField( - "externalLocation", "externalLocation" - ) + ORGANIZATION: ClassVar[RelationField] = RelationField("organization") """ TBC """ - EXTERNAL_LOCATION_REGION: ClassVar[KeywordField] = KeywordField( - "externalLocationRegion", "externalLocationRegion" - ) + FIELDS: ClassVar[RelationField] = RelationField("fields") """ TBC """ - EXTERNAL_LOCATION_FORMAT: ClassVar[KeywordField] = KeywordField( - "externalLocationFormat", "externalLocationFormat" + + _convenience_properties: ClassVar[list[str]] = [ + "is_custom", + "is_mergable", + "is_queryable", + "field_count", + "lookup_fields", + "organization", + "fields", + ] + + @property + def is_custom(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_custom + + @is_custom.setter + def is_custom(self, is_custom: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_custom = is_custom + + @property + def is_mergable(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_mergable + + @is_mergable.setter + def is_mergable(self, is_mergable: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_mergable = is_mergable + + @property + def is_queryable(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_queryable + + @is_queryable.setter + def is_queryable(self, is_queryable: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_queryable = is_queryable + + @property + def field_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.field_count + + @field_count.setter + def field_count(self, field_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.field_count = field_count + + @property + def lookup_fields(self) -> Optional[list[SalesforceField]]: + return None if self.attributes is None else self.attributes.lookup_fields + + @lookup_fields.setter + def lookup_fields(self, lookup_fields: Optional[list[SalesforceField]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.lookup_fields = lookup_fields + + @property + def organization(self) -> Optional[SalesforceOrganization]: + return None if self.attributes is None else self.attributes.organization + + @organization.setter + def organization(self, organization: Optional[SalesforceOrganization]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.organization = organization + + @property + def fields(self) -> Optional[list[SalesforceField]]: + return None if self.attributes is None else self.attributes.fields + + @fields.setter + def fields(self, fields: Optional[list[SalesforceField]]): + if self.attributes is None: + 
self.attributes = self.Attributes() + self.attributes.fields = fields + + class Attributes(Salesforce.Attributes): + is_custom: Optional[bool] = Field(None, description="", alias="isCustom") + is_mergable: Optional[bool] = Field(None, description="", alias="isMergable") + is_queryable: Optional[bool] = Field(None, description="", alias="isQueryable") + field_count: Optional[int] = Field(None, description="", alias="fieldCount") + lookup_fields: Optional[list[SalesforceField]] = Field( + None, description="", alias="lookupFields" + ) # relationship + organization: Optional[SalesforceOrganization] = Field( + None, description="", alias="organization" + ) # relationship + fields: Optional[list[SalesforceField]] = Field( + None, description="", alias="fields" + ) # relationship + + attributes: "SalesforceObject.Attributes" = Field( + default_factory=lambda: SalesforceObject.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", ) - """ - TBC - """ - IS_PARTITIONED: ClassVar[BooleanField] = BooleanField( - "isPartitioned", "isPartitioned" + + +class SalesforceField(Salesforce): + """Description""" + + type_name: str = Field("SalesforceField", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SalesforceField": + raise ValueError("must be SalesforceField") + return v + + def __setattr__(self, name, value): + if name in SalesforceField._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DATA_TYPE: ClassVar[KeywordTextField] = KeywordTextField( + "dataType", "dataType", "dataType.text" ) """ - TBC + Data type of values in this field. """ - PARTITION_STRATEGY: ClassVar[KeywordField] = KeywordField( - "partitionStrategy", "partitionStrategy" + OBJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "objectQualifiedName", "objectQualifiedName" ) """ - TBC + Unique name of the object in which this field exists. """ - PARTITION_COUNT: ClassVar[NumericField] = NumericField( - "partitionCount", "partitionCount" - ) + ORDER: ClassVar[NumericField] = NumericField("order", "order") """ - TBC + Order (position) of this field within the object. """ - PARTITION_LIST: ClassVar[KeywordField] = KeywordField( - "partitionList", "partitionList" + INLINE_HELP_TEXT: ClassVar[TextField] = TextField( + "inlineHelpText", "inlineHelpText.text" ) """ - TBC + Help text for this field. """ - QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") + IS_CALCULATED: ClassVar[BooleanField] = BooleanField("isCalculated", "isCalculated") """ - TBC + Whether this field is calculated (true) or not (false). """ - QUERY_USER_COUNT: ClassVar[NumericField] = NumericField( - "queryUserCount", "queryUserCount" - ) + FORMULA: ClassVar[KeywordField] = KeywordField("formula", "formula") """ - TBC + Formula for this field, if it is a calculated field. """ - QUERY_USER_MAP: ClassVar[KeywordField] = KeywordField( - "queryUserMap", "queryUserMap" + IS_CASE_SENSITIVE: ClassVar[BooleanField] = BooleanField( + "isCaseSensitive", "isCaseSensitive" ) """ - TBC + Whether this field is case sensitive (true) or in-sensitive (false). 
""" - QUERY_COUNT_UPDATED_AT: ClassVar[NumericField] = NumericField( - "queryCountUpdatedAt", "queryCountUpdatedAt" - ) + IS_ENCRYPTED: ClassVar[BooleanField] = BooleanField("isEncrypted", "isEncrypted") """ - TBC + Whether this field is encrypted (true) or not (false). """ - DATABASE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "databaseName", "databaseName.keyword", "databaseName" - ) + MAX_LENGTH: ClassVar[NumericField] = NumericField("maxLength", "maxLength") """ - TBC + Maximum length of this field. """ - DATABASE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "databaseQualifiedName", "databaseQualifiedName" - ) + IS_NULLABLE: ClassVar[BooleanField] = BooleanField("isNullable", "isNullable") """ - TBC + Whether this field allows null values (true) or not (false). """ - SCHEMA_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "schemaName", "schemaName.keyword", "schemaName" - ) + PRECISION: ClassVar[NumericField] = NumericField("precision", "precision") """ - TBC + Total number of digits allowed """ - SCHEMA_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "schemaQualifiedName", "schemaQualifiedName" - ) + NUMERIC_SCALE: ClassVar[NumericField] = NumericField("numericScale", "numericScale") """ - TBC + Number of digits allowed to the right of the decimal point. """ - TABLE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "tableName", "tableName.keyword", "tableName" - ) + IS_UNIQUE: ClassVar[BooleanField] = BooleanField("isUnique", "isUnique") """ - TBC + Whether this field must have unique values (true) or not (false). """ - TABLE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "tableQualifiedName", "tableQualifiedName" + PICKLIST_VALUES: ClassVar[KeywordField] = KeywordField( + "picklistValues", "picklistValues" ) """ - TBC + List of values from which a user can pick while adding a record. """ - VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "viewName", "viewName.keyword", "viewName" + IS_POLYMORPHIC_FOREIGN_KEY: ClassVar[BooleanField] = BooleanField( + "isPolymorphicForeignKey", "isPolymorphicForeignKey" ) """ - TBC + Whether this field references a record of multiple objects (true) or not (false). """ - VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "viewQualifiedName", "viewQualifiedName" + DEFAULT_VALUE_FORMULA: ClassVar[KeywordField] = KeywordField( + "defaultValueFormula", "defaultValueFormula" ) """ - TBC - """ - IS_PROFILED: ClassVar[BooleanField] = BooleanField("isProfiled", "isProfiled") - """ - TBC + Formula for the default value for this field. 
""" - LAST_PROFILED_AT: ClassVar[NumericField] = NumericField( - "lastProfiledAt", "lastProfiledAt" - ) + + LOOKUP_OBJECTS: ClassVar[RelationField] = RelationField("lookupObjects") """ TBC """ - - MONGO_DB_DATABASE: ClassVar[RelationField] = RelationField("mongoDBDatabase") + OBJECT: ClassVar[RelationField] = RelationField("object") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "mongo_d_b_collection_subtype", - "mongo_d_b_collection_is_capped", - "mongo_d_b_collection_time_field", - "mongo_d_b_collection_time_granularity", - "mongo_d_b_collection_expire_after_seconds", - "mongo_d_b_collection_maximum_document_count", - "mongo_d_b_collection_max_size", - "mongo_d_b_collection_num_orphan_docs", - "mongo_d_b_collection_num_indexes", - "mongo_d_b_collection_total_index_size", - "mongo_d_b_collection_average_object_size", - "mongo_d_b_collection_schema_definition", - "column_count", - "row_count", - "size_bytes", - "alias", - "is_temporary", - "is_query_preview", - "query_preview_config", - "external_location", - "external_location_region", - "external_location_format", - "is_partitioned", - "partition_strategy", - "partition_count", - "partition_list", - "query_count", - "query_user_count", - "query_user_map", - "query_count_updated_at", - "database_name", - "database_qualified_name", - "schema_name", - "schema_qualified_name", - "table_name", - "table_qualified_name", - "view_name", - "view_qualified_name", - "is_profiled", - "last_profiled_at", - "mongo_d_b_database", + "data_type", + "object_qualified_name", + "order", + "inline_help_text", + "is_calculated", + "formula", + "is_case_sensitive", + "is_encrypted", + "max_length", + "is_nullable", + "precision", + "numeric_scale", + "is_unique", + "picklist_values", + "is_polymorphic_foreign_key", + "default_value_formula", + "lookup_objects", + "object", ] @property - def mongo_d_b_collection_subtype(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.mongo_d_b_collection_subtype - ) - - @mongo_d_b_collection_subtype.setter - def mongo_d_b_collection_subtype(self, mongo_d_b_collection_subtype: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mongo_d_b_collection_subtype = mongo_d_b_collection_subtype - - @property - def mongo_d_b_collection_is_capped(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.mongo_d_b_collection_is_capped - ) - - @mongo_d_b_collection_is_capped.setter - def mongo_d_b_collection_is_capped( - self, mongo_d_b_collection_is_capped: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mongo_d_b_collection_is_capped = mongo_d_b_collection_is_capped - - @property - def mongo_d_b_collection_time_field(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.mongo_d_b_collection_time_field - ) - - @mongo_d_b_collection_time_field.setter - def mongo_d_b_collection_time_field( - self, mongo_d_b_collection_time_field: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mongo_d_b_collection_time_field = ( - mongo_d_b_collection_time_field - ) - - @property - def mongo_d_b_collection_time_granularity(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.mongo_d_b_collection_time_granularity - ) - - @mongo_d_b_collection_time_granularity.setter - def mongo_d_b_collection_time_granularity( - self, 
mongo_d_b_collection_time_granularity: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mongo_d_b_collection_time_granularity = ( - mongo_d_b_collection_time_granularity - ) - - @property - def mongo_d_b_collection_expire_after_seconds(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.mongo_d_b_collection_expire_after_seconds - ) - - @mongo_d_b_collection_expire_after_seconds.setter - def mongo_d_b_collection_expire_after_seconds( - self, mongo_d_b_collection_expire_after_seconds: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mongo_d_b_collection_expire_after_seconds = ( - mongo_d_b_collection_expire_after_seconds - ) - - @property - def mongo_d_b_collection_maximum_document_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.mongo_d_b_collection_maximum_document_count - ) - - @mongo_d_b_collection_maximum_document_count.setter - def mongo_d_b_collection_maximum_document_count( - self, mongo_d_b_collection_maximum_document_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mongo_d_b_collection_maximum_document_count = ( - mongo_d_b_collection_maximum_document_count - ) - - @property - def mongo_d_b_collection_max_size(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.mongo_d_b_collection_max_size - ) + def data_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.data_type - @mongo_d_b_collection_max_size.setter - def mongo_d_b_collection_max_size( - self, mongo_d_b_collection_max_size: Optional[int] - ): + @data_type.setter + def data_type(self, data_type: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mongo_d_b_collection_max_size = mongo_d_b_collection_max_size + self.attributes.data_type = data_type @property - def mongo_d_b_collection_num_orphan_docs(self) -> Optional[int]: + def object_qualified_name(self) -> Optional[str]: return ( - None - if self.attributes is None - else self.attributes.mongo_d_b_collection_num_orphan_docs + None if self.attributes is None else self.attributes.object_qualified_name ) - @mongo_d_b_collection_num_orphan_docs.setter - def mongo_d_b_collection_num_orphan_docs( - self, mongo_d_b_collection_num_orphan_docs: Optional[int] - ): + @object_qualified_name.setter + def object_qualified_name(self, object_qualified_name: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mongo_d_b_collection_num_orphan_docs = ( - mongo_d_b_collection_num_orphan_docs - ) + self.attributes.object_qualified_name = object_qualified_name @property - def mongo_d_b_collection_num_indexes(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.mongo_d_b_collection_num_indexes - ) + def order(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.order - @mongo_d_b_collection_num_indexes.setter - def mongo_d_b_collection_num_indexes( - self, mongo_d_b_collection_num_indexes: Optional[int] - ): + @order.setter + def order(self, order: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mongo_d_b_collection_num_indexes = ( - mongo_d_b_collection_num_indexes - ) + self.attributes.order = order @property - def 
mongo_d_b_collection_total_index_size(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.mongo_d_b_collection_total_index_size - ) + def inline_help_text(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.inline_help_text - @mongo_d_b_collection_total_index_size.setter - def mongo_d_b_collection_total_index_size( - self, mongo_d_b_collection_total_index_size: Optional[int] - ): + @inline_help_text.setter + def inline_help_text(self, inline_help_text: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mongo_d_b_collection_total_index_size = ( - mongo_d_b_collection_total_index_size - ) + self.attributes.inline_help_text = inline_help_text @property - def mongo_d_b_collection_average_object_size(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.mongo_d_b_collection_average_object_size - ) + def is_calculated(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_calculated - @mongo_d_b_collection_average_object_size.setter - def mongo_d_b_collection_average_object_size( - self, mongo_d_b_collection_average_object_size: Optional[int] - ): + @is_calculated.setter + def is_calculated(self, is_calculated: Optional[bool]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mongo_d_b_collection_average_object_size = ( - mongo_d_b_collection_average_object_size - ) + self.attributes.is_calculated = is_calculated @property - def mongo_d_b_collection_schema_definition(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.mongo_d_b_collection_schema_definition - ) + def formula(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.formula - @mongo_d_b_collection_schema_definition.setter - def mongo_d_b_collection_schema_definition( - self, mongo_d_b_collection_schema_definition: Optional[str] - ): + @formula.setter + def formula(self, formula: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mongo_d_b_collection_schema_definition = ( - mongo_d_b_collection_schema_definition - ) + self.attributes.formula = formula @property - def column_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.column_count + def is_case_sensitive(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_case_sensitive - @column_count.setter - def column_count(self, column_count: Optional[int]): + @is_case_sensitive.setter + def is_case_sensitive(self, is_case_sensitive: Optional[bool]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.column_count = column_count + self.attributes.is_case_sensitive = is_case_sensitive @property - def row_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.row_count + def is_encrypted(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_encrypted - @row_count.setter - def row_count(self, row_count: Optional[int]): + @is_encrypted.setter + def is_encrypted(self, is_encrypted: Optional[bool]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.row_count = row_count + self.attributes.is_encrypted = is_encrypted @property - def size_bytes(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.size_bytes + def 
max_length(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.max_length - @size_bytes.setter - def size_bytes(self, size_bytes: Optional[int]): + @max_length.setter + def max_length(self, max_length: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.size_bytes = size_bytes + self.attributes.max_length = max_length @property - def alias(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.alias + def is_nullable(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_nullable - @alias.setter - def alias(self, alias: Optional[str]): + @is_nullable.setter + def is_nullable(self, is_nullable: Optional[bool]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.alias = alias + self.attributes.is_nullable = is_nullable @property - def is_temporary(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_temporary + def precision(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.precision - @is_temporary.setter - def is_temporary(self, is_temporary: Optional[bool]): + @precision.setter + def precision(self, precision: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.is_temporary = is_temporary + self.attributes.precision = precision @property - def is_query_preview(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_query_preview + def numeric_scale(self) -> Optional[float]: + return None if self.attributes is None else self.attributes.numeric_scale - @is_query_preview.setter - def is_query_preview(self, is_query_preview: Optional[bool]): + @numeric_scale.setter + def numeric_scale(self, numeric_scale: Optional[float]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.is_query_preview = is_query_preview + self.attributes.numeric_scale = numeric_scale @property - def query_preview_config(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.query_preview_config + def is_unique(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_unique - @query_preview_config.setter - def query_preview_config(self, query_preview_config: Optional[dict[str, str]]): + @is_unique.setter + def is_unique(self, is_unique: Optional[bool]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.query_preview_config = query_preview_config + self.attributes.is_unique = is_unique @property - def external_location(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.external_location + def picklist_values(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.picklist_values - @external_location.setter - def external_location(self, external_location: Optional[str]): + @picklist_values.setter + def picklist_values(self, picklist_values: Optional[set[str]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.external_location = external_location + self.attributes.picklist_values = picklist_values @property - def external_location_region(self) -> Optional[str]: + def is_polymorphic_foreign_key(self) -> Optional[bool]: return ( None if self.attributes is None - else self.attributes.external_location_region + else self.attributes.is_polymorphic_foreign_key ) - 
@external_location_region.setter - def external_location_region(self, external_location_region: Optional[str]): + @is_polymorphic_foreign_key.setter + def is_polymorphic_foreign_key(self, is_polymorphic_foreign_key: Optional[bool]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.external_location_region = external_location_region + self.attributes.is_polymorphic_foreign_key = is_polymorphic_foreign_key @property - def external_location_format(self) -> Optional[str]: + def default_value_formula(self) -> Optional[str]: return ( - None - if self.attributes is None - else self.attributes.external_location_format + None if self.attributes is None else self.attributes.default_value_formula ) - @external_location_format.setter - def external_location_format(self, external_location_format: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.external_location_format = external_location_format - - @property - def is_partitioned(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_partitioned - - @is_partitioned.setter - def is_partitioned(self, is_partitioned: Optional[bool]): + @default_value_formula.setter + def default_value_formula(self, default_value_formula: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.is_partitioned = is_partitioned + self.attributes.default_value_formula = default_value_formula @property - def partition_strategy(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.partition_strategy + def lookup_objects(self) -> Optional[list[SalesforceObject]]: + return None if self.attributes is None else self.attributes.lookup_objects - @partition_strategy.setter - def partition_strategy(self, partition_strategy: Optional[str]): + @lookup_objects.setter + def lookup_objects(self, lookup_objects: Optional[list[SalesforceObject]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.partition_strategy = partition_strategy + self.attributes.lookup_objects = lookup_objects @property - def partition_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.partition_count + def object(self) -> Optional[SalesforceObject]: + return None if self.attributes is None else self.attributes.object - @partition_count.setter - def partition_count(self, partition_count: Optional[int]): + @object.setter + def object(self, object: Optional[SalesforceObject]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.partition_count = partition_count - - @property - def partition_list(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.partition_list - - @partition_list.setter - def partition_list(self, partition_list: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.partition_list = partition_list - - @property - def query_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_count + self.attributes.object = object - @query_count.setter - def query_count(self, query_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_count = query_count - - @property - def query_user_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_user_count - - @query_user_count.setter - def 
query_user_count(self, query_user_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_user_count = query_user_count - - @property - def query_user_map(self) -> Optional[dict[str, int]]: - return None if self.attributes is None else self.attributes.query_user_map - - @query_user_map.setter - def query_user_map(self, query_user_map: Optional[dict[str, int]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_user_map = query_user_map - - @property - def query_count_updated_at(self) -> Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.query_count_updated_at + class Attributes(Salesforce.Attributes): + data_type: Optional[str] = Field(None, description="", alias="dataType") + object_qualified_name: Optional[str] = Field( + None, description="", alias="objectQualifiedName" ) - - @query_count_updated_at.setter - def query_count_updated_at(self, query_count_updated_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_count_updated_at = query_count_updated_at - - @property - def database_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.database_name - - @database_name.setter - def database_name(self, database_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.database_name = database_name - - @property - def database_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.database_qualified_name + order: Optional[int] = Field(None, description="", alias="order") + inline_help_text: Optional[str] = Field( + None, description="", alias="inlineHelpText" ) + is_calculated: Optional[bool] = Field( + None, description="", alias="isCalculated" + ) + formula: Optional[str] = Field(None, description="", alias="formula") + is_case_sensitive: Optional[bool] = Field( + None, description="", alias="isCaseSensitive" + ) + is_encrypted: Optional[bool] = Field(None, description="", alias="isEncrypted") + max_length: Optional[int] = Field(None, description="", alias="maxLength") + is_nullable: Optional[bool] = Field(None, description="", alias="isNullable") + precision: Optional[int] = Field(None, description="", alias="precision") + numeric_scale: Optional[float] = Field( + None, description="", alias="numericScale" + ) + is_unique: Optional[bool] = Field(None, description="", alias="isUnique") + picklist_values: Optional[set[str]] = Field( + None, description="", alias="picklistValues" + ) + is_polymorphic_foreign_key: Optional[bool] = Field( + None, description="", alias="isPolymorphicForeignKey" + ) + default_value_formula: Optional[str] = Field( + None, description="", alias="defaultValueFormula" + ) + lookup_objects: Optional[list[SalesforceObject]] = Field( + None, description="", alias="lookupObjects" + ) # relationship + object: Optional[SalesforceObject] = Field( + None, description="", alias="object" + ) # relationship - @database_qualified_name.setter - def database_qualified_name(self, database_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.database_qualified_name = database_qualified_name - - @property - def schema_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.schema_name - - @schema_name.setter - def schema_name(self, schema_name: 
Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_name = schema_name + attributes: "SalesforceField.Attributes" = Field( + default_factory=lambda: SalesforceField.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) - @property - def schema_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.schema_qualified_name - ) - @schema_qualified_name.setter - def schema_qualified_name(self, schema_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_qualified_name = schema_qualified_name +class SalesforceOrganization(Salesforce): + """Description""" - @property - def table_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.table_name + type_name: str = Field("SalesforceOrganization", allow_mutation=False) - @table_name.setter - def table_name(self, table_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_name = table_name + @validator("type_name") + def validate_type_name(cls, v): + if v != "SalesforceOrganization": + raise ValueError("must be SalesforceOrganization") + return v - @property - def table_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.table_qualified_name + def __setattr__(self, name, value): + if name in SalesforceOrganization._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) - @table_qualified_name.setter - def table_qualified_name(self, table_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_qualified_name = table_qualified_name + SOURCE_ID: ClassVar[KeywordField] = KeywordField("sourceId", "sourceId") + """ + Identifier of the organization in Salesforce. 
+ """ - @property - def view_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.view_name + REPORTS: ClassVar[RelationField] = RelationField("reports") + """ + TBC + """ + OBJECTS: ClassVar[RelationField] = RelationField("objects") + """ + TBC + """ + DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") + """ + TBC + """ - @view_name.setter - def view_name(self, view_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.view_name = view_name + _convenience_properties: ClassVar[list[str]] = [ + "source_id", + "reports", + "objects", + "dashboards", + ] @property - def view_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.view_qualified_name + def source_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.source_id - @view_qualified_name.setter - def view_qualified_name(self, view_qualified_name: Optional[str]): + @source_id.setter + def source_id(self, source_id: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.view_qualified_name = view_qualified_name + self.attributes.source_id = source_id @property - def is_profiled(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_profiled + def reports(self) -> Optional[list[SalesforceReport]]: + return None if self.attributes is None else self.attributes.reports - @is_profiled.setter - def is_profiled(self, is_profiled: Optional[bool]): + @reports.setter + def reports(self, reports: Optional[list[SalesforceReport]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.is_profiled = is_profiled + self.attributes.reports = reports @property - def last_profiled_at(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.last_profiled_at + def objects(self) -> Optional[list[SalesforceObject]]: + return None if self.attributes is None else self.attributes.objects - @last_profiled_at.setter - def last_profiled_at(self, last_profiled_at: Optional[datetime]): + @objects.setter + def objects(self, objects: Optional[list[SalesforceObject]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.last_profiled_at = last_profiled_at + self.attributes.objects = objects @property - def mongo_d_b_database(self) -> Optional[MongoDBDatabase]: - return None if self.attributes is None else self.attributes.mongo_d_b_database + def dashboards(self) -> Optional[list[SalesforceDashboard]]: + return None if self.attributes is None else self.attributes.dashboards - @mongo_d_b_database.setter - def mongo_d_b_database(self, mongo_d_b_database: Optional[MongoDBDatabase]): + @dashboards.setter + def dashboards(self, dashboards: Optional[list[SalesforceDashboard]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mongo_d_b_database = mongo_d_b_database + self.attributes.dashboards = dashboards - class Attributes(Table.Attributes): - mongo_d_b_collection_subtype: Optional[str] = Field( - None, description="", alias="mongoDBCollectionSubtype" - ) - mongo_d_b_collection_is_capped: Optional[bool] = Field( - None, description="", alias="mongoDBCollectionIsCapped" - ) - mongo_d_b_collection_time_field: Optional[str] = Field( - None, description="", alias="mongoDBCollectionTimeField" - ) - mongo_d_b_collection_time_granularity: Optional[str] = Field( - None, description="", 
alias="mongoDBCollectionTimeGranularity" - ) - mongo_d_b_collection_expire_after_seconds: Optional[int] = Field( - None, description="", alias="mongoDBCollectionExpireAfterSeconds" - ) - mongo_d_b_collection_maximum_document_count: Optional[int] = Field( - None, description="", alias="mongoDBCollectionMaximumDocumentCount" - ) - mongo_d_b_collection_max_size: Optional[int] = Field( - None, description="", alias="mongoDBCollectionMaxSize" - ) - mongo_d_b_collection_num_orphan_docs: Optional[int] = Field( - None, description="", alias="mongoDBCollectionNumOrphanDocs" - ) - mongo_d_b_collection_num_indexes: Optional[int] = Field( - None, description="", alias="mongoDBCollectionNumIndexes" - ) - mongo_d_b_collection_total_index_size: Optional[int] = Field( - None, description="", alias="mongoDBCollectionTotalIndexSize" - ) - mongo_d_b_collection_average_object_size: Optional[int] = Field( - None, description="", alias="mongoDBCollectionAverageObjectSize" - ) - mongo_d_b_collection_schema_definition: Optional[str] = Field( - None, description="", alias="mongoDBCollectionSchemaDefinition" - ) - column_count: Optional[int] = Field(None, description="", alias="columnCount") - row_count: Optional[int] = Field(None, description="", alias="rowCount") - size_bytes: Optional[int] = Field(None, description="", alias="sizeBytes") - alias: Optional[str] = Field(None, description="", alias="alias") - is_temporary: Optional[bool] = Field(None, description="", alias="isTemporary") - is_query_preview: Optional[bool] = Field( - None, description="", alias="isQueryPreview" - ) - query_preview_config: Optional[dict[str, str]] = Field( - None, description="", alias="queryPreviewConfig" - ) - external_location: Optional[str] = Field( - None, description="", alias="externalLocation" - ) - external_location_region: Optional[str] = Field( - None, description="", alias="externalLocationRegion" - ) - external_location_format: Optional[str] = Field( - None, description="", alias="externalLocationFormat" - ) - is_partitioned: Optional[bool] = Field( - None, description="", alias="isPartitioned" - ) - partition_strategy: Optional[str] = Field( - None, description="", alias="partitionStrategy" - ) - partition_count: Optional[int] = Field( - None, description="", alias="partitionCount" - ) - partition_list: Optional[str] = Field( - None, description="", alias="partitionList" - ) - query_count: Optional[int] = Field(None, description="", alias="queryCount") - query_user_count: Optional[int] = Field( - None, description="", alias="queryUserCount" - ) - query_user_map: Optional[dict[str, int]] = Field( - None, description="", alias="queryUserMap" - ) - query_count_updated_at: Optional[datetime] = Field( - None, description="", alias="queryCountUpdatedAt" - ) - database_name: Optional[str] = Field(None, description="", alias="databaseName") - database_qualified_name: Optional[str] = Field( - None, description="", alias="databaseQualifiedName" - ) - schema_name: Optional[str] = Field(None, description="", alias="schemaName") - schema_qualified_name: Optional[str] = Field( - None, description="", alias="schemaQualifiedName" - ) - table_name: Optional[str] = Field(None, description="", alias="tableName") - table_qualified_name: Optional[str] = Field( - None, description="", alias="tableQualifiedName" - ) - view_name: Optional[str] = Field(None, description="", alias="viewName") - view_qualified_name: Optional[str] = Field( - None, description="", alias="viewQualifiedName" - ) - is_profiled: Optional[bool] = Field(None, 
description="", alias="isProfiled") - last_profiled_at: Optional[datetime] = Field( - None, description="", alias="lastProfiledAt" - ) - mongo_d_b_database: Optional[MongoDBDatabase] = Field( - None, description="", alias="mongoDBDatabase" + class Attributes(Salesforce.Attributes): + source_id: Optional[str] = Field(None, description="", alias="sourceId") + reports: Optional[list[SalesforceReport]] = Field( + None, description="", alias="reports" + ) # relationship + objects: Optional[list[SalesforceObject]] = Field( + None, description="", alias="objects" + ) # relationship + dashboards: Optional[list[SalesforceDashboard]] = Field( + None, description="", alias="dashboards" ) # relationship - attributes: "MongoDBCollection.Attributes" = Field( - default_factory=lambda: MongoDBCollection.Attributes(), + attributes: "SalesforceOrganization.Attributes" = Field( + default_factory=lambda: SalesforceOrganization.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -class MongoDBDatabase(Database): +class SalesforceDashboard(Salesforce): """Description""" - type_name: str = Field("MongoDBDatabase", allow_mutation=False) + type_name: str = Field("SalesforceDashboard", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "MongoDBDatabase": - raise ValueError("must be MongoDBDatabase") + if v != "SalesforceDashboard": + raise ValueError("must be SalesforceDashboard") return v def __setattr__(self, name, value): - if name in MongoDBDatabase._convenience_properties: + if name in SalesforceDashboard._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - MONGO_DB_DATABASE_COLLECTION_COUNT: ClassVar[NumericField] = NumericField( - "mongoDBDatabaseCollectionCount", "mongoDBDatabaseCollectionCount" - ) - """ - The number of collection in a MongoDB Database + SOURCE_ID: ClassVar[KeywordField] = KeywordField("sourceId", "sourceId") """ - SCHEMA_COUNT: ClassVar[NumericField] = NumericField("schemaCount", "schemaCount") - """ - TBC + Identifier of the dashboard in Salesforce. """ - QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") - """ - TBC - """ - QUERY_USER_COUNT: ClassVar[NumericField] = NumericField( - "queryUserCount", "queryUserCount" + DASHBOARD_TYPE: ClassVar[KeywordField] = KeywordField( + "dashboardType", "dashboardType" ) """ - TBC + Type of dashboard in Salesforce. """ - QUERY_USER_MAP: ClassVar[KeywordField] = KeywordField( - "queryUserMap", "queryUserMap" - ) + REPORT_COUNT: ClassVar[NumericField] = NumericField("reportCount", "reportCount") """ - TBC + Number of reports linked to the dashboard in Salesforce. 
""" - QUERY_COUNT_UPDATED_AT: ClassVar[NumericField] = NumericField( - "queryCountUpdatedAt", "queryCountUpdatedAt" - ) - """ - TBC - """ - DATABASE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "databaseName", "databaseName.keyword", "databaseName" - ) - """ - TBC - """ - DATABASE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "databaseQualifiedName", "databaseQualifiedName" - ) - """ - TBC - """ - SCHEMA_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "schemaName", "schemaName.keyword", "schemaName" - ) - """ - TBC - """ - SCHEMA_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "schemaQualifiedName", "schemaQualifiedName" - ) - """ - TBC - """ - TABLE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "tableName", "tableName.keyword", "tableName" - ) - """ - TBC - """ - TABLE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "tableQualifiedName", "tableQualifiedName" - ) - """ - TBC - """ - VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "viewName", "viewName.keyword", "viewName" - ) - """ - TBC - """ - VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "viewQualifiedName", "viewQualifiedName" - ) - """ - TBC - """ - IS_PROFILED: ClassVar[BooleanField] = BooleanField("isProfiled", "isProfiled") - """ - TBC - """ - LAST_PROFILED_AT: ClassVar[NumericField] = NumericField( - "lastProfiledAt", "lastProfiledAt" - ) + + REPORTS: ClassVar[RelationField] = RelationField("reports") """ TBC """ - - MONGO_DB_COLLECTIONS: ClassVar[RelationField] = RelationField("mongoDBCollections") + ORGANIZATION: ClassVar[RelationField] = RelationField("organization") """ TBC """ _convenience_properties: ClassVar[list[str]] = [ - "mongo_d_b_database_collection_count", - "schema_count", - "query_count", - "query_user_count", - "query_user_map", - "query_count_updated_at", - "database_name", - "database_qualified_name", - "schema_name", - "schema_qualified_name", - "table_name", - "table_qualified_name", - "view_name", - "view_qualified_name", - "is_profiled", - "last_profiled_at", - "mongo_d_b_collections", + "source_id", + "dashboard_type", + "report_count", + "reports", + "organization", ] @property - def mongo_d_b_database_collection_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.mongo_d_b_database_collection_count - ) - - @mongo_d_b_database_collection_count.setter - def mongo_d_b_database_collection_count( - self, mongo_d_b_database_collection_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mongo_d_b_database_collection_count = ( - mongo_d_b_database_collection_count - ) - - @property - def schema_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.schema_count + def source_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.source_id - @schema_count.setter - def schema_count(self, schema_count: Optional[int]): + @source_id.setter + def source_id(self, source_id: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.schema_count = schema_count + self.attributes.source_id = source_id @property - def query_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_count + def dashboard_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dashboard_type - @query_count.setter - def query_count(self, query_count: Optional[int]): + 
@dashboard_type.setter + def dashboard_type(self, dashboard_type: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.query_count = query_count + self.attributes.dashboard_type = dashboard_type @property - def query_user_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_user_count + def report_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.report_count - @query_user_count.setter - def query_user_count(self, query_user_count: Optional[int]): + @report_count.setter + def report_count(self, report_count: Optional[int]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.query_user_count = query_user_count + self.attributes.report_count = report_count @property - def query_user_map(self) -> Optional[dict[str, int]]: - return None if self.attributes is None else self.attributes.query_user_map + def reports(self) -> Optional[list[SalesforceReport]]: + return None if self.attributes is None else self.attributes.reports - @query_user_map.setter - def query_user_map(self, query_user_map: Optional[dict[str, int]]): + @reports.setter + def reports(self, reports: Optional[list[SalesforceReport]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.query_user_map = query_user_map + self.attributes.reports = reports @property - def query_count_updated_at(self) -> Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.query_count_updated_at - ) + def organization(self) -> Optional[SalesforceOrganization]: + return None if self.attributes is None else self.attributes.organization - @query_count_updated_at.setter - def query_count_updated_at(self, query_count_updated_at: Optional[datetime]): + @organization.setter + def organization(self, organization: Optional[SalesforceOrganization]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.query_count_updated_at = query_count_updated_at + self.attributes.organization = organization - @property - def database_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.database_name - - @database_name.setter - def database_name(self, database_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.database_name = database_name - - @property - def database_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.database_qualified_name + class Attributes(Salesforce.Attributes): + source_id: Optional[str] = Field(None, description="", alias="sourceId") + dashboard_type: Optional[str] = Field( + None, description="", alias="dashboardType" ) + report_count: Optional[int] = Field(None, description="", alias="reportCount") + reports: Optional[list[SalesforceReport]] = Field( + None, description="", alias="reports" + ) # relationship + organization: Optional[SalesforceOrganization] = Field( + None, description="", alias="organization" + ) # relationship - @database_qualified_name.setter - def database_qualified_name(self, database_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.database_qualified_name = database_qualified_name + attributes: "SalesforceDashboard.Attributes" = Field( + default_factory=lambda: SalesforceDashboard.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) - @property - def schema_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.schema_name - @schema_name.setter - def schema_name(self, schema_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_name = schema_name +class SalesforceReport(Salesforce): + """Description""" - @property - def schema_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.schema_qualified_name - ) + type_name: str = Field("SalesforceReport", allow_mutation=False) - @schema_qualified_name.setter - def schema_qualified_name(self, schema_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_qualified_name = schema_qualified_name + @validator("type_name") + def validate_type_name(cls, v): + if v != "SalesforceReport": + raise ValueError("must be SalesforceReport") + return v - @property - def table_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.table_name + def __setattr__(self, name, value): + if name in SalesforceReport._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) - @table_name.setter - def table_name(self, table_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_name = table_name + SOURCE_ID: ClassVar[KeywordField] = KeywordField("sourceId", "sourceId") + """ + Identifier of the report in Salesforce. + """ + REPORT_TYPE: ClassVar[KeywordField] = KeywordField("reportType", "reportType") + """ + Type of report in Salesforce. + """ + DETAIL_COLUMNS: ClassVar[KeywordField] = KeywordField( + "detailColumns", "detailColumns" + ) + """ + List of column names on the report. 
+ """ - @property - def table_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.table_qualified_name + DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") + """ + TBC + """ + ORGANIZATION: ClassVar[RelationField] = RelationField("organization") + """ + TBC + """ - @table_qualified_name.setter - def table_qualified_name(self, table_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_qualified_name = table_qualified_name + _convenience_properties: ClassVar[list[str]] = [ + "source_id", + "report_type", + "detail_columns", + "dashboards", + "organization", + ] @property - def view_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.view_name + def source_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.source_id - @view_name.setter - def view_name(self, view_name: Optional[str]): + @source_id.setter + def source_id(self, source_id: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.view_name = view_name + self.attributes.source_id = source_id @property - def view_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.view_qualified_name + def report_type(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.report_type - @view_qualified_name.setter - def view_qualified_name(self, view_qualified_name: Optional[str]): + @report_type.setter + def report_type(self, report_type: Optional[dict[str, str]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.view_qualified_name = view_qualified_name + self.attributes.report_type = report_type @property - def is_profiled(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_profiled + def detail_columns(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.detail_columns - @is_profiled.setter - def is_profiled(self, is_profiled: Optional[bool]): + @detail_columns.setter + def detail_columns(self, detail_columns: Optional[set[str]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.is_profiled = is_profiled + self.attributes.detail_columns = detail_columns @property - def last_profiled_at(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.last_profiled_at + def dashboards(self) -> Optional[list[SalesforceDashboard]]: + return None if self.attributes is None else self.attributes.dashboards - @last_profiled_at.setter - def last_profiled_at(self, last_profiled_at: Optional[datetime]): + @dashboards.setter + def dashboards(self, dashboards: Optional[list[SalesforceDashboard]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.last_profiled_at = last_profiled_at + self.attributes.dashboards = dashboards @property - def mongo_d_b_collections(self) -> Optional[list[MongoDBCollection]]: - return ( - None if self.attributes is None else self.attributes.mongo_d_b_collections - ) + def organization(self) -> Optional[SalesforceOrganization]: + return None if self.attributes is None else self.attributes.organization - @mongo_d_b_collections.setter - def mongo_d_b_collections( - self, mongo_d_b_collections: Optional[list[MongoDBCollection]] - ): + @organization.setter + def organization(self, organization: 
Optional[SalesforceOrganization]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.mongo_d_b_collections = mongo_d_b_collections + self.attributes.organization = organization - class Attributes(Database.Attributes): - mongo_d_b_database_collection_count: Optional[int] = Field( - None, description="", alias="mongoDBDatabaseCollectionCount" + class Attributes(Salesforce.Attributes): + source_id: Optional[str] = Field(None, description="", alias="sourceId") + report_type: Optional[dict[str, str]] = Field( + None, description="", alias="reportType" ) - schema_count: Optional[int] = Field(None, description="", alias="schemaCount") - query_count: Optional[int] = Field(None, description="", alias="queryCount") - query_user_count: Optional[int] = Field( - None, description="", alias="queryUserCount" + detail_columns: Optional[set[str]] = Field( + None, description="", alias="detailColumns" ) - query_user_map: Optional[dict[str, int]] = Field( - None, description="", alias="queryUserMap" - ) - query_count_updated_at: Optional[datetime] = Field( - None, description="", alias="queryCountUpdatedAt" - ) - database_name: Optional[str] = Field(None, description="", alias="databaseName") - database_qualified_name: Optional[str] = Field( - None, description="", alias="databaseQualifiedName" - ) - schema_name: Optional[str] = Field(None, description="", alias="schemaName") - schema_qualified_name: Optional[str] = Field( - None, description="", alias="schemaQualifiedName" - ) - table_name: Optional[str] = Field(None, description="", alias="tableName") - table_qualified_name: Optional[str] = Field( - None, description="", alias="tableQualifiedName" - ) - view_name: Optional[str] = Field(None, description="", alias="viewName") - view_qualified_name: Optional[str] = Field( - None, description="", alias="viewQualifiedName" - ) - is_profiled: Optional[bool] = Field(None, description="", alias="isProfiled") - last_profiled_at: Optional[datetime] = Field( - None, description="", alias="lastProfiledAt" - ) - mongo_d_b_collections: Optional[list[MongoDBCollection]] = Field( - None, description="", alias="mongoDBCollections" + dashboards: Optional[list[SalesforceDashboard]] = Field( + None, description="", alias="dashboards" + ) # relationship + organization: Optional[SalesforceOrganization] = Field( + None, description="", alias="organization" ) # relationship - attributes: "MongoDBDatabase.Attributes" = Field( - default_factory=lambda: MongoDBDatabase.Attributes(), + attributes: "SalesforceReport.Attributes" = Field( + default_factory=lambda: SalesforceReport.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -MongoDBCollection.Attributes.update_forward_refs() +SalesforceObject.Attributes.update_forward_refs() + + +SalesforceField.Attributes.update_forward_refs() + + +SalesforceOrganization.Attributes.update_forward_refs() + + +SalesforceDashboard.Attributes.update_forward_refs() -MongoDBDatabase.Attributes.update_forward_refs() +SalesforceReport.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset84.py b/pyatlan/model/assets/asset84.py index 8ea6c3ae5..f7470fc7e 100644 --- a/pyatlan/model/assets/asset84.py +++ b/pyatlan/model/assets/asset84.py @@ -4,30 +4,1354 @@ from __future__ import annotations -from typing import ClassVar +from datetime import datetime +from typing import ClassVar, Optional from pydantic import Field, validator -from .asset79 import QlikSpace +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + NumericField, + RelationField, + TextField, +) +from .asset00 import Database, Table -class QlikStream(QlikSpace): + +class MongoDBCollection(Table): + """Description""" + + type_name: str = Field("MongoDBCollection", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MongoDBCollection": + raise ValueError("must be MongoDBCollection") + return v + + def __setattr__(self, name, value): + if name in MongoDBCollection._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MONGO_DB_COLLECTION_SUBTYPE: ClassVar[KeywordTextField] = KeywordTextField( + "mongoDBCollectionSubtype", + "mongoDBCollectionSubtype", + "mongoDBCollectionSubtype.text", + ) + """ + Subtype of a MongoDB collection, for example: Capped, Time Series, etc. + """ + MONGO_DB_COLLECTION_IS_CAPPED: ClassVar[BooleanField] = BooleanField( + "mongoDBCollectionIsCapped", "mongoDBCollectionIsCapped" + ) + """ + Whether the collection is capped (true) or not (false). + """ + MONGO_DB_COLLECTION_TIME_FIELD: ClassVar[KeywordField] = KeywordField( + "mongoDBCollectionTimeField", "mongoDBCollectionTimeField" + ) + """ + Name of the field containing the date in each time series document. + """ + MONGO_DB_COLLECTION_TIME_GRANULARITY: ClassVar[KeywordField] = KeywordField( + "mongoDBCollectionTimeGranularity", "mongoDBCollectionTimeGranularity" + ) + """ + Closest match to the time span between consecutive incoming measurements. + """ + MONGO_DB_COLLECTION_EXPIRE_AFTER_SECONDS: ClassVar[NumericField] = NumericField( + "mongoDBCollectionExpireAfterSeconds", "mongoDBCollectionExpireAfterSeconds" + ) + """ + Seconds after which documents in a time series collection or clustered collection expire. + """ + MONGO_DB_COLLECTION_MAXIMUM_DOCUMENT_COUNT: ClassVar[NumericField] = NumericField( + "mongoDBCollectionMaximumDocumentCount", "mongoDBCollectionMaximumDocumentCount" + ) + """ + Maximum number of documents allowed in a capped collection. + """ + MONGO_DB_COLLECTION_MAX_SIZE: ClassVar[NumericField] = NumericField( + "mongoDBCollectionMaxSize", "mongoDBCollectionMaxSize" + ) + """ + Maximum size allowed in a capped collection. + """ + MONGO_DB_COLLECTION_NUM_ORPHAN_DOCS: ClassVar[NumericField] = NumericField( + "mongoDBCollectionNumOrphanDocs", "mongoDBCollectionNumOrphanDocs" + ) + """ + Number of orphaned documents in the collection. 
+ """ + MONGO_DB_COLLECTION_NUM_INDEXES: ClassVar[NumericField] = NumericField( + "mongoDBCollectionNumIndexes", "mongoDBCollectionNumIndexes" + ) + """ + Number of indexes on the collection. + """ + MONGO_DB_COLLECTION_TOTAL_INDEX_SIZE: ClassVar[NumericField] = NumericField( + "mongoDBCollectionTotalIndexSize", "mongoDBCollectionTotalIndexSize" + ) + """ + Total size of all indexes. + """ + MONGO_DB_COLLECTION_AVERAGE_OBJECT_SIZE: ClassVar[NumericField] = NumericField( + "mongoDBCollectionAverageObjectSize", "mongoDBCollectionAverageObjectSize" + ) + """ + Average size of an object in the collection. + """ + MONGO_DB_COLLECTION_SCHEMA_DEFINITION: ClassVar[TextField] = TextField( + "mongoDBCollectionSchemaDefinition", "mongoDBCollectionSchemaDefinition" + ) + """ + Definition of the schema applicable for the collection. + """ + COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") + """ + Number of columns in this table. + """ + ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") + """ + Number of rows in this table. + """ + SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") + """ + Size of this table, in bytes. + """ + ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") + """ + Alias for this table. + """ + IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") + """ + Whether this table is temporary (true) or not (false). + """ + IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( + "isQueryPreview", "isQueryPreview" + ) + """ + Whether preview queries are allowed for this table (true) or not (false). + """ + QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( + "queryPreviewConfig", "queryPreviewConfig" + ) + """ + Configuration for preview queries. + """ + EXTERNAL_LOCATION: ClassVar[KeywordField] = KeywordField( + "externalLocation", "externalLocation" + ) + """ + External location of this table, for example: an S3 object location. + """ + EXTERNAL_LOCATION_REGION: ClassVar[KeywordField] = KeywordField( + "externalLocationRegion", "externalLocationRegion" + ) + """ + Region of the external location of this table, for example: S3 region. + """ + EXTERNAL_LOCATION_FORMAT: ClassVar[KeywordField] = KeywordField( + "externalLocationFormat", "externalLocationFormat" + ) + """ + Format of the external location of this table, for example: JSON, CSV, PARQUET, etc. + """ + IS_PARTITIONED: ClassVar[BooleanField] = BooleanField( + "isPartitioned", "isPartitioned" + ) + """ + Whether this table is partitioned (true) or not (false). + """ + PARTITION_STRATEGY: ClassVar[KeywordField] = KeywordField( + "partitionStrategy", "partitionStrategy" + ) + """ + Partition strategy for this table. + """ + PARTITION_COUNT: ClassVar[NumericField] = NumericField( + "partitionCount", "partitionCount" + ) + """ + Number of partitions in this table. + """ + PARTITION_LIST: ClassVar[KeywordField] = KeywordField( + "partitionList", "partitionList" + ) + """ + List of partitions in this table. + """ + QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") + """ + Number of times this asset has been queried. + """ + QUERY_USER_COUNT: ClassVar[NumericField] = NumericField( + "queryUserCount", "queryUserCount" + ) + """ + Number of unique users who have queried this asset. + """ + QUERY_USER_MAP: ClassVar[KeywordField] = KeywordField( + "queryUserMap", "queryUserMap" + ) + """ + Map of unique users who have queried this asset to the number of times they have queried it. 
+ """ + QUERY_COUNT_UPDATED_AT: ClassVar[NumericField] = NumericField( + "queryCountUpdatedAt", "queryCountUpdatedAt" + ) + """ + Time (epoch) at which the query count was last updated, in milliseconds. + """ + DATABASE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "databaseName", "databaseName.keyword", "databaseName" + ) + """ + Simple name of the database in which this SQL asset exists, or empty if it does not exist within a database. + """ + DATABASE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "databaseQualifiedName", "databaseQualifiedName" + ) + """ + Unique name of the database in which this SQL asset exists, or empty if it does not exist within a database. + """ + SCHEMA_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "schemaName", "schemaName.keyword", "schemaName" + ) + """ + Simple name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. + """ + SCHEMA_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "schemaQualifiedName", "schemaQualifiedName" + ) + """ + Unique name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. + """ + TABLE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "tableName", "tableName.keyword", "tableName" + ) + """ + Simple name of the table in which this SQL asset exists, or empty if it does not exist within a table. + """ + TABLE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "tableQualifiedName", "tableQualifiedName" + ) + """ + Unique name of the table in which this SQL asset exists, or empty if it does not exist within a table. + """ + VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "viewName", "viewName.keyword", "viewName" + ) + """ + Simple name of the view in which this SQL asset exists, or empty if it does not exist within a view. + """ + VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "viewQualifiedName", "viewQualifiedName" + ) + """ + Unique name of the view in which this SQL asset exists, or empty if it does not exist within a view. + """ + IS_PROFILED: ClassVar[BooleanField] = BooleanField("isProfiled", "isProfiled") + """ + Whether this asset has been profiled (true) or not (false). + """ + LAST_PROFILED_AT: ClassVar[NumericField] = NumericField( + "lastProfiledAt", "lastProfiledAt" + ) + """ + Time (epoch) at which this asset was last profiled, in milliseconds. + """ + NO_SQL_SCHEMA_DEFINITION: ClassVar[TextField] = TextField( + "noSQLSchemaDefinition", "noSQLSchemaDefinition" + ) + """ + Represents attributes for describing the key schema for the table and indexes. 
+ """ + + MONGO_DB_DATABASE: ClassVar[RelationField] = RelationField("mongoDBDatabase") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "mongo_d_b_collection_subtype", + "mongo_d_b_collection_is_capped", + "mongo_d_b_collection_time_field", + "mongo_d_b_collection_time_granularity", + "mongo_d_b_collection_expire_after_seconds", + "mongo_d_b_collection_maximum_document_count", + "mongo_d_b_collection_max_size", + "mongo_d_b_collection_num_orphan_docs", + "mongo_d_b_collection_num_indexes", + "mongo_d_b_collection_total_index_size", + "mongo_d_b_collection_average_object_size", + "mongo_d_b_collection_schema_definition", + "column_count", + "row_count", + "size_bytes", + "alias", + "is_temporary", + "is_query_preview", + "query_preview_config", + "external_location", + "external_location_region", + "external_location_format", + "is_partitioned", + "partition_strategy", + "partition_count", + "partition_list", + "query_count", + "query_user_count", + "query_user_map", + "query_count_updated_at", + "database_name", + "database_qualified_name", + "schema_name", + "schema_qualified_name", + "table_name", + "table_qualified_name", + "view_name", + "view_qualified_name", + "is_profiled", + "last_profiled_at", + "no_s_q_l_schema_definition", + "mongo_d_b_database", + ] + + @property + def mongo_d_b_collection_subtype(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.mongo_d_b_collection_subtype + ) + + @mongo_d_b_collection_subtype.setter + def mongo_d_b_collection_subtype(self, mongo_d_b_collection_subtype: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_collection_subtype = mongo_d_b_collection_subtype + + @property + def mongo_d_b_collection_is_capped(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.mongo_d_b_collection_is_capped + ) + + @mongo_d_b_collection_is_capped.setter + def mongo_d_b_collection_is_capped( + self, mongo_d_b_collection_is_capped: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_collection_is_capped = mongo_d_b_collection_is_capped + + @property + def mongo_d_b_collection_time_field(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.mongo_d_b_collection_time_field + ) + + @mongo_d_b_collection_time_field.setter + def mongo_d_b_collection_time_field( + self, mongo_d_b_collection_time_field: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_collection_time_field = ( + mongo_d_b_collection_time_field + ) + + @property + def mongo_d_b_collection_time_granularity(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.mongo_d_b_collection_time_granularity + ) + + @mongo_d_b_collection_time_granularity.setter + def mongo_d_b_collection_time_granularity( + self, mongo_d_b_collection_time_granularity: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_collection_time_granularity = ( + mongo_d_b_collection_time_granularity + ) + + @property + def mongo_d_b_collection_expire_after_seconds(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.mongo_d_b_collection_expire_after_seconds + ) + + @mongo_d_b_collection_expire_after_seconds.setter + def mongo_d_b_collection_expire_after_seconds( + self, 
mongo_d_b_collection_expire_after_seconds: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_collection_expire_after_seconds = ( + mongo_d_b_collection_expire_after_seconds + ) + + @property + def mongo_d_b_collection_maximum_document_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.mongo_d_b_collection_maximum_document_count + ) + + @mongo_d_b_collection_maximum_document_count.setter + def mongo_d_b_collection_maximum_document_count( + self, mongo_d_b_collection_maximum_document_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_collection_maximum_document_count = ( + mongo_d_b_collection_maximum_document_count + ) + + @property + def mongo_d_b_collection_max_size(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.mongo_d_b_collection_max_size + ) + + @mongo_d_b_collection_max_size.setter + def mongo_d_b_collection_max_size( + self, mongo_d_b_collection_max_size: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_collection_max_size = mongo_d_b_collection_max_size + + @property + def mongo_d_b_collection_num_orphan_docs(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.mongo_d_b_collection_num_orphan_docs + ) + + @mongo_d_b_collection_num_orphan_docs.setter + def mongo_d_b_collection_num_orphan_docs( + self, mongo_d_b_collection_num_orphan_docs: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_collection_num_orphan_docs = ( + mongo_d_b_collection_num_orphan_docs + ) + + @property + def mongo_d_b_collection_num_indexes(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.mongo_d_b_collection_num_indexes + ) + + @mongo_d_b_collection_num_indexes.setter + def mongo_d_b_collection_num_indexes( + self, mongo_d_b_collection_num_indexes: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_collection_num_indexes = ( + mongo_d_b_collection_num_indexes + ) + + @property + def mongo_d_b_collection_total_index_size(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.mongo_d_b_collection_total_index_size + ) + + @mongo_d_b_collection_total_index_size.setter + def mongo_d_b_collection_total_index_size( + self, mongo_d_b_collection_total_index_size: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_collection_total_index_size = ( + mongo_d_b_collection_total_index_size + ) + + @property + def mongo_d_b_collection_average_object_size(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.mongo_d_b_collection_average_object_size + ) + + @mongo_d_b_collection_average_object_size.setter + def mongo_d_b_collection_average_object_size( + self, mongo_d_b_collection_average_object_size: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_collection_average_object_size = ( + mongo_d_b_collection_average_object_size + ) + + @property + def mongo_d_b_collection_schema_definition(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.mongo_d_b_collection_schema_definition + 
) + + @mongo_d_b_collection_schema_definition.setter + def mongo_d_b_collection_schema_definition( + self, mongo_d_b_collection_schema_definition: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_collection_schema_definition = ( + mongo_d_b_collection_schema_definition + ) + + @property + def column_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.column_count + + @column_count.setter + def column_count(self, column_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_count = column_count + + @property + def row_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.row_count + + @row_count.setter + def row_count(self, row_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.row_count = row_count + + @property + def size_bytes(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.size_bytes + + @size_bytes.setter + def size_bytes(self, size_bytes: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.size_bytes = size_bytes + + @property + def alias(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.alias + + @alias.setter + def alias(self, alias: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.alias = alias + + @property + def is_temporary(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_temporary + + @is_temporary.setter + def is_temporary(self, is_temporary: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_temporary = is_temporary + + @property + def is_query_preview(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_query_preview + + @is_query_preview.setter + def is_query_preview(self, is_query_preview: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_query_preview = is_query_preview + + @property + def query_preview_config(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.query_preview_config + + @query_preview_config.setter + def query_preview_config(self, query_preview_config: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_preview_config = query_preview_config + + @property + def external_location(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.external_location + + @external_location.setter + def external_location(self, external_location: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.external_location = external_location + + @property + def external_location_region(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.external_location_region + ) + + @external_location_region.setter + def external_location_region(self, external_location_region: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.external_location_region = external_location_region + + @property + def external_location_format(self) -> Optional[str]: + return ( + None + if self.attributes is None + else 
self.attributes.external_location_format + ) + + @external_location_format.setter + def external_location_format(self, external_location_format: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.external_location_format = external_location_format + + @property + def is_partitioned(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_partitioned + + @is_partitioned.setter + def is_partitioned(self, is_partitioned: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_partitioned = is_partitioned + + @property + def partition_strategy(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.partition_strategy + + @partition_strategy.setter + def partition_strategy(self, partition_strategy: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partition_strategy = partition_strategy + + @property + def partition_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.partition_count + + @partition_count.setter + def partition_count(self, partition_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partition_count = partition_count + + @property + def partition_list(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.partition_list + + @partition_list.setter + def partition_list(self, partition_list: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partition_list = partition_list + + @property + def query_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_count + + @query_count.setter + def query_count(self, query_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_count = query_count + + @property + def query_user_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_user_count + + @query_user_count.setter + def query_user_count(self, query_user_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_user_count = query_user_count + + @property + def query_user_map(self) -> Optional[dict[str, int]]: + return None if self.attributes is None else self.attributes.query_user_map + + @query_user_map.setter + def query_user_map(self, query_user_map: Optional[dict[str, int]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_user_map = query_user_map + + @property + def query_count_updated_at(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.query_count_updated_at + ) + + @query_count_updated_at.setter + def query_count_updated_at(self, query_count_updated_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_count_updated_at = query_count_updated_at + + @property + def database_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.database_name + + @database_name.setter + def database_name(self, database_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.database_name = database_name + + @property + def database_qualified_name(self) -> Optional[str]: + return 
( + None if self.attributes is None else self.attributes.database_qualified_name + ) + + @database_qualified_name.setter + def database_qualified_name(self, database_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.database_qualified_name = database_qualified_name + + @property + def schema_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.schema_name + + @schema_name.setter + def schema_name(self, schema_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_name = schema_name + + @property + def schema_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.schema_qualified_name + ) + + @schema_qualified_name.setter + def schema_qualified_name(self, schema_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_qualified_name = schema_qualified_name + + @property + def table_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.table_name + + @table_name.setter + def table_name(self, table_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table_name = table_name + + @property + def table_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.table_qualified_name + + @table_qualified_name.setter + def table_qualified_name(self, table_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table_qualified_name = table_qualified_name + + @property + def view_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.view_name + + @view_name.setter + def view_name(self, view_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_name = view_name + + @property + def view_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.view_qualified_name + + @view_qualified_name.setter + def view_qualified_name(self, view_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_qualified_name = view_qualified_name + + @property + def is_profiled(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_profiled + + @is_profiled.setter + def is_profiled(self, is_profiled: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_profiled = is_profiled + + @property + def last_profiled_at(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.last_profiled_at + + @last_profiled_at.setter + def last_profiled_at(self, last_profiled_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.last_profiled_at = last_profiled_at + + @property + def no_s_q_l_schema_definition(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.no_s_q_l_schema_definition + ) + + @no_s_q_l_schema_definition.setter + def no_s_q_l_schema_definition(self, no_s_q_l_schema_definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.no_s_q_l_schema_definition = no_s_q_l_schema_definition + + 
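
The new MongoDBCollection model follows the SDK's generated-asset pattern: snake_case convenience properties that lazily create the underlying pydantic Attributes object, camelCase aliases for serialisation, and a relationship attribute back to the parent database. A minimal sketch of that behaviour follows; it assumes the generated models can be instantiated without arguments, and the names and sizes used are illustrative only, not part of this change.

from pyatlan.model.assets import MongoDBCollection, MongoDBDatabase

collection = MongoDBCollection()          # assumes bare construction is valid
collection.name = "orders"                # illustrative name
collection.mongo_d_b_collection_is_capped = True
collection.mongo_d_b_collection_max_size = 1_048_576

# Each setter lazily creates MongoDBCollection.Attributes on first use, so the
# underlying pydantic model is now populated:
assert collection.attributes.mongo_d_b_collection_max_size == 1_048_576

# Relationship to the parent database (the reverse side is a list on MongoDBDatabase):
database = MongoDBDatabase()
collection.mongo_d_b_database = database
database.mongo_d_b_collections = [collection]

# Serialising by alias yields the camelCase attribute names declared above,
# e.g. {"name": "orders", "mongoDBCollectionIsCapped": True, ...}
payload = collection.attributes.dict(by_alias=True, exclude_none=True)
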
@property + def mongo_d_b_database(self) -> Optional[MongoDBDatabase]: + return None if self.attributes is None else self.attributes.mongo_d_b_database + + @mongo_d_b_database.setter + def mongo_d_b_database(self, mongo_d_b_database: Optional[MongoDBDatabase]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_database = mongo_d_b_database + + class Attributes(Table.Attributes): + mongo_d_b_collection_subtype: Optional[str] = Field( + None, description="", alias="mongoDBCollectionSubtype" + ) + mongo_d_b_collection_is_capped: Optional[bool] = Field( + None, description="", alias="mongoDBCollectionIsCapped" + ) + mongo_d_b_collection_time_field: Optional[str] = Field( + None, description="", alias="mongoDBCollectionTimeField" + ) + mongo_d_b_collection_time_granularity: Optional[str] = Field( + None, description="", alias="mongoDBCollectionTimeGranularity" + ) + mongo_d_b_collection_expire_after_seconds: Optional[int] = Field( + None, description="", alias="mongoDBCollectionExpireAfterSeconds" + ) + mongo_d_b_collection_maximum_document_count: Optional[int] = Field( + None, description="", alias="mongoDBCollectionMaximumDocumentCount" + ) + mongo_d_b_collection_max_size: Optional[int] = Field( + None, description="", alias="mongoDBCollectionMaxSize" + ) + mongo_d_b_collection_num_orphan_docs: Optional[int] = Field( + None, description="", alias="mongoDBCollectionNumOrphanDocs" + ) + mongo_d_b_collection_num_indexes: Optional[int] = Field( + None, description="", alias="mongoDBCollectionNumIndexes" + ) + mongo_d_b_collection_total_index_size: Optional[int] = Field( + None, description="", alias="mongoDBCollectionTotalIndexSize" + ) + mongo_d_b_collection_average_object_size: Optional[int] = Field( + None, description="", alias="mongoDBCollectionAverageObjectSize" + ) + mongo_d_b_collection_schema_definition: Optional[str] = Field( + None, description="", alias="mongoDBCollectionSchemaDefinition" + ) + column_count: Optional[int] = Field(None, description="", alias="columnCount") + row_count: Optional[int] = Field(None, description="", alias="rowCount") + size_bytes: Optional[int] = Field(None, description="", alias="sizeBytes") + alias: Optional[str] = Field(None, description="", alias="alias") + is_temporary: Optional[bool] = Field(None, description="", alias="isTemporary") + is_query_preview: Optional[bool] = Field( + None, description="", alias="isQueryPreview" + ) + query_preview_config: Optional[dict[str, str]] = Field( + None, description="", alias="queryPreviewConfig" + ) + external_location: Optional[str] = Field( + None, description="", alias="externalLocation" + ) + external_location_region: Optional[str] = Field( + None, description="", alias="externalLocationRegion" + ) + external_location_format: Optional[str] = Field( + None, description="", alias="externalLocationFormat" + ) + is_partitioned: Optional[bool] = Field( + None, description="", alias="isPartitioned" + ) + partition_strategy: Optional[str] = Field( + None, description="", alias="partitionStrategy" + ) + partition_count: Optional[int] = Field( + None, description="", alias="partitionCount" + ) + partition_list: Optional[str] = Field( + None, description="", alias="partitionList" + ) + query_count: Optional[int] = Field(None, description="", alias="queryCount") + query_user_count: Optional[int] = Field( + None, description="", alias="queryUserCount" + ) + query_user_map: Optional[dict[str, int]] = Field( + None, description="", alias="queryUserMap" + ) + 
query_count_updated_at: Optional[datetime] = Field( + None, description="", alias="queryCountUpdatedAt" + ) + database_name: Optional[str] = Field(None, description="", alias="databaseName") + database_qualified_name: Optional[str] = Field( + None, description="", alias="databaseQualifiedName" + ) + schema_name: Optional[str] = Field(None, description="", alias="schemaName") + schema_qualified_name: Optional[str] = Field( + None, description="", alias="schemaQualifiedName" + ) + table_name: Optional[str] = Field(None, description="", alias="tableName") + table_qualified_name: Optional[str] = Field( + None, description="", alias="tableQualifiedName" + ) + view_name: Optional[str] = Field(None, description="", alias="viewName") + view_qualified_name: Optional[str] = Field( + None, description="", alias="viewQualifiedName" + ) + is_profiled: Optional[bool] = Field(None, description="", alias="isProfiled") + last_profiled_at: Optional[datetime] = Field( + None, description="", alias="lastProfiledAt" + ) + no_s_q_l_schema_definition: Optional[str] = Field( + None, description="", alias="noSQLSchemaDefinition" + ) + mongo_d_b_database: Optional[MongoDBDatabase] = Field( + None, description="", alias="mongoDBDatabase" + ) # relationship + + attributes: "MongoDBCollection.Attributes" = Field( + default_factory=lambda: MongoDBCollection.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class MongoDBDatabase(Database): """Description""" - type_name: str = Field("QlikStream", allow_mutation=False) + type_name: str = Field("MongoDBDatabase", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "QlikStream": - raise ValueError("must be QlikStream") + if v != "MongoDBDatabase": + raise ValueError("must be MongoDBDatabase") return v def __setattr__(self, name, value): - if name in QlikStream._convenience_properties: + if name in MongoDBDatabase._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - _convenience_properties: ClassVar[list[str]] = [] + MONGO_DB_DATABASE_COLLECTION_COUNT: ClassVar[NumericField] = NumericField( + "mongoDBDatabaseCollectionCount", "mongoDBDatabaseCollectionCount" + ) + """ + Number of collections in the database. + """ + SCHEMA_COUNT: ClassVar[NumericField] = NumericField("schemaCount", "schemaCount") + """ + Number of schemas in this database. + """ + QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") + """ + Number of times this asset has been queried. + """ + QUERY_USER_COUNT: ClassVar[NumericField] = NumericField( + "queryUserCount", "queryUserCount" + ) + """ + Number of unique users who have queried this asset. + """ + QUERY_USER_MAP: ClassVar[KeywordField] = KeywordField( + "queryUserMap", "queryUserMap" + ) + """ + Map of unique users who have queried this asset to the number of times they have queried it. + """ + QUERY_COUNT_UPDATED_AT: ClassVar[NumericField] = NumericField( + "queryCountUpdatedAt", "queryCountUpdatedAt" + ) + """ + Time (epoch) at which the query count was last updated, in milliseconds. + """ + DATABASE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "databaseName", "databaseName.keyword", "databaseName" + ) + """ + Simple name of the database in which this SQL asset exists, or empty if it does not exist within a database. 
+ """ + DATABASE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "databaseQualifiedName", "databaseQualifiedName" + ) + """ + Unique name of the database in which this SQL asset exists, or empty if it does not exist within a database. + """ + SCHEMA_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "schemaName", "schemaName.keyword", "schemaName" + ) + """ + Simple name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. + """ + SCHEMA_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "schemaQualifiedName", "schemaQualifiedName" + ) + """ + Unique name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. + """ + TABLE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "tableName", "tableName.keyword", "tableName" + ) + """ + Simple name of the table in which this SQL asset exists, or empty if it does not exist within a table. + """ + TABLE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "tableQualifiedName", "tableQualifiedName" + ) + """ + Unique name of the table in which this SQL asset exists, or empty if it does not exist within a table. + """ + VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "viewName", "viewName.keyword", "viewName" + ) + """ + Simple name of the view in which this SQL asset exists, or empty if it does not exist within a view. + """ + VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "viewQualifiedName", "viewQualifiedName" + ) + """ + Unique name of the view in which this SQL asset exists, or empty if it does not exist within a view. + """ + IS_PROFILED: ClassVar[BooleanField] = BooleanField("isProfiled", "isProfiled") + """ + Whether this asset has been profiled (true) or not (false). + """ + LAST_PROFILED_AT: ClassVar[NumericField] = NumericField( + "lastProfiledAt", "lastProfiledAt" + ) + """ + Time (epoch) at which this asset was last profiled, in milliseconds. + """ + NO_SQL_SCHEMA_DEFINITION: ClassVar[TextField] = TextField( + "noSQLSchemaDefinition", "noSQLSchemaDefinition" + ) + """ + Represents attributes for describing the key schema for the table and indexes. 
+ """ + + MONGO_DB_COLLECTIONS: ClassVar[RelationField] = RelationField("mongoDBCollections") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "mongo_d_b_database_collection_count", + "schema_count", + "query_count", + "query_user_count", + "query_user_map", + "query_count_updated_at", + "database_name", + "database_qualified_name", + "schema_name", + "schema_qualified_name", + "table_name", + "table_qualified_name", + "view_name", + "view_qualified_name", + "is_profiled", + "last_profiled_at", + "no_s_q_l_schema_definition", + "mongo_d_b_collections", + ] + + @property + def mongo_d_b_database_collection_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.mongo_d_b_database_collection_count + ) + + @mongo_d_b_database_collection_count.setter + def mongo_d_b_database_collection_count( + self, mongo_d_b_database_collection_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_database_collection_count = ( + mongo_d_b_database_collection_count + ) + + @property + def schema_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.schema_count + + @schema_count.setter + def schema_count(self, schema_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_count = schema_count + + @property + def query_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_count + + @query_count.setter + def query_count(self, query_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_count = query_count + + @property + def query_user_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_user_count + + @query_user_count.setter + def query_user_count(self, query_user_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_user_count = query_user_count + + @property + def query_user_map(self) -> Optional[dict[str, int]]: + return None if self.attributes is None else self.attributes.query_user_map + + @query_user_map.setter + def query_user_map(self, query_user_map: Optional[dict[str, int]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_user_map = query_user_map + + @property + def query_count_updated_at(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.query_count_updated_at + ) + + @query_count_updated_at.setter + def query_count_updated_at(self, query_count_updated_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_count_updated_at = query_count_updated_at + + @property + def database_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.database_name + + @database_name.setter + def database_name(self, database_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.database_name = database_name + + @property + def database_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.database_qualified_name + ) + + @database_qualified_name.setter + def database_qualified_name(self, database_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.database_qualified_name = database_qualified_name + + @property + def schema_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.schema_name + + @schema_name.setter + def schema_name(self, schema_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_name = schema_name + + @property + def schema_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.schema_qualified_name + ) + + @schema_qualified_name.setter + def schema_qualified_name(self, schema_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_qualified_name = schema_qualified_name + + @property + def table_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.table_name + + @table_name.setter + def table_name(self, table_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table_name = table_name + + @property + def table_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.table_qualified_name + + @table_qualified_name.setter + def table_qualified_name(self, table_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table_qualified_name = table_qualified_name + + @property + def view_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.view_name + + @view_name.setter + def view_name(self, view_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_name = view_name + + @property + def view_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.view_qualified_name + + @view_qualified_name.setter + def view_qualified_name(self, view_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_qualified_name = view_qualified_name + + @property + def is_profiled(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_profiled + + @is_profiled.setter + def is_profiled(self, is_profiled: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_profiled = is_profiled + + @property + def last_profiled_at(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.last_profiled_at + + @last_profiled_at.setter + def last_profiled_at(self, last_profiled_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.last_profiled_at = last_profiled_at + + @property + def no_s_q_l_schema_definition(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.no_s_q_l_schema_definition + ) + + @no_s_q_l_schema_definition.setter + def no_s_q_l_schema_definition(self, no_s_q_l_schema_definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.no_s_q_l_schema_definition = no_s_q_l_schema_definition + + @property + def mongo_d_b_collections(self) -> Optional[list[MongoDBCollection]]: + return ( + None if self.attributes is None else self.attributes.mongo_d_b_collections + ) + + @mongo_d_b_collections.setter + def mongo_d_b_collections( + self, mongo_d_b_collections: 
Optional[list[MongoDBCollection]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_collections = mongo_d_b_collections + + class Attributes(Database.Attributes): + mongo_d_b_database_collection_count: Optional[int] = Field( + None, description="", alias="mongoDBDatabaseCollectionCount" + ) + schema_count: Optional[int] = Field(None, description="", alias="schemaCount") + query_count: Optional[int] = Field(None, description="", alias="queryCount") + query_user_count: Optional[int] = Field( + None, description="", alias="queryUserCount" + ) + query_user_map: Optional[dict[str, int]] = Field( + None, description="", alias="queryUserMap" + ) + query_count_updated_at: Optional[datetime] = Field( + None, description="", alias="queryCountUpdatedAt" + ) + database_name: Optional[str] = Field(None, description="", alias="databaseName") + database_qualified_name: Optional[str] = Field( + None, description="", alias="databaseQualifiedName" + ) + schema_name: Optional[str] = Field(None, description="", alias="schemaName") + schema_qualified_name: Optional[str] = Field( + None, description="", alias="schemaQualifiedName" + ) + table_name: Optional[str] = Field(None, description="", alias="tableName") + table_qualified_name: Optional[str] = Field( + None, description="", alias="tableQualifiedName" + ) + view_name: Optional[str] = Field(None, description="", alias="viewName") + view_qualified_name: Optional[str] = Field( + None, description="", alias="viewQualifiedName" + ) + is_profiled: Optional[bool] = Field(None, description="", alias="isProfiled") + last_profiled_at: Optional[datetime] = Field( + None, description="", alias="lastProfiledAt" + ) + no_s_q_l_schema_definition: Optional[str] = Field( + None, description="", alias="noSQLSchemaDefinition" + ) + mongo_d_b_collections: Optional[list[MongoDBCollection]] = Field( + None, description="", alias="mongoDBCollections" + ) # relationship + + attributes: "MongoDBDatabase.Attributes" = Field( + default_factory=lambda: MongoDBDatabase.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +MongoDBCollection.Attributes.update_forward_refs() -QlikStream.Attributes.update_forward_refs() +MongoDBDatabase.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset85.py b/pyatlan/model/assets/asset85.py index ed1d17d3d..451cc9385 100644 --- a/pyatlan/model/assets/asset85.py +++ b/pyatlan/model/assets/asset85.py @@ -4,64 +4,763 @@ from __future__ import annotations +from datetime import datetime from typing import ClassVar, Optional from pydantic import Field, validator -from pyatlan.model.fields.atlan_fields import KeywordField +from pyatlan.model.enums import DynamoDBSecondaryIndexProjectionType, DynamoDBStatus +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + NumericField, + TextField, +) -from .asset83 import KafkaTopic +from .asset00 import Table -class AzureEventHub(KafkaTopic): +class DynamoDBSecondaryIndex(Table): """Description""" - type_name: str = Field("AzureEventHub", allow_mutation=False) + type_name: str = Field("DynamoDBSecondaryIndex", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "AzureEventHub": - raise ValueError("must be AzureEventHub") + if v != "DynamoDBSecondaryIndex": + raise ValueError("must be DynamoDBSecondaryIndex") return v def __setattr__(self, name, value): - if name in AzureEventHub._convenience_properties: + if name in DynamoDBSecondaryIndex._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - AZURE_EVENT_HUB_STATUS: ClassVar[KeywordField] = KeywordField( - "azureEventHubStatus", "azureEventHubStatus" + DYNAMO_DB_SECONDARY_INDEX_PROJECTION_TYPE: ClassVar[KeywordField] = KeywordField( + "dynamoDBSecondaryIndexProjectionType", "dynamoDBSecondaryIndexProjectionType" ) """ - TBC + Specifies attributes that are projected from the DynamoDB table into the index + """ + COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") + """ + Number of columns in this table. + """ + ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") + """ + Number of rows in this table. + """ + SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") + """ + Size of this table, in bytes. + """ + ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") + """ + Alias for this table. + """ + IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") + """ + Whether this table is temporary (true) or not (false). + """ + IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( + "isQueryPreview", "isQueryPreview" + ) + """ + Whether preview queries are allowed for this table (true) or not (false). + """ + QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( + "queryPreviewConfig", "queryPreviewConfig" + ) + """ + Configuration for preview queries. + """ + EXTERNAL_LOCATION: ClassVar[KeywordField] = KeywordField( + "externalLocation", "externalLocation" + ) + """ + External location of this table, for example: an S3 object location. + """ + EXTERNAL_LOCATION_REGION: ClassVar[KeywordField] = KeywordField( + "externalLocationRegion", "externalLocationRegion" + ) + """ + Region of the external location of this table, for example: S3 region. 
+ """ + EXTERNAL_LOCATION_FORMAT: ClassVar[KeywordField] = KeywordField( + "externalLocationFormat", "externalLocationFormat" + ) + """ + Format of the external location of this table, for example: JSON, CSV, PARQUET, etc. + """ + IS_PARTITIONED: ClassVar[BooleanField] = BooleanField( + "isPartitioned", "isPartitioned" + ) + """ + Whether this table is partitioned (true) or not (false). + """ + PARTITION_STRATEGY: ClassVar[KeywordField] = KeywordField( + "partitionStrategy", "partitionStrategy" + ) + """ + Partition strategy for this table. + """ + PARTITION_COUNT: ClassVar[NumericField] = NumericField( + "partitionCount", "partitionCount" + ) + """ + Number of partitions in this table. + """ + PARTITION_LIST: ClassVar[KeywordField] = KeywordField( + "partitionList", "partitionList" + ) + """ + List of partitions in this table. + """ + QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") + """ + Number of times this asset has been queried. + """ + QUERY_USER_COUNT: ClassVar[NumericField] = NumericField( + "queryUserCount", "queryUserCount" + ) + """ + Number of unique users who have queried this asset. + """ + QUERY_USER_MAP: ClassVar[KeywordField] = KeywordField( + "queryUserMap", "queryUserMap" + ) + """ + Map of unique users who have queried this asset to the number of times they have queried it. + """ + QUERY_COUNT_UPDATED_AT: ClassVar[NumericField] = NumericField( + "queryCountUpdatedAt", "queryCountUpdatedAt" + ) + """ + Time (epoch) at which the query count was last updated, in milliseconds. + """ + DATABASE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "databaseName", "databaseName.keyword", "databaseName" + ) + """ + Simple name of the database in which this SQL asset exists, or empty if it does not exist within a database. + """ + DATABASE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "databaseQualifiedName", "databaseQualifiedName" + ) + """ + Unique name of the database in which this SQL asset exists, or empty if it does not exist within a database. + """ + SCHEMA_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "schemaName", "schemaName.keyword", "schemaName" + ) + """ + Simple name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. + """ + SCHEMA_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "schemaQualifiedName", "schemaQualifiedName" + ) + """ + Unique name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. + """ + TABLE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "tableName", "tableName.keyword", "tableName" + ) + """ + Simple name of the table in which this SQL asset exists, or empty if it does not exist within a table. + """ + TABLE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "tableQualifiedName", "tableQualifiedName" + ) + """ + Unique name of the table in which this SQL asset exists, or empty if it does not exist within a table. + """ + VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "viewName", "viewName.keyword", "viewName" + ) + """ + Simple name of the view in which this SQL asset exists, or empty if it does not exist within a view. + """ + VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "viewQualifiedName", "viewQualifiedName" + ) + """ + Unique name of the view in which this SQL asset exists, or empty if it does not exist within a view. 
+ """ + IS_PROFILED: ClassVar[BooleanField] = BooleanField("isProfiled", "isProfiled") + """ + Whether this asset has been profiled (true) or not (false). + """ + LAST_PROFILED_AT: ClassVar[NumericField] = NumericField( + "lastProfiledAt", "lastProfiledAt" + ) + """ + Time (epoch) at which this asset was last profiled, in milliseconds. + """ + DYNAMO_DB_STATUS: ClassVar[KeywordField] = KeywordField( + "dynamoDBStatus", "dynamoDBStatus" + ) + """ + Status of the DynamoDB Asset + """ + DYNAMO_DB_PARTITION_KEY: ClassVar[KeywordField] = KeywordField( + "dynamoDBPartitionKey", "dynamoDBPartitionKey" + ) + """ + Specifies the partition key of the DynamoDB Table/Index + """ + DYNAMO_DB_SORT_KEY: ClassVar[KeywordField] = KeywordField( + "dynamoDBSortKey", "dynamoDBSortKey" + ) + """ + Specifies the sort key of the DynamoDB Table/Index + """ + DYNAMO_DB_READ_CAPACITY_UNITS: ClassVar[NumericField] = NumericField( + "dynamoDBReadCapacityUnits", "dynamoDBReadCapacityUnits" + ) + """ + The maximum number of strongly consistent reads consumed per second before DynamoDB returns a ThrottlingException + """ + DYNAMO_DB_WRITE_CAPACITY_UNITS: ClassVar[NumericField] = NumericField( + "dynamoDBWriteCapacityUnits", "dynamoDBWriteCapacityUnits" + ) + """ + The maximum number of writes consumed per second before DynamoDB returns a ThrottlingException + """ + NO_SQL_SCHEMA_DEFINITION: ClassVar[TextField] = TextField( + "noSQLSchemaDefinition", "noSQLSchemaDefinition" + ) + """ + Represents attributes for describing the key schema for the table and indexes. """ _convenience_properties: ClassVar[list[str]] = [ - "azure_event_hub_status", + "dynamo_d_b_secondary_index_projection_type", + "column_count", + "row_count", + "size_bytes", + "alias", + "is_temporary", + "is_query_preview", + "query_preview_config", + "external_location", + "external_location_region", + "external_location_format", + "is_partitioned", + "partition_strategy", + "partition_count", + "partition_list", + "query_count", + "query_user_count", + "query_user_map", + "query_count_updated_at", + "database_name", + "database_qualified_name", + "schema_name", + "schema_qualified_name", + "table_name", + "table_qualified_name", + "view_name", + "view_qualified_name", + "is_profiled", + "last_profiled_at", + "dynamo_d_b_status", + "dynamo_d_b_partition_key", + "dynamo_d_b_sort_key", + "dynamo_d_b_read_capacity_units", + "dynamo_d_b_write_capacity_units", + "no_s_q_l_schema_definition", ] @property - def azure_event_hub_status(self) -> Optional[str]: + def dynamo_d_b_secondary_index_projection_type( + self, + ) -> Optional[DynamoDBSecondaryIndexProjectionType]: return ( - None if self.attributes is None else self.attributes.azure_event_hub_status + None + if self.attributes is None + else self.attributes.dynamo_d_b_secondary_index_projection_type ) - @azure_event_hub_status.setter - def azure_event_hub_status(self, azure_event_hub_status: Optional[str]): + @dynamo_d_b_secondary_index_projection_type.setter + def dynamo_d_b_secondary_index_projection_type( + self, + dynamo_d_b_secondary_index_projection_type: Optional[ + DynamoDBSecondaryIndexProjectionType + ], + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_d_b_secondary_index_projection_type = ( + dynamo_d_b_secondary_index_projection_type + ) + + @property + def column_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.column_count + + @column_count.setter + def column_count(self, column_count: Optional[int]): + 
if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_count = column_count + + @property + def row_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.row_count + + @row_count.setter + def row_count(self, row_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.row_count = row_count + + @property + def size_bytes(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.size_bytes + + @size_bytes.setter + def size_bytes(self, size_bytes: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.size_bytes = size_bytes + + @property + def alias(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.alias + + @alias.setter + def alias(self, alias: Optional[str]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.azure_event_hub_status = azure_event_hub_status + self.attributes.alias = alias - class Attributes(KafkaTopic.Attributes): - azure_event_hub_status: Optional[str] = Field( - None, description="", alias="azureEventHubStatus" + @property + def is_temporary(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_temporary + + @is_temporary.setter + def is_temporary(self, is_temporary: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_temporary = is_temporary + + @property + def is_query_preview(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_query_preview + + @is_query_preview.setter + def is_query_preview(self, is_query_preview: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_query_preview = is_query_preview + + @property + def query_preview_config(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.query_preview_config + + @query_preview_config.setter + def query_preview_config(self, query_preview_config: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_preview_config = query_preview_config + + @property + def external_location(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.external_location + + @external_location.setter + def external_location(self, external_location: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.external_location = external_location + + @property + def external_location_region(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.external_location_region + ) + + @external_location_region.setter + def external_location_region(self, external_location_region: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.external_location_region = external_location_region + + @property + def external_location_format(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.external_location_format + ) + + @external_location_format.setter + def external_location_format(self, external_location_format: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.external_location_format = external_location_format + + @property + def is_partitioned(self) -> 
Optional[bool]: + return None if self.attributes is None else self.attributes.is_partitioned + + @is_partitioned.setter + def is_partitioned(self, is_partitioned: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_partitioned = is_partitioned + + @property + def partition_strategy(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.partition_strategy + + @partition_strategy.setter + def partition_strategy(self, partition_strategy: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partition_strategy = partition_strategy + + @property + def partition_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.partition_count + + @partition_count.setter + def partition_count(self, partition_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partition_count = partition_count + + @property + def partition_list(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.partition_list + + @partition_list.setter + def partition_list(self, partition_list: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partition_list = partition_list + + @property + def query_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_count + + @query_count.setter + def query_count(self, query_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_count = query_count + + @property + def query_user_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_user_count + + @query_user_count.setter + def query_user_count(self, query_user_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_user_count = query_user_count + + @property + def query_user_map(self) -> Optional[dict[str, int]]: + return None if self.attributes is None else self.attributes.query_user_map + + @query_user_map.setter + def query_user_map(self, query_user_map: Optional[dict[str, int]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_user_map = query_user_map + + @property + def query_count_updated_at(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.query_count_updated_at + ) + + @query_count_updated_at.setter + def query_count_updated_at(self, query_count_updated_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_count_updated_at = query_count_updated_at + + @property + def database_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.database_name + + @database_name.setter + def database_name(self, database_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.database_name = database_name + + @property + def database_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.database_qualified_name + ) + + @database_qualified_name.setter + def database_qualified_name(self, database_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.database_qualified_name = database_qualified_name + + 
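
DynamoDBSecondaryIndex is the first generated asset to use the new DynamoDBStatus and DynamoDBSecondaryIndexProjectionType enums added by this change. The sketch below shows how the typed convenience properties would be used; it assumes bare construction is valid, the key names are illustrative, and the enum member names (ACTIVE, KEYS_ONLY) are assumptions based on the corresponding AWS values, since only the enum class names appear in this diff.

from pyatlan.model.assets import DynamoDBSecondaryIndex
from pyatlan.model.enums import DynamoDBSecondaryIndexProjectionType, DynamoDBStatus

index = DynamoDBSecondaryIndex()               # assumes bare construction is valid
index.dynamo_d_b_partition_key = "customerId"  # illustrative key names
index.dynamo_d_b_sort_key = "orderDate"
index.dynamo_d_b_read_capacity_units = 5
index.dynamo_d_b_write_capacity_units = 5
index.dynamo_d_b_status = DynamoDBStatus.ACTIVE            # member name assumed
index.dynamo_d_b_secondary_index_projection_type = (
    DynamoDBSecondaryIndexProjectionType.KEYS_ONLY         # member name assumed
)

# dict(by_alias=True) maps the snake_case properties back to the camelCase
# attribute names declared above (dynamoDBStatus, dynamoDBPartitionKey, ...).
payload = index.attributes.dict(by_alias=True, exclude_none=True)
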
@property + def schema_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.schema_name + + @schema_name.setter + def schema_name(self, schema_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_name = schema_name + + @property + def schema_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.schema_qualified_name + ) + + @schema_qualified_name.setter + def schema_qualified_name(self, schema_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_qualified_name = schema_qualified_name + + @property + def table_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.table_name + + @table_name.setter + def table_name(self, table_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table_name = table_name + + @property + def table_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.table_qualified_name + + @table_qualified_name.setter + def table_qualified_name(self, table_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table_qualified_name = table_qualified_name + + @property + def view_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.view_name + + @view_name.setter + def view_name(self, view_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_name = view_name + + @property + def view_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.view_qualified_name + + @view_qualified_name.setter + def view_qualified_name(self, view_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_qualified_name = view_qualified_name + + @property + def is_profiled(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_profiled + + @is_profiled.setter + def is_profiled(self, is_profiled: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_profiled = is_profiled + + @property + def last_profiled_at(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.last_profiled_at + + @last_profiled_at.setter + def last_profiled_at(self, last_profiled_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.last_profiled_at = last_profiled_at + + @property + def dynamo_d_b_status(self) -> Optional[DynamoDBStatus]: + return None if self.attributes is None else self.attributes.dynamo_d_b_status + + @dynamo_d_b_status.setter + def dynamo_d_b_status(self, dynamo_d_b_status: Optional[DynamoDBStatus]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_d_b_status = dynamo_d_b_status + + @property + def dynamo_d_b_partition_key(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dynamo_d_b_partition_key + ) + + @dynamo_d_b_partition_key.setter + def dynamo_d_b_partition_key(self, dynamo_d_b_partition_key: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_d_b_partition_key = 
dynamo_d_b_partition_key + + @property + def dynamo_d_b_sort_key(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dynamo_d_b_sort_key + + @dynamo_d_b_sort_key.setter + def dynamo_d_b_sort_key(self, dynamo_d_b_sort_key: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_d_b_sort_key = dynamo_d_b_sort_key + + @property + def dynamo_d_b_read_capacity_units(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.dynamo_d_b_read_capacity_units + ) + + @dynamo_d_b_read_capacity_units.setter + def dynamo_d_b_read_capacity_units( + self, dynamo_d_b_read_capacity_units: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_d_b_read_capacity_units = dynamo_d_b_read_capacity_units + + @property + def dynamo_d_b_write_capacity_units(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.dynamo_d_b_write_capacity_units + ) + + @dynamo_d_b_write_capacity_units.setter + def dynamo_d_b_write_capacity_units( + self, dynamo_d_b_write_capacity_units: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_d_b_write_capacity_units = ( + dynamo_d_b_write_capacity_units + ) + + @property + def no_s_q_l_schema_definition(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.no_s_q_l_schema_definition + ) + + @no_s_q_l_schema_definition.setter + def no_s_q_l_schema_definition(self, no_s_q_l_schema_definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.no_s_q_l_schema_definition = no_s_q_l_schema_definition + + class Attributes(Table.Attributes): + dynamo_d_b_secondary_index_projection_type: Optional[ + DynamoDBSecondaryIndexProjectionType + ] = Field(None, description="", alias="dynamoDBSecondaryIndexProjectionType") + column_count: Optional[int] = Field(None, description="", alias="columnCount") + row_count: Optional[int] = Field(None, description="", alias="rowCount") + size_bytes: Optional[int] = Field(None, description="", alias="sizeBytes") + alias: Optional[str] = Field(None, description="", alias="alias") + is_temporary: Optional[bool] = Field(None, description="", alias="isTemporary") + is_query_preview: Optional[bool] = Field( + None, description="", alias="isQueryPreview" + ) + query_preview_config: Optional[dict[str, str]] = Field( + None, description="", alias="queryPreviewConfig" + ) + external_location: Optional[str] = Field( + None, description="", alias="externalLocation" + ) + external_location_region: Optional[str] = Field( + None, description="", alias="externalLocationRegion" + ) + external_location_format: Optional[str] = Field( + None, description="", alias="externalLocationFormat" + ) + is_partitioned: Optional[bool] = Field( + None, description="", alias="isPartitioned" + ) + partition_strategy: Optional[str] = Field( + None, description="", alias="partitionStrategy" + ) + partition_count: Optional[int] = Field( + None, description="", alias="partitionCount" + ) + partition_list: Optional[str] = Field( + None, description="", alias="partitionList" + ) + query_count: Optional[int] = Field(None, description="", alias="queryCount") + query_user_count: Optional[int] = Field( + None, description="", alias="queryUserCount" + ) + query_user_map: Optional[dict[str, int]] = Field( + None, description="", 
alias="queryUserMap" + ) + query_count_updated_at: Optional[datetime] = Field( + None, description="", alias="queryCountUpdatedAt" + ) + database_name: Optional[str] = Field(None, description="", alias="databaseName") + database_qualified_name: Optional[str] = Field( + None, description="", alias="databaseQualifiedName" + ) + schema_name: Optional[str] = Field(None, description="", alias="schemaName") + schema_qualified_name: Optional[str] = Field( + None, description="", alias="schemaQualifiedName" + ) + table_name: Optional[str] = Field(None, description="", alias="tableName") + table_qualified_name: Optional[str] = Field( + None, description="", alias="tableQualifiedName" + ) + view_name: Optional[str] = Field(None, description="", alias="viewName") + view_qualified_name: Optional[str] = Field( + None, description="", alias="viewQualifiedName" + ) + is_profiled: Optional[bool] = Field(None, description="", alias="isProfiled") + last_profiled_at: Optional[datetime] = Field( + None, description="", alias="lastProfiledAt" + ) + dynamo_d_b_status: Optional[DynamoDBStatus] = Field( + None, description="", alias="dynamoDBStatus" + ) + dynamo_d_b_partition_key: Optional[str] = Field( + None, description="", alias="dynamoDBPartitionKey" + ) + dynamo_d_b_sort_key: Optional[str] = Field( + None, description="", alias="dynamoDBSortKey" + ) + dynamo_d_b_read_capacity_units: Optional[int] = Field( + None, description="", alias="dynamoDBReadCapacityUnits" + ) + dynamo_d_b_write_capacity_units: Optional[int] = Field( + None, description="", alias="dynamoDBWriteCapacityUnits" + ) + no_s_q_l_schema_definition: Optional[str] = Field( + None, description="", alias="noSQLSchemaDefinition" ) - attributes: "AzureEventHub.Attributes" = Field( - default_factory=lambda: AzureEventHub.Attributes(), + attributes: "DynamoDBSecondaryIndex.Attributes" = Field( + default_factory=lambda: DynamoDBSecondaryIndex.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) -AzureEventHub.Attributes.update_forward_refs() +DynamoDBSecondaryIndex.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset86.py b/pyatlan/model/assets/asset86.py index 6db094d59..16d3ff265 100644 --- a/pyatlan/model/assets/asset86.py +++ b/pyatlan/model/assets/asset86.py @@ -4,30 +4,949 @@ from __future__ import annotations -from typing import ClassVar +from datetime import datetime +from typing import ClassVar, Optional from pydantic import Field, validator -from .asset83 import KafkaConsumerGroup +from pyatlan.model.enums import DynamoDBStatus +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + NumericField, + RelationField, + TextField, +) +from .asset00 import Table +from .asset85 import DynamoDBSecondaryIndex -class AzureEventHubConsumerGroup(KafkaConsumerGroup): + +class DynamoDBTable(Table): """Description""" - type_name: str = Field("AzureEventHubConsumerGroup", allow_mutation=False) + type_name: str = Field("DynamoDBTable", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "AzureEventHubConsumerGroup": - raise ValueError("must be AzureEventHubConsumerGroup") + if v != "DynamoDBTable": + raise ValueError("must be DynamoDBTable") return v def __setattr__(self, name, value): - if name in AzureEventHubConsumerGroup._convenience_properties: + if name in DynamoDBTable._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) - _convenience_properties: ClassVar[list[str]] = [] + DYNAMO_DB_TABLE_GSI_COUNT: ClassVar[NumericField] = NumericField( + "dynamoDBTableGSICount", "dynamoDBTableGSICount" + ) + """ + Represents the number of global secondary indexes on the table. + """ + DYNAMO_DB_TABLE_LSI_COUNT: ClassVar[NumericField] = NumericField( + "dynamoDBTableLSICount", "dynamoDBTableLSICount" + ) + """ + Represents the number of local secondary indexes on the table. + """ + COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") + """ + Number of columns in this table. + """ + ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") + """ + Number of rows in this table. + """ + SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") + """ + Size of this table, in bytes. + """ + ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") + """ + Alias for this table. + """ + IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") + """ + Whether this table is temporary (true) or not (false). + """ + IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( + "isQueryPreview", "isQueryPreview" + ) + """ + Whether preview queries are allowed for this table (true) or not (false). + """ + QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( + "queryPreviewConfig", "queryPreviewConfig" + ) + """ + Configuration for preview queries. + """ + EXTERNAL_LOCATION: ClassVar[KeywordField] = KeywordField( + "externalLocation", "externalLocation" + ) + """ + External location of this table, for example: an S3 object location. + """ + EXTERNAL_LOCATION_REGION: ClassVar[KeywordField] = KeywordField( + "externalLocationRegion", "externalLocationRegion" + ) + """ + Region of the external location of this table, for example: S3 region. 
+ """ + EXTERNAL_LOCATION_FORMAT: ClassVar[KeywordField] = KeywordField( + "externalLocationFormat", "externalLocationFormat" + ) + """ + Format of the external location of this table, for example: JSON, CSV, PARQUET, etc. + """ + IS_PARTITIONED: ClassVar[BooleanField] = BooleanField( + "isPartitioned", "isPartitioned" + ) + """ + Whether this table is partitioned (true) or not (false). + """ + PARTITION_STRATEGY: ClassVar[KeywordField] = KeywordField( + "partitionStrategy", "partitionStrategy" + ) + """ + Partition strategy for this table. + """ + PARTITION_COUNT: ClassVar[NumericField] = NumericField( + "partitionCount", "partitionCount" + ) + """ + Number of partitions in this table. + """ + PARTITION_LIST: ClassVar[KeywordField] = KeywordField( + "partitionList", "partitionList" + ) + """ + List of partitions in this table. + """ + QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") + """ + Number of times this asset has been queried. + """ + QUERY_USER_COUNT: ClassVar[NumericField] = NumericField( + "queryUserCount", "queryUserCount" + ) + """ + Number of unique users who have queried this asset. + """ + QUERY_USER_MAP: ClassVar[KeywordField] = KeywordField( + "queryUserMap", "queryUserMap" + ) + """ + Map of unique users who have queried this asset to the number of times they have queried it. + """ + QUERY_COUNT_UPDATED_AT: ClassVar[NumericField] = NumericField( + "queryCountUpdatedAt", "queryCountUpdatedAt" + ) + """ + Time (epoch) at which the query count was last updated, in milliseconds. + """ + DATABASE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "databaseName", "databaseName.keyword", "databaseName" + ) + """ + Simple name of the database in which this SQL asset exists, or empty if it does not exist within a database. + """ + DATABASE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "databaseQualifiedName", "databaseQualifiedName" + ) + """ + Unique name of the database in which this SQL asset exists, or empty if it does not exist within a database. + """ + SCHEMA_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "schemaName", "schemaName.keyword", "schemaName" + ) + """ + Simple name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. + """ + SCHEMA_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "schemaQualifiedName", "schemaQualifiedName" + ) + """ + Unique name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. + """ + TABLE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "tableName", "tableName.keyword", "tableName" + ) + """ + Simple name of the table in which this SQL asset exists, or empty if it does not exist within a table. + """ + TABLE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "tableQualifiedName", "tableQualifiedName" + ) + """ + Unique name of the table in which this SQL asset exists, or empty if it does not exist within a table. + """ + VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "viewName", "viewName.keyword", "viewName" + ) + """ + Simple name of the view in which this SQL asset exists, or empty if it does not exist within a view. + """ + VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "viewQualifiedName", "viewQualifiedName" + ) + """ + Unique name of the view in which this SQL asset exists, or empty if it does not exist within a view. 
+ """ + IS_PROFILED: ClassVar[BooleanField] = BooleanField("isProfiled", "isProfiled") + """ + Whether this asset has been profiled (true) or not (false). + """ + LAST_PROFILED_AT: ClassVar[NumericField] = NumericField( + "lastProfiledAt", "lastProfiledAt" + ) + """ + Time (epoch) at which this asset was last profiled, in milliseconds. + """ + DYNAMO_DB_STATUS: ClassVar[KeywordField] = KeywordField( + "dynamoDBStatus", "dynamoDBStatus" + ) + """ + Status of the DynamoDB Asset + """ + DYNAMO_DB_PARTITION_KEY: ClassVar[KeywordField] = KeywordField( + "dynamoDBPartitionKey", "dynamoDBPartitionKey" + ) + """ + Specifies the partition key of the DynamoDB Table/Index + """ + DYNAMO_DB_SORT_KEY: ClassVar[KeywordField] = KeywordField( + "dynamoDBSortKey", "dynamoDBSortKey" + ) + """ + Specifies the sort key of the DynamoDB Table/Index + """ + DYNAMO_DB_READ_CAPACITY_UNITS: ClassVar[NumericField] = NumericField( + "dynamoDBReadCapacityUnits", "dynamoDBReadCapacityUnits" + ) + """ + The maximum number of strongly consistent reads consumed per second before DynamoDB returns a ThrottlingException + """ + DYNAMO_DB_WRITE_CAPACITY_UNITS: ClassVar[NumericField] = NumericField( + "dynamoDBWriteCapacityUnits", "dynamoDBWriteCapacityUnits" + ) + """ + The maximum number of writes consumed per second before DynamoDB returns a ThrottlingException + """ + NO_SQL_SCHEMA_DEFINITION: ClassVar[TextField] = TextField( + "noSQLSchemaDefinition", "noSQLSchemaDefinition" + ) + """ + Represents attributes for describing the key schema for the table and indexes. + """ + + DYNAMO_DB_LOCAL_SECONDARY_INDEXES: ClassVar[RelationField] = RelationField( + "dynamoDBLocalSecondaryIndexes" + ) + """ + TBC + """ + DYNAMO_DB_GLOBAL_SECONDARY_INDEXES: ClassVar[RelationField] = RelationField( + "dynamoDBGlobalSecondaryIndexes" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "dynamo_dbtable_g_s_i_count", + "dynamo_dbtable_l_s_i_count", + "column_count", + "row_count", + "size_bytes", + "alias", + "is_temporary", + "is_query_preview", + "query_preview_config", + "external_location", + "external_location_region", + "external_location_format", + "is_partitioned", + "partition_strategy", + "partition_count", + "partition_list", + "query_count", + "query_user_count", + "query_user_map", + "query_count_updated_at", + "database_name", + "database_qualified_name", + "schema_name", + "schema_qualified_name", + "table_name", + "table_qualified_name", + "view_name", + "view_qualified_name", + "is_profiled", + "last_profiled_at", + "dynamo_d_b_status", + "dynamo_d_b_partition_key", + "dynamo_d_b_sort_key", + "dynamo_d_b_read_capacity_units", + "dynamo_d_b_write_capacity_units", + "no_s_q_l_schema_definition", + "dynamo_d_b_local_secondary_indexes", + "dynamo_d_b_global_secondary_indexes", + ] + + @property + def dynamo_dbtable_g_s_i_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.dynamo_dbtable_g_s_i_count + ) + + @dynamo_dbtable_g_s_i_count.setter + def dynamo_dbtable_g_s_i_count(self, dynamo_dbtable_g_s_i_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_dbtable_g_s_i_count = dynamo_dbtable_g_s_i_count + + @property + def dynamo_dbtable_l_s_i_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.dynamo_dbtable_l_s_i_count + ) + + @dynamo_dbtable_l_s_i_count.setter + def dynamo_dbtable_l_s_i_count(self, dynamo_dbtable_l_s_i_count: Optional[int]): + if 
self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_dbtable_l_s_i_count = dynamo_dbtable_l_s_i_count + + @property + def column_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.column_count + + @column_count.setter + def column_count(self, column_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_count = column_count + + @property + def row_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.row_count + + @row_count.setter + def row_count(self, row_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.row_count = row_count + + @property + def size_bytes(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.size_bytes + + @size_bytes.setter + def size_bytes(self, size_bytes: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.size_bytes = size_bytes + + @property + def alias(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.alias + + @alias.setter + def alias(self, alias: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.alias = alias + + @property + def is_temporary(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_temporary + + @is_temporary.setter + def is_temporary(self, is_temporary: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_temporary = is_temporary + + @property + def is_query_preview(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_query_preview + + @is_query_preview.setter + def is_query_preview(self, is_query_preview: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_query_preview = is_query_preview + + @property + def query_preview_config(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.query_preview_config + + @query_preview_config.setter + def query_preview_config(self, query_preview_config: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_preview_config = query_preview_config + + @property + def external_location(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.external_location + + @external_location.setter + def external_location(self, external_location: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.external_location = external_location + + @property + def external_location_region(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.external_location_region + ) + + @external_location_region.setter + def external_location_region(self, external_location_region: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.external_location_region = external_location_region + + @property + def external_location_format(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.external_location_format + ) + + @external_location_format.setter + def external_location_format(self, external_location_format: Optional[str]): + if self.attributes is None: + 
self.attributes = self.Attributes() + self.attributes.external_location_format = external_location_format + + @property + def is_partitioned(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_partitioned + + @is_partitioned.setter + def is_partitioned(self, is_partitioned: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_partitioned = is_partitioned + + @property + def partition_strategy(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.partition_strategy + + @partition_strategy.setter + def partition_strategy(self, partition_strategy: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partition_strategy = partition_strategy + + @property + def partition_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.partition_count + + @partition_count.setter + def partition_count(self, partition_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partition_count = partition_count + + @property + def partition_list(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.partition_list + + @partition_list.setter + def partition_list(self, partition_list: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partition_list = partition_list + + @property + def query_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_count + + @query_count.setter + def query_count(self, query_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_count = query_count + + @property + def query_user_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_user_count + + @query_user_count.setter + def query_user_count(self, query_user_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_user_count = query_user_count + + @property + def query_user_map(self) -> Optional[dict[str, int]]: + return None if self.attributes is None else self.attributes.query_user_map + + @query_user_map.setter + def query_user_map(self, query_user_map: Optional[dict[str, int]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_user_map = query_user_map + + @property + def query_count_updated_at(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.query_count_updated_at + ) + + @query_count_updated_at.setter + def query_count_updated_at(self, query_count_updated_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_count_updated_at = query_count_updated_at + + @property + def database_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.database_name + + @database_name.setter + def database_name(self, database_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.database_name = database_name + + @property + def database_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.database_qualified_name + ) + + @database_qualified_name.setter + def database_qualified_name(self, database_qualified_name: 
Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.database_qualified_name = database_qualified_name + + @property + def schema_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.schema_name + + @schema_name.setter + def schema_name(self, schema_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_name = schema_name + + @property + def schema_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.schema_qualified_name + ) + + @schema_qualified_name.setter + def schema_qualified_name(self, schema_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_qualified_name = schema_qualified_name + + @property + def table_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.table_name + + @table_name.setter + def table_name(self, table_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table_name = table_name + + @property + def table_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.table_qualified_name + + @table_qualified_name.setter + def table_qualified_name(self, table_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table_qualified_name = table_qualified_name + + @property + def view_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.view_name + + @view_name.setter + def view_name(self, view_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_name = view_name + + @property + def view_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.view_qualified_name + + @view_qualified_name.setter + def view_qualified_name(self, view_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_qualified_name = view_qualified_name + + @property + def is_profiled(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_profiled + + @is_profiled.setter + def is_profiled(self, is_profiled: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_profiled = is_profiled + + @property + def last_profiled_at(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.last_profiled_at + + @last_profiled_at.setter + def last_profiled_at(self, last_profiled_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.last_profiled_at = last_profiled_at + + @property + def dynamo_d_b_status(self) -> Optional[DynamoDBStatus]: + return None if self.attributes is None else self.attributes.dynamo_d_b_status + + @dynamo_d_b_status.setter + def dynamo_d_b_status(self, dynamo_d_b_status: Optional[DynamoDBStatus]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_d_b_status = dynamo_d_b_status + + @property + def dynamo_d_b_partition_key(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dynamo_d_b_partition_key + ) + + @dynamo_d_b_partition_key.setter + def dynamo_d_b_partition_key(self, 
dynamo_d_b_partition_key: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_d_b_partition_key = dynamo_d_b_partition_key + + @property + def dynamo_d_b_sort_key(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dynamo_d_b_sort_key + + @dynamo_d_b_sort_key.setter + def dynamo_d_b_sort_key(self, dynamo_d_b_sort_key: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_d_b_sort_key = dynamo_d_b_sort_key + + @property + def dynamo_d_b_read_capacity_units(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.dynamo_d_b_read_capacity_units + ) + + @dynamo_d_b_read_capacity_units.setter + def dynamo_d_b_read_capacity_units( + self, dynamo_d_b_read_capacity_units: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_d_b_read_capacity_units = dynamo_d_b_read_capacity_units + + @property + def dynamo_d_b_write_capacity_units(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.dynamo_d_b_write_capacity_units + ) + + @dynamo_d_b_write_capacity_units.setter + def dynamo_d_b_write_capacity_units( + self, dynamo_d_b_write_capacity_units: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_d_b_write_capacity_units = ( + dynamo_d_b_write_capacity_units + ) + + @property + def no_s_q_l_schema_definition(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.no_s_q_l_schema_definition + ) + + @no_s_q_l_schema_definition.setter + def no_s_q_l_schema_definition(self, no_s_q_l_schema_definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.no_s_q_l_schema_definition = no_s_q_l_schema_definition + + @property + def dynamo_d_b_local_secondary_indexes( + self, + ) -> Optional[list[DynamoDBLocalSecondaryIndex]]: + return ( + None + if self.attributes is None + else self.attributes.dynamo_d_b_local_secondary_indexes + ) + + @dynamo_d_b_local_secondary_indexes.setter + def dynamo_d_b_local_secondary_indexes( + self, + dynamo_d_b_local_secondary_indexes: Optional[list[DynamoDBLocalSecondaryIndex]], + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_d_b_local_secondary_indexes = ( + dynamo_d_b_local_secondary_indexes + ) + + @property + def dynamo_d_b_global_secondary_indexes( + self, + ) -> Optional[list[DynamoDBGlobalSecondaryIndex]]: + return ( + None + if self.attributes is None + else self.attributes.dynamo_d_b_global_secondary_indexes + ) + + @dynamo_d_b_global_secondary_indexes.setter + def dynamo_d_b_global_secondary_indexes( + self, + dynamo_d_b_global_secondary_indexes: Optional[ + list[DynamoDBGlobalSecondaryIndex] + ], + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_d_b_global_secondary_indexes = ( + dynamo_d_b_global_secondary_indexes + ) + + class Attributes(Table.Attributes): + dynamo_dbtable_g_s_i_count: Optional[int] = Field( + None, description="", alias="dynamoDBTableGSICount" + ) + dynamo_dbtable_l_s_i_count: Optional[int] = Field( + None, description="", alias="dynamoDBTableLSICount" + ) + column_count: Optional[int] = Field(None, description="", alias="columnCount") + row_count: Optional[int] = Field(None, description="", alias="rowCount") + size_bytes: 
Optional[int] = Field(None, description="", alias="sizeBytes") + alias: Optional[str] = Field(None, description="", alias="alias") + is_temporary: Optional[bool] = Field(None, description="", alias="isTemporary") + is_query_preview: Optional[bool] = Field( + None, description="", alias="isQueryPreview" + ) + query_preview_config: Optional[dict[str, str]] = Field( + None, description="", alias="queryPreviewConfig" + ) + external_location: Optional[str] = Field( + None, description="", alias="externalLocation" + ) + external_location_region: Optional[str] = Field( + None, description="", alias="externalLocationRegion" + ) + external_location_format: Optional[str] = Field( + None, description="", alias="externalLocationFormat" + ) + is_partitioned: Optional[bool] = Field( + None, description="", alias="isPartitioned" + ) + partition_strategy: Optional[str] = Field( + None, description="", alias="partitionStrategy" + ) + partition_count: Optional[int] = Field( + None, description="", alias="partitionCount" + ) + partition_list: Optional[str] = Field( + None, description="", alias="partitionList" + ) + query_count: Optional[int] = Field(None, description="", alias="queryCount") + query_user_count: Optional[int] = Field( + None, description="", alias="queryUserCount" + ) + query_user_map: Optional[dict[str, int]] = Field( + None, description="", alias="queryUserMap" + ) + query_count_updated_at: Optional[datetime] = Field( + None, description="", alias="queryCountUpdatedAt" + ) + database_name: Optional[str] = Field(None, description="", alias="databaseName") + database_qualified_name: Optional[str] = Field( + None, description="", alias="databaseQualifiedName" + ) + schema_name: Optional[str] = Field(None, description="", alias="schemaName") + schema_qualified_name: Optional[str] = Field( + None, description="", alias="schemaQualifiedName" + ) + table_name: Optional[str] = Field(None, description="", alias="tableName") + table_qualified_name: Optional[str] = Field( + None, description="", alias="tableQualifiedName" + ) + view_name: Optional[str] = Field(None, description="", alias="viewName") + view_qualified_name: Optional[str] = Field( + None, description="", alias="viewQualifiedName" + ) + is_profiled: Optional[bool] = Field(None, description="", alias="isProfiled") + last_profiled_at: Optional[datetime] = Field( + None, description="", alias="lastProfiledAt" + ) + dynamo_d_b_status: Optional[DynamoDBStatus] = Field( + None, description="", alias="dynamoDBStatus" + ) + dynamo_d_b_partition_key: Optional[str] = Field( + None, description="", alias="dynamoDBPartitionKey" + ) + dynamo_d_b_sort_key: Optional[str] = Field( + None, description="", alias="dynamoDBSortKey" + ) + dynamo_d_b_read_capacity_units: Optional[int] = Field( + None, description="", alias="dynamoDBReadCapacityUnits" + ) + dynamo_d_b_write_capacity_units: Optional[int] = Field( + None, description="", alias="dynamoDBWriteCapacityUnits" + ) + no_s_q_l_schema_definition: Optional[str] = Field( + None, description="", alias="noSQLSchemaDefinition" + ) + dynamo_d_b_local_secondary_indexes: Optional[ + list[DynamoDBLocalSecondaryIndex] + ] = Field( + None, description="", alias="dynamoDBLocalSecondaryIndexes" + ) # relationship + dynamo_d_b_global_secondary_indexes: Optional[ + list[DynamoDBGlobalSecondaryIndex] + ] = Field( + None, description="", alias="dynamoDBGlobalSecondaryIndexes" + ) # relationship + + attributes: "DynamoDBTable.Attributes" = Field( + default_factory=lambda: DynamoDBTable.Attributes(), + description="Map of 
attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class DynamoDBLocalSecondaryIndex(DynamoDBSecondaryIndex): + """Description""" + + type_name: str = Field("DynamoDBLocalSecondaryIndex", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "DynamoDBLocalSecondaryIndex": + raise ValueError("must be DynamoDBLocalSecondaryIndex") + return v + + def __setattr__(self, name, value): + if name in DynamoDBLocalSecondaryIndex._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DYNAMO_DB_TABLE: ClassVar[RelationField] = RelationField("dynamoDBTable") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "dynamo_dbtable", + ] + + @property + def dynamo_dbtable(self) -> Optional[DynamoDBTable]: + return None if self.attributes is None else self.attributes.dynamo_dbtable + + @dynamo_dbtable.setter + def dynamo_dbtable(self, dynamo_dbtable: Optional[DynamoDBTable]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_dbtable = dynamo_dbtable + + class Attributes(DynamoDBSecondaryIndex.Attributes): + dynamo_dbtable: Optional[DynamoDBTable] = Field( + None, description="", alias="dynamoDBTable" + ) # relationship + + attributes: "DynamoDBLocalSecondaryIndex.Attributes" = Field( + default_factory=lambda: DynamoDBLocalSecondaryIndex.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +class DynamoDBGlobalSecondaryIndex(DynamoDBSecondaryIndex): + """Description""" + + type_name: str = Field("DynamoDBGlobalSecondaryIndex", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "DynamoDBGlobalSecondaryIndex": + raise ValueError("must be DynamoDBGlobalSecondaryIndex") + return v + + def __setattr__(self, name, value): + if name in DynamoDBGlobalSecondaryIndex._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DYNAMO_DB_TABLE: ClassVar[RelationField] = RelationField("dynamoDBTable") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "dynamo_dbtable", + ] + + @property + def dynamo_dbtable(self) -> Optional[DynamoDBTable]: + return None if self.attributes is None else self.attributes.dynamo_dbtable + + @dynamo_dbtable.setter + def dynamo_dbtable(self, dynamo_dbtable: Optional[DynamoDBTable]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_dbtable = dynamo_dbtable + + class Attributes(DynamoDBSecondaryIndex.Attributes): + dynamo_dbtable: Optional[DynamoDBTable] = Field( + None, description="", alias="dynamoDBTable" + ) # relationship + + attributes: "DynamoDBGlobalSecondaryIndex.Attributes" = Field( + default_factory=lambda: DynamoDBGlobalSecondaryIndex.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +DynamoDBTable.Attributes.update_forward_refs() + + +DynamoDBLocalSecondaryIndex.Attributes.update_forward_refs() -AzureEventHubConsumerGroup.Attributes.update_forward_refs() +DynamoDBGlobalSecondaryIndex.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset83.py b/pyatlan/model/assets/asset87.py similarity index 95% rename from pyatlan/model/assets/asset83.py rename to pyatlan/model/assets/asset87.py index 4289119f5..95b8812ba 100644 --- a/pyatlan/model/assets/asset83.py +++ b/pyatlan/model/assets/asset87.py @@ -17,7 +17,7 @@ ) from pyatlan.model.structs import KafkaTopicConsumption -from .asset55 import Kafka +from .asset56 import Kafka class KafkaTopic(Kafka): @@ -40,55 +40,55 @@ def __setattr__(self, name, value): "kafkaTopicIsInternal", "kafkaTopicIsInternal" ) """ - TBC + Whether this topic is an internal topic (true) or not (false). """ KAFKA_TOPIC_COMPRESSION_TYPE: ClassVar[KeywordField] = KeywordField( "kafkaTopicCompressionType", "kafkaTopicCompressionType" ) """ - TBC + Type of compression used for this topic. """ KAFKA_TOPIC_REPLICATION_FACTOR: ClassVar[NumericField] = NumericField( "kafkaTopicReplicationFactor", "kafkaTopicReplicationFactor" ) """ - TBC + Replication factor for this topic. """ KAFKA_TOPIC_SEGMENT_BYTES: ClassVar[NumericField] = NumericField( "kafkaTopicSegmentBytes", "kafkaTopicSegmentBytes" ) """ - TBC + Segment size for this topic. """ KAFKA_TOPIC_RETENTION_TIME_IN_MS: ClassVar[NumericField] = NumericField( "kafkaTopicRetentionTimeInMs", "kafkaTopicRetentionTimeInMs" ) """ - TBC + Amount of time messages will be retained in this topic, in milliseconds. """ KAFKA_TOPIC_PARTITIONS_COUNT: ClassVar[NumericField] = NumericField( "kafkaTopicPartitionsCount", "kafkaTopicPartitionsCount" ) """ - TBC + Number of partitions for this topic. """ KAFKA_TOPIC_SIZE_IN_BYTES: ClassVar[NumericField] = NumericField( "kafkaTopicSizeInBytes", "kafkaTopicSizeInBytes" ) """ - TBC + Size of this topic, in bytes. """ KAFKA_TOPIC_RECORD_COUNT: ClassVar[NumericField] = NumericField( "kafkaTopicRecordCount", "kafkaTopicRecordCount" ) """ - TBC + Number of (unexpired) messages in this topic. """ KAFKA_TOPIC_CLEANUP_POLICY: ClassVar[KeywordField] = KeywordField( "kafkaTopicCleanupPolicy", "kafkaTopicCleanupPolicy" ) """ - TBC + Cleanup policy for this topic. """ KAFKA_CONSUMER_GROUPS: ClassVar[RelationField] = RelationField( @@ -321,25 +321,25 @@ def __setattr__(self, name, value): "kafkaConsumerGroupTopicConsumptionProperties", ) """ - TBC + List of consumption properties for Kafka topics, for this consumer group. """ KAFKA_CONSUMER_GROUP_MEMBER_COUNT: ClassVar[NumericField] = NumericField( "kafkaConsumerGroupMemberCount", "kafkaConsumerGroupMemberCount" ) """ - TBC + Number of members in this consumer group. """ KAFKA_TOPIC_NAMES: ClassVar[KeywordField] = KeywordField( "kafkaTopicNames", "kafkaTopicNames" ) """ - TBC + Simple names of the topics consumed by this consumer group. """ KAFKA_TOPIC_QUALIFIED_NAMES: ClassVar[KeywordField] = KeywordField( "kafkaTopicQualifiedNames", "kafkaTopicQualifiedNames" ) """ - TBC + Unique names of the topics consumed by this consumer group. 
""" KAFKA_TOPICS: ClassVar[RelationField] = RelationField("kafkaTopics") diff --git a/pyatlan/model/assets/asset22.py b/pyatlan/model/assets/asset88.py similarity index 61% rename from pyatlan/model/assets/asset22.py rename to pyatlan/model/assets/asset88.py index 06146f9c8..2ca5705cd 100644 --- a/pyatlan/model/assets/asset22.py +++ b/pyatlan/model/assets/asset88.py @@ -8,26 +8,26 @@ from pydantic import Field, validator -from .asset00 import Catalog +from .asset81 import QlikSpace -class EventStore(Catalog): +class QlikStream(QlikSpace): """Description""" - type_name: str = Field("EventStore", allow_mutation=False) + type_name: str = Field("QlikStream", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "EventStore": - raise ValueError("must be EventStore") + if v != "QlikStream": + raise ValueError("must be QlikStream") return v def __setattr__(self, name, value): - if name in EventStore._convenience_properties: + if name in QlikStream._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] -EventStore.Attributes.update_forward_refs() +QlikStream.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset89.py b/pyatlan/model/assets/asset89.py new file mode 100644 index 000000000..165d01a35 --- /dev/null +++ b/pyatlan/model/assets/asset89.py @@ -0,0 +1,67 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField + +from .asset87 import KafkaTopic + + +class AzureEventHub(KafkaTopic): + """Description""" + + type_name: str = Field("AzureEventHub", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "AzureEventHub": + raise ValueError("must be AzureEventHub") + return v + + def __setattr__(self, name, value): + if name in AzureEventHub._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + AZURE_EVENT_HUB_STATUS: ClassVar[KeywordField] = KeywordField( + "azureEventHubStatus", "azureEventHubStatus" + ) + """ + + """ + + _convenience_properties: ClassVar[list[str]] = [ + "azure_event_hub_status", + ] + + @property + def azure_event_hub_status(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.azure_event_hub_status + ) + + @azure_event_hub_status.setter + def azure_event_hub_status(self, azure_event_hub_status: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.azure_event_hub_status = azure_event_hub_status + + class Attributes(KafkaTopic.Attributes): + azure_event_hub_status: Optional[str] = Field( + None, description="", alias="azureEventHubStatus" + ) + + attributes: "AzureEventHub.Attributes" = Field( + default_factory=lambda: AzureEventHub.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +AzureEventHub.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset26.py b/pyatlan/model/assets/asset90.py similarity index 53% rename from pyatlan/model/assets/asset26.py rename to pyatlan/model/assets/asset90.py index eb7a44ed4..21c2d27e7 100644 --- a/pyatlan/model/assets/asset26.py +++ b/pyatlan/model/assets/asset90.py @@ -8,26 +8,26 @@ from pydantic import Field, validator -from .asset00 import Catalog +from .asset87 import KafkaConsumerGroup -class Insight(Catalog): +class AzureEventHubConsumerGroup(KafkaConsumerGroup): """Description""" - type_name: str = Field("Insight", allow_mutation=False) + type_name: str = Field("AzureEventHubConsumerGroup", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): - if v != "Insight": - raise ValueError("must be Insight") + if v != "AzureEventHubConsumerGroup": + raise ValueError("must be AzureEventHubConsumerGroup") return v def __setattr__(self, name, value): - if name in Insight._convenience_properties: + if name in AzureEventHubConsumerGroup._convenience_properties: return object.__setattr__(self, name, value) super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] -Insight.Attributes.update_forward_refs() +AzureEventHubConsumerGroup.Attributes.update_forward_refs() diff --git a/pyatlan/model/enums.py b/pyatlan/model/enums.py index 346638c3f..802a54d2f 100644 --- a/pyatlan/model/enums.py +++ b/pyatlan/model/enums.py @@ -2092,6 +2092,22 @@ class DataProductStatus(str, Enum): ARCHIVED = "Archived" +class DynamoDBSecondaryIndexProjectionType(str, Enum): + KEYS_ONLY = "KEYS_ONLY" + INCLUDE = "INCLUDE" + ALL = "ALL" + + +class DynamoDBStatus(str, Enum): + CREATING = "CREATING" + UPDATING = "UPDATING" + DELETING = "DELETING" + ACTIVE = "ACTIVE" + INACCESSIBLE_ENCRYPTION_CREDENTIALS = "INACCESSIBLE_ENCRYPTION_CREDENTIALS" + ARCHIVING = "ARCHIVING" + ARCHIVED = "ARCHIVED" + + class FileType(str, Enum): PDF = "pdf" DOC = "doc" diff --git a/pyatlan/model/structs.py b/pyatlan/model/structs.py index 3ba0b62f4..da30e5a79 100644 --- a/pyatlan/model/structs.py +++ b/pyatlan/model/structs.py @@ -40,6 +40,13 @@ class AwsCloudWatchMetric(AtlanObject): aws_cloud_watch_metric_scope: str = Field(description="") +class Histogram(AtlanObject): + """Description""" + + boundaries: set[float] = Field(description="") + frequencies: set[float] = Field(description="") + + class KafkaTopicConsumption(AtlanObject): """Description""" @@ -49,13 +56,6 @@ class KafkaTopicConsumption(AtlanObject): topic_current_offset: Optional[int] = Field(None, description="") -class Histogram(AtlanObject): - """Description""" - - boundaries: set[float] = Field(description="") - frequencies: set[float] = Field(description="") - - class ColumnValueFrequencyMap(AtlanObject): """Description""" @@ -96,6 +96,13 @@ def create( badge_condition_colorhex: Optional[str] = Field(None, description="") +class SourceTagAttachmentValue(AtlanObject): + """Description""" + + tag_attachment_key: Optional[str] = Field(None, description="") + tag_attachment_value: Optional[str] = Field(None, description="") + + class SourceTagAttachment(AtlanObject): """Description""" @@ -111,13 +118,6 @@ class SourceTagAttachment(AtlanObject): source_tag_sync_error: Optional[str] = Field(None, description="") -class SourceTagAttachmentValue(AtlanObject): - """Description""" - - tag_attachment_key: Optional[str] = Field(None, description="") - 
tag_attachment_value: Optional[str] = Field(None, description="") - - class AzureTag(AtlanObject): """Description""" @@ -146,6 +146,13 @@ class AwsTag(AtlanObject): aws_tag_value: str = Field(description="") +class GoogleTag(AtlanObject): + """Description""" + + google_tag_key: str = Field(description="") + google_tag_value: str = Field(description="") + + class DbtMetricFilter(AtlanObject): """Description""" @@ -155,13 +162,6 @@ class DbtMetricFilter(AtlanObject): dbt_metric_filter_value: Optional[str] = Field(None, description="") -class GoogleTag(AtlanObject): - """Description""" - - google_tag_key: str = Field(description="") - google_tag_value: str = Field(description="") - - class AuthPolicyValiditySchedule(AtlanObject): """Description""" @@ -217,18 +217,18 @@ class SourceTagAttribute(AtlanObject): AwsCloudWatchMetric.update_forward_refs() -KafkaTopicConsumption.update_forward_refs() - Histogram.update_forward_refs() +KafkaTopicConsumption.update_forward_refs() + ColumnValueFrequencyMap.update_forward_refs() BadgeCondition.update_forward_refs() -SourceTagAttachment.update_forward_refs() - SourceTagAttachmentValue.update_forward_refs() +SourceTagAttachment.update_forward_refs() + AzureTag.update_forward_refs() StarredDetails.update_forward_refs() @@ -237,10 +237,10 @@ class SourceTagAttribute(AtlanObject): AwsTag.update_forward_refs() -DbtMetricFilter.update_forward_refs() - GoogleTag.update_forward_refs() +DbtMetricFilter.update_forward_refs() + AuthPolicyValiditySchedule.update_forward_refs() MCRuleComparison.update_forward_refs()
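
The hunks above add the DynamoDB asset model (DynamoDBTable, DynamoDBSecondaryIndex and its local/global subclasses) together with the DynamoDBStatus and DynamoDBSecondaryIndexProjectionType enums. A minimal usage sketch follows, assuming the generated classes can be instantiated directly like other pyatlan assets; the attribute values are illustrative placeholders, not part of this change:

    from pyatlan.model.assets import DynamoDBTable
    from pyatlan.model.enums import DynamoDBStatus

    # Hypothetical example: exercise the convenience properties generated in
    # asset86.py. Each setter lazily creates the nested Attributes object
    # (if self.attributes is None: self.attributes = self.Attributes()).
    table = DynamoDBTable()
    table.dynamo_d_b_status = DynamoDBStatus.ACTIVE
    table.dynamo_d_b_partition_key = "order_id"      # placeholder key name
    table.dynamo_d_b_sort_key = "created_at"         # placeholder key name
    table.dynamo_d_b_read_capacity_units = 5
    table.dynamo_d_b_write_capacity_units = 5
    table.dynamo_dbtable_g_s_i_count = 1

    # Values land on the aliased pydantic fields, so they can serialize back
    # to the camelCase names declared above (e.g. "dynamoDBStatus").
    assert table.attributes.dynamo_d_b_status == DynamoDBStatus.ACTIVE

The same pattern applies to DynamoDBLocalSecondaryIndex and DynamoDBGlobalSecondaryIndex, whose only relationship attribute in this change is the dynamo_dbtable link back to the owning table.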