From 5f8891b2f61ba0f8c2739b18959dbf6eada0dd9f Mon Sep 17 00:00:00 2001 From: Rohit Ashiwal Date: Thu, 8 Feb 2024 09:31:31 +0530 Subject: [PATCH] lint: add rules and apply formatting Signed-off-by: Rohit Ashiwal --- .editorconfig | 13 +- detekt.yml | 3 +- .../IndexManagementExtension.kt | 1 - .../indexstatemanagement/Action.kt | 3 +- .../indexstatemanagement/ActionParser.kt | 1 - .../IndexMetadataService.kt | 1 - .../indexstatemanagement/StatusChecker.kt | 4 +- .../indexstatemanagement/Step.kt | 4 +- .../indexstatemanagement/Validate.kt | 8 +- .../model/ActionMetaData.kt | 11 +- .../model/ActionProperties.kt | 8 +- .../indexstatemanagement/model/ActionRetry.kt | 27 +- .../model/ActionTimeout.kt | 5 +- .../model/ManagedIndexMetaData.kt | 28 +- .../model/PolicyRetryInfoMetaData.kt | 11 +- .../model/ShrinkActionProperties.kt | 9 +- .../model/StateMetaData.kt | 11 +- .../indexstatemanagement/model/StepContext.kt | 2 +- .../model/StepMetaData.kt | 11 +- .../model/TransformActionProperties.kt | 7 +- .../model/ValidationResult.kt | 11 +- .../indexmanagement/IndexManagementIndices.kt | 96 +- .../indexmanagement/IndexManagementPlugin.kt | 148 +- .../indexmanagement/IndexManagementRunner.kt | 1 - .../common/model/dimension/DateHistogram.kt | 9 +- .../common/model/dimension/Dimension.kt | 18 +- .../common/model/dimension/Histogram.kt | 10 +- .../common/model/dimension/Terms.kt | 11 +- .../common/model/notification/Channel.kt | 36 +- .../model/notification/NotificationUtils.kt | 14 +- .../common/model/rest/SearchParams.kt | 5 +- .../notification/ControlCenterIndices.kt | 33 +- .../notification/LRONConfigResponse.kt | 6 +- .../action/delete/DeleteLRONConfigRequest.kt | 13 +- .../delete/TransportDeleteLRONConfigAction.kt | 25 +- .../action/get/GetLRONConfigRequest.kt | 31 +- .../get/TransportGetLRONConfigAction.kt | 69 +- .../action/index/IndexLRONConfigRequest.kt | 4 +- .../index/TransportIndexLRONConfigAction.kt | 48 +- .../filter/IndexOperationActionFilter.kt | 31 +- .../filter/NotificationActionListener.kt | 69 +- .../notification/filter/OperationResult.kt | 5 +- .../parser/ForceMergeIndexRespParser.kt | 25 +- .../filter/parser/OpenIndexRespParser.kt | 40 +- .../filter/parser/ReindexRespParser.kt | 36 +- .../filter/parser/ResizeIndexRespParser.kt | 35 +- .../filter/parser/ResponseParser.kt | 18 +- .../notification/model/LRONCondition.kt | 9 +- .../notification/model/LRONConfig.kt | 38 +- .../resthandler/RestDeleteLRONConfigAction.kt | 2 +- .../resthandler/RestGetLRONConfigAction.kt | 6 +- .../resthandler/RestIndexLRONConfigAction.kt | 5 +- .../notification/util/LRONUtils.kt | 32 +- .../DefaultIndexMetadataService.kt | 16 +- .../ExtensionStatusChecker.kt | 1 - .../indexstatemanagement/ISMActionsParser.kt | 39 +- .../ISMTemplateService.kt | 6 +- .../IndexMetadataProvider.kt | 43 +- .../IndexStateManagementHistory.kt | 82 +- .../ManagedIndexCoordinator.kt | 122 +- .../ManagedIndexRunner.kt | 225 +-- .../PluginVersionSweepCoordinator.kt | 23 +- .../indexstatemanagement/SkipExecution.kt | 14 +- .../action/AliasAction.kt | 7 +- .../action/AliasActionParser.kt | 2 +- .../action/AllocationAction.kt | 3 +- .../action/CloseAction.kt | 4 +- .../action/DeleteAction.kt | 3 +- .../action/ForceMergeAction.kt | 14 +- .../action/ForceMergeActionParser.kt | 2 +- .../action/IndexPriorityAction.kt | 3 +- .../action/IndexPriorityActionParser.kt | 2 +- .../action/NotificationAction.kt | 3 +- .../action/NotificationActionParser.kt | 4 +- .../indexstatemanagement/action/OpenAction.kt | 4 +- .../action/ReadOnlyAction.kt | 4 
+- .../action/ReadWriteAction.kt | 3 +- .../action/ReplicaCountAction.kt | 3 +- .../action/ReplicaCountActionParser.kt | 2 +- .../action/RolloverAction.kt | 11 +- .../action/RolloverActionParser.kt | 14 +- .../action/RollupAction.kt | 3 +- .../action/ShrinkAction.kt | 21 +- .../action/SnapshotAction.kt | 3 +- .../action/SnapshotActionParser.kt | 2 +- .../action/TransformAction.kt | 3 +- .../action/TransitionsAction.kt | 3 +- .../model/ChangePolicy.kt | 14 +- .../model/ErrorNotification.kt | 7 +- .../model/ExplainFilter.kt | 5 +- .../indexstatemanagement/model/ISMTemplate.kt | 7 +- .../model/ManagedIndexConfig.kt | 14 +- .../indexstatemanagement/model/Policy.kt | 23 +- .../indexstatemanagement/model/State.kt | 16 +- .../indexstatemanagement/model/StateFilter.kt | 3 +- .../indexstatemanagement/model/Transition.kt | 14 +- .../ClusterStateManagedIndexConfig.kt | 2 +- .../coordinator/SweptManagedIndexConfig.kt | 5 +- .../model/destination/Chime.kt | 3 +- .../model/destination/CustomWebhook.kt | 5 +- .../model/destination/Destination.kt | 55 +- .../model/destination/Slack.kt | 3 +- .../opensearchapi/OpenSearchExtensions.kt | 31 +- .../resthandler/RestAddPolicyAction.kt | 20 +- .../resthandler/RestChangePolicyAction.kt | 7 +- .../resthandler/RestDeletePolicyAction.kt | 5 +- .../resthandler/RestExplainAction.kt | 62 +- .../resthandler/RestGetPolicyAction.kt | 9 +- .../resthandler/RestIndexPolicyAction.kt | 28 +- .../resthandler/RestRemovePolicyAction.kt | 7 +- .../RestRetryFailedManagedIndexAction.kt | 38 +- .../LegacyOpenDistroManagedIndexSettings.kt | 329 ++-- .../settings/ManagedIndexSettings.kt | 344 +++-- .../step/alias/AttemptAliasActionsStep.kt | 8 +- .../step/allocation/AttemptAllocationStep.kt | 12 +- .../step/close/AttemptCloseStep.kt | 16 +- .../step/delete/AttemptDeleteStep.kt | 11 +- .../forcemerge/AttemptCallForceMergeStep.kt | 20 +- .../step/forcemerge/AttemptSetReadOnlyStep.kt | 17 +- .../step/forcemerge/WaitForForceMergeStep.kt | 43 +- .../AttemptSetIndexPriorityStep.kt | 17 +- .../notification/AttemptNotificationStep.kt | 5 +- .../step/open/AttemptOpenStep.kt | 10 +- .../step/readonly/SetReadOnlyStep.kt | 17 +- .../step/readwrite/SetReadWriteStep.kt | 21 +- .../replicacount/AttemptReplicaCountStep.kt | 17 +- .../step/rollover/AttemptRolloverStep.kt | 215 +-- .../step/rollup/AttemptCreateRollupJobStep.kt | 8 +- .../rollup/WaitForRollupCompletionStep.kt | 18 +- .../step/shrink/AttemptMoveShardsStep.kt | 181 ++- .../step/shrink/AttemptShrinkStep.kt | 30 +- .../step/shrink/ShrinkStep.kt | 8 +- .../step/shrink/WaitForMoveShardsStep.kt | 32 +- .../step/shrink/WaitForShrinkStep.kt | 49 +- .../step/snapshot/AttemptSnapshotStep.kt | 47 +- .../step/snapshot/WaitForSnapshotStep.kt | 26 +- .../AttemptCreateTransformJobStep.kt | 10 +- .../WaitForTransformCompletionStep.kt | 16 +- .../step/transition/AttemptTransitionStep.kt | 42 +- .../transport/action/ISMStatusResponse.kt | 5 +- .../action/addpolicy/AddPolicyRequest.kt | 14 +- .../addpolicy/TransportAddPolicyAction.kt | 60 +- .../changepolicy/ChangePolicyRequest.kt | 14 +- .../TransportChangePolicyAction.kt | 131 +- .../deletepolicy/DeletePolicyRequest.kt | 12 +- .../TransportDeletePolicyAction.kt | 29 +- .../action/explain/ExplainRequest.kt | 16 +- .../action/explain/ExplainResponse.kt | 9 +- .../action/explain/TransportExplainAction.kt | 133 +- .../action/getpolicy/GetPoliciesRequest.kt | 5 +- .../action/getpolicy/GetPoliciesResponse.kt | 5 +- .../action/getpolicy/GetPolicyRequest.kt | 14 +- .../action/getpolicy/GetPolicyResponse.kt | 5 +- 
.../getpolicy/TransportGetPoliciesAction.kt | 50 +- .../getpolicy/TransportGetPolicyAction.kt | 39 +- .../action/indexpolicy/IndexPolicyRequest.kt | 5 +- .../action/indexpolicy/IndexPolicyResponse.kt | 7 +- .../indexpolicy/TransportIndexPolicyAction.kt | 109 +- .../managedIndex/ManagedIndexRequest.kt | 1 - .../TransportManagedIndexAction.kt | 9 +- .../removepolicy/RemovePolicyRequest.kt | 14 +- .../TransportRemovePolicyAction.kt | 100 +- .../RetryFailedManagedIndexRequest.kt | 16 +- .../TransportRetryFailedManagedIndexAction.kt | 63 +- .../util/DestinationType.kt | 2 +- .../util/ManagedIndexUtils.kt | 156 +- .../util/NotificationUtils.kt | 20 +- .../util/RestHandlerUtils.kt | 24 +- .../indexstatemanagement/util/StepUtils.kt | 37 +- .../validation/ActionValidation.kt | 50 +- .../validation/ValidateClose.kt | 6 +- .../validation/ValidateDelete.kt | 16 +- .../validation/ValidateForceMerge.kt | 7 +- .../validation/ValidateIndexPriority.kt | 12 +- .../validation/ValidateNothing.kt | 5 +- .../validation/ValidateOpen.kt | 11 +- .../validation/ValidateReadOnly.kt | 11 +- .../validation/ValidateReadWrite.kt | 10 +- .../validation/ValidateReplicaCount.kt | 11 +- .../validation/ValidateRollover.kt | 20 +- .../validation/ValidateSnapshot.kt | 11 +- .../validation/ValidateTransition.kt | 6 +- .../opensearchapi/OpenSearchExtensions.kt | 95 +- .../RefreshSearchAnalyzerResponse.kt | 29 +- .../RestRefreshSearchAnalyzerAction.kt | 12 +- .../TransportRefreshSearchAnalyzerAction.kt | 17 +- .../indexmanagement/rollup/RollupIndexer.kt | 14 +- .../rollup/RollupMapperService.kt | 91 +- .../rollup/RollupMetadataService.kt | 197 +-- .../indexmanagement/rollup/RollupRunner.kt | 184 +-- .../rollup/RollupSearchService.kt | 20 +- .../action/delete/DeleteRollupRequest.kt | 1 - .../delete/TransportDeleteRollupAction.kt | 29 +- .../action/explain/ExplainRollupRequest.kt | 1 - .../action/explain/ExplainRollupResponse.kt | 5 +- .../explain/TransportExplainRollupAction.kt | 41 +- .../rollup/action/get/GetRollupRequest.kt | 4 +- .../rollup/action/get/GetRollupResponse.kt | 6 +- .../rollup/action/get/GetRollupsRequest.kt | 5 +- .../rollup/action/get/GetRollupsResponse.kt | 6 +- .../action/get/TransportGetRollupAction.kt | 32 +- .../action/get/TransportGetRollupsAction.kt | 37 +- .../rollup/action/index/IndexRollupRequest.kt | 2 +- .../action/index/IndexRollupResponse.kt | 6 +- .../index/TransportIndexRollupAction.kt | 38 +- .../TransportUpdateRollupMappingAction.kt | 36 +- .../mapping/UpdateRollupMappingAction.kt | 1 - .../rollup/action/start/StartRollupRequest.kt | 1 - .../start/TransportStartRollupAction.kt | 88 +- .../rollup/action/stop/StopRollupRequest.kt | 1 - .../action/stop/TransportStopRollupAction.kt | 96 +- .../rollup/actionfilter/FieldCapsFilter.kt | 78 +- .../rollup/actionfilter/SerDeHelper.kt | 26 +- .../rollup/interceptor/RollupInterceptor.kt | 79 +- .../rollup/model/ExplainRollup.kt | 5 +- .../indexmanagement/rollup/model/ISMRollup.kt | 22 +- .../indexmanagement/rollup/model/Rollup.kt | 60 +- .../rollup/model/RollupFieldMapping.kt | 16 +- .../rollup/model/RollupMetadata.kt | 43 +- .../rollup/model/RollupMetrics.kt | 12 +- .../rollup/model/metric/Average.kt | 1 - .../rollup/model/metric/Metric.kt | 21 +- .../rollup/query/QueryStringQueryParserExt.kt | 14 +- .../rollup/query/QueryStringQueryUtil.kt | 109 +- .../resthandler/RestDeleteRollupAction.kt | 10 +- .../resthandler/RestExplainRollupAction.kt | 5 +- .../rollup/resthandler/RestGetRollupAction.kt | 24 +- .../resthandler/RestIndexRollupAction.kt | 28 +- 
.../resthandler/RestStartRollupAction.kt | 5 +- .../resthandler/RestStopRollupAction.kt | 5 +- .../LegacyOpenDistroRollupSettings.kt | 121 +- .../rollup/settings/RollupSettings.kt | 118 +- .../rollup/util/QueryShardContextFactory.kt | 67 +- .../RollupFieldValueExpressionResolver.kt | 16 +- .../rollup/util/RollupUtils.kt | 126 +- .../settings/IndexManagementSettings.kt | 15 +- .../snapshotmanagement/SMRunner.kt | 60 +- .../snapshotmanagement/SMUtils.kt | 107 +- .../SnapshotManagementException.kt | 12 +- .../RestBaseIndexSMPolicyHandler.kt | 21 +- .../resthandler/RestCreateSMPolicyHandler.kt | 3 +- .../resthandler/RestDeleteSMPolicyHandler.kt | 16 +- .../resthandler/RestExplainSMPolicyHandler.kt | 5 +- .../api/resthandler/RestGetSMPolicyHandler.kt | 5 +- .../resthandler/RestStartSMPolicyHandler.kt | 5 +- .../resthandler/RestStopSMPolicyHandler.kt | 5 +- .../resthandler/RestUpdateSMPolicyHandler.kt | 3 +- .../api/transport/BaseTransportAction.kt | 15 +- .../api/transport/SMActions.kt | 4 +- .../delete/TransportDeleteSMPolicyAction.kt | 11 +- .../explain/ExplainSMPolicyRequest.kt | 2 +- .../explain/ExplainSMPolicyResponse.kt | 5 +- .../explain/TransportExplainSMAction.kt | 57 +- .../api/transport/get/GetSMPoliciesRequest.kt | 2 +- .../transport/get/GetSMPoliciesResponse.kt | 5 +- .../api/transport/get/GetSMPolicyRequest.kt | 2 +- .../api/transport/get/GetSMPolicyResponse.kt | 5 +- .../get/TransportGetSMPoliciesAction.kt | 41 +- .../get/TransportGetSMPolicyAction.kt | 35 +- .../transport/index/IndexSMPolicyRequest.kt | 8 +- .../transport/index/IndexSMPolicyResponse.kt | 7 +- .../index/TransportIndexSMPolicyAction.kt | 20 +- .../transport/start/TransportStartSMAction.kt | 43 +- .../transport/stop/TransportStopSMAction.kt | 43 +- .../engine/SMStateMachine.kt | 81 +- .../engine/states/SMState.kt | 33 +- .../engine/states/creation/CreatingState.kt | 31 +- .../creation/CreationConditionMetState.kt | 13 +- .../states/creation/CreationFinishedState.kt | 30 +- .../states/creation/CreationStartState.kt | 8 +- .../engine/states/deletion/DeletingState.kt | 43 +- .../deletion/DeletionConditionMetState.kt | 30 +- .../states/deletion/DeletionFinishedState.kt | 28 +- .../states/deletion/DeletionStartState.kt | 8 +- .../model/ExplainSMPolicy.kt | 5 +- .../model/NotificationConfig.kt | 16 +- .../snapshotmanagement/model/SMMetadata.kt | 233 +-- .../snapshotmanagement/model/SMPolicy.kt | 27 +- .../settings/SnapshotManagementSettings.kt | 14 +- .../util/RestHandlerUtils.kt | 1 + .../transform/TargetIndexMappingService.kt | 24 +- .../transform/TransformIndexer.kt | 23 +- .../transform/TransformMetadataService.kt | 42 +- .../transform/TransformProcessedBucketLog.kt | 8 +- .../transform/TransformRunner.kt | 224 +-- .../transform/TransformSearchService.kt | 182 +-- .../transform/TransformValidator.kt | 12 +- .../action/delete/DeleteTransformsRequest.kt | 5 +- .../delete/TransportDeleteTransformsAction.kt | 43 +- .../action/explain/ExplainTransformRequest.kt | 1 - .../explain/ExplainTransformResponse.kt | 10 +- .../TransportExplainTransformAction.kt | 58 +- .../action/get/GetTransformRequest.kt | 5 +- .../action/get/GetTransformResponse.kt | 7 +- .../action/get/GetTransformsRequest.kt | 5 +- .../action/get/GetTransformsResponse.kt | 7 +- .../action/get/TransportGetTransformAction.kt | 36 +- .../get/TransportGetTransformsAction.kt | 32 +- .../action/index/IndexTransformRequest.kt | 2 +- .../action/index/IndexTransformResponse.kt | 7 +- .../index/TransportIndexTransformAction.kt | 36 +- 
.../action/preview/PreviewTransformRequest.kt | 5 +- .../preview/PreviewTransformResponse.kt | 10 +- .../TransportPreviewTransformAction.kt | 57 +- .../action/start/StartTransformRequest.kt | 1 - .../start/TransportStartTransformAction.kt | 93 +- .../action/stop/StopTransformRequest.kt | 1 - .../stop/TransportStopTransformAction.kt | 88 +- .../model/ContinuousTransformStats.kt | 5 +- .../transform/model/ExplainTransform.kt | 5 +- .../transform/model/ISMTransform.kt | 25 +- .../transform/model/Transform.kt | 68 +- .../transform/model/TransformMetadata.kt | 24 +- .../transform/model/TransformSearchResult.kt | 6 +- .../transform/model/TransformStats.kt | 7 +- .../opensearchapi/OpenSearchExtensions.kt | 6 +- .../resthandler/RestDeleteTransformAction.kt | 3 +- .../resthandler/RestExplainTransformAction.kt | 1 - .../resthandler/RestGetTransformAction.kt | 18 +- .../resthandler/RestIndexTransformAction.kt | 24 +- .../resthandler/RestPreviewTransformAction.kt | 3 +- .../resthandler/RestStartTransformAction.kt | 3 +- .../resthandler/RestStopTransformAction.kt | 3 +- .../transform/settings/TransformSettings.kt | 93 +- .../transform/util/TransformLockManager.kt | 12 +- .../util/IndexManagementException.kt | 6 +- .../indexmanagement/util/IndexUtils.kt | 33 +- .../indexmanagement/util/RestHandlerUtils.kt | 1 + .../indexmanagement/util/ScheduledJobUtils.kt | 31 +- .../indexmanagement/util/SecurityUtils.kt | 45 +- .../opensearch/indexmanagement/AccessRoles.kt | 5 + .../IndexManagementIndicesIT.kt | 123 +- .../IndexManagementRestTestCase.kt | 81 +- .../IndexManagementSettingsTests.kt | 88 +- .../IndexStateManagementSecurityBehaviorIT.kt | 107 +- .../indexmanagement/MocksTestCase.kt | 38 +- .../indexmanagement/ODFERestTestCase.kt | 1 - .../PolicySecurityBehaviorIT.kt | 51 +- .../RollupSecurityBehaviorIT.kt | 50 +- .../indexmanagement/SecurityBehaviorIT.kt | 68 +- .../indexmanagement/SecurityRestTestCase.kt | 111 +- .../opensearch/indexmanagement/TestHelpers.kt | 6 +- .../TransformSecurityBehaviorIT.kt | 53 +- .../bwc/ISMBackwardsCompatibilityIT.kt | 15 +- ...IndexManagementBackwardsCompatibilityIT.kt | 58 +- .../LRONConfigSecurityBehaviorIT.kt | 41 +- .../notification/SerializationTests.kt | 15 +- .../controlcenter/notification/TestHelpers.kt | 32 +- .../notification/XContentTests.kt | 112 +- .../filter/IndexOperationActionFilterTests.kt | 52 +- .../filter/NotificationActionListenerIT.kt | 288 ++-- .../filter/NotificationActionListenerTests.kt | 51 +- .../parser/ForceMergeIndexRespParserTests.kt | 49 +- .../filter/parser/OpenRespParserTests.kt | 1 - .../filter/parser/ReindexRespParserTests.kt | 147 +- .../parser/ResizeIndexRespParserTests.kt | 13 +- .../resthandler/LRONConfigRestTestCase.kt | 10 +- .../RestDeleteLRONConfigActionIT.kt | 4 +- .../resthandler/RestGetLRONConfigActionIT.kt | 19 +- .../RestIndexLRONConfigActionIT.kt | 94 +- .../notification/util/LRONUtilsTests.kt | 2 +- .../IndexMetadataProviderTests.kt | 5 +- .../IndexStateManagementRestTestCase.kt | 424 +++--- .../ManagedIndexConfigTests.kt | 4 +- .../indexstatemanagement/TestHelpers.kt | 60 +- .../action/ActionRetryIT.kt | 107 +- .../action/ActionTimeoutIT.kt | 62 +- .../action/AliasActionIT.kt | 57 +- .../action/AllocationActionIT.kt | 132 +- .../action/CloseActionIT.kt | 75 +- .../action/DeleteActionIT.kt | 26 +- .../action/ForceMergeActionIT.kt | 40 +- .../action/IndexPolicyActionIT.kt | 46 +- .../action/IndexPriorityActionIT.kt | 19 +- .../action/IndexStateManagementHistoryIT.kt | 161 +- .../action/NotificationActionIT.kt | 55 +- 
.../action/OpenActionIT.kt | 56 +- .../action/ReadOnlyActionIT.kt | 28 +- .../action/ReadWriteActionIT.kt | 29 +- .../action/ReplicaCountActionIT.kt | 20 +- .../action/RolloverActionIT.kt | 330 ++-- .../action/RollupActionIT.kt | 442 +++--- .../action/ShrinkActionIT.kt | 293 ++-- .../action/SnapshotActionIT.kt | 187 +-- .../action/TransformActionIT.kt | 139 +- .../action/TransitionActionIT.kt | 91 +- .../coordinator/ManagedIndexCoordinatorIT.kt | 158 +- .../ManagedIndexCoordinatorTests.kt | 10 +- .../coordinator/SkipExecutionTests.kt | 1 - .../extension/ISMActionsParserTests.kt | 3 +- .../extension/SampleCustomActionParser.kt | 5 +- .../model/ActionPropertiesTests.kt | 10 +- .../indexstatemanagement/model/ActionTests.kt | 50 +- .../model/ConditionsTests.kt | 7 +- .../model/DestinationTests.kt | 1 - .../model/ISMTemplateTests.kt | 1 - .../model/ManagedIndexMetaDataTests.kt | 133 +- .../indexstatemanagement/model/PolicyTests.kt | 1 - .../indexstatemanagement/model/StateTests.kt | 1 - .../model/XContentTests.kt | 59 +- .../opensearchapi/ExtensionsTests.kt | 27 +- .../resthandler/ISMTemplateRestAPIIT.kt | 69 +- .../IndexStateManagementRestApiIT.kt | 181 ++- .../resthandler/RestAddPolicyActionIT.kt | 241 +-- .../resthandler/RestChangePolicyActionIT.kt | 490 +++--- .../resthandler/RestExplainActionIT.kt | 695 +++++---- .../resthandler/RestRemovePolicyActionIT.kt | 190 +-- .../RestRetryFailedManagedIndexActionIT.kt | 370 +++-- .../runner/ManagedIndexRunnerIT.kt | 70 +- .../runner/ManagedIndexRunnerTests.kt | 18 +- .../step/AttemptCloseStepTests.kt | 12 +- .../step/AttemptCreateRollupJobStepTests.kt | 12 +- .../AttemptCreateTransformJobStepTests.kt | 46 +- .../step/AttemptDeleteStepTests.kt | 12 +- .../step/AttemptOpenStepTests.kt | 12 +- .../step/AttemptRolloverStepTests.kt | 98 +- .../step/AttemptSetIndexPriorityStepTests.kt | 12 +- .../step/AttemptSetReplicaCountStepTests.kt | 12 +- .../step/AttemptSnapshotStepTests.kt | 14 +- .../step/AttemptTransitionStepTests.kt | 33 +- .../step/SetReadOnlyStepTests.kt | 12 +- .../step/SetReadWriteStepTests.kt | 12 +- .../step/WaitForRollupCompletionStepTests.kt | 39 +- .../step/WaitForShrinkStepTests.kt | 117 +- .../step/WaitForSnapshotStepTests.kt | 12 +- .../WaitForTransformCompletionStepTests.kt | 75 +- .../action/ISMStatusResponseTests.kt | 1 - .../action/addpolicy/AddPolicyRequestTests.kt | 1 - .../changepolicy/ChangePolicyRequestTests.kt | 1 - .../deletepolicy/DeletePolicyRequestTests.kt | 1 - .../action/explain/ExplainRequestTests.kt | 3 +- .../action/explain/ExplainResponseTests.kt | 34 +- .../getpolicy/GetPoliciesRequestTests.kt | 1 - .../getpolicy/GetPoliciesResponseTests.kt | 24 +- .../action/getpolicy/GetPolicyRequestTests.kt | 1 - .../getpolicy/GetPolicyResponseTests.kt | 43 +- .../indexpolicy/IndexPolicyRequestTests.kt | 77 +- .../indexpolicy/IndexPolicyResponseTests.kt | 45 +- .../removepolicy/RemovePolicyRequestTests.kt | 1 - .../RetryFailedManagedIndexRequestTests.kt | 3 +- .../util/ManagedIndexUtilsTests.kt | 130 +- .../util/StepUtilsTests.kt | 113 +- .../RefreshSearchAnalyzerActionIT.kt | 105 +- .../RefreshSearchAnalyzerResponseTests.kt | 1 - ...RefreshSearchAnalyzerShardResponseTests.kt | 1 - .../RestRefreshSearchAnalyzerActionIT.kt | 26 +- .../rollup/RollupMapperServiceTests.kt | 303 ++-- .../rollup/RollupMetadataServiceTests.kt | 721 +++++---- .../rollup/RollupRestTestCase.kt | 98 +- .../indexmanagement/rollup/TestHelpers.kt | 61 +- .../rollup/action/ActionTests.kt | 1 - .../rollup/action/RequestTests.kt | 19 +- 
.../rollup/action/ResponseTests.kt | 3 +- .../rollup/actionfilter/FieldCapsFilterIT.kt | 3 +- .../actionfilter/FieldCapsFilterTests.kt | 10 +- .../rollup/actionfilter/SerDeTests.kt | 1 - .../rollup/interceptor/RollupInterceptorIT.kt | 1197 ++++++++------- .../rollup/model/DimensionTests.kt | 1 - .../rollup/model/ISMRollupTests.kt | 19 +- .../rollup/model/RollupFieldMappingTests.kt | 1 - .../rollup/model/RollupTests.kt | 20 +- .../rollup/model/WriteableTests.kt | 10 +- .../rollup/model/XContentTests.kt | 3 +- .../resthandler/RestDeleteRollupActionIT.kt | 5 +- .../resthandler/RestExplainRollupActionIT.kt | 20 +- .../resthandler/RestGetRollupActionIT.kt | 22 +- .../resthandler/RestIndexRollupActionIT.kt | 193 +-- .../resthandler/RestStartRollupActionIT.kt | 123 +- .../resthandler/RestStopRollupActionIT.kt | 167 +- .../rollup/runner/RollupRunnerIT.kt | 1352 +++++++++-------- ...RollupFieldValueExpressionResolverTests.kt | 3 +- .../rollup/util/RollupUtilsTests.kt | 14 +- .../snapshotmanagement/SMRunnerIT.kt | 16 +- .../SnapshotManagementRestTestCase.kt | 67 +- .../snapshotmanagement/TestUtils.kt | 106 +- .../snapshotmanagement/action/ActionTests.kt | 5 +- .../snapshotmanagement/action/RequestTests.kt | 1 - .../action/ResponseTests.kt | 3 +- .../engine/SMStateMachineTests.kt | 371 ++--- .../states/creation/CreatingStateTests.kt | 197 +-- .../CreationConditionMetStateTests.kt | 53 +- .../creation/CreationFinishedStateTests.kt | 309 ++-- .../creation/CreationStartStateTests.kt | 21 +- .../states/deletion/DeletingStateTests.kt | 323 ++-- .../DeletionConditionMetStateTests.kt | 92 +- .../deletion/DeletionFinishedStateTests.kt | 180 ++- .../deletion/DeletionStartStateTests.kt | 21 +- .../model/WriteableTests.kt | 1 - .../snapshotmanagement/model/XContentTests.kt | 1 - .../RestDeleteSnapshotManagementIT.kt | 3 +- .../RestExplainSnapshotManagementIT.kt | 42 +- .../RestGetSnapshotManagementIT.kt | 36 +- .../RestIndexSnapshotManagementIT.kt | 38 +- .../RestStartSnapshotManagementIT.kt | 3 +- .../RestStopSnapshotManagementIT.kt | 3 +- .../TargetIndexMappingServiceTests.kt | 1 - .../indexmanagement/transform/TestHelpers.kt | 60 +- .../transform/TransformRestTestCase.kt | 66 +- .../transform/TransformRunnerIT.kt | 1228 ++++++++------- .../transform/action/ActionTests.kt | 1 - .../transform/action/RequestTests.kt | 19 +- .../transform/action/ResponseTests.kt | 12 +- .../transform/model/ISMTransformTests.kt | 7 +- .../transform/model/TransformTests.kt | 1 - .../transform/model/WriteableTests.kt | 1 - .../transform/model/XContentTests.kt | 17 +- .../opensearchapi/ExtensionsTests.kt | 32 +- .../RestDeleteTransformActionIT.kt | 13 +- .../RestExplainTransformActionIT.kt | 60 +- .../resthandler/RestGetTransformActionIT.kt | 24 +- .../resthandler/RestIndexTransformActionIT.kt | 55 +- .../RestPreviewTransformActionIT.kt | 108 +- .../resthandler/RestStartTransformActionIT.kt | 83 +- .../resthandler/RestStopTransformActionIT.kt | 79 +- .../indexmanagement/util/IndexUtilsTests.kt | 16 +- 515 files changed, 14166 insertions(+), 12333 deletions(-) diff --git a/.editorconfig b/.editorconfig index a9c874b1c..7ba6925bf 100644 --- a/.editorconfig +++ b/.editorconfig @@ -3,9 +3,16 @@ root = true [*.{kt,kts}] +ktlint_code_style = intellij_idea # we have detekt also checking for max line length. Disable the linter and use only one tool to check for max line length. 
-# See https://github.com/arturbosch/detekt
-max_line_length=off
+ktlint_standard_max-line-length = disabled
+ktlint_ignore_back_ticked_identifier = true
+
+ktlint_standard_function-naming = disabled
+ktlint_standard_property-naming = disabled
+ktlint_standard_function-signature = disabled
+ktlint_standard_value-argument-comment = disabled
+ktlint_standard_argument-list-wrapping = disabled
+ktlint_standard_value-parameter-comment = disabled
-disabled_rules=import-ordering
\ No newline at end of file
diff --git a/detekt.yml b/detekt.yml
index 8ed992998..57ab2f3c5 100644
--- a/detekt.yml
+++ b/detekt.yml
@@ -11,7 +11,7 @@ style:
   ForbiddenComment:
     active: false
   MaxLineLength:
-    maxLineLength: 150
+    maxLineLength: 160
     excludes: ['**/test/**']
   FunctionOnlyReturningConstant:
     active: false
@@ -20,6 +20,7 @@ complexity:
   LargeClass:
     excludes: ['**/test/**']
   LongMethod:
+    threshold: 80
     excludes: ['**/test/**']
   LongParameterList:
     excludes: ['**/test/**']
diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/IndexManagementExtension.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/IndexManagementExtension.kt
index 0d2891581..701e92eda 100644
--- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/IndexManagementExtension.kt
+++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/IndexManagementExtension.kt
@@ -14,7 +14,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.StatusChecker
  * SPI for IndexManagement
  */
 interface IndexManagementExtension {
-
     /**
      * List of action parsers that are supported by the extension, each of the action parser will parse the policy action into the defined action.
      * The ActionParser provides the ability to parse the action
diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/Action.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/Action.kt
index 4d251b9d2..4c355c1ae 100644
--- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/Action.kt
+++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/Action.kt
@@ -19,9 +19,8 @@ import java.time.Instant

 abstract class Action(
     val type: String,
-    val actionIndex: Int
+    val actionIndex: Int,
 ) : ToXContentObject, Writeable {
-
     var configTimeout: ActionTimeout? = null
     var configRetry: ActionRetry?
= ActionRetry(DEFAULT_RETRIES) var customAction: Boolean = false diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/ActionParser.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/ActionParser.kt index 744c69062..d664b79cd 100644 --- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/ActionParser.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/ActionParser.kt @@ -9,7 +9,6 @@ import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.xcontent.XContentParser abstract class ActionParser(var customAction: Boolean = false) { - /** * The action type parser will parse */ diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/IndexMetadataService.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/IndexMetadataService.kt index f7506d53c..172a21e85 100644 --- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/IndexMetadataService.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/IndexMetadataService.kt @@ -21,7 +21,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ISMIndexMet * else uses the default i.e cluster state */ interface IndexMetadataService { - /** * Returns the index metadata needed for ISM */ diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/StatusChecker.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/StatusChecker.kt index d2adf9ad3..f3bf94467 100644 --- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/StatusChecker.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/StatusChecker.kt @@ -8,7 +8,6 @@ package org.opensearch.indexmanagement.spi.indexstatemanagement import org.opensearch.cluster.ClusterState interface StatusChecker { - /** * checks and returns the status of the extension */ @@ -19,7 +18,8 @@ interface StatusChecker { enum class Status(private val value: String) { ENABLED("enabled"), - DISABLED("disabled"); + DISABLED("disabled"), + ; override fun toString(): String { return value diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/Step.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/Step.kt index abb476aed..16c41c8f1 100644 --- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/Step.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/Step.kt @@ -16,7 +16,6 @@ import java.time.Instant import java.util.Locale abstract class Step(val name: String, val isSafeToDisableOn: Boolean = true) { - var context: StepContext? 
= null private set @@ -56,7 +55,8 @@ abstract class Step(val name: String, val isSafeToDisableOn: Boolean = true) { STARTING("starting"), CONDITION_NOT_MET("condition_not_met"), FAILED("failed"), - COMPLETED("completed"); + COMPLETED("completed"), + ; override fun toString(): String { return status diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/Validate.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/Validate.kt index e64d3637f..24a316372 100644 --- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/Validate.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/Validate.kt @@ -6,19 +6,18 @@ package org.opensearch.indexmanagement.spi.indexstatemanagement import org.opensearch.cluster.service.ClusterService +import org.opensearch.common.settings.Settings import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable -import org.opensearch.common.settings.Settings import org.opensearch.monitor.jvm.JvmService import java.util.Locale abstract class Validate( val settings: Settings, val clusterService: ClusterService, - val jvmService: JvmService + val jvmService: JvmService, ) { - var validationStatus = ValidationStatus.PASSED var validationMessage: String? = "Starting Validation" @@ -27,7 +26,8 @@ abstract class Validate( enum class ValidationStatus(val status: String) : Writeable { PASSED("passed"), RE_VALIDATING("re_validating"), - FAILED("failed"); + FAILED("failed"), + ; override fun toString(): String { return status diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ActionMetaData.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ActionMetaData.kt index 1d7e94a74..4a16b272b 100644 --- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ActionMetaData.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ActionMetaData.kt @@ -5,17 +5,17 @@ package org.opensearch.indexmanagement.spi.indexstatemanagement.model +import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.XContentType import org.opensearch.core.common.Strings import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable -import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentFragment import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentType import org.opensearch.core.xcontent.XContentParserUtils import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData.Companion.NAME import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData.Companion.START_TIME @@ -29,9 +29,8 @@ data class ActionMetaData( val failed: Boolean, val consumedRetries: Int, val lastRetryTime: Long?, - val actionProperties: ActionProperties? 
+ val actionProperties: ActionProperties?, ) : Writeable, ToXContentFragment { - override fun writeTo(out: StreamOutput) { out.writeString(name) out.writeOptionalLong(startTime) @@ -89,7 +88,7 @@ data class ActionMetaData( requireNotNull(failed) { "$FAILED is null" }, requireNotNull(consumedRetries) { "$CONSUMED_RETRIES is null" }, lastRetryTime, - actionProperties + actionProperties, ) } @@ -139,7 +138,7 @@ data class ActionMetaData( requireNotNull(failed) { "$FAILED is null" }, requireNotNull(consumedRetries) { "$CONSUMED_RETRIES is null" }, lastRetryTime, - actionProperties + actionProperties, ) } } diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ActionProperties.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ActionProperties.kt index 1b75aec11..2fec33ef8 100644 --- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ActionProperties.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ActionProperties.kt @@ -16,17 +16,17 @@ import org.opensearch.core.xcontent.XContentParser.Token import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken import org.opensearch.indexmanagement.spi.indexstatemanagement.addObject -/** Properties that will persist across steps of a single Action. Will be stored in the [ActionMetaData]. */ // TODO: Create namespaces to group properties together + +/** Properties that will persist across steps of a single Action. Will be stored in the [ActionMetaData]. */ data class ActionProperties( val maxNumSegments: Int? = null, val snapshotName: String? = null, val rollupId: String? = null, val hasRollupFailed: Boolean? = null, val shrinkActionProperties: ShrinkActionProperties? = null, - val transformActionProperties: TransformActionProperties? = null + val transformActionProperties: TransformActionProperties? 
= null, ) : Writeable, ToXContentFragment { - override fun writeTo(out: StreamOutput) { out.writeOptionalInt(maxNumSegments) out.writeOptionalString(snapshotName) @@ -94,6 +94,6 @@ data class ActionProperties( MAX_NUM_SEGMENTS("max_num_segments"), SNAPSHOT_NAME("snapshot_name"), ROLLUP_ID("rollup_id"), - HAS_ROLLUP_FAILED("has_rollup_failed") + HAS_ROLLUP_FAILED("has_rollup_failed"), } } diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ActionRetry.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ActionRetry.kt index 338d4a016..370320ac3 100644 --- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ActionRetry.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ActionRetry.kt @@ -6,10 +6,10 @@ package org.opensearch.indexmanagement.spi.indexstatemanagement.model import org.apache.logging.log4j.LogManager +import org.opensearch.common.unit.TimeValue import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable -import org.opensearch.common.unit.TimeValue import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentFragment import org.opensearch.core.xcontent.XContentBuilder @@ -23,10 +23,11 @@ import kotlin.math.pow data class ActionRetry( val count: Long, val backoff: Backoff = Backoff.EXPONENTIAL, - val delay: TimeValue = TimeValue.timeValueMinutes(1) + val delay: TimeValue = TimeValue.timeValueMinutes(1), ) : ToXContentFragment, Writeable { - - init { require(count >= 0) { "Count for ActionRetry must be a non-negative number" } } + init { + require(count >= 0) { "Count for ActionRetry must be a non-negative number" } + } override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { builder @@ -42,7 +43,7 @@ data class ActionRetry( constructor(sin: StreamInput) : this( count = sin.readLong(), backoff = sin.readEnum(Backoff::class.java), - delay = sin.readTimeValue() + delay = sin.readTimeValue(), ) @Throws(IOException::class) @@ -80,7 +81,7 @@ data class ActionRetry( return ActionRetry( count = requireNotNull(count) { "ActionRetry count is null" }, backoff = backoff, - delay = delay + delay = delay, ) } } @@ -90,20 +91,21 @@ data class ActionRetry( "exponential", { consumedRetries, timeValue -> (2.0.pow(consumedRetries - 1)).toLong() * timeValue.millis - } + }, ), CONSTANT( "constant", { _, timeValue -> timeValue.millis - } + }, ), LINEAR( "linear", { consumedRetries, timeValue -> consumedRetries * timeValue.millis - } - ); + }, + ), + ; private val logger = LogManager.getLogger(javaClass) @@ -120,8 +122,9 @@ data class ActionRetry( if (actionMetaData.consumedRetries > 0) { if (actionMetaData.lastRetryTime != null) { - val remainingTime = getNextRetryTime(actionMetaData.consumedRetries, actionRetry.delay) - - (Instant.now().toEpochMilli() - actionMetaData.lastRetryTime) + val remainingTime = + getNextRetryTime(actionMetaData.consumedRetries, actionRetry.delay) - + (Instant.now().toEpochMilli() - actionMetaData.lastRetryTime) return Pair(remainingTime > 0, remainingTime) } diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ActionTimeout.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ActionTimeout.kt index bf56d7322..18e5f0385 100644 --- 
a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ActionTimeout.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ActionTimeout.kt @@ -5,10 +5,10 @@ package org.opensearch.indexmanagement.spi.indexstatemanagement.model +import org.opensearch.common.unit.TimeValue import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable -import org.opensearch.common.unit.TimeValue import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentFragment import org.opensearch.core.xcontent.XContentBuilder @@ -16,14 +16,13 @@ import org.opensearch.core.xcontent.XContentParser import java.io.IOException data class ActionTimeout(val timeout: TimeValue) : ToXContentFragment, Writeable { - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { return builder.field(TIMEOUT_FIELD, timeout.stringRep) } @Throws(IOException::class) constructor(sin: StreamInput) : this( - timeout = sin.readTimeValue() + timeout = sin.readTimeValue(), ) @Throws(IOException::class) diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ManagedIndexMetaData.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ManagedIndexMetaData.kt index f099bb788..c52747a24 100644 --- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ManagedIndexMetaData.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ManagedIndexMetaData.kt @@ -5,17 +5,17 @@ package org.opensearch.indexmanagement.spi.indexstatemanagement.model +import org.opensearch.common.xcontent.XContentFactory +import org.opensearch.common.xcontent.XContentHelper +import org.opensearch.common.xcontent.json.JsonXContent import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentFragment import org.opensearch.core.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentFactory -import org.opensearch.common.xcontent.XContentHelper import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParserUtils -import org.opensearch.common.xcontent.json.JsonXContent import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.spi.indexstatemanagement.addObject import java.io.IOException @@ -40,10 +40,9 @@ data class ManagedIndexMetaData( val primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, val rolledOverIndexName: String? = null, ) : Writeable, ToXContentFragment { - @Suppress("ComplexMethod") fun toMap(): Map { - val resultMap = mutableMapOf () + val resultMap = mutableMapOf() resultMap[INDEX] = index resultMap[INDEX_UUID] = indexUuid resultMap[POLICY_ID] = policyID @@ -199,11 +198,12 @@ data class ManagedIndexMetaData( val step: StepMetaData? = si.readOptionalWriteable { StepMetaData.fromStreamInput(it) } val retryInfo: PolicyRetryInfoMetaData? 
= si.readOptionalWriteable { PolicyRetryInfoMetaData.fromStreamInput(it) } - val info = if (si.readBoolean()) { - si.readMap() - } else { - null - } + val info = + if (si.readBoolean()) { + si.readMap() + } else { + null + } return ManagedIndexMetaData( index = requireNotNull(index) { "$INDEX is null" }, @@ -220,7 +220,7 @@ data class ManagedIndexMetaData( actionMetaData = action, stepMetaData = step, policyRetryInfo = retryInfo, - info = info + info = info, ) } @@ -232,7 +232,7 @@ data class ManagedIndexMetaData( xcp: XContentParser, id: String = NO_ID, seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, - primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM + primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, ): ManagedIndexMetaData { var index: String? = null var indexUuid: String? = null @@ -314,7 +314,7 @@ data class ManagedIndexMetaData( xcp: XContentParser, id: String = NO_ID, seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, - primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM + primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, ): ManagedIndexMetaData { XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, xcp.nextToken(), xcp) @@ -340,7 +340,7 @@ data class ManagedIndexMetaData( actionMetaData = ActionMetaData.fromManagedIndexMetaDataMap(map), stepMetaData = StepMetaData.fromManagedIndexMetaDataMap(map), policyRetryInfo = PolicyRetryInfoMetaData.fromManagedIndexMetaDataMap(map), - info = map[INFO]?.let { XContentHelper.convertToMap(JsonXContent.jsonXContent, it, false) } + info = map[INFO]?.let { XContentHelper.convertToMap(JsonXContent.jsonXContent, it, false) }, ) } } diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/PolicyRetryInfoMetaData.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/PolicyRetryInfoMetaData.kt index a8fe8bf29..039ac10a6 100644 --- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/PolicyRetryInfoMetaData.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/PolicyRetryInfoMetaData.kt @@ -5,26 +5,25 @@ package org.opensearch.indexmanagement.spi.indexstatemanagement.model +import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.XContentType import org.opensearch.core.common.Strings import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable -import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentFragment import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParserUtils -import org.opensearch.common.xcontent.XContentType import java.io.ByteArrayInputStream import java.nio.charset.StandardCharsets data class PolicyRetryInfoMetaData( val failed: Boolean, - val consumedRetries: Int + val consumedRetries: Int, ) : Writeable, ToXContentFragment { - override fun writeTo(out: StreamOutput) { out.writeBoolean(failed) out.writeInt(consumedRetries) @@ -49,7 +48,7 @@ data class PolicyRetryInfoMetaData( return PolicyRetryInfoMetaData( requireNotNull(failed) { "$FAILED is null" 
}, - requireNotNull(consumedRetries) { "$CONSUMED_RETRIES is null" } + requireNotNull(consumedRetries) { "$CONSUMED_RETRIES is null" }, ) } @@ -82,7 +81,7 @@ data class PolicyRetryInfoMetaData( return PolicyRetryInfoMetaData( requireNotNull(failed) { "$FAILED is null" }, - requireNotNull(consumedRetries) { "$CONSUMED_RETRIES is null" } + requireNotNull(consumedRetries) { "$CONSUMED_RETRIES is null" }, ) } } diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ShrinkActionProperties.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ShrinkActionProperties.kt index 9de9209b6..55c346b3f 100644 --- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ShrinkActionProperties.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ShrinkActionProperties.kt @@ -23,9 +23,8 @@ data class ShrinkActionProperties( val lockEpochSecond: Long, val lockDurationSecond: Long, // Used to store the original index allocation and write block setting to reapply after shrink - val originalIndexSettings: Map + val originalIndexSettings: Map, ) : Writeable, ToXContentFragment { - override fun writeTo(out: StreamOutput) { out.writeString(nodeName) out.writeString(targetIndexName) @@ -63,7 +62,7 @@ data class ShrinkActionProperties( val originalIndexSettings: Map = si.readMap({ it.readString() }, { it.readString() }) return ShrinkActionProperties( - nodeName, targetIndexName, targetNumShards, lockPrimaryTerm, lockSeqNo, lockEpochSecond, lockDurationSecond, originalIndexSettings + nodeName, targetIndexName, targetNumShards, lockPrimaryTerm, lockSeqNo, lockEpochSecond, lockDurationSecond, originalIndexSettings, ) } @@ -102,7 +101,7 @@ data class ShrinkActionProperties( requireNotNull(lockSeqNo), requireNotNull(lockEpochSecond), requireNotNull(lockDurationSecond), - requireNotNull(originalIndexSettings) + requireNotNull(originalIndexSettings), ) } } @@ -115,6 +114,6 @@ data class ShrinkActionProperties( LOCK_PRIMARY_TERM("lock_primary_term"), LOCK_EPOCH_SECOND("lock_epoch_second"), LOCK_DURATION_SECOND("lock_duration_second"), - ORIGINAL_INDEX_SETTINGS("original_index_settings") + ORIGINAL_INDEX_SETTINGS("original_index_settings"), } } diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/StateMetaData.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/StateMetaData.kt index 7c6174106..3a46d6a1b 100644 --- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/StateMetaData.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/StateMetaData.kt @@ -5,18 +5,18 @@ package org.opensearch.indexmanagement.spi.indexstatemanagement.model +import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.XContentType import org.opensearch.core.common.Strings import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable -import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentFragment import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParserUtils -import 
org.opensearch.common.xcontent.XContentType import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData.Companion.NAME import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData.Companion.START_TIME import java.io.ByteArrayInputStream @@ -24,9 +24,8 @@ import java.nio.charset.StandardCharsets data class StateMetaData( val name: String, - val startTime: Long + val startTime: Long, ) : Writeable, ToXContentFragment { - override fun writeTo(out: StreamOutput) { out.writeString(name) out.writeLong(startTime) @@ -49,7 +48,7 @@ data class StateMetaData( return StateMetaData( requireNotNull(name) { "$NAME is null" }, - requireNotNull(startTime) { "$START_TIME is null" } + requireNotNull(startTime) { "$START_TIME is null" }, ) } @@ -82,7 +81,7 @@ data class StateMetaData( return StateMetaData( requireNotNull(name) { "$NAME is null" }, - requireNotNull(startTime) { "$START_TIME is null" } + requireNotNull(startTime) { "$START_TIME is null" }, ) } } diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/StepContext.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/StepContext.kt index 6ee4ff4f9..e0cc84c64 100644 --- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/StepContext.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/StepContext.kt @@ -21,7 +21,7 @@ class StepContext( val user: User?, val scriptService: ScriptService, val settings: Settings, - val lockService: LockService + val lockService: LockService, ) { fun getUpdatedContext(metadata: ManagedIndexMetaData): StepContext { return StepContext(metadata, this.clusterService, this.client, this.threadContext, this.user, this.scriptService, this.settings, this.lockService) diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/StepMetaData.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/StepMetaData.kt index defe49287..f9773dab7 100644 --- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/StepMetaData.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/StepMetaData.kt @@ -5,18 +5,18 @@ package org.opensearch.indexmanagement.spi.indexstatemanagement.model +import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.XContentType import org.opensearch.core.common.Strings import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable -import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentFragment import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParserUtils -import org.opensearch.common.xcontent.XContentType import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData.Companion.NAME import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData.Companion.START_TIME @@ -27,9 +27,8 @@ import java.util.Locale data class StepMetaData( val name: String, val startTime: Long, - 
val stepStatus: Step.StepStatus + val stepStatus: Step.StepStatus, ) : Writeable, ToXContentFragment { - override fun writeTo(out: StreamOutput) { out.writeString(name) out.writeLong(startTime) @@ -61,7 +60,7 @@ data class StepMetaData( return StepMetaData( requireNotNull(name) { "$NAME is null" }, requireNotNull(startTime) { "$START_TIME is null" }, - requireNotNull(stepStatus) { "$STEP_STATUS is null" } + requireNotNull(stepStatus) { "$STEP_STATUS is null" }, ) } @@ -97,7 +96,7 @@ data class StepMetaData( return StepMetaData( requireNotNull(name) { "$NAME is null" }, requireNotNull(startTime) { "$START_TIME is null" }, - requireNotNull(stepStatus) { "$STEP_STATUS is null" } + requireNotNull(stepStatus) { "$STEP_STATUS is null" }, ) } } diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/TransformActionProperties.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/TransformActionProperties.kt index 70b593750..1b9249b61 100644 --- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/TransformActionProperties.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/TransformActionProperties.kt @@ -8,16 +8,15 @@ package org.opensearch.indexmanagement.spi.indexstatemanagement.model import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable -import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentFragment import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken data class TransformActionProperties( - val transformId: String? 
+ val transformId: String?, ) : Writeable, ToXContentFragment { - override fun writeTo(out: StreamOutput) { out.writeOptionalString(transformId) } @@ -53,6 +52,6 @@ data class TransformActionProperties( } enum class Properties(val key: String) { - TRANSFORM_ID("transform_id") + TRANSFORM_ID("transform_id"), } } diff --git a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ValidationResult.kt b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ValidationResult.kt index c2d24a86a..28b077828 100644 --- a/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ValidationResult.kt +++ b/spi/src/main/kotlin/org.opensearch.indexmanagement.spi/indexstatemanagement/model/ValidationResult.kt @@ -5,18 +5,18 @@ package org.opensearch.indexmanagement.spi.indexstatemanagement.model +import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.XContentType import org.opensearch.core.common.Strings import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable -import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentFragment import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParserUtils -import org.opensearch.common.xcontent.XContentType import org.opensearch.indexmanagement.spi.indexstatemanagement.Validate import java.io.ByteArrayInputStream import java.nio.charset.StandardCharsets @@ -24,9 +24,8 @@ import java.util.Locale data class ValidationResult( val validationMessage: String, - val validationStatus: Validate.ValidationStatus + val validationStatus: Validate.ValidationStatus, ) : Writeable, ToXContentFragment { - override fun writeTo(out: StreamOutput) { out.writeString(validationMessage) validationStatus.writeTo(out) @@ -54,7 +53,7 @@ data class ValidationResult( return ValidationResult( requireNotNull(validationMessage) { "$VALIDATION_MESSAGE is null" }, - requireNotNull(validationStatus) { "$VALIDATION_STATUS is null" } + requireNotNull(validationStatus) { "$VALIDATION_STATUS is null" }, ) } @@ -87,7 +86,7 @@ data class ValidationResult( return ValidationResult( requireNotNull(validationMessage) { "$VALIDATION_MESSAGE is null" }, - requireNotNull(validationStatus) { "$VALIDATION_STATUS is null" } + requireNotNull(validationStatus) { "$VALIDATION_STATUS is null" }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/IndexManagementIndices.kt b/src/main/kotlin/org/opensearch/indexmanagement/IndexManagementIndices.kt index 8eb6a5156..81d6c1146 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/IndexManagementIndices.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/IndexManagementIndices.kt @@ -4,13 +4,13 @@ */ @file:Suppress("ReturnCount") + package org.opensearch.indexmanagement import org.apache.logging.log4j.LogManager import org.apache.logging.log4j.Logger import org.opensearch.OpenSearchStatusException import org.opensearch.ResourceAlreadyExistsException -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.indices.alias.Alias import org.opensearch.action.admin.indices.create.CreateIndexRequest import 
org.opensearch.action.admin.indices.create.CreateIndexResponse @@ -21,6 +21,8 @@ import org.opensearch.client.Client import org.opensearch.client.IndicesAdminClient import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings import org.opensearch.indexmanagement.indexstatemanagement.util.INDEX_HIDDEN @@ -29,7 +31,6 @@ import org.opensearch.indexmanagement.indexstatemanagement.util.INDEX_NUMBER_OF_ import org.opensearch.indexmanagement.opensearchapi.suspendUntil import org.opensearch.indexmanagement.util.IndexUtils import org.opensearch.indexmanagement.util.OpenForTesting -import org.opensearch.core.rest.RestStatus import kotlin.coroutines.resume import kotlin.coroutines.resumeWithException import kotlin.coroutines.suspendCoroutine @@ -38,12 +39,12 @@ import kotlin.coroutines.suspendCoroutine class IndexManagementIndices( settings: Settings, private val client: IndicesAdminClient, - private val clusterService: ClusterService + private val clusterService: ClusterService, ) { - private val logger = LogManager.getLogger(javaClass) @Volatile private var historyNumberOfShards = ManagedIndexSettings.HISTORY_NUMBER_OF_SHARDS.get(settings) + @Volatile private var historyNumberOfReplicas = ManagedIndexSettings.HISTORY_NUMBER_OF_REPLICAS.get(settings) init { @@ -57,9 +58,10 @@ class IndexManagementIndices( fun checkAndUpdateIMConfigIndex(actionListener: ActionListener) { if (!indexManagementIndexExists()) { - val indexRequest = CreateIndexRequest(INDEX_MANAGEMENT_INDEX) - .mapping(indexManagementMappings) - .settings(Settings.builder().put(INDEX_HIDDEN, true).build()) + val indexRequest = + CreateIndexRequest(INDEX_MANAGEMENT_INDEX) + .mapping(indexManagementMappings) + .settings(Settings.builder().put(INDEX_HIDDEN, true).build()) client.create( indexRequest, object : ActionListener { @@ -70,7 +72,7 @@ class IndexManagementIndices( override fun onResponse(response: CreateIndexResponse) { actionListener.onResponse(response) } - } + }, ) } else { IndexUtils.checkAndUpdateConfigIndexMapping(clusterService.state(), client, actionListener) @@ -78,21 +80,23 @@ class IndexManagementIndices( } suspend fun checkAndUpdateIMConfigIndex(logger: Logger): Boolean { - val response: AcknowledgedResponse = suspendCoroutine { cont -> - checkAndUpdateIMConfigIndex( - object : ActionListener { - override fun onResponse(response: AcknowledgedResponse) = cont.resume(response) - override fun onFailure(e: Exception) = cont.resumeWithException(e) - } - ) - } + val response: AcknowledgedResponse = + suspendCoroutine { cont -> + checkAndUpdateIMConfigIndex( + object : ActionListener { + override fun onResponse(response: AcknowledgedResponse) = cont.resume(response) + + override fun onFailure(e: Exception) = cont.resumeWithException(e) + }, + ) + } if (response.isAcknowledged) { return true } else { logger.error("Unable to create or update $INDEX_MANAGEMENT_INDEX with newest mapping.") throw OpenSearchStatusException( "Unable to create or update $INDEX_MANAGEMENT_INDEX with newest mapping.", - RestStatus.INTERNAL_SERVER_ERROR + RestStatus.INTERNAL_SERVER_ERROR, ) } } @@ -122,9 +126,10 @@ class IndexManagementIndices( suspend fun attemptUpdateConfigIndexMapping(): Boolean { return try { - val response: 
AcknowledgedResponse = client.suspendUntil { - IndexUtils.checkAndUpdateConfigIndexMapping(clusterService.state(), client, it) - } + val response: AcknowledgedResponse = + client.suspendUntil { + IndexUtils.checkAndUpdateConfigIndexMapping(clusterService.state(), client, it) + } if (response.isAcknowledged) return true logger.error("Trying to update config index mapping not acknowledged.") return false @@ -144,9 +149,10 @@ class IndexManagementIndices( if (!indexStateManagementIndexHistoryExists()) { return createHistoryIndex(HISTORY_INDEX_PATTERN, HISTORY_WRITE_INDEX_ALIAS) } else { - val response: AcknowledgedResponse = client.suspendUntil { - IndexUtils.checkAndUpdateHistoryIndexMapping(clusterService.state(), client, it) - } + val response: AcknowledgedResponse = + client.suspendUntil { + IndexUtils.checkAndUpdateHistoryIndexMapping(clusterService.state(), client, it) + } if (response.isAcknowledged) { return true } @@ -159,19 +165,21 @@ class IndexManagementIndices( // This should be a fast check of local cluster state. Should be exceedingly rare that the local cluster // state does not contain the index and multiple nodes concurrently try to create the index. // If it does happen that error is handled we catch the ResourceAlreadyExistsException - val existsResponse: IndicesExistsResponse = client.suspendUntil { - client.exists(IndicesExistsRequest(index).local(true), it) - } + val existsResponse: IndicesExistsResponse = + client.suspendUntil { + client.exists(IndicesExistsRequest(index).local(true), it) + } if (existsResponse.isExists) return true - val request = CreateIndexRequest(index) - .mapping(indexStateManagementHistoryMappings) - .settings( - Settings.builder() - .put(INDEX_HIDDEN, true) - .put(INDEX_NUMBER_OF_SHARDS, historyNumberOfShards) - .put(INDEX_NUMBER_OF_REPLICAS, historyNumberOfReplicas).build() - ) + val request = + CreateIndexRequest(index) + .mapping(indexStateManagementHistoryMappings) + .settings( + Settings.builder() + .put(INDEX_HIDDEN, true) + .put(INDEX_NUMBER_OF_SHARDS, historyNumberOfShards) + .put(INDEX_NUMBER_OF_REPLICAS, historyNumberOfReplicas).build(), + ) if (alias != null) request.alias(Alias(alias)) return try { val createIndexResponse: CreateIndexResponse = client.suspendUntil { client.create(request, it) } @@ -195,13 +203,17 @@ class IndexManagementIndices( const val HISTORY_INDEX_PATTERN = "<$HISTORY_INDEX_BASE-{now/d{yyyy.MM.dd}}-1>" const val HISTORY_ALL = "$HISTORY_INDEX_BASE*" - val indexManagementMappings = IndexManagementIndices::class.java.classLoader - .getResource("mappings/opendistro-ism-config.json").readText() - val indexStateManagementHistoryMappings = IndexManagementIndices::class.java.classLoader - .getResource("mappings/opendistro-ism-history.json").readText() - val rollupTargetMappings = IndexManagementIndices::class.java.classLoader - .getResource("mappings/opendistro-rollup-target.json").readText() - val transformTargetMappings = IndexManagementIndices::class.java.classLoader - .getResource("mappings/opendistro-transform-target.json").readText() + val indexManagementMappings = + IndexManagementIndices::class.java.classLoader + .getResource("mappings/opendistro-ism-config.json").readText() + val indexStateManagementHistoryMappings = + IndexManagementIndices::class.java.classLoader + .getResource("mappings/opendistro-ism-history.json").readText() + val rollupTargetMappings = + IndexManagementIndices::class.java.classLoader + .getResource("mappings/opendistro-rollup-target.json").readText() + val transformTargetMappings = + 
IndexManagementIndices::class.java.classLoader + .getResource("mappings/opendistro-transform-target.json").readText() } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/IndexManagementPlugin.kt b/src/main/kotlin/org/opensearch/indexmanagement/IndexManagementPlugin.kt index 23d2655c1..3a67de075 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/IndexManagementPlugin.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/IndexManagementPlugin.kt @@ -13,11 +13,10 @@ import org.opensearch.client.Client import org.opensearch.cluster.metadata.IndexNameExpressionResolver import org.opensearch.cluster.node.DiscoveryNodes import org.opensearch.cluster.service.ClusterService +import org.opensearch.common.inject.Inject import org.opensearch.common.lifecycle.Lifecycle import org.opensearch.common.lifecycle.LifecycleComponent import org.opensearch.common.lifecycle.LifecycleListener -import org.opensearch.common.inject.Inject -import org.opensearch.core.common.io.stream.NamedWriteableRegistry import org.opensearch.common.settings.ClusterSettings import org.opensearch.common.settings.IndexScopedSettings import org.opensearch.common.settings.Setting @@ -25,6 +24,7 @@ import org.opensearch.common.settings.Settings import org.opensearch.common.settings.SettingsFilter import org.opensearch.common.util.concurrent.ThreadContext import org.opensearch.core.action.ActionResponse +import org.opensearch.core.common.io.stream.NamedWriteableRegistry import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.XContentParser.Token import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken @@ -123,6 +123,7 @@ import org.opensearch.indexmanagement.rollup.settings.RollupSettings import org.opensearch.indexmanagement.rollup.util.QueryShardContextFactory import org.opensearch.indexmanagement.rollup.util.RollupFieldValueExpressionResolver import org.opensearch.indexmanagement.settings.IndexManagementSettings +import org.opensearch.indexmanagement.snapshotmanagement.SMRunner import org.opensearch.indexmanagement.snapshotmanagement.api.resthandler.RestCreateSMPolicyHandler import org.opensearch.indexmanagement.snapshotmanagement.api.resthandler.RestDeleteSMPolicyHandler import org.opensearch.indexmanagement.snapshotmanagement.api.resthandler.RestExplainSMPolicyHandler @@ -138,7 +139,6 @@ import org.opensearch.indexmanagement.snapshotmanagement.api.transport.get.Trans import org.opensearch.indexmanagement.snapshotmanagement.api.transport.index.TransportIndexSMPolicyAction import org.opensearch.indexmanagement.snapshotmanagement.api.transport.start.TransportStartSMAction import org.opensearch.indexmanagement.snapshotmanagement.api.transport.stop.TransportStopSMAction -import org.opensearch.indexmanagement.snapshotmanagement.SMRunner import org.opensearch.indexmanagement.snapshotmanagement.model.SMMetadata import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy import org.opensearch.indexmanagement.snapshotmanagement.settings.SnapshotManagementSettings @@ -197,7 +197,6 @@ import java.util.function.Supplier @Suppress("TooManyFunctions") class IndexManagementPlugin : JobSchedulerExtension, NetworkPlugin, ActionPlugin, ExtensiblePlugin, SystemIndexPlugin, Plugin() { - private val logger = LogManager.getLogger(javaClass) lateinit var indexManagementIndices: IndexManagementIndices lateinit var actionValidation: ActionValidation @@ -308,7 +307,7 @@ class IndexManagementPlugin : JobSchedulerExtension, NetworkPlugin, ActionPlugin 
extension.overrideClusterStateIndexUuidSetting()?.let { if (customIndexUUIDSetting != null) { error( - "Multiple extensions of IndexManagement plugin overriding ClusterStateIndexUUIDSetting - not supported" + "Multiple extensions of IndexManagement plugin overriding ClusterStateIndexUUIDSetting - not supported", ) } customIndexUUIDSetting = extension.overrideClusterStateIndexUuidSetting() @@ -324,7 +323,7 @@ class IndexManagementPlugin : JobSchedulerExtension, NetworkPlugin, ActionPlugin indexScopedSettings: IndexScopedSettings, settingsFilter: SettingsFilter, indexNameExpressionResolver: IndexNameExpressionResolver, - nodesInCluster: Supplier + nodesInCluster: Supplier, ): List { return listOf( RestRefreshSearchAnalyzerAction(), @@ -358,7 +357,7 @@ class IndexManagementPlugin : JobSchedulerExtension, NetworkPlugin, ActionPlugin RestUpdateSMPolicyHandler(), RestIndexLRONConfigAction(), RestGetLRONConfigAction(), - RestDeleteLRONConfigAction() + RestDeleteLRONConfigAction(), ) } @@ -374,7 +373,7 @@ class IndexManagementPlugin : JobSchedulerExtension, NetworkPlugin, ActionPlugin nodeEnvironment: NodeEnvironment, namedWriteableRegistry: NamedWriteableRegistry, indexNameExpressionResolver: IndexNameExpressionResolver, - repositoriesServiceSupplier: Supplier + repositoriesServiceSupplier: Supplier, ): Collection { val settings = environment.settings() this.clusterService = clusterService @@ -384,37 +383,39 @@ class IndexManagementPlugin : JobSchedulerExtension, NetworkPlugin, ActionPlugin scriptService, xContentRegistry, namedWriteableRegistry, - environment + environment, ) rollupInterceptor = RollupInterceptor(clusterService, settings, indexNameExpressionResolver) val jvmService = JvmService(environment.settings()) - val transformRunner = TransformRunner.initialize( - client, - clusterService, - xContentRegistry, - settings, - indexNameExpressionResolver, - jvmService, - threadPool - ) + val transformRunner = + TransformRunner.initialize( + client, + clusterService, + xContentRegistry, + settings, + indexNameExpressionResolver, + jvmService, + threadPool, + ) fieldCapsFilter = FieldCapsFilter(clusterService, settings, indexNameExpressionResolver) this.indexNameExpressionResolver = indexNameExpressionResolver val skipFlag = SkipExecution(client) RollupFieldValueExpressionResolver.registerServices(scriptService, clusterService) - val rollupRunner = RollupRunner - .registerClient(client) - .registerClusterService(clusterService) - .registerNamedXContentRegistry(xContentRegistry) - .registerScriptService(scriptService) - .registerSettings(settings) - .registerThreadPool(threadPool) - .registerMapperService(RollupMapperService(client, clusterService, indexNameExpressionResolver)) - .registerIndexer(RollupIndexer(settings, clusterService, client)) - .registerSearcher(RollupSearchService(settings, clusterService, client)) - .registerMetadataServices(RollupMetadataService(client, xContentRegistry)) - .registerConsumers() - .registerClusterConfigurationProvider(skipFlag) + val rollupRunner = + RollupRunner + .registerClient(client) + .registerClusterService(clusterService) + .registerNamedXContentRegistry(xContentRegistry) + .registerScriptService(scriptService) + .registerSettings(settings) + .registerThreadPool(threadPool) + .registerMapperService(RollupMapperService(client, clusterService, indexNameExpressionResolver)) + .registerIndexer(RollupIndexer(settings, clusterService, client)) + .registerSearcher(RollupSearchService(settings, clusterService, client)) + 
.registerMetadataServices(RollupMetadataService(client, xContentRegistry)) + .registerConsumers() + .registerClusterConfigurationProvider(skipFlag) indexManagementIndices = IndexManagementIndices(settings, client.admin().indices(), clusterService) val controlCenterIndices = ControlCenterIndices(client.admin().indices(), clusterService) actionValidation = ActionValidation(settings, clusterService, jvmService) @@ -424,47 +425,51 @@ class IndexManagementPlugin : JobSchedulerExtension, NetworkPlugin, ActionPlugin client, threadPool, clusterService, - indexManagementIndices + indexManagementIndices, ) - indexMetadataProvider = IndexMetadataProvider( - settings, client, clusterService, - hashMapOf( - DEFAULT_INDEX_TYPE to DefaultIndexMetadataService(customIndexUUIDSetting) + indexMetadataProvider = + IndexMetadataProvider( + settings, client, clusterService, + hashMapOf( + DEFAULT_INDEX_TYPE to DefaultIndexMetadataService(customIndexUUIDSetting), + ), ) - ) indexMetadataServices.forEach { indexMetadataProvider.addMetadataServices(it) } val extensionChecker = ExtensionStatusChecker(extensionCheckerMap, clusterService) - val managedIndexRunner = ManagedIndexRunner - .registerClient(client) - .registerClusterService(clusterService) - .registerValidationService(actionValidation) - .registerNamedXContentRegistry(xContentRegistry) - .registerScriptService(scriptService) - .registerSettings(settings) - .registerConsumers() // registerConsumers must happen after registerSettings/clusterService - .registerIMIndex(indexManagementIndices) - .registerHistoryIndex(indexStateManagementHistory) - .registerSkipFlag(skipFlag) - .registerThreadPool(threadPool) - .registerExtensionChecker(extensionChecker) - .registerIndexMetadataProvider(indexMetadataProvider) - - val managedIndexCoordinator = ManagedIndexCoordinator( - environment.settings(), - client, clusterService, threadPool, indexManagementIndices, indexMetadataProvider, xContentRegistry - ) + val managedIndexRunner = + ManagedIndexRunner + .registerClient(client) + .registerClusterService(clusterService) + .registerValidationService(actionValidation) + .registerNamedXContentRegistry(xContentRegistry) + .registerScriptService(scriptService) + .registerSettings(settings) + .registerConsumers() // registerConsumers must happen after registerSettings/clusterService + .registerIMIndex(indexManagementIndices) + .registerHistoryIndex(indexStateManagementHistory) + .registerSkipFlag(skipFlag) + .registerThreadPool(threadPool) + .registerExtensionChecker(extensionChecker) + .registerIndexMetadataProvider(indexMetadataProvider) + + val managedIndexCoordinator = + ManagedIndexCoordinator( + environment.settings(), + client, clusterService, threadPool, indexManagementIndices, indexMetadataProvider, xContentRegistry, + ) val smRunner = SMRunner.init(client, threadPool, settings, indexManagementIndices, clusterService) val pluginVersionSweepCoordinator = PluginVersionSweepCoordinator(skipFlag, settings, threadPool, clusterService) - indexOperationActionFilter = IndexOperationActionFilter( - client, clusterService, - ActiveShardsObserver(clusterService, client.threadPool()), - indexNameExpressionResolver, - ) + indexOperationActionFilter = + IndexOperationActionFilter( + client, clusterService, + ActiveShardsObserver(clusterService, client.threadPool()), + indexNameExpressionResolver, + ) TargetIndexMappingService.initialize(client) @@ -479,7 +484,7 @@ class IndexManagementPlugin : JobSchedulerExtension, NetworkPlugin, ActionPlugin indexStateManagementHistory, 
indexMetadataProvider, smRunner, - pluginVersionSweepCoordinator + pluginVersionSweepCoordinator, ) } @@ -551,7 +556,7 @@ class IndexManagementPlugin : JobSchedulerExtension, NetworkPlugin, ActionPlugin LegacyOpenDistroRollupSettings.ROLLUP_ENABLED, LegacyOpenDistroRollupSettings.ROLLUP_SEARCH_ENABLED, LegacyOpenDistroRollupSettings.ROLLUP_DASHBOARDS, - SnapshotManagementSettings.FILTER_BY_BACKEND_ROLES + SnapshotManagementSettings.FILTER_BY_BACKEND_ROLES, ) } @@ -593,7 +598,7 @@ class IndexManagementPlugin : JobSchedulerExtension, NetworkPlugin, ActionPlugin ActionPlugin.ActionHandler(SMActions.GET_SM_POLICIES_ACTION_TYPE, TransportGetSMPoliciesAction::class.java), ActionPlugin.ActionHandler(IndexLRONConfigAction.INSTANCE, TransportIndexLRONConfigAction::class.java), ActionPlugin.ActionHandler(GetLRONConfigAction.INSTANCE, TransportGetLRONConfigAction::class.java), - ActionPlugin.ActionHandler(DeleteLRONConfigAction.INSTANCE, TransportDeleteLRONConfigAction::class.java) + ActionPlugin.ActionHandler(DeleteLRONConfigAction.INSTANCE, TransportDeleteLRONConfigAction::class.java), ) } @@ -609,28 +614,33 @@ class IndexManagementPlugin : JobSchedulerExtension, NetworkPlugin, ActionPlugin return listOf( SystemIndexDescriptor( INDEX_MANAGEMENT_INDEX, - "Index for storing index management configuration and metadata." + "Index for storing index management configuration and metadata.", ), SystemIndexDescriptor( CONTROL_CENTER_INDEX, - "Index for storing notification policy of long running index operations." + "Index for storing notification policy of long running index operations.", ), - ) } } -class GuiceHolder @Inject constructor( - remoteClusterService: TransportService +class GuiceHolder +@Inject +constructor( + remoteClusterService: TransportService, ) : LifecycleComponent { override fun close() { /* do nothing */ } + override fun lifecycleState(): Lifecycle.State? { return null } override fun addLifecycleListener(listener: LifecycleListener) { /* do nothing */ } + override fun removeLifecycleListener(listener: LifecycleListener) { /* do nothing */ } + override fun start() { /* do nothing */ } + override fun stop() { /* do nothing */ } companion object { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/IndexManagementRunner.kt b/src/main/kotlin/org/opensearch/indexmanagement/IndexManagementRunner.kt index e80ad021c..cbeec0e57 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/IndexManagementRunner.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/IndexManagementRunner.kt @@ -19,7 +19,6 @@ import org.opensearch.jobscheduler.spi.ScheduledJobParameter import org.opensearch.jobscheduler.spi.ScheduledJobRunner object IndexManagementRunner : ScheduledJobRunner { - private val logger = LogManager.getLogger(javaClass) override fun runJob(job: ScheduledJobParameter, context: JobExecutionContext) { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/common/model/dimension/DateHistogram.kt b/src/main/kotlin/org/opensearch/indexmanagement/common/model/dimension/DateHistogram.kt index 4f09758ce..6da5b4ed4 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/common/model/dimension/DateHistogram.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/common/model/dimension/DateHistogram.kt @@ -29,9 +29,8 @@ data class DateHistogram( val fixedInterval: String? = null, val calendarInterval: String? = null, val timezone: ZoneId = ZoneId.of(UTC), - val format: String? = null + val format: String? 
= null, ) : Dimension(Type.DATE_HISTOGRAM, sourceField, targetField) { - init { require(sourceField.isNotEmpty() && targetField.isNotEmpty()) { "Source and target field must not be empty" } require(fixedInterval != null || calendarInterval != null) { "Must specify a fixed or calendar interval" } @@ -44,7 +43,7 @@ data class DateHistogram( targetField = sin.readString(), fixedInterval = sin.readOptionalString(), calendarInterval = sin.readOptionalString(), - timezone = sin.readZoneId() + timezone = sin.readZoneId(), ) override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { @@ -109,7 +108,7 @@ data class DateHistogram( fun getRewrittenAggregation( aggregationBuilder: DateHistogramAggregationBuilder, - subAggregations: AggregatorFactories.Builder + subAggregations: AggregatorFactories.Builder, ): DateHistogramAggregationBuilder = DateHistogramAggregationBuilder(aggregationBuilder.name) .also { aggregationBuilder.calendarInterval?.apply { it.calendarInterval(this) } } @@ -168,7 +167,7 @@ data class DateHistogram( fixedInterval = fixedInterval, calendarInterval = calendarInterval, timezone = timezone, - format = format + format = format, ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/common/model/dimension/Dimension.kt b/src/main/kotlin/org/opensearch/indexmanagement/common/model/dimension/Dimension.kt index cea2d094c..882af86aa 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/common/model/dimension/Dimension.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/common/model/dimension/Dimension.kt @@ -17,12 +17,13 @@ import java.io.IOException abstract class Dimension( val type: Type, open val sourceField: String, - open val targetField: String + open val targetField: String, ) : ToXContentObject, Writeable { enum class Type(val type: String) { DATE_HISTOGRAM("date_histogram"), TERMS("terms"), - HISTOGRAM("histogram"); + HISTOGRAM("histogram"), + ; override fun toString(): String { return type @@ -59,12 +60,13 @@ abstract class Dimension( val fieldName = xcp.currentName() xcp.nextToken() - dimension = when (fieldName) { - Type.DATE_HISTOGRAM.type -> DateHistogram.parse(xcp) - Type.TERMS.type -> Terms.parse(xcp) - Type.HISTOGRAM.type -> Histogram.parse(xcp) - else -> throw IllegalArgumentException("Invalid dimension type [$fieldName] found in dimensions") - } + dimension = + when (fieldName) { + Type.DATE_HISTOGRAM.type -> DateHistogram.parse(xcp) + Type.TERMS.type -> Terms.parse(xcp) + Type.HISTOGRAM.type -> Histogram.parse(xcp) + else -> throw IllegalArgumentException("Invalid dimension type [$fieldName] found in dimensions") + } } return requireNotNull(dimension) { "Dimension cannot be null" } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/common/model/dimension/Histogram.kt b/src/main/kotlin/org/opensearch/indexmanagement/common/model/dimension/Histogram.kt index 558a42b5d..f0161251a 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/common/model/dimension/Histogram.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/common/model/dimension/Histogram.kt @@ -26,9 +26,8 @@ import java.io.IOException data class Histogram( override val sourceField: String, override val targetField: String, - val interval: Double + val interval: Double, ) : Dimension(Type.HISTOGRAM, sourceField, targetField) { - init { require(sourceField.isNotEmpty() && targetField.isNotEmpty()) { "Source and target field must not be empty" } require(interval > 0.0) { "Interval must be a positive decimal" } @@ -38,7 +37,7 @@ data class 
Histogram( constructor(sin: StreamInput) : this( sourceField = sin.readString(), targetField = sin.readString(), - interval = sin.readDouble() + interval = sin.readDouble(), ) override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { @@ -87,7 +86,7 @@ data class Histogram( fun getRewrittenAggregation( aggregationBuilder: HistogramAggregationBuilder, - subAggregations: AggregatorFactories.Builder + subAggregations: AggregatorFactories.Builder, ): HistogramAggregationBuilder = HistogramAggregationBuilder(aggregationBuilder.name) .interval(aggregationBuilder.interval()) @@ -109,6 +108,7 @@ data class Histogram( companion object { const val HISTOGRAM_INTERVAL_FIELD = "interval" + // There can be rounding issues with small intervals where the range query will select documents differently than the Histogram // so add an error to the range query and then limit the buckets indexed later. private const val bucketError = 0.00005 @@ -137,7 +137,7 @@ data class Histogram( return Histogram( requireNotNull(sourceField) { "Source field must not be null" }, requireNotNull(targetField) { "Target field must not be null" }, - requireNotNull(interval) { "Interval field must not be null" } + requireNotNull(interval) { "Interval field must not be null" }, ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/common/model/dimension/Terms.kt b/src/main/kotlin/org/opensearch/indexmanagement/common/model/dimension/Terms.kt index 11eb6ef3b..2e32a6c28 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/common/model/dimension/Terms.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/common/model/dimension/Terms.kt @@ -23,9 +23,8 @@ import java.io.IOException data class Terms( override val sourceField: String, - override val targetField: String + override val targetField: String, ) : Dimension(Type.TERMS, sourceField, targetField) { - init { require(sourceField.isNotEmpty() && targetField.isNotEmpty()) { "Source and target field must not be empty" } } @@ -33,7 +32,7 @@ data class Terms( @Throws(IOException::class) constructor(sin: StreamInput) : this( sourceField = sin.readString(), - targetField = sin.readString() + targetField = sin.readString(), ) override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { @@ -72,7 +71,7 @@ data class Terms( // TODO missing terms field fun getRewrittenAggregation( aggregationBuilder: TermsAggregationBuilder, - subAggregations: AggregatorFactories.Builder + subAggregations: AggregatorFactories.Builder, ): TermsAggregationBuilder = TermsAggregationBuilder(aggregationBuilder.name) .also { aggregationBuilder.collectMode()?.apply { it.collectMode(this) } } @@ -114,7 +113,7 @@ data class Terms( ensureExpectedToken( Token.START_OBJECT, xcp.currentToken(), - xcp + xcp, ) while (xcp.nextToken() != Token.END_OBJECT) { val fieldName = xcp.currentName() @@ -129,7 +128,7 @@ data class Terms( if (targetField == null) targetField = sourceField return Terms( requireNotNull(sourceField) { "Source field cannot be null" }, - requireNotNull(targetField) { "Target field cannot be null" } + requireNotNull(targetField) { "Target field cannot be null" }, ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/common/model/notification/Channel.kt b/src/main/kotlin/org/opensearch/indexmanagement/common/model/notification/Channel.kt index ebdff4e35..a9e723062 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/common/model/notification/Channel.kt +++ 
b/src/main/kotlin/org/opensearch/indexmanagement/common/model/notification/Channel.kt @@ -7,6 +7,12 @@ package org.opensearch.indexmanagement.common.model.notification import org.opensearch.client.Client import org.opensearch.client.node.NodeClient +import org.opensearch.commons.ConfigConstants +import org.opensearch.commons.authuser.User +import org.opensearch.commons.notifications.NotificationsPluginInterface +import org.opensearch.commons.notifications.action.SendNotificationResponse +import org.opensearch.commons.notifications.model.ChannelMessage +import org.opensearch.commons.notifications.model.EventSource import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable @@ -14,18 +20,11 @@ import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.commons.ConfigConstants -import org.opensearch.commons.authuser.User -import org.opensearch.commons.notifications.NotificationsPluginInterface -import org.opensearch.commons.notifications.action.SendNotificationResponse -import org.opensearch.commons.notifications.model.ChannelMessage -import org.opensearch.commons.notifications.model.EventSource import org.opensearch.indexmanagement.opensearchapi.suspendUntil import org.opensearch.indexmanagement.util.SecurityUtils.Companion.generateUserString import java.io.IOException data class Channel(val id: String) : ToXContent, Writeable { - init { require(id.isNotEmpty()) { "Channel ID cannot be empty" } } @@ -38,7 +37,7 @@ data class Channel(val id: String) : ToXContent, Writeable { @Throws(IOException::class) constructor(sin: StreamInput) : this( - sin.readString() + sin.readString(), ) @Throws(IOException::class) @@ -77,21 +76,22 @@ data class Channel(val id: String) : ToXContent, Writeable { client: Client, eventSource: EventSource, message: String, - user: User? 
+ user: User?, ) { val channel = this client.threadPool().threadContext.stashContext().use { // We need to set the user context information in the thread context for notification plugin to correctly resolve the user object client.threadPool().threadContext.putTransient(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, generateUserString(user)) - val res: SendNotificationResponse = NotificationsPluginInterface.suspendUntil { - this.sendNotification( - (client as NodeClient), - eventSource, - ChannelMessage(message, null, null), - listOf(channel.id), - it - ) - } + val res: SendNotificationResponse = + NotificationsPluginInterface.suspendUntil { + this.sendNotification( + (client as NodeClient), + eventSource, + ChannelMessage(message, null, null), + listOf(channel.id), + it, + ) + } validateResponseStatus(res.getStatus(), res.notificationEvent.eventSource.referenceId) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/common/model/notification/NotificationUtils.kt b/src/main/kotlin/org/opensearch/indexmanagement/common/model/notification/NotificationUtils.kt index e39ef0445..93db93841 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/common/model/notification/NotificationUtils.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/common/model/notification/NotificationUtils.kt @@ -4,6 +4,7 @@ */ @file:JvmName("NotificationUtils") + package org.opensearch.indexmanagement.common.model.notification import org.opensearch.OpenSearchStatusException @@ -12,12 +13,13 @@ import org.opensearch.core.rest.RestStatus /** * all valid response status */ -private val VALID_RESPONSE_STATUS = setOf( - RestStatus.OK.status, RestStatus.CREATED.status, RestStatus.ACCEPTED.status, - RestStatus.NON_AUTHORITATIVE_INFORMATION.status, RestStatus.NO_CONTENT.status, - RestStatus.RESET_CONTENT.status, RestStatus.PARTIAL_CONTENT.status, - RestStatus.MULTI_STATUS.status -) +private val VALID_RESPONSE_STATUS = + setOf( + RestStatus.OK.status, RestStatus.CREATED.status, RestStatus.ACCEPTED.status, + RestStatus.NON_AUTHORITATIVE_INFORMATION.status, RestStatus.NO_CONTENT.status, + RestStatus.RESET_CONTENT.status, RestStatus.PARTIAL_CONTENT.status, + RestStatus.MULTI_STATUS.status, + ) @Throws(OpenSearchStatusException::class) fun validateResponseStatus(restStatus: RestStatus, responseContent: String) { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/common/model/rest/SearchParams.kt b/src/main/kotlin/org/opensearch/indexmanagement/common/model/rest/SearchParams.kt index 53a6ac795..62157ecb1 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/common/model/rest/SearchParams.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/common/model/rest/SearchParams.kt @@ -24,16 +24,15 @@ data class SearchParams( val from: Int, val sortField: String, val sortOrder: String, - val queryString: String + val queryString: String, ) : Writeable { - @Throws(IOException::class) constructor(sin: StreamInput) : this( size = sin.readInt(), from = sin.readInt(), sortField = sin.readString(), sortOrder = sin.readString(), - queryString = sin.readString() + queryString = sin.readString(), ) @Throws(IOException::class) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/ControlCenterIndices.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/ControlCenterIndices.kt index 1aa70dbf1..11710ea23 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/ControlCenterIndices.kt +++ 
b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/ControlCenterIndices.kt @@ -7,13 +7,13 @@ package org.opensearch.indexmanagement.controlcenter.notification import org.opensearch.ExceptionsHelper import org.opensearch.ResourceAlreadyExistsException -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.indices.create.CreateIndexRequest import org.opensearch.action.admin.indices.create.CreateIndexResponse import org.opensearch.action.support.master.AcknowledgedResponse import org.opensearch.client.IndicesAdminClient import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings +import org.opensearch.core.action.ActionListener import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.indexstatemanagement.util.INDEX_HIDDEN import org.opensearch.indexmanagement.util.IndexUtils @@ -22,33 +22,37 @@ class ControlCenterIndices( private val client: IndicesAdminClient, private val clusterService: ClusterService, ) { - fun checkAndUpdateControlCenterIndex(actionListener: ActionListener) { if (!controlCenterIndexExists()) { - val indexRequest = CreateIndexRequest(IndexManagementPlugin.CONTROL_CENTER_INDEX) - .mapping(controlCenterMappings) - .settings(Settings.builder().put(INDEX_HIDDEN, true).build()) + val indexRequest = + CreateIndexRequest(IndexManagementPlugin.CONTROL_CENTER_INDEX) + .mapping(controlCenterMappings) + .settings(Settings.builder().put(INDEX_HIDDEN, true).build()) client.create( indexRequest, object : ActionListener { override fun onFailure(e: Exception) { if (ExceptionsHelper.unwrapCause(e) is ResourceAlreadyExistsException) { - /* if two request create the control center index at the same time, may raise this exception */ - /* but we don't take it as error */ + /* + * if two requests create the control center index at the same time, this exception may be raised, + * but we don't treat it as an error + */ actionListener.onResponse( CreateIndexResponse( true, true, - IndexManagementPlugin.CONTROL_CENTER_INDEX - ) + IndexManagementPlugin.CONTROL_CENTER_INDEX, + ), ) - } else actionListener.onFailure(e) + } else { + actionListener.onFailure(e) + } } override fun onResponse(response: CreateIndexResponse) { actionListener.onResponse(response) } - } + }, ) } else { IndexUtils.checkAndUpdateIndexMapping( @@ -57,7 +61,7 @@ class ControlCenterIndices( controlCenterMappings, clusterService.state(), client, - actionListener + actionListener, ) } } @@ -65,7 +69,8 @@ class ControlCenterIndices( private fun controlCenterIndexExists(): Boolean = clusterService.state().routingTable.hasIndex(IndexManagementPlugin.CONTROL_CENTER_INDEX) companion object { - val controlCenterMappings = ControlCenterIndices::class.java.classLoader - .getResource("mappings/opensearch-control-center.json")!!.readText() + val controlCenterMappings = + ControlCenterIndices::class.java.classLoader + .getResource("mappings/opensearch-control-center.json")!!.readText() } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/LRONConfigResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/LRONConfigResponse.kt index 560384ed1..8e95262d6 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/LRONConfigResponse.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/LRONConfigResponse.kt @@ -20,12 +20,12 @@ import java.io.IOException class LRONConfigResponse( val id: String, - val
lronConfig: LRONConfig + val lronConfig: LRONConfig, ) : ActionResponse(), ToXContentObject { @Throws(IOException::class) constructor(sin: StreamInput) : this( id = sin.readString(), - lronConfig = LRONConfig(sin) + lronConfig = LRONConfig(sin), ) override fun writeTo(out: StreamOutput) { @@ -37,7 +37,7 @@ class LRONConfigResponse( builder.startObject() .field(_ID, id) - /* drop user info in rest layer. only keep user info in transport layer */ + // drop user info in rest layer. only keep user info in transport layer val lronConfigParams = ToXContent.MapParams(mapOf(WITH_TYPE to "false", WITH_USER to "false", WITH_PRIORITY to "false")) builder.field(LRONConfig.LRON_CONFIG_FIELD, lronConfig, lronConfigParams) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/delete/DeleteLRONConfigRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/delete/DeleteLRONConfigRequest.kt index e6a73030a..390e33aec 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/delete/DeleteLRONConfigRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/delete/DeleteLRONConfigRequest.kt @@ -14,20 +14,21 @@ import org.opensearch.indexmanagement.controlcenter.notification.util.LRON_DOC_I import java.io.IOException class DeleteLRONConfigRequest( - val docId: String + val docId: String, ) : ActionRequest() { @Throws(IOException::class) constructor(sin: StreamInput) : this( - docId = sin.readString() + docId = sin.readString(), ) override fun validate(): ActionRequestValidationException? { var validationException: ActionRequestValidationException? = null if (!(docId.startsWith(LRON_DOC_ID_PREFIX))) { - validationException = ValidateActions.addValidationError( - "Invalid LRONConfig ID", - validationException - ) + validationException = + ValidateActions.addValidationError( + "Invalid LRONConfig ID", + validationException, + ) } return validationException } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/delete/TransportDeleteLRONConfigAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/delete/TransportDeleteLRONConfigAction.kt index ea9d40fba..051718374 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/delete/TransportDeleteLRONConfigAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/delete/TransportDeleteLRONConfigAction.kt @@ -6,7 +6,6 @@ package org.opensearch.indexmanagement.controlcenter.notification.action.delete import org.apache.logging.log4j.LogManager -import org.opensearch.core.action.ActionListener import org.opensearch.action.delete.DeleteRequest import org.opensearch.action.delete.DeleteResponse import org.opensearch.action.support.ActionFilters @@ -15,16 +14,19 @@ import org.opensearch.action.support.WriteRequest import org.opensearch.client.node.NodeClient import org.opensearch.common.inject.Inject import org.opensearch.commons.ConfigConstants +import org.opensearch.core.action.ActionListener import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.tasks.Task import org.opensearch.transport.TransportService -class TransportDeleteLRONConfigAction @Inject constructor( +class TransportDeleteLRONConfigAction +@Inject +constructor( val client: NodeClient, transportService: TransportService, - actionFilters: ActionFilters + actionFilters: ActionFilters, ) 
: HandledTransportAction( - DeleteLRONConfigAction.NAME, transportService, actionFilters, ::DeleteLRONConfigRequest + DeleteLRONConfigAction.NAME, transportService, actionFilters, ::DeleteLRONConfigRequest, ) { private val log = LogManager.getLogger(javaClass) @@ -36,20 +38,21 @@ class TransportDeleteLRONConfigAction @Inject constructor( private val client: NodeClient, private val actionListener: ActionListener, private val request: DeleteLRONConfigRequest, - private val docId: String = request.docId + private val docId: String = request.docId, ) { fun start() { log.debug( "User and roles string from thread context: ${ - client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - ) - }" + client.threadPool().threadContext.getTransient( + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + ) + }", ) client.threadPool().threadContext.stashContext().use { - val deleteRequest = DeleteRequest(IndexManagementPlugin.CONTROL_CENTER_INDEX, docId) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + val deleteRequest = + DeleteRequest(IndexManagementPlugin.CONTROL_CENTER_INDEX, docId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) client.delete(deleteRequest, actionListener) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/get/GetLRONConfigRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/get/GetLRONConfigRequest.kt index 6d41f7fdb..730a6a6f8 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/get/GetLRONConfigRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/get/GetLRONConfigRequest.kt @@ -16,33 +16,36 @@ import java.io.IOException class GetLRONConfigRequest( val docId: String? = null, - val searchParams: SearchParams? = null + val searchParams: SearchParams? = null, ) : ActionRequest() { @Throws(IOException::class) constructor(sin: StreamInput) : this( docId = sin.readOptionalString(), - searchParams = sin.readOptionalWriteable(::SearchParams) + searchParams = sin.readOptionalWriteable(::SearchParams), ) override fun validate(): ActionRequestValidationException? { var validationException: ActionRequestValidationException? 
= null if (null == docId && null == searchParams) { - validationException = ValidateActions.addValidationError( - "GetLRONConfigRequest must contain docId or searchParams", - validationException - ) + validationException = + ValidateActions.addValidationError( + "GetLRONConfigRequest must contain docId or searchParams", + validationException, + ) } if (null != docId && null != searchParams) { - validationException = ValidateActions.addValidationError( - "Get LRONConfig requires either docId or searchParams to be specified", - validationException - ) + validationException = + ValidateActions.addValidationError( + "Get LRONConfig requires either docId or searchParams to be specified", + validationException, + ) } if (null != docId && !docId.startsWith(LRON_DOC_ID_PREFIX)) { - validationException = ValidateActions.addValidationError( - "Invalid LRONConfig ID", - validationException - ) + validationException = + ValidateActions.addValidationError( + "Invalid LRONConfig ID", + validationException, + ) } return validationException } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/get/TransportGetLRONConfigAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/get/TransportGetLRONConfigAction.kt index 25c2a42a8..b54474f97 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/get/TransportGetLRONConfigAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/get/TransportGetLRONConfigAction.kt @@ -7,7 +7,6 @@ package org.opensearch.indexmanagement.controlcenter.notification.action.get import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper -import org.opensearch.core.action.ActionListener import org.opensearch.action.search.SearchRequest import org.opensearch.action.search.SearchResponse import org.opensearch.action.support.ActionFilters @@ -18,8 +17,9 @@ import org.opensearch.common.inject.Inject import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.commons.ConfigConstants +import org.opensearch.core.action.ActionListener +import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.index.IndexNotFoundException import org.opensearch.index.query.QueryBuilders import org.opensearch.indexmanagement.IndexManagementPlugin @@ -31,13 +31,15 @@ import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.tasks.Task import org.opensearch.transport.TransportService -class TransportGetLRONConfigAction @Inject constructor( +class TransportGetLRONConfigAction +@Inject +constructor( val client: NodeClient, transportService: TransportService, actionFilters: ActionFilters, val xContentRegistry: NamedXContentRegistry, ) : HandledTransportAction( - GetLRONConfigAction.NAME, transportService, actionFilters, ::GetLRONConfigRequest + GetLRONConfigAction.NAME, transportService, actionFilters, ::GetLRONConfigRequest, ) { private val log = LogManager.getLogger(javaClass) @@ -48,13 +50,13 @@ class TransportGetLRONConfigAction @Inject constructor( inner class GetLRONConfigHandler( private val client: NodeClient, private val actionListener: ActionListener, - private val request: GetLRONConfigRequest + private val request: GetLRONConfigRequest, ) { fun start() { log.debug( "User and roles string from thread 
context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) client.threadPool().threadContext.stashContext().use { if (null != request.docId) { @@ -70,7 +72,7 @@ class TransportGetLRONConfigAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } else { doSearch() @@ -81,36 +83,41 @@ class TransportGetLRONConfigAction @Inject constructor( private fun doSearch() { val params = request.searchParams val sortBuilder = params!!.getSortBuilder() - val queryBuilder = QueryBuilders.boolQuery() - .must(QueryBuilders.existsQuery("lron_config")) - .must(QueryBuilders.queryStringQuery(params.queryString)) + val queryBuilder = + QueryBuilders.boolQuery() + .must(QueryBuilders.existsQuery("lron_config")) + .must(QueryBuilders.queryStringQuery(params.queryString)) - val searchSourceBuilder = SearchSourceBuilder() - .query(queryBuilder) - .sort(sortBuilder) - .from(params.from) - .size(params.size) + val searchSourceBuilder = + SearchSourceBuilder() + .query(queryBuilder) + .sort(sortBuilder) + .from(params.from) + .size(params.size) - val searchRequest = SearchRequest() - .source(searchSourceBuilder) - .indices(IndexManagementPlugin.CONTROL_CENTER_INDEX) - .preference(Preference.PRIMARY_FIRST.type()) + val searchRequest = + SearchRequest() + .source(searchSourceBuilder) + .indices(IndexManagementPlugin.CONTROL_CENTER_INDEX) + .preference(Preference.PRIMARY_FIRST.type()) client.search( searchRequest, object : ActionListener { override fun onResponse(response: SearchResponse) { val totalNumber = response.hits.totalHits?.value ?: 0 - val lronConfigResponses = response.hits.hits.map { - val xcp = XContentHelper.createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, it.sourceRef, XContentType.JSON - ) - LRONConfigResponse( - id = it.id, - lronConfig = xcp.parseWithType(id = it.id, parse = LRONConfig.Companion::parse) - ) - } + val lronConfigResponses = + response.hits.hits.map { + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, it.sourceRef, XContentType.JSON, + ) + LRONConfigResponse( + id = it.id, + lronConfig = xcp.parseWithType(id = it.id, parse = LRONConfig.Companion::parse), + ) + } actionListener.onResponse(GetLRONConfigResponse(lronConfigResponses, totalNumber.toInt())) } @@ -122,7 +129,7 @@ class TransportGetLRONConfigAction @Inject constructor( } actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/index/IndexLRONConfigRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/index/IndexLRONConfigRequest.kt index f8041930c..32e71d37d 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/index/IndexLRONConfigRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/index/IndexLRONConfigRequest.kt @@ -14,13 +14,13 @@ import java.io.IOException class IndexLRONConfigRequest( val lronConfig: LRONConfig, val isUpdate: Boolean = false, - val dryRun: Boolean = false + val dryRun: Boolean = false, ) : ActionRequest() { @Throws(IOException::class) constructor(sin: StreamInput) : this( lronConfig = LRONConfig(sin), isUpdate = sin.readBoolean(), - dryRun = 
sin.readBoolean() + dryRun = sin.readBoolean(), ) override fun validate() = null diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/index/TransportIndexLRONConfigAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/index/TransportIndexLRONConfigAction.kt index 811a69a8c..fc1ee3156 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/index/TransportIndexLRONConfigAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/action/index/TransportIndexLRONConfigAction.kt @@ -8,7 +8,6 @@ package org.opensearch.indexmanagement.controlcenter.notification.action.index import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.DocWriteRequest import org.opensearch.action.index.IndexRequest import org.opensearch.action.index.IndexResponse @@ -22,6 +21,8 @@ import org.opensearch.common.inject.Inject import org.opensearch.common.xcontent.XContentFactory import org.opensearch.commons.ConfigConstants import org.opensearch.commons.authuser.User +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.controlcenter.notification.ControlCenterIndices @@ -29,12 +30,13 @@ import org.opensearch.indexmanagement.controlcenter.notification.LRONConfigRespo import org.opensearch.indexmanagement.controlcenter.notification.util.getDocID import org.opensearch.indexmanagement.controlcenter.notification.util.getPriority import org.opensearch.indexmanagement.util.SecurityUtils -import org.opensearch.core.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService @Suppress("LongParameterList") -class TransportIndexLRONConfigAction @Inject constructor( +class TransportIndexLRONConfigAction +@Inject +constructor( val client: NodeClient, transportService: TransportService, actionFilters: ActionFilters, @@ -42,7 +44,7 @@ class TransportIndexLRONConfigAction @Inject constructor( val controlCenterIndices: ControlCenterIndices, val xContentRegistry: NamedXContentRegistry, ) : HandledTransportAction( - IndexLRONConfigAction.NAME, transportService, actionFilters, ::IndexLRONConfigRequest + IndexLRONConfigAction.NAME, transportService, actionFilters, ::IndexLRONConfigRequest, ) { private val log = LogManager.getLogger(javaClass) @@ -55,22 +57,22 @@ class TransportIndexLRONConfigAction @Inject constructor( private val actionListener: ActionListener, private val request: IndexLRONConfigRequest, private val user: User? 
= SecurityUtils.buildUser(client.threadPool().threadContext), - private val docId: String = getDocID(request.lronConfig.taskId, request.lronConfig.actionName) + private val docId: String = getDocID(request.lronConfig.taskId, request.lronConfig.actionName), ) { fun start() { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) client.threadPool().threadContext.stashContext().use { - /* we use dryRun to help check permission and do request validation */ + // we use dryRun to help check permission and do request validation if (request.dryRun) { validate() return } controlCenterIndices.checkAndUpdateControlCenterIndex( - ActionListener.wrap(::onCreateMappingsResponse, actionListener::onFailure) + ActionListener.wrap(::onCreateMappingsResponse, actionListener::onFailure), ) } } @@ -87,7 +89,7 @@ class TransportIndexLRONConfigAction @Inject constructor( } private fun validate() { - /* check whether the node id in task id exists */ + // check whether the node id in task id exists if (null != request.lronConfig.taskId && null == clusterService.state().nodes.get(request.lronConfig.taskId.nodeId)) { actionListener.onFailure(IllegalArgumentException("Illegal taskID. NodeID not exists.")) return @@ -96,21 +98,23 @@ class TransportIndexLRONConfigAction @Inject constructor( } private fun putLRONConfig() { - val lronConfig = request.lronConfig.copy( - user = this.user, - priority = getPriority(request.lronConfig.taskId, request.lronConfig.actionName) - ) + val lronConfig = + request.lronConfig.copy( + user = this.user, + priority = getPriority(request.lronConfig.taskId, request.lronConfig.actionName), + ) if (request.dryRun) { actionListener.onResponse(LRONConfigResponse(docId, lronConfig)) return } - val indexRequest = IndexRequest(IndexManagementPlugin.CONTROL_CENTER_INDEX) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source(lronConfig.toXContent(XContentFactory.jsonBuilder())) - .id(docId) - .timeout(IndexRequest.DEFAULT_TIMEOUT) + val indexRequest = + IndexRequest(IndexManagementPlugin.CONTROL_CENTER_INDEX) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(lronConfig.toXContent(XContentFactory.jsonBuilder())) + .id(docId) + .timeout(IndexRequest.DEFAULT_TIMEOUT) if (!request.isUpdate) { indexRequest.opType(DocWriteRequest.OpType.CREATE) } @@ -126,8 +130,8 @@ class TransportIndexLRONConfigAction @Inject constructor( actionListener.onResponse( LRONConfigResponse( response.id, - lronConfig - ) + lronConfig, + ), ) } } @@ -135,7 +139,7 @@ class TransportIndexLRONConfigAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/IndexOperationActionFilter.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/IndexOperationActionFilter.kt index 6b578b4bd..da06accc9 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/IndexOperationActionFilter.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/IndexOperationActionFilter.kt @@ -6,15 +6,15 @@ package org.opensearch.indexmanagement.controlcenter.notification.filter import org.apache.logging.log4j.LogManager -import 
org.opensearch.core.action.ActionListener import org.opensearch.action.ActionRequest -import org.opensearch.core.action.ActionResponse import org.opensearch.action.support.ActionFilter import org.opensearch.action.support.ActionFilterChain import org.opensearch.action.support.ActiveShardsObserver import org.opensearch.client.Client import org.opensearch.cluster.metadata.IndexNameExpressionResolver import org.opensearch.cluster.service.ClusterService +import org.opensearch.core.action.ActionListener +import org.opensearch.core.action.ActionResponse import org.opensearch.core.tasks.TaskId import org.opensearch.indexmanagement.controlcenter.notification.util.supportedActions import org.opensearch.tasks.Task @@ -23,18 +23,18 @@ class IndexOperationActionFilter( val client: Client, val clusterService: ClusterService, val activeShardsObserver: ActiveShardsObserver, - val indexNameExpressionResolver: IndexNameExpressionResolver + val indexNameExpressionResolver: IndexNameExpressionResolver, ) : ActionFilter { - private val logger = LogManager.getLogger(IndexOperationActionFilter::class.java) override fun order() = Integer.MAX_VALUE + override fun apply( task: Task, action: String, request: Request, listener: ActionListener, - chain: ActionFilterChain + chain: ActionFilterChain, ) { chain.proceed(task, action, request, wrapActionListener(task, action, request, listener)) } @@ -50,16 +50,17 @@ class IndexOperationActionFilter( if (task.parentTaskId.isSet == false) { val taskId = TaskId(clusterService.localNode().id, task.id) logger.info("Add notification action listener for tasks: {} and action: {} ", taskId.toString(), action) - wrappedListener = NotificationActionListener( - delegate = listener, - client = client, - action = action, - clusterService = clusterService, - task = task, - request = request, - activeShardsObserver = activeShardsObserver, - indexNameExpressionResolver = indexNameExpressionResolver - ) + wrappedListener = + NotificationActionListener( + delegate = listener, + client = client, + action = action, + clusterService = clusterService, + task = task, + request = request, + activeShardsObserver = activeShardsObserver, + indexNameExpressionResolver = indexNameExpressionResolver, + ) } } return wrappedListener diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/NotificationActionListener.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/NotificationActionListener.kt index c69409dca..46ea2fd07 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/NotificationActionListener.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/NotificationActionListener.kt @@ -12,9 +12,7 @@ import kotlinx.coroutines.SupervisorJob import kotlinx.coroutines.launch import org.apache.logging.log4j.LogManager import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.ActionRequest -import org.opensearch.core.action.ActionResponse import org.opensearch.action.DocWriteResponse import org.opensearch.action.admin.indices.forcemerge.ForceMergeAction import org.opensearch.action.admin.indices.forcemerge.ForceMergeRequest @@ -34,13 +32,23 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.unit.TimeValue import org.opensearch.commons.notifications.model.EventSource import org.opensearch.commons.notifications.model.SeverityType +import 
org.opensearch.core.action.ActionListener +import org.opensearch.core.action.ActionResponse +import org.opensearch.core.tasks.TaskId import org.opensearch.index.IndexNotFoundException import org.opensearch.index.reindex.BulkByScrollResponse import org.opensearch.index.reindex.ReindexAction import org.opensearch.index.reindex.ReindexRequest +import org.opensearch.indexmanagement.common.model.rest.DEFAULT_PAGINATION_SIZE +import org.opensearch.indexmanagement.common.model.rest.SORT_ORDER_DESC +import org.opensearch.indexmanagement.common.model.rest.SearchParams import org.opensearch.indexmanagement.controlcenter.notification.LRONConfigResponse import org.opensearch.indexmanagement.controlcenter.notification.action.delete.DeleteLRONConfigAction import org.opensearch.indexmanagement.controlcenter.notification.action.delete.DeleteLRONConfigRequest +import org.opensearch.indexmanagement.controlcenter.notification.action.get.GetLRONConfigAction +import org.opensearch.indexmanagement.controlcenter.notification.action.get.GetLRONConfigRequest +import org.opensearch.indexmanagement.controlcenter.notification.action.get.GetLRONConfigResponse +import org.opensearch.indexmanagement.controlcenter.notification.filter.parser.ActionRespParseResult import org.opensearch.indexmanagement.controlcenter.notification.filter.parser.ForceMergeIndexRespParser import org.opensearch.indexmanagement.controlcenter.notification.filter.parser.OpenIndexRespParser import org.opensearch.indexmanagement.controlcenter.notification.filter.parser.ReindexRespParser @@ -48,17 +56,9 @@ import org.opensearch.indexmanagement.controlcenter.notification.filter.parser.R import org.opensearch.indexmanagement.controlcenter.notification.model.LRONConfig import org.opensearch.indexmanagement.controlcenter.notification.util.DEFAULT_LRON_CONFIG_SORT_FIELD import org.opensearch.indexmanagement.controlcenter.notification.util.getDocID -import org.opensearch.indexmanagement.common.model.rest.DEFAULT_PAGINATION_SIZE -import org.opensearch.indexmanagement.common.model.rest.SORT_ORDER_DESC -import org.opensearch.indexmanagement.common.model.rest.SearchParams -import org.opensearch.indexmanagement.controlcenter.notification.action.get.GetLRONConfigAction -import org.opensearch.indexmanagement.controlcenter.notification.action.get.GetLRONConfigRequest -import org.opensearch.indexmanagement.controlcenter.notification.action.get.GetLRONConfigResponse -import org.opensearch.indexmanagement.controlcenter.notification.filter.parser.ActionRespParseResult import org.opensearch.indexmanagement.opensearchapi.retry import org.opensearch.indexmanagement.util.OpenForTesting import org.opensearch.tasks.Task -import org.opensearch.core.tasks.TaskId import org.opensearch.threadpool.ThreadPool.Names.GENERIC import java.util.function.Consumer @@ -75,7 +75,6 @@ class NotificationActionListener, CoroutineScope by CoroutineScope(SupervisorJob() + Dispatchers.Default + CoroutineName("NotificationActionListener")) { - private val logger = LogManager.getLogger(NotificationActionListener::class.java) @Suppress("MagicNumber") @@ -102,17 +101,18 @@ class NotificationActionListener { result -> - // delay the sending time 5s for runtime policy - client.threadPool().schedule({ - notify(action, result) - }, DELAY, GENERIC) - } + val callback = + Consumer { result -> + // delay the sending time 5s for runtime policy + client.threadPool().schedule({ + notify(action, result) + }, DELAY, GENERIC) + } when (action) { ResizeAction.NAME -> { ResizeIndexRespParser( - 
activeShardsObserver, request as ResizeRequest, clusterService + activeShardsObserver, request as ResizeRequest, clusterService, ).parseAndSendNotification(if (response == null) null else response as ResizeResponse, ex, callback) } @@ -120,30 +120,30 @@ class NotificationActionListener { OpenIndexRespParser( - activeShardsObserver, request as OpenIndexRequest, indexNameExpressionResolver, clusterService + activeShardsObserver, request as OpenIndexRequest, indexNameExpressionResolver, clusterService, ).parseAndSendNotification( if (response == null) null else response as OpenIndexResponse, ex, - callback + callback, ) } ForceMergeAction.NAME -> { ForceMergeIndexRespParser( request as ForceMergeRequest, - clusterService + clusterService, ).parseAndSendNotification( - if (response == null) null else response as ForceMergeResponse, ex, callback + if (response == null) null else response as ForceMergeResponse, ex, callback, ) } @@ -163,9 +163,10 @@ class NotificationActionListener { override fun onResponse(response: DeleteResponse) { if (response.result == DocWriteResponse.Result.DELETED) { logger.info( - "One time notification policy for task: {} has been removed", taskId + "One time notification policy for task: {} has been removed", taskId, ) } } @@ -266,7 +267,7 @@ class NotificationActionListener { - private val indexNameWithCluster = getIndexName(request, clusterService) override fun parseAndSendNotification( @@ -29,8 +28,8 @@ class ForceMergeIndexRespParser(val request: ForceMergeRequest, val clusterServi ActionRespParseResult( OperationResult.FAILED, buildNotificationMessage(null, ex), - buildNotificationTitle(OperationResult.FAILED) - ) + buildNotificationTitle(OperationResult.FAILED), + ), ) return } @@ -43,16 +42,16 @@ class ForceMergeIndexRespParser(val request: ForceMergeRequest, val clusterServi ActionRespParseResult( OperationResult.FAILED, buildNotificationMessage(response), - buildNotificationTitle(OperationResult.FAILED) - ) + buildNotificationTitle(OperationResult.FAILED), + ), ) } else { callback.accept( ActionRespParseResult( OperationResult.COMPLETE, buildNotificationMessage(response), - buildNotificationTitle(OperationResult.COMPLETE) - ) + buildNotificationTitle(OperationResult.COMPLETE), + ), ) } } @@ -63,16 +62,18 @@ class ForceMergeIndexRespParser(val request: ForceMergeRequest, val clusterServi isTimeout: Boolean, ): String { return if (exception != null) { - if (exception is OpenSearchException) + if (exception is OpenSearchException) { "index [" + exception.index.name + "] ${exception.message}." - else + } else { exception.message ?: "" - } else if (response != null && !response.shardFailures.isNullOrEmpty()) + } + } else if (response != null && !response.shardFailures.isNullOrEmpty()) { response.shardFailures.joinToString(",") { "index [${it.index()}] shard [${it.shardId()}] ${it.reason()}" } - else if (request.indices().size == 1) + } else if (request.indices().size == 1) { "The force merge operation on $indexNameWithCluster ${NotificationActionListener.COMPLETED}" - else + } else { "$indexNameWithCluster have been merged." 
+ } } override fun buildNotificationTitle(operationResult: OperationResult): String { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/OpenIndexRespParser.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/OpenIndexRespParser.kt index 4f6d2d011..e927bf66d 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/OpenIndexRespParser.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/OpenIndexRespParser.kt @@ -24,7 +24,6 @@ class OpenIndexRespParser( val indexNameExpressionResolver: IndexNameExpressionResolver, val clusterService: ClusterService, ) : ResponseParser { - private var totalWaitTime: TimeValue = NotificationActionListener.MAX_WAIT_TIME private val indexNameWithCluster = getIndexName(request, clusterService) @@ -39,8 +38,8 @@ class OpenIndexRespParser( ActionRespParseResult( OperationResult.FAILED, buildNotificationMessage(null, ex), - buildNotificationTitle(OperationResult.FAILED) - ) + buildNotificationTitle(OperationResult.FAILED), + ), ) return } @@ -68,8 +67,8 @@ class OpenIndexRespParser( ActionRespParseResult( if (shardsAcknowledged) OperationResult.COMPLETE else OperationResult.TIMEOUT, buildNotificationMessage(response, isTimeout = !shardsAcknowledged), - buildNotificationTitle(if (shardsAcknowledged) OperationResult.COMPLETE else OperationResult.TIMEOUT) - ) + buildNotificationTitle(if (shardsAcknowledged) OperationResult.COMPLETE else OperationResult.TIMEOUT), + ), ) }, { e: Exception -> @@ -78,18 +77,18 @@ class OpenIndexRespParser( ActionRespParseResult( OperationResult.FAILED, buildNotificationMessage(response, e), - buildNotificationTitle(OperationResult.FAILED) - ) + buildNotificationTitle(OperationResult.FAILED), + ), ) - } + }, ) } else { callback.accept( ActionRespParseResult( OperationResult.TIMEOUT, buildNotificationMessage(response, isTimeout = true), - buildNotificationTitle(OperationResult.TIMEOUT) - ) + buildNotificationTitle(OperationResult.TIMEOUT), + ), ) } } else { @@ -97,8 +96,8 @@ class OpenIndexRespParser( ActionRespParseResult( OperationResult.COMPLETE, buildNotificationMessage(response), - buildNotificationTitle(OperationResult.COMPLETE) - ) + buildNotificationTitle(OperationResult.COMPLETE), + ), ) } } @@ -110,16 +109,18 @@ class OpenIndexRespParser( ): String { val indexes = indexNameWithCluster + if (request.indices().size == 1) " has" else " have" - return if (isTimeout) + return if (isTimeout) { "Opening the index $indexes taken more than ${totalWaitTime.toHumanReadableString(1)} to complete. " + "To see the latest status, use `GET /${request.indices().joinToString(",")}/_recovery`" - else if (exception != null) - if (exception is OpenSearchException) + } else if (exception != null) { + if (exception is OpenSearchException) { "index [" + exception.index.name + "] ${exception.message}." - else + } else { exception.message ?: "" - else + } + } else { "$indexes been set to open." 
+ } } override fun buildNotificationTitle(operationResult: OperationResult): String { @@ -130,9 +131,10 @@ class OpenIndexRespParser( else -> "timed out to open" } - return if (request.indices().size == 1) + return if (request.indices().size == 1) { "$indexNameWithCluster has $result" - else + } else { "${request.indices().size} indexes from [${clusterService.clusterName.value()}] have $result" + } } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ReindexRespParser.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ReindexRespParser.kt index 3e47bb55e..a420ba3cd 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ReindexRespParser.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ReindexRespParser.kt @@ -6,34 +6,33 @@ package org.opensearch.indexmanagement.controlcenter.notification.filter.parser import org.opensearch.cluster.service.ClusterService +import org.opensearch.core.tasks.TaskId import org.opensearch.index.reindex.BulkByScrollResponse import org.opensearch.index.reindex.ReindexRequest import org.opensearch.indexmanagement.controlcenter.notification.filter.NotificationActionListener import org.opensearch.indexmanagement.controlcenter.notification.filter.OperationResult import org.opensearch.tasks.Task -import org.opensearch.core.tasks.TaskId import java.util.function.Consumer class ReindexRespParser( val task: Task, val request: ReindexRequest, - val clusterService: ClusterService + val clusterService: ClusterService, ) : ResponseParser { - private val sourceIndex = getIndexName(request, clusterService) override fun parseAndSendNotification( response: BulkByScrollResponse?, ex: Exception?, - callback: Consumer + callback: Consumer, ) { if (ex != null) { callback.accept( ActionRespParseResult( OperationResult.FAILED, buildNotificationMessage(null, ex), - buildNotificationTitle(OperationResult.FAILED) - ) + buildNotificationTitle(OperationResult.FAILED), + ), ) return } @@ -45,24 +44,24 @@ class ReindexRespParser( ActionRespParseResult( OperationResult.FAILED, buildNotificationMessage(response), - buildNotificationTitle(OperationResult.FAILED) - ) + buildNotificationTitle(OperationResult.FAILED), + ), ) } else if (!response.reasonCancelled.isNullOrEmpty()) { callback.accept( ActionRespParseResult( OperationResult.FAILED, buildNotificationMessage(response), - buildNotificationTitle(OperationResult.CANCELLED) - ) + buildNotificationTitle(OperationResult.CANCELLED), + ), ) } else { callback.accept( ActionRespParseResult( OperationResult.COMPLETE, buildNotificationMessage(response), - buildNotificationTitle(OperationResult.COMPLETE) - ) + buildNotificationTitle(OperationResult.COMPLETE), + ), ) } } @@ -70,11 +69,12 @@ class ReindexRespParser( override fun buildNotificationMessage( response: BulkByScrollResponse?, exception: Exception?, - isTimeout: Boolean + isTimeout: Boolean, ): String { - val result = StringBuilder( - "The reindex operation from $sourceIndex to ${getIndexName(request.destination, clusterService)} " - ) + val result = + StringBuilder( + "The reindex operation from $sourceIndex to ${getIndexName(request.destination, clusterService)} ", + ) if (exception != null) { result.append("${NotificationActionListener.FAILED} ${exception.message}") return result.toString() @@ -104,14 +104,14 @@ class ReindexRespParser( "\nTo see full errors, use `GET /_tasks/$taskId`" } else { 
NotificationActionListener.COMPLETED - } + }, ) append("\n\n") append("*Summary (number of documents)* \n") append( "Total: ${response.total}, Created: ${response.created}, " + "Updated: ${response.updated}, Deleted: ${response.deleted}, " + - "Conflicts: ${response.versionConflicts}" + "Conflicts: ${response.versionConflicts}", ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ResizeIndexRespParser.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ResizeIndexRespParser.kt index 6531f33cb..397180ef8 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ResizeIndexRespParser.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ResizeIndexRespParser.kt @@ -9,23 +9,22 @@ import org.apache.logging.log4j.LogManager import org.apache.logging.log4j.Logger import org.opensearch.ResourceAlreadyExistsException import org.opensearch.action.admin.indices.shrink.ResizeRequest +import org.opensearch.action.admin.indices.shrink.ResizeResponse import org.opensearch.action.support.ActiveShardCount import org.opensearch.action.support.ActiveShardsObserver import org.opensearch.cluster.service.ClusterService import org.opensearch.common.unit.TimeValue +import org.opensearch.index.IndexNotFoundException import org.opensearch.indexmanagement.controlcenter.notification.filter.NotificationActionListener import org.opensearch.indexmanagement.controlcenter.notification.filter.OperationResult -import java.util.function.Consumer -import org.opensearch.action.admin.indices.shrink.ResizeResponse -import org.opensearch.index.IndexNotFoundException import java.lang.IllegalStateException +import java.util.function.Consumer class ResizeIndexRespParser( val activeShardsObserver: ActiveShardsObserver, val request: ResizeRequest, val clusterService: ClusterService, ) : ResponseParser { - val logger: Logger = LogManager.getLogger(this::class.java) private var totalWaitTime: TimeValue = NotificationActionListener.MAX_WAIT_TIME private val indexWithCluster = getIndexName(request, clusterService) @@ -42,8 +41,8 @@ class ResizeIndexRespParser( ActionRespParseResult( OperationResult.FAILED, buildNotificationMessage(null, ex), - buildNotificationTitle(OperationResult.FAILED) - ) + buildNotificationTitle(OperationResult.FAILED), + ), ) return } @@ -69,8 +68,8 @@ class ResizeIndexRespParser( ActionRespParseResult( result, buildNotificationMessage(response, isTimeout = !shardsAcknowledged), - buildNotificationTitle(result) - ) + buildNotificationTitle(result), + ), ) }, { e: Exception -> @@ -79,18 +78,18 @@ class ResizeIndexRespParser( ActionRespParseResult( OperationResult.FAILED, buildNotificationMessage(response, e), - buildNotificationTitle(OperationResult.FAILED) - ) + buildNotificationTitle(OperationResult.FAILED), + ), ) - } + }, ) } else { callback.accept( ActionRespParseResult( OperationResult.TIMEOUT, buildNotificationMessage(response, isTimeout = true), - buildNotificationTitle(OperationResult.TIMEOUT) - ) + buildNotificationTitle(OperationResult.TIMEOUT), + ), ) } } else { @@ -98,8 +97,8 @@ class ResizeIndexRespParser( ActionRespParseResult( OperationResult.COMPLETE, buildNotificationMessage(response), - buildNotificationTitle(OperationResult.COMPLETE) - ) + buildNotificationTitle(OperationResult.COMPLETE), + ), ) } } @@ -113,8 +112,8 @@ class ResizeIndexRespParser( val action = request.resizeType.name.lowercase() result.append( "The 
$action operation from $indexWithCluster to ${ - getIndexName(request.targetIndexRequest, clusterService) - } " + getIndexName(request.targetIndexRequest, clusterService) + } ", ).append( if (isTimeout) { "has taken more than ${totalWaitTime.toHumanReadableString(1)} to complete. " + @@ -148,7 +147,7 @@ class ResizeIndexRespParser( } } else { NotificationActionListener.COMPLETED - } + }, ) return result.toString() } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ResponseParser.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ResponseParser.kt index d865e8faa..6644e3eca 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ResponseParser.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ResponseParser.kt @@ -6,9 +6,9 @@ package org.opensearch.indexmanagement.controlcenter.notification.filter.parser import org.opensearch.action.ActionRequest -import org.opensearch.core.action.ActionResponse import org.opensearch.action.IndicesRequest import org.opensearch.cluster.service.ClusterService +import org.opensearch.core.action.ActionResponse import org.opensearch.index.reindex.ReindexRequest import org.opensearch.indexmanagement.controlcenter.notification.filter.OperationResult import java.util.function.Consumer @@ -17,36 +17,38 @@ interface ResponseParser { fun parseAndSendNotification( response: Response?, ex: Exception? = null, - callback: Consumer + callback: Consumer, ) fun buildNotificationMessage( response: Response?, exception: Exception? = null, - isTimeout: Boolean = false + isTimeout: Boolean = false, ): String fun buildNotificationTitle( - operationResult: OperationResult + operationResult: OperationResult, ): String fun getIndexName(req: ActionRequest, clusterService: ClusterService): String? 
{ var clusterName = clusterService.clusterName.value() return when (req) { is IndicesRequest -> { - if (req.indices().size == 1) + if (req.indices().size == 1) { "[$clusterName/${req.indices().joinToString(",")}]" - else + } else { "[${req.indices().joinToString(",")}] from [$clusterName]" + } } is ReindexRequest -> { clusterName = if (req.remoteInfo != null) "remote cluster ${req.remoteInfo.host}" else clusterName - if (req.searchRequest.indices().size == 1) + if (req.searchRequest.indices().size == 1) { "[$clusterName/${req.searchRequest.indices().joinToString(",")}]" - else + } else { "[${req.searchRequest.indices().joinToString(",")}] from [$clusterName]" + } } else -> { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/model/LRONCondition.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/model/LRONCondition.kt index ebdf14039..c4954befb 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/model/LRONCondition.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/model/LRONCondition.kt @@ -19,9 +19,8 @@ import java.io.IOException data class LRONCondition( val success: Boolean = DEFAULT_ENABLED, - val failure: Boolean = DEFAULT_ENABLED + val failure: Boolean = DEFAULT_ENABLED, ) : ToXContentObject, Writeable { - fun toXContent(builder: XContentBuilder): XContentBuilder { return toXContent(builder, ToXContent.EMPTY_PARAMS) } @@ -36,7 +35,7 @@ data class LRONCondition( @Throws(IOException::class) constructor(sin: StreamInput) : this( success = sin.readBoolean(), - failure = sin.readBoolean() + failure = sin.readBoolean(), ) @Throws(IOException::class) @@ -62,7 +61,7 @@ data class LRONCondition( xcp: XContentParser, id: String = NO_ID, seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, - primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM + primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, ): LRONCondition { return parse(xcp) } @@ -88,7 +87,7 @@ data class LRONCondition( return LRONCondition( success = success, - failure = failure + failure = failure, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/model/LRONConfig.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/model/LRONConfig.kt index 8dab2978a..4b7973bb8 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/model/LRONConfig.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/model/LRONConfig.kt @@ -5,24 +5,24 @@ package org.opensearch.indexmanagement.controlcenter.notification.model +import org.opensearch.commons.authuser.User import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.tasks.TaskId import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentObject import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.commons.authuser.User import org.opensearch.index.seqno.SequenceNumbers -import org.opensearch.indexmanagement.controlcenter.notification.util.WITH_PRIORITY import org.opensearch.indexmanagement.common.model.notification.Channel +import org.opensearch.indexmanagement.controlcenter.notification.util.WITH_PRIORITY import 
org.opensearch.indexmanagement.controlcenter.notification.util.validateTaskIdAndActionName import org.opensearch.indexmanagement.indexstatemanagement.util.WITH_TYPE import org.opensearch.indexmanagement.indexstatemanagement.util.WITH_USER import org.opensearch.indexmanagement.opensearchapi.optionalUserField import org.opensearch.indexmanagement.util.NO_ID -import org.opensearch.core.tasks.TaskId import java.io.IOException data class LRONConfig( @@ -31,7 +31,7 @@ data class LRONConfig( val actionName: String?, val channels: List?, val user: User?, - val priority: Int? + val priority: Int?, ) : ToXContentObject, Writeable { init { validateTaskIdAndActionName(taskId, actionName) @@ -64,15 +64,21 @@ data class LRONConfig( @Throws(IOException::class) constructor(sin: StreamInput) : this( lronCondition = LRONCondition(sin), - taskId = if (sin.readBoolean()) { + taskId = + if (sin.readBoolean()) { TaskId(sin.readString()) - } else null, + } else { + null + }, actionName = sin.readOptionalString(), - channels = if (sin.readBoolean()) { + channels = + if (sin.readBoolean()) { sin.readList(::Channel) - } else null, + } else { + null + }, user = sin.readOptionalWriteable(::User), - priority = sin.readOptionalInt() + priority = sin.readOptionalInt(), ) @Throws(IOException::class) @@ -81,12 +87,16 @@ data class LRONConfig( if (null != taskId) { out.writeBoolean(true) out.writeString(taskId.toString()) - } else out.writeBoolean(false) + } else { + out.writeBoolean(false) + } out.writeOptionalString(actionName) if (null != channels) { out.writeBoolean(true) out.writeList(channels) - } else out.writeBoolean(false) + } else { + out.writeBoolean(false) + } out.writeOptionalWriteable(user) out.writeOptionalInt(priority) } @@ -101,7 +111,7 @@ data class LRONConfig( const val PRIORITY_FIELD = "priority" const val CHANNEL_TITLE = "Long Running Operation Notification" - /* to fit with ISM XContentParser.parseWithType function */ + // to fit with ISM XContentParser.parseWithType function @JvmStatic @Throws(IOException::class) @Suppress("UNUSED_PARAMETER") @@ -109,7 +119,7 @@ data class LRONConfig( xcp: XContentParser, id: String = NO_ID, seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, - primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM + primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, ): LRONConfig { return parse(xcp) } @@ -159,7 +169,7 @@ data class LRONConfig( actionName = actionName, channels = channels, user = user, - priority = priority + priority = priority, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestDeleteLRONConfigAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestDeleteLRONConfigAction.kt index f24e243de..b8eef8e1c 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestDeleteLRONConfigAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestDeleteLRONConfigAction.kt @@ -18,7 +18,7 @@ import java.io.IOException class RestDeleteLRONConfigAction : BaseRestHandler() { override fun routes(): List { return listOf( - RestHandler.Route(RestRequest.Method.DELETE, "${IndexManagementPlugin.LRON_BASE_URI}/{id}") + RestHandler.Route(RestRequest.Method.DELETE, "${IndexManagementPlugin.LRON_BASE_URI}/{id}"), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestGetLRONConfigAction.kt 
b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestGetLRONConfigAction.kt index e38825aae..514c96b7e 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestGetLRONConfigAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestGetLRONConfigAction.kt @@ -9,11 +9,11 @@ import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.controlcenter.notification.action.get.GetLRONConfigAction import org.opensearch.indexmanagement.controlcenter.notification.action.get.GetLRONConfigRequest import org.opensearch.indexmanagement.controlcenter.notification.util.DEFAULT_LRON_CONFIG_SORT_FIELD +import org.opensearch.indexmanagement.util.getSearchParams import org.opensearch.rest.BaseRestHandler import org.opensearch.rest.BaseRestHandler.RestChannelConsumer -import org.opensearch.rest.RestRequest -import org.opensearch.indexmanagement.util.getSearchParams import org.opensearch.rest.RestHandler +import org.opensearch.rest.RestRequest import org.opensearch.rest.action.RestToXContentListener import java.io.IOException @@ -21,7 +21,7 @@ class RestGetLRONConfigAction : BaseRestHandler() { override fun routes(): List { return listOf( RestHandler.Route(RestRequest.Method.GET, IndexManagementPlugin.LRON_BASE_URI), - RestHandler.Route(RestRequest.Method.GET, "${IndexManagementPlugin.LRON_BASE_URI}/{id}") + RestHandler.Route(RestRequest.Method.GET, "${IndexManagementPlugin.LRON_BASE_URI}/{id}"), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestIndexLRONConfigAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestIndexLRONConfigAction.kt index b7e0588be..cac76cb9b 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestIndexLRONConfigAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestIndexLRONConfigAction.kt @@ -6,10 +6,10 @@ package org.opensearch.indexmanagement.controlcenter.notification.resthandler import org.opensearch.client.node.NodeClient +import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.controlcenter.notification.action.index.IndexLRONConfigAction import org.opensearch.indexmanagement.controlcenter.notification.action.index.IndexLRONConfigRequest import org.opensearch.indexmanagement.controlcenter.notification.model.LRONConfig -import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.controlcenter.notification.util.getDocID import org.opensearch.indexmanagement.opensearchapi.parseWithType import org.opensearch.indexmanagement.util.DRY_RUN @@ -21,11 +21,10 @@ import org.opensearch.rest.action.RestToXContentListener import java.io.IOException class RestIndexLRONConfigAction : BaseRestHandler() { - override fun routes(): List { return listOf( RestHandler.Route(RestRequest.Method.POST, IndexManagementPlugin.LRON_BASE_URI), - RestHandler.Route(RestRequest.Method.PUT, "${IndexManagementPlugin.LRON_BASE_URI}/{id}") + RestHandler.Route(RestRequest.Method.PUT, "${IndexManagementPlugin.LRON_BASE_URI}/{id}"), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/util/LRONUtils.kt b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/util/LRONUtils.kt index dfcf81398..db06bc07a 100644 --- 
a/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/util/LRONUtils.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/controlcenter/notification/util/LRONUtils.kt @@ -4,24 +4,25 @@ */ @file:JvmName("LRONUtils") + package org.opensearch.indexmanagement.controlcenter.notification.util import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.indices.forcemerge.ForceMergeAction import org.opensearch.action.admin.indices.open.OpenIndexAction import org.opensearch.action.admin.indices.shrink.ResizeAction import org.opensearch.action.get.GetRequest import org.opensearch.action.get.GetResponse import org.opensearch.client.node.NodeClient +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.tasks.TaskId import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.index.reindex.ReindexAction import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.controlcenter.notification.LRONConfigResponse import org.opensearch.indexmanagement.controlcenter.notification.model.LRONConfig import org.opensearch.indexmanagement.opensearchapi.parseFromGetResponse -import org.opensearch.core.rest.RestStatus -import org.opensearch.core.tasks.TaskId const val LRON_DOC_ID_PREFIX = "LRON:" @@ -30,12 +31,13 @@ const val PRIORITY_TASK_ID = 200 const val PRIORITY_DEFAULT_ACTION = 100 const val DEFAULT_LRON_CONFIG_SORT_FIELD = "lron_config.priority" -val supportedActions = setOf( - ReindexAction.NAME, - ResizeAction.NAME, - ForceMergeAction.NAME, - OpenIndexAction.NAME -) +val supportedActions = + setOf( + ReindexAction.NAME, + ResizeAction.NAME, + ForceMergeAction.NAME, + OpenIndexAction.NAME, + ) fun validateTaskIdAndActionName(taskId: TaskId?, actionName: String?) { require(null != actionName || null != taskId) { "LRONConfig must contain taskID or actionName" } @@ -69,7 +71,7 @@ fun getLRONConfigAndParse( client: NodeClient, docId: String, xContentRegistry: NamedXContentRegistry, - actionListener: ActionListener + actionListener: ActionListener, ) { val getRequest = GetRequest(IndexManagementPlugin.CONTROL_CENTER_INDEX, docId) client.get( @@ -80,8 +82,8 @@ fun getLRONConfigAndParse( actionListener.onFailure( OpenSearchStatusException( "lronConfig $docId not found", - RestStatus.NOT_FOUND - ) + RestStatus.NOT_FOUND, + ), ) return } @@ -97,14 +99,14 @@ fun getLRONConfigAndParse( actionListener.onResponse( LRONConfigResponse( id = response.id, - lronConfig = lronConfig - ) + lronConfig = lronConfig, + ), ) } override fun onFailure(t: Exception) { actionListener.onFailure(t) } - } + }, ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/DefaultIndexMetadataService.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/DefaultIndexMetadataService.kt index fdaa3f1be..1e9b06950 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/DefaultIndexMetadataService.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/DefaultIndexMetadataService.kt @@ -17,7 +17,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.IndexMetadataServ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ISMIndexMetadata class DefaultIndexMetadataService(private val customUUIDSetting: String? 
= null) : IndexMetadataService { - /** * Returns the default index metadata needed for ISM */ @@ -27,13 +26,14 @@ class DefaultIndexMetadataService(private val customUUIDSetting: String? = null) // We want to go through all cluster indices - open/closed/hidden val lenientExpandOptions = IndicesOptions.lenientExpandHidden() - val clusterStateRequest = ClusterStateRequest() - .clear() - .indices(*indices.toTypedArray()) - .metadata(true) - .local(false) - .waitForTimeout(TimeValue.timeValueMillis(DEFAULT_GET_METADATA_TIMEOUT_IN_MILLIS)) - .indicesOptions(lenientExpandOptions) + val clusterStateRequest = + ClusterStateRequest() + .clear() + .indices(*indices.toTypedArray()) + .metadata(true) + .local(false) + .waitForTimeout(TimeValue.timeValueMillis(DEFAULT_GET_METADATA_TIMEOUT_IN_MILLIS)) + .indicesOptions(lenientExpandOptions) val response: ClusterStateResponse = client.suspendUntil { client.admin().cluster().state(clusterStateRequest, it) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ExtensionStatusChecker.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ExtensionStatusChecker.kt index aa1d681de..d2a7d37ca 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ExtensionStatusChecker.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ExtensionStatusChecker.kt @@ -14,7 +14,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.StatusChecker * not as a health check denoting availability. */ class ExtensionStatusChecker(private val extensionCheckers: Map, val clusterService: ClusterService) { - fun isEnabled(extensionName: String?): Boolean { val checker = extensionCheckers[extensionName] ?: return true val clusterState = clusterService.state() diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ISMActionsParser.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ISMActionsParser.kt index a45cfddae..a4c95b461 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ISMActionsParser.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ISMActionsParser.kt @@ -30,29 +30,29 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ActionRetry import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ActionTimeout class ISMActionsParser private constructor() { - private object HOLDER { val instance = ISMActionsParser() } - val parsers = mutableListOf( - AliasActionParser(), - AllocationActionParser(), - CloseActionParser(), - DeleteActionParser(), - ForceMergeActionParser(), - IndexPriorityActionParser(), - NotificationActionParser(), - OpenActionParser(), - ReadOnlyActionParser(), - ReadWriteActionParser(), - ReplicaCountActionParser(), - RollupActionParser(), - RolloverActionParser(), - ShrinkActionParser(), - SnapshotActionParser(), - TransformActionParser(), - ) + val parsers = + mutableListOf( + AliasActionParser(), + AllocationActionParser(), + CloseActionParser(), + DeleteActionParser(), + ForceMergeActionParser(), + IndexPriorityActionParser(), + NotificationActionParser(), + OpenActionParser(), + ReadOnlyActionParser(), + ReadWriteActionParser(), + ReplicaCountActionParser(), + RollupActionParser(), + RolloverActionParser(), + ShrinkActionParser(), + SnapshotActionParser(), + TransformActionParser(), + ) val customActionExtensionMap = mutableMapOf() @@ -132,6 +132,7 @@ class ISMActionsParser private constructor() { companion object { val 
instance: ISMActionsParser by lazy { HOLDER.instance } + fun getDuplicateActionTypesMessage(actionType: String) = "Multiple action parsers attempted to register the same action type [$actionType]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ISMTemplateService.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ISMTemplateService.kt index 0cb0a7b84..2f102a2ba 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ISMTemplateService.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ISMTemplateService.kt @@ -7,9 +7,9 @@ package org.opensearch.indexmanagement.indexstatemanagement import org.apache.lucene.util.automaton.Operations import org.opensearch.OpenSearchException -import org.opensearch.core.common.Strings import org.opensearch.common.ValidationException import org.opensearch.common.regex.Regex +import org.opensearch.core.common.Strings import org.opensearch.indexmanagement.indexstatemanagement.model.ISMTemplate import org.opensearch.indexmanagement.util.IndexManagementException @@ -35,7 +35,7 @@ fun validateFormat(indexPatterns: List): OpenSearchException? { if (!Strings.validFileNameExcludingAstrix(indexPattern)) { indexPatternFormatErrors.add( "index_pattern [" + indexPattern + "] must not contain the following characters " + - Strings.INVALID_FILENAME_CHARS + Strings.INVALID_FILENAME_CHARS, ) } } @@ -90,7 +90,7 @@ fun overlapping(p1: List, p2: List): Boolean { fun Map>.findConflictingPolicyTemplates( candidate: String, indexPatterns: List, - priority: Int + priority: Int, ): Map> { val overlappingTemplates = mutableMapOf>() diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/IndexMetadataProvider.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/IndexMetadataProvider.kt index 471742c87..5d928127d 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/IndexMetadataProvider.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/IndexMetadataProvider.kt @@ -27,7 +27,6 @@ class IndexMetadataProvider( val clusterService: ClusterService, val services: MutableMap, ) { - @Volatile private var restrictedIndexPattern = ManagedIndexSettings.RESTRICTED_INDEX_PATTERN.get(settings) init { @@ -57,17 +56,18 @@ class IndexMetadataProvider( */ suspend fun getMultiTypeISMIndexMetadata( types: List = services.keys.toList(), - indexNames: List - ): Map> = coroutineScope { - if (types.any { it != DEFAULT_INDEX_TYPE } && indexNames.size > 1) throw IllegalArgumentException(MULTIPLE_INDICES_CUSTOM_INDEX_TYPE_ERROR) - val requests = ArrayList>>>() - // Start all index metadata requests at the same time - types.forEach { type -> - requests.add(async { type to getISMIndexMetadataByType(type, indexNames) }) + indexNames: List, + ): Map> = + coroutineScope { + if (types.any { it != DEFAULT_INDEX_TYPE } && indexNames.size > 1) throw IllegalArgumentException(MULTIPLE_INDICES_CUSTOM_INDEX_TYPE_ERROR) + val requests = ArrayList>>>() + // Start all index metadata requests at the same time + types.forEach { type -> + requests.add(async { type to getISMIndexMetadataByType(type, indexNames) }) + } + // Wait for all index metadata responses, and return + requests.awaitAll().toMap() } - // Wait for all index metadata responses, and return - requests.awaitAll().toMap() - } fun addMetadataServices(newServices: Map) { val duplicateIndexType = newServices.keys.firstOrNull { services.containsKey(it) } @@ -77,17 
+77,18 @@ class IndexMetadataProvider( services.putAll(newServices) } - suspend fun getAllISMIndexMetadata(): Set = coroutineScope { - val metadata = mutableSetOf() - val requests = ArrayList>>() - services.forEach { (_, service) -> - requests.add(async { service.getMetadataForAllIndices(client, clusterService) }) - } + suspend fun getAllISMIndexMetadata(): Set = + coroutineScope { + val metadata = mutableSetOf() + val requests = ArrayList>>() + services.forEach { (_, service) -> + requests.add(async { service.getMetadataForAllIndices(client, clusterService) }) + } - requests.awaitAll().forEach { metadata.addAll(it.values) } + requests.awaitAll().forEach { metadata.addAll(it.values) } - metadata - } + metadata + } fun getIndexMetadataWriteOverrideSettings(): List { return services.values.mapNotNull { it.getIndexMetadataWriteOverrideSetting() } @@ -96,7 +97,9 @@ class IndexMetadataProvider( companion object { const val EVALUATION_FAILURE_MESSAGE = "Matches restricted index pattern defined in the cluster setting" const val MULTIPLE_INDICES_CUSTOM_INDEX_TYPE_ERROR = "Cannot get metadata for more than one index name/pattern when using a custom index type" + fun getTypeNotRecognizedMessage(indexType: String) = "Index type [type=$indexType] was not recognized when trying to get index metadata" + fun getDuplicateServicesMessage(indexType: String) = "Multiple metadata services attempted to assign a service to the index type [$indexType]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/IndexStateManagementHistory.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/IndexStateManagementHistory.kt index a3e0fd313..b281f6811 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/IndexStateManagementHistory.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/IndexStateManagementHistory.kt @@ -6,7 +6,6 @@ package org.opensearch.indexmanagement.indexstatemanagement import org.apache.logging.log4j.LogManager -import org.opensearch.core.action.ActionListener import org.opensearch.action.DocWriteRequest import org.opensearch.action.admin.cluster.state.ClusterStateRequest import org.opensearch.action.admin.cluster.state.ClusterStateResponse @@ -22,8 +21,9 @@ import org.opensearch.client.Client import org.opensearch.cluster.LocalNodeClusterManagerListener import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings -import org.opensearch.core.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory +import org.opensearch.core.action.ActionListener +import org.opensearch.core.xcontent.ToXContent import org.opensearch.indexmanagement.IndexManagementIndices import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings @@ -46,18 +46,23 @@ class IndexStateManagementHistory( private val client: Client, private val threadPool: ThreadPool, private val clusterService: ClusterService, - private val indexManagementIndices: IndexManagementIndices + private val indexManagementIndices: IndexManagementIndices, ) : LocalNodeClusterManagerListener { - private val logger = LogManager.getLogger(javaClass) private var scheduledRollover: Scheduler.Cancellable? 
= null @Volatile private var historyEnabled = ManagedIndexSettings.HISTORY_ENABLED.get(settings) + @Volatile private var historyMaxDocs = ManagedIndexSettings.HISTORY_MAX_DOCS.get(settings) + @Volatile private var historyMaxAge = ManagedIndexSettings.HISTORY_INDEX_MAX_AGE.get(settings) + @Volatile private var historyRolloverCheckPeriod = ManagedIndexSettings.HISTORY_ROLLOVER_CHECK_PERIOD.get(settings) + @Volatile private var historyRetentionPeriod = ManagedIndexSettings.HISTORY_RETENTION_PERIOD.get(settings) + @Volatile private var historyNumberOfShards = ManagedIndexSettings.HISTORY_NUMBER_OF_SHARDS.get(settings) + @Volatile private var historyNumberOfReplicas = ManagedIndexSettings.HISTORY_NUMBER_OF_REPLICAS.get(settings) init { @@ -87,10 +92,11 @@ class IndexStateManagementHistory( // try to rollover immediately as we might be restarting the cluster if (historyEnabled) rolloverHistoryIndex() // schedule the next rollover for approx MAX_AGE later - scheduledRollover = threadPool.scheduleWithFixedDelay( - { rolloverAndDeleteHistoryIndex() }, - historyRolloverCheckPeriod, ThreadPool.Names.MANAGEMENT - ) + scheduledRollover = + threadPool.scheduleWithFixedDelay( + { rolloverAndDeleteHistoryIndex() }, + historyRolloverCheckPeriod, ThreadPool.Names.MANAGEMENT, + ) } catch (e: Exception) { // This should be run on cluster startup logger.error("Error creating ISM history index.", e) @@ -104,10 +110,11 @@ class IndexStateManagementHistory( private fun rescheduleRollover() { if (clusterService.state().nodes.isLocalNodeElectedClusterManager) { scheduledRollover?.cancel() - scheduledRollover = threadPool.scheduleWithFixedDelay( - { rolloverAndDeleteHistoryIndex() }, - historyRolloverCheckPeriod, ThreadPool.Names.MANAGEMENT - ) + scheduledRollover = + threadPool.scheduleWithFixedDelay( + { rolloverAndDeleteHistoryIndex() }, + historyRolloverCheckPeriod, ThreadPool.Names.MANAGEMENT, + ) } } @@ -137,7 +144,7 @@ class IndexStateManagementHistory( Settings.builder() .put(INDEX_HIDDEN, true) .put(INDEX_NUMBER_OF_SHARDS, historyNumberOfShards) - .put(INDEX_NUMBER_OF_REPLICAS, historyNumberOfReplicas) + .put(INDEX_NUMBER_OF_REPLICAS, historyNumberOfReplicas), ) request.addMaxIndexDocsCondition(historyMaxDocs) request.addMaxIndexAgeCondition(historyMaxAge) @@ -150,7 +157,7 @@ class IndexStateManagementHistory( } else { logger.info( "${IndexManagementIndices.HISTORY_WRITE_INDEX_ALIAS} not rolled over. 
" + - "Conditions were: ${response.conditionStatus}" + "Conditions were: ${response.conditionStatus}", ) } } @@ -158,19 +165,19 @@ class IndexStateManagementHistory( override fun onFailure(e: Exception) { logger.error("${IndexManagementIndices.HISTORY_WRITE_INDEX_ALIAS} roll over failed.", e) } - } + }, ) } @Suppress("SpreadOperator", "NestedBlockDepth", "ComplexMethod") private fun deleteOldHistoryIndex() { - - val clusterStateRequest = ClusterStateRequest() - .clear() - .indices(IndexManagementIndices.HISTORY_ALL) - .metadata(true) - .local(true) - .indicesOptions(IndicesOptions.strictExpand()) + val clusterStateRequest = + ClusterStateRequest() + .clear() + .indices(IndexManagementIndices.HISTORY_ALL) + .metadata(true) + .local(true) + .indicesOptions(IndicesOptions.strictExpand()) client.admin().cluster().state( clusterStateRequest, @@ -188,7 +195,7 @@ class IndexStateManagementHistory( override fun onFailure(exception: Exception) { logger.error("Error fetching cluster state ${exception.message}") } - } + }, ) } @@ -199,10 +206,11 @@ class IndexStateManagementHistory( val creationTime = indexMetaData.creationDate if ((Instant.now().toEpochMilli() - creationTime) > historyRetentionPeriod.millis) { - val alias = indexMetaData.aliases.firstNotNullOfOrNull { - alias -> - IndexManagementIndices.HISTORY_WRITE_INDEX_ALIAS == alias.value.alias - } + val alias = + indexMetaData.aliases.firstNotNullOfOrNull { + alias -> + IndexManagementIndices.HISTORY_WRITE_INDEX_ALIAS == alias.value.alias + } if (alias != null && historyEnabled) { // If index has write alias and history is enable, don't delete the index. continue @@ -227,11 +235,12 @@ class IndexStateManagementHistory( deleteOldHistoryIndex(indicesToDelete) } } + override fun onFailure(exception: Exception) { logger.error("Error deleting old history indices ${exception.message}") deleteOldHistoryIndex(indicesToDelete) } - } + }, ) } } @@ -248,10 +257,11 @@ class IndexStateManagementHistory( logger.error("could not delete one or more ISM history index. 
$index.") } } + override fun onFailure(exception: Exception) { logger.debug("Exception ${exception.message} while deleting the index $index") } - } + }, ) } } @@ -268,9 +278,10 @@ class IndexStateManagementHistory( return // we can't continue to add the history documents below as it would potentially create dynamic mappings } - val docWriteRequest: List> = managedIndexMetaData - .filter { shouldAddManagedIndexMetaDataToHistory(it) } - .map { createManagedIndexMetaDataHistoryIndexRequest(it) } + val docWriteRequest: List> = + managedIndexMetaData + .filter { shouldAddManagedIndexMetaDataToHistory(it) } + .map { createManagedIndexMetaDataHistoryIndexRequest(it) } if (docWriteRequest.isNotEmpty()) { val bulkRequest = BulkRequest().add(docWriteRequest) @@ -298,9 +309,10 @@ class IndexStateManagementHistory( } private fun createManagedIndexMetaDataHistoryIndexRequest(managedIndexMetaData: ManagedIndexMetaData): IndexRequest { - val builder = XContentFactory.jsonBuilder() - .startObject() - .startObject(IndexManagementPlugin.INDEX_STATE_MANAGEMENT_HISTORY_TYPE) + val builder = + XContentFactory.jsonBuilder() + .startObject() + .startObject(IndexManagementPlugin.INDEX_STATE_MANAGEMENT_HISTORY_TYPE) managedIndexMetaData.toXContent(builder, ToXContent.EMPTY_PARAMS) builder .field("history_timestamp", Instant.now().toEpochMilli()) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ManagedIndexCoordinator.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ManagedIndexCoordinator.kt index 341ccf62b..07c0d7306 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ManagedIndexCoordinator.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ManagedIndexCoordinator.kt @@ -42,8 +42,10 @@ import org.opensearch.common.regex.Regex import org.opensearch.common.settings.Settings import org.opensearch.common.unit.TimeValue import org.opensearch.commons.authuser.User -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.index.Index +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.index.IndexNotFoundException import org.opensearch.index.query.QueryBuilders import org.opensearch.indexmanagement.IndexManagementIndices import org.opensearch.indexmanagement.IndexManagementPlugin @@ -82,8 +84,6 @@ import org.opensearch.indexmanagement.opensearchapi.withClosableContext import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ISMIndexMetadata import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData import org.opensearch.indexmanagement.util.OpenForTesting -import org.opensearch.core.rest.RestStatus -import org.opensearch.index.IndexNotFoundException import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.threadpool.Scheduler import org.opensearch.threadpool.ThreadPool @@ -112,25 +112,30 @@ class ManagedIndexCoordinator( private val threadPool: ThreadPool, indexManagementIndices: IndexManagementIndices, private val indexMetadataProvider: IndexMetadataProvider, - private val xContentRegistry: NamedXContentRegistry + private val xContentRegistry: NamedXContentRegistry, ) : ClusterStateListener, CoroutineScope by CoroutineScope(SupervisorJob() + Dispatchers.Default + CoroutineName("ManagedIndexCoordinator")), LifecycleListener() { - private val logger = LogManager.getLogger(javaClass) private val ismIndices = indexManagementIndices private var 
scheduledFullSweep: Scheduler.Cancellable? = null @Volatile private var lastFullSweepTimeNano = System.nanoTime() + @Volatile private var indexStateManagementEnabled = INDEX_STATE_MANAGEMENT_ENABLED.get(settings) + @Volatile private var sweepPeriod = SWEEP_PERIOD.get(settings) + @Volatile private var retryPolicy = BackoffPolicy.constantBackoff(COORDINATOR_BACKOFF_MILLIS.get(settings), COORDINATOR_BACKOFF_COUNT.get(settings)) + @Volatile private var jobInterval = JOB_INTERVAL.get(settings) + @Volatile private var jobJitter = JITTER.get(settings) @Volatile private var isClusterManager = false + @Volatile private var onClusterManagerTimeStamp: Long = 0L init { @@ -233,6 +238,7 @@ class ManagedIndexCoordinator( * 2. Does not have a completed Policy * 3. Does not have a failed Policy */ + // If IM config doesn't exist skip if (!ismIndices.indexManagementIndexExists()) return val currentManagedIndexUuids = sweepManagedIndexJobs(client) @@ -266,9 +272,10 @@ class ManagedIndexCoordinator( if (event.indicesDeleted().isNotEmpty()) { val managedIndices = getManagedIndices(event.indicesDeleted().map { it.uuid }) val deletedIndices = event.indicesDeleted().map { it.name } - val allIndicesUuid = indexMetadataProvider.getMultiTypeISMIndexMetadata(indexNames = deletedIndices).map { (_, metadataMapForType) -> - metadataMapForType.values.map { it.indexUuid } - }.flatten().toSet() + val allIndicesUuid = + indexMetadataProvider.getMultiTypeISMIndexMetadata(indexNames = deletedIndices).map { (_, metadataMapForType) -> + metadataMapForType.values.map { it.indexUuid } + }.flatten().toSet() // Check if the deleted index uuid is still part of any metadata service in the cluster and has an existing managed index job indicesToClean = event.indicesDeleted().filter { it.uuid in managedIndices.keys && !allIndicesUuid.contains(it.uuid) } removeManagedIndexReq = indicesToClean.map { deleteManagedIndexRequest(it.uuid) } @@ -287,7 +294,7 @@ class ManagedIndexCoordinator( @Suppress("NestedBlockDepth", "ComplexCondition") private suspend fun createManagedIndexRequests( clusterState: ClusterState, - indexNames: List + indexNames: List, ): List> { val updateManagedIndexReqs = mutableListOf>() if (indexNames.isEmpty()) return updateManagedIndexReqs @@ -318,8 +325,8 @@ class ManagedIndexCoordinator( policy.id, jobInterval, policy, - jobJitter - ) + jobJitter, + ), ) } } @@ -331,11 +338,12 @@ class ManagedIndexCoordinator( private fun findIndexLookupName(indexName: String, clusterState: ClusterState): String? 
{ if (clusterState.metadata.hasIndex(indexName)) { val indexMetadata = clusterState.metadata.index(indexName) - val autoManage = if (AUTO_MANAGE.get(indexMetadata.settings)) { - true - } else { - LegacyOpenDistroManagedIndexSettings.AUTO_MANAGE.get(indexMetadata.settings) - } + val autoManage = + if (AUTO_MANAGE.get(indexMetadata.settings)) { + true + } else { + LegacyOpenDistroManagedIndexSettings.AUTO_MANAGE.get(indexMetadata.settings) + } if (autoManage) { val isHiddenIndex = IndexMetadata.INDEX_HIDDEN_SETTING.get(indexMetadata.settings) || indexName.startsWith(".") @@ -413,14 +421,15 @@ class ManagedIndexCoordinator( private suspend fun getPoliciesWithISMTemplates(): List { val errorMessage = "Failed to get ISM policies with templates" - val searchRequest = SearchRequest() - .source( - SearchSourceBuilder().query( - QueryBuilders.existsQuery(ISM_TEMPLATE_FIELD) - ).size(MAX_HITS).seqNoAndPrimaryTerm(true) - ) - .indices(INDEX_MANAGEMENT_INDEX) - .preference(Preference.PRIMARY_FIRST.type()) + val searchRequest = + SearchRequest() + .source( + SearchSourceBuilder().query( + QueryBuilders.existsQuery(ISM_TEMPLATE_FIELD), + ).size(MAX_HITS).seqNoAndPrimaryTerm(true), + ) + .indices(INDEX_MANAGEMENT_INDEX) + .preference(Preference.PRIMARY_FIRST.type()) return try { val response: SearchResponse = client.suspendUntil { search(searchRequest, it) } @@ -457,24 +466,25 @@ class ManagedIndexCoordinator( scheduledFullSweep?.cancel() // Set up an anti-entropy/self-healing background sweep, in case we fail to create a ManagedIndexConfig job - val scheduledSweep = Runnable { - val elapsedTime = getFullSweepElapsedTime() - - // Rate limit to at most one full sweep per sweep period - // The schedule runs may wake up a few milliseconds early - // Delta will be giving some buffer on the schedule to allow waking up slightly earlier - val delta = sweepPeriod.millis - elapsedTime.millis - if (delta < BUFFER) { // give 20ms buffer. - launch { - try { - logger.debug("Performing background sweep of managed indices") - sweep() - } catch (e: Exception) { - logger.error("Failed to sweep managed indices", e) + val scheduledSweep = + Runnable { + val elapsedTime = getFullSweepElapsedTime() + + // Rate limit to at most one full sweep per sweep period + // The schedule runs may wake up a few milliseconds early + // Delta will be giving some buffer on the schedule to allow waking up slightly earlier + val delta = sweepPeriod.millis - elapsedTime.millis + if (delta < BUFFER) { // give 20ms buffer. 
+ launch { + try { + logger.debug("Performing background sweep of managed indices") + sweep() + } catch (e: Exception) { + logger.error("Failed to sweep managed indices", e) + } } } } - } scheduledFullSweep = threadPool.scheduleWithFixedDelay(scheduledSweep, sweepPeriod, executorName()) } @@ -501,9 +511,10 @@ class ManagedIndexCoordinator( val unManagedIndices = getUnManagedIndices(currentIndices, currentManagedIndexUuids.toHashSet()) // Get the matching policyIds for applicable indices - val updateMatchingIndicesReqs = createManagedIndexRequests( - clusterService.state(), unManagedIndices.map { (indexName, _) -> indexName } - ) + val updateMatchingIndicesReqs = + createManagedIndexRequests( + clusterService.state(), unManagedIndices.map { (indexName, _) -> indexName }, + ) // check all managed indices, if the index has already been deleted val allIndicesUuids = indexMetadataProvider.getAllISMIndexMetadata().map { it.indexUuid } @@ -576,8 +587,9 @@ class ManagedIndexCoordinator( fun transformManagedIndexSearchRes(response: SearchResponse): List { if (response.isTimedOut || response.failedShards > 0 || response.skippedShards > 0) { - val errorMsg = "Sweep managed indices failed. Timed out: ${response.isTimedOut} | " + - "Failed shards: ${response.failedShards} | Skipped shards: ${response.skippedShards}." + val errorMsg = + "Sweep managed indices failed. Timed out: ${response.isTimedOut} | " + + "Failed shards: ${response.failedShards} | Skipped shards: ${response.skippedShards}." logger.error(errorMsg) throw ISMCoordinatorSearchException(message = errorMsg) } @@ -605,9 +617,10 @@ class ManagedIndexCoordinator( } mRes.forEach { if (it.response.isExists) { - result[it.id] = contentParser(it.response.sourceAsBytesRef, xContentRegistry).parseWithType( - it.response.id, it.response.seqNo, it.response.primaryTerm, ManagedIndexConfig.Companion::parse - ) + result[it.id] = + contentParser(it.response.sourceAsBytesRef, xContentRegistry).parseWithType( + it.response.id, it.response.seqNo, it.response.primaryTerm, ManagedIndexConfig.Companion::parse, + ) } } return result @@ -622,8 +635,9 @@ class ManagedIndexCoordinator( val bulkRequest = BulkRequest().add(requestsToRetry) val bulkResponse: BulkResponse = client.suspendUntil { bulk(bulkRequest, it) } val failedResponses = (bulkResponse.items ?: arrayOf()).filter { it.isFailed } - requestsToRetry = failedResponses.filter { it.status() == RestStatus.TOO_MANY_REQUESTS } - .map { bulkRequest.requests()[it.itemId] } + requestsToRetry = + failedResponses.filter { it.status() == RestStatus.TOO_MANY_REQUESTS } + .map { bulkRequest.requests()[it.itemId] } if (requestsToRetry.isNotEmpty()) { val retryCause = failedResponses.first { it.status() == RestStatus.TOO_MANY_REQUESTS }.failure.cause @@ -647,10 +661,12 @@ class ManagedIndexCoordinator( val bulkRequest = BulkRequest().add(deleteRequests) val bulkResponse: BulkResponse = client.suspendUntil { bulk(bulkRequest, it) } bulkResponse.forEach { - if (it.isFailed) logger.error( - "Failed to clear ManagedIndexMetadata for " + - "index uuid: [${it.id}], failureMessage: ${it.failureMessage}" - ) + if (it.isFailed) { + logger.error( + "Failed to clear ManagedIndexMetadata for " + + "index uuid: [${it.id}], failureMessage: ${it.failureMessage}", + ) + } } } } catch (e: Exception) { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ManagedIndexRunner.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ManagedIndexRunner.kt index e3bf7e4aa..ab7ed58d4 100644 --- 
a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ManagedIndexRunner.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ManagedIndexRunner.kt @@ -31,13 +31,14 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings import org.opensearch.common.unit.TimeValue import org.opensearch.common.xcontent.LoggingDeprecationHandler -import org.opensearch.core.xcontent.NamedXContentRegistry -import org.opensearch.core.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentHelper +import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.XContentParser.Token import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.common.xcontent.XContentType import org.opensearch.index.engine.VersionConflictEngineException import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.IndexManagementIndices @@ -47,20 +48,21 @@ import org.opensearch.indexmanagement.indexstatemanagement.model.ErrorNotificati import org.opensearch.indexmanagement.indexstatemanagement.model.ManagedIndexConfig import org.opensearch.indexmanagement.indexstatemanagement.model.Policy import org.opensearch.indexmanagement.indexstatemanagement.opensearchapi.getManagedIndexMetadata +import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.ACTION_VALIDATION_ENABLED import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.ALLOW_LIST import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.ALLOW_LIST_NONE +import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.DEFAULT_ACTION_VALIDATION_ENABLED import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.DEFAULT_ISM_ENABLED import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.DEFAULT_JOB_INTERVAL -import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.DEFAULT_ACTION_VALIDATION_ENABLED import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.INDEX_STATE_MANAGEMENT_ENABLED import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.JOB_INTERVAL -import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.ACTION_VALIDATION_ENABLED import org.opensearch.indexmanagement.indexstatemanagement.util.DEFAULT_INDEX_TYPE import org.opensearch.indexmanagement.indexstatemanagement.util.deleteManagedIndexMetadataRequest import org.opensearch.indexmanagement.indexstatemanagement.util.deleteManagedIndexRequest import org.opensearch.indexmanagement.indexstatemanagement.util.getCompletedManagedIndexMetaData import org.opensearch.indexmanagement.indexstatemanagement.util.getStartingManagedIndexMetaData import org.opensearch.indexmanagement.indexstatemanagement.util.hasDifferentJobInterval +import org.opensearch.indexmanagement.indexstatemanagement.util.hasDifferentPolicyVersion import org.opensearch.indexmanagement.indexstatemanagement.util.hasTimedOut import 
org.opensearch.indexmanagement.indexstatemanagement.util.isAllowed import org.opensearch.indexmanagement.indexstatemanagement.util.isFailed @@ -94,8 +96,6 @@ import org.opensearch.jobscheduler.spi.LockModel import org.opensearch.jobscheduler.spi.ScheduledJobParameter import org.opensearch.jobscheduler.spi.ScheduledJobRunner import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule -import org.opensearch.core.rest.RestStatus -import org.opensearch.indexmanagement.indexstatemanagement.util.hasDifferentPolicyVersion import org.opensearch.script.Script import org.opensearch.script.ScriptService import org.opensearch.script.TemplateScript @@ -107,7 +107,6 @@ import java.time.temporal.ChronoUnit object ManagedIndexRunner : ScheduledJobRunner, CoroutineScope by CoroutineScope(SupervisorJob() + Dispatchers.Default + CoroutineName("ManagedIndexRunner")) { - private val logger = LogManager.getLogger(javaClass) private lateinit var clusterService: ClusterService @@ -124,10 +123,13 @@ object ManagedIndexRunner : private lateinit var indexMetadataProvider: IndexMetadataProvider private var indexStateManagementEnabled: Boolean = DEFAULT_ISM_ENABLED private var validationServiceEnabled: Boolean = DEFAULT_ACTION_VALIDATION_ENABLED + @Suppress("MagicNumber") private val savePolicyRetryPolicy = BackoffPolicy.exponentialBackoff(TimeValue.timeValueMillis(250), 3) + @Suppress("MagicNumber") private val updateMetaDataRetryPolicy = BackoffPolicy.exponentialBackoff(TimeValue.timeValueMillis(250), 3) + @Suppress("MagicNumber") private val errorNotificationRetryPolicy = BackoffPolicy.exponentialBackoff(TimeValue.timeValueMillis(250), 3) private var jobInterval: Int = DEFAULT_JOB_INTERVAL @@ -270,9 +272,10 @@ object ManagedIndexRunner : val nonDefaultIndexTypes = indexMetadataProvider.services.keys.filter { it != DEFAULT_INDEX_TYPE } val multiTypeIndexNameToMetaData = indexMetadataProvider.getMultiTypeISMIndexMetadata(nonDefaultIndexTypes, listOf(managedIndexConfig.index)) - val someTypeMatchedUuid = multiTypeIndexNameToMetaData.values.any { - it[managedIndexConfig.index]?.indexUuid == managedIndexConfig.indexUuid - } + val someTypeMatchedUuid = + multiTypeIndexNameToMetaData.values.any { + it[managedIndexConfig.index]?.indexUuid == managedIndexConfig.indexUuid + } // If no index types had an index with a matching name and uuid combination, return if (!someTypeMatchedUuid) { logger.warn("Failed to find IndexMetadata for ${managedIndexConfig.index}.") @@ -294,12 +297,13 @@ object ManagedIndexRunner : val policy = managedIndexConfig.policy if (managedIndexMetaData.hasDifferentPolicyVersion(managedIndexConfig)) { val info = mapOf("message" to "There is a version conflict between your previous execution and your managed index") - val result = updateManagedIndexMetaData( - managedIndexMetaData.copy( - policyRetryInfo = PolicyRetryInfoMetaData(true, 0), - info = info + val result = + updateManagedIndexMetaData( + managedIndexMetaData.copy( + policyRetryInfo = PolicyRetryInfoMetaData(true, 0), + info = info, + ), ) - ) if (result.metadataSaved) { disableManagedIndexConfig(managedIndexConfig) publishErrorNotification(policy, managedIndexMetaData) @@ -309,9 +313,10 @@ object ManagedIndexRunner : val state = policy.getStateToExecute(managedIndexMetaData) val action: Action? 
= state?.getActionToExecute(managedIndexMetaData, indexMetadataProvider) - val stepContext = StepContext( - managedIndexMetaData, clusterService, client, threadPool.threadContext, policy.user, scriptService, settings, jobContext.lockService - ) + val stepContext = + StepContext( + managedIndexMetaData, clusterService, client, threadPool.threadContext, policy.user, scriptService, settings, jobContext.lockService, + ) val step: Step? = action?.getStepToExecute(stepContext) val currentActionMetaData = action?.getUpdatedActionMetadata(managedIndexMetaData, state.name) @@ -325,10 +330,11 @@ object ManagedIndexRunner : if (action?.hasTimedOut(currentActionMetaData) == true) { val info = mapOf("message" to "Action timed out") logger.error("Action=${action.type} has timed out") - val updated = updateManagedIndexMetaData( - managedIndexMetaData - .copy(actionMetaData = currentActionMetaData?.copy(failed = true), info = info) - ) + val updated = + updateManagedIndexMetaData( + managedIndexMetaData + .copy(actionMetaData = currentActionMetaData?.copy(failed = true), info = info), + ) if (updated.metadataSaved) { disableManagedIndexConfig(managedIndexConfig) publishErrorNotification(policy, managedIndexMetaData) @@ -353,11 +359,12 @@ object ManagedIndexRunner : logger.info("Previous execution failed to update step status, isIdempotent=$isIdempotent") if (isIdempotent != true) { val info = mapOf("message" to "Previous action was not able to update IndexMetaData.") - val updated = updateManagedIndexMetaData( - managedIndexMetaData.copy( - policyRetryInfo = PolicyRetryInfoMetaData(true, 0), info = info + val updated = + updateManagedIndexMetaData( + managedIndexMetaData.copy( + policyRetryInfo = PolicyRetryInfoMetaData(true, 0), info = info, + ), ) - ) if (updated.metadataSaved) { disableManagedIndexConfig(managedIndexConfig) publishErrorNotification(policy, managedIndexMetaData) @@ -370,11 +377,12 @@ object ManagedIndexRunner : val actionExtensionName = ISMActionsParser.instance.customActionExtensionMap[action?.type] if (!extensionStatusChecker.isEnabled(actionExtensionName)) { val info = mapOf("message" to "Failed to execute action=${action?.type} as extension [$actionExtensionName] is not enabled.") - val updated = updateManagedIndexMetaData( - managedIndexMetaData.copy( - policyRetryInfo = PolicyRetryInfoMetaData(true, 0), info = info + val updated = + updateManagedIndexMetaData( + managedIndexMetaData.copy( + policyRetryInfo = PolicyRetryInfoMetaData(true, 0), info = info, + ), ) - ) if (updated.metadataSaved) { disableManagedIndexConfig(managedIndexConfig) publishErrorNotification(policy, managedIndexMetaData) @@ -386,11 +394,12 @@ object ManagedIndexRunner : // as this action has been removed from the AllowList, but if it's not the first step we will let it finish as it's already inflight if (action?.isAllowed(allowList) == false && step != null && action.isFirstStep(step.name) && action.type != TransitionsAction.name) { val info = mapOf("message" to "Attempted to execute action=${action.type} which is not allowed.") - val updated = updateManagedIndexMetaData( - managedIndexMetaData.copy( - policyRetryInfo = PolicyRetryInfoMetaData(true, 0), info = info + val updated = + updateManagedIndexMetaData( + managedIndexMetaData.copy( + policyRetryInfo = PolicyRetryInfoMetaData(true, 0), info = info, + ), ) - ) if (updated.metadataSaved) { disableManagedIndexConfig(managedIndexConfig) publishErrorNotification(policy, managedIndexMetaData) @@ -405,13 +414,14 @@ object ManagedIndexRunner : 
@Suppress("ComplexCondition", "MaxLineLength") if (updateResult.metadataSaved && state != null && action != null && step != null && currentActionMetaData != null) { if (validationServiceEnabled) { - val validationResult = withClosableContext( - IndexManagementSecurityContext( - managedIndexConfig.id, settings, threadPool.threadContext, managedIndexConfig.policy.user - ) - ) { - actionValidation.validate(action.type, stepContext.metadata.index) - } + val validationResult = + withClosableContext( + IndexManagementSecurityContext( + managedIndexConfig.id, settings, threadPool.threadContext, managedIndexConfig.policy.user, + ), + ) { + actionValidation.validate(action.type, stepContext.metadata.index) + } if (validationResult.validationStatus == Validate.ValidationStatus.RE_VALIDATING) { logger.warn("Validation Status is: RE_VALIDATING. The action is {}, state is {}, step is {}.", action.type, state.name, step.name) publishErrorNotification(policy, managedIndexMetaData) @@ -428,8 +438,8 @@ object ManagedIndexRunner : // Step null check is done in getStartingManagedIndexMetaData withClosableContext( IndexManagementSecurityContext( - managedIndexConfig.id, settings, threadPool.threadContext, managedIndexConfig.policy.user - ) + managedIndexConfig.id, settings, threadPool.threadContext, managedIndexConfig.policy.user, + ), ) { step.preExecute(logger, stepContext.getUpdatedContext(startingManagedIndexMetaData)).execute().postExecute(logger) } @@ -499,10 +509,11 @@ object ManagedIndexRunner : val policySource = getResponse.sourceAsBytesRef // Intellij complains about createParser/parseWithType blocking because it sees they throw IOExceptions return withContext(Dispatchers.IO) { - val xcp = XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - policySource, XContentType.JSON - ) + val xcp = + XContentHelper.createParser( + xContentRegistry, LoggingDeprecationHandler.INSTANCE, + policySource, XContentType.JSON, + ) xcp.parseWithType(getResponse.id, getResponse.seqNo, getResponse.primaryTerm, Policy.Companion::parse) } } catch (e: Exception) { @@ -527,10 +538,11 @@ object ManagedIndexRunner : @Suppress("TooGenericExceptionCaught") private suspend fun savePolicyToManagedIndexConfig(managedIndexConfig: ManagedIndexConfig, policy: Policy): Boolean { - val updatedManagedIndexConfig = managedIndexConfig.copy( - policyID = policy.id, policy = policy, - policySeqNo = policy.seqNo, policyPrimaryTerm = policy.primaryTerm, changePolicy = null - ) + val updatedManagedIndexConfig = + managedIndexConfig.copy( + policyID = policy.id, policy = policy, + policySeqNo = policy.seqNo, policyPrimaryTerm = policy.primaryTerm, changePolicy = null, + ) val indexRequest = managedIndexConfigIndexRequest(updatedManagedIndexConfig) var savedPolicy = false try { @@ -549,8 +561,9 @@ object ManagedIndexRunner : @Suppress("TooGenericExceptionCaught") private suspend fun updateJobInterval(managedIndexConfig: ManagedIndexConfig, jobInterval: Int) { try { - val updatedManagedIndexConfig = managedIndexConfig - .copy(jobSchedule = IntervalSchedule(getIntervalStartTime(managedIndexConfig), jobInterval, ChronoUnit.MINUTES)) + val updatedManagedIndexConfig = + managedIndexConfig + .copy(jobSchedule = IntervalSchedule(getIntervalStartTime(managedIndexConfig), jobInterval, ChronoUnit.MINUTES)) val indexRequest = managedIndexConfigIndexRequest(updatedManagedIndexConfig) val indexResponse: IndexResponse = client.suspendUntil { index(indexRequest, it) } if (indexResponse.status() != RestStatus.OK) { @@ -586,7 
+599,7 @@ object ManagedIndexRunner : actionMetaData = null, stepMetaData = null, policyRetryInfo = PolicyRetryInfoMetaData(failed = true, consumedRetries = 0), - info = mapOf("message" to "Fail to load policy: $policyID") + info = mapOf("message" to "Fail to load policy: $policyID"), ) } else { val state = managedIndexConfig.changePolicy?.state ?: policy.defaultState @@ -605,7 +618,7 @@ object ManagedIndexRunner : actionMetaData = null, stepMetaData = null, policyRetryInfo = PolicyRetryInfoMetaData(failed = false, consumedRetries = 0), - info = mapOf("message" to "Successfully initialized policy: ${policy.id}") + info = mapOf("message" to "Successfully initialized policy: ${policy.id}"), ) } } @@ -617,7 +630,7 @@ object ManagedIndexRunner : private suspend fun updateManagedIndexMetaData( managedIndexMetaData: ManagedIndexMetaData, lastUpdateResult: UpdateMetadataResult? = null, - create: Boolean = false + create: Boolean = false, ): UpdateMetadataResult { var result = UpdateMetadataResult() if (!imIndices.attemptUpdateConfigIndexMapping()) { @@ -634,8 +647,9 @@ object ManagedIndexRunner : try { updateMetaDataRetryPolicy.retry(logger) { val indexResponse: IndexResponse = client.suspendUntil { index(indexRequest, it) } - val metadataSaved = indexResponse.status() == RestStatus.OK || - indexResponse.status() == RestStatus.CREATED + val metadataSaved = + indexResponse.status() == RestStatus.OK || + indexResponse.status() == RestStatus.CREATED result = UpdateMetadataResult(metadataSaved, indexResponse.seqNo, indexResponse.primaryTerm) } @@ -646,7 +660,7 @@ object ManagedIndexRunner : logger.error( "There was VersionConflictEngineException trying to update the metadata for " + "${managedIndexMetaData.index}. Message: ${e.message}", - e + e, ) } catch (e: Exception) { logger.error("Failed to save ManagedIndexMetaData for [index=${managedIndexMetaData.index}]", e) @@ -657,7 +671,7 @@ object ManagedIndexRunner : data class UpdateMetadataResult( val metadataSaved: Boolean = false, val seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, - val primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM + val primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, ) /** @@ -668,49 +682,51 @@ object ManagedIndexRunner : private suspend fun initChangePolicy( managedIndexConfig: ManagedIndexConfig, managedIndexMetaData: ManagedIndexMetaData, - actionToExecute: Action? + actionToExecute: Action?, ) { val changePolicy = managedIndexConfig.changePolicy // should never happen since we only call this if there is a changePolicy, but we'll do it to make changePolicy non-null if (changePolicy == null) { logger.debug( "initChangePolicy was called with a null ChangePolicy, ManagedIndexConfig: {}", - managedIndexConfig + managedIndexConfig, ) return } val policy = getPolicy(changePolicy.policyID) // update the ManagedIndexMetaData with new information - val updatedManagedIndexMetaData = if (policy == null) { - managedIndexMetaData.copy( - info = mapOf("message" to "Failed to load change policy: ${changePolicy.policyID}"), - policyRetryInfo = PolicyRetryInfoMetaData(failed = true, consumedRetries = 0) - ) - } else { - // 1. entering transition action in this run - // 2. 
has been in transition action - // Refresh the transition action metadata, meaning we start the transition for change policy - val actionMetaData = if (actionToExecute?.type == TransitionsAction.name) { - ActionMetaData( - TransitionsAction.name, Instant.now().toEpochMilli(), -1, - false, 0, 0, null + val updatedManagedIndexMetaData = + if (policy == null) { + managedIndexMetaData.copy( + info = mapOf("message" to "Failed to load change policy: ${changePolicy.policyID}"), + policyRetryInfo = PolicyRetryInfoMetaData(failed = true, consumedRetries = 0), ) } else { - managedIndexMetaData.actionMetaData - } + // 1. entering transition action in this run + // 2. has been in transition action + // Refresh the transition action metadata, meaning we start the transition for change policy + val actionMetaData = + if (actionToExecute?.type == TransitionsAction.name) { + ActionMetaData( + TransitionsAction.name, Instant.now().toEpochMilli(), -1, + false, 0, 0, null, + ) + } else { + managedIndexMetaData.actionMetaData + } - managedIndexMetaData.copy( - info = mapOf("message" to "Attempting to change policy to ${policy.id}"), - transitionTo = changePolicy.state, - actionMetaData = actionMetaData, - stepMetaData = null, - policyCompleted = false, - policySeqNo = policy.seqNo, - policyPrimaryTerm = policy.primaryTerm, - policyID = policy.id - ) - } + managedIndexMetaData.copy( + info = mapOf("message" to "Attempting to change policy to ${policy.id}"), + transitionTo = changePolicy.state, + actionMetaData = actionMetaData, + stepMetaData = null, + policyCompleted = false, + policySeqNo = policy.seqNo, + policyPrimaryTerm = policy.primaryTerm, + policyID = policy.id, + ) + } /** * The freshness of isSafe may change between runs, and we use it to decide whether to enter this method @@ -769,7 +785,7 @@ object ManagedIndexRunner : ErrorNotification.CHANNEL_TITLE, metadata, compiledMessage, - policy.user + policy.user, ) } } @@ -807,12 +823,13 @@ object ManagedIndexRunner : private suspend fun getIndexMetadata(index: String): IndexMetadata? { var indexMetaData: IndexMetadata? 
= null try { - val clusterStateRequest = ClusterStateRequest() - .clear() - .indices(index) - .metadata(true) - .local(false) - .indicesOptions(IndicesOptions.strictExpand()) + val clusterStateRequest = + ClusterStateRequest() + .clear() + .indices(index) + .metadata(true) + .local(false) + .indicesOptions(IndicesOptions.strictExpand()) val response: ClusterStateResponse = client.admin().cluster().suspendUntil { state(clusterStateRequest, it) } @@ -854,9 +871,10 @@ object ManagedIndexRunner : try { val multiTypeIndexNameToMetaData = indexMetadataProvider.getMultiTypeISMIndexMetadata(indexNames = listOf(managedIndexConfig.index)) // the managedIndexConfig.indexUuid should be unique across all index types - val indexCreationDate = multiTypeIndexNameToMetaData.values.firstOrNull { - it[managedIndexConfig.index]?.indexUuid == managedIndexConfig.indexUuid - }?.get(managedIndexConfig.index)?.indexCreationDate + val indexCreationDate = + multiTypeIndexNameToMetaData.values.firstOrNull { + it[managedIndexConfig.index]?.indexUuid == managedIndexConfig.indexUuid + }?.get(managedIndexConfig.index)?.indexCreationDate return indexCreationDate } catch (e: Exception) { logger.error("Failed to get the index creation date", e) @@ -873,16 +891,17 @@ object ManagedIndexRunner : */ private suspend fun deleteFromManagedIndex(managedIndexConfig: ManagedIndexConfig, actionType: String) { try { - val bulkRequest = BulkRequest() - .add(deleteManagedIndexRequest(managedIndexConfig.indexUuid)) - .add(deleteManagedIndexMetadataRequest(managedIndexConfig.indexUuid)) + val bulkRequest = + BulkRequest() + .add(deleteManagedIndexRequest(managedIndexConfig.indexUuid)) + .add(deleteManagedIndexMetadataRequest(managedIndexConfig.indexUuid)) val bulkResponse: BulkResponse = client.suspendUntil { bulk(bulkRequest, it) } for (bulkItemResponse in bulkResponse) { if (bulkItemResponse.isFailed) { logger.warn( "Failed to delete managed index job/metadata [id=${bulkItemResponse.id}] for ${managedIndexConfig.index}" + - " after a successful $actionType [result=${bulkItemResponse.failureMessage}]" + " after a successful $actionType [result=${bulkItemResponse.failureMessage}]", ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/PluginVersionSweepCoordinator.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/PluginVersionSweepCoordinator.kt index ccf341fe4..20dc3bcea 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/PluginVersionSweepCoordinator.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/PluginVersionSweepCoordinator.kt @@ -69,20 +69,21 @@ class PluginVersionSweepCoordinator( if (!isIndexStateManagementEnabled()) return // Cancel existing background sweep scheduledSkipExecution?.cancel() - val scheduledJob = Runnable { - launch { - try { - if (!skipExecution.flag) { - logger.info("Canceling sweep ism plugin version job") - scheduledSkipExecution?.cancel() - } else { - skipExecution.sweepISMPluginVersion() + val scheduledJob = + Runnable { + launch { + try { + if (!skipExecution.flag) { + logger.info("Canceling sweep ism plugin version job") + scheduledSkipExecution?.cancel() + } else { + skipExecution.sweepISMPluginVersion() + } + } catch (e: Exception) { + logger.error("Failed to sweep ism plugin version", e) } - } catch (e: Exception) { - logger.error("Failed to sweep ism plugin version", e) } } - } scheduledSkipExecution = threadPool.scheduleWithFixedDelay(scheduledJob, sweepSkipPeriod, 
ThreadPool.Names.MANAGEMENT) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/SkipExecution.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/SkipExecution.kt index 78ccfb299..c74f27c89 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/SkipExecution.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/SkipExecution.kt @@ -6,19 +6,19 @@ package org.opensearch.indexmanagement.indexstatemanagement import org.apache.logging.log4j.LogManager -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.cluster.node.info.NodesInfoAction import org.opensearch.action.admin.cluster.node.info.NodesInfoRequest import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse import org.opensearch.action.admin.cluster.node.info.PluginsAndModules import org.opensearch.client.Client +import org.opensearch.core.action.ActionListener import org.opensearch.indexmanagement.util.OpenForTesting // TODO this can be moved to job scheduler, so that all extended plugin // can avoid running jobs in an upgrading cluster @OpenForTesting class SkipExecution( - private val client: Client + private val client: Client, ) { private val logger = LogManager.getLogger(javaClass) @@ -61,19 +61,23 @@ class SkipExecution( if ((versionSet.size + legacyVersionSet.size) > 1) { flag = true logger.info("There are multiple versions of Index Management plugins in the cluster: [$versionSet, $legacyVersionSet]") - } else flag = false + } else { + flag = false + } if (versionSet.isNotEmpty() && legacyVersionSet.isNotEmpty()) { hasLegacyPlugin = true logger.info("Found legacy plugin versions [$legacyVersionSet] and opensearch plugins versions [$versionSet] in the cluster") - } else hasLegacyPlugin = false + } else { + hasLegacyPlugin = false + } } override fun onFailure(e: Exception) { logger.error("Failed sweeping nodes for ISM plugin versions: $e") flag = false } - } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AliasAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AliasAction.kt index 47111a823..3d03e92b0 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AliasAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AliasAction.kt @@ -16,9 +16,8 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext class AliasAction( val actions: List, - index: Int + index: Int, ) : Action(name, index) { - /** * Allowing the alias action to be only applicable on the managed index for ADD and REMOVE actions only. * https://github.com/opensearch-project/OpenSearch/blob/4d045a164e12a382881140e32f9285a3224fecc7/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java#L105 @@ -28,10 +27,10 @@ class AliasAction( val allowedActionTypes = listOf(IndicesAliasesRequest.AliasActions.Type.ADD, IndicesAliasesRequest.AliasActions.Type.REMOVE) require(actions.all { it.actionType() in allowedActionTypes }) { "Only ADD and REMOVE actions are allowed." } require( - actions.all { it.indices().isNullOrEmpty() } + actions.all { it.indices().isNullOrEmpty() }, ) { "Alias action can only work on its applied index so don't accept index/indices parameter." } require( - actions.all { it.aliases().isNotEmpty() } + actions.all { it.aliases().isNotEmpty() }, ) { "At least one alias needs to be specified." 
} } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AliasActionParser.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AliasActionParser.kt index cee344faf..f72635337 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AliasActionParser.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AliasActionParser.kt @@ -16,8 +16,8 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.Action import org.opensearch.indexmanagement.spi.indexstatemanagement.ActionParser class AliasActionParser : ActionParser() { - private val logger = LogManager.getLogger(javaClass) + override fun fromStreamInput(sin: StreamInput): Action { val actions = sin.readList(IndicesAliasesRequest::AliasActions) val index = sin.readInt() diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AllocationAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AllocationAction.kt index d1bf0d6df..39287eee7 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AllocationAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AllocationAction.kt @@ -18,9 +18,8 @@ class AllocationAction( val include: Map, val exclude: Map, val waitFor: Boolean = false, - index: Int + index: Int, ) : Action(name, index) { - init { require(require.isNotEmpty() || include.isNotEmpty() || exclude.isNotEmpty()) { "At least one allocation parameter need to be specified." } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/CloseAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/CloseAction.kt index 59d08ba68..60d7a02cb 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/CloseAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/CloseAction.kt @@ -11,12 +11,12 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext class CloseAction( - index: Int + index: Int, ) : Action(name, index) { - companion object { const val name = "close" } + private val attemptCloseStep = AttemptCloseStep() private val steps = listOf(attemptCloseStep) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/DeleteAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/DeleteAction.kt index fdd325e81..822b01799 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/DeleteAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/DeleteAction.kt @@ -11,9 +11,8 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext class DeleteAction( - index: Int + index: Int, ) : Action(name, index) { - companion object { const val name = "delete" } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ForceMergeAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ForceMergeAction.kt index e4516b068..59e92f274 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ForceMergeAction.kt +++ 
b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ForceMergeAction.kt @@ -17,9 +17,8 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext class ForceMergeAction( val maxNumSegments: Int, - index: Int + index: Int, ) : Action(name, index) { - init { require(maxNumSegments > 0) { "Force merge {$MAX_NUM_SEGMENTS_FIELD} must be greater than 0" } } @@ -30,11 +29,12 @@ class ForceMergeAction( // Using a LinkedHashMap here to maintain order of steps for getSteps() while providing a convenient way to // get the current Step object using the current step's name in getStepToExecute() - private val stepNameToStep: LinkedHashMap = linkedMapOf( - AttemptSetReadOnlyStep.name to attemptSetReadOnlyStep, - AttemptCallForceMergeStep.name to attemptCallForceMergeStep, - WaitForForceMergeStep.name to waitForForceMergeStep - ) + private val stepNameToStep: LinkedHashMap = + linkedMapOf( + AttemptSetReadOnlyStep.name to attemptSetReadOnlyStep, + AttemptCallForceMergeStep.name to attemptCallForceMergeStep, + WaitForForceMergeStep.name to waitForForceMergeStep, + ) @Suppress("ReturnCount") override fun getStepToExecute(context: StepContext): Step { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ForceMergeActionParser.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ForceMergeActionParser.kt index 4eb14de2f..f0f0f26a3 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ForceMergeActionParser.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ForceMergeActionParser.kt @@ -35,7 +35,7 @@ class ForceMergeActionParser : ActionParser() { return ForceMergeAction( requireNotNull(maxNumSegments) { "ForceMergeActionConfig maxNumSegments is null" }, - index + index, ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexPriorityAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexPriorityAction.kt index 4be14448b..be183ab53 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexPriorityAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexPriorityAction.kt @@ -15,9 +15,8 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext class IndexPriorityAction( val indexPriority: Int, - index: Int + index: Int, ) : Action(name, index) { - init { require(indexPriority >= 0) { "IndexPriorityAction index_priority value must be a non-negative number" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexPriorityActionParser.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexPriorityActionParser.kt index 2db7d6758..cc7c3c3ab 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexPriorityActionParser.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexPriorityActionParser.kt @@ -35,7 +35,7 @@ class IndexPriorityActionParser : ActionParser() { return IndexPriorityAction( indexPriority = requireNotNull(indexPriority) { "$INDEX_PRIORITY_FIELD is null" }, - index = index + index = index, ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/NotificationAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/NotificationAction.kt index 
c361d080f..fcc665012 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/NotificationAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/NotificationAction.kt @@ -20,9 +20,8 @@ class NotificationAction( val destination: Destination?, val channel: Channel?, val messageTemplate: Script, - index: Int + index: Int, ) : Action(name, index) { - init { require(destination != null || channel != null) { "Notification must contain a destination or channel" } require(destination == null || channel == null) { "Notification can only contain a single destination or channel" } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/NotificationActionParser.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/NotificationActionParser.kt index 760f81c28..5e3aed437 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/NotificationActionParser.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/NotificationActionParser.kt @@ -9,10 +9,10 @@ import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParser.Token import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken +import org.opensearch.indexmanagement.common.model.notification.Channel import org.opensearch.indexmanagement.indexstatemanagement.action.NotificationAction.Companion.CHANNEL_FIELD import org.opensearch.indexmanagement.indexstatemanagement.action.NotificationAction.Companion.DESTINATION_FIELD import org.opensearch.indexmanagement.indexstatemanagement.action.NotificationAction.Companion.MESSAGE_TEMPLATE_FIELD -import org.opensearch.indexmanagement.common.model.notification.Channel import org.opensearch.indexmanagement.indexstatemanagement.model.destination.Destination import org.opensearch.indexmanagement.spi.indexstatemanagement.Action import org.opensearch.indexmanagement.spi.indexstatemanagement.ActionParser @@ -50,7 +50,7 @@ class NotificationActionParser : ActionParser() { destination = destination, channel = channel, messageTemplate = requireNotNull(messageTemplate) { "NotificationAction message template is null" }, - index = index + index = index, ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/OpenAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/OpenAction.kt index de3247521..433859be3 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/OpenAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/OpenAction.kt @@ -11,12 +11,12 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext class OpenAction( - index: Int + index: Int, ) : Action(name, index) { - companion object { const val name = "open" } + private val attemptOpenStep = AttemptOpenStep() private val steps = listOf(attemptOpenStep) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReadOnlyAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReadOnlyAction.kt index e17e1ae5c..cd8e3d5f6 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReadOnlyAction.kt +++ 
b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReadOnlyAction.kt @@ -11,12 +11,12 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext class ReadOnlyAction( - index: Int + index: Int, ) : Action(name, index) { - companion object { const val name = "read_only" } + private val setReadOnlyStep = SetReadOnlyStep() private val steps = listOf(setReadOnlyStep) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReadWriteAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReadWriteAction.kt index 3da520302..3d3383bfa 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReadWriteAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReadWriteAction.kt @@ -11,9 +11,8 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext class ReadWriteAction( - index: Int + index: Int, ) : Action(name, index) { - companion object { const val name = "read_write" } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReplicaCountAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReplicaCountAction.kt index 88ef2f450..f6d148f1c 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReplicaCountAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReplicaCountAction.kt @@ -15,9 +15,8 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext class ReplicaCountAction( val numOfReplicas: Int, - index: Int + index: Int, ) : Action(name, index) { - init { require(numOfReplicas >= 0) { "ReplicaCountAction number_of_replicas value must be a non-negative number" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReplicaCountActionParser.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReplicaCountActionParser.kt index a92156167..90f4e1497 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReplicaCountActionParser.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReplicaCountActionParser.kt @@ -34,7 +34,7 @@ class ReplicaCountActionParser : ActionParser() { return ReplicaCountAction( numOfReplicas = requireNotNull(numOfReplicas) { "$ReplicaCountAction.NUMBER_OF_REPLICAS_FIELD is null" }, - index = index + index = index, ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RolloverAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RolloverAction.kt index def34512a..52016068c 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RolloverAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RolloverAction.kt @@ -5,9 +5,9 @@ package org.opensearch.indexmanagement.indexstatemanagement.action +import org.opensearch.common.unit.TimeValue import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.unit.ByteSizeValue -import org.opensearch.common.unit.TimeValue import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.XContentBuilder import 
org.opensearch.indexmanagement.indexstatemanagement.step.rollover.AttemptRolloverStep @@ -21,14 +21,15 @@ class RolloverAction( val minAge: TimeValue?, val minPrimaryShardSize: ByteSizeValue?, val copyAlias: Boolean = false, - index: Int + index: Int, ) : Action(name, index) { - init { if (minSize != null) require(minSize.bytes > 0) { "RolloverAction minSize value must be greater than 0" } - if (minPrimaryShardSize != null) require(minPrimaryShardSize.bytes > 0) { - "RolloverActionConfig minPrimaryShardSize value must be greater than 0" + if (minPrimaryShardSize != null) { + require(minPrimaryShardSize.bytes > 0) { + "RolloverActionConfig minPrimaryShardSize value must be greater than 0" + } } if (minDocs != null) require(minDocs > 0) { "RolloverAction minDocs value must be greater than 0" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RolloverActionParser.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RolloverActionParser.kt index 5da00a714..f23de8f39 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RolloverActionParser.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RolloverActionParser.kt @@ -5,9 +5,9 @@ package org.opensearch.indexmanagement.indexstatemanagement.action +import org.opensearch.common.unit.TimeValue import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.unit.ByteSizeValue -import org.opensearch.common.unit.TimeValue import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken import org.opensearch.indexmanagement.spi.indexstatemanagement.Action @@ -41,11 +41,13 @@ class RolloverActionParser : ActionParser() { RolloverAction.MIN_SIZE_FIELD -> minSize = ByteSizeValue.parseBytesSizeValue(xcp.text(), RolloverAction.MIN_SIZE_FIELD) RolloverAction.MIN_DOC_COUNT_FIELD -> minDocs = xcp.longValue() RolloverAction.MIN_INDEX_AGE_FIELD -> minAge = TimeValue.parseTimeValue(xcp.text(), RolloverAction.MIN_INDEX_AGE_FIELD) - RolloverAction.MIN_PRIMARY_SHARD_SIZE_FIELD -> minPrimaryShardSize = ByteSizeValue.parseBytesSizeValue( - xcp.text(), - RolloverAction - .MIN_PRIMARY_SHARD_SIZE_FIELD - ) + RolloverAction.MIN_PRIMARY_SHARD_SIZE_FIELD -> + minPrimaryShardSize = + ByteSizeValue.parseBytesSizeValue( + xcp.text(), + RolloverAction + .MIN_PRIMARY_SHARD_SIZE_FIELD, + ) RolloverAction.COPY_ALIAS_FIELD -> copyAlias = xcp.booleanValue() else -> throw IllegalArgumentException("Invalid field: [$fieldName] found in RolloverAction.") } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RollupAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RollupAction.kt index 8adbf9f01..fca8c06e4 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RollupAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RollupAction.kt @@ -17,9 +17,8 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext class RollupAction( val ismRollup: ISMRollup, - index: Int + index: Int, ) : Action(name, index) { - companion object { const val name = "rollup" const val ISM_ROLLUP_FIELD = "ism_rollup" diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ShrinkAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ShrinkAction.kt index 
0a599fd0c..05dd7af4a 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ShrinkAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ShrinkAction.kt @@ -30,7 +30,7 @@ class ShrinkAction( val aliases: List?, val switchAliases: Boolean = false, val forceUnsafe: Boolean?, - index: Int + index: Int, ) : Action(name, index) { init { val numSet = arrayOf(maxShardSize != null, percentageOfSourceShards != null, numNewShards != null).count { it } @@ -57,12 +57,14 @@ class ShrinkAction( private val attemptShrinkStep = AttemptShrinkStep(this) private val waitForShrinkStep = WaitForShrinkStep(this) - private val stepNameToStep: LinkedHashMap = linkedMapOf( - AttemptMoveShardsStep.name to attemptMoveShardsStep, - WaitForMoveShardsStep.name to waitForMoveShardsStep, - AttemptShrinkStep.name to attemptShrinkStep, - WaitForShrinkStep.name to waitForShrinkStep - ) + private val stepNameToStep: LinkedHashMap = + linkedMapOf( + AttemptMoveShardsStep.name to attemptMoveShardsStep, + WaitForMoveShardsStep.name to waitForMoveShardsStep, + AttemptShrinkStep.name to attemptShrinkStep, + WaitForShrinkStep.name to waitForShrinkStep, + ) + override fun getSteps(): List = listOf(attemptMoveShardsStep, waitForMoveShardsStep, attemptShrinkStep, waitForShrinkStep) @SuppressWarnings("ReturnCount") @@ -104,7 +106,9 @@ class ShrinkAction( if (maxShardSize != null) builder.field(MAX_SHARD_SIZE_FIELD, maxShardSize.stringRep) if (percentageOfSourceShards != null) builder.field(PERCENTAGE_OF_SOURCE_SHARDS_FIELD, percentageOfSourceShards) if (targetIndexTemplate != null) builder.field(TARGET_INDEX_TEMPLATE_FIELD, targetIndexTemplate) - if (aliases != null) { builder.aliasesField(aliases) } + if (aliases != null) { + builder.aliasesField(aliases) + } builder.field(SWITCH_ALIASES, switchAliases) if (forceUnsafe != null) builder.field(FORCE_UNSAFE_FIELD, forceUnsafe) builder.endObject() @@ -137,6 +141,7 @@ class ShrinkAction( const val SWITCH_ALIASES = "switch_aliases" const val FORCE_UNSAFE_FIELD = "force_unsafe" const val LOCK_SOURCE_JOB_ID = "shrink-node_name" + fun getSecurityFailureMessage(failure: String) = "Shrink action failed because of missing permissions: $failure" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/SnapshotAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/SnapshotAction.kt index 05489d8bf..5b818d5bd 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/SnapshotAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/SnapshotAction.kt @@ -17,9 +17,8 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext class SnapshotAction( val repository: String, val snapshot: String, - index: Int + index: Int, ) : Action(name, index) { - companion object { const val name = "snapshot" const val REPOSITORY_FIELD = "repository" diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/SnapshotActionParser.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/SnapshotActionParser.kt index 97bf15c0d..c545e3cb2 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/SnapshotActionParser.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/SnapshotActionParser.kt @@ -42,7 +42,7 @@ class SnapshotActionParser : ActionParser() { return SnapshotAction( repository = 
requireNotNull(repository) { "SnapshotAction repository must be specified" }, snapshot = requireNotNull(snapshot) { "SnapshotAction snapshot must be specified" }, - index = index + index = index, ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/TransformAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/TransformAction.kt index 0de34c15b..21520afb2 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/TransformAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/TransformAction.kt @@ -17,9 +17,8 @@ import org.opensearch.indexmanagement.transform.model.ISMTransform class TransformAction( val ismTransform: ISMTransform, - index: Int + index: Int, ) : Action(name, index) { - companion object { const val name = "transform" const val ISM_TRANSFORM_FIELD = "ism_transform" diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/TransitionsAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/TransitionsAction.kt index 60f3a7929..e2bd7747e 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/TransitionsAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/TransitionsAction.kt @@ -14,9 +14,8 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext class TransitionsAction( val transitions: List, - val indexMetadataProvider: IndexMetadataProvider + val indexMetadataProvider: IndexMetadataProvider, ) : Action(name, -1) { - private val attemptTransitionStep = AttemptTransitionStep(this) private val steps = listOf(attemptTransitionStep) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ChangePolicy.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ChangePolicy.kt index 36b51af61..f596cbe45 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ChangePolicy.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ChangePolicy.kt @@ -5,6 +5,7 @@ package org.opensearch.indexmanagement.indexstatemanagement.model +import org.opensearch.commons.authuser.User import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable @@ -14,7 +15,6 @@ import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParser.Token import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.commons.authuser.User import org.opensearch.indexmanagement.indexstatemanagement.util.WITH_USER import org.opensearch.indexmanagement.opensearchapi.optionalUserField import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StateMetaData @@ -35,18 +35,20 @@ data class ChangePolicy( val state: String?, val include: List, val isSafe: Boolean, - val user: User? = null + val user: User? 
= null, ) : Writeable, ToXContentObject { - @Throws(IOException::class) constructor(sin: StreamInput) : this( policyID = sin.readString(), state = sin.readOptionalString(), include = sin.readList(::StateFilter), isSafe = sin.readBoolean(), - user = if (sin.readBoolean()) { + user = + if (sin.readBoolean()) { User(sin) - } else null + } else { + null + }, ) override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { @@ -112,7 +114,7 @@ data class ChangePolicy( state, include.toList(), isSafe, - user + user, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ErrorNotification.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ErrorNotification.kt index 6fbef5edb..cdf42227c 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ErrorNotification.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ErrorNotification.kt @@ -22,9 +22,8 @@ import java.io.IOException data class ErrorNotification( val destination: Destination?, val channel: Channel?, - val messageTemplate: Script + val messageTemplate: Script, ) : ToXContentObject, Writeable { - init { require(destination != null || channel != null) { "ErrorNotification must contain a destination or channel" } require(destination == null || channel == null) { "ErrorNotification can only contain a single destination or channel" } @@ -44,7 +43,7 @@ data class ErrorNotification( constructor(sin: StreamInput) : this( sin.readOptionalWriteable(::Destination), sin.readOptionalWriteable(::Channel), - Script(sin) + Script(sin), ) @Throws(IOException::class) @@ -84,7 +83,7 @@ data class ErrorNotification( return ErrorNotification( destination = destination, channel = channel, - messageTemplate = requireNotNull(messageTemplate) { "ErrorNotification message template is null" } + messageTemplate = requireNotNull(messageTemplate) { "ErrorNotification message template is null" }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ExplainFilter.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ExplainFilter.kt index 7a2f7c1ca..ecab3c21c 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ExplainFilter.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ExplainFilter.kt @@ -24,15 +24,14 @@ data class ExplainFilter( val policyID: String? = null, val state: String? = null, val actionType: String? = null, - val failed: Boolean? = null + val failed: Boolean? 
= null, ) : ToXContentObject, Writeable { - @Throws(IOException::class) constructor(sin: StreamInput) : this( policyID = sin.readOptionalString(), state = sin.readOptionalString(), actionType = sin.readOptionalString(), - failed = sin.readOptionalBoolean() + failed = sin.readOptionalBoolean(), ) override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ISMTemplate.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ISMTemplate.kt index dde11627e..e22611e08 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ISMTemplate.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ISMTemplate.kt @@ -23,9 +23,8 @@ import java.time.Instant data class ISMTemplate( val indexPatterns: List, val priority: Int, - val lastUpdatedTime: Instant + val lastUpdatedTime: Instant, ) : ToXContentObject, Writeable { - init { require(priority >= 0) { "Requires priority to be >= 0" } require(indexPatterns.isNotEmpty()) { "Requires at least one index pattern" } @@ -43,7 +42,7 @@ data class ISMTemplate( constructor(sin: StreamInput) : this( sin.readStringList(), sin.readInt(), - sin.readInstant() + sin.readInstant(), ) @Throws(IOException::class) @@ -86,7 +85,7 @@ data class ISMTemplate( return ISMTemplate( indexPatterns = indexPatterns, priority = priority, - lastUpdatedTime = lastUpdatedTime ?: Instant.now() + lastUpdatedTime = lastUpdatedTime ?: Instant.now(), ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ManagedIndexConfig.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ManagedIndexConfig.kt index d92af725e..f42d77cde 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ManagedIndexConfig.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ManagedIndexConfig.kt @@ -37,9 +37,8 @@ data class ManagedIndexConfig( val policyPrimaryTerm: Long?, val policy: Policy, val changePolicy: ChangePolicy?, - val jobJitter: Double? + val jobJitter: Double?, ) : ScheduledJobParameter { - init { if (enabled) { requireNotNull(jobEnabledTime) { "jobEnabledTime must be present if the job is enabled" } @@ -109,7 +108,7 @@ data class ManagedIndexConfig( xcp: XContentParser, id: String = NO_ID, seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, - primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM + primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, ): ManagedIndexConfig { var name: String? = null var index: String? 
= null @@ -177,15 +176,16 @@ data class ManagedIndexConfig( policyID = requireNotNull(policyID) { "ManagedIndexConfig policy id is null" }, policySeqNo = policySeqNo, policyPrimaryTerm = policyPrimaryTerm, - policy = requireNotNull( + policy = + requireNotNull( policy?.copy( id = policyID, seqNo = policySeqNo ?: SequenceNumbers.UNASSIGNED_SEQ_NO, - primaryTerm = policyPrimaryTerm ?: SequenceNumbers.UNASSIGNED_PRIMARY_TERM - ) + primaryTerm = policyPrimaryTerm ?: SequenceNumbers.UNASSIGNED_PRIMARY_TERM, + ), ) { "ManagedIndexConfig policy is null" }, changePolicy = changePolicy, - jobJitter = jitter + jobJitter = jitter, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/Policy.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/Policy.kt index 1d96e0fc4..8b8722b83 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/Policy.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/Policy.kt @@ -5,6 +5,7 @@ package org.opensearch.indexmanagement.indexstatemanagement.model +import org.opensearch.commons.authuser.User import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable @@ -14,7 +15,6 @@ import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParser.Token import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.commons.authuser.User import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.indexstatemanagement.util.WITH_TYPE import org.opensearch.indexmanagement.indexstatemanagement.util.WITH_USER @@ -39,9 +39,8 @@ data class Policy( val defaultState: String, val states: List, val ismTemplate: List? = null, - val user: User? = null + val user: User? = null, ) : ToXContentObject, Writeable { - init { val distinctStateNames = states.map { it.name }.distinct() states.forEach { state -> @@ -89,12 +88,18 @@ data class Policy( errorNotification = sin.readOptionalWriteable(::ErrorNotification), defaultState = sin.readString(), states = sin.readList(::State), - ismTemplate = if (sin.readBoolean()) { + ismTemplate = + if (sin.readBoolean()) { sin.readList(::ISMTemplate) - } else null, - user = if (sin.readBoolean()) { + } else { + null + }, + user = + if (sin.readBoolean()) { User(sin) - } else null + } else { + null + }, ) @Throws(IOException::class) @@ -164,7 +169,7 @@ data class Policy( xcp: XContentParser, id: String = NO_ID, seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, - primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM + primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, ): Policy { var description: String? = null var defaultState: String? 
@@ -225,7 +230,7 @@ data class Policy(
                 defaultState = requireNotNull(defaultState) { "$DEFAULT_STATE_FIELD is null" },
                 states = states.toList(),
                 ismTemplate = ismTemplates,
-                user = user
+                user = user,
             )
         }
     }
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/State.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/State.kt
index 43be9a6d8..e34ff41fd 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/State.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/State.kt
@@ -26,9 +26,8 @@ import java.io.IOException
 data class State(
     val name: String,
     val actions: List<Action>,
-    val transitions: List<Transition>
+    val transitions: List<Transition>,
 ) : ToXContentObject, Writeable {
-
     init {
         require(name.isNotBlank()) { "State must contain a valid name" }
         var hasDelete = false
@@ -58,7 +57,7 @@ data class State(
     constructor(sin: StreamInput) : this(
         sin.readString(),
         sin.readList { ISMActionsParser.instance.fromStreamInput(it) },
-        sin.readList(::Transition)
+        sin.readList(::Transition),
     )

     @Throws(IOException::class)
@@ -70,7 +69,7 @@ data class State(

     fun getActionToExecute(
         managedIndexMetaData: ManagedIndexMetaData,
-        indexMetadataProvider: IndexMetadataProvider
+        indexMetadataProvider: IndexMetadataProvider,
     ): Action? {
         var actionConfig: Action?
         val actionMetaData = managedIndexMetaData.actionMetaData
@@ -83,9 +82,10 @@ data class State(
             actionConfig = TransitionsAction(this.transitions, indexMetadataProvider)
         } else {
             // Get the current actionConfig that is in the ManagedIndexMetaData
-            actionConfig = this.actions.filterIndexed { index, config ->
-                index == actionMetaData.index && config.type == actionMetaData.name
-            }.firstOrNull()
+            actionConfig =
+                this.actions.filterIndexed { index, config ->
+                    index == actionMetaData.index && config.type == actionMetaData.name
+                }.firstOrNull()
             if (actionConfig == null) return null

             val stepMetaData = managedIndexMetaData.stepMetaData
@@ -140,7 +140,7 @@ data class State(
             return State(
                 name = requireNotNull(name) { "State name is null" },
                 actions = actions.toList(),
-                transitions = transitions.toList()
+                transitions = transitions.toList(),
             )
         }
     }
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/StateFilter.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/StateFilter.kt
index 615bac27a..482b22492 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/StateFilter.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/StateFilter.kt
@@ -14,10 +14,9 @@ import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken
 import java.io.IOException

 data class StateFilter(val state: String) : Writeable {
-
     @Throws(IOException::class)
     constructor(sin: StreamInput) : this(
-        state = sin.readString()
+        state = sin.readString(),
     )

     @Throws(IOException::class)
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/Transition.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/Transition.kt
index bab7bf1e1..6a1bebd25 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/Transition.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/Transition.kt
@@ -5,11 +5,11 @@

 package org.opensearch.indexmanagement.indexstatemanagement.model

+import org.opensearch.common.unit.TimeValue
 import org.opensearch.core.common.io.stream.StreamInput
 import org.opensearch.core.common.io.stream.StreamOutput
 import org.opensearch.core.common.io.stream.Writeable
 import org.opensearch.core.common.unit.ByteSizeValue
-import org.opensearch.common.unit.TimeValue
 import org.opensearch.core.xcontent.ToXContent
 import org.opensearch.core.xcontent.ToXContentObject
 import org.opensearch.core.xcontent.XContentBuilder
@@ -22,9 +22,8 @@ import java.io.IOException

 data class Transition(
     val stateName: String,
-    val conditions: Conditions?
+    val conditions: Conditions?,
 ) : ToXContentObject, Writeable {
-
     override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder {
         builder.startObject()
             .field(STATE_NAME_FIELD, stateName)
@@ -35,7 +34,7 @@ data class Transition(
     @Throws(IOException::class)
     constructor(sin: StreamInput) : this(
         stateName = sin.readString(),
-        conditions = sin.readOptionalWriteable(::Conditions)
+        conditions = sin.readOptionalWriteable(::Conditions),
     )

     @Throws(IOException::class)
@@ -68,7 +67,7 @@ data class Transition(

             return Transition(
                 stateName = requireNotNull(name) { "Transition state name is null" },
-                conditions = conditions
+                conditions = conditions,
             )
         }
     }
@@ -79,9 +78,8 @@ data class Conditions(
     val docCount: Long? = null,
     val size: ByteSizeValue? = null,
     val cron: CronSchedule? = null,
-    val rolloverAge: TimeValue? = null
+    val rolloverAge: TimeValue? = null,
 ) : ToXContentObject, Writeable {
-
     init {
         val conditionsList = listOf(indexAge, docCount, size, cron, rolloverAge)
         require(conditionsList.filterNotNull().size == 1) { "Cannot provide more than one Transition condition" }
@@ -109,7 +107,7 @@ data class Conditions(
         docCount = sin.readOptionalLong(),
         size = sin.readOptionalWriteable(::ByteSizeValue),
         cron = sin.readOptionalWriteable(::CronSchedule),
-        rolloverAge = sin.readOptionalTimeValue()
+        rolloverAge = sin.readOptionalTimeValue(),
     )

     @Throws(IOException::class)
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/coordinator/ClusterStateManagedIndexConfig.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/coordinator/ClusterStateManagedIndexConfig.kt
index 4f63b5e4a..2b9074dc6 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/coordinator/ClusterStateManagedIndexConfig.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/coordinator/ClusterStateManagedIndexConfig.kt
@@ -21,5 +21,5 @@ data class ClusterStateManagedIndexConfig(
     val seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO,
     val primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM,
     val uuid: String,
-    val policyID: String
+    val policyID: String,
 )
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/coordinator/SweptManagedIndexConfig.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/coordinator/SweptManagedIndexConfig.kt
index a84dba159..4e0cc8a6c 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/coordinator/SweptManagedIndexConfig.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/coordinator/SweptManagedIndexConfig.kt
@@ -27,9 +27,8 @@ data class SweptManagedIndexConfig(
     val uuid: String,
     val policyID: String,
     val policy: Policy?,
-    val changePolicy: ChangePolicy?
+    val changePolicy: ChangePolicy?,
 ) {
-
     companion object {
         @Suppress("ComplexMethod", "UNUSED_PARAMETER")
         @JvmStatic
@@ -66,7 +65,7 @@ data class SweptManagedIndexConfig(
                 uuid,
                 policyID,
                 policy,
-                changePolicy
+                changePolicy,
             )
         }
     }
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Chime.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Chime.kt
index 802569f10..c308474cc 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Chime.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Chime.kt
@@ -24,7 +24,6 @@ import java.io.IOException
 * alerting so all plugins can consume and use.
 */
 data class Chime(val url: String) : ToXContent, Writeable {
-
     override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder {
         return builder.startObject(TYPE)
             .field(URL, url)
@@ -33,7 +32,7 @@ data class Chime(val url: String) : ToXContent, Writeable {

     @Throws(IOException::class)
     constructor(sin: StreamInput) : this(
-        sin.readString()
+        sin.readString(),
     )

     @Throws(IOException::class)
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/CustomWebhook.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/CustomWebhook.kt
index 6d5d9d4ce..35d382a25 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/CustomWebhook.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/CustomWebhook.kt
@@ -32,9 +32,8 @@ data class CustomWebhook(
     val queryParams: Map<String, String>,
     val headerParams: Map<String, String>,
     val username: String?,
-    val password: String?
+    val password: String?,
 ) : ToXContent, Writeable {
-
     init {
         require(!(Strings.isNullOrEmpty(url) && Strings.isNullOrEmpty(host))) {
             "Url or Host name must be provided."
@@ -64,7 +63,7 @@ data class CustomWebhook( suppressWarning(sin.readMap()), suppressWarning(sin.readMap()), sin.readOptionalString(), - sin.readOptionalString() + sin.readOptionalString(), ) override fun writeTo(out: StreamOutput) { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Destination.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Destination.kt index 32ff35b38..480a65aa1 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Destination.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Destination.kt @@ -5,6 +5,10 @@ package org.opensearch.indexmanagement.indexstatemanagement.model.destination +import org.opensearch.commons.destination.message.LegacyBaseMessage +import org.opensearch.commons.destination.message.LegacyChimeMessage +import org.opensearch.commons.destination.message.LegacyCustomWebhookMessage +import org.opensearch.commons.destination.message.LegacySlackMessage import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable @@ -14,10 +18,6 @@ import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParser.Token import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.commons.destination.message.LegacyBaseMessage -import org.opensearch.commons.destination.message.LegacyChimeMessage -import org.opensearch.commons.destination.message.LegacyCustomWebhookMessage -import org.opensearch.commons.destination.message.LegacySlackMessage import org.opensearch.indexmanagement.opensearchapi.convertToMap import java.io.IOException @@ -31,9 +31,8 @@ data class Destination( val type: DestinationType, val chime: Chime?, val slack: Slack?, - val customWebhook: CustomWebhook? 
+ val customWebhook: CustomWebhook?, ) : ToXContentObject, Writeable { - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { builder.startObject() .field(type.value, constructResponseForDestinationType(type)) @@ -46,7 +45,7 @@ data class Destination( sin.readEnum(DestinationType::class.java), sin.readOptionalWriteable(::Chime), sin.readOptionalWriteable(::Slack), - sin.readOptionalWriteable(::CustomWebhook) + sin.readOptionalWriteable(::CustomWebhook), ) @Throws(IOException::class) @@ -83,18 +82,19 @@ data class Destination( } } - val type = when { - chime != null -> DestinationType.CHIME - slack != null -> DestinationType.SLACK - customWebhook != null -> DestinationType.CUSTOM_WEBHOOK - else -> throw IllegalArgumentException("Must specify a destination type") - } + val type = + when { + chime != null -> DestinationType.CHIME + slack != null -> DestinationType.SLACK + customWebhook != null -> DestinationType.CUSTOM_WEBHOOK + else -> throw IllegalArgumentException("Must specify a destination type") + } return Destination( type, chime, slack, - customWebhook + customWebhook, ) } } @@ -105,23 +105,26 @@ data class Destination( when (type) { DestinationType.CHIME -> { val messageContent = chime?.constructMessageContent(compiledSubject, compiledMessage) - destinationMessage = LegacyChimeMessage.Builder("chime_message") - .withUrl(chime?.url) - .withMessage(messageContent) - .build() + destinationMessage = + LegacyChimeMessage.Builder("chime_message") + .withUrl(chime?.url) + .withMessage(messageContent) + .build() } DestinationType.SLACK -> { val messageContent = slack?.constructMessageContent(compiledSubject, compiledMessage) - destinationMessage = LegacySlackMessage.Builder("slack_message") - .withUrl(slack?.url) - .withMessage(messageContent) - .build() + destinationMessage = + LegacySlackMessage.Builder("slack_message") + .withUrl(slack?.url) + .withMessage(messageContent) + .build() } DestinationType.CUSTOM_WEBHOOK -> { - destinationMessage = LegacyCustomWebhookMessage.Builder("custom_webhook") - .withUrl(getLegacyCustomWebhookMessageURL(customWebhook, compiledMessage)) - .withHeaderParams(customWebhook?.headerParams) - .withMessage(compiledMessage).build() + destinationMessage = + LegacyCustomWebhookMessage.Builder("custom_webhook") + .withUrl(getLegacyCustomWebhookMessageURL(customWebhook, compiledMessage)) + .withHeaderParams(customWebhook?.headerParams) + .withMessage(compiledMessage).build() } } return destinationMessage diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Slack.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Slack.kt index c75759310..8181fd66c 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Slack.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Slack.kt @@ -24,7 +24,6 @@ import java.io.IOException * alerting so all plugins can consume and use. 
*/ data class Slack(val url: String) : ToXContent, Writeable { - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { return builder.startObject(TYPE) .field(URL, url) @@ -33,7 +32,7 @@ data class Slack(val url: String) : ToXContent, Writeable { @Throws(IOException::class) constructor(sin: StreamInput) : this( - sin.readString() + sin.readString(), ) @Throws(IOException::class) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/opensearchapi/OpenSearchExtensions.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/opensearchapi/OpenSearchExtensions.kt index ceba19187..7e0d1ac60 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/opensearchapi/OpenSearchExtensions.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/opensearchapi/OpenSearchExtensions.kt @@ -20,12 +20,12 @@ import org.opensearch.client.Client import org.opensearch.cluster.ClusterState import org.opensearch.cluster.metadata.IndexMetadata import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.XContentHelper +import org.opensearch.common.xcontent.XContentType import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentFragment import org.opensearch.core.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentHelper -import org.opensearch.common.xcontent.XContentType import org.opensearch.index.IndexNotFoundException import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.indexstatemanagement.DefaultIndexMetadataService @@ -85,21 +85,24 @@ fun Map.filterNotNullValues(): Map = @Suppress("ReturnCount") suspend fun Client.getManagedIndexMetadata(indexUUID: String): Pair { try { - val getRequest = GetRequest(INDEX_MANAGEMENT_INDEX, managedIndexMetadataID(indexUUID)) - .routing(indexUUID) + val getRequest = + GetRequest(INDEX_MANAGEMENT_INDEX, managedIndexMetadataID(indexUUID)) + .routing(indexUUID) val getResponse: GetResponse = this.suspendUntil { get(getRequest, it) } if (!getResponse.isExists || getResponse.isSourceEmpty) { return Pair(null, true) } - val metadata = withContext(Dispatchers.IO) { - val xcp = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - getResponse.sourceAsBytesRef, XContentType.JSON - ) - ManagedIndexMetaData.parseWithType(xcp, getResponse.id, getResponse.seqNo, getResponse.primaryTerm) - } + val metadata = + withContext(Dispatchers.IO) { + val xcp = + XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + getResponse.sourceAsBytesRef, XContentType.JSON, + ) + ManagedIndexMetaData.parseWithType(xcp, getResponse.id, getResponse.seqNo, getResponse.primaryTerm) + } return Pair(metadata, true) } catch (e: Exception) { when (e) { @@ -126,8 +129,8 @@ suspend fun Client.mgetManagedIndexMetadata(indexUuids: List): List?>() diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestAddPolicyAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestAddPolicyAction.kt index 2601b2b40..3b447177b 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestAddPolicyAction.kt +++ 
b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestAddPolicyAction.kt @@ -6,8 +6,8 @@ package org.opensearch.indexmanagement.indexstatemanagement.resthandler import org.opensearch.client.node.NodeClient -import org.opensearch.core.common.Strings import org.opensearch.common.xcontent.XContentHelper +import org.opensearch.core.common.Strings import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.ISM_BASE_URI import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.LEGACY_ISM_BASE_URI import org.opensearch.indexmanagement.indexstatemanagement.transport.action.addpolicy.AddPolicyAction @@ -24,7 +24,6 @@ import org.opensearch.rest.action.RestToXContentListener import java.io.IOException class RestAddPolicyAction : BaseRestHandler() { - override fun getName(): String = "add_policy_action" override fun routes(): List { @@ -35,12 +34,12 @@ class RestAddPolicyAction : BaseRestHandler() { return listOf( ReplacedRoute( POST, ADD_POLICY_BASE_URI, - POST, LEGACY_ADD_POLICY_BASE_URI + POST, LEGACY_ADD_POLICY_BASE_URI, ), ReplacedRoute( POST, "$ADD_POLICY_BASE_URI/{index}", - POST, "$LEGACY_ADD_POLICY_BASE_URI/{index}" - ) + POST, "$LEGACY_ADD_POLICY_BASE_URI/{index}", + ), ) } @@ -53,11 +52,12 @@ class RestAddPolicyAction : BaseRestHandler() { throw IllegalArgumentException("Missing indices") } - val body = if (request.hasContent()) { - XContentHelper.convertToMap(request.requiredContent(), false, request.mediaType).v2() - } else { - mapOf() - } + val body = + if (request.hasContent()) { + XContentHelper.convertToMap(request.requiredContent(), false, request.mediaType).v2() + } else { + mapOf() + } val indexType = request.param(TYPE_PARAM_KEY, DEFAULT_INDEX_TYPE) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestChangePolicyAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestChangePolicyAction.kt index 8b2dc16a4..010349e67 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestChangePolicyAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestChangePolicyAction.kt @@ -26,7 +26,6 @@ import org.opensearch.rest.action.RestToXContentListener import java.io.IOException class RestChangePolicyAction : BaseRestHandler() { - override fun routes(): List { return emptyList() } @@ -35,12 +34,12 @@ class RestChangePolicyAction : BaseRestHandler() { return listOf( ReplacedRoute( POST, CHANGE_POLICY_BASE_URI, - POST, LEGACY_CHANGE_POLICY_BASE_URI + POST, LEGACY_CHANGE_POLICY_BASE_URI, ), ReplacedRoute( POST, "$CHANGE_POLICY_BASE_URI/{index}", - POST, "$LEGACY_CHANGE_POLICY_BASE_URI/{index}" - ) + POST, "$LEGACY_CHANGE_POLICY_BASE_URI/{index}", + ), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestDeletePolicyAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestDeletePolicyAction.kt index f73c3bf4d..b897955f9 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestDeletePolicyAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestDeletePolicyAction.kt @@ -21,7 +21,6 @@ import org.opensearch.rest.action.RestStatusToXContentListener import java.io.IOException class RestDeletePolicyAction : BaseRestHandler() { - override fun routes(): List { return emptyList() } @@ -30,8 +29,8 @@ class RestDeletePolicyAction 
: BaseRestHandler() { return listOf( ReplacedRoute( DELETE, "$POLICY_BASE_URI/{policyID}", - DELETE, "$LEGACY_POLICY_BASE_URI/{policyID}" - ) + DELETE, "$LEGACY_POLICY_BASE_URI/{policyID}", + ), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestExplainAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestExplainAction.kt index 5877a3da0..2cb2e391f 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestExplainAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestExplainAction.kt @@ -7,8 +7,8 @@ package org.opensearch.indexmanagement.indexstatemanagement.resthandler import org.apache.logging.log4j.LogManager import org.opensearch.client.node.NodeClient -import org.opensearch.core.common.Strings import org.opensearch.common.logging.DeprecationLogger +import org.opensearch.core.common.Strings import org.opensearch.core.xcontent.XContentParser.Token import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.ISM_BASE_URI @@ -16,12 +16,12 @@ import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.LEGACY_ISM import org.opensearch.indexmanagement.indexstatemanagement.model.ExplainFilter import org.opensearch.indexmanagement.indexstatemanagement.transport.action.explain.ExplainAction import org.opensearch.indexmanagement.indexstatemanagement.transport.action.explain.ExplainRequest -import org.opensearch.indexmanagement.indexstatemanagement.util.DEFAULT_EXPLAIN_VALIDATE_ACTION import org.opensearch.indexmanagement.indexstatemanagement.util.DEFAULT_EXPLAIN_SHOW_POLICY -import org.opensearch.indexmanagement.indexstatemanagement.util.SHOW_VALIDATE_ACTION +import org.opensearch.indexmanagement.indexstatemanagement.util.DEFAULT_EXPLAIN_VALIDATE_ACTION import org.opensearch.indexmanagement.indexstatemanagement.util.DEFAULT_INDEX_TYPE import org.opensearch.indexmanagement.indexstatemanagement.util.DEFAULT_JOB_SORT_FIELD import org.opensearch.indexmanagement.indexstatemanagement.util.SHOW_POLICY_QUERY_PARAM +import org.opensearch.indexmanagement.indexstatemanagement.util.SHOW_VALIDATE_ACTION import org.opensearch.indexmanagement.indexstatemanagement.util.TYPE_PARAM_KEY import org.opensearch.indexmanagement.indexstatemanagement.util.parseClusterManagerTimeout import org.opensearch.indexmanagement.util.getSearchParams @@ -37,7 +37,6 @@ import org.opensearch.rest.action.RestToXContentListener private val log = LogManager.getLogger(RestExplainAction::class.java) class RestExplainAction : BaseRestHandler() { - companion object { const val EXPLAIN_BASE_URI = "$ISM_BASE_URI/explain" const val LEGACY_EXPLAIN_BASE_URI = "$LEGACY_ISM_BASE_URI/explain" @@ -51,20 +50,20 @@ class RestExplainAction : BaseRestHandler() { return listOf( ReplacedRoute( GET, EXPLAIN_BASE_URI, - GET, LEGACY_EXPLAIN_BASE_URI + GET, LEGACY_EXPLAIN_BASE_URI, ), ReplacedRoute( GET, "$EXPLAIN_BASE_URI/{index}", - GET, "$LEGACY_EXPLAIN_BASE_URI/{index}" + GET, "$LEGACY_EXPLAIN_BASE_URI/{index}", ), ReplacedRoute( POST, EXPLAIN_BASE_URI, - POST, LEGACY_EXPLAIN_BASE_URI + POST, LEGACY_EXPLAIN_BASE_URI, ), ReplacedRoute( POST, "$EXPLAIN_BASE_URI/{index}", - POST, "$LEGACY_EXPLAIN_BASE_URI/{index}" - ) + POST, "$LEGACY_EXPLAIN_BASE_URI/{index}", + ), ) } @@ -81,28 +80,31 @@ class RestExplainAction : BaseRestHandler() { val indexType = request.param(TYPE_PARAM_KEY, 
DEFAULT_INDEX_TYPE) - val explainFilter = if (request.method() == RestRequest.Method.POST) { - val xcp = request.contentParser() - ensureExpectedToken(Token.START_OBJECT, xcp.nextToken(), xcp) - ExplainFilter.parse(xcp) - } else { - null - } - - val clusterManagerTimeout = parseClusterManagerTimeout( - request, DeprecationLogger.getLogger(RestExplainAction::class.java), name - ) + val explainFilter = + if (request.method() == RestRequest.Method.POST) { + val xcp = request.contentParser() + ensureExpectedToken(Token.START_OBJECT, xcp.nextToken(), xcp) + ExplainFilter.parse(xcp) + } else { + null + } + + val clusterManagerTimeout = + parseClusterManagerTimeout( + request, DeprecationLogger.getLogger(RestExplainAction::class.java), name, + ) - val explainRequest = ExplainRequest( - indices.toList(), - request.paramAsBoolean("local", false), - clusterManagerTimeout, - searchParams, - explainFilter, - request.paramAsBoolean(SHOW_POLICY_QUERY_PARAM, DEFAULT_EXPLAIN_SHOW_POLICY), - request.paramAsBoolean(SHOW_VALIDATE_ACTION, DEFAULT_EXPLAIN_VALIDATE_ACTION), - indexType - ) + val explainRequest = + ExplainRequest( + indices.toList(), + request.paramAsBoolean("local", false), + clusterManagerTimeout, + searchParams, + explainFilter, + request.paramAsBoolean(SHOW_POLICY_QUERY_PARAM, DEFAULT_EXPLAIN_SHOW_POLICY), + request.paramAsBoolean(SHOW_VALIDATE_ACTION, DEFAULT_EXPLAIN_VALIDATE_ACTION), + indexType, + ) return RestChannelConsumer { channel -> client.execute(ExplainAction.INSTANCE, explainRequest, RestToXContentListener(channel)) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestGetPolicyAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestGetPolicyAction.kt index c8d4acf87..8a2c5c9d6 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestGetPolicyAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestGetPolicyAction.kt @@ -29,7 +29,6 @@ import org.opensearch.search.fetch.subphase.FetchSourceContext private val log = LogManager.getLogger(RestGetPolicyAction::class.java) class RestGetPolicyAction : BaseRestHandler() { - override fun routes(): List { return emptyList() } @@ -38,16 +37,16 @@ class RestGetPolicyAction : BaseRestHandler() { return listOf( ReplacedRoute( GET, POLICY_BASE_URI, - GET, LEGACY_POLICY_BASE_URI + GET, LEGACY_POLICY_BASE_URI, ), ReplacedRoute( GET, "$POLICY_BASE_URI/{policyID}", - GET, "$LEGACY_POLICY_BASE_URI/{policyID}" + GET, "$LEGACY_POLICY_BASE_URI/{policyID}", ), ReplacedRoute( HEAD, "$POLICY_BASE_URI/{policyID}", - HEAD, "$LEGACY_POLICY_BASE_URI/{policyID}" - ) + HEAD, "$LEGACY_POLICY_BASE_URI/{policyID}", + ), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestIndexPolicyAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestIndexPolicyAction.kt index a75b5dfc6..d55f5eb15 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestIndexPolicyAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestIndexPolicyAction.kt @@ -9,6 +9,7 @@ import org.opensearch.action.support.WriteRequest import org.opensearch.client.node.NodeClient import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.ToXContent import 
org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.LEGACY_POLICY_BASE_URI @@ -31,16 +32,14 @@ import org.opensearch.rest.RestHandler.Route import org.opensearch.rest.RestRequest import org.opensearch.rest.RestRequest.Method.PUT import org.opensearch.rest.RestResponse -import org.opensearch.core.rest.RestStatus import org.opensearch.rest.action.RestResponseListener import java.io.IOException import java.time.Instant class RestIndexPolicyAction( settings: Settings, - val clusterService: ClusterService + val clusterService: ClusterService, ) : BaseRestHandler() { - @Volatile private var allowList = ALLOW_LIST.get(settings) init { @@ -55,12 +54,12 @@ class RestIndexPolicyAction( return listOf( ReplacedRoute( PUT, POLICY_BASE_URI, - PUT, LEGACY_POLICY_BASE_URI + PUT, LEGACY_POLICY_BASE_URI, ), ReplacedRoute( PUT, "$POLICY_BASE_URI/{policyID}", - PUT, "$LEGACY_POLICY_BASE_URI/{policyID}" - ) + PUT, "$LEGACY_POLICY_BASE_URI/{policyID}", + ), ) } @@ -80,11 +79,12 @@ class RestIndexPolicyAction( val seqNo = request.paramAsLong(IF_SEQ_NO, SequenceNumbers.UNASSIGNED_SEQ_NO) val primaryTerm = request.paramAsLong(IF_PRIMARY_TERM, SequenceNumbers.UNASSIGNED_PRIMARY_TERM) - val refreshPolicy = if (request.hasParam(REFRESH)) { - WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) - } else { - WriteRequest.RefreshPolicy.IMMEDIATE - } + val refreshPolicy = + if (request.hasParam(REFRESH)) { + WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) + } else { + WriteRequest.RefreshPolicy.IMMEDIATE + } val disallowedActions = policy.getDisallowedActions(allowList) if (disallowedActions.isNotEmpty()) { @@ -92,8 +92,8 @@ class RestIndexPolicyAction( channel.sendResponse( BytesRestResponse( RestStatus.FORBIDDEN, - "You have actions that are not allowed in your policy $disallowedActions" - ) + "You have actions that are not allowed in your policy $disallowedActions", + ), ) } } @@ -112,7 +112,7 @@ class RestIndexPolicyAction( } return restResponse } - } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestRemovePolicyAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestRemovePolicyAction.kt index 33720ae70..e3b60f4b9 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestRemovePolicyAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestRemovePolicyAction.kt @@ -22,7 +22,6 @@ import org.opensearch.rest.action.RestToXContentListener import java.io.IOException class RestRemovePolicyAction : BaseRestHandler() { - override fun routes(): List { return emptyList() } @@ -31,12 +30,12 @@ class RestRemovePolicyAction : BaseRestHandler() { return listOf( ReplacedRoute( POST, REMOVE_POLICY_BASE_URI, - POST, LEGACY_REMOVE_POLICY_BASE_URI + POST, LEGACY_REMOVE_POLICY_BASE_URI, ), ReplacedRoute( POST, "$REMOVE_POLICY_BASE_URI/{index}", - POST, "$LEGACY_REMOVE_POLICY_BASE_URI/{index}" - ) + POST, "$LEGACY_REMOVE_POLICY_BASE_URI/{index}", + ), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestRetryFailedManagedIndexAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestRetryFailedManagedIndexAction.kt index 7d92534a0..6372d6485 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestRetryFailedManagedIndexAction.kt +++ 
b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestRetryFailedManagedIndexAction.kt @@ -6,9 +6,9 @@ package org.opensearch.indexmanagement.indexstatemanagement.resthandler import org.opensearch.client.node.NodeClient -import org.opensearch.core.common.Strings import org.opensearch.common.logging.DeprecationLogger import org.opensearch.common.xcontent.XContentHelper +import org.opensearch.core.common.Strings import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.ISM_BASE_URI import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.LEGACY_ISM_BASE_URI import org.opensearch.indexmanagement.indexstatemanagement.transport.action.retryfailedmanagedindex.RetryFailedManagedIndexAction @@ -25,7 +25,6 @@ import org.opensearch.rest.RestRequest.Method.POST import org.opensearch.rest.action.RestToXContentListener class RestRetryFailedManagedIndexAction : BaseRestHandler() { - override fun routes(): List { return emptyList() } @@ -34,12 +33,12 @@ class RestRetryFailedManagedIndexAction : BaseRestHandler() { return listOf( ReplacedRoute( POST, RETRY_BASE_URI, - POST, LEGACY_RETRY_BASE_URI + POST, LEGACY_RETRY_BASE_URI, ), ReplacedRoute( POST, "$RETRY_BASE_URI/{index}", - POST, "$LEGACY_RETRY_BASE_URI/{index}" - ) + POST, "$LEGACY_RETRY_BASE_URI/{index}", + ), ) } @@ -53,23 +52,26 @@ class RestRetryFailedManagedIndexAction : BaseRestHandler() { if (indices == null || indices.isEmpty()) { throw IllegalArgumentException("Missing indices") } - val body = if (request.hasContent()) { - XContentHelper.convertToMap(request.requiredContent(), false, request.mediaType).v2() - } else { - mapOf() - } + val body = + if (request.hasContent()) { + XContentHelper.convertToMap(request.requiredContent(), false, request.mediaType).v2() + } else { + mapOf() + } val indexType = request.param(TYPE_PARAM_KEY, DEFAULT_INDEX_TYPE) - val clusterManagerTimeout = parseClusterManagerTimeout( - request, DeprecationLogger.getLogger(RestRetryFailedManagedIndexAction::class.java), name - ) + val clusterManagerTimeout = + parseClusterManagerTimeout( + request, DeprecationLogger.getLogger(RestRetryFailedManagedIndexAction::class.java), name, + ) - val retryFailedRequest = RetryFailedManagedIndexRequest( - indices.toList(), body["state"] as String?, - clusterManagerTimeout, - indexType - ) + val retryFailedRequest = + RetryFailedManagedIndexRequest( + indices.toList(), body["state"] as String?, + clusterManagerTimeout, + indexType, + ) return RestChannelConsumer { channel -> client.execute(RetryFailedManagedIndexAction.INSTANCE, retryFailedRequest, RestToXContentListener(channel)) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/settings/LegacyOpenDistroManagedIndexSettings.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/settings/LegacyOpenDistroManagedIndexSettings.kt index 7858ef93d..24e810919 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/settings/LegacyOpenDistroManagedIndexSettings.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/settings/LegacyOpenDistroManagedIndexSettings.kt @@ -7,9 +7,9 @@ package org.opensearch.indexmanagement.indexstatemanagement.settings import org.opensearch.common.settings.Setting import org.opensearch.common.unit.TimeValue +import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.ALLOW_LIST_ALL import 
org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.DEFAULT_ISM_ENABLED import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.DEFAULT_JOB_INTERVAL -import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.ALLOW_LIST_ALL import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.SNAPSHOT_DENY_LIST_NONE import java.util.concurrent.TimeUnit import java.util.function.Function @@ -17,159 +17,178 @@ import java.util.function.Function @Suppress("UtilityClassWithPublicConstructor") class LegacyOpenDistroManagedIndexSettings { companion object { - val INDEX_STATE_MANAGEMENT_ENABLED: Setting = Setting.boolSetting( - "opendistro.index_state_management.enabled", - DEFAULT_ISM_ENABLED, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val POLICY_ID: Setting = Setting.simpleString( - "index.opendistro.index_state_management.policy_id", - Setting.Property.IndexScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val ROLLOVER_ALIAS: Setting = Setting.simpleString( - "index.opendistro.index_state_management.rollover_alias", - Setting.Property.IndexScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val ROLLOVER_SKIP: Setting = Setting.boolSetting( - "index.opendistro.index_state_management.rollover_skip", - false, - Setting.Property.IndexScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val AUTO_MANAGE: Setting = Setting.boolSetting( - "index.opendistro.index_state_management.auto_manage", - true, - Setting.Property.IndexScope, - Setting.Property.Dynamic - ) - - val JOB_INTERVAL: Setting = Setting.intSetting( - "opendistro.index_state_management.job_interval", - DEFAULT_JOB_INTERVAL, - 1, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val SWEEP_PERIOD: Setting = Setting.timeSetting( - "opendistro.index_state_management.coordinator.sweep_period", - TimeValue.timeValueMinutes(10), - TimeValue.timeValueMinutes(5), - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val COORDINATOR_BACKOFF_MILLIS: Setting = Setting.positiveTimeSetting( - "opendistro.index_state_management.coordinator.backoff_millis", - TimeValue.timeValueMillis(50), - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val COORDINATOR_BACKOFF_COUNT: Setting = Setting.intSetting( - "opendistro.index_state_management.coordinator.backoff_count", - 2, - 0, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val HISTORY_ENABLED: Setting = Setting.boolSetting( - "opendistro.index_state_management.history.enabled", - true, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val HISTORY_MAX_DOCS: Setting = Setting.longSetting( - "opendistro.index_state_management.history.max_docs", - 2500000L, // 1 doc is ~10kb or less. 
This many doc is roughly 25gb - 0L, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val HISTORY_INDEX_MAX_AGE: Setting = Setting.positiveTimeSetting( - "opendistro.index_state_management.history.max_age", - TimeValue.timeValueHours(24), - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val HISTORY_ROLLOVER_CHECK_PERIOD: Setting = Setting.positiveTimeSetting( - "opendistro.index_state_management.history.rollover_check_period", - TimeValue.timeValueHours(8), - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val HISTORY_RETENTION_PERIOD: Setting = Setting.positiveTimeSetting( - "opendistro.index_state_management.history.rollover_retention_period", - TimeValue(30, TimeUnit.DAYS), - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val HISTORY_NUMBER_OF_SHARDS: Setting = Setting.intSetting( - "opendistro.index_state_management.history.number_of_shards", - 1, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val HISTORY_NUMBER_OF_REPLICAS: Setting = Setting.intSetting( - "opendistro.index_state_management.history.number_of_replicas", - 1, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val ALLOW_LIST: Setting> = Setting.listSetting( - "opendistro.index_state_management.allow_list", - ALLOW_LIST_ALL, - Function.identity(), - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val SNAPSHOT_DENY_LIST: Setting> = Setting.listSetting( - "opendistro.index_state_management.snapshot.deny_list", - SNAPSHOT_DENY_LIST_NONE, - Function.identity(), - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) - - val RESTRICTED_INDEX_PATTERN = Setting.simpleString( - "opendistro.index_state_management.restricted_index_pattern", - ManagedIndexSettings.DEFAULT_RESTRICTED_PATTERN, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) + val INDEX_STATE_MANAGEMENT_ENABLED: Setting = + Setting.boolSetting( + "opendistro.index_state_management.enabled", + DEFAULT_ISM_ENABLED, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val POLICY_ID: Setting = + Setting.simpleString( + "index.opendistro.index_state_management.policy_id", + Setting.Property.IndexScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val ROLLOVER_ALIAS: Setting = + Setting.simpleString( + "index.opendistro.index_state_management.rollover_alias", + Setting.Property.IndexScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val ROLLOVER_SKIP: Setting = + Setting.boolSetting( + "index.opendistro.index_state_management.rollover_skip", + false, + Setting.Property.IndexScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val AUTO_MANAGE: Setting = + Setting.boolSetting( + "index.opendistro.index_state_management.auto_manage", + true, + Setting.Property.IndexScope, + Setting.Property.Dynamic, + ) + + val JOB_INTERVAL: Setting = + Setting.intSetting( + "opendistro.index_state_management.job_interval", + DEFAULT_JOB_INTERVAL, + 1, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val SWEEP_PERIOD: Setting = + Setting.timeSetting( + "opendistro.index_state_management.coordinator.sweep_period", + 
TimeValue.timeValueMinutes(10), + TimeValue.timeValueMinutes(5), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val COORDINATOR_BACKOFF_MILLIS: Setting = + Setting.positiveTimeSetting( + "opendistro.index_state_management.coordinator.backoff_millis", + TimeValue.timeValueMillis(50), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val COORDINATOR_BACKOFF_COUNT: Setting = + Setting.intSetting( + "opendistro.index_state_management.coordinator.backoff_count", + 2, + 0, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val HISTORY_ENABLED: Setting = + Setting.boolSetting( + "opendistro.index_state_management.history.enabled", + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val HISTORY_MAX_DOCS: Setting = + Setting.longSetting( + "opendistro.index_state_management.history.max_docs", + 2500000L, // 1 doc is ~10kb or less. This many doc is roughly 25gb + 0L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val HISTORY_INDEX_MAX_AGE: Setting = + Setting.positiveTimeSetting( + "opendistro.index_state_management.history.max_age", + TimeValue.timeValueHours(24), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val HISTORY_ROLLOVER_CHECK_PERIOD: Setting = + Setting.positiveTimeSetting( + "opendistro.index_state_management.history.rollover_check_period", + TimeValue.timeValueHours(8), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val HISTORY_RETENTION_PERIOD: Setting = + Setting.positiveTimeSetting( + "opendistro.index_state_management.history.rollover_retention_period", + TimeValue(30, TimeUnit.DAYS), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val HISTORY_NUMBER_OF_SHARDS: Setting = + Setting.intSetting( + "opendistro.index_state_management.history.number_of_shards", + 1, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val HISTORY_NUMBER_OF_REPLICAS: Setting = + Setting.intSetting( + "opendistro.index_state_management.history.number_of_replicas", + 1, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val ALLOW_LIST: Setting> = + Setting.listSetting( + "opendistro.index_state_management.allow_list", + ALLOW_LIST_ALL, + Function.identity(), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val SNAPSHOT_DENY_LIST: Setting> = + Setting.listSetting( + "opendistro.index_state_management.snapshot.deny_list", + SNAPSHOT_DENY_LIST_NONE, + Function.identity(), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val RESTRICTED_INDEX_PATTERN = + Setting.simpleString( + "opendistro.index_state_management.restricted_index_pattern", + ManagedIndexSettings.DEFAULT_RESTRICTED_PATTERN, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/settings/ManagedIndexSettings.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/settings/ManagedIndexSettings.kt index 125844f4a..27a0579d5 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/settings/ManagedIndexSettings.kt +++ 
b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/settings/ManagedIndexSettings.kt @@ -23,166 +23,188 @@ class ManagedIndexSettings { val ALLOW_LIST_ALL = ISMActionsParser.instance.parsers.map { it.getActionType() }.toList() val SNAPSHOT_DENY_LIST_NONE = emptyList() - val INDEX_STATE_MANAGEMENT_ENABLED: Setting = Setting.boolSetting( - "plugins.index_state_management.enabled", - LegacyOpenDistroManagedIndexSettings.INDEX_STATE_MANAGEMENT_ENABLED, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val ACTION_VALIDATION_ENABLED: Setting = Setting.boolSetting( - "plugins.index_state_management.action_validation.enabled", - DEFAULT_ACTION_VALIDATION_ENABLED, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val POLICY_ID: Setting = Setting.simpleString( - "index.plugins.index_state_management.policy_id", - LegacyOpenDistroManagedIndexSettings.POLICY_ID, - Setting.Property.IndexScope, - Setting.Property.Dynamic - ) - - val ROLLOVER_ALIAS: Setting = Setting.simpleString( - "index.plugins.index_state_management.rollover_alias", - LegacyOpenDistroManagedIndexSettings.ROLLOVER_ALIAS, - Setting.Property.IndexScope, - Setting.Property.Dynamic - ) - - val ROLLOVER_SKIP: Setting = Setting.boolSetting( - "index.plugins.index_state_management.rollover_skip", - LegacyOpenDistroManagedIndexSettings.ROLLOVER_SKIP, - Setting.Property.IndexScope, - Setting.Property.Dynamic - ) - - val AUTO_MANAGE: Setting = Setting.boolSetting( - "index.plugins.index_state_management.auto_manage", - LegacyOpenDistroManagedIndexSettings.AUTO_MANAGE, - Setting.Property.IndexScope, - Setting.Property.Dynamic - ) - - val JOB_INTERVAL: Setting = Setting.intSetting( - "plugins.index_state_management.job_interval", - LegacyOpenDistroManagedIndexSettings.JOB_INTERVAL, - 1, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val SWEEP_PERIOD: Setting = Setting.timeSetting( - "plugins.index_state_management.coordinator.sweep_period", - LegacyOpenDistroManagedIndexSettings.SWEEP_PERIOD, - TimeValue.timeValueMinutes(5), - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val SWEEP_SKIP_PERIOD: Setting = Setting.timeSetting( - "plugins.index_state_management.coordinator.sweep_skip_period", - TimeValue.timeValueMinutes(5), - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val COORDINATOR_BACKOFF_MILLIS: Setting = Setting.positiveTimeSetting( - "plugins.index_state_management.coordinator.backoff_millis", - LegacyOpenDistroManagedIndexSettings.COORDINATOR_BACKOFF_MILLIS, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val COORDINATOR_BACKOFF_COUNT: Setting = Setting.intSetting( - "plugins.index_state_management.coordinator.backoff_count", - LegacyOpenDistroManagedIndexSettings.COORDINATOR_BACKOFF_COUNT, - 0, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val HISTORY_ENABLED: Setting = Setting.boolSetting( - "plugins.index_state_management.history.enabled", - LegacyOpenDistroManagedIndexSettings.HISTORY_ENABLED, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val HISTORY_MAX_DOCS: Setting = Setting.longSetting( - "plugins.index_state_management.history.max_docs", - LegacyOpenDistroManagedIndexSettings.HISTORY_MAX_DOCS, // 1 doc is ~10kb or less. 
This many doc is roughly 25gb - 0L, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val HISTORY_INDEX_MAX_AGE: Setting = Setting.positiveTimeSetting( - "plugins.index_state_management.history.max_age", - LegacyOpenDistroManagedIndexSettings.HISTORY_INDEX_MAX_AGE, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val HISTORY_ROLLOVER_CHECK_PERIOD: Setting = Setting.positiveTimeSetting( - "plugins.index_state_management.history.rollover_check_period", - LegacyOpenDistroManagedIndexSettings.HISTORY_ROLLOVER_CHECK_PERIOD, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val HISTORY_RETENTION_PERIOD: Setting = Setting.positiveTimeSetting( - "plugins.index_state_management.history.rollover_retention_period", - LegacyOpenDistroManagedIndexSettings.HISTORY_RETENTION_PERIOD, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val HISTORY_NUMBER_OF_SHARDS: Setting = Setting.intSetting( - "plugins.index_state_management.history.number_of_shards", - LegacyOpenDistroManagedIndexSettings.HISTORY_NUMBER_OF_SHARDS, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val HISTORY_NUMBER_OF_REPLICAS: Setting = Setting.intSetting( - "plugins.index_state_management.history.number_of_replicas", - LegacyOpenDistroManagedIndexSettings.HISTORY_NUMBER_OF_REPLICAS, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val ALLOW_LIST: Setting> = Setting.listSetting( - "plugins.index_state_management.allow_list", - LegacyOpenDistroManagedIndexSettings.ALLOW_LIST, - Function.identity(), - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val SNAPSHOT_DENY_LIST: Setting> = Setting.listSetting( - "plugins.index_state_management.snapshot.deny_list", - LegacyOpenDistroManagedIndexSettings.SNAPSHOT_DENY_LIST, - Function.identity(), - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val JITTER: Setting = Setting.doubleSetting( - "plugins.index_state_management.jitter", - DEFAULT_JITTER, - 0.0, - 1.0, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val RESTRICTED_INDEX_PATTERN = Setting.simpleString( - "plugins.index_state_management.restricted_index_pattern", - LegacyOpenDistroManagedIndexSettings.RESTRICTED_INDEX_PATTERN, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) + val INDEX_STATE_MANAGEMENT_ENABLED: Setting = + Setting.boolSetting( + "plugins.index_state_management.enabled", + LegacyOpenDistroManagedIndexSettings.INDEX_STATE_MANAGEMENT_ENABLED, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ACTION_VALIDATION_ENABLED: Setting = + Setting.boolSetting( + "plugins.index_state_management.action_validation.enabled", + DEFAULT_ACTION_VALIDATION_ENABLED, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val POLICY_ID: Setting = + Setting.simpleString( + "index.plugins.index_state_management.policy_id", + LegacyOpenDistroManagedIndexSettings.POLICY_ID, + Setting.Property.IndexScope, + Setting.Property.Dynamic, + ) + + val ROLLOVER_ALIAS: Setting = + Setting.simpleString( + "index.plugins.index_state_management.rollover_alias", + LegacyOpenDistroManagedIndexSettings.ROLLOVER_ALIAS, + Setting.Property.IndexScope, + Setting.Property.Dynamic, + ) + + val ROLLOVER_SKIP: Setting = + Setting.boolSetting( + "index.plugins.index_state_management.rollover_skip", + LegacyOpenDistroManagedIndexSettings.ROLLOVER_SKIP, + Setting.Property.IndexScope, + Setting.Property.Dynamic, + ) + + val AUTO_MANAGE: Setting = + Setting.boolSetting( + 
"index.plugins.index_state_management.auto_manage", + LegacyOpenDistroManagedIndexSettings.AUTO_MANAGE, + Setting.Property.IndexScope, + Setting.Property.Dynamic, + ) + + val JOB_INTERVAL: Setting = + Setting.intSetting( + "plugins.index_state_management.job_interval", + LegacyOpenDistroManagedIndexSettings.JOB_INTERVAL, + 1, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val SWEEP_PERIOD: Setting = + Setting.timeSetting( + "plugins.index_state_management.coordinator.sweep_period", + LegacyOpenDistroManagedIndexSettings.SWEEP_PERIOD, + TimeValue.timeValueMinutes(5), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val SWEEP_SKIP_PERIOD: Setting = + Setting.timeSetting( + "plugins.index_state_management.coordinator.sweep_skip_period", + TimeValue.timeValueMinutes(5), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val COORDINATOR_BACKOFF_MILLIS: Setting = + Setting.positiveTimeSetting( + "plugins.index_state_management.coordinator.backoff_millis", + LegacyOpenDistroManagedIndexSettings.COORDINATOR_BACKOFF_MILLIS, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val COORDINATOR_BACKOFF_COUNT: Setting = + Setting.intSetting( + "plugins.index_state_management.coordinator.backoff_count", + LegacyOpenDistroManagedIndexSettings.COORDINATOR_BACKOFF_COUNT, + 0, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val HISTORY_ENABLED: Setting = + Setting.boolSetting( + "plugins.index_state_management.history.enabled", + LegacyOpenDistroManagedIndexSettings.HISTORY_ENABLED, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val HISTORY_MAX_DOCS: Setting = + Setting.longSetting( + "plugins.index_state_management.history.max_docs", + LegacyOpenDistroManagedIndexSettings.HISTORY_MAX_DOCS, // 1 doc is ~10kb or less. 
This many doc is roughly 25gb + 0L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val HISTORY_INDEX_MAX_AGE: Setting = + Setting.positiveTimeSetting( + "plugins.index_state_management.history.max_age", + LegacyOpenDistroManagedIndexSettings.HISTORY_INDEX_MAX_AGE, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val HISTORY_ROLLOVER_CHECK_PERIOD: Setting = + Setting.positiveTimeSetting( + "plugins.index_state_management.history.rollover_check_period", + LegacyOpenDistroManagedIndexSettings.HISTORY_ROLLOVER_CHECK_PERIOD, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val HISTORY_RETENTION_PERIOD: Setting = + Setting.positiveTimeSetting( + "plugins.index_state_management.history.rollover_retention_period", + LegacyOpenDistroManagedIndexSettings.HISTORY_RETENTION_PERIOD, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val HISTORY_NUMBER_OF_SHARDS: Setting = + Setting.intSetting( + "plugins.index_state_management.history.number_of_shards", + LegacyOpenDistroManagedIndexSettings.HISTORY_NUMBER_OF_SHARDS, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val HISTORY_NUMBER_OF_REPLICAS: Setting = + Setting.intSetting( + "plugins.index_state_management.history.number_of_replicas", + LegacyOpenDistroManagedIndexSettings.HISTORY_NUMBER_OF_REPLICAS, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALLOW_LIST: Setting> = + Setting.listSetting( + "plugins.index_state_management.allow_list", + LegacyOpenDistroManagedIndexSettings.ALLOW_LIST, + Function.identity(), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val SNAPSHOT_DENY_LIST: Setting> = + Setting.listSetting( + "plugins.index_state_management.snapshot.deny_list", + LegacyOpenDistroManagedIndexSettings.SNAPSHOT_DENY_LIST, + Function.identity(), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val JITTER: Setting = + Setting.doubleSetting( + "plugins.index_state_management.jitter", + DEFAULT_JITTER, + 0.0, + 1.0, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val RESTRICTED_INDEX_PATTERN = + Setting.simpleString( + "plugins.index_state_management.restricted_index_pattern", + LegacyOpenDistroManagedIndexSettings.RESTRICTED_INDEX_PATTERN, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/alias/AttemptAliasActionsStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/alias/AttemptAliasActionsStep.kt index 6952416a0..f756fe547 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/alias/AttemptAliasActionsStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/alias/AttemptAliasActionsStep.kt @@ -15,7 +15,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedInde import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaData class AttemptAliasActionsStep(private val action: AliasAction) : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? 
= null @@ -51,7 +50,7 @@ class AttemptAliasActionsStep(private val action: AliasAction) : Step(name) { private fun handleResponse( response: AcknowledgedResponse, indexName: String, - actions: List + actions: List, ) { if (response.isAcknowledged) { stepStatus = StepStatus.COMPLETED @@ -66,7 +65,7 @@ class AttemptAliasActionsStep(private val action: AliasAction) : Step(name) { return currentMetadata.copy( stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -75,9 +74,10 @@ class AttemptAliasActionsStep(private val action: AliasAction) : Step(name) { companion object { val validTopContextFields = setOf("index") const val name = "attempt_alias" + fun getFailedMessage( index: String, - actions: List + actions: List, ) = "Failed to update alias [index=$index] for actions: [actions=$actions]" fun getSuccessMessage(index: String) = "Successfully updated alias [index=$index]" diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/allocation/AttemptAllocationStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/allocation/AttemptAllocationStep.kt index 4d18652b1..1c42612ba 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/allocation/AttemptAllocationStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/allocation/AttemptAllocationStep.kt @@ -16,7 +16,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedInde import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaData class AttemptAllocationStep(private val action: AllocationAction) : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? = null @@ -25,9 +24,10 @@ class AttemptAllocationStep(private val action: AllocationAction) : Step(name) { val context = this.context ?: return this val indexName = context.metadata.index try { - val response: AcknowledgedResponse = context.client.admin() - .indices() - .suspendUntil { updateSettings(UpdateSettingsRequest(buildSettings(), indexName), it) } + val response: AcknowledgedResponse = + context.client.admin() + .indices() + .suspendUntil { updateSettings(UpdateSettingsRequest(buildSettings(), indexName), it) } handleResponse(response, indexName) } catch (e: Exception) { handleException(e, indexName) @@ -68,7 +68,7 @@ class AttemptAllocationStep(private val action: AllocationAction) : Step(name) { return currentMetadata.copy( stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -77,7 +77,9 @@ class AttemptAllocationStep(private val action: AllocationAction) : Step(name) { companion object { const val name = "attempt_allocation" private const val SETTINGS_PREFIX = "index.routing.allocation." 
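The step classes in this patch repeatedly call the listener-based admin client through the plugin's suspendUntil extension (imported from org.opensearch.indexmanagement.opensearchapi). Roughly, such a coroutine bridge can be written as below; this standalone version is only illustrative and not the plugin's exact helper, and the ActionListener package is the post-core-split one suggested by the imports in this diff.

    import kotlinx.coroutines.suspendCancellableCoroutine
    import org.opensearch.core.action.ActionListener
    import kotlin.coroutines.resume
    import kotlin.coroutines.resumeWithException

    // Suspend until the listener-based OpenSearch call completes, turning the callback into a return value.
    suspend fun <C, T> C.suspendUntil(block: C.(ActionListener<T>) -> Unit): T =
        suspendCancellableCoroutine { cont ->
            block(object : ActionListener<T> {
                override fun onResponse(response: T) = cont.resume(response)

                override fun onFailure(e: Exception) = cont.resumeWithException(e)
            })
        }

With a helper of this shape in scope, client.admin().indices().suspendUntil { updateSettings(request, it) } reads as an ordinary suspending call, which is the shape every step touched by this patch uses.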
+ fun getFailedMessage(index: String) = "Failed to update allocation setting [index=$index]" + fun getSuccessMessage(index: String) = "Successfully updated allocation setting [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/close/AttemptCloseStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/close/AttemptCloseStep.kt index 3e02ecb91..5e4b01fdd 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/close/AttemptCloseStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/close/AttemptCloseStep.kt @@ -17,7 +17,6 @@ import org.opensearch.snapshots.SnapshotInProgressException import org.opensearch.transport.RemoteTransportException class AttemptCloseStep : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? = null @@ -26,11 +25,13 @@ class AttemptCloseStep : Step(name) { val context = this.context ?: return this val indexName = context.metadata.index try { - val closeIndexRequest = CloseIndexRequest() - .indices(indexName) + val closeIndexRequest = + CloseIndexRequest() + .indices(indexName) - val response: CloseIndexResponse = context.client.admin().indices() - .suspendUntil { close(closeIndexRequest, it) } + val response: CloseIndexResponse = + context.client.admin().indices() + .suspendUntil { close(closeIndexRequest, it) } if (response.isAcknowledged) { stepStatus = StepStatus.COMPLETED @@ -78,7 +79,7 @@ class AttemptCloseStep : Step(name) { return currentMetadata.copy( stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -86,8 +87,11 @@ class AttemptCloseStep : Step(name) { companion object { const val name = "attempt_close" + fun getFailedMessage(index: String) = "Failed to close index [index=$index]" + fun getSuccessMessage(index: String) = "Successfully closed index [index=$index]" + fun getSnapshotMessage(index: String) = "Index had snapshot in progress, retrying closing [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/delete/AttemptDeleteStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/delete/AttemptDeleteStep.kt index 8019130a2..bbbc390d3 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/delete/AttemptDeleteStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/delete/AttemptDeleteStep.kt @@ -17,7 +17,6 @@ import org.opensearch.snapshots.SnapshotInProgressException import org.opensearch.transport.RemoteTransportException class AttemptDeleteStep : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? 
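The edits in these step classes are mechanical and follow two lint conventions applied throughout the patch: a multiline right-hand side moves to its own line below the assignment, and multiline call sites gain a trailing comma. A small self-contained Kotlin illustration of the before/after shape (the identifiers are invented for the example):

    data class Result(val acknowledged: Boolean)

    fun fetch(): Result = Result(true)

    // Before this patch: the wrapped expression began on the assignment line and
    // multiline argument lists had no trailing comma.
    val before: Map<String, Any> = mapOf(
        "message" to "ok",
        "acknowledged" to fetch().acknowledged
    )

    // After this patch: a multiline right-hand side starts below the '=', and the last
    // argument of a multiline call gains a trailing comma.
    val after: Map<String, Any> =
        mapOf(
            "message" to "ok",
            "acknowledged" to fetch().acknowledged,
        )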
= null @@ -26,8 +25,9 @@ class AttemptDeleteStep : Step(name) { val context = this.context ?: return this val indexName = context.metadata.index try { - val response: AcknowledgedResponse = context.client.admin().indices() - .suspendUntil { delete(DeleteIndexRequest(indexName), it) } + val response: AcknowledgedResponse = + context.client.admin().indices() + .suspendUntil { delete(DeleteIndexRequest(indexName), it) } if (response.isAcknowledged) { stepStatus = StepStatus.COMPLETED @@ -75,7 +75,7 @@ class AttemptDeleteStep : Step(name) { return currentMetadata.copy( stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -83,8 +83,11 @@ class AttemptDeleteStep : Step(name) { companion object { const val name = "attempt_delete" + fun getFailedMessage(indexName: String) = "Failed to delete index [index=$indexName]" + fun getSuccessMessage(indexName: String) = "Successfully deleted index [index=$indexName]" + fun getSnapshotMessage(indexName: String) = "Index had snapshot in progress, retrying deletion [index=$indexName]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/forcemerge/AttemptCallForceMergeStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/forcemerge/AttemptCallForceMergeStep.kt index 58b455d00..3ff796e55 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/forcemerge/AttemptCallForceMergeStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/forcemerge/AttemptCallForceMergeStep.kt @@ -14,6 +14,7 @@ import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.action.admin.indices.forcemerge.ForceMergeRequest import org.opensearch.action.admin.indices.forcemerge.ForceMergeResponse +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.indexstatemanagement.action.ForceMergeAction import org.opensearch.indexmanagement.opensearchapi.getUsefulCauseString import org.opensearch.indexmanagement.opensearchapi.suspendUntil @@ -21,12 +22,10 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ActionProperties import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaData -import org.opensearch.core.rest.RestStatus import org.opensearch.transport.RemoteTransportException import java.time.Instant class AttemptCallForceMergeStep(private val action: ForceMergeAction) : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? = null @@ -36,7 +35,6 @@ class AttemptCallForceMergeStep(private val action: ForceMergeAction) : Step(nam val context = this.context ?: return this val indexName = context.metadata.index try { - val startTime = Instant.now().toEpochMilli() val request = ForceMergeRequest(indexName).maxNumSegments(action.maxNumSegments) var response: ForceMergeResponse? 
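Both the close and delete steps above treat SnapshotInProgressException as a transient condition rather than a hard failure, so the step is retried on the next execution instead of failing the policy. A condensed sketch of that decision, with a simplified StepStatus enum standing in for the SPI type:

    import org.opensearch.ExceptionsHelper
    import org.opensearch.snapshots.SnapshotInProgressException
    import org.opensearch.transport.RemoteTransportException

    enum class StepStatus { COMPLETED, CONDITION_NOT_MET, FAILED }

    // Map an exception from a close/delete call to a step outcome: a snapshot in progress
    // means "try again later", anything else fails the step.
    fun classifyFailure(raw: Exception): StepStatus {
        val cause = if (raw is RemoteTransportException) ExceptionsHelper.unwrapCause(raw) else raw
        return when (cause) {
            is SnapshotInProgressException -> StepStatus.CONDITION_NOT_MET
            else -> StepStatus.FAILED
        }
    }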
= null @@ -66,11 +64,12 @@ class AttemptCallForceMergeStep(private val action: ForceMergeAction) : Step(nam } else { // Otherwise the request to force merge encountered some problem stepStatus = StepStatus.FAILED - info = mapOf( - "message" to getFailedMessage(indexName), - "status" to shadowedResponse.status, - "shard_failures" to shadowedResponse.shardFailures.map { it.getUsefulCauseString() } - ) + info = + mapOf( + "message" to getFailedMessage(indexName), + "status" to shadowedResponse.status, + "shard_failures" to shadowedResponse.shardFailures.map { it.getUsefulCauseString() }, + ) } } catch (e: RemoteTransportException) { handleException(indexName, ExceptionsHelper.unwrapCause(e) as Exception) @@ -99,7 +98,7 @@ class AttemptCallForceMergeStep(private val action: ForceMergeAction) : Step(nam actionMetaData = currentActionMetaData?.copy(actionProperties = ActionProperties(maxNumSegments = action.maxNumSegments)), stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -109,8 +108,11 @@ class AttemptCallForceMergeStep(private val action: ForceMergeAction) : Step(nam const val name = "attempt_call_force_merge" const val FIVE_MINUTES_IN_MILLIS = 1000 * 60 * 5 // how long to wait for the force merge request before moving on const val FIVE_SECONDS_IN_MILLIS = 1000L * 5L // delay + fun getFailedMessage(index: String) = "Failed to start force merge [index=$index]" + fun getSuccessfulCallMessage(index: String) = "Successfully called force merge [index=$index]" + fun getSuccessMessage(index: String) = "Successfully completed force merge [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/forcemerge/AttemptSetReadOnlyStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/forcemerge/AttemptSetReadOnlyStep.kt index 60399ec51..1f931404b 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/forcemerge/AttemptSetReadOnlyStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/forcemerge/AttemptSetReadOnlyStep.kt @@ -20,7 +20,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaDat import org.opensearch.transport.RemoteTransportException class AttemptSetReadOnlyStep(private val action: ForceMergeAction) : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? 
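As a usage reference for the force-merge call made above: the request is just ForceMergeRequest(index).maxNumSegments(n), and a non-OK response is reported through its status and per-shard failures. A minimal suspending sketch built on the suspendUntil bridge sketched earlier (assumed to be in scope); the failure formatting here is a plain stand-in for the plugin's getUsefulCauseString helper.

    import org.opensearch.action.admin.indices.forcemerge.ForceMergeRequest
    import org.opensearch.action.admin.indices.forcemerge.ForceMergeResponse
    import org.opensearch.client.Client
    import org.opensearch.core.rest.RestStatus

    // Kick off a force merge and summarise the outcome; failure details come from shardFailures.
    suspend fun forceMergeSummary(client: Client, index: String, maxNumSegments: Int): Map<String, Any> {
        val request = ForceMergeRequest(index).maxNumSegments(maxNumSegments)
        val response: ForceMergeResponse = client.admin().indices().suspendUntil { forceMerge(request, it) }
        return if (response.status == RestStatus.OK) {
            mapOf("message" to "Successfully called force merge [index=$index]")
        } else {
            mapOf(
                "message" to "Failed to start force merge [index=$index]",
                "status" to response.status,
                "shard_failures" to response.shardFailures.map { it.cause()?.message ?: it.reason() },
            )
        }
    }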
= null @@ -44,11 +43,13 @@ class AttemptSetReadOnlyStep(private val action: ForceMergeAction) : Step(name) @Suppress("TooGenericExceptionCaught") private suspend fun setIndexToReadOnly(indexName: String, context: StepContext): Boolean { try { - val updateSettingsRequest = UpdateSettingsRequest() - .indices(indexName) - .settings(Settings.builder().put(SETTING_BLOCKS_WRITE, true)) - val response: AcknowledgedResponse = context.client.admin().indices() - .suspendUntil { updateSettings(updateSettingsRequest, it) } + val updateSettingsRequest = + UpdateSettingsRequest() + .indices(indexName) + .settings(Settings.builder().put(SETTING_BLOCKS_WRITE, true)) + val response: AcknowledgedResponse = + context.client.admin().indices() + .suspendUntil { updateSettings(updateSettingsRequest, it) } if (response.isAcknowledged) { return true @@ -82,14 +83,16 @@ class AttemptSetReadOnlyStep(private val action: ForceMergeAction) : Step(name) currentMetadata.copy( stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) override fun isIdempotent() = true companion object { const val name = "attempt_set_read_only" + fun getFailedMessage(index: String) = "Failed to set index to read-only [index=$index]" + fun getSuccessMessage(index: String) = "Successfully set index to read-only [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/forcemerge/WaitForForceMergeStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/forcemerge/WaitForForceMergeStep.kt index e07560975..612739251 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/forcemerge/WaitForForceMergeStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/forcemerge/WaitForForceMergeStep.kt @@ -8,6 +8,7 @@ package org.opensearch.indexmanagement.indexstatemanagement.step.forcemerge import org.apache.logging.log4j.LogManager import org.opensearch.action.admin.indices.stats.IndicesStatsRequest import org.opensearch.action.admin.indices.stats.IndicesStatsResponse +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.indexstatemanagement.action.ForceMergeAction import org.opensearch.indexmanagement.opensearchapi.getUsefulCauseString import org.opensearch.indexmanagement.opensearchapi.suspendUntil @@ -16,12 +17,10 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ActionPrope import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaData -import org.opensearch.core.rest.RestStatus import java.time.Duration import java.time.Instant class WaitForForceMergeStep(private val action: ForceMergeAction) : Step(name, false) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? 
= null @@ -62,7 +61,7 @@ class WaitForForceMergeStep(private val action: ForceMergeAction) : Step(name, f if (timeWaitingForForceMerge.seconds > timeoutInSeconds) { logger.error( "Force merge on [$indexName] timed out with" + - " [$shardsStillMergingSegments] shards containing unmerged segments" + " [$shardsStillMergingSegments] shards containing unmerged segments", ) stepStatus = StepStatus.FAILED @@ -70,7 +69,7 @@ class WaitForForceMergeStep(private val action: ForceMergeAction) : Step(name, f } else { logger.debug( "Force merge still running on [$indexName] with" + - " [$shardsStillMergingSegments] shards containing unmerged segments" + " [$shardsStillMergingSegments] shards containing unmerged segments", ) stepStatus = StepStatus.CONDITION_NOT_MET @@ -87,10 +86,11 @@ class WaitForForceMergeStep(private val action: ForceMergeAction) : Step(name, f if (actionProperties?.maxNumSegments == null) { stepStatus = StepStatus.FAILED - info = mapOf( - "message" to "Unable to retrieve [${ActionProperties.Properties.MAX_NUM_SEGMENTS.key}]" + - " from ActionProperties=$actionProperties" - ) + info = + mapOf( + "message" to "Unable to retrieve [${ActionProperties.Properties.MAX_NUM_SEGMENTS.key}]" + + " from ActionProperties=$actionProperties", + ) return null } @@ -117,10 +117,11 @@ class WaitForForceMergeStep(private val action: ForceMergeAction) : Step(name, f val message = getFailedSegmentCheckMessage(indexName) logger.warn("$message - ${statsResponse.status}") stepStatus = StepStatus.FAILED - info = mapOf( - "message" to message, - "shard_failures" to statsResponse.shardFailures.map { it.getUsefulCauseString() } - ) + info = + mapOf( + "message" to message, + "shard_failures" to statsResponse.shardFailures.map { it.getUsefulCauseString() }, + ) } catch (e: Exception) { val message = getFailedSegmentCheckMessage(indexName) logger.error(message, e) @@ -144,15 +145,19 @@ class WaitForForceMergeStep(private val action: ForceMergeAction) : Step(name, f override fun getUpdatedManagedIndexMetadata(currentMetadata: ManagedIndexMetaData): ManagedIndexMetaData { // if the step is completed set actionProperties back to null val currentActionMetaData = currentMetadata.actionMetaData - val updatedActionMetaData = currentActionMetaData?.let { - if (stepStatus != StepStatus.COMPLETED) it - else currentActionMetaData.copy(actionProperties = null) - } + val updatedActionMetaData = + currentActionMetaData?.let { + if (stepStatus != StepStatus.COMPLETED) { + it + } else { + currentActionMetaData.copy(actionProperties = null) + } + } return currentMetadata.copy( actionMetaData = updatedActionMetaData, stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -161,9 +166,13 @@ class WaitForForceMergeStep(private val action: ForceMergeAction) : Step(name, f companion object { const val name = "wait_for_force_merge" const val FORCE_MERGE_TIMEOUT_IN_SECONDS = 43200L // 12 hours + fun getFailedTimedOutMessage(index: String) = "Force merge timed out [index=$index]" + fun getFailedSegmentCheckMessage(index: String) = "Failed to check segments when waiting for force merge to complete [index=$index]" + fun getWaitingMessage(index: String) = "Waiting for force merge to complete [index=$index]" + fun getSuccessMessage(index: String) = "Successfully confirmed segments force merged [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/indexpriority/AttemptSetIndexPriorityStep.kt 
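The wait-for-force-merge condition above boils down to counting shards whose segment count is still above the requested max_num_segments and comparing how long the step has waited against its timeout. A simplified sketch of that check, assuming the stats request included segment stats; treat it as illustrative rather than the step's exact code.

    import java.time.Duration
    import java.time.Instant
    import org.opensearch.action.admin.indices.stats.IndicesStatsResponse

    // Count shards that still report more segments than the force merge asked for.
    fun shardsStillMerging(stats: IndicesStatsResponse, maxNumSegments: Int): Int =
        stats.shards.count { (it.stats.segments?.count ?: 0L) > maxNumSegments }

    // Keep waiting only while some shards are unmerged and the timeout window has not elapsed.
    fun shouldKeepWaiting(stepStart: Instant, timeoutSeconds: Long, mergingShards: Int): Boolean =
        mergingShards > 0 && Duration.between(stepStart, Instant.now()).seconds <= timeoutSeconds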
b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/indexpriority/AttemptSetIndexPriorityStep.kt index 1ce082aaf..09aaa5b0b 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/indexpriority/AttemptSetIndexPriorityStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/indexpriority/AttemptSetIndexPriorityStep.kt @@ -19,7 +19,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaDat import org.opensearch.transport.RemoteTransportException class AttemptSetIndexPriorityStep(private val action: IndexPriorityAction) : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? = null @@ -30,11 +29,13 @@ class AttemptSetIndexPriorityStep(private val action: IndexPriorityAction) : Ste val indexName = context.metadata.index val managedIndexMetaData = context.metadata try { - val updateSettingsRequest = UpdateSettingsRequest() - .indices(managedIndexMetaData.index) - .settings(Settings.builder().put(SETTING_PRIORITY, action.indexPriority)) - val response: AcknowledgedResponse = context.client.admin().indices() - .suspendUntil { updateSettings(updateSettingsRequest, it) } + val updateSettingsRequest = + UpdateSettingsRequest() + .indices(managedIndexMetaData.index) + .settings(Settings.builder().put(SETTING_PRIORITY, action.indexPriority)) + val response: AcknowledgedResponse = + context.client.admin().indices() + .suspendUntil { updateSettings(updateSettingsRequest, it) } if (response.isAcknowledged) { stepStatus = StepStatus.COMPLETED @@ -68,7 +69,7 @@ class AttemptSetIndexPriorityStep(private val action: IndexPriorityAction) : Ste return currentMetadata.copy( stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -76,7 +77,9 @@ class AttemptSetIndexPriorityStep(private val action: IndexPriorityAction) : Ste companion object { const val name = "attempt_set_index_priority" + fun getFailedMessage(index: String, indexPriority: Int) = "Failed to set index priority to $indexPriority [index=$index]" + fun getSuccessMessage(index: String, indexPriority: Int) = "Successfully set index priority to $indexPriority [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/notification/AttemptNotificationStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/notification/AttemptNotificationStep.kt index 22953bc61..04cdd2bc0 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/notification/AttemptNotificationStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/notification/AttemptNotificationStep.kt @@ -18,7 +18,6 @@ import org.opensearch.script.ScriptService import org.opensearch.script.TemplateScript class AttemptNotificationStep(private val action: NotificationAction) : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? 
= null @@ -55,7 +54,7 @@ class AttemptNotificationStep(private val action: NotificationAction) : Step(nam return currentMetadata.copy( stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -70,7 +69,9 @@ class AttemptNotificationStep(private val action: NotificationAction) : Step(nam companion object { const val name = "attempt_notification" const val CHANNEL_TITLE = "Index Management-ISM-Notification Action" + fun getFailedMessage(index: String) = "Failed to send notification [index=$index]" + fun getSuccessMessage(index: String) = "Successfully sent notification [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/open/AttemptOpenStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/open/AttemptOpenStep.kt index 2f9987d6e..7f384f845 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/open/AttemptOpenStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/open/AttemptOpenStep.kt @@ -24,8 +24,9 @@ class AttemptOpenStep : Step(name) { val context = this.context ?: return this val indexName = context.metadata.index try { - val openIndexRequest = OpenIndexRequest() - .indices(indexName) + val openIndexRequest = + OpenIndexRequest() + .indices(indexName) val response: OpenIndexResponse = context.client.admin().indices().suspendUntil { open(openIndexRequest, it) } if (response.isAcknowledged) { @@ -45,6 +46,7 @@ class AttemptOpenStep : Step(name) { return this } + private fun handleException(indexName: String, e: Exception) { val message = getFailedMessage(indexName) logger.error(message, e) @@ -59,7 +61,7 @@ class AttemptOpenStep : Step(name) { return currentMetadata.copy( stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -67,7 +69,9 @@ class AttemptOpenStep : Step(name) { companion object { const val name = "attempt_open" + fun getFailedMessage(indexName: String) = "Failed to open index [index=$indexName]" + fun getSuccessMessage(indexName: String) = "Successfully opened index [index=$indexName]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/readonly/SetReadOnlyStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/readonly/SetReadOnlyStep.kt index b821d5b59..2560bb208 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/readonly/SetReadOnlyStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/readonly/SetReadOnlyStep.kt @@ -18,7 +18,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaDat import org.opensearch.transport.RemoteTransportException class SetReadOnlyStep : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? 
= null @@ -27,11 +26,13 @@ class SetReadOnlyStep : Step(name) { val context = this.context ?: return this val indexName = context.metadata.index try { - val updateSettingsRequest = UpdateSettingsRequest() - .indices(indexName) - .settings(Settings.builder().put(SETTING_BLOCKS_WRITE, true)) - val response: AcknowledgedResponse = context.client.admin().indices() - .suspendUntil { updateSettings(updateSettingsRequest, it) } + val updateSettingsRequest = + UpdateSettingsRequest() + .indices(indexName) + .settings(Settings.builder().put(SETTING_BLOCKS_WRITE, true)) + val response: AcknowledgedResponse = + context.client.admin().indices() + .suspendUntil { updateSettings(updateSettingsRequest, it) } if (response.isAcknowledged) { stepStatus = StepStatus.COMPLETED @@ -65,7 +66,7 @@ class SetReadOnlyStep : Step(name) { return currentMetadata.copy( stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -73,7 +74,9 @@ class SetReadOnlyStep : Step(name) { companion object { const val name = "set_read_only" + fun getFailedMessage(index: String) = "Failed to set index to read-only [index=$index]" + fun getSuccessMessage(index: String) = "Successfully set index to read-only [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/readwrite/SetReadWriteStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/readwrite/SetReadWriteStep.kt index 91e4d1a54..11e634c7a 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/readwrite/SetReadWriteStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/readwrite/SetReadWriteStep.kt @@ -18,7 +18,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaDat import org.opensearch.transport.RemoteTransportException class SetReadWriteStep : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? 
= null @@ -27,13 +26,15 @@ class SetReadWriteStep : Step(name) { val context = this.context ?: return this val indexName = context.metadata.index try { - val updateSettingsRequest = UpdateSettingsRequest() - .indices(indexName) - .settings( - Settings.builder().put(SETTING_BLOCKS_WRITE, false) - ) - val response: AcknowledgedResponse = context.client.admin().indices() - .suspendUntil { updateSettings(updateSettingsRequest, it) } + val updateSettingsRequest = + UpdateSettingsRequest() + .indices(indexName) + .settings( + Settings.builder().put(SETTING_BLOCKS_WRITE, false), + ) + val response: AcknowledgedResponse = + context.client.admin().indices() + .suspendUntil { updateSettings(updateSettingsRequest, it) } if (response.isAcknowledged) { stepStatus = StepStatus.COMPLETED @@ -67,7 +68,7 @@ class SetReadWriteStep : Step(name) { return currentMetadata.copy( stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -75,7 +76,9 @@ class SetReadWriteStep : Step(name) { companion object { const val name = "set_read_write" + fun getFailedMessage(index: String) = "Failed to set index to read-write [index=$index]" + fun getSuccessMessage(index: String) = "Successfully set index to read-write [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/replicacount/AttemptReplicaCountStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/replicacount/AttemptReplicaCountStep.kt index 4599668c8..68eccae8f 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/replicacount/AttemptReplicaCountStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/replicacount/AttemptReplicaCountStep.kt @@ -19,7 +19,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaDat import org.opensearch.transport.RemoteTransportException class AttemptReplicaCountStep(private val action: ReplicaCountAction) : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? 
= null @@ -29,11 +28,13 @@ class AttemptReplicaCountStep(private val action: ReplicaCountAction) : Step(nam val context = this.context ?: return this val indexName = context.metadata.index try { - val updateSettingsRequest = UpdateSettingsRequest() - .indices(indexName) - .settings(Settings.builder().put(SETTING_NUMBER_OF_REPLICAS, numOfReplicas)) - val response: AcknowledgedResponse = context.client.admin().indices() - .suspendUntil { updateSettings(updateSettingsRequest, it) } + val updateSettingsRequest = + UpdateSettingsRequest() + .indices(indexName) + .settings(Settings.builder().put(SETTING_NUMBER_OF_REPLICAS, numOfReplicas)) + val response: AcknowledgedResponse = + context.client.admin().indices() + .suspendUntil { updateSettings(updateSettingsRequest, it) } if (response.isAcknowledged) { stepStatus = StepStatus.COMPLETED @@ -67,7 +68,7 @@ class AttemptReplicaCountStep(private val action: ReplicaCountAction) : Step(nam return currentMetadata.copy( stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -75,7 +76,9 @@ class AttemptReplicaCountStep(private val action: ReplicaCountAction) : Step(nam companion object { const val name = "attempt_set_replica_count" + fun getFailedMessage(index: String, numOfReplicas: Int) = "Failed to set number_of_replicas to $numOfReplicas [index=$index]" + fun getSuccessMessage(index: String, numOfReplicas: Int) = "Successfully set number_of_replicas to $numOfReplicas [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/rollover/AttemptRolloverStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/rollover/AttemptRolloverStep.kt index 4c7356e07..66f9486c2 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/rollover/AttemptRolloverStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/rollover/AttemptRolloverStep.kt @@ -13,11 +13,12 @@ import org.opensearch.action.admin.indices.rollover.RolloverRequest import org.opensearch.action.admin.indices.rollover.RolloverResponse import org.opensearch.action.admin.indices.stats.IndicesStatsRequest import org.opensearch.action.admin.indices.stats.IndicesStatsResponse -import org.opensearch.core.common.unit.ByteSizeValue import org.opensearch.action.support.master.AcknowledgedResponse import org.opensearch.client.Client import org.opensearch.cluster.service.ClusterService import org.opensearch.common.unit.TimeValue +import org.opensearch.core.common.unit.ByteSizeValue +import org.opensearch.core.rest.RestStatus import org.opensearch.index.IndexNotFoundException import org.opensearch.indexmanagement.indexstatemanagement.action.RolloverAction import org.opensearch.indexmanagement.indexstatemanagement.opensearchapi.getRolloverAlias @@ -29,13 +30,11 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaData -import org.opensearch.core.rest.RestStatus import org.opensearch.transport.RemoteTransportException import java.time.Instant @Suppress("ReturnCount") class AttemptRolloverStep(private val action: RolloverAction) : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = 
StepStatus.STARTING private var info: Map? = null @@ -77,51 +76,57 @@ class AttemptRolloverStep(private val action: RolloverAction) : Step(name) { statsResponse ?: return this val indexCreationDate = clusterService.state().metadata().index(indexName).creationDate - val indexAgeTimeValue = if (indexCreationDate == -1L) { - logger.warn("$indexName had an indexCreationDate=-1L, cannot use for comparison") - // since we cannot use for comparison, we can set it to 0 as minAge will never be <= 0 - TimeValue.timeValueMillis(0) - } else { - TimeValue.timeValueMillis(Instant.now().toEpochMilli() - indexCreationDate) - } + val indexAgeTimeValue = + if (indexCreationDate == -1L) { + logger.warn("$indexName had an indexCreationDate=-1L, cannot use for comparison") + // since we cannot use for comparison, we can set it to 0 as minAge will never be <= 0 + TimeValue.timeValueMillis(0) + } else { + TimeValue.timeValueMillis(Instant.now().toEpochMilli() - indexCreationDate) + } val numDocs = statsResponse.primaries.docs?.count ?: 0 val indexSize = ByteSizeValue(statsResponse.primaries.docs?.totalSizeInBytes ?: 0) val largestPrimaryShard = statsResponse.shards.maxByOrNull { it.stats.docs?.totalSizeInBytes ?: 0 } val largestPrimaryShardSize = ByteSizeValue(largestPrimaryShard?.stats?.docs?.totalSizeInBytes ?: 0) - val conditions = listOfNotNull( - action.minAge?.let { - RolloverAction.MIN_INDEX_AGE_FIELD to mapOf( - "condition" to it.toString(), - "current" to indexAgeTimeValue.toString(), - "creationDate" to indexCreationDate - ) - }, - action.minDocs?.let { - RolloverAction.MIN_DOC_COUNT_FIELD to mapOf( - "condition" to it, - "current" to numDocs - ) - }, - action.minSize?.let { - RolloverAction.MIN_SIZE_FIELD to mapOf( - "condition" to it.toString(), - "current" to indexSize.toString() - ) - }, - action.minPrimaryShardSize?.let { - RolloverAction.MIN_PRIMARY_SHARD_SIZE_FIELD to mapOf( - "condition" to it.toString(), - "current" to largestPrimaryShardSize.toString(), - "shard" to largestPrimaryShard?.shardRouting?.id() - ) - } - ).toMap() + val conditions = + listOfNotNull( + action.minAge?.let { + RolloverAction.MIN_INDEX_AGE_FIELD to + mapOf( + "condition" to it.toString(), + "current" to indexAgeTimeValue.toString(), + "creationDate" to indexCreationDate, + ) + }, + action.minDocs?.let { + RolloverAction.MIN_DOC_COUNT_FIELD to + mapOf( + "condition" to it, + "current" to numDocs, + ) + }, + action.minSize?.let { + RolloverAction.MIN_SIZE_FIELD to + mapOf( + "condition" to it.toString(), + "current" to indexSize.toString(), + ) + }, + action.minPrimaryShardSize?.let { + RolloverAction.MIN_PRIMARY_SHARD_SIZE_FIELD to + mapOf( + "condition" to it.toString(), + "current" to largestPrimaryShardSize.toString(), + "shard" to largestPrimaryShard?.shardRouting?.id(), + ) + }, + ).toMap() if (action.evaluateConditions(indexAgeTimeValue, numDocs, indexSize, largestPrimaryShardSize)) { logger.info( "$indexName rollover conditions evaluated to true [indexCreationDate=$indexCreationDate," + - " numDocs=$numDocs, indexSize=${indexSize.bytes}, primaryShardSize=${largestPrimaryShardSize.bytes}]" + " numDocs=$numDocs, indexSize=${indexSize.bytes}, primaryShardSize=${largestPrimaryShardSize.bytes}]", ) executeRollover(context, rolloverTarget, isDataStream, conditions) copyAlias(clusterService, indexName, context.client, rolloverTarget, context.metadata) @@ -139,10 +144,11 @@ class AttemptRolloverStep(private val action: RolloverAction) : Step(name) { val indexAbstraction = metadata.indicesLookup[indexName] val 
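The rollover evaluation above reduces to: derive the index age from its creation date, then check each configured minimum against the observed age, primary doc count, total primary size, and largest primary shard size. A condensed, self-contained sketch of that decision follows; the real step additionally records each condition's current value in the returned info map, and the "no conditions configured means roll over" default is an assumption of this sketch.

    import org.opensearch.common.unit.TimeValue
    import org.opensearch.core.common.unit.ByteSizeValue
    import java.time.Instant

    // Return true when any configured minimum has been reached; null conditions are not configured.
    fun shouldRollover(
        indexCreationDateMillis: Long,
        numDocs: Long,
        indexSize: ByteSizeValue,
        largestPrimaryShardSize: ByteSizeValue,
        minAge: TimeValue?,
        minDocs: Long?,
        minSize: ByteSizeValue?,
        minPrimaryShardSize: ByteSizeValue?,
    ): Boolean {
        // Assumed default: with nothing configured, the action rolls over unconditionally.
        if (minAge == null && minDocs == null && minSize == null && minPrimaryShardSize == null) return true
        // An unknown creation date (-1) cannot satisfy a min_age condition.
        val indexAge =
            if (indexCreationDateMillis == -1L) {
                TimeValue.timeValueMillis(0)
            } else {
                TimeValue.timeValueMillis(Instant.now().toEpochMilli() - indexCreationDateMillis)
            }
        return (minAge != null && indexAge.millis >= minAge.millis) ||
            (minDocs != null && numDocs >= minDocs) ||
            (minSize != null && indexSize.bytes >= minSize.bytes) ||
            (minPrimaryShardSize != null && largestPrimaryShardSize.bytes >= minPrimaryShardSize.bytes)
    }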
isDataStreamIndex = indexAbstraction?.parentDataStream != null - val rolloverTarget = when { - isDataStreamIndex -> indexAbstraction?.parentDataStream?.name - else -> metadata.index(indexName).getRolloverAlias() - } + val rolloverTarget = + when { + isDataStreamIndex -> indexAbstraction?.parentDataStream?.name + else -> metadata.index(indexName).getRolloverAlias() + } if (rolloverTarget == null) { val message = getFailedNoValidAliasMessage(indexName) @@ -188,8 +194,9 @@ class AttemptRolloverStep(private val action: RolloverAction) : Step(name) { private suspend fun getIndexStatsOrUpdateInfo(context: StepContext): IndicesStatsResponse? { val indexName = context.metadata.index try { - val statsRequest = IndicesStatsRequest() - .indices(indexName).clear().docs(true) + val statsRequest = + IndicesStatsRequest() + .indices(indexName).clear().docs(true) val statsResponse: IndicesStatsResponse = context.client.admin().indices().suspendUntil { stats(statsRequest, it) } if (statsResponse.status == RestStatus.OK) { @@ -199,10 +206,11 @@ class AttemptRolloverStep(private val action: RolloverAction) : Step(name) { val message = getFailedEvaluateMessage(indexName) logger.warn("$message - ${statsResponse.status}") stepStatus = StepStatus.FAILED - info = mapOf( - "message" to message, - "shard_failures" to statsResponse.shardFailures.map { it.getUsefulCauseString() } - ) + info = + mapOf( + "message" to message, + "shard_failures" to statsResponse.shardFailures.map { it.getUsefulCauseString() }, + ) } catch (e: RemoteTransportException) { handleException(indexName, ExceptionsHelper.unwrapCause(e) as Exception) } catch (e: Exception) { @@ -217,7 +225,7 @@ class AttemptRolloverStep(private val action: RolloverAction) : Step(name) { context: StepContext, rolloverTarget: String, isDataStream: Boolean, - conditions: Map> + conditions: Map>, ) { val indexName = context.metadata.index try { @@ -231,32 +239,36 @@ class AttemptRolloverStep(private val action: RolloverAction) : Step(name) { // 1. IndexAbstraction.Type.DATA_STREAM - the new index is added to the data stream indicated by the 'rolloverTarget' // 2. 
IndexAbstraction.Type.ALIAS - the new index is added to the alias indicated by the 'rolloverTarget' if (response.isAcknowledged) { - val message = when { - isDataStream -> getSuccessDataStreamRolloverMessage(rolloverTarget, indexName) - else -> getSuccessMessage(indexName) - } + val message = + when { + isDataStream -> getSuccessDataStreamRolloverMessage(rolloverTarget, indexName) + else -> getSuccessMessage(indexName) + } // Save newIndex later to metadata to be reused in case of failures newIndex = response.newIndex stepStatus = StepStatus.COMPLETED - info = listOfNotNull( - "message" to message, - if (conditions.isEmpty()) null else "conditions" to conditions // don't show empty conditions object if no conditions specified - ).toMap() + info = + listOfNotNull( + "message" to message, + if (conditions.isEmpty()) null else "conditions" to conditions, // don't show empty conditions object if no conditions specified + ).toMap() } else { - val message = when { - isDataStream -> getFailedDataStreamRolloverMessage(rolloverTarget) + val message = + when { + isDataStream -> getFailedDataStreamRolloverMessage(rolloverTarget) - // If the alias update response was NOT acknowledged, then the new index was created but we failed to swap the alias - else -> getFailedAliasUpdateMessage(indexName, response.newIndex) - } + // If the alias update response was NOT acknowledged, then the new index was created but we failed to swap the alias + else -> getFailedAliasUpdateMessage(indexName, response.newIndex) + } logger.warn(message) stepStatus = StepStatus.FAILED - info = listOfNotNull( - "message" to message, - if (conditions.isEmpty()) null else "conditions" to conditions // don't show empty conditions object if no conditions specified - ).toMap() + info = + listOfNotNull( + "message" to message, + if (conditions.isEmpty()) null else "conditions" to conditions, // don't show empty conditions object if no conditions specified + ).toMap() } } catch (e: RemoteTransportException) { handleException(indexName, ExceptionsHelper.unwrapCause(e) as Exception) @@ -280,7 +292,7 @@ class AttemptRolloverStep(private val action: RolloverAction) : Step(name) { indexName: String, client: Client, rolloverTarget: String, - metadata: ManagedIndexMetaData + metadata: ManagedIndexMetaData, ) { if (!action.copyAlias) return @@ -293,10 +305,11 @@ class AttemptRolloverStep(private val action: RolloverAction) : Step(name) { // ISM cannot auto recover from this case, so the status is COMPLETED logger.error("$indexName rolled over but cannot find the rolledOverIndexName to copy aliases to") stepStatus = StepStatus.COMPLETED - info = listOfNotNull( - "message" to getCopyAliasRolledOverIndexNotFoundMessage(indexName), - if (conditions != null) "conditions" to conditions else null - ).toMap() + info = + listOfNotNull( + "message" to getCopyAliasRolledOverIndexNotFoundMessage(indexName), + if (conditions != null) "conditions" to conditions else null, + ).toMap() return } @@ -308,12 +321,13 @@ class AttemptRolloverStep(private val action: RolloverAction) : Step(name) { if (aliasName == rolloverTarget) continue val aliasMetadata = alias.value - val aliasAction = AliasActions(AliasActions.Type.ADD).index(rolledOverIndexName) - .alias(aliasMetadata.alias) - .filter(aliasMetadata.filter?.toString()) - .searchRouting(aliasMetadata.searchRouting) - .indexRouting(aliasMetadata.indexRouting) - .isHidden(aliasMetadata.isHidden) + val aliasAction = + AliasActions(AliasActions.Type.ADD).index(rolledOverIndexName) + .alias(aliasMetadata.alias) + 
.filter(aliasMetadata.filter?.toString()) + .searchRouting(aliasMetadata.searchRouting) + .indexRouting(aliasMetadata.indexRouting) + .isHidden(aliasMetadata.isHidden) aliasActions.add(aliasAction) } val aliasReq = IndicesAliasesRequest() @@ -323,24 +337,27 @@ class AttemptRolloverStep(private val action: RolloverAction) : Step(name) { val aliasRes: AcknowledgedResponse = client.admin().indices().suspendUntil { aliases(aliasReq, it) } if (aliasRes.isAcknowledged) { stepStatus = StepStatus.COMPLETED - info = listOfNotNull( - "message" to getSuccessCopyAliasMessage(indexName, rolledOverIndexName), - if (conditions != null) "conditions" to conditions else null - ).toMap() + info = + listOfNotNull( + "message" to getSuccessCopyAliasMessage(indexName, rolledOverIndexName), + if (conditions != null) "conditions" to conditions else null, + ).toMap() } else { stepStatus = StepStatus.FAILED - info = listOfNotNull( - "message" to getCopyAliasNotAckMessage(indexName, rolledOverIndexName), - if (conditions != null) "conditions" to conditions else null - ).toMap() + info = + listOfNotNull( + "message" to getCopyAliasNotAckMessage(indexName, rolledOverIndexName), + if (conditions != null) "conditions" to conditions else null, + ).toMap() } } catch (e: IndexNotFoundException) { logger.error("Index not found while copying alias from $indexName to $rolledOverIndexName", e) stepStatus = StepStatus.FAILED - info = listOfNotNull( - "message" to getCopyAliasIndexNotFoundMessage(rolledOverIndexName), - if (conditions != null) "conditions" to conditions else null - ).toMap() + info = + listOfNotNull( + "message" to getCopyAliasIndexNotFoundMessage(rolledOverIndexName), + if (conditions != null) "conditions" to conditions else null, + ).toMap() } catch (e: Exception) { handleException(indexName, e, getFailedCopyAliasMessage(indexName, rolledOverIndexName), conditions) } @@ -352,7 +369,7 @@ class AttemptRolloverStep(private val action: RolloverAction) : Step(name) { rolledOver = if (currentMetadata.rolledOver == true) true else stepStatus == StepStatus.COMPLETED, rolledOverIndexName = if (currentMetadata.rolledOverIndexName != null) currentMetadata.rolledOverIndexName else newIndex, transitionTo = null, - info = info + info = info, ) } @@ -371,28 +388,44 @@ class AttemptRolloverStep(private val action: RolloverAction) : Step(name) { @Suppress("TooManyFunctions") companion object { const val name = "attempt_rollover" + fun getFailedMessage(index: String) = "Failed to rollover index [index=$index]" + fun getFailedAliasUpdateMessage(index: String, newIndex: String) = "New index created, but failed to update alias [index=$index, newIndex=$newIndex]" + fun getFailedDataStreamRolloverMessage(dataStream: String) = "Failed to rollover data stream [data_stream=$dataStream]" + fun getFailedNoValidAliasMessage(index: String) = "Missing rollover_alias index setting [index=$index]" + fun getFailedEvaluateMessage(index: String) = "Failed to evaluate conditions for rollover [index=$index]" + fun getPendingMessage(index: String) = "Pending rollover of index [index=$index]" + fun getSuccessMessage(index: String) = "Successfully rolled over index [index=$index]" + fun getSuccessDataStreamRolloverMessage(dataStream: String, index: String) = "Successfully rolled over data stream [data_stream=$dataStream index=$index]" + fun getFailedPreCheckMessage(index: String) = "Missing alias or not the write index when rollover [index=$index]" + fun getSkipRolloverMessage(index: String) = "Skipped rollover action for [index=$index]" + fun 
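The copyAlias logic above rebuilds each alias from the source index onto the rolled-over index with an IndicesAliasesRequest. A trimmed sketch of that request construction, skipping the write-alias and rollover-target filtering the step performs, and assuming the suspendUntil bridge sketched earlier is in scope:

    import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest
    import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions
    import org.opensearch.action.support.master.AcknowledgedResponse
    import org.opensearch.client.Client
    import org.opensearch.cluster.metadata.AliasMetadata

    // Copy the given aliases onto the freshly rolled-over index; returns whether the update was acknowledged.
    suspend fun copyAliases(client: Client, rolledOverIndexName: String, aliases: Collection<AliasMetadata>): Boolean {
        val request = IndicesAliasesRequest()
        for (aliasMetadata in aliases) {
            request.addAliasAction(
                AliasActions(AliasActions.Type.ADD)
                    .index(rolledOverIndexName)
                    .alias(aliasMetadata.alias)
                    .filter(aliasMetadata.filter?.toString())
                    .searchRouting(aliasMetadata.searchRouting)
                    .indexRouting(aliasMetadata.indexRouting)
                    .isHidden(aliasMetadata.isHidden),
            )
        }
        val response: AcknowledgedResponse = client.admin().indices().suspendUntil { aliases(request, it) }
        return response.isAcknowledged
    }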
getAlreadyRolledOverMessage(index: String, alias: String) = "This index has already been rolled over using this alias, treating as a success [index=$index, alias=$alias]" + fun getSuccessCopyAliasMessage(index: String, newIndex: String) = "Successfully rolled over and copied alias from [index=$index] to [index=$newIndex]" + fun getFailedCopyAliasMessage(index: String, newIndex: String) = "Successfully rolled over but failed to copied alias from [index=$index] to [index=$newIndex]" + fun getCopyAliasNotAckMessage(index: String, newIndex: String) = "Successfully rolled over but copy alias from [index=$index] to [index=$newIndex] is not acknowledged" + fun getCopyAliasIndexNotFoundMessage(newIndex: String?) = "Successfully rolled over but new index [index=$newIndex] not found during copy alias" + fun getCopyAliasRolledOverIndexNotFoundMessage(index: String?) = "Successfully rolled over [index=$index] but ISM cannot find rolled over index from metadata to copy aliases to, please manually copy" } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/rollup/AttemptCreateRollupJobStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/rollup/AttemptCreateRollupJobStep.kt index a5e767d64..ee166ec4a 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/rollup/AttemptCreateRollupJobStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/rollup/AttemptCreateRollupJobStep.kt @@ -27,7 +27,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaDat import org.opensearch.transport.RemoteTransportException class AttemptCreateRollupJobStep(private val action: RollupAction) : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? 
= null @@ -103,7 +102,7 @@ class AttemptCreateRollupJobStep(private val action: RollupAction) : Step(name) actionMetaData = currentActionMetaData?.copy(actionProperties = ActionProperties(rollupId = rollupId)), stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -111,10 +110,15 @@ class AttemptCreateRollupJobStep(private val action: RollupAction) : Step(name) companion object { const val name = "attempt_create_rollup" + fun getFailedMessage(rollupId: String, index: String) = "Failed to create the rollup job [$rollupId] [index=$index]" + fun getFailedJobExistsMessage(rollupId: String, index: String) = "Rollup job [$rollupId] already exists, skipping creation [index=$index]" + fun getFailedToStartMessage(rollupId: String, index: String) = "Failed to start the rollup job [$rollupId] [index=$index]" + fun getSuccessMessage(rollupId: String, index: String) = "Successfully created the rollup job [$rollupId] [index=$index]" + fun getSuccessRestartMessage(rollupId: String, index: String) = "Successfully restarted the rollup job [$rollupId] [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/rollup/WaitForRollupCompletionStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/rollup/WaitForRollupCompletionStep.kt index d7170c031..7bdc1f4be 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/rollup/WaitForRollupCompletionStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/rollup/WaitForRollupCompletionStep.kt @@ -17,7 +17,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaDat import org.opensearch.transport.RemoteTransportException class WaitForRollupCompletionStep : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? 
= null @@ -102,14 +101,16 @@ class WaitForRollupCompletionStep : Step(name) { val currentActionMetaData = currentMetadata.actionMetaData val currentActionProperties = currentActionMetaData?.actionProperties return currentMetadata.copy( - actionMetaData = currentActionMetaData?.copy( - actionProperties = currentActionProperties?.copy( - hasRollupFailed = hasRollupFailed - ) + actionMetaData = + currentActionMetaData?.copy( + actionProperties = + currentActionProperties?.copy( + hasRollupFailed = hasRollupFailed, + ), ), stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -118,10 +119,15 @@ class WaitForRollupCompletionStep : Step(name) { companion object { const val name = "wait_for_rollup_completion" const val JOB_STOPPED_MESSAGE = "Rollup job was stopped" + fun getFailedMessage(rollupJob: String, index: String) = "Failed to get the status of rollup job [$rollupJob] [index=$index]" + fun getJobProcessingMessage(rollupJob: String, index: String) = "Rollup job [$rollupJob] is still processing [index=$index]" + fun getJobCompletionMessage(rollupJob: String, index: String) = "Rollup job [$rollupJob] completed [index=$index]" + fun getJobFailedMessage(rollupJob: String, index: String) = "Rollup job [$rollupJob] failed [index=$index]" + fun getMissingRollupJobMessage(index: String) = "Rollup job was not found [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/AttemptMoveShardsStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/AttemptMoveShardsStep.kt index 9754f5d8a..0a5b8e1f5 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/AttemptMoveShardsStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/AttemptMoveShardsStep.kt @@ -55,7 +55,6 @@ import kotlin.math.sqrt @SuppressWarnings("TooManyFunctions") class AttemptMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, false, false, false) { - @Suppress("ReturnCount") override suspend fun wrappedExecute(context: StepContext): AttemptMoveShardsStep { val client = context.client @@ -74,7 +73,7 @@ class AttemptMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, action.targetIndexTemplate, context.metadata, indexName + DEFAULT_TARGET_SUFFIX, - context.scriptService + context.scriptService, ) if (targetIndexNameIsInvalid(context.clusterService, shrinkTargetIndexName)) return this @@ -84,8 +83,9 @@ class AttemptMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, val (statsStore, statsDocs, shardStats) = getIndexStats(indexName, client) ?: return this val indexSize = statsStore.sizeInBytes // Get stats of current and target shards - val numOriginalShards = context.clusterService.state().metadata.indices[indexName]?.numberOfShards - ?: error("numOriginalShards should not be null") + val numOriginalShards = + context.clusterService.state().metadata.indices[indexName]?.numberOfShards + ?: error("numOriginalShards should not be null") val numTargetShards = getNumTargetShards(numOriginalShards, indexSize) if (shouldFailTooManyDocuments(statsDocs, numTargetShards)) return this @@ -119,18 +119,20 @@ class AttemptMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, } // Iterate through the suitable nodes and try to acquire a lock on one - val (lock, nodeName) = acquireLockFromNodeList(context.lockService, suitableNodes, 
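The wait step above is the second half of a create-then-poll pattern: the create step stashes the rollup job id in ActionProperties, and this step re-checks the job's reported state on every execution, staying in CONDITION_NOT_MET until the job finishes or fails. A schematic sketch of that mapping with a hypothetical JobState enum standing in for the rollup job's real metadata type:

    enum class StepStatus { COMPLETED, CONDITION_NOT_MET, FAILED }

    // Hypothetical stand-in for the rollup job's reported state.
    enum class JobState { RUNNING, FINISHED, FAILED, STOPPED }

    // Map the polled job state to a step outcome; anything still in flight keeps the step
    // in CONDITION_NOT_MET so the runner re-executes it on the next job interval.
    fun evaluateWaitStep(state: JobState?): Pair<StepStatus, String> =
        when (state) {
            null -> StepStatus.FAILED to "Rollup job was not found"
            JobState.FINISHED -> StepStatus.COMPLETED to "Rollup job completed"
            JobState.FAILED, JobState.STOPPED -> StepStatus.FAILED to "Rollup job failed or was stopped"
            JobState.RUNNING -> StepStatus.CONDITION_NOT_MET to "Rollup job is still processing"
        }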
interval, indexName) - ?: return this - shrinkActionProperties = ShrinkActionProperties( - nodeName, - shrinkTargetIndexName, - numTargetShards, - lock.primaryTerm, - lock.seqNo, - lock.lockTime.epochSecond, - lock.lockDurationSeconds, - originalIndexSettings - ) + val (lock, nodeName) = + acquireLockFromNodeList(context.lockService, suitableNodes, interval, indexName) + ?: return this + shrinkActionProperties = + ShrinkActionProperties( + nodeName, + shrinkTargetIndexName, + numTargetShards, + lock.primaryTerm, + lock.seqNo, + lock.lockTime.epochSecond, + lock.lockDurationSeconds, + originalIndexSettings, + ) setToReadOnlyAndMoveIndexToNode(context, nodeName, lock) info = mapOf("message" to getSuccessMessage(nodeName)) @@ -142,19 +144,20 @@ class AttemptMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, private suspend fun getIndexStats( indexName: String, - client: Client + client: Client, ): Triple>? { val statsRequest = IndicesStatsRequest().indices(indexName) - val statsResponse: IndicesStatsResponse = client.admin().indices().suspendUntil { - stats(statsRequest, it) - } + val statsResponse: IndicesStatsResponse = + client.admin().indices().suspendUntil { + stats(statsRequest, it) + } val statsStore = statsResponse.total.store val statsDocs = statsResponse.total.docs val statsShards = statsResponse.shards if (statsStore == null || statsDocs == null || statsShards == null) { setStepFailed( FAILURE_MESSAGE, - "Failed to move shards in shrink action as IndicesStatsResponse was missing some stats." + "Failed to move shards in shrink action as IndicesStatsResponse was missing some stats.", ) return null } @@ -174,15 +177,17 @@ class AttemptMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, template: Script?, managedIndexMetaData: ManagedIndexMetaData, defaultValue: String, - scriptService: ScriptService + scriptService: ScriptService, ): String { if (template == null) return defaultValue - val contextMap = managedIndexMetaData.convertToMap().filterKeys { key -> - key in ALLOWED_TEMPLATE_FIELDS - } - val compiledValue = scriptService.compile(template, TemplateScript.CONTEXT) - .newInstance(template.params + mapOf("ctx" to contextMap)) - .execute() + val contextMap = + managedIndexMetaData.convertToMap().filterKeys { key -> + key in ALLOWED_TEMPLATE_FIELDS + } + val compiledValue = + scriptService.compile(template, TemplateScript.CONTEXT) + .newInstance(template.params + mapOf("ctx" to contextMap)) + .execute() return compiledValue.ifBlank { defaultValue } } @@ -245,12 +250,13 @@ class AttemptMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, private suspend fun setToReadOnlyAndMoveIndexToNode( stepContext: StepContext, node: String, - lock: LockModel + lock: LockModel, ): Boolean { - val updateSettings = Settings.builder() - .put(SETTING_BLOCKS_WRITE, true) - .put(ROUTING_SETTING, node) - .build() + val updateSettings = + Settings.builder() + .put(SETTING_BLOCKS_WRITE, true) + .put(ROUTING_SETTING, node) + .build() val lockService = stepContext.lockService var response: AcknowledgedResponse? = null val isUpdateAcknowledged: Boolean @@ -277,13 +283,14 @@ class AttemptMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, lockService: LockService, suitableNodes: List, jobIntervalSeconds: Long?, - indexName: String + indexName: String, ): Pair? { for (nodeName in suitableNodes) { val lockID = getShrinkJobID(nodeName) - val lock: LockModel? 
= lockService.suspendUntil { - acquireLockWithId(INDEX_MANAGEMENT_INDEX, getShrinkLockDuration(jobIntervalSeconds), lockID, it) - } + val lock: LockModel? = + lockService.suspendUntil { + acquireLockWithId(INDEX_MANAGEMENT_INDEX, getShrinkLockDuration(jobIntervalSeconds), lockID, it) + } if (lock != null) { return lock to nodeName } else { @@ -302,28 +309,31 @@ class AttemptMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, private suspend fun findSuitableNodes( stepContext: StepContext, shardStats: Array, - indexSizeInBytes: Long + indexSizeInBytes: Long, ): List { val nodesStatsReq = NodesStatsRequest().addMetric(FS_METRIC) - val nodeStatsResponse: NodesStatsResponse = stepContext.client.admin().cluster().suspendUntil { - nodesStats(nodesStatsReq, it) - } + val nodeStatsResponse: NodesStatsResponse = + stepContext.client.admin().cluster().suspendUntil { + nodesStats(nodesStatsReq, it) + } val nodesList = nodeStatsResponse.nodes.filter { it.node.isDataNode } val suitableNodes: ArrayList = ArrayList() // Sort in increasing order of keys, in our case this is memory remaining - val comparator = kotlin.Comparator { o1: Tuple, o2: Tuple -> - o1.v1().compareTo(o2.v1()) - } + val comparator = + kotlin.Comparator { o1: Tuple, o2: Tuple -> + o1.v1().compareTo(o2.v1()) + } val nodesWithSpace = PriorityQueue(comparator) for (node in nodesList) { // Gets the amount of disk space in the node which will be free below the high watermark level after adding 2*indexSizeInBytes, // as the source index is duplicated during the shrink - val remainingDiskSpace = getNodeFreeDiskSpaceAfterShrink( - node, - indexSizeInBytes, - stepContext.clusterService.clusterSettings - ) + val remainingDiskSpace = + getNodeFreeDiskSpaceAfterShrink( + node, + indexSizeInBytes, + stepContext.clusterService.clusterSettings, + ) if (remainingDiskSpace > 0L) { nodesWithSpace.add(Tuple(remainingDiskSpace, node.node.name)) } @@ -333,10 +343,11 @@ class AttemptMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, logger.info("No node has enough disk space for shrink action.") return suitableNodes } - val shardIdToNodeList: Map> = getShardIdToNodeNameSet( - shardStats, - stepContext.clusterService.state().nodes - ) + val shardIdToNodeList: Map> = + getShardIdToNodeNameSet( + shardStats, + stepContext.clusterService.state().nodes, + ) // For each node, do a dry run of moving all shards to the node to make sure that there aren't any other blockers // to the allocation. 
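The acquireLockFromNodeList logic above walks the candidate nodes and keeps the first node whose shrink lock it can obtain through the job-scheduler LockService, so two shrink actions never target the same node. A standalone sketch of that loop, assuming the suspendUntil bridge sketched earlier and a hypothetical lock-id scheme (the plugin derives its own id and lock duration):

    import org.opensearch.jobscheduler.spi.LockModel
    import org.opensearch.jobscheduler.spi.utils.LockService

    // Try the candidate nodes in order and return the first (lock, nodeName) pair we can acquire;
    // a null result means every candidate is already locked by another shrink action.
    suspend fun acquireNodeLock(
        lockService: LockService,
        jobIndexName: String,
        candidateNodes: List<String>,
        lockDurationSeconds: Long,
    ): Pair<LockModel, String>? {
        for (nodeName in candidateNodes) {
            val lockId = "shrink-$nodeName" // hypothetical id scheme for illustration only
            val lock: LockModel? = lockService.suspendUntil {
                acquireLockWithId(jobIndexName, lockDurationSeconds, lockId, it)
            }
            if (lock != null) return lock to nodeName
        }
        return null
    }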
for (sizeNodeTuple in nodesWithSpace) { @@ -348,29 +359,35 @@ class AttemptMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, val shardId = shard.shardRouting.shardId() val currentShardNode = stepContext.clusterService.state().nodes[shard.shardRouting.currentNodeId()] // Don't attempt a dry run for shards which have a copy already on that node - if (shardIdToNodeList[shardId.id]?.contains(targetNodeName) == true || requestedShardIds.contains( - shardId.id + if (shardIdToNodeList[shardId.id]?.contains(targetNodeName) == true || + requestedShardIds.contains( + shardId.id, ) - ) continue + ) { + continue + } clusterRerouteRequest.add( - MoveAllocationCommand(indexName, shardId.id, currentShardNode.name, targetNodeName) + MoveAllocationCommand(indexName, shardId.id, currentShardNode.name, targetNodeName), ) requestedShardIds.add(shardId.id) } val clusterRerouteResponse: ClusterRerouteResponse = stepContext.client.admin().cluster().suspendUntil { reroute(clusterRerouteRequest, it) } val numOfDecisions = clusterRerouteResponse.explanations.explanations().size - val numNoDecisions = clusterRerouteResponse.explanations.explanations().count { - it.decisions().type().equals((Decision.Type.NO)) - } - val numYesDecisions = clusterRerouteResponse.explanations.explanations().count { - it.decisions().type().equals((Decision.Type.YES)) - } - val numThrottleDecisions = clusterRerouteResponse.explanations.explanations().count { - it.decisions().type().equals((Decision.Type.THROTTLE)) - } + val numNoDecisions = + clusterRerouteResponse.explanations.explanations().count { + it.decisions().type().equals((Decision.Type.NO)) + } + val numYesDecisions = + clusterRerouteResponse.explanations.explanations().count { + it.decisions().type().equals((Decision.Type.YES)) + } + val numThrottleDecisions = + clusterRerouteResponse.explanations.explanations().count { + it.decisions().type().equals((Decision.Type.THROTTLE)) + } logger.debug( - getShardMovingDecisionInfo(numNoDecisions, numYesDecisions, numThrottleDecisions, targetNodeName) + getShardMovingDecisionInfo(numNoDecisions, numYesDecisions, numThrottleDecisions, targetNodeName), ) // NO decision type is not counted; YES and THROTTLE decision type are available for shrink. 
if (numOfDecisions - numNoDecisions >= requestedShardIds.size) { @@ -447,20 +464,23 @@ class AttemptMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, override fun getUpdatedManagedIndexMetadata(currentMetadata: ManagedIndexMetaData): ManagedIndexMetaData { val currentActionMetaData = currentMetadata.actionMetaData // If we succeeded because there was only one source primary shard, we no-op by skipping to the last step - val stepMetaData = if (info?.get("message") == ONE_PRIMARY_SHARD_MESSAGE) { - StepMetaData(WaitForShrinkStep.name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus) - } else { - StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus) - } + val stepMetaData = + if (info?.get("message") == ONE_PRIMARY_SHARD_MESSAGE) { + StepMetaData(WaitForShrinkStep.name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus) + } else { + StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus) + } return currentMetadata.copy( - actionMetaData = currentActionMetaData?.copy( - actionProperties = ActionProperties( - shrinkActionProperties = shrinkActionProperties - ) + actionMetaData = + currentActionMetaData?.copy( + actionProperties = + ActionProperties( + shrinkActionProperties = shrinkActionProperties, + ), ), stepMetaData = stepMetaData, transitionTo = null, - info = info + info = info, ) } @@ -488,20 +508,23 @@ class AttemptMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, private const val JOB_INTERVAL_LOCK_MULTIPLIER = 3 private const val LOCK_BUFFER_SECONDS = 1800 private const val MAXIMUM_DOCS_PER_SHARD = 0x80000000 // The maximum number of documents per shard is 2^31 + fun getSuccessMessage(node: String) = "Successfully initialized moving the shards to $node for a shrink action." + fun getIndexExistsMessage(newIndex: String) = "Shrink failed because $newIndex already exists." + fun getShardMovingDecisionInfo( noCount: Int, yesCount: Int, throttleCount: Int, - node: String + node: String, ) = "Shard moving decisions on node $node, NO: $noCount, YES: $yesCount, THROTTLE: $throttleCount." // If we couldn't get the job interval for the lock, use the default of 12 hours. // Lock is 3x + 30 minutes the job interval to allow the next step's execution to extend the lock without losing it. // If user sets maximum jitter, it could be 2x the job interval before the next step is executed. private fun getShrinkLockDuration( - jobInterval: Long? 
+ jobInterval: Long?, ) = jobInterval?.let { (it * JOB_INTERVAL_LOCK_MULTIPLIER) + LOCK_BUFFER_SECONDS } ?: DEFAULT_LOCK_INTERVAL diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/AttemptShrinkStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/AttemptShrinkStep.kt index 87dba6361..bf5166b5a 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/AttemptShrinkStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/AttemptShrinkStep.kt @@ -27,7 +27,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaData class AttemptShrinkStep(private val action: ShrinkAction) : ShrinkStep(name, true, true, false) { - @Suppress("ReturnCount") override suspend fun wrappedExecute(context: StepContext): AttemptShrinkStep { val indexName = context.metadata.index @@ -57,9 +56,10 @@ class AttemptShrinkStep(private val action: ShrinkAction) : ShrinkStep(name, tru private suspend fun isNodeStillSuitable(nodeName: String, indexName: String, context: StepContext): Boolean { // Get the size of the index val statsRequest = IndicesStatsRequest().indices(indexName) - val statsResponse: IndicesStatsResponse = context.client.admin().indices().suspendUntil { - stats(statsRequest, it) - } + val statsResponse: IndicesStatsResponse = + context.client.admin().indices().suspendUntil { + stats(statsRequest, it) + } val statsStore = statsResponse.total.store if (statsStore == null) { cleanupAndFail(FAILURE_MESSAGE, "Shrink action failed as indices stats request was missing store stats.") @@ -85,9 +85,10 @@ class AttemptShrinkStep(private val action: ShrinkAction) : ShrinkStep(name, tru // Set index write block again before sending shrink request, in case of write block flipped by other processes in previous steps. private suspend fun confirmIndexWriteBlock(stepContext: StepContext, indexName: String): Boolean { - val updateSettings = Settings.builder() - .put(IndexMetadata.SETTING_BLOCKS_WRITE, true) - .build() + val updateSettings = + Settings.builder() + .put(IndexMetadata.SETTING_BLOCKS_WRITE, true) + .build() var response: AcknowledgedResponse? 
= null val isUpdateAcknowledged: Boolean @@ -110,7 +111,7 @@ class AttemptShrinkStep(private val action: ShrinkAction) : ShrinkStep(name, tru Settings.builder() .put(AttemptMoveShardsStep.ROUTING_SETTING, shrinkActionProperties.nodeName) .put(INDEX_NUMBER_OF_SHARDS, shrinkActionProperties.targetNumShards) - .build() + .build(), ) action.aliases?.forEach { req.targetIndexRequest.alias(it) } val resizeResponse: ResizeResponse = context.client.admin().indices().suspendUntil { resizeIndex(req, it) } @@ -123,14 +124,16 @@ class AttemptShrinkStep(private val action: ShrinkAction) : ShrinkStep(name, tru override fun getUpdatedManagedIndexMetadata(currentMetadata: ManagedIndexMetaData): ManagedIndexMetaData { return currentMetadata.copy( - actionMetaData = currentMetadata.actionMetaData?.copy( - actionProperties = ActionProperties( - shrinkActionProperties = shrinkActionProperties - ) + actionMetaData = + currentMetadata.actionMetaData?.copy( + actionProperties = + ActionProperties( + shrinkActionProperties = shrinkActionProperties, + ), ), stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -142,6 +145,7 @@ class AttemptShrinkStep(private val action: ShrinkAction) : ShrinkStep(name, tru const val WRITE_BLOCK_FAILED_MESSAGE = "Failed to set write block before sending shrink request." const val NOT_ENOUGH_SPACE_FAILURE_MESSAGE = "Shrink failed as the selected node no longer had enough free space to shrink to." const val INDEX_HEALTH_NOT_GREEN_MESSAGE = "Shrink delayed because index health is not green." + fun getSuccessMessage(newIndex: String) = "Shrink started. $newIndex currently being populated." } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/ShrinkStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/ShrinkStep.kt index d45845cb1..e0baae6a0 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/ShrinkStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/ShrinkStep.kt @@ -26,7 +26,7 @@ abstract class ShrinkStep( name: String, private val cleanupSettings: Boolean, private val cleanupLock: Boolean, - private val cleanupTargetIndex: Boolean + private val cleanupTargetIndex: Boolean, ) : Step(name) { protected val logger: Logger = LogManager.getLogger(javaClass) protected var stepStatus = StepStatus.STARTING @@ -74,7 +74,7 @@ abstract class ShrinkStep( if (lock == null) { cleanupAndFail( "Failed to renew lock on node [${localShrinkActionProperties.nodeName}]", - "Shrink action failed to renew lock on node [${localShrinkActionProperties.nodeName}]" + "Shrink action failed to renew lock on node [${localShrinkActionProperties.nodeName}]", ) return null } @@ -141,7 +141,7 @@ abstract class ShrinkStep( } } else { logger.error( - "Shrink action failed to delete target index [$targetIndexName] after a failure due to a null client in the step context" + "Shrink action failed to delete target index [$targetIndexName] after a failure due to a null client in the step context", ) } } catch (e: Exception) { @@ -157,7 +157,7 @@ abstract class ShrinkStep( if (!released) logger.error("Failed to release Shrink action lock on node [${shrinkActionProperties.nodeName}]") } else { logger.error( - "Shrink action failed to release lock on node [${shrinkActionProperties.nodeName}] due to uninitialized metadata values." 
+ "Shrink action failed to release lock on node [${shrinkActionProperties.nodeName}] due to uninitialized metadata values.", ) } } catch (e: Exception) { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/WaitForMoveShardsStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/WaitForMoveShardsStep.kt index 943f59279..e40262b37 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/WaitForMoveShardsStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/WaitForMoveShardsStep.kt @@ -21,7 +21,6 @@ import java.time.Duration import java.time.Instant class WaitForMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, true, true, false) { - @Suppress("ReturnCount") override suspend fun wrappedExecute(context: StepContext): WaitForMoveShardsStep { val indexName = context.metadata.index @@ -33,8 +32,9 @@ class WaitForMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, val numShardsInSync = getNumShardsInSync(shardStats, context.clusterService.state(), indexName) val nodeToMoveOnto = localShrinkActionProperties.nodeName val numShardsOnNode = getNumShardsWithCopyOnNode(shardStats, context.clusterService.state(), nodeToMoveOnto) - val numPrimaryShards = context.clusterService.state().metadata.indices[indexName]?.numberOfShards - ?: error("numberOfShards should not be null") + val numPrimaryShards = + context.clusterService.state().metadata.indices[indexName]?.numberOfShards + ?: error("numberOfShards should not be null") // If a copy of each shard is on the node, and all shards are in sync, move on if (numShardsOnNode >= numPrimaryShards && numShardsInSync >= numPrimaryShards) { @@ -98,14 +98,16 @@ class WaitForMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, override fun getUpdatedManagedIndexMetadata(currentMetadata: ManagedIndexMetaData): ManagedIndexMetaData { return currentMetadata.copy( - actionMetaData = currentMetadata.actionMetaData?.copy( - actionProperties = ActionProperties( - shrinkActionProperties = shrinkActionProperties - ) + actionMetaData = + currentMetadata.actionMetaData?.copy( + actionProperties = + ActionProperties( + shrinkActionProperties = shrinkActionProperties, + ), ), stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -113,7 +115,7 @@ class WaitForMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, stepContext: StepContext, numShardsNotOnNode: Int, numShardsNotInSync: Int, - nodeToMoveOnto: String + nodeToMoveOnto: String, ) { val managedIndexMetadata = stepContext.metadata val indexName = managedIndexMetadata.index @@ -125,7 +127,7 @@ class WaitForMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, } else { logger.debug( "Shrink action move shards step running on [$indexName], [$numShardsNotOnNode] shards need to be moved, " + - "[$numShardsNotInSync] shards need an in sync replica." + "[$numShardsNotInSync] shards need an in sync replica.", ) info = mapOf("message" to getTimeoutDelay(nodeToMoveOnto)) stepStatus = StepStatus.CONDITION_NOT_MET @@ -136,11 +138,17 @@ class WaitForMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name, companion object { const val name = "wait_for_move_shards_step" + fun getSuccessMessage(node: String) = "The shards successfully moved to $node." 
+
         fun getTimeoutFailure(node: String) = "Shrink failed because it took too long to move shards to $node"
+
         fun getTimeoutDelay(node: String) = "Shrink delayed because it took too long to move shards to $node"
-        fun getLoggedTimeoutError(index: String, numShardsNotOnNode: Int, numShardsNotInSync: Int) = "Shrink Action move shards failed on [$index]," +
-            " the action timed out with [$numShardsNotOnNode] shards not yet moved and [$numShardsNotInSync] shards without an in sync replica."
+
+        fun getLoggedTimeoutError(index: String, numShardsNotOnNode: Int, numShardsNotInSync: Int) =
+            "Shrink Action move shards failed on [$index]," +
+                " the action timed out with [$numShardsNotOnNode] shards not yet moved and [$numShardsNotInSync] shards without an in sync replica."
+
         const val FAILURE_MESSAGE = "Shrink failed when waiting for shards to move."
         const val MOVE_SHARDS_TIMEOUT_IN_SECONDS = 43200L // 12hrs in seconds
     }
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/WaitForShrinkStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/WaitForShrinkStep.kt
index 1c6e446e1..98d60e83e 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/WaitForShrinkStep.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/WaitForShrinkStep.kt
@@ -28,7 +27,6 @@ import java.time.Duration
 import java.time.Instant
 
 class WaitForShrinkStep(private val action: ShrinkAction) : ShrinkStep(name, true, true, true) {
-
     @Suppress("ReturnCount")
     override suspend fun wrappedExecute(context: StepContext): WaitForShrinkStep {
         val indexName = context.metadata.index
@@ -74,7 +73,7 @@ class WaitForShrinkStep(private val action: ShrinkAction) : ShrinkStep(name, tru
         if (!response.isAcknowledged) {
             cleanupAndFail(
                 getFailureMessage(index),
-                "Shrink action to clear the allocation settings on index [$index] following shrinking."
+ "Shrink action to clear the allocation settings on index [$index] following shrinking.", ) return false } @@ -102,7 +101,6 @@ class WaitForShrinkStep(private val action: ShrinkAction) : ShrinkStep(name, tru } suspend fun switchAliases(context: StepContext, shrinkActionProperties: ShrinkActionProperties): Boolean { - val sourceIndexName = context.metadata.index val targetIndexName = shrinkActionProperties.targetIndexName @@ -113,20 +111,22 @@ class WaitForShrinkStep(private val action: ShrinkAction) : ShrinkStep(name, tru logger.info("Switching aliases from [$sourceIndexName] to [$targetIndexName].") - val targetIndexAliasesNames = context - .clusterService - .state() - .metadata() - .index(targetIndexName) - .aliases - .keys - val sourceIndexAliases = context - .clusterService - .state() - .metadata() - .index(sourceIndexName) - .aliases - .values + val targetIndexAliasesNames = + context + .clusterService + .state() + .metadata() + .index(targetIndexName) + .aliases + .keys + val sourceIndexAliases = + context + .clusterService + .state() + .metadata() + .index(sourceIndexName) + .aliases + .values val req = IndicesAliasesRequest() sourceIndexAliases.map { it.alias }.forEach { req.addAliasAction(AliasActions(AliasActions.Type.REMOVE).index(sourceIndexName).alias(it)) } @@ -161,14 +161,16 @@ class WaitForShrinkStep(private val action: ShrinkAction) : ShrinkStep(name, tru override fun getUpdatedManagedIndexMetadata(currentMetadata: ManagedIndexMetaData): ManagedIndexMetaData { return currentMetadata.copy( - actionMetaData = currentMetadata.actionMetaData?.copy( - actionProperties = ActionProperties( - shrinkActionProperties = shrinkActionProperties - ) + actionMetaData = + currentMetadata.actionMetaData?.copy( + actionProperties = + ActionProperties( + shrinkActionProperties = shrinkActionProperties, + ), ), stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -178,8 +180,11 @@ class WaitForShrinkStep(private val action: ShrinkAction) : ShrinkStep(name, tru const val name = "wait_for_shrink_step" const val SUCCESS_MESSAGE = "Shrink finished successfully." const val GENERIC_FAILURE_MESSAGE = "Shrink failed while waiting for shards to start." + fun getDelayedMessage(newIndex: String) = "Shrink delayed because $newIndex shards not in started state." + fun getFailureMessage(newIndex: String) = "Shrink failed while waiting for $newIndex shards to start." + fun getTimeoutFailure(newIndex: String) = "Shrink failed because it timed out while waiting for $newIndex shrink to finish." 
     }
 }
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/snapshot/AttemptSnapshotStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/snapshot/AttemptSnapshotStep.kt
index 4256cd0aa..6e02ad716 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/snapshot/AttemptSnapshotStep.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/snapshot/AttemptSnapshotStep.kt
@@ -10,6 +10,7 @@ import org.opensearch.ExceptionsHelper
 import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest
 import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse
 import org.opensearch.common.regex.Regex
+import org.opensearch.core.rest.RestStatus
 import org.opensearch.indexmanagement.indexstatemanagement.action.SnapshotAction
 import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.SNAPSHOT_DENY_LIST
 import org.opensearch.indexmanagement.opensearchapi.convertToMap
@@ -18,7 +19,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.Step
 import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ActionProperties
 import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData
 import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaData
-import org.opensearch.core.rest.RestStatus
 import org.opensearch.script.Script
 import org.opensearch.script.ScriptService
 import org.opensearch.script.ScriptType
@@ -31,7 +31,6 @@ import java.time.format.DateTimeFormatter
 import java.util.Locale
 
 class AttemptSnapshotStep(private val action: SnapshotAction) : Step(name) {
-
     private val logger = LogManager.getLogger(javaClass)
     private var stepStatus = StepStatus.STARTING
     private var info: Map<String, Any>?
= null @@ -55,22 +54,24 @@ class AttemptSnapshotStep(private val action: SnapshotAction) : Step(name) { info = mutableInfo.toMap() return this } - val snapshotNameSuffix = "-".plus( - LocalDateTime.now(ZoneId.of("UTC")) - .format(DateTimeFormatter.ofPattern("uuuu.MM.dd-HH:mm:ss.SSS", Locale.ROOT)) - ) + val snapshotNameSuffix = + "-".plus( + LocalDateTime.now(ZoneId.of("UTC")) + .format(DateTimeFormatter.ofPattern("uuuu.MM.dd-HH:mm:ss.SSS", Locale.ROOT)), + ) val snapshotScript = Script(ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, snapshot, mapOf()) // If user intentionally set the snapshot name empty then we are going to honor it val defaultSnapshotName = if (snapshot.isBlank()) snapshot else indexName snapshotName = compileTemplate(snapshotScript, managedIndexMetadata, defaultSnapshotName, scriptService).plus(snapshotNameSuffix) - val createSnapshotRequest = CreateSnapshotRequest() - .userMetadata(mapOf("snapshot_created" to "Open Distro for Elasticsearch Index Management")) - .indices(indexName) - .snapshot(snapshotName) - .repository(repository) - .waitForCompletion(false) + val createSnapshotRequest = + CreateSnapshotRequest() + .userMetadata(mapOf("snapshot_created" to "Open Distro for Elasticsearch Index Management")) + .indices(indexName) + .snapshot(snapshotName) + .repository(repository) + .waitForCompletion(false) val response: CreateSnapshotResponse = context.client.admin().cluster().suspendUntil { createSnapshot(createSnapshotRequest, it) } when (response.status()) { @@ -133,14 +134,16 @@ class AttemptSnapshotStep(private val action: SnapshotAction) : Step(name) { template: Script, managedIndexMetaData: ManagedIndexMetaData, defaultValue: String, - scriptService: ScriptService + scriptService: ScriptService, ): String { - val contextMap = managedIndexMetaData.convertToMap().filterKeys { key -> - key in validTopContextFields - } - val compiledValue = scriptService.compile(template, TemplateScript.CONTEXT) - .newInstance(template.params + mapOf("ctx" to contextMap)) - .execute() + val contextMap = + managedIndexMetaData.convertToMap().filterKeys { key -> + key in validTopContextFields + } + val compiledValue = + scriptService.compile(template, TemplateScript.CONTEXT) + .newInstance(template.params + mapOf("ctx" to contextMap)) + .execute() return if (compiledValue.isBlank()) defaultValue else compiledValue } @@ -150,7 +153,7 @@ class AttemptSnapshotStep(private val action: SnapshotAction) : Step(name) { actionMetaData = currentActionMetaData?.copy(actionProperties = ActionProperties(snapshotName = snapshotName)), stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -159,10 +162,14 @@ class AttemptSnapshotStep(private val action: SnapshotAction) : Step(name) { companion object { val validTopContextFields = setOf("index", "indexUuid") const val name = "attempt_snapshot" + fun getBlockedMessage(denyList: List, repoName: String, index: String) = "Snapshot repository [$repoName] is blocked in $denyList [index=$index]" + fun getFailedMessage(index: String) = "Failed to create snapshot [index=$index]" + fun getFailedConcurrentSnapshotMessage(index: String) = "Concurrent snapshot in progress, retrying next execution [index=$index]" + fun getSuccessMessage(index: String) = "Successfully started snapshot [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/snapshot/WaitForSnapshotStep.kt 
b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/snapshot/WaitForSnapshotStep.kt index 6018dd2d3..36157f5c9 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/snapshot/WaitForSnapshotStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/snapshot/WaitForSnapshotStep.kt @@ -20,7 +20,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaDat import org.opensearch.transport.RemoteTransportException class WaitForSnapshotStep(private val action: SnapshotAction) : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? = null @@ -34,15 +33,17 @@ class WaitForSnapshotStep(private val action: SnapshotAction) : Step(name) { try { val snapshotName = getSnapshotName(managedIndexMetadata, indexName) ?: return this - val request = SnapshotsStatusRequest() - .snapshots(arrayOf(snapshotName)) - .repository(repository) + val request = + SnapshotsStatusRequest() + .snapshots(arrayOf(snapshotName)) + .repository(repository) val response: SnapshotsStatusResponse = context.client.admin().cluster().suspendUntil { snapshotsStatus(request, it) } - val status: SnapshotStatus? = response - .snapshots - .find { snapshotStatus -> - snapshotStatus.snapshot.snapshotId.name == snapshotName && snapshotStatus.snapshot.repository == repository - } + val status: SnapshotStatus? = + response + .snapshots + .find { snapshotStatus -> + snapshotStatus.snapshot.snapshotId.name == snapshotName && snapshotStatus.snapshot.repository == repository + } if (status != null) { when (status.state) { State.INIT, State.STARTED -> { @@ -101,7 +102,7 @@ class WaitForSnapshotStep(private val action: SnapshotAction) : Step(name) { return currentMetadata.copy( stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -109,11 +110,16 @@ class WaitForSnapshotStep(private val action: SnapshotAction) : Step(name) { companion object { const val name = "wait_for_snapshot" + fun getFailedMessage(index: String) = "Failed to get status of snapshot [index=$index]" + fun getFailedExistsMessage(index: String) = "Snapshot doesn't exist [index=$index]" + fun getFailedActionPropertiesMessage(index: String, actionProperties: ActionProperties?) 
= "Unable to retrieve [${ActionProperties.Properties.SNAPSHOT_NAME.key}] from ActionProperties=$actionProperties [index=$index]" + fun getSuccessMessage(index: String) = "Successfully created snapshot [index=$index]" + fun getSnapshotInProgressMessage(index: String) = "Snapshot currently in progress [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/transform/AttemptCreateTransformJobStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/transform/AttemptCreateTransformJobStep.kt index 806803e3c..41aa6ff92 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/transform/AttemptCreateTransformJobStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/transform/AttemptCreateTransformJobStep.kt @@ -28,9 +28,8 @@ import org.opensearch.indexmanagement.transform.action.start.StartTransformReque import org.opensearch.transport.RemoteTransportException class AttemptCreateTransformJobStep( - private val action: TransformAction + private val action: TransformAction, ) : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? = null @@ -109,7 +108,7 @@ class AttemptCreateTransformJobStep( actionMetaData = currentActionMetaData?.copy(actionProperties = ActionProperties(transformActionProperties = transformActionProperties)), stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -117,11 +116,16 @@ class AttemptCreateTransformJobStep( companion object { const val name = "attempt_create_transform" + fun getFailedMessage(transformId: String, index: String) = "Failed to create the transform job [$transformId] [index=$index]" + fun getTransformJobAlreadyExistsMessage(transformId: String, index: String) = "Transform job [$transformId] already exists, skipping creation [index=$index]" + fun getFailedToStartMessage(transformId: String, index: String) = "Failed to start the transform job [$transformId] [index=$index]" + fun getSuccessMessage(transformId: String, index: String) = "Successfully created the transform job [$transformId] [index=$index]" + fun getSuccessRestartMessage(transformId: String, index: String) = "Successfully restarted the transform job [$transformId] [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/transform/WaitForTransformCompletionStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/transform/WaitForTransformCompletionStep.kt index 78aa41f3d..8b2085403 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/transform/WaitForTransformCompletionStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/transform/WaitForTransformCompletionStep.kt @@ -20,7 +20,6 @@ import org.opensearch.transport.RemoteTransportException @Suppress("ReturnCount") class WaitForTransformCompletionStep : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stepStatus = StepStatus.STARTING private var info: Map? = null @@ -64,9 +63,10 @@ class WaitForTransformCompletionStep : Step(name) { private suspend fun explainTransformJob(transformJobId: String, indexName: String, context: StepContext): ExplainTransformResponse? 
{ val explainTransformRequest = ExplainTransformRequest(listOf(transformJobId)) try { - val response = context.client.suspendUntil { - execute(ExplainTransformAction.INSTANCE, explainTransformRequest, it) - } + val response = + context.client.suspendUntil { + execute(ExplainTransformAction.INSTANCE, explainTransformRequest, it) + } logger.info("Received the status for jobs [${response.getIdsToExplain().keys}]") return response } catch (e: RemoteTransportException) { @@ -113,7 +113,7 @@ class WaitForTransformCompletionStep : Step(name) { actionMetaData = currentMetadata.actionMetaData, stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = info + info = info, ) } @@ -122,11 +122,17 @@ class WaitForTransformCompletionStep : Step(name) { companion object { const val name = "wait_for_transform_completion" const val JOB_STOPPED_MESSAGE = "Transform job was stopped" + fun getFailedMessage(transformJob: String, index: String) = "Failed to get the status of transform job [$transformJob] [index=$index]" + fun getJobProcessingMessage(transformJob: String, index: String) = "Transform job [$transformJob] is still processing [index=$index]" + fun getJobCompletionMessage(transformJob: String, index: String) = "Transform job [$transformJob] completed [index=$index]" + fun getJobFailedMessage(transformJob: String, index: String) = "Transform job [$transformJob] failed [index=$index]" + fun getMissingTransformJobMessage(index: String) = "Transform job was not found [index=$index]" + fun getJobNotFoundMessage(transformJob: String, index: String) = "Transform job [$transformJob] is not found [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/transition/AttemptTransitionStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/transition/AttemptTransitionStep.kt index a71fa1f18..cd12916cd 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/transition/AttemptTransitionStep.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/transition/AttemptTransitionStep.kt @@ -11,6 +11,7 @@ import org.opensearch.action.admin.indices.stats.IndicesStatsRequest import org.opensearch.action.admin.indices.stats.IndicesStatsResponse import org.opensearch.cluster.service.ClusterService import org.opensearch.core.common.unit.ByteSizeValue +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.indexstatemanagement.IndexMetadataProvider import org.opensearch.indexmanagement.indexstatemanagement.action.TransitionsAction import org.opensearch.indexmanagement.indexstatemanagement.opensearchapi.getOldestRolloverTime @@ -22,12 +23,10 @@ import org.opensearch.indexmanagement.opensearchapi.suspendUntil import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaData -import org.opensearch.core.rest.RestStatus import org.opensearch.transport.RemoteTransportException import java.time.Instant class AttemptTransitionStep(private val action: TransitionsAction) : Step(name) { - private val logger = LogManager.getLogger(javaClass) private var stateName: String? 
= null private var stepStatus = StepStatus.STARTING @@ -76,8 +75,9 @@ class AttemptTransitionStep(private val action: TransitionsAction) : Step(name) if (transitions.any { it.hasStatsConditions() }) { if (inCluster) { - val statsRequest = IndicesStatsRequest() - .indices(indexName).clear().docs(true) + val statsRequest = + IndicesStatsRequest() + .indices(indexName).clear().docs(true) val statsResponse: IndicesStatsResponse = context.client.admin().indices().suspendUntil { stats(statsRequest, it) } @@ -85,10 +85,11 @@ class AttemptTransitionStep(private val action: TransitionsAction) : Step(name) val message = getFailedStatsMessage(indexName) logger.warn("$message - ${statsResponse.status}") stepStatus = StepStatus.FAILED - info = mapOf( - "message" to message, - "shard_failures" to statsResponse.shardFailures.map { it.getUsefulCauseString() } - ) + info = + mapOf( + "message" to message, + "shard_failures" to statsResponse.shardFailures.map { it.getUsefulCauseString() }, + ) return this } numDocs = statsResponse.primaries.getDocs()?.count ?: 0 @@ -99,15 +100,16 @@ class AttemptTransitionStep(private val action: TransitionsAction) : Step(name) } // Find the first transition that evaluates to true and get the state to transition to, otherwise return null if none are true - stateName = transitions.find { - it.evaluateConditions(indexCreationDateInstant, numDocs, indexSize, stepStartTime, rolloverDate) - }?.stateName + stateName = + transitions.find { + it.evaluateConditions(indexCreationDateInstant, numDocs, indexSize, stepStartTime, rolloverDate) + }?.stateName val message: String val stateName = stateName // shadowed on purpose to prevent var from changing if (stateName != null) { logger.info( "$indexName transition conditions evaluated to true [indexCreationDate=$indexCreationDate," + - " numDocs=$numDocs, indexSize=${indexSize?.bytes},stepStartTime=${stepStartTime.toEpochMilli()}]" + " numDocs=$numDocs, indexSize=${indexSize?.bytes},stepStartTime=${stepStartTime.toEpochMilli()}]", ) stepStatus = StepStatus.COMPLETED message = getSuccessMessage(indexName, stateName) @@ -140,7 +142,7 @@ class AttemptTransitionStep(private val action: TransitionsAction) : Step(name) policyCompleted = policyCompleted, transitionTo = stateName, stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), - info = info + info = info, ) } @@ -150,7 +152,7 @@ class AttemptTransitionStep(private val action: TransitionsAction) : Step(name) indexMetadataProvider: IndexMetadataProvider, clusterService: ClusterService, indexName: String, - inCluster: Boolean + inCluster: Boolean, ): Long { try { // If we do have an index creation date cached already then use that @@ -164,9 +166,10 @@ class AttemptTransitionStep(private val action: TransitionsAction) : Step(name) val nonDefaultIndexTypes = indexMetadataProvider.services.keys.filter { it != DEFAULT_INDEX_TYPE } val multiTypeIndexNameToMetaData = indexMetadataProvider.getMultiTypeISMIndexMetadata(nonDefaultIndexTypes, listOf(indexName)) // the managedIndexConfig.indexUuid should be unique across all index types - val indexCreationDate = multiTypeIndexNameToMetaData.values.firstOrNull { - it[indexName]?.indexUuid == metadata.indexUuid - }?.get(indexName)?.indexCreationDate + val indexCreationDate = + multiTypeIndexNameToMetaData.values.firstOrNull { + it[indexName]?.indexUuid == metadata.indexUuid + }?.get(indexName)?.indexCreationDate indexCreationDate ?: -1 } } catch (e: Exception) { @@ -180,11 +183,16 @@ class 
AttemptTransitionStep(private val action: TransitionsAction) : Step(name) companion object { const val name = "attempt_transition_step" + fun getFailedMessage(index: String) = "Failed to transition index [index=$index]" + fun getFailedStatsMessage(index: String) = "Failed to get stats information for the index [index=$index]" + fun getFailedRolloverDateMessage(index: String) = "Failed to transition index as min_rollover_age condition was used, but the index has never been rolled over [index=$index]" + fun getEvaluatingMessage(index: String) = "Evaluating transition conditions [index=$index]" + fun getSuccessMessage(index: String, state: String) = "Transitioning to $state [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/ISMStatusResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/ISMStatusResponse.kt index 96bbffec6..b6aeb13db 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/ISMStatusResponse.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/ISMStatusResponse.kt @@ -17,13 +17,12 @@ import org.opensearch.indexmanagement.indexstatemanagement.util.buildInvalidInde import java.io.IOException open class ISMStatusResponse : ActionResponse, ToXContentObject { - val updated: Int val failedIndices: List constructor( updated: Int, - failedIndices: List + failedIndices: List, ) : super() { this.updated = updated this.failedIndices = failedIndices @@ -32,7 +31,7 @@ open class ISMStatusResponse : ActionResponse, ToXContentObject { @Throws(IOException::class) constructor(sin: StreamInput) : this( updated = sin.readInt(), - failedIndices = sin.readList(::FailedIndex) + failedIndices = sin.readList(::FailedIndex), ) override fun writeTo(out: StreamOutput) { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/addpolicy/AddPolicyRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/addpolicy/AddPolicyRequest.kt index 99304e00c..fe76f0c89 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/addpolicy/AddPolicyRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/addpolicy/AddPolicyRequest.kt @@ -16,14 +16,13 @@ import java.io.IOException class AddPolicyRequest( val indices: List, val policyID: String, - val indexType: String + val indexType: String, ) : ActionRequest() { - @Throws(IOException::class) constructor(sin: StreamInput) : this( indices = sin.readStringList(), policyID = sin.readString(), - indexType = sin.readString() + indexType = sin.readString(), ) override fun validate(): ActionRequestValidationException? 
{ @@ -31,10 +30,11 @@ class AddPolicyRequest( if (indices.isEmpty()) { validationException = ValidateActions.addValidationError("Missing indices", validationException) } else if (indexType != DEFAULT_INDEX_TYPE && indices.size > 1) { - validationException = ValidateActions.addValidationError( - MULTIPLE_INDICES_CUSTOM_INDEX_TYPE_ERROR, - validationException - ) + validationException = + ValidateActions.addValidationError( + MULTIPLE_INDICES_CUSTOM_INDEX_TYPE_ERROR, + validationException, + ) } return validationException } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/addpolicy/TransportAddPolicyAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/addpolicy/TransportAddPolicyAction.kt index d2307120a..3e2b48ee4 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/addpolicy/TransportAddPolicyAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/addpolicy/TransportAddPolicyAction.kt @@ -13,7 +13,6 @@ import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchSecurityException import org.opensearch.OpenSearchStatusException import org.opensearch.OpenSearchTimeoutException -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.cluster.state.ClusterStateRequest import org.opensearch.action.admin.cluster.state.ClusterStateResponse import org.opensearch.action.bulk.BulkRequest @@ -34,7 +33,9 @@ import org.opensearch.common.settings.Settings import org.opensearch.common.unit.TimeValue import org.opensearch.commons.ConfigConstants import org.opensearch.commons.authuser.User +import org.opensearch.core.action.ActionListener import org.opensearch.core.index.Index +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.indexstatemanagement.DefaultIndexMetadataService @@ -48,6 +49,7 @@ import org.opensearch.indexmanagement.indexstatemanagement.transport.action.mana import org.opensearch.indexmanagement.indexstatemanagement.transport.action.managedIndex.ManagedIndexRequest import org.opensearch.indexmanagement.indexstatemanagement.util.DEFAULT_INDEX_TYPE import org.opensearch.indexmanagement.indexstatemanagement.util.FailedIndex +import org.opensearch.indexmanagement.indexstatemanagement.util.deleteManagedIndexMetadataRequest import org.opensearch.indexmanagement.indexstatemanagement.util.managedIndexConfigIndexRequest import org.opensearch.indexmanagement.opensearchapi.IndexManagementSecurityContext import org.opensearch.indexmanagement.opensearchapi.parseFromGetResponse @@ -59,8 +61,6 @@ import org.opensearch.indexmanagement.util.IndexUtils import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser import org.opensearch.indexmanagement.util.SecurityUtils.Companion.userHasPermissionForResource import org.opensearch.indexmanagement.util.SecurityUtils.Companion.validateUserConfiguration -import org.opensearch.core.rest.RestStatus -import org.opensearch.indexmanagement.indexstatemanagement.util.deleteManagedIndexMetadataRequest import org.opensearch.tasks.Task import org.opensearch.transport.TransportService import java.time.Duration @@ -69,20 +69,23 @@ import java.time.Instant private val log = LogManager.getLogger(TransportAddPolicyAction::class.java) @Suppress("SpreadOperator", 
"ReturnCount", "LongParameterList") -class TransportAddPolicyAction @Inject constructor( +class TransportAddPolicyAction +@Inject +constructor( val client: NodeClient, transportService: TransportService, actionFilters: ActionFilters, val settings: Settings, val clusterService: ClusterService, val xContentRegistry: NamedXContentRegistry, - val indexMetadataProvider: IndexMetadataProvider + val indexMetadataProvider: IndexMetadataProvider, ) : HandledTransportAction( - AddPolicyAction.NAME, transportService, actionFilters, ::AddPolicyRequest + AddPolicyAction.NAME, transportService, actionFilters, ::AddPolicyRequest, ) { - @Volatile private var jobInterval = ManagedIndexSettings.JOB_INTERVAL.get(settings) + @Volatile private var jobJitter = ManagedIndexSettings.JITTER.get(settings) + @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) init { @@ -116,8 +119,8 @@ class TransportAddPolicyAction @Inject constructor( fun start() { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) if (!validateUserConfiguration(user, filterByEnabled, actionListener)) { return @@ -180,13 +183,14 @@ class TransportAddPolicyAction @Inject constructor( // Do another cluster state request to fail closed indices if (request.indexType == DEFAULT_INDEX_TYPE) { val strictExpandOptions = IndicesOptions.strictExpand() - val clusterStateRequest = ClusterStateRequest() - .clear() - .indices(*indicesToAdd.values.toTypedArray()) - .metadata(true) - .local(false) - .waitForTimeout(TimeValue.timeValueMillis(ADD_POLICY_TIMEOUT_IN_MILLIS)) - .indicesOptions(strictExpandOptions) + val clusterStateRequest = + ClusterStateRequest() + .clear() + .indices(*indicesToAdd.values.toTypedArray()) + .metadata(true) + .local(false) + .waitForTimeout(TimeValue.timeValueMillis(ADD_POLICY_TIMEOUT_IN_MILLIS)) + .indicesOptions(strictExpandOptions) client.admin() .cluster() .state( @@ -204,7 +208,7 @@ class TransportAddPolicyAction @Inject constructor( override fun onFailure(t: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } - } + }, ) } else { getPolicy() @@ -240,7 +244,7 @@ class TransportAddPolicyAction @Inject constructor( IndexUtils.checkAndUpdateConfigIndexMapping( clusterService.state(), client.admin().indices(), - ActionListener.wrap(::onUpdateMapping, ::onFailure) + ActionListener.wrap(::onUpdateMapping, ::onFailure), ) } @@ -254,8 +258,8 @@ class TransportAddPolicyAction @Inject constructor( actionListener.onFailure( OpenSearchStatusException( "Unable to create or update $INDEX_MANAGEMENT_INDEX with newest mapping.", - RestStatus.INTERNAL_SERVER_ERROR - ) + RestStatus.INTERNAL_SERVER_ERROR, + ), ) } } @@ -287,8 +291,8 @@ class TransportAddPolicyAction @Inject constructor( failedIndices.add( FailedIndex( indicesToAdd[docId] as String, docId, - "This index already has a policy, use the update policy API to update index policies" - ) + "This index already has a policy, use the update policy API to update index policies", + ), ) indicesToAdd.remove(docId) } @@ -300,7 +304,7 @@ class TransportAddPolicyAction @Inject constructor( override fun onFailure(t: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } - } + }, ) } @@ -321,7 +325,7 @@ class TransportAddPolicyAction @Inject constructor( val bulkReq = 
BulkRequest().timeout(TimeValue.timeValueMillis(bulkReqTimeout)) indicesToAdd.forEach { (uuid, name) -> bulkReq.add( - managedIndexConfigIndexRequest(name, uuid, request.policyID, jobInterval, policy = policy.copy(user = this.user), jobJitter) + managedIndexConfigIndexRequest(name, uuid, request.policyID, jobInterval, policy = policy.copy(user = this.user), jobJitter), ) } @@ -335,8 +339,8 @@ class TransportAddPolicyAction @Inject constructor( failedIndices.add( FailedIndex( indicesToAdd[docId] as String, docId, - "Failed to add policy due to: ${it.failureMessage}" - ) + "Failed to add policy due to: ${it.failureMessage}", + ), ) indicesToAdd.remove(docId) } @@ -357,7 +361,7 @@ class TransportAddPolicyAction @Inject constructor( actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } } - } + }, ) } else { actionListener.onResponse(ISMStatusResponse(0, failedIndices)) @@ -381,7 +385,7 @@ class TransportAddPolicyAction @Inject constructor( override fun onFailure(e: Exception) { log.error("Failed to clean metadata for remove policy indices.", e) } - } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/changepolicy/ChangePolicyRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/changepolicy/ChangePolicyRequest.kt index cc53f6d78..c5f503793 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/changepolicy/ChangePolicyRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/changepolicy/ChangePolicyRequest.kt @@ -17,14 +17,13 @@ import java.io.IOException class ChangePolicyRequest( val indices: List, val changePolicy: ChangePolicy, - val indexType: String + val indexType: String, ) : ActionRequest() { - @Throws(IOException::class) constructor(sin: StreamInput) : this( indices = sin.readStringList(), changePolicy = ChangePolicy(sin), - indexType = sin.readString() + indexType = sin.readString(), ) override fun validate(): ActionRequestValidationException? 
{ @@ -32,10 +31,11 @@ class ChangePolicyRequest( if (indices.isEmpty()) { validationException = ValidateActions.addValidationError("Missing indices", validationException) } else if (indexType != DEFAULT_INDEX_TYPE && indices.size > 1) { - validationException = ValidateActions.addValidationError( - MULTIPLE_INDICES_CUSTOM_INDEX_TYPE_ERROR, - validationException - ) + validationException = + ValidateActions.addValidationError( + MULTIPLE_INDICES_CUSTOM_INDEX_TYPE_ERROR, + validationException, + ) } return validationException } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/changepolicy/TransportChangePolicyAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/changepolicy/TransportChangePolicyAction.kt index 1e9d09a0c..dffc0f3e1 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/changepolicy/TransportChangePolicyAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/changepolicy/TransportChangePolicyAction.kt @@ -12,7 +12,6 @@ import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchSecurityException import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.cluster.state.ClusterStateRequest import org.opensearch.action.admin.cluster.state.ClusterStateResponse import org.opensearch.action.bulk.BulkRequest @@ -30,10 +29,12 @@ import org.opensearch.cluster.metadata.IndexMetadata import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.commons.ConfigConstants import org.opensearch.commons.authuser.User +import org.opensearch.core.action.ActionListener import org.opensearch.core.index.Index +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.indexstatemanagement.DefaultIndexMetadataService import org.opensearch.indexmanagement.indexstatemanagement.IndexMetadataProvider @@ -63,7 +64,6 @@ import org.opensearch.indexmanagement.util.NO_ID import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser import org.opensearch.indexmanagement.util.SecurityUtils.Companion.userHasPermissionForResource import org.opensearch.indexmanagement.util.SecurityUtils.Companion.validateUserConfiguration -import org.opensearch.core.rest.RestStatus import org.opensearch.search.fetch.subphase.FetchSourceContext import org.opensearch.tasks.Task import org.opensearch.transport.TransportService @@ -72,18 +72,19 @@ import java.lang.IllegalArgumentException private val log = LogManager.getLogger(TransportChangePolicyAction::class.java) @Suppress("SpreadOperator", "TooManyFunctions", "LongParameterList") -class TransportChangePolicyAction @Inject constructor( +class TransportChangePolicyAction +@Inject +constructor( val client: NodeClient, transportService: TransportService, actionFilters: ActionFilters, val clusterService: ClusterService, val settings: Settings, val xContentRegistry: NamedXContentRegistry, - val indexMetadataProvider: IndexMetadataProvider + val indexMetadataProvider: IndexMetadataProvider, ) : HandledTransportAction( - ChangePolicyAction.NAME, transportService, 
actionFilters, ::ChangePolicyRequest + ChangePolicyAction.NAME, transportService, actionFilters, ::ChangePolicyRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) init { @@ -100,9 +101,8 @@ class TransportChangePolicyAction @Inject constructor( private val client: NodeClient, private val actionListener: ActionListener, private val request: ChangePolicyRequest, - private val user: User? = buildUser(client.threadPool().threadContext) + private val user: User? = buildUser(client.threadPool().threadContext), ) { - private val failedIndices = mutableListOf() private val managedIndicesToUpdate = mutableListOf>() private val indexUuidToCurrentState = mutableMapOf() @@ -115,8 +115,8 @@ class TransportChangePolicyAction @Inject constructor( fun start() { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) if (user == null) { getPolicy() @@ -139,16 +139,17 @@ class TransportChangePolicyAction @Inject constructor( actionListener.onFailure( IndexManagementException.wrap( when (e is OpenSearchSecurityException) { - true -> OpenSearchStatusException( - "User doesn't have required index permissions on one or more requested indices: ${e.localizedMessage}", - RestStatus.FORBIDDEN - ) + true -> + OpenSearchStatusException( + "User doesn't have required index permissions on one or more requested indices: ${e.localizedMessage}", + RestStatus.FORBIDDEN, + ) false -> e - } - ) + }, + ), ) } - } + }, ) } @@ -182,7 +183,7 @@ class TransportChangePolicyAction @Inject constructor( IndexUtils.checkAndUpdateConfigIndexMapping( clusterService.state(), client.admin().indices(), - ActionListener.wrap(::onUpdateMapping, ::onFailure) + ActionListener.wrap(::onUpdateMapping, ::onFailure), ) } @@ -191,8 +192,8 @@ class TransportChangePolicyAction @Inject constructor( actionListener.onFailure( OpenSearchStatusException( "Could not update ${IndexManagementPlugin.INDEX_MANAGEMENT_INDEX} with new mapping.", - RestStatus.FAILED_DEPENDENCY - ) + RestStatus.FAILED_DEPENDENCY, + ), ) return } @@ -223,12 +224,13 @@ class TransportChangePolicyAction @Inject constructor( @Suppress("SpreadOperator") private fun getClusterState() { val strictExpandOptions = IndicesOptions.strictExpand() - val clusterStateRequest = ClusterStateRequest() - .clear() - .indices(*request.indices.toTypedArray()) - .metadata(true) - .local(false) - .indicesOptions(strictExpandOptions) + val clusterStateRequest = + ClusterStateRequest() + .clear() + .indices(*request.indices.toTypedArray()) + .metadata(true) + .local(false) + .indicesOptions(strictExpandOptions) client.admin() .cluster() .state( @@ -250,14 +252,14 @@ class TransportChangePolicyAction @Inject constructor( override fun onFailure(t: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } - } + }, ) } private fun getManagedIndexMetadata() { client.multiGet( buildMgetMetadataRequest(indicesToUpdate.toList().map { it.first }), - ActionListener.wrap(::onMgetMetadataResponse, ::onFailure) + ActionListener.wrap(::onMgetMetadataResponse, ::onFailure), ) } @@ -280,8 +282,8 @@ class TransportChangePolicyAction @Inject constructor( failedIndices.add( FailedIndex( indexName, indexUuid, - "Failed to get managed index metadata, $mgetFailure" - ) + "Failed to get managed index metadata, $mgetFailure", + ), ) // if there exists 
a transitionTo on the ManagedIndexMetaData then we will // fail as they might not of meant to add a ChangePolicy when it's on the next state @@ -289,8 +291,8 @@ class TransportChangePolicyAction @Inject constructor( failedIndices.add( FailedIndex( indexName, indexUuid, - RestChangePolicyAction.INDEX_IN_TRANSITION - ) + RestChangePolicyAction.INDEX_IN_TRANSITION, + ), ) // else if there is no ManagedIndexMetaData yet then the managed index has not initialized, and we can change the policy safely managedIndexMetadata == null -> { @@ -313,32 +315,33 @@ class TransportChangePolicyAction @Inject constructor( } else { client.multiGet( mgetManagedIndexConfigRequest(managedIndicesToUpdate.map { (_, indexUuid) -> indexUuid }.toTypedArray()), - ActionListener.wrap(::onMultiGetResponse, ::onFailure) + ActionListener.wrap(::onMultiGetResponse, ::onFailure), ) } } private fun onMultiGetResponse(response: MultiGetResponse) { val foundManagedIndices = mutableSetOf() - val sweptConfigs = response.responses.mapNotNull { - // The id is the index uuid - if (!it.response.isExists) { // meaning this index is not managed - val indexUuid = it.response.id - val indexName = managedIndicesToUpdate.find { (_, second) -> second == indexUuid }?.first - if (indexName != null) { - failedIndices.add(FailedIndex(indexName, indexUuid, RestChangePolicyAction.INDEX_NOT_MANAGED)) + val sweptConfigs = + response.responses.mapNotNull { + // The id is the index uuid + if (!it.response.isExists) { // meaning this index is not managed + val indexUuid = it.response.id + val indexName = managedIndicesToUpdate.find { (_, second) -> second == indexUuid }?.first + if (indexName != null) { + failedIndices.add(FailedIndex(indexName, indexUuid, RestChangePolicyAction.INDEX_NOT_MANAGED)) + } + } + if (!it.isFailed && !it.response.isSourceEmpty) { + foundManagedIndices.add(it.response.id) + contentParser(it.response.sourceAsBytesRef).parseWithType( + NO_ID, it.response.seqNo, + it.response.primaryTerm, SweptManagedIndexConfig.Companion::parse, + ) + } else { + null } } - if (!it.isFailed && !it.response.isSourceEmpty) { - foundManagedIndices.add(it.response.id) - contentParser(it.response.sourceAsBytesRef).parseWithType( - NO_ID, it.response.seqNo, - it.response.primaryTerm, SweptManagedIndexConfig.Companion::parse - ) - } else { - null - } - } if (sweptConfigs.isEmpty()) { updated = 0 @@ -355,8 +358,9 @@ class TransportChangePolicyAction @Inject constructor( sweptConfigs.forEachIndexed { id, sweptConfig -> // compare the sweptConfig policy to the get policy here and update changePolicy val currentStateName = indexUuidToCurrentState[sweptConfig.uuid] - val updatedChangePolicy = changePolicy - .copy(isSafe = sweptConfig.policy?.isSafeToChange(currentStateName, policy, changePolicy) == true, user = this.user) + val updatedChangePolicy = + changePolicy + .copy(isSafe = sweptConfig.policy?.isSafeToChange(currentStateName, policy, changePolicy) == true, user = this.user) bulkUpdateManagedIndexRequest.add(updateManagedIndexRequest(sweptConfig.copy(changePolicy = updatedChangePolicy))) mapOfItemIdToIndex[id] = Index(sweptConfig.index, sweptConfig.uuid) } @@ -370,7 +374,7 @@ class TransportChangePolicyAction @Inject constructor( override fun onFailure(t: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } - } + }, ) } @@ -390,20 +394,21 @@ class TransportChangePolicyAction @Inject constructor( @Suppress("SpreadOperator") private fun mgetManagedIndexConfigRequest(managedIndexUuids: Array): MultiGetRequest { val 
request = MultiGetRequest() - val includes = arrayOf( - "${ManagedIndexConfig.MANAGED_INDEX_TYPE}.${ManagedIndexConfig.INDEX_FIELD}", - "${ManagedIndexConfig.MANAGED_INDEX_TYPE}.${ManagedIndexConfig.INDEX_UUID_FIELD}", - "${ManagedIndexConfig.MANAGED_INDEX_TYPE}.${ManagedIndexConfig.POLICY_ID_FIELD}", - "${ManagedIndexConfig.MANAGED_INDEX_TYPE}.${ManagedIndexConfig.POLICY_FIELD}", - "${ManagedIndexConfig.MANAGED_INDEX_TYPE}.${ManagedIndexConfig.CHANGE_POLICY_FIELD}" - ) + val includes = + arrayOf( + "${ManagedIndexConfig.MANAGED_INDEX_TYPE}.${ManagedIndexConfig.INDEX_FIELD}", + "${ManagedIndexConfig.MANAGED_INDEX_TYPE}.${ManagedIndexConfig.INDEX_UUID_FIELD}", + "${ManagedIndexConfig.MANAGED_INDEX_TYPE}.${ManagedIndexConfig.POLICY_ID_FIELD}", + "${ManagedIndexConfig.MANAGED_INDEX_TYPE}.${ManagedIndexConfig.POLICY_FIELD}", + "${ManagedIndexConfig.MANAGED_INDEX_TYPE}.${ManagedIndexConfig.CHANGE_POLICY_FIELD}", + ) val excludes = emptyArray() val fetchSourceContext = FetchSourceContext(true, includes, excludes) managedIndexUuids.forEach { request.add( MultiGetRequest.Item( - IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, it - ).fetchSourceContext(fetchSourceContext).routing(it) + IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, it, + ).fetchSourceContext(fetchSourceContext).routing(it), ) } return request diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/deletepolicy/DeletePolicyRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/deletepolicy/DeletePolicyRequest.kt index ded140242..3e8aaaef7 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/deletepolicy/DeletePolicyRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/deletepolicy/DeletePolicyRequest.kt @@ -14,20 +14,20 @@ import org.opensearch.core.common.io.stream.StreamOutput import java.io.IOException class DeletePolicyRequest(val policyID: String, val refreshPolicy: WriteRequest.RefreshPolicy) : ActionRequest() { - @Throws(IOException::class) constructor(sin: StreamInput) : this( policyID = sin.readString(), - refreshPolicy = sin.readEnum(WriteRequest.RefreshPolicy::class.java) + refreshPolicy = sin.readEnum(WriteRequest.RefreshPolicy::class.java), ) override fun validate(): ActionRequestValidationException? { var validationException: ActionRequestValidationException? 
= null if (policyID.isBlank()) { - validationException = ValidateActions.addValidationError( - "Missing policy ID", - validationException - ) + validationException = + ValidateActions.addValidationError( + "Missing policy ID", + validationException, + ) } return validationException } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/deletepolicy/TransportDeletePolicyAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/deletepolicy/TransportDeletePolicyAction.kt index 9be3966af..afdbdea5f 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/deletepolicy/TransportDeletePolicyAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/deletepolicy/TransportDeletePolicyAction.kt @@ -8,7 +8,6 @@ package org.opensearch.indexmanagement.indexstatemanagement.transport.action.del import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.delete.DeleteRequest import org.opensearch.action.delete.DeleteResponse import org.opensearch.action.get.GetRequest @@ -20,16 +19,17 @@ import org.opensearch.client.node.NodeClient import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.commons.ConfigConstants import org.opensearch.commons.authuser.User +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.indexstatemanagement.model.Policy import org.opensearch.indexmanagement.opensearchapi.parseFromGetResponse import org.opensearch.indexmanagement.settings.IndexManagementSettings import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser import org.opensearch.indexmanagement.util.SecurityUtils.Companion.userHasPermissionForResource -import org.opensearch.core.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService import java.lang.IllegalArgumentException @@ -37,17 +37,18 @@ import java.lang.IllegalArgumentException private val log = LogManager.getLogger(TransportDeletePolicyAction::class.java) @Suppress("ReturnCount") -class TransportDeletePolicyAction @Inject constructor( +class TransportDeletePolicyAction +@Inject +constructor( val client: NodeClient, transportService: TransportService, actionFilters: ActionFilters, val clusterService: ClusterService, val settings: Settings, - val xContentRegistry: NamedXContentRegistry + val xContentRegistry: NamedXContentRegistry, ) : HandledTransportAction( - DeletePolicyAction.NAME, transportService, actionFilters, ::DeletePolicyRequest + DeletePolicyAction.NAME, transportService, actionFilters, ::DeletePolicyRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) init { @@ -64,14 +65,13 @@ class TransportDeletePolicyAction @Inject constructor( private val client: Client, private val actionListener: ActionListener, private val request: DeletePolicyRequest, - private val user: User? = buildUser(client.threadPool().threadContext) + private val user: User? 
= buildUser(client.threadPool().threadContext), ) { - fun start() { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) client.threadPool().threadContext.stashContext().use { getPolicy() @@ -106,13 +106,14 @@ class TransportDeletePolicyAction @Inject constructor( override fun onFailure(t: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } - } + }, ) } private fun delete() { - val deleteRequest = DeleteRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, request.policyID) - .setRefreshPolicy(request.refreshPolicy) + val deleteRequest = + DeleteRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, request.policyID) + .setRefreshPolicy(request.refreshPolicy) client.threadPool().threadContext.stashContext().use { client.delete(deleteRequest, actionListener) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/ExplainRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/ExplainRequest.kt index cbbddf03b..8985b6355 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/ExplainRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/ExplainRequest.kt @@ -8,16 +8,15 @@ package org.opensearch.indexmanagement.indexstatemanagement.transport.action.exp import org.opensearch.action.ActionRequest import org.opensearch.action.ActionRequestValidationException import org.opensearch.action.ValidateActions +import org.opensearch.common.unit.TimeValue import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput -import org.opensearch.common.unit.TimeValue import org.opensearch.indexmanagement.common.model.rest.SearchParams import org.opensearch.indexmanagement.indexstatemanagement.model.ExplainFilter import org.opensearch.indexmanagement.indexstatemanagement.util.DEFAULT_INDEX_TYPE import java.io.IOException class ExplainRequest : ActionRequest { - val indices: List val local: Boolean val clusterManagerTimeout: TimeValue @@ -36,7 +35,7 @@ class ExplainRequest : ActionRequest { explainFilter: ExplainFilter?, showPolicy: Boolean, validateAction: Boolean, - indexType: String + indexType: String, ) : super() { this.indices = indices this.local = local @@ -57,16 +56,17 @@ class ExplainRequest : ActionRequest { explainFilter = sin.readOptionalWriteable(::ExplainFilter), showPolicy = sin.readBoolean(), validateAction = sin.readBoolean(), - indexType = sin.readString() + indexType = sin.readString(), ) override fun validate(): ActionRequestValidationException? { var validationException: ActionRequestValidationException? 
= null if (indexType != DEFAULT_INDEX_TYPE && indices.size > 1) { - validationException = ValidateActions.addValidationError( - MULTIPLE_INDICES_CUSTOM_INDEX_TYPE_ERROR, - validationException - ) + validationException = + ValidateActions.addValidationError( + MULTIPLE_INDICES_CUSTOM_INDEX_TYPE_ERROR, + validationException, + ) } return validationException } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/ExplainResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/ExplainResponse.kt index 7ba09b4f0..ea16b62a9 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/ExplainResponse.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/ExplainResponse.kt @@ -22,7 +22,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ValidationR import java.io.IOException open class ExplainResponse : ActionResponse, ToXContentObject { - // TODO refactor these lists usage to map val indexNames: List val indexPolicyIDs: List @@ -40,7 +39,7 @@ open class ExplainResponse : ActionResponse, ToXContentObject { totalManagedIndices: Int, enabledState: Map, policies: Map, - validationResults: List + validationResults: List, ) : super() { this.indexNames = indexNames this.indexPolicyIDs = indexPolicyIDs @@ -59,7 +58,7 @@ open class ExplainResponse : ActionResponse, ToXContentObject { totalManagedIndices = sin.readInt(), enabledState = sin.readMap(StreamInput::readString, StreamInput::readBoolean), policies = sin.readMap(StreamInput::readString, ::Policy), - validationResults = sin.readList { ValidationResult.fromStreamInput(it) } + validationResults = sin.readList { ValidationResult.fromStreamInput(it) }, ) @Throws(IOException::class) @@ -71,12 +70,12 @@ open class ExplainResponse : ActionResponse, ToXContentObject { out.writeMap( enabledState, { _out, key -> _out.writeString(key) }, - { _out, enable -> _out.writeBoolean(enable) } + { _out, enable -> _out.writeBoolean(enable) }, ) out.writeMap( policies, { _out, key -> _out.writeString(key) }, - { _out, policy -> policy.writeTo(_out) } + { _out, policy -> policy.writeTo(_out) }, ) out.writeCollection(validationResults) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/TransportExplainAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/TransportExplainAction.kt index 1f79eba4a..f1d91ff58 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/TransportExplainAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/TransportExplainAction.kt @@ -11,7 +11,6 @@ import kotlinx.coroutines.launch import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchSecurityException -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.cluster.state.ClusterStateRequest import org.opensearch.action.admin.cluster.state.ClusterStateResponse import org.opensearch.action.get.GetResponse @@ -29,21 +28,22 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.util.concurrent.ThreadContext import org.opensearch.common.xcontent.LoggingDeprecationHandler -import 
org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.ConfigConstants import org.opensearch.commons.authuser.User +import org.opensearch.core.action.ActionListener +import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.index.IndexNotFoundException import org.opensearch.index.query.Operator import org.opensearch.index.query.QueryBuilders import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX +import org.opensearch.indexmanagement.common.model.rest.SearchParams import org.opensearch.indexmanagement.indexstatemanagement.IndexMetadataProvider import org.opensearch.indexmanagement.indexstatemanagement.ManagedIndexCoordinator.Companion.MAX_HITS +import org.opensearch.indexmanagement.indexstatemanagement.ManagedIndexRunner.actionValidation import org.opensearch.indexmanagement.indexstatemanagement.model.ManagedIndexConfig import org.opensearch.indexmanagement.indexstatemanagement.model.Policy -import org.opensearch.indexmanagement.common.model.rest.SearchParams -import org.opensearch.indexmanagement.indexstatemanagement.ManagedIndexRunner.actionValidation import org.opensearch.indexmanagement.indexstatemanagement.model.filterByPolicyID import org.opensearch.indexmanagement.indexstatemanagement.transport.action.managedIndex.ManagedIndexAction import org.opensearch.indexmanagement.indexstatemanagement.transport.action.managedIndex.ManagedIndexRequest @@ -74,17 +74,18 @@ typealias ManagedIndexMetadataDocUUID = String // managedIndexMetadataID(indexUu typealias ManagedIndexMetadataMap = Map @Suppress("SpreadOperator", "TooManyFunctions", "UnusedPrivateMember") -class TransportExplainAction @Inject constructor( +class TransportExplainAction +@Inject +constructor( val client: NodeClient, transportService: TransportService, actionFilters: ActionFilters, val clusterService: ClusterService, val xContentRegistry: NamedXContentRegistry, - val indexMetadataProvider: IndexMetadataProvider + val indexMetadataProvider: IndexMetadataProvider, ) : HandledTransportAction( - ExplainAction.NAME, transportService, actionFilters, ::ExplainRequest + ExplainAction.NAME, transportService, actionFilters, ::ExplainRequest, ) { - override fun doExecute(task: Task, request: ExplainRequest, listener: ActionListener) { ExplainHandler(client, listener, request).start() } @@ -100,7 +101,7 @@ class TransportExplainAction @Inject constructor( private val client: NodeClient, private val actionListener: ActionListener, private val request: ExplainRequest, - private val user: User? = buildUser(client.threadPool().threadContext) + private val user: User? 
= buildUser(client.threadPool().threadContext), ) { private val indices: List = request.indices private val explainAll: Boolean = indices.isEmpty() @@ -127,8 +128,8 @@ class TransportExplainAction @Inject constructor( fun start() { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) // Use the indexMetadataProvider to get the index names and uuids corresponding to this index type CoroutineScope(Dispatchers.IO).launch { @@ -156,23 +157,25 @@ class TransportExplainAction @Inject constructor( private fun getSearchMetadataRequest(params: SearchParams, indexUUIDs: List, searchSize: Int): SearchRequest { val sortBuilder = params.getSortBuilder() - val queryBuilder = QueryBuilders.boolQuery() - .must( - QueryBuilders - .queryStringQuery(params.queryString) - .field(MANAGED_INDEX_NAME_KEYWORD_FIELD) - .defaultOperator(Operator.AND) - ).filter(QueryBuilders.termsQuery(MANAGED_INDEX_INDEX_UUID_FIELD, indexUUIDs)) - .filterByPolicyID(request.explainFilter) - - val searchSourceBuilder = SearchSourceBuilder() - .from(params.from) - .fetchSource(FETCH_SOURCE) - .seqNoAndPrimaryTerm(true) - .version(true) - .sort(sortBuilder) - .size(searchSize) - .query(queryBuilder) + val queryBuilder = + QueryBuilders.boolQuery() + .must( + QueryBuilders + .queryStringQuery(params.queryString) + .field(MANAGED_INDEX_NAME_KEYWORD_FIELD) + .defaultOperator(Operator.AND), + ).filter(QueryBuilders.termsQuery(MANAGED_INDEX_INDEX_UUID_FIELD, indexUUIDs)) + .filterByPolicyID(request.explainFilter) + + val searchSourceBuilder = + SearchSourceBuilder() + .from(params.from) + .fetchSource(FETCH_SOURCE) + .seqNoAndPrimaryTerm(true) + .version(true) + .sort(sortBuilder) + .size(searchSize) + .query(queryBuilder) return SearchRequest() .indices(INDEX_MANAGEMENT_INDEX) @@ -194,12 +197,13 @@ class TransportExplainAction @Inject constructor( parseSearchHits(response.hits.hits).forEach { managedIndex -> managedIndices.add(managedIndex.index) enabledState[managedIndex.index] = managedIndex.enabled - managedIndicesMetaDataMap[managedIndex.index] = mapOf( - "index" to managedIndex.index, - "index_uuid" to managedIndex.indexUuid, - "policy_id" to managedIndex.policyID, - "enabled" to managedIndex.enabled.toString() - ) + managedIndicesMetaDataMap[managedIndex.index] = + mapOf( + "index" to managedIndex.index, + "index_uuid" to managedIndex.indexUuid, + "policy_id" to managedIndex.policyID, + "enabled" to managedIndex.enabled.toString(), + ) if (showPolicy) { managedIndex.policy.let { appliedPolicies[managedIndex.index] = it } } @@ -216,7 +220,7 @@ class TransportExplainAction @Inject constructor( indexNames.clear() sendResponse( indexNames, indexMetadatas, indexPolicyIDs, enabledState, - totalManagedIndices, appliedPolicies, validationResults + totalManagedIndices, appliedPolicies, validationResults, ) return } else { @@ -245,13 +249,13 @@ class TransportExplainAction @Inject constructor( indexNames.clear() sendResponse( indexNames, indexMetadatas, indexPolicyIDs, - enabledState, totalManagedIndices, appliedPolicies, validationResults + enabledState, totalManagedIndices, appliedPolicies, validationResults, ) return } actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } - } + }, ) } } @@ -277,7 +281,7 @@ class TransportExplainAction @Inject constructor( override fun onFailure(t: Exception) { 
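
[Editor's note] The hunks in this patch repeat the same mechanical pattern throughout: a trailing comma after the last element of every multiline parameter/argument list, and a multiline initializer moved onto its own indented line after the `=`. The short sketch below is illustrative only; the identifiers are made up, and the exact rule set is assumed to be the ktlint-style configuration this patch adds (it is not restated here).

    import org.opensearch.index.query.QueryBuilders
    import org.opensearch.search.builder.SearchSourceBuilder

    // Old style: the initializer starts on the assignment/declaration line and
    // multiline argument lists carry no trailing comma.
    fun buildOldStyle(queryString: String): SearchSourceBuilder = SearchSourceBuilder()
        .query(QueryBuilders.queryStringQuery(queryString))
        .size(10)

    // New style: a multiline initializer is pushed to its own indented line after `=`,
    // and every multiline parameter/argument list ends with a trailing comma.
    fun buildNewStyle(
        queryString: String,
        size: Int = 10,
    ): SearchSourceBuilder {
        val source =
            SearchSourceBuilder()
                .query(QueryBuilders.queryStringQuery(queryString))
                .size(size)
        return source
    }

Keeping the trailing comma means later additions to these argument lists touch only the new line, which is why the reformatted hunks above change so many call sites but alter no behavior.
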
actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } - } + }, ) } else { getMetadataMap(null, threadContext) @@ -297,26 +301,27 @@ class TransportExplainAction @Inject constructor( response.responses.associate { it.id to getMetadata(it.response)?.toMap() } if (request.explainFilter != null) { - metadataMap = metadataMap.filter { (_, value) -> - var isValid = true + metadataMap = + metadataMap.filter { (_, value) -> + var isValid = true - if (value != null) { - val metaData = ManagedIndexMetaData.fromMap(value) + if (value != null) { + val metaData = ManagedIndexMetaData.fromMap(value) - if (!request.explainFilter.byMetaData(metaData)) { - indexNames.remove(metaData.index) - indexNamesToUUIDs.remove(metaData.index) + if (!request.explainFilter.byMetaData(metaData)) { + indexNames.remove(metaData.index) + indexNamesToUUIDs.remove(metaData.index) - if (managedIndices.contains(metaData.index)) { - totalManagedIndices-- - } + if (managedIndices.contains(metaData.index)) { + totalManagedIndices-- + } - isValid = false + isValid = false + } } - } - isValid - } + isValid + } } buildResponse(indexNamesToUUIDs, metadataMap, clusterStateIndexMetadatas, threadContext) @@ -325,7 +330,7 @@ class TransportExplainAction @Inject constructor( override fun onFailure(t: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } - } + }, ) } @@ -334,7 +339,7 @@ class TransportExplainAction @Inject constructor( indices: Map, metadataMap: Map, clusterStateIndexMetadatas: Map?, - threadContext: ThreadContext.StoredContext + threadContext: ThreadContext.StoredContext, ) { // cluster state response will not resist the sort order // so use the order from previous search result saved in indexNames @@ -416,7 +421,7 @@ class TransportExplainAction @Inject constructor( } sendResponse( filteredIndices, filteredMetadata, filteredPolicies, enabledStatus, - totalManagedIndices, filteredAppliedPolicies, filteredValidationResult + totalManagedIndices, filteredAppliedPolicies, filteredValidationResult, ) } } @@ -436,16 +441,18 @@ class TransportExplainAction @Inject constructor( @Suppress("ReturnCount") private fun getMetadata(response: GetResponse?): ManagedIndexMetaData? 
{ - if (response == null || response.sourceAsBytesRef == null) + if (response == null || response.sourceAsBytesRef == null) { return null + } try { - val xcp = XContentHelper.createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, - response.sourceAsBytesRef, - XContentType.JSON - ) + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + response.sourceAsBytesRef, + XContentType.JSON, + ) return ManagedIndexMetaData.parseWithType(xcp, response.id, response.seqNo, response.primaryTerm) } catch (e: Exception) { log.error("Failed to parse the ManagedIndexMetadata for ${response.id}", e) @@ -460,7 +467,7 @@ class TransportExplainAction @Inject constructor( xContentRegistry, LoggingDeprecationHandler.INSTANCE, hit.sourceRef, - XContentType.JSON + XContentType.JSON, ).parseWithType(parse = ManagedIndexConfig.Companion::parse) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPoliciesRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPoliciesRequest.kt index 0bf74d646..931d74144 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPoliciesRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPoliciesRequest.kt @@ -13,18 +13,17 @@ import org.opensearch.indexmanagement.common.model.rest.SearchParams import java.io.IOException class GetPoliciesRequest : ActionRequest { - val searchParams: SearchParams constructor( - searchParams: SearchParams + searchParams: SearchParams, ) : super() { this.searchParams = searchParams } @Throws(IOException::class) constructor(sin: StreamInput) : this( - searchParams = SearchParams(sin) + searchParams = SearchParams(sin), ) override fun validate(): ActionRequestValidationException? 
{ diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPoliciesResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPoliciesResponse.kt index 16dff5fe4..8ec86b043 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPoliciesResponse.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPoliciesResponse.kt @@ -21,13 +21,12 @@ import org.opensearch.indexmanagement.util._SEQ_NO import java.io.IOException class GetPoliciesResponse : ActionResponse, ToXContentObject { - val policies: List val totalPolicies: Int constructor( policies: List, - totalPolicies: Int + totalPolicies: Int, ) : super() { this.policies = policies this.totalPolicies = totalPolicies @@ -36,7 +35,7 @@ class GetPoliciesResponse : ActionResponse, ToXContentObject { @Throws(IOException::class) constructor(sin: StreamInput) : this( policies = sin.readList(::Policy), - totalPolicies = sin.readInt() + totalPolicies = sin.readInt(), ) override fun writeTo(out: StreamOutput) { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPolicyRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPolicyRequest.kt index 5e9762ed9..342cdb882 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPolicyRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPolicyRequest.kt @@ -14,7 +14,6 @@ import org.opensearch.search.fetch.subphase.FetchSourceContext import java.io.IOException class GetPolicyRequest : ActionRequest { - val policyID: String val version: Long val fetchSrcContext: FetchSourceContext @@ -22,7 +21,7 @@ class GetPolicyRequest : ActionRequest { constructor( policyID: String, version: Long, - fetchSrcContext: FetchSourceContext + fetchSrcContext: FetchSourceContext, ) : super() { this.policyID = policyID this.version = version @@ -33,16 +32,17 @@ class GetPolicyRequest : ActionRequest { constructor(sin: StreamInput) : this( policyID = sin.readString(), version = sin.readLong(), - fetchSrcContext = FetchSourceContext(sin) + fetchSrcContext = FetchSourceContext(sin), ) override fun validate(): ActionRequestValidationException? { var validationException: ActionRequestValidationException? 
= null if (policyID.isBlank()) { - validationException = ValidateActions.addValidationError( - "Missing policy ID", - validationException - ) + validationException = + ValidateActions.addValidationError( + "Missing policy ID", + validationException, + ) } return validationException } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPolicyResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPolicyResponse.kt index 54c64be1f..d4b636376 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPolicyResponse.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPolicyResponse.kt @@ -22,7 +22,6 @@ import org.opensearch.indexmanagement.util._VERSION import java.io.IOException class GetPolicyResponse : ActionResponse, ToXContentObject { - val id: String val version: Long val seqNo: Long @@ -34,7 +33,7 @@ class GetPolicyResponse : ActionResponse, ToXContentObject { version: Long, seqNo: Long, primaryTerm: Long, - policy: Policy? + policy: Policy?, ) : super() { this.id = id this.version = version @@ -49,7 +48,7 @@ class GetPolicyResponse : ActionResponse, ToXContentObject { version = sin.readLong(), seqNo = sin.readLong(), primaryTerm = sin.readLong(), - policy = sin.readOptionalWriteable(::Policy) + policy = sin.readOptionalWriteable(::Policy), ) override fun writeTo(out: StreamOutput) { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/TransportGetPoliciesAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/TransportGetPoliciesAction.kt index 9aaa9c91c..0f43af03b 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/TransportGetPoliciesAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/TransportGetPoliciesAction.kt @@ -7,7 +7,6 @@ package org.opensearch.indexmanagement.indexstatemanagement.transport.action.get import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper -import org.opensearch.core.action.ActionListener import org.opensearch.action.search.SearchRequest import org.opensearch.action.search.SearchResponse import org.opensearch.action.support.ActionFilters @@ -17,8 +16,9 @@ import org.opensearch.cluster.routing.Preference import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.commons.ConfigConstants +import org.opensearch.core.action.ActionListener +import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.index.IndexNotFoundException import org.opensearch.index.query.Operator import org.opensearch.index.query.QueryBuilders @@ -34,17 +34,18 @@ import org.opensearch.transport.TransportService private val log = LogManager.getLogger(TransportGetPoliciesAction::class.java) -class TransportGetPoliciesAction @Inject constructor( +class TransportGetPoliciesAction +@Inject +constructor( transportService: TransportService, val client: Client, actionFilters: ActionFilters, val clusterService: ClusterService, val settings: Settings, - val xContentRegistry: NamedXContentRegistry + val xContentRegistry: NamedXContentRegistry, ) : 
HandledTransportAction( - GetPoliciesAction.NAME, transportService, actionFilters, ::GetPoliciesRequest + GetPoliciesAction.NAME, transportService, actionFilters, ::GetPoliciesRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) init { @@ -56,20 +57,21 @@ class TransportGetPoliciesAction @Inject constructor( override fun doExecute( task: Task, getPoliciesRequest: GetPoliciesRequest, - actionListener: ActionListener + actionListener: ActionListener, ) { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) val params = getPoliciesRequest.searchParams val user = buildUser(client.threadPool().threadContext) val sortBuilder = params.getSortBuilder() - val queryBuilder = QueryBuilders.boolQuery() - .must(QueryBuilders.existsQuery("policy")) + val queryBuilder = + QueryBuilders.boolQuery() + .must(QueryBuilders.existsQuery("policy")) // Add user filter if enabled addUserFilter(user, queryBuilder, filterByEnabled, "policy.user") @@ -78,20 +80,22 @@ class TransportGetPoliciesAction @Inject constructor( QueryBuilders .queryStringQuery(params.queryString) .defaultOperator(Operator.AND) - .field("policy.policy_id.keyword") + .field("policy.policy_id.keyword"), ) - val searchSourceBuilder = SearchSourceBuilder() - .query(queryBuilder) - .sort(sortBuilder) - .from(params.from) - .size(params.size) - .seqNoAndPrimaryTerm(true) + val searchSourceBuilder = + SearchSourceBuilder() + .query(queryBuilder) + .sort(sortBuilder) + .from(params.from) + .size(params.size) + .seqNoAndPrimaryTerm(true) - val searchRequest = SearchRequest() - .source(searchSourceBuilder) - .indices(INDEX_MANAGEMENT_INDEX) - .preference(Preference.PRIMARY_FIRST.type()) + val searchRequest = + SearchRequest() + .source(searchSourceBuilder) + .indices(INDEX_MANAGEMENT_INDEX) + .preference(Preference.PRIMARY_FIRST.type()) client.threadPool().threadContext.stashContext().use { client.search( @@ -111,7 +115,7 @@ class TransportGetPoliciesAction @Inject constructor( } actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } - } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/TransportGetPolicyAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/TransportGetPolicyAction.kt index a54d3868e..8e495813f 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/TransportGetPolicyAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/TransportGetPolicyAction.kt @@ -8,7 +8,6 @@ package org.opensearch.indexmanagement.indexstatemanagement.transport.action.get import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.get.GetRequest import org.opensearch.action.get.GetResponse import org.opensearch.action.support.ActionFilters @@ -17,32 +16,34 @@ import org.opensearch.client.node.NodeClient import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings -import org.opensearch.core.xcontent.NamedXContentRegistry import 
org.opensearch.commons.ConfigConstants import org.opensearch.commons.authuser.User +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.indexstatemanagement.model.Policy import org.opensearch.indexmanagement.opensearchapi.parseFromGetResponse import org.opensearch.indexmanagement.settings.IndexManagementSettings.Companion.FILTER_BY_BACKEND_ROLES import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser import org.opensearch.indexmanagement.util.SecurityUtils.Companion.userHasPermissionForResource -import org.opensearch.core.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService import java.lang.IllegalArgumentException @Suppress("ReturnCount") -class TransportGetPolicyAction @Inject constructor( +class TransportGetPolicyAction +@Inject +constructor( val client: NodeClient, transportService: TransportService, actionFilters: ActionFilters, val clusterService: ClusterService, val settings: Settings, - val xContentRegistry: NamedXContentRegistry + val xContentRegistry: NamedXContentRegistry, ) : HandledTransportAction( - GetPolicyAction.NAME, transportService, actionFilters, ::GetPolicyRequest + GetPolicyAction.NAME, transportService, actionFilters, ::GetPolicyRequest, ) { - @Volatile private var filterByEnabled = FILTER_BY_BACKEND_ROLES.get(settings) private val log = LogManager.getLogger(javaClass) @@ -60,16 +61,17 @@ class TransportGetPolicyAction @Inject constructor( private val client: NodeClient, private val actionListener: ActionListener, private val request: GetPolicyRequest, - private val user: User? = buildUser(client.threadPool().threadContext) + private val user: User? 
= buildUser(client.threadPool().threadContext), ) { fun start() { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) - val getRequest = GetRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, request.policyID) - .version(request.version) + val getRequest = + GetRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, request.policyID) + .version(request.version) client.threadPool().threadContext.stashContext().use { client.get( @@ -82,7 +84,7 @@ class TransportGetPolicyAction @Inject constructor( override fun onFailure(t: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } - } + }, ) } } @@ -104,11 +106,12 @@ class TransportGetPolicyAction @Inject constructor( return } else { // if HEAD request don't return the policy - val policyResponse = if (!request.fetchSrcContext.fetchSource()) { - GetPolicyResponse(response.id, response.version, response.seqNo, response.primaryTerm, null) - } else { - GetPolicyResponse(response.id, response.version, response.seqNo, response.primaryTerm, policy) - } + val policyResponse = + if (!request.fetchSrcContext.fetchSource()) { + GetPolicyResponse(response.id, response.version, response.seqNo, response.primaryTerm, null) + } else { + GetPolicyResponse(response.id, response.version, response.seqNo, response.primaryTerm, policy) + } actionListener.onResponse(policyResponse) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/IndexPolicyRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/IndexPolicyRequest.kt index e4a04f788..adcfa64db 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/IndexPolicyRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/IndexPolicyRequest.kt @@ -16,7 +16,6 @@ import org.opensearch.indexmanagement.util.NO_ID import java.io.IOException class IndexPolicyRequest : ActionRequest { - val policyID: String val policy: Policy val seqNo: Long @@ -28,7 +27,7 @@ class IndexPolicyRequest : ActionRequest { policy: Policy, seqNo: Long, primaryTerm: Long, - refreshPolicy: WriteRequest.RefreshPolicy + refreshPolicy: WriteRequest.RefreshPolicy, ) : super() { this.policyID = policyID this.policy = policy @@ -43,7 +42,7 @@ class IndexPolicyRequest : ActionRequest { policy = Policy(sin), seqNo = sin.readLong(), primaryTerm = sin.readLong(), - refreshPolicy = sin.readEnum(WriteRequest.RefreshPolicy::class.java) + refreshPolicy = sin.readEnum(WriteRequest.RefreshPolicy::class.java), ) override fun validate(): ActionRequestValidationException? 
{ diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/IndexPolicyResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/IndexPolicyResponse.kt index f1453058b..d8a7febc7 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/IndexPolicyResponse.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/IndexPolicyResponse.kt @@ -8,6 +8,7 @@ package org.opensearch.indexmanagement.indexstatemanagement.transport.action.ind import org.opensearch.core.action.ActionResponse import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentObject import org.opensearch.core.xcontent.XContentBuilder @@ -18,11 +19,9 @@ import org.opensearch.indexmanagement.util._ID import org.opensearch.indexmanagement.util._PRIMARY_TERM import org.opensearch.indexmanagement.util._SEQ_NO import org.opensearch.indexmanagement.util._VERSION -import org.opensearch.core.rest.RestStatus import java.io.IOException class IndexPolicyResponse : ActionResponse, ToXContentObject { - val id: String val version: Long val primaryTerm: Long @@ -36,7 +35,7 @@ class IndexPolicyResponse : ActionResponse, ToXContentObject { primaryTerm: Long, seqNo: Long, policy: Policy, - status: RestStatus + status: RestStatus, ) : super() { this.id = id this.version = version @@ -53,7 +52,7 @@ class IndexPolicyResponse : ActionResponse, ToXContentObject { primaryTerm = sin.readLong(), seqNo = sin.readLong(), policy = Policy(sin), - status = sin.readEnum(RestStatus::class.java) + status = sin.readEnum(RestStatus::class.java), ) @Throws(IOException::class) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/TransportIndexPolicyAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/TransportIndexPolicyAction.kt index 75a916281..85cee8524 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/TransportIndexPolicyAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/TransportIndexPolicyAction.kt @@ -9,7 +9,6 @@ import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchStatusException import org.opensearch.ResourceAlreadyExistsException -import org.opensearch.core.action.ActionListener import org.opensearch.action.DocWriteRequest import org.opensearch.action.index.IndexRequest import org.opensearch.action.index.IndexResponse @@ -26,10 +25,12 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.ValidationException import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentFactory import org.opensearch.commons.ConfigConstants import org.opensearch.commons.authuser.User +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.index.query.QueryBuilders import org.opensearch.index.seqno.SequenceNumbers import 
org.opensearch.indexmanagement.IndexManagementIndices @@ -49,7 +50,6 @@ import org.opensearch.indexmanagement.util.IndexManagementException import org.opensearch.indexmanagement.util.IndexUtils import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser import org.opensearch.indexmanagement.util.SecurityUtils.Companion.validateUserConfiguration -import org.opensearch.core.rest.RestStatus import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.tasks.Task import org.opensearch.transport.TransportService @@ -57,7 +57,9 @@ import org.opensearch.transport.TransportService private val log = LogManager.getLogger(TransportIndexPolicyAction::class.java) @Suppress("LongParameterList") -class TransportIndexPolicyAction @Inject constructor( +class TransportIndexPolicyAction +@Inject +constructor( val client: NodeClient, transportService: TransportService, actionFilters: ActionFilters, @@ -67,9 +69,8 @@ class TransportIndexPolicyAction @Inject constructor( val xContentRegistry: NamedXContentRegistry, var awarenessReplicaBalance: AwarenessReplicaBalance, ) : HandledTransportAction( - IndexPolicyAction.NAME, transportService, actionFilters, ::IndexPolicyRequest + IndexPolicyAction.NAME, transportService, actionFilters, ::IndexPolicyRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) @@ -87,32 +88,34 @@ class TransportIndexPolicyAction @Inject constructor( private val client: NodeClient, private val actionListener: ActionListener, private val request: IndexPolicyRequest, - private val user: User? = buildUser(client.threadPool().threadContext) + private val user: User? = buildUser(client.threadPool().threadContext), ) { fun start() { validate() log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) client.threadPool().threadContext.stashContext().use { if (!validateUserConfiguration(user, filterByEnabled, actionListener)) { return } - ismIndices.checkAndUpdateIMConfigIndex(object : ActionListener { - override fun onResponse(response: AcknowledgedResponse) { - onCreateMappingsResponse(response) - } + ismIndices.checkAndUpdateIMConfigIndex( + object : ActionListener { + override fun onResponse(response: AcknowledgedResponse) { + onCreateMappingsResponse(response) + } - override fun onFailure(t: Exception) { - if (t is ResourceAlreadyExistsException) { - actionListener.onFailure(OpenSearchStatusException(t.localizedMessage, RestStatus.CONFLICT)) - } else { - actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) + override fun onFailure(t: Exception) { + if (t is ResourceAlreadyExistsException) { + actionListener.onFailure(OpenSearchStatusException(t.localizedMessage, RestStatus.CONFLICT)) + } else { + actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) + } } - } - }) + }, + ) } } @@ -143,15 +146,17 @@ class TransportIndexPolicyAction @Inject constructor( val reqTemplates = request.policy.ismTemplate if (reqTemplates != null) { validateISMTemplates(reqTemplates) - } else putPolicy() + } else { + putPolicy() + } } else { log.error("Unable to create or update ${IndexManagementPlugin.INDEX_MANAGEMENT_INDEX} with newest mapping.") actionListener.onFailure( OpenSearchStatusException( "Unable to create or update ${IndexManagementPlugin.INDEX_MANAGEMENT_INDEX} with newest mapping.", - 
RestStatus.INTERNAL_SERVER_ERROR - ) + RestStatus.INTERNAL_SERVER_ERROR, + ), ) } } @@ -174,14 +179,15 @@ class TransportIndexPolicyAction @Inject constructor( return } - val searchRequest = SearchRequest() - .source( - SearchSourceBuilder().query( - QueryBuilders.existsQuery(ISM_TEMPLATE_FIELD) - ).size(MAX_HITS).seqNoAndPrimaryTerm(true) - ) - .indices(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX) - .preference(Preference.PRIMARY_FIRST.type()) + val searchRequest = + SearchRequest() + .source( + SearchSourceBuilder().query( + QueryBuilders.existsQuery(ISM_TEMPLATE_FIELD), + ).size(MAX_HITS).seqNoAndPrimaryTerm(true), + ) + .indices(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX) + .preference(Preference.PRIMARY_FIRST.type()) client.search( searchRequest, @@ -191,8 +197,9 @@ class TransportIndexPolicyAction @Inject constructor( val policyToTemplateMap: Map> = policies.map { it.id to it.ismTemplate }.toMap().filterNotNullValues() ismTemplateList.forEach { - val conflictingPolicyTemplates = policyToTemplateMap - .findConflictingPolicyTemplates(request.policyID, it.indexPatterns, it.priority) + val conflictingPolicyTemplates = + policyToTemplateMap + .findConflictingPolicyTemplates(request.policyID, it.indexPatterns, it.priority) if (conflictingPolicyTemplates.isNotEmpty()) { val errorMessage = "New policy ${request.policyID} has an ISM template with index pattern ${it.indexPatterns} " + @@ -201,9 +208,9 @@ class TransportIndexPolicyAction @Inject constructor( actionListener.onFailure( IndexManagementException.wrap( IllegalArgumentException( - errorMessage - ) - ) + errorMessage, + ), + ), ) return } @@ -215,20 +222,22 @@ class TransportIndexPolicyAction @Inject constructor( override fun onFailure(t: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } - } + }, ) } private fun putPolicy() { - val policy = request.policy.copy( - schemaVersion = IndexUtils.indexManagementConfigSchemaVersion, user = this.user - ) + val policy = + request.policy.copy( + schemaVersion = IndexUtils.indexManagementConfigSchemaVersion, user = this.user, + ) - val indexRequest = IndexRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX) - .setRefreshPolicy(request.refreshPolicy) - .source(policy.toXContent(XContentFactory.jsonBuilder())) - .id(request.policyID) - .timeout(IndexRequest.DEFAULT_TIMEOUT) + val indexRequest = + IndexRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX) + .setRefreshPolicy(request.refreshPolicy) + .source(policy.toXContent(XContentFactory.jsonBuilder())) + .id(request.policyID) + .timeout(IndexRequest.DEFAULT_TIMEOUT) if (request.seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO || request.primaryTerm == SequenceNumbers.UNASSIGNED_PRIMARY_TERM) { indexRequest.opType(DocWriteRequest.OpType.CREATE) @@ -246,8 +255,8 @@ class TransportIndexPolicyAction @Inject constructor( actionListener.onFailure( OpenSearchStatusException( failureReasons.toString(), - response.status() - ) + response.status(), + ), ) return } @@ -258,8 +267,8 @@ class TransportIndexPolicyAction @Inject constructor( response.primaryTerm, response.seqNo, request.policy, - response.status() - ) + response.status(), + ), ) } @@ -268,7 +277,7 @@ class TransportIndexPolicyAction @Inject constructor( // provide a direct message asking user to use seqNo and primaryTerm actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } - } + }, ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/managedIndex/ManagedIndexRequest.kt 
b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/managedIndex/ManagedIndexRequest.kt index 808abda52..6ea2ba4c7 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/managedIndex/ManagedIndexRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/managedIndex/ManagedIndexRequest.kt @@ -11,7 +11,6 @@ import java.io.IOException @Suppress("SpreadOperator") class ManagedIndexRequest : BroadcastRequest { - constructor(vararg indices: String) : super(*indices) @Throws(IOException::class) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/managedIndex/TransportManagedIndexAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/managedIndex/TransportManagedIndexAction.kt index 4e82b38bc..197f844af 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/managedIndex/TransportManagedIndexAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/managedIndex/TransportManagedIndexAction.kt @@ -5,26 +5,27 @@ package org.opensearch.indexmanagement.indexstatemanagement.transport.action.managedIndex -import org.opensearch.core.action.ActionListener import org.opensearch.action.support.ActionFilters import org.opensearch.action.support.HandledTransportAction import org.opensearch.action.support.master.AcknowledgedResponse import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject +import org.opensearch.core.action.ActionListener import org.opensearch.tasks.Task import org.opensearch.transport.TransportService /** * This is a non operational transport action that is used by ISM to check if the user has required index permissions to manage index */ -class TransportManagedIndexAction @Inject constructor( +class TransportManagedIndexAction +@Inject +constructor( transportService: TransportService, actionFilters: ActionFilters, val clusterService: ClusterService, ) : HandledTransportAction( - ManagedIndexAction.NAME, transportService, actionFilters, ::ManagedIndexRequest + ManagedIndexAction.NAME, transportService, actionFilters, ::ManagedIndexRequest, ) { - override fun doExecute(task: Task, request: ManagedIndexRequest, listener: ActionListener) { // Do nothing return listener.onResponse(AcknowledgedResponse(true)) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/removepolicy/RemovePolicyRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/removepolicy/RemovePolicyRequest.kt index 66caa4d74..fcd995ae9 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/removepolicy/RemovePolicyRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/removepolicy/RemovePolicyRequest.kt @@ -15,13 +15,12 @@ import java.io.IOException class RemovePolicyRequest( val indices: List, - val indexType: String + val indexType: String, ) : ActionRequest() { - @Throws(IOException::class) constructor(sin: StreamInput) : this( indices = sin.readStringList(), - indexType = sin.readString() + indexType = sin.readString(), ) override fun validate(): ActionRequestValidationException? 
{ @@ -29,10 +28,11 @@ class RemovePolicyRequest( if (indices.isEmpty()) { validationException = ValidateActions.addValidationError("Missing indices", validationException) } else if (indexType != DEFAULT_INDEX_TYPE && indices.size > 1) { - validationException = ValidateActions.addValidationError( - MULTIPLE_INDICES_CUSTOM_INDEX_TYPE_ERROR, - validationException - ) + validationException = + ValidateActions.addValidationError( + MULTIPLE_INDICES_CUSTOM_INDEX_TYPE_ERROR, + validationException, + ) } return validationException } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/removepolicy/TransportRemovePolicyAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/removepolicy/TransportRemovePolicyAction.kt index b9db84887..dcefa694e 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/removepolicy/TransportRemovePolicyAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/removepolicy/TransportRemovePolicyAction.kt @@ -12,7 +12,6 @@ import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchSecurityException import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.cluster.state.ClusterStateRequest import org.opensearch.action.admin.cluster.state.ClusterStateResponse import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest @@ -34,7 +33,9 @@ import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings import org.opensearch.commons.ConfigConstants import org.opensearch.commons.authuser.User +import org.opensearch.core.action.ActionListener import org.opensearch.core.index.Index +import org.opensearch.core.rest.RestStatus import org.opensearch.index.IndexNotFoundException import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.indexstatemanagement.DefaultIndexMetadataService @@ -51,20 +52,20 @@ import org.opensearch.indexmanagement.indexstatemanagement.util.deleteManagedInd import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ISMIndexMetadata import org.opensearch.indexmanagement.util.IndexManagementException import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser -import org.opensearch.core.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService @Suppress("SpreadOperator") -class TransportRemovePolicyAction @Inject constructor( +class TransportRemovePolicyAction +@Inject +constructor( val client: NodeClient, transportService: TransportService, actionFilters: ActionFilters, - val indexMetadataProvider: IndexMetadataProvider + val indexMetadataProvider: IndexMetadataProvider, ) : HandledTransportAction( - RemovePolicyAction.NAME, transportService, actionFilters, ::RemovePolicyRequest + RemovePolicyAction.NAME, transportService, actionFilters, ::RemovePolicyRequest, ) { - private val log = LogManager.getLogger(javaClass) override fun doExecute(task: Task, request: RemovePolicyRequest, listener: ActionListener) { @@ -75,9 +76,8 @@ class TransportRemovePolicyAction @Inject constructor( private val client: NodeClient, private val actionListener: ActionListener, private val request: RemovePolicyRequest, - private val user: User? 
= buildUser(client.threadPool().threadContext) + private val user: User? = buildUser(client.threadPool().threadContext), ) { - private val failedIndices: MutableList = mutableListOf() private val indicesToRemove = mutableMapOf() // uuid: name private val indicesWithAutoManageFalseBlock = mutableSetOf() @@ -87,8 +87,8 @@ class TransportRemovePolicyAction @Inject constructor( fun start() { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) if (user == null) { getIndicesToRemove() @@ -111,16 +111,17 @@ class TransportRemovePolicyAction @Inject constructor( actionListener.onFailure( IndexManagementException.wrap( when (e is OpenSearchSecurityException) { - true -> OpenSearchStatusException( - "User doesn't have required index permissions on one or more requested indices: ${e.localizedMessage}", - RestStatus.FORBIDDEN - ) + true -> + OpenSearchStatusException( + "User doesn't have required index permissions on one or more requested indices: ${e.localizedMessage}", + RestStatus.FORBIDDEN, + ) false -> e - } - ) + }, + ), ) } - } + }, ) } @@ -147,12 +148,13 @@ class TransportRemovePolicyAction @Inject constructor( private fun getClusterState() { val strictExpandOptions = IndicesOptions.strictExpand() - val clusterStateRequest = ClusterStateRequest() - .clear() - .indices(*request.indices.toTypedArray()) - .metadata(true) - .local(false) - .indicesOptions(strictExpandOptions) + val clusterStateRequest = + ClusterStateRequest() + .clear() + .indices(*request.indices.toTypedArray()) + .metadata(true) + .local(false) + .indicesOptions(strictExpandOptions) client.threadPool().threadContext.stashContext().use { client.admin() @@ -186,7 +188,7 @@ class TransportRemovePolicyAction @Inject constructor( override fun onFailure(t: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } - } + }, ) } } @@ -212,8 +214,8 @@ class TransportRemovePolicyAction @Inject constructor( FailedIndex( name, uuid, - "This index does not have a policy to remove" - ) + "This index does not have a policy to remove", + ), ) } actionListener.onResponse(ISMStatusResponse(0, failedIndices)) @@ -226,8 +228,8 @@ class TransportRemovePolicyAction @Inject constructor( failedIndices.add( FailedIndex( indicesToRemove[docId] as String, docId, - "This index does not have a policy to remove" - ) + "This index does not have a policy to remove", + ), ) indicesToRemove.remove(docId) } @@ -243,7 +245,7 @@ class TransportRemovePolicyAction @Inject constructor( override fun onFailure(t: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } - } + }, ) } @@ -267,8 +269,8 @@ class TransportRemovePolicyAction @Inject constructor( UpdateSettingsRequest().indices(*readOnlyIndices.map { indices[it] }.toTypedArray()) .settings( Settings.builder().put(ManagedIndexSettings.AUTO_MANAGE.key, false) - .put(INDEX_READ_ONLY_SETTING.key, true) - ) + .put(INDEX_READ_ONLY_SETTING.key, true), + ), ) } if (readOnlyAllowDeleteIndices.isNotEmpty()) { @@ -276,14 +278,14 @@ class TransportRemovePolicyAction @Inject constructor( UpdateSettingsRequest().indices(*readOnlyAllowDeleteIndices.map { indices[it] }.toTypedArray()) .settings( Settings.builder().put(ManagedIndexSettings.AUTO_MANAGE.key, false) - .put(INDEX_BLOCKS_READ_ONLY_ALLOW_DELETE_SETTING.key, true) - ) + 
.put(INDEX_BLOCKS_READ_ONLY_ALLOW_DELETE_SETTING.key, true), + ), ) } if (normalIndices.isNotEmpty()) { updateSettingReqsList.add( UpdateSettingsRequest().indices(*normalIndices.map { indices[it] }.toTypedArray()) - .settings(Settings.builder().put(ManagedIndexSettings.AUTO_MANAGE.key, false)) + .settings(Settings.builder().put(ManagedIndexSettings.AUTO_MANAGE.key, false)), ) } @@ -302,8 +304,8 @@ class TransportRemovePolicyAction @Inject constructor( if (!response.isAcknowledged) { actionListener.onFailure( IndexManagementException.wrap( - Exception("Failed to remove policy because ISM auto_manage setting update requests are not fully acknowledged.") - ) + Exception("Failed to remove policy because ISM auto_manage setting update requests are not fully acknowledged."), + ), ) return } @@ -318,11 +320,11 @@ class TransportRemovePolicyAction @Inject constructor( val ex = ExceptionsHelper.unwrapCause(t) as Exception actionListener.onFailure( IndexManagementException.wrap( - Exception("Failed to remove policy because ISM auto_manage setting update requests failed with exception:", ex) - ) + Exception("Failed to remove policy because ISM auto_manage setting update requests failed with exception:", ex), + ), ) } - } + }, ) } @@ -342,8 +344,8 @@ class TransportRemovePolicyAction @Inject constructor( FailedIndex( indicesToRemove[docId] as String, docId, - "Failed to remove policy" - ) + "Failed to remove policy", + ), ) indicesToRemove.remove(docId) } @@ -360,8 +362,8 @@ class TransportRemovePolicyAction @Inject constructor( failedIndices.add( FailedIndex( name, uuid, - "Failed to remove policy due to ClusterBlockingException: ${t.message}" - ) + "Failed to remove policy due to ClusterBlockingException: ${t.message}", + ), ) } actionListener.onResponse(ISMStatusResponse(0, failedIndices)) @@ -369,7 +371,7 @@ class TransportRemovePolicyAction @Inject constructor( actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } } - } + }, ) } else { actionListener.onResponse(ISMStatusResponse(0, failedIndices)) @@ -389,8 +391,8 @@ class TransportRemovePolicyAction @Inject constructor( failedIndices.add( FailedIndex( indicesToRemove[docId] as String, docId, - "Failed to clean metadata due to: ${it.failureMessage}" - ) + "Failed to clean metadata due to: ${it.failureMessage}", + ), ) indicesToRemove.remove(docId) } @@ -401,11 +403,11 @@ class TransportRemovePolicyAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure( IndexManagementException.wrap( - Exception("Failed to clean metadata for remove policy indices.", e) - ) + Exception("Failed to clean metadata for remove policy indices.", e), + ), ) } - } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/retryfailedmanagedindex/RetryFailedManagedIndexRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/retryfailedmanagedindex/RetryFailedManagedIndexRequest.kt index b6d6eea60..0c03380e1 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/retryfailedmanagedindex/RetryFailedManagedIndexRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/retryfailedmanagedindex/RetryFailedManagedIndexRequest.kt @@ -8,9 +8,9 @@ package org.opensearch.indexmanagement.indexstatemanagement.transport.action.ret import org.opensearch.action.ActionRequest import org.opensearch.action.ActionRequestValidationException import 
org.opensearch.action.ValidateActions +import org.opensearch.common.unit.TimeValue import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput -import org.opensearch.common.unit.TimeValue import org.opensearch.indexmanagement.indexstatemanagement.util.DEFAULT_INDEX_TYPE import java.io.IOException @@ -18,15 +18,14 @@ class RetryFailedManagedIndexRequest( val indices: List, val startState: String?, val clusterManagerTimeout: TimeValue, - val indexType: String + val indexType: String, ) : ActionRequest() { - @Throws(IOException::class) constructor(sin: StreamInput) : this( indices = sin.readStringList(), startState = sin.readOptionalString(), clusterManagerTimeout = sin.readTimeValue(), - indexType = sin.readString() + indexType = sin.readString(), ) override fun validate(): ActionRequestValidationException? { @@ -34,10 +33,11 @@ class RetryFailedManagedIndexRequest( if (indices.isEmpty()) { validationException = ValidateActions.addValidationError("Missing indices", validationException) } else if (indexType != DEFAULT_INDEX_TYPE && indices.size > 1) { - validationException = ValidateActions.addValidationError( - MULTIPLE_INDICES_CUSTOM_INDEX_TYPE_ERROR, - validationException - ) + validationException = + ValidateActions.addValidationError( + MULTIPLE_INDICES_CUSTOM_INDEX_TYPE_ERROR, + validationException, + ) } return validationException } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/retryfailedmanagedindex/TransportRetryFailedManagedIndexAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/retryfailedmanagedindex/TransportRetryFailedManagedIndexAction.kt index 4d85b1c02..61737050a 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/retryfailedmanagedindex/TransportRetryFailedManagedIndexAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/retryfailedmanagedindex/TransportRetryFailedManagedIndexAction.kt @@ -12,7 +12,6 @@ import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchSecurityException import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.cluster.state.ClusterStateRequest import org.opensearch.action.admin.cluster.state.ClusterStateResponse import org.opensearch.action.bulk.BulkRequest @@ -28,11 +27,13 @@ import org.opensearch.client.node.NodeClient import org.opensearch.cluster.block.ClusterBlockException import org.opensearch.cluster.metadata.IndexMetadata import org.opensearch.common.inject.Inject -import org.opensearch.core.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory import org.opensearch.commons.ConfigConstants import org.opensearch.commons.authuser.User +import org.opensearch.core.action.ActionListener import org.opensearch.core.index.Index +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.ToXContent import org.opensearch.index.IndexNotFoundException import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.indexstatemanagement.DefaultIndexMetadataService @@ -52,20 +53,21 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedInde import org.opensearch.indexmanagement.spi.indexstatemanagement.model.PolicyRetryInfoMetaData import 
org.opensearch.indexmanagement.util.IndexManagementException import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser -import org.opensearch.core.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService private val log = LogManager.getLogger(TransportRetryFailedManagedIndexAction::class.java) @Suppress("SpreadOperator") -class TransportRetryFailedManagedIndexAction @Inject constructor( +class TransportRetryFailedManagedIndexAction +@Inject +constructor( val client: NodeClient, transportService: TransportService, actionFilters: ActionFilters, - val indexMetadataProvider: IndexMetadataProvider + val indexMetadataProvider: IndexMetadataProvider, ) : HandledTransportAction( - RetryFailedManagedIndexAction.NAME, transportService, actionFilters, ::RetryFailedManagedIndexRequest + RetryFailedManagedIndexAction.NAME, transportService, actionFilters, ::RetryFailedManagedIndexRequest, ) { override fun doExecute(task: Task, request: RetryFailedManagedIndexRequest, listener: ActionListener) { RetryFailedManagedIndexHandler(client, listener, request).start() @@ -75,7 +77,7 @@ class TransportRetryFailedManagedIndexAction @Inject constructor( private val client: NodeClient, private val actionListener: ActionListener, private val request: RetryFailedManagedIndexRequest, - private val user: User? = buildUser(client.threadPool().threadContext) + private val user: User? = buildUser(client.threadPool().threadContext), ) { private val failedIndices: MutableList = mutableListOf() private val listOfMetadata: MutableList = mutableListOf() @@ -89,8 +91,8 @@ class TransportRetryFailedManagedIndexAction @Inject constructor( fun start() { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) if (user == null) { // Security plugin is not enabled @@ -114,16 +116,17 @@ class TransportRetryFailedManagedIndexAction @Inject constructor( actionListener.onFailure( IndexManagementException.wrap( when (e is OpenSearchSecurityException) { - true -> OpenSearchStatusException( - "User doesn't have required index permissions on one or more requested indices: ${e.localizedMessage}", - RestStatus.FORBIDDEN - ) + true -> + OpenSearchStatusException( + "User doesn't have required index permissions on one or more requested indices: ${e.localizedMessage}", + RestStatus.FORBIDDEN, + ) false -> e - } - ) + }, + ), ) } - } + }, ) } @@ -176,7 +179,7 @@ class TransportRetryFailedManagedIndexAction @Inject constructor( override fun onFailure(t: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } - } + }, ) } } @@ -206,14 +209,14 @@ class TransportRetryFailedManagedIndexAction @Inject constructor( // get back metadata from config index client.multiGet( buildMgetMetadataRequest(indicesToRetry.toList().map { it.first }), - ActionListener.wrap(::onMgetMetadataResponse, ::onFailure) + ActionListener.wrap(::onMgetMetadataResponse, ::onFailure), ) } override fun onFailure(t: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(t) as Exception) } - } + }, ) } @@ -264,16 +267,17 @@ class TransportRetryFailedManagedIndexAction @Inject constructor( managedIndexMetaData.copy( stepMetaData = null, policyRetryInfo = PolicyRetryInfoMetaData(false, 0), - actionMetaData = managedIndexMetaData.actionMetaData?.copy( + actionMetaData = + 
managedIndexMetaData.actionMetaData?.copy( failed = false, consumedRetries = 0, lastRetryTime = null, - startTime = null + startTime = null, ), transitionTo = request.startState, - info = mapOf("message" to "Pending retry of failed managed index") - ) - ) + info = mapOf("message" to "Pending retry of failed managed index"), + ), + ), ) } } @@ -283,10 +287,11 @@ class TransportRetryFailedManagedIndexAction @Inject constructor( mapOfItemIdToIndex[ind] = index } - val updateMetadataRequests = listOfIndexToMetadata.map { (index, metadata) -> - val builder = metadata.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS, true) - UpdateRequest(INDEX_MANAGEMENT_INDEX, managedIndexMetadataID(index.uuid)).routing(index.uuid).doc(builder) - } + val updateMetadataRequests = + listOfIndexToMetadata.map { (index, metadata) -> + val builder = metadata.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS, true) + UpdateRequest(INDEX_MANAGEMENT_INDEX, managedIndexMetadataID(index.uuid)).routing(index.uuid).doc(builder) + } val bulkUpdateMetadataRequest = BulkRequest().add(updateMetadataRequests) client.bulk(bulkUpdateMetadataRequest, ActionListener.wrap(::onBulkUpdateMetadataResponse, ::onFailure)) @@ -314,7 +319,7 @@ class TransportRetryFailedManagedIndexAction @Inject constructor( failedIndices.addAll( listOfIndexToMetadata.map { FailedIndex(it.first.name, it.first.uuid, "Failed to update due to ClusterBlockException. ${e.message}") - } + }, ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/DestinationType.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/DestinationType.kt index eeb9535ec..56df34e9e 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/DestinationType.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/DestinationType.kt @@ -13,5 +13,5 @@ enum class DestinationType(val value: String) { CHIME("chime"), SLACK("slack"), CUSTOM_WEBHOOK("custom_webhook"), - TEST_ACTION("test_action") + TEST_ACTION("test_action"), } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/ManagedIndexUtils.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/ManagedIndexUtils.kt index ebb2d7ee5..a99ec0dba 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/ManagedIndexUtils.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/ManagedIndexUtils.kt @@ -5,6 +5,7 @@ @file:Suppress("TooManyFunctions", "MatchingDeclarationName") @file:JvmName("ManagedIndexUtils") + package org.opensearch.indexmanagement.indexstatemanagement.util import kotlinx.coroutines.Dispatchers @@ -18,14 +19,14 @@ import org.opensearch.action.support.WriteRequest import org.opensearch.action.update.UpdateRequest import org.opensearch.client.Client import org.opensearch.cluster.routing.Preference -import org.opensearch.core.common.unit.ByteSizeValue import org.opensearch.common.unit.TimeValue import org.opensearch.common.xcontent.LoggingDeprecationHandler -import org.opensearch.core.xcontent.NamedXContentRegistry -import org.opensearch.core.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.common.unit.ByteSizeValue +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent 
import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.QueryBuilders import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX @@ -62,23 +63,24 @@ fun managedIndexConfigIndexRequest( policyID: String, jobInterval: Int, policy: Policy, - jobJitter: Double? + jobJitter: Double?, ): IndexRequest { - val managedIndexConfig = ManagedIndexConfig( - jobName = index, - index = index, - indexUuid = uuid, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), jobInterval, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - policyID = policyID, - policy = policy, - policySeqNo = policy.seqNo, - policyPrimaryTerm = policy.primaryTerm, - changePolicy = null, - jobJitter = jobJitter - ) + val managedIndexConfig = + ManagedIndexConfig( + jobName = index, + index = index, + indexUuid = uuid, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), jobInterval, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + policyID = policyID, + policy = policy, + policySeqNo = policy.seqNo, + policyPrimaryTerm = policy.primaryTerm, + changePolicy = null, + jobJitter = jobJitter, + ) return IndexRequest(INDEX_MANAGEMENT_INDEX) .id(uuid) @@ -107,28 +109,31 @@ fun revertManagedIndexMetadataID(metadataID: String) = fun managedIndexMetadataIndexRequest(managedIndexMetadata: ManagedIndexMetaData, waitRefresh: Boolean = true, create: Boolean = false): IndexRequest { // routing set using managed index's uuid // so that metadata doc and managed-index doc are in the same place - val req = IndexRequest(INDEX_MANAGEMENT_INDEX) - .id(managedIndexMetadataID(managedIndexMetadata.indexUuid)) - .setIfPrimaryTerm(managedIndexMetadata.primaryTerm) - .setIfSeqNo(managedIndexMetadata.seqNo) - .routing(managedIndexMetadata.indexUuid) - .create(create) - .source(managedIndexMetadata.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS, true)) - - if (waitRefresh) + val req = + IndexRequest(INDEX_MANAGEMENT_INDEX) + .id(managedIndexMetadataID(managedIndexMetadata.indexUuid)) + .setIfPrimaryTerm(managedIndexMetadata.primaryTerm) + .setIfSeqNo(managedIndexMetadata.seqNo) + .routing(managedIndexMetadata.indexUuid) + .create(create) + .source(managedIndexMetadata.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS, true)) + + if (waitRefresh) { return req.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL) + } return req } private fun updateEnabledField(uuid: String, enabled: Boolean, enabledTime: Long?): UpdateRequest { - val builder = XContentFactory.jsonBuilder() - .startObject() - .startObject(ManagedIndexConfig.MANAGED_INDEX_TYPE) - .optionalTimeField(ManagedIndexConfig.LAST_UPDATED_TIME_FIELD, Instant.now()) - .field(ManagedIndexConfig.ENABLED_FIELD, enabled) - .field(ManagedIndexConfig.ENABLED_TIME_FIELD, enabledTime) - .endObject() - .endObject() + val builder = + XContentFactory.jsonBuilder() + .startObject() + .startObject(ManagedIndexConfig.MANAGED_INDEX_TYPE) + .optionalTimeField(ManagedIndexConfig.LAST_UPDATED_TIME_FIELD, Instant.now()) + .field(ManagedIndexConfig.ENABLED_FIELD, enabled) + .field(ManagedIndexConfig.ENABLED_TIME_FIELD, enabledTime) + .endObject() + .endObject() return UpdateRequest(INDEX_MANAGEMENT_INDEX, uuid).doc(builder) } @@ -165,7 +170,7 @@ fun updateManagedIndexRequest(sweptManagedIndexConfig: SweptManagedIndexConfig): */ fun getManagedIndicesToDelete( currentIndexUuids: List, - currentManagedIndexUuids: List + 
currentManagedIndexUuids: List, ): List { return currentManagedIndexUuids.filter { currentManagedIndex -> !currentIndexUuids.contains(currentManagedIndex) @@ -174,16 +179,17 @@ fun getManagedIndicesToDelete( fun getSweptManagedIndexSearchRequest(scroll: Boolean = false, size: Int = ManagedIndexCoordinator.MAX_HITS): SearchRequest { val boolQueryBuilder = BoolQueryBuilder().filter(QueryBuilders.existsQuery(ManagedIndexConfig.MANAGED_INDEX_TYPE)) - val req = SearchRequest().indices(INDEX_MANAGEMENT_INDEX) - .allowPartialSearchResults(false) - .source( - SearchSourceBuilder.searchSource() - .size(size) - .seqNoAndPrimaryTerm(true) - .fetchSource(emptyArray(), emptyArray()) - .query(boolQueryBuilder) - ) - .preference(Preference.PRIMARY_FIRST.type()) + val req = + SearchRequest().indices(INDEX_MANAGEMENT_INDEX) + .allowPartialSearchResults(false) + .source( + SearchSourceBuilder.searchSource() + .size(size) + .seqNoAndPrimaryTerm(true) + .fetchSource(emptyArray(), emptyArray()) + .query(boolQueryBuilder), + ) + .preference(Preference.PRIMARY_FIRST.type()) if (scroll) req.scroll(TimeValue.timeValueMinutes(1)) return req } @@ -236,7 +242,7 @@ fun RolloverAction.evaluateConditions( indexAgeTimeValue: TimeValue, numDocs: Long, indexSize: ByteSizeValue, - primaryShardSize: ByteSizeValue + primaryShardSize: ByteSizeValue, ): Boolean { if (this.minDocs == null && this.minAge == null && @@ -293,13 +299,13 @@ fun Action.hasTimedOut(actionMetaData: ActionMetaData?): Boolean { fun ManagedIndexMetaData.getStartingManagedIndexMetaData( state: State?, action: Action?, - step: Step? + step: Step?, ): ManagedIndexMetaData { // State can be null if the transition_to state or the current metadata state is not found in the policy if (state == null) { return this.copy( policyRetryInfo = PolicyRetryInfoMetaData(true, 0), - info = mapOf("message" to "Failed to find state=${this.transitionTo ?: this.stateMetaData} in policy=${this.policyID}") + info = mapOf("message" to "Failed to find state=${this.transitionTo ?: this.stateMetaData} in policy=${this.policyID}"), ) } @@ -308,7 +314,7 @@ fun ManagedIndexMetaData.getStartingManagedIndexMetaData( if (action == null || step == null) { return this.copy( policyRetryInfo = PolicyRetryInfoMetaData(true, 0), - info = mapOf("message" to "Failed to find action=${this.actionMetaData} in state=${this.stateMetaData}") + info = mapOf("message" to "Failed to find action=${this.actionMetaData} in state=${this.stateMetaData}"), ) } @@ -320,34 +326,37 @@ fun ManagedIndexMetaData.getStartingManagedIndexMetaData( stateMetaData = updatedStateMetaData, actionMetaData = updatedActionMetaData, stepMetaData = updatedStepMetaData, - info = mapOf("message" to "Starting action ${action.type} and working on ${step.name}") + info = mapOf("message" to "Starting action ${action.type} and working on ${step.name}"), ) } @Suppress("ReturnCount") fun ManagedIndexMetaData.getCompletedManagedIndexMetaData( action: Action, - step: Step + step: Step, ): ManagedIndexMetaData { val updatedStepMetaData = step.getUpdatedManagedIndexMetadata(this) - val actionMetaData = updatedStepMetaData.actionMetaData ?: return this.copy( - policyRetryInfo = PolicyRetryInfoMetaData(true, 0), - info = mapOf("message" to "Failed due to ActionMetaData being null") - ) + val actionMetaData = + updatedStepMetaData.actionMetaData ?: return this.copy( + policyRetryInfo = PolicyRetryInfoMetaData(true, 0), + info = mapOf("message" to "Failed due to ActionMetaData being null"), + ) - val updatedActionMetaData = if 
(updatedStepMetaData.stepMetaData?.stepStatus == Step.StepStatus.FAILED) { - when { - action.configRetry == null -> actionMetaData.copy(failed = true) - actionMetaData.consumedRetries >= action.configRetry!!.count -> actionMetaData.copy(failed = true) - else -> actionMetaData.copy( - failed = false, - consumedRetries = actionMetaData.consumedRetries + 1, - lastRetryTime = Instant.now().toEpochMilli() - ) + val updatedActionMetaData = + if (updatedStepMetaData.stepMetaData?.stepStatus == Step.StepStatus.FAILED) { + when { + action.configRetry == null -> actionMetaData.copy(failed = true) + actionMetaData.consumedRetries >= action.configRetry!!.count -> actionMetaData.copy(failed = true) + else -> + actionMetaData.copy( + failed = false, + consumedRetries = actionMetaData.consumedRetries + 1, + lastRetryTime = Instant.now().toEpochMilli(), + ) + } + } else { + actionMetaData } - } else { - actionMetaData - } return this.copy( policyCompleted = updatedStepMetaData.policyCompleted, @@ -357,14 +366,15 @@ fun ManagedIndexMetaData.getCompletedManagedIndexMetaData( stepMetaData = updatedStepMetaData.stepMetaData, transitionTo = updatedStepMetaData.transitionTo, policyRetryInfo = updatedStepMetaData.policyRetryInfo, - info = updatedStepMetaData.info + info = updatedStepMetaData.info, ) } val ManagedIndexMetaData.isSuccessfulDelete: Boolean - get() = (this.actionMetaData?.name == DeleteAction.name && !this.actionMetaData!!.failed) && - (this.stepMetaData?.name == DeleteAction.name && this.stepMetaData!!.stepStatus == Step.StepStatus.COMPLETED) && - (this.policyRetryInfo?.failed != true) + get() = + (this.actionMetaData?.name == DeleteAction.name && !this.actionMetaData!!.failed) && + (this.stepMetaData?.name == DeleteAction.name && this.stepMetaData!!.stepStatus == Step.StepStatus.COMPLETED) && + (this.policyRetryInfo?.failed != true) val ManagedIndexMetaData.isFailed: Boolean get() { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/NotificationUtils.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/NotificationUtils.kt index b943e08ee..43df1aadf 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/NotificationUtils.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/NotificationUtils.kt @@ -4,6 +4,7 @@ */ @file:JvmName("NotificationUtils") + package org.opensearch.indexmanagement.indexstatemanagement.util import org.opensearch.client.Client @@ -15,11 +16,11 @@ import org.opensearch.commons.notifications.action.LegacyPublishNotificationRequ import org.opensearch.commons.notifications.action.LegacyPublishNotificationResponse import org.opensearch.commons.notifications.model.EventSource import org.opensearch.commons.notifications.model.SeverityType +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.common.model.notification.Channel import org.opensearch.indexmanagement.common.model.notification.validateResponseStatus import org.opensearch.indexmanagement.opensearchapi.suspendUntil import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData -import org.opensearch.core.rest.RestStatus /** * Extension function for publishing a notification to a legacy destination. 
@@ -31,13 +32,14 @@ import org.opensearch.core.rest.RestStatus */ suspend fun LegacyBaseMessage.publishLegacyNotification(client: Client) { val baseMessage = this - val res: LegacyPublishNotificationResponse = NotificationsPluginInterface.suspendUntil { - this.publishLegacyNotification( - (client as NodeClient), - LegacyPublishNotificationRequest(baseMessage), - it - ) - } + val res: LegacyPublishNotificationResponse = + NotificationsPluginInterface.suspendUntil { + this.publishLegacyNotification( + (client as NodeClient), + LegacyPublishNotificationRequest(baseMessage), + it, + ) + } validateResponseStatus(RestStatus.fromCode(res.destinationResponse.statusCode), res.destinationResponse.responseContent) } @@ -50,7 +52,7 @@ suspend fun Channel.sendNotification( title: String, managedIndexMetaData: ManagedIndexMetaData, compiledMessage: String, - user: User? + user: User?, ) { val eventSource = managedIndexMetaData.getEventSource(title) this.sendNotification(client, eventSource, compiledMessage, user) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/RestHandlerUtils.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/RestHandlerUtils.kt index 6c5a45694..2c006d027 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/RestHandlerUtils.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/RestHandlerUtils.kt @@ -4,19 +4,20 @@ */ @file:Suppress("TopLevelPropertyNaming", "MatchingDeclarationName") + package org.opensearch.indexmanagement.indexstatemanagement.util import org.opensearch.OpenSearchParseException import org.opensearch.action.support.clustermanager.ClusterManagerNodeRequest +import org.opensearch.common.logging.DeprecationLogger +import org.opensearch.common.unit.TimeValue +import org.opensearch.common.xcontent.XContentFactory import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable -import org.opensearch.common.logging.DeprecationLogger -import org.opensearch.common.unit.TimeValue import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentFragment import org.opensearch.core.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentFactory import org.opensearch.indexmanagement.indexstatemanagement.model.ChangePolicy import org.opensearch.indexmanagement.indexstatemanagement.model.ManagedIndexConfig import org.opensearch.indexmanagement.opensearchapi.optionalTimeField @@ -72,7 +73,6 @@ fun buildInvalidIndexResponse(builder: XContentBuilder, failedIndices: List 0.001) { // If the user set value is 95%, diskThresholdPercent will be returned as 5% from the DiskThresholdSettings object ((diskThresholdPercent / 100) * totalNodeBytes).toLong() - } else diskThresholdBytes.bytes + } else { + diskThresholdBytes.bytes + } } fun getDiskSettings(clusterSettings: ClusterSettings): Settings { return Settings.builder().put( CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.key, - clusterSettings.get(CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING) + clusterSettings.get(CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING), ).put( CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.key, - clusterSettings.get(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING) + clusterSettings.get(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING), ).put( 
CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.key, - clusterSettings.get(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING) + clusterSettings.get(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING), ).build() } @@ -182,7 +184,7 @@ fun getNodeFreeDiskSpaceAfterShrink(node: NodeStats, indexSizeInBytes: Long, clu suspend fun isIndexGreen( client: Client, indexName: String, - timeout: TimeValue = TimeValue(AttemptMoveShardsStep.THIRTY_SECONDS_IN_MILLIS) + timeout: TimeValue = TimeValue(AttemptMoveShardsStep.THIRTY_SECONDS_IN_MILLIS), ): Boolean { // get index health, waiting for a green status val healthReq = ClusterHealthRequest().indices(indexName).waitForGreenStatus().timeout(timeout) @@ -192,9 +194,10 @@ suspend fun isIndexGreen( } suspend fun resetReadOnlyAndRouting(index: String, client: Client, originalSettings: Map): Boolean { - val allocationSettings = Settings.builder() - .put(AttemptMoveShardsStep.ROUTING_SETTING, originalSettings[AttemptMoveShardsStep.ROUTING_SETTING]) - .put(IndexMetadata.SETTING_BLOCKS_WRITE, originalSettings[IndexMetadata.SETTING_BLOCKS_WRITE]).build() + val allocationSettings = + Settings.builder() + .put(AttemptMoveShardsStep.ROUTING_SETTING, originalSettings[AttemptMoveShardsStep.ROUTING_SETTING]) + .put(IndexMetadata.SETTING_BLOCKS_WRITE, originalSettings[IndexMetadata.SETTING_BLOCKS_WRITE]).build() val response: AcknowledgedResponse = issueUpdateSettingsRequest(client, index, allocationSettings) if (!response.isAcknowledged) { return false @@ -205,7 +208,7 @@ suspend fun resetReadOnlyAndRouting(index: String, client: Client, originalSetti fun getShrinkLockID(nodeName: String): String { return LockModel.generateLockId( INDEX_MANAGEMENT_INDEX, - getShrinkJobID(nodeName) + getShrinkJobID(nodeName), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ActionValidation.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ActionValidation.kt index 3486c5956..e26f2d32e 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ActionValidation.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ActionValidation.kt @@ -7,43 +7,43 @@ package org.opensearch.indexmanagement.indexstatemanagement.validation import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings -import org.opensearch.indexmanagement.util.OpenForTesting import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ValidationResult +import org.opensearch.indexmanagement.util.OpenForTesting import org.opensearch.monitor.jvm.JvmService @OpenForTesting class ActionValidation( val settings: Settings, val clusterService: ClusterService, - val jvmService: JvmService + val jvmService: JvmService, ) { - @Suppress("ComplexMethod") fun validate(actionName: String, indexName: String): ValidationResult { // map action to validation class - val validation = when (actionName) { - "rollover" -> ValidateRollover(settings, clusterService, jvmService).execute(indexName) - "delete" -> ValidateDelete(settings, clusterService, jvmService).execute(indexName) - "force_merge" -> ValidateForceMerge(settings, clusterService, jvmService).execute(indexName) - "open" -> ValidateOpen(settings, clusterService, jvmService).execute(indexName) - "read_only" -> ValidateReadOnly(settings, clusterService, jvmService).execute(indexName) - "read_write" -> ValidateReadWrite(settings, clusterService, jvmService).execute(indexName) - 
"replica_count" -> ValidateReplicaCount(settings, clusterService, jvmService).execute(indexName) - "snapshot" -> ValidateSnapshot(settings, clusterService, jvmService).execute(indexName) - "transition" -> ValidateTransition(settings, clusterService, jvmService).execute(indexName) - "close" -> ValidateClose(settings, clusterService, jvmService).execute(indexName) - "index_priority" -> ValidateIndexPriority(settings, clusterService, jvmService).execute(indexName) - // No validations for these actions at current stage. - // Reason: https://github.com/opensearch-project/index-management/issues/587 - "notification" -> ValidateNothing(settings, clusterService, jvmService).execute(indexName) - "shrink" -> ValidateNothing(settings, clusterService, jvmService).execute(indexName) - "allocation" -> ValidateNothing(settings, clusterService, jvmService).execute(indexName) - "rollup" -> ValidateNothing(settings, clusterService, jvmService).execute(indexName) - else -> { - // temporary call until all actions are mapped - ValidateNothing(settings, clusterService, jvmService).execute(indexName) + val validation = + when (actionName) { + "rollover" -> ValidateRollover(settings, clusterService, jvmService).execute(indexName) + "delete" -> ValidateDelete(settings, clusterService, jvmService).execute(indexName) + "force_merge" -> ValidateForceMerge(settings, clusterService, jvmService).execute(indexName) + "open" -> ValidateOpen(settings, clusterService, jvmService).execute(indexName) + "read_only" -> ValidateReadOnly(settings, clusterService, jvmService).execute(indexName) + "read_write" -> ValidateReadWrite(settings, clusterService, jvmService).execute(indexName) + "replica_count" -> ValidateReplicaCount(settings, clusterService, jvmService).execute(indexName) + "snapshot" -> ValidateSnapshot(settings, clusterService, jvmService).execute(indexName) + "transition" -> ValidateTransition(settings, clusterService, jvmService).execute(indexName) + "close" -> ValidateClose(settings, clusterService, jvmService).execute(indexName) + "index_priority" -> ValidateIndexPriority(settings, clusterService, jvmService).execute(indexName) + // No validations for these actions at current stage. 
+ // Reason: https://github.com/opensearch-project/index-management/issues/587 + "notification" -> ValidateNothing(settings, clusterService, jvmService).execute(indexName) + "shrink" -> ValidateNothing(settings, clusterService, jvmService).execute(indexName) + "allocation" -> ValidateNothing(settings, clusterService, jvmService).execute(indexName) + "rollup" -> ValidateNothing(settings, clusterService, jvmService).execute(indexName) + else -> { + // temporary call until all actions are mapped + ValidateNothing(settings, clusterService, jvmService).execute(indexName) + } } - } return ValidationResult(validation.validationMessage.toString(), validation.validationStatus) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateClose.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateClose.kt index aa65fe967..dae6e8507 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateClose.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateClose.kt @@ -18,9 +18,8 @@ import org.opensearch.monitor.jvm.JvmService class ValidateClose( settings: Settings, clusterService: ClusterService, - jvmService: JvmService + jvmService: JvmService, ) : Validate(settings, clusterService, jvmService) { - private val logger = LogManager.getLogger(javaClass) @Suppress("ReturnSuppressCount", "ReturnCount") @@ -63,8 +62,11 @@ class ValidateClose( @Suppress("TooManyFunctions") companion object { const val name = "validate_close" + fun getNoIndexMessage(index: String) = "No such index [index=$index] for close action." + fun getIndexNotValidMessage(index: String) = "Index [index=$index] is not valid. Abort close action on it." 
+ fun getValidationPassedMessage(index: String) = "Close action validation passed for [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateDelete.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateDelete.kt index 70ce78d58..0c9991a0e 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateDelete.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateDelete.kt @@ -19,9 +19,8 @@ import org.opensearch.monitor.jvm.JvmService class ValidateDelete( settings: Settings, clusterService: ClusterService, - jvmService: JvmService + jvmService: JvmService, ) : Validate(settings, clusterService, jvmService) { - private val logger = LogManager.getLogger(javaClass) @Suppress("ReturnSuppressCount", "ReturnCount") @@ -42,10 +41,11 @@ class ValidateDelete( val metadata = clusterService.state().metadata() val indexAbstraction = metadata.indicesLookup[indexName] val isDataStreamIndex = indexAbstraction?.parentDataStream != null - val rolloverTarget = when { - isDataStreamIndex -> indexAbstraction?.parentDataStream?.name - else -> metadata.index(indexName).getRolloverAlias() - } + val rolloverTarget = + when { + isDataStreamIndex -> indexAbstraction?.parentDataStream?.name + else -> metadata.index(indexName).getRolloverAlias() + } return rolloverTarget to isDataStreamIndex } @@ -100,9 +100,13 @@ class ValidateDelete( @Suppress("TooManyFunctions") companion object { const val name = "validate_delete" + fun getNoIndexMessage(index: String) = "no such index [index=$index]" + fun getIndexNotValidMessage(index: String) = "delete index [index=$index] not valid" + fun getFailedIsWriteIndexMessage(index: String) = "Index [index=$index] is the write index for data stream and cannot be deleted" + fun getValidationPassedMessage(index: String) = "Delete validation passed for [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateForceMerge.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateForceMerge.kt index 14a8328c7..4c8360ec2 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateForceMerge.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateForceMerge.kt @@ -8,18 +8,17 @@ package org.opensearch.indexmanagement.indexstatemanagement.validation import org.apache.logging.log4j.LogManager import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings -import org.opensearch.indexmanagement.util.OpenForTesting import org.opensearch.indexmanagement.spi.indexstatemanagement.Validate import org.opensearch.indexmanagement.transform.settings.TransformSettings +import org.opensearch.indexmanagement.util.OpenForTesting import org.opensearch.monitor.jvm.JvmService @OpenForTesting class ValidateForceMerge( settings: Settings, clusterService: ClusterService, - jvmService: JvmService + jvmService: JvmService, ) : Validate(settings, clusterService, jvmService) { - private val logger = LogManager.getLogger(javaClass) @Suppress("ReturnSuppressCount", "ReturnCount") @@ -46,7 +45,9 @@ class ValidateForceMerge( @Suppress("TooManyFunctions") companion object { const val name = "validate_force_merge" + fun getFailedDataTooLargeMessage(index: String) = "Data too large and is over the allowed limit for index [index=$index]" + 
fun getValidationPassedMessage(index: String) = "Force merge validation passed for [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateIndexPriority.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateIndexPriority.kt index 1ecde743e..dea874a43 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateIndexPriority.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateIndexPriority.kt @@ -3,28 +3,22 @@ * SPDX-License-Identifier: Apache-2.0 */ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - package org.opensearch.indexmanagement.indexstatemanagement.validation import org.apache.logging.log4j.LogManager import org.opensearch.cluster.metadata.IndexMetadata import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings -import org.opensearch.indexmanagement.util.OpenForTesting import org.opensearch.indexmanagement.spi.indexstatemanagement.Validate +import org.opensearch.indexmanagement.util.OpenForTesting import org.opensearch.monitor.jvm.JvmService @OpenForTesting class ValidateIndexPriority( settings: Settings, clusterService: ClusterService, - jvmService: JvmService + jvmService: JvmService, ) : Validate(settings, clusterService, jvmService) { - private val logger = LogManager.getLogger(javaClass) @Suppress("ReturnSuppressCount", "ReturnCount") @@ -51,7 +45,9 @@ class ValidateIndexPriority( @Suppress("TooManyFunctions") companion object { const val name = "validate_index_priority" + fun getReadOnlyAllowDeleteBlockMessage(index: String) = "read_only_allow_delete block is not null for index [index=$index]" + fun getValidationPassedMessage(index: String) = "Index Priority action validation passed for [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateNothing.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateNothing.kt index 596c46edd..e894e9703 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateNothing.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateNothing.kt @@ -7,17 +7,16 @@ package org.opensearch.indexmanagement.indexstatemanagement.validation import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings -import org.opensearch.indexmanagement.util.OpenForTesting import org.opensearch.indexmanagement.spi.indexstatemanagement.Validate +import org.opensearch.indexmanagement.util.OpenForTesting import org.opensearch.monitor.jvm.JvmService @OpenForTesting class ValidateNothing( settings: Settings, clusterService: ClusterService, - jvmService: JvmService + jvmService: JvmService, ) : Validate(settings, clusterService, jvmService) { - // skips validation override fun execute(indexName: String): Validate { return this diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateOpen.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateOpen.kt index 3c9668fec..0ba756bf1 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateOpen.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateOpen.kt @@ -3,11 +3,6 @@ * 
SPDX-License-Identifier: Apache-2.0 */ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - package org.opensearch.indexmanagement.indexstatemanagement.validation import org.apache.logging.log4j.LogManager @@ -21,9 +16,8 @@ import org.opensearch.monitor.jvm.JvmService class ValidateOpen( settings: Settings, clusterService: ClusterService, - jvmService: JvmService + jvmService: JvmService, ) : Validate(settings, clusterService, jvmService) { - private val logger = LogManager.getLogger(javaClass) @Suppress("ReturnSuppressCount", "ReturnCount") @@ -65,8 +59,11 @@ class ValidateOpen( @Suppress("TooManyFunctions") companion object { const val name = "validate_open" + fun getReadOnlyAllowDeleteBlockMessage(index: String) = "read_only_allow_delete block is not null for index [index=$index]" + fun getMaxNumberOfShardsExceededMessage(index: String) = "Maximum number of shards exceeded for index [index=$index]" + fun getValidationPassedMessage(index: String) = "Open action validation passed for [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateReadOnly.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateReadOnly.kt index ab2d02abd..47b54e7a5 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateReadOnly.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateReadOnly.kt @@ -3,11 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - package org.opensearch.indexmanagement.indexstatemanagement.validation import org.apache.logging.log4j.LogManager @@ -22,9 +17,8 @@ import org.opensearch.monitor.jvm.JvmService class ValidateReadOnly( settings: Settings, clusterService: ClusterService, - jvmService: JvmService + jvmService: JvmService, ) : Validate(settings, clusterService, jvmService) { - private val logger = LogManager.getLogger(javaClass) @Suppress("ReturnSuppressCount", "ReturnCount") @@ -64,8 +58,11 @@ class ValidateReadOnly( companion object { const val name = "validate_read_only" const val settingKey = "read_only_allow_delete" + fun getReadOnlyAllowDeleteBlockMessage(index: String) = "read_only_allow_delete block is not null for index [index=$index]" + fun getFailedDataTooLargeMessage(index: String) = "Data too large and is over the allowed limit for index [index=$index]" + fun getValidationPassedMessage(index: String) = "validate_read_only action validation passed for [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateReadWrite.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateReadWrite.kt index 78521bb56..b941c090a 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateReadWrite.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateReadWrite.kt @@ -3,11 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - package org.opensearch.indexmanagement.indexstatemanagement.validation import org.apache.logging.log4j.LogManager @@ -21,9 +16,8 @@ import org.opensearch.monitor.jvm.JvmService class ValidateReadWrite( settings: Settings, clusterService: ClusterService, - jvmService: JvmService + jvmService: JvmService, ) : 
Validate(settings, clusterService, jvmService) { - private val logger = LogManager.getLogger(javaClass) @Suppress("ReturnSuppressCount", "ReturnCount") @@ -51,7 +45,9 @@ class ValidateReadWrite( companion object { const val name = "validate_read_write" const val settingKey = "read_only_allow_delete" + fun getReadOnlyAllowDeleteBlockMessage(index: String) = "read_only_allow_delete block is not null for index [index=$index]" + fun getValidationPassedMessage(index: String) = "read_write validation passed for [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateReplicaCount.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateReplicaCount.kt index 0e9410248..3cfb3e488 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateReplicaCount.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateReplicaCount.kt @@ -3,11 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - package org.opensearch.indexmanagement.indexstatemanagement.validation import org.apache.logging.log4j.LogManager @@ -22,9 +17,8 @@ import org.opensearch.monitor.jvm.JvmService class ValidateReplicaCount( settings: Settings, clusterService: ClusterService, - jvmService: JvmService + jvmService: JvmService, ) : Validate(settings, clusterService, jvmService) { - private val logger = LogManager.getLogger(javaClass) @Suppress("ReturnSuppressCount", "ReturnCount") @@ -67,8 +61,11 @@ class ValidateReplicaCount( @Suppress("TooManyFunctions") companion object { const val name = "validate_replica_count" + fun getFailedDataTooLargeMessage(index: String) = "Data too large and is over the allowed limit for index [index=$index]" + fun getMaxNumberOfShardsExceededMessage(index: String) = "Maximum number of shards exceeded for index [index=$index]" + fun getValidationPassedMessage(index: String) = "Replica Count validation passed for [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateRollover.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateRollover.kt index aaa0ea4cb..8fa1cc292 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateRollover.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateRollover.kt @@ -10,17 +10,16 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings import org.opensearch.indexmanagement.indexstatemanagement.opensearchapi.getRolloverAlias import org.opensearch.indexmanagement.indexstatemanagement.opensearchapi.getRolloverSkip -import org.opensearch.indexmanagement.util.OpenForTesting import org.opensearch.indexmanagement.spi.indexstatemanagement.Validate +import org.opensearch.indexmanagement.util.OpenForTesting import org.opensearch.monitor.jvm.JvmService @OpenForTesting class ValidateRollover( settings: Settings, clusterService: ClusterService, - jvmService: JvmService + jvmService: JvmService, ) : Validate(settings, clusterService, jvmService) { - private val logger = LogManager.getLogger(javaClass) // returns a Validate object with updated validation and step status @@ -99,10 +98,11 @@ class ValidateRollover( val indexAbstraction = metadata.indicesLookup[indexName] val isDataStreamIndex = 
indexAbstraction?.parentDataStream != null - val rolloverTarget = when { - isDataStreamIndex -> indexAbstraction?.parentDataStream?.name - else -> metadata.index(indexName).getRolloverAlias() - } + val rolloverTarget = + when { + isDataStreamIndex -> indexAbstraction?.parentDataStream?.name + else -> metadata.index(indexName).getRolloverAlias() + } if (rolloverTarget == null) { val message = getFailedNoValidAliasMessage(indexName) @@ -117,12 +117,18 @@ class ValidateRollover( @Suppress("TooManyFunctions") companion object { const val name = "validate_rollover" + fun getFailedWriteIndexMessage(index: String) = "Not the write index when rollover [index=$index]" + fun getMissingAliasMessage(index: String) = "Missing alias when rollover [index=$index]" + fun getFailedNoValidAliasMessage(index: String) = "Missing rollover_alias index setting [index=$index]" + fun getAlreadyRolledOverMessage(index: String, alias: String?) = "This index has already been rolled over using this alias, treating as a success [index=$index, alias=$alias]" + fun getSkipRolloverMessage(index: String) = "Skipped rollover action for [index=$index]" + fun getValidationPassedMessage(index: String) = "Rollover validation passed for [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateSnapshot.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateSnapshot.kt index 91febd570..e6e67a5f4 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateSnapshot.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateSnapshot.kt @@ -3,11 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - package org.opensearch.indexmanagement.indexstatemanagement.validation import org.apache.logging.log4j.LogManager @@ -23,9 +18,8 @@ import org.opensearch.monitor.jvm.JvmService class ValidateSnapshot( settings: Settings, clusterService: ClusterService, - jvmService: JvmService + jvmService: JvmService, ) : Validate(settings, clusterService, jvmService) { - private val logger = LogManager.getLogger(javaClass) @Suppress("ReturnSuppressCount", "ReturnCount") @@ -67,8 +61,11 @@ class ValidateSnapshot( @Suppress("TooManyFunctions") companion object { const val name = "validate_snapshot" + fun getNoIndexMessage(index: String) = "Index [index=$index] does not exist for snapshot action." + fun getIndexNotValidMessage(index: String) = "Index [index=$index] is not valid for snapshot action." 
+ fun getValidationPassedMessage(index: String) = "Snapshot action validation passed for [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateTransition.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateTransition.kt index 9faeff20f..d727eec1f 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateTransition.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/validation/ValidateTransition.kt @@ -18,9 +18,8 @@ import org.opensearch.monitor.jvm.JvmService class ValidateTransition( settings: Settings, clusterService: ClusterService, - jvmService: JvmService + jvmService: JvmService, ) : Validate(settings, clusterService, jvmService) { - private val logger = LogManager.getLogger(javaClass) @Suppress("ReturnSuppressCount", "ReturnCount") @@ -64,8 +63,11 @@ class ValidateTransition( @Suppress("TooManyFunctions") companion object { const val name = "validate_transition" + fun getNoIndexMessage(index: String) = "Index [index=$index] does not exist for transition" + fun getIndexNotValidMessage(index: String) = "Index [index=$index] is not valid for transition" + fun getValidationPassedMessage(index: String) = "Transition action validation passed for [index=$index]" } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/opensearchapi/OpenSearchExtensions.kt b/src/main/kotlin/org/opensearch/indexmanagement/opensearchapi/OpenSearchExtensions.kt index 80f460028..a4464c343 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/opensearchapi/OpenSearchExtensions.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/opensearchapi/OpenSearchExtensions.kt @@ -15,35 +15,36 @@ import org.apache.logging.log4j.LogManager import org.apache.logging.log4j.Logger import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchException -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.indices.alias.Alias import org.opensearch.action.bulk.BackoffPolicy import org.opensearch.action.get.GetResponse import org.opensearch.action.search.SearchResponse import org.opensearch.client.OpenSearchClient -import org.opensearch.core.common.io.stream.StreamInput -import org.opensearch.core.common.io.stream.StreamOutput -import org.opensearch.core.common.io.stream.Writeable import org.opensearch.common.settings.Settings import org.opensearch.common.unit.TimeValue import org.opensearch.common.util.concurrent.ThreadContext import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.XContentHelper -import org.opensearch.core.xcontent.XContentParserUtils -import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.InjectSecurity import org.opensearch.commons.authuser.User import org.opensearch.commons.notifications.NotificationsPluginInterface +import org.opensearch.core.action.ActionListener import org.opensearch.core.action.support.DefaultShardOperationFailedException import org.opensearch.core.common.bytes.BytesReference +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.MediaType import 
org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParser.Token +import org.opensearch.core.xcontent.XContentParserUtils +import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.indexstatemanagement.action.ShrinkAction import org.opensearch.indexmanagement.indexstatemanagement.model.ISMTemplate @@ -53,7 +54,6 @@ import org.opensearch.indexmanagement.util.NO_ID import org.opensearch.indexmanagement.util.SecurityUtils.Companion.DEFAULT_INJECT_ROLES import org.opensearch.indexmanagement.util.SecurityUtils.Companion.INTERNAL_REQUEST import org.opensearch.jobscheduler.spi.utils.LockService -import org.opensearch.core.rest.RestStatus import org.opensearch.transport.RemoteTransportException import java.io.IOException import java.time.Instant @@ -69,15 +69,16 @@ fun contentParser(bytesReference: BytesReference, xContentRegistry: NamedXConten xContentRegistry, LoggingDeprecationHandler.INSTANCE, bytesReference, - XContentType.JSON + XContentType.JSON, ) } /** Convert an object to maps and lists representation */ fun ToXContent.convertToMap(): Map { - val bytesReference = org.opensearch.core.xcontent.XContentHelper.toXContent( - this, XContentType.JSON, ToXContent.EMPTY_PARAMS, false - ) + val bytesReference = + org.opensearch.core.xcontent.XContentHelper.toXContent( + this, XContentType.JSON, ToXContent.EMPTY_PARAMS, false, + ) return XContentHelper.convertToMap(bytesReference, false, XContentType.JSON as (MediaType)).v2() } @@ -126,7 +127,7 @@ fun XContentBuilder.optionalUserField(name: String, user: User?): XContentBuilde fun parseFromSearchResponse( response: SearchResponse, xContentRegistry: NamedXContentRegistry = NamedXContentRegistry.EMPTY, - parse: (xcp: XContentParser, id: String, seqNo: Long, primaryTerm: Long) -> T + parse: (xcp: XContentParser, id: String, seqNo: Long, primaryTerm: Long) -> T, ): List { return response.hits.hits.map { val id = it.id @@ -143,14 +144,15 @@ fun parseFromSearchResponse( fun parseFromGetResponse( response: GetResponse, xContentRegistry: NamedXContentRegistry = NamedXContentRegistry.EMPTY, - parse: (xcp: XContentParser, id: String, seqNo: Long, primaryTerm: Long) -> T + parse: (xcp: XContentParser, id: String, seqNo: Long, primaryTerm: Long) -> T, ): T { - val xcp = XContentHelper.createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, - response.sourceAsBytesRef, - XContentType.JSON - ) + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + response.sourceAsBytesRef, + XContentType.JSON, + ) return xcp.parseWithType(response.id, response.seqNo, response.primaryTerm, parse) } @@ -169,7 +171,7 @@ fun parseFromGetResponse( suspend fun BackoffPolicy.retry( logger: Logger, retryOn: List = emptyList(), - block: suspend (backoff: TimeValue) -> T + block: suspend (backoff: TimeValue) -> T, ): T { val iter = iterator() var backoff: TimeValue = TimeValue.ZERO @@ -216,11 +218,13 @@ fun XContentBuilder.toMap(): Map = XContentHelper.convertToMap(Byte */ suspend fun C.suspendUntil(block: C.(ActionListener) -> Unit): T = suspendCoroutine { cont -> - block(object : ActionListener { - override fun onResponse(response: T) = cont.resume(response) + block( + object : ActionListener { + override fun onResponse(response: 
T) = cont.resume(response) - override fun onFailure(e: Exception) = cont.resumeWithException(e) - }) + override fun onFailure(e: Exception) = cont.resumeWithException(e) + }, + ) } /** @@ -230,11 +234,13 @@ suspend fun C.suspendUntil(block: C.(ActionListener */ suspend fun LockService.suspendUntil(block: LockService.(ActionListener) -> Unit): T = suspendCoroutine { cont -> - block(object : ActionListener { - override fun onResponse(response: T) = cont.resume(response) + block( + object : ActionListener { + override fun onResponse(response: T) = cont.resume(response) - override fun onFailure(e: Exception) = cont.resumeWithException(e) - }) + override fun onFailure(e: Exception) = cont.resumeWithException(e) + }, + ) } /** @@ -244,11 +250,13 @@ suspend fun LockService.suspendUntil(block: LockService.(ActionListener) */ suspend fun NotificationsPluginInterface.suspendUntil(block: NotificationsPluginInterface.(ActionListener) -> Unit): T = suspendCoroutine { cont -> - block(object : ActionListener { - override fun onResponse(response: T) = cont.resume(response) + block( + object : ActionListener { + override fun onResponse(response: T) = cont.resume(response) - override fun onFailure(e: Exception) = cont.resumeWithException(e) - }) + override fun onFailure(e: Exception) = cont.resumeWithException(e) + }, + ) } fun Throwable.findRemoteTransportException(): RemoteTransportException? { @@ -267,7 +275,7 @@ fun XContentParser.parseWithType( id: String = NO_ID, seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, - parse: (xcp: XContentParser, id: String, seqNo: Long, primaryTerm: Long) -> T + parse: (xcp: XContentParser, id: String, seqNo: Long, primaryTerm: Long) -> T, ): T { ensureExpectedToken(Token.START_OBJECT, nextToken(), this) ensureExpectedToken(Token.FIELD_NAME, nextToken(), this) @@ -281,9 +289,8 @@ class IndexManagementSecurityContext( private val id: String, settings: Settings, private val threadContext: ThreadContext, - private val user: User? + private val user: User?, ) : ThreadContextElement { - companion object Key : CoroutineContext.Key private val logger: Logger = LogManager.getLogger(javaClass) @@ -312,7 +319,7 @@ class IndexManagementSecurityContext( suspend fun withClosableContext( context: IndexManagementSecurityContext, - block: suspend CoroutineScope.() -> T + block: suspend CoroutineScope.() -> T, ): T { try { return withContext(context) { block() } @@ -322,7 +329,11 @@ suspend fun withClosableContext( } fun XContentBuilder.optionalField(name: String, value: Any?): XContentBuilder { - return if (value != null) { this.field(name, value) } else this + return if (value != null) { + this.field(name, value) + } else { + this + } } fun XContentBuilder.optionalInfoField(name: String, info: SMMetadata.Info?): XContentBuilder { @@ -332,7 +343,9 @@ fun XContentBuilder.optionalInfoField(name: String, info: SMMetadata.Info?): XCo } else { this } - } else this + } else { + this + } } inline fun XContentParser.nullValueHandler(block: XContentParser.() -> T): T? { @@ -350,7 +363,11 @@ inline fun XContentParser.parseArray(block: XContentParser.() -> T): List // similar to readOptionalWriteable fun StreamInput.readOptionalValue(reader: Writeable.Reader): T? 
{ - return if (readBoolean()) { reader.read(this) } else null + return if (readBoolean()) { + reader.read(this) + } else { + null + } } fun StreamOutput.writeOptionalValue(value: T, writer: Writeable.Writer) { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerResponse.kt index f4722644c..686a9480a 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerResponse.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerResponse.kt @@ -5,8 +5,8 @@ package org.opensearch.indexmanagement.refreshanalyzer -import org.opensearch.core.action.support.DefaultShardOperationFailedException import org.opensearch.action.support.broadcast.BroadcastResponse +import org.opensearch.core.action.support.DefaultShardOperationFailedException import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.xcontent.ConstructingObjectParser @@ -17,7 +17,6 @@ import java.io.IOException import java.util.function.Function class RefreshSearchAnalyzerResponse : BroadcastResponse { - private lateinit var shardResponses: MutableList private lateinit var shardFailures: MutableList @@ -32,9 +31,9 @@ class RefreshSearchAnalyzerResponse : BroadcastResponse { successfulShards: Int, failedShards: Int, shardFailures: List, - shardResponses: List + shardResponses: List, ) : super( - totalShards, successfulShards, failedShards, shardFailures + totalShards, successfulShards, failedShards, shardFailures, ) { this.shardResponses = shardResponses.toMutableList() this.shardFailures = shardFailures.toMutableList() @@ -74,16 +73,18 @@ class RefreshSearchAnalyzerResponse : BroadcastResponse { } companion object { - private val PARSER = ConstructingObjectParser( - "_refresh_search_analyzers", true, - Function { arg: Array -> - val response = arg[0] as RefreshSearchAnalyzerResponse - RefreshSearchAnalyzerResponse( - response.totalShards, response.successfulShards, response.failedShards, - response.shardFailures, response.shardResponses - ) - } - ) + private val PARSER = + ConstructingObjectParser( + "_refresh_search_analyzers", true, + Function { arg: Array -> + val response = arg[0] as RefreshSearchAnalyzerResponse + RefreshSearchAnalyzerResponse( + response.totalShards, response.successfulShards, response.failedShards, + response.shardFailures, response.shardResponses, + ) + }, + ) + init { declareBroadcastFields(PARSER) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RestRefreshSearchAnalyzerAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RestRefreshSearchAnalyzerAction.kt index 3f1e83f0c..12a88b4bc 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RestRefreshSearchAnalyzerAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RestRefreshSearchAnalyzerAction.kt @@ -18,7 +18,6 @@ import org.opensearch.rest.action.RestToXContentListener import java.io.IOException class RestRefreshSearchAnalyzerAction : BaseRestHandler() { - override fun getName(): String = "refresh_search_analyzer_action" override fun routes(): List { @@ -29,12 +28,12 @@ class RestRefreshSearchAnalyzerAction : BaseRestHandler() { return listOf( ReplacedRoute( POST, REFRESH_SEARCH_ANALYZER_BASE_URI, - POST, LEGACY_REFRESH_SEARCH_ANALYZER_BASE_URI + POST, 
LEGACY_REFRESH_SEARCH_ANALYZER_BASE_URI, ), ReplacedRoute( POST, "$REFRESH_SEARCH_ANALYZER_BASE_URI/{index}", - POST, "$LEGACY_REFRESH_SEARCH_ANALYZER_BASE_URI/{index}" - ) + POST, "$LEGACY_REFRESH_SEARCH_ANALYZER_BASE_URI/{index}", + ), ) } @@ -49,8 +48,9 @@ class RestRefreshSearchAnalyzerAction : BaseRestHandler() { throw IllegalArgumentException("Missing indices") } - val refreshSearchAnalyzerRequest: RefreshSearchAnalyzerRequest = RefreshSearchAnalyzerRequest() - .indices(*indices) + val refreshSearchAnalyzerRequest: RefreshSearchAnalyzerRequest = + RefreshSearchAnalyzerRequest() + .indices(*indices) return RestChannelConsumer { channel -> client.execute(RefreshSearchAnalyzerAction.INSTANCE, refreshSearchAnalyzerRequest, RestToXContentListener(channel)) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/refreshanalyzer/TransportRefreshSearchAnalyzerAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/refreshanalyzer/TransportRefreshSearchAnalyzerAction.kt index 28473d451..3cdd6a599 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/refreshanalyzer/TransportRefreshSearchAnalyzerAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/refreshanalyzer/TransportRefreshSearchAnalyzerAction.kt @@ -7,7 +7,6 @@ package org.opensearch.indexmanagement.refreshanalyzer import org.apache.logging.log4j.LogManager import org.opensearch.action.support.ActionFilters -import org.opensearch.core.action.support.DefaultShardOperationFailedException import org.opensearch.action.support.broadcast.node.TransportBroadcastByNodeAction import org.opensearch.cluster.ClusterState import org.opensearch.cluster.block.ClusterBlockException @@ -17,6 +16,7 @@ import org.opensearch.cluster.routing.ShardRouting import org.opensearch.cluster.routing.ShardsIterator import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject +import org.opensearch.core.action.support.DefaultShardOperationFailedException import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.Writeable import org.opensearch.index.analysis.AnalysisRegistry @@ -30,8 +30,8 @@ class TransportRefreshSearchAnalyzerAction : TransportBroadcastByNodeAction< RefreshSearchAnalyzerRequest, RefreshSearchAnalyzerResponse, - RefreshSearchAnalyzerShardResponse> { - + RefreshSearchAnalyzerShardResponse, + > { private val log = LogManager.getLogger(javaClass) @Inject @@ -41,7 +41,7 @@ class TransportRefreshSearchAnalyzerAction : indicesService: IndicesService, actionFilters: ActionFilters, analysisRegistry: AnalysisRegistry, - indexNameExpressionResolver: IndexNameExpressionResolver? 
+ indexNameExpressionResolver: IndexNameExpressionResolver?, ) : super( RefreshSearchAnalyzerAction.NAME, clusterService, @@ -49,7 +49,7 @@ class TransportRefreshSearchAnalyzerAction : actionFilters, indexNameExpressionResolver, Writeable.Reader { RefreshSearchAnalyzerRequest() }, - ThreadPool.Names.MANAGEMENT + ThreadPool.Names.MANAGEMENT, ) { this.analysisRegistry = analysisRegistry this.indicesService = indicesService @@ -70,7 +70,7 @@ class TransportRefreshSearchAnalyzerAction : failedShards: Int, shardResponses: List, shardFailures: List, - clusterState: ClusterState + clusterState: ClusterState, ): RefreshSearchAnalyzerResponse { return RefreshSearchAnalyzerResponse(totalShards, successfulShards, failedShards, shardFailures, shardResponses) } @@ -86,7 +86,7 @@ class TransportRefreshSearchAnalyzerAction : val reloadedAnalyzers: List = indexShard.mapperService().reloadSearchAnalyzers(analysisRegistry) log.info( "Reload successful, index: ${shardRouting.shardId().index.name}, shard: ${shardRouting.shardId().id}, " + - "is_primary: ${shardRouting.primary()}" + "is_primary: ${shardRouting.primary()}", ) return RefreshSearchAnalyzerShardResponse(shardRouting.shardId(), reloadedAnalyzers) } @@ -102,8 +102,7 @@ class TransportRefreshSearchAnalyzerAction : return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE) } - override fun checkRequestBlock(state: ClusterState, request: RefreshSearchAnalyzerRequest?, concreteIndices: Array?): - ClusterBlockException? { + override fun checkRequestBlock(state: ClusterState, request: RefreshSearchAnalyzerRequest?, concreteIndices: Array?): ClusterBlockException? { return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, concreteIndices) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupIndexer.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupIndexer.kt index 55db96c21..cab676487 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupIndexer.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupIndexer.kt @@ -40,7 +40,7 @@ import org.opensearch.transport.RemoteTransportException class RollupIndexer( settings: Settings, clusterService: ClusterService, - private val client: Client + private val client: Client, ) { private val logger = LogManager.getLogger(javaClass) @@ -63,7 +63,7 @@ class RollupIndexer( retryIngestPolicy.retry(logger, listOf(RestStatus.TOO_MANY_REQUESTS)) { if (it.seconds >= (Rollup.ROLLUP_LOCK_DURATION_SECONDS / 2)) { throw ExceptionsHelper.convertToOpenSearchException( - IllegalStateException("Cannot retry ingestion with a delay more than half of the rollup lock TTL") + IllegalStateException("Cannot retry ingestion with a delay more than half of the rollup lock TTL"), ) } val bulkRequest = BulkRequest().add(requestsToRetry) @@ -126,9 +126,10 @@ class RollupIndexer( } mapOfKeyValues.putAll(aggResults) val targetIndexResolvedName = RollupFieldValueExpressionResolver.resolve(job, job.targetIndex) - val indexRequest = IndexRequest(targetIndexResolvedName) - .id(documentId) - .source(mapOfKeyValues, XContentType.JSON) + val indexRequest = + IndexRequest(targetIndexResolvedName) + .id(documentId) + .source(mapOfKeyValues, XContentType.JSON) requests.add(indexRequest) } return requests @@ -137,8 +138,9 @@ class RollupIndexer( sealed class RollupIndexResult { data class Success(val stats: RollupStats) : RollupIndexResult() + data class Failure( val message: String = "An error occurred while indexing to the rollup target index", 
- val cause: Exception + val cause: Exception, ) : RollupIndexResult() } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupMapperService.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupMapperService.kt index 8ac3b365e..fc6902ff3 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupMapperService.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupMapperService.kt @@ -50,9 +50,8 @@ import org.opensearch.transport.RemoteTransportException class RollupMapperService( val client: Client, val clusterService: ClusterService, - private val indexNameExpressionResolver: IndexNameExpressionResolver + private val indexNameExpressionResolver: IndexNameExpressionResolver, ) { - private val logger = LogManager.getLogger(javaClass) /** @@ -69,7 +68,7 @@ class RollupMapperService( private suspend fun validateAndAttemptToUpdateTargetIndex( rollup: Rollup, targetIndexResolvedName: String, - hasLegacyPlugin: Boolean + hasLegacyPlugin: Boolean, ): RollupJobValidationResult { if (rollup.isTargetIndexAlias()) { val aliasValidationResult = validateTargetIndexAlias(rollup, targetIndexResolvedName) @@ -94,7 +93,6 @@ class RollupMapperService( */ @Suppress("ReturnCount") suspend fun validateTargetIndexAlias(rollup: Rollup, targetIndexResolvedName: String): RollupJobValidationResult { - val errorMessage: String if (!RollupFieldValueExpressionResolver.indexAliasUtils.hasAlias(targetIndexResolvedName)) { @@ -180,16 +178,19 @@ class RollupMapperService( } suspend fun addRollupSettingToIndex(targetIndexResolvedName: String, hasLegacyPlugin: Boolean): Boolean { - val settings = if (hasLegacyPlugin) { - Settings.builder().put(LegacyOpenDistroRollupSettings.ROLLUP_INDEX.key, true).build() - } else { - Settings.builder().put(RollupSettings.ROLLUP_INDEX.key, true).build() - } - val resp: AcknowledgedResponse = client.admin().indices().suspendUntil { - updateSettings(UpdateSettingsRequest(settings, targetIndexResolvedName), it) - } + val settings = + if (hasLegacyPlugin) { + Settings.builder().put(LegacyOpenDistroRollupSettings.ROLLUP_INDEX.key, true).build() + } else { + Settings.builder().put(RollupSettings.ROLLUP_INDEX.key, true).build() + } + val resp: AcknowledgedResponse = + client.admin().indices().suspendUntil { + updateSettings(UpdateSettingsRequest(settings, targetIndexResolvedName), it) + } return resp.isAcknowledged } + @Suppress("ReturnCount") suspend fun prepareTargetIndex(rollup: Rollup, targetIndexResolvedName: String, hasLegacyPlugin: Boolean): RollupJobValidationResult { var errorMessage = "" @@ -203,9 +204,10 @@ class RollupMapperService( // 2. 
Put rollup target_index mappings val putMappingRequest: PutMappingRequest = PutMappingRequest(targetIndexResolvedName).source(IndexManagementIndices.rollupTargetMappings, XContentType.JSON) - val respMappings: AcknowledgedResponse = client.admin().indices().suspendUntil { - putMapping(putMappingRequest, it) - } + val respMappings: AcknowledgedResponse = + client.admin().indices().suspendUntil { + putMapping(putMappingRequest, it) + } if (!respMappings.isAcknowledged) { return RollupJobValidationResult.Invalid("Failed to put initial rollup mappings for target index [$targetIndexResolvedName]") } @@ -227,14 +229,16 @@ class RollupMapperService( } private suspend fun createTargetIndex(targetIndexName: String, hasLegacyPlugin: Boolean): CreateIndexResponse { - val settings = if (hasLegacyPlugin) { - Settings.builder().put(LegacyOpenDistroRollupSettings.ROLLUP_INDEX.key, true).build() - } else { - Settings.builder().put(RollupSettings.ROLLUP_INDEX.key, true).build() - } - val request = CreateIndexRequest(targetIndexName) - .settings(settings) - .mapping(IndexManagementIndices.rollupTargetMappings) + val settings = + if (hasLegacyPlugin) { + Settings.builder().put(LegacyOpenDistroRollupSettings.ROLLUP_INDEX.key, true).build() + } else { + Settings.builder().put(RollupSettings.ROLLUP_INDEX.key, true).build() + } + val request = + CreateIndexRequest(targetIndexName) + .settings(settings) + .mapping(IndexManagementIndices.rollupTargetMappings) // TODO: Perhaps we can do better than this for mappings... as it'll be dynamic for rest // Can we read in the actual mappings from the source index and use that? // Can it have issues with metrics? i.e. an int mapping with 3, 5, 6 added up and divided by 3 for avg is 14/3 = 4.6666 @@ -271,11 +275,12 @@ class RollupMapperService( @Suppress("ReturnCount", "ComplexMethod") private suspend fun isSourceIndexMappingsValid(index: String, rollup: Rollup): RollupJobValidationResult { try { - val res = when (val getMappingsResult = getMappings(index)) { - is GetMappingsResult.Success -> getMappingsResult.response - is GetMappingsResult.Failure -> - return RollupJobValidationResult.Failure(getMappingsResult.message, getMappingsResult.cause) - } + val res = + when (val getMappingsResult = getMappings(index)) { + is GetMappingsResult.Success -> getMappingsResult.response + is GetMappingsResult.Failure -> + return RollupJobValidationResult.Failure(getMappingsResult.message, getMappingsResult.cause) + } val indexTypeMappings = res.mappings[index] if (indexTypeMappings == null) { @@ -287,8 +292,9 @@ class RollupMapperService( val issues = mutableSetOf() // Validate source fields in dimensions rollup.dimensions.forEach { dimension -> - if (!isFieldInMappings(dimension.sourceField, indexMappingSource)) + if (!isFieldInMappings(dimension.sourceField, indexMappingSource)) { issues.add("missing field ${dimension.sourceField}") + } when (dimension) { is DateHistogram -> { @@ -305,8 +311,9 @@ class RollupMapperService( // Validate source fields in metrics rollup.metrics.forEach { metric -> - if (!isFieldInMappings(metric.sourceField, indexMappingSource)) + if (!isFieldInMappings(metric.sourceField, indexMappingSource)) { issues.add("missing field ${metric.sourceField}") + } // TODO: Validate field type for metrics, // are all Numeric field types valid? 
@@ -337,11 +344,12 @@ class RollupMapperService( } private suspend fun jobExistsInRollupIndex(rollup: Rollup, targetIndexResolvedName: String): RollupJobValidationResult { - val res = when (val getMappingsResult = getMappings(targetIndexResolvedName)) { - is GetMappingsResult.Success -> getMappingsResult.response - is GetMappingsResult.Failure -> - return RollupJobValidationResult.Failure(getMappingsResult.message, getMappingsResult.cause) - } + val res = + when (val getMappingsResult = getMappings(targetIndexResolvedName)) { + is GetMappingsResult.Success -> getMappingsResult.response + is GetMappingsResult.Failure -> + return RollupJobValidationResult.Failure(getMappingsResult.message, getMappingsResult.cause) + } val indexMapping: MappingMetadata? = res.mappings[targetIndexResolvedName] @@ -389,12 +397,14 @@ class RollupMapperService( private suspend fun updateRollupIndexMappings(rollup: Rollup, targetIndexResolvedName: String): RollupJobValidationResult { val errorMessage = "Failed to update mappings of target index [$targetIndexResolvedName] with rollup job" try { - val response = withContext(Dispatchers.IO) { - val resp: AcknowledgedResponse = client.suspendUntil { - execute(UpdateRollupMappingAction.INSTANCE, UpdateRollupMappingRequest(rollup), it) + val response = + withContext(Dispatchers.IO) { + val resp: AcknowledgedResponse = + client.suspendUntil { + execute(UpdateRollupMappingAction.INSTANCE, UpdateRollupMappingRequest(rollup), it) + } + resp.isAcknowledged } - resp.isAcknowledged - } if (!response) { // TODO: when this happens is it failure or invalid? @@ -417,6 +427,7 @@ class RollupMapperService( sealed class GetMappingsResult { data class Success(val response: GetMappingsResponse) : GetMappingsResult() + data class Failure(val message: String = "An error occurred when getting mappings", val cause: Exception) : GetMappingsResult() } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupMetadataService.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupMetadataService.kt index 41eb84a6a..7f96efe5e 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupMetadataService.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupMetadataService.kt @@ -49,7 +49,6 @@ import java.time.Instant // Service that handles CRUD operations for rollup metadata @Suppress("TooManyFunctions") class RollupMetadataService(val client: Client, val xContentRegistry: NamedXContentRegistry) { - private val logger = LogManager.getLogger(javaClass) // If the job does not have a metadataID then we need to initialize the first metadata @@ -57,22 +56,24 @@ class RollupMetadataService(val client: Client, val xContentRegistry: NamedXCont @Suppress("ReturnCount", "ComplexMethod", "NestedBlockDepth") suspend fun init(rollup: Rollup): MetadataResult { if (rollup.metadataID != null) { - val existingMetadata = when (val getMetadataResult = getExistingMetadata(rollup)) { - is MetadataResult.Success -> getMetadataResult.metadata - is MetadataResult.NoMetadata -> null - is MetadataResult.Failure -> return getMetadataResult - } + val existingMetadata = + when (val getMetadataResult = getExistingMetadata(rollup)) { + is MetadataResult.Success -> getMetadataResult.metadata + is MetadataResult.NoMetadata -> null + is MetadataResult.Failure -> return getMetadataResult + } if (existingMetadata != null) { if (existingMetadata.status == RollupMetadata.Status.RETRY) { - val recoveredMetadata = when (val recoverMetadataResult = recoverRetryMetadata(rollup, 
existingMetadata)) { - is MetadataResult.Success -> recoverMetadataResult.metadata - // NoMetadata here means that there were no documents when initializing start time - // for a continuous rollup so we will propagate the response to no-op in the runner - is MetadataResult.NoMetadata -> return recoverMetadataResult - // In case of failure, return early with the result - is MetadataResult.Failure -> return recoverMetadataResult - } + val recoveredMetadata = + when (val recoverMetadataResult = recoverRetryMetadata(rollup, existingMetadata)) { + is MetadataResult.Success -> recoverMetadataResult.metadata + // NoMetadata here means that there were no documents when initializing start time + // for a continuous rollup so we will propagate the response to no-op in the runner + is MetadataResult.NoMetadata -> return recoverMetadataResult + // In case of failure, return early with the result + is MetadataResult.Failure -> return recoverMetadataResult + } // Update to the recovered metadata if recovery was successful return submitMetadataUpdate(recoveredMetadata, true) @@ -86,9 +87,9 @@ class RollupMetadataService(val client: Client, val xContentRegistry: NamedXCont RollupMetadata( rollupID = rollup.id, lastUpdatedTime = Instant.now(), status = RollupMetadata.Status.FAILED, failureReason = "Not able to get the rollup metadata [${rollup.metadataID}]", - stats = RollupStats(0, 0, 0, 0, 0) + stats = RollupStats(0, 0, 0, 0, 0), ), - false + false, ) } } @@ -106,12 +107,13 @@ class RollupMetadataService(val client: Client, val xContentRegistry: NamedXCont private suspend fun recoverRetryMetadata(rollup: Rollup, metadata: RollupMetadata): MetadataResult { var continuousMetadata = metadata.continuous if (rollup.continuous && metadata.continuous == null) { - val nextWindowStartTime = when (val initStartTimeResult = getInitialStartTime(rollup)) { - is StartingTimeResult.Success -> initStartTimeResult.startingTime - is StartingTimeResult.NoDocumentsFound -> return MetadataResult.NoMetadata - is StartingTimeResult.Failure -> - return MetadataResult.Failure("Failed to initialize start time for retried rollup job [${rollup.id}]", initStartTimeResult.e) - } + val nextWindowStartTime = + when (val initStartTimeResult = getInitialStartTime(rollup)) { + is StartingTimeResult.Success -> initStartTimeResult.startingTime + is StartingTimeResult.NoDocumentsFound -> return MetadataResult.NoMetadata + is StartingTimeResult.Failure -> + return MetadataResult.Failure("Failed to initialize start time for retried rollup job [${rollup.id}]", initStartTimeResult.e) + } val nextWindowEndTime = getShiftedTime(nextWindowStartTime, rollup) continuousMetadata = ContinuousMetadata(nextWindowStartTime, nextWindowEndTime) } @@ -119,8 +121,8 @@ class RollupMetadataService(val client: Client, val xContentRegistry: NamedXCont return MetadataResult.Success( metadata.copy( continuous = continuousMetadata, - status = RollupMetadata.Status.STARTED - ) + status = RollupMetadata.Status.STARTED, + ), ) } @@ -129,32 +131,33 @@ class RollupMetadataService(val client: Client, val xContentRegistry: NamedXCont MetadataResult.Success( RollupMetadata( rollupID = rollup.id, lastUpdatedTime = Instant.now(), status = RollupMetadata.Status.INIT, - stats = RollupStats(0, 0, 0, 0, 0) - ) + stats = RollupStats(0, 0, 0, 0, 0), + ), ) // This updates the metadata for a non-continuous rollup after an execution of the composite search and ingestion of rollup data private fun getUpdatedNonContinuousMetadata( metadata: RollupMetadata, - internalComposite: 
InternalComposite + internalComposite: InternalComposite, ): RollupMetadata { val afterKey = internalComposite.afterKey() return metadata.copy( afterKey = afterKey, lastUpdatedTime = Instant.now(), - status = if (afterKey == null) RollupMetadata.Status.FINISHED else RollupMetadata.Status.STARTED + status = if (afterKey == null) RollupMetadata.Status.FINISHED else RollupMetadata.Status.STARTED, ) } // This returns the first instantiation of a RollupMetadata for a continuous rollup @Suppress("ReturnCount") private suspend fun createContinuousMetadata(rollup: Rollup): MetadataResult { - val nextWindowStartTime = when (val initStartTimeResult = getInitialStartTime(rollup)) { - is StartingTimeResult.Success -> initStartTimeResult.startingTime - is StartingTimeResult.NoDocumentsFound -> return MetadataResult.NoMetadata - is StartingTimeResult.Failure -> - return MetadataResult.Failure("Failed to initialize start time for rollup [${rollup.id}]", initStartTimeResult.e) - } + val nextWindowStartTime = + when (val initStartTimeResult = getInitialStartTime(rollup)) { + is StartingTimeResult.Success -> initStartTimeResult.startingTime + is StartingTimeResult.NoDocumentsFound -> return MetadataResult.NoMetadata + is StartingTimeResult.Failure -> + return MetadataResult.Failure("Failed to initialize start time for rollup [${rollup.id}]", initStartTimeResult.e) + } // The first end time is just the next window start time val nextWindowEndTime = getShiftedTime(nextWindowStartTime, rollup) return MetadataResult.Success( @@ -165,8 +168,8 @@ class RollupMetadataService(val client: Client, val xContentRegistry: NamedXCont continuous = ContinuousMetadata(nextWindowStartTime, nextWindowEndTime), status = RollupMetadata.Status.INIT, failureReason = null, - stats = RollupStats(0, 0, 0, 0, 0) - ) + stats = RollupStats(0, 0, 0, 0, 0), + ), ) } @@ -177,16 +180,18 @@ class RollupMetadataService(val client: Client, val xContentRegistry: NamedXCont try { // Rollup requires the first dimension to be the date histogram val dateHistogram = rollup.dimensions.first() as DateHistogram - val searchSourceBuilder = SearchSourceBuilder() - .size(1) - .query(MatchAllQueryBuilder()) - .sort(dateHistogram.sourceField, SortOrder.ASC) // TODO: figure out where nulls are sorted - .trackTotalHits(false) - .fetchSource(false) - .docValueField(dateHistogram.sourceField, DATE_FIELD_STRICT_DATE_OPTIONAL_TIME_FORMAT) - val searchRequest = SearchRequest(rollup.sourceIndex) - .source(searchSourceBuilder) - .allowPartialSearchResults(false) + val searchSourceBuilder = + SearchSourceBuilder() + .size(1) + .query(MatchAllQueryBuilder()) + .sort(dateHistogram.sourceField, SortOrder.ASC) // TODO: figure out where nulls are sorted + .trackTotalHits(false) + .fetchSource(false) + .docValueField(dateHistogram.sourceField, DATE_FIELD_STRICT_DATE_OPTIONAL_TIME_FORMAT) + val searchRequest = + SearchRequest(rollup.sourceIndex) + .source(searchSourceBuilder) + .allowPartialSearchResults(false) val response: SearchResponse = client.suspendUntil { search(searchRequest, it) } if (response.hits.hits.isEmpty()) { @@ -196,8 +201,9 @@ class RollupMetadataService(val client: Client, val xContentRegistry: NamedXCont // Get the doc value field of the dateHistogram.sourceField for the first search hit converted to epoch millis // If the doc value is null or empty it will be treated the same as empty doc hits - val firstHitTimestampAsString: String = response.hits.hits.first().field(dateHistogram.sourceField).getValue() - ?: return StartingTimeResult.NoDocumentsFound 
+ val firstHitTimestampAsString: String = + response.hits.hits.first().field(dateHistogram.sourceField).getValue() + ?: return StartingTimeResult.NoDocumentsFound // Parse date and extract epochMillis val formatter = DateFormatter.forPattern(DATE_FIELD_STRICT_DATE_OPTIONAL_TIME_FORMAT) val epochMillis = DateFormatters.from(formatter.parse(firstHitTimestampAsString), formatter.locale()).toInstant().toEpochMilli() @@ -219,9 +225,10 @@ class RollupMetadataService(val client: Client, val xContentRegistry: NamedXCont */ private fun getRoundedTime(timestamp: Long, dateHistogram: DateHistogram): Instant { val roundingStrategy = getRoundingStrategy(dateHistogram) - val roundedMillis = roundingStrategy - .prepare(timestamp, timestamp) - .round(timestamp) + val roundedMillis = + roundingStrategy + .prepare(timestamp, timestamp) + .round(timestamp) return Instant.ofEpochMilli(roundedMillis) } @@ -231,17 +238,19 @@ class RollupMetadataService(val client: Client, val xContentRegistry: NamedXCont val roundingStrategy = getRoundingStrategy(dateHistogram) val timeInMillis = time.toEpochMilli() - val nextRoundedMillis = roundingStrategy - .prepare(timeInMillis, timeInMillis) - .nextRoundingValue(timeInMillis) + val nextRoundedMillis = + roundingStrategy + .prepare(timeInMillis, timeInMillis) + .nextRoundingValue(timeInMillis) return Instant.ofEpochMilli(nextRoundedMillis) } + // TODO: Could make this an extension function of DateHistogram and add to some utility file + /** * Get the rounding strategy for the given time interval in the DateHistogram. * This is used to calculate time windows by rounding the given time based on the interval. */ - // TODO: Could make this an extension function of DateHistogram and add to some utility file private fun getRoundingStrategy(dateHistogram: DateHistogram): Rounding { val intervalString = (dateHistogram.calendarInterval ?: dateHistogram.fixedInterval) as String // TODO: Make sure the interval string is validated before getting here so we don't get errors @@ -264,21 +273,27 @@ class RollupMetadataService(val client: Client, val xContentRegistry: NamedXCont private fun getUpdatedContinuousMetadata( rollup: Rollup, metadata: RollupMetadata, - internalComposite: InternalComposite + internalComposite: InternalComposite, ): RollupMetadata { val afterKey = internalComposite.afterKey() // TODO: get rid of !! - val nextStart = if (afterKey == null) { - getShiftedTime(metadata.continuous!!.nextWindowStartTime, rollup) - } else metadata.continuous!!.nextWindowStartTime - val nextEnd = if (afterKey == null) { - getShiftedTime(metadata.continuous.nextWindowEndTime, rollup) - } else metadata.continuous.nextWindowEndTime + val nextStart = + if (afterKey == null) { + getShiftedTime(metadata.continuous!!.nextWindowStartTime, rollup) + } else { + metadata.continuous!!.nextWindowStartTime + } + val nextEnd = + if (afterKey == null) { + getShiftedTime(metadata.continuous.nextWindowEndTime, rollup) + } else { + metadata.continuous.nextWindowEndTime + } return metadata.copy( afterKey = internalComposite.afterKey(), lastUpdatedTime = Instant.now(), continuous = ContinuousMetadata(nextStart, nextEnd), - status = RollupMetadata.Status.STARTED + status = RollupMetadata.Status.STARTED, ) } @@ -302,7 +317,9 @@ class RollupMetadataService(val client: Client, val xContentRegistry: NamedXCont return if (rollupMetadata != null) { MetadataResult.Success(rollupMetadata!!) 
- } else MetadataResult.NoMetadata + } else { + MetadataResult.NoMetadata + } } catch (e: RemoteTransportException) { val unwrappedException = ExceptionsHelper.unwrapCause(e) as Exception logger.error("$errorMessage: $unwrappedException") @@ -315,11 +332,12 @@ class RollupMetadataService(val client: Client, val xContentRegistry: NamedXCont } suspend fun updateMetadata(rollup: Rollup, metadata: RollupMetadata, internalComposite: InternalComposite): RollupMetadata { - val updatedMetadata = if (rollup.continuous) { - getUpdatedContinuousMetadata(rollup, metadata, internalComposite) - } else { - getUpdatedNonContinuousMetadata(metadata, internalComposite) - } + val updatedMetadata = + if (rollup.continuous) { + getUpdatedContinuousMetadata(rollup, metadata, internalComposite) + } else { + getUpdatedNonContinuousMetadata(metadata, internalComposite) + } return updateMetadata(updatedMetadata) } @@ -343,20 +361,22 @@ class RollupMetadataService(val client: Client, val xContentRegistry: NamedXCont val updatedMetadata: RollupMetadata? if (existingMetadata == null) { // Create new metadata - updatedMetadata = RollupMetadata( - rollupID = job.id, - status = RollupMetadata.Status.FAILED, - failureReason = reason, - lastUpdatedTime = Instant.now(), - stats = RollupStats(0, 0, 0, 0, 0) - ) + updatedMetadata = + RollupMetadata( + rollupID = job.id, + status = RollupMetadata.Status.FAILED, + failureReason = reason, + lastUpdatedTime = Instant.now(), + stats = RollupStats(0, 0, 0, 0, 0), + ) } else { // Update the given existing metadata - updatedMetadata = existingMetadata.copy( - status = RollupMetadata.Status.FAILED, - failureReason = reason, - lastUpdatedTime = Instant.now() - ) + updatedMetadata = + existingMetadata.copy( + status = RollupMetadata.Status.FAILED, + failureReason = reason, + lastUpdatedTime = Instant.now(), + ) } return submitMetadataUpdate(updatedMetadata, updatedMetadata.id != NO_ID) @@ -367,9 +387,10 @@ class RollupMetadataService(val client: Client, val xContentRegistry: NamedXCont val errorMessage = "An error occurred when ${if (updating) "updating" else "creating"} rollup metadata" try { @Suppress("BlockingMethodInNonBlockingContext") - val builder = XContentFactory.jsonBuilder().startObject() - .field(RollupMetadata.ROLLUP_METADATA_TYPE, metadata) - .endObject() + val builder = + XContentFactory.jsonBuilder().startObject() + .field(RollupMetadata.ROLLUP_METADATA_TYPE, metadata) + .endObject() val indexRequest = IndexRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX).source(builder).routing(metadata.rollupID) if (updating) { indexRequest.id(metadata.id).setIfSeqNo(metadata.seqNo).setIfPrimaryTerm(metadata.primaryTerm) @@ -397,8 +418,8 @@ class RollupMetadataService(val client: Client, val xContentRegistry: NamedXCont seqNo = response.seqNo, primaryTerm = response.primaryTerm, status = status, - failureReason = failureReason - ) + failureReason = failureReason, + ), ) } catch (e: RemoteTransportException) { val unwrappedException = ExceptionsHelper.unwrapCause(e) as Exception @@ -416,13 +437,17 @@ sealed class MetadataResult { // A successful MetadataResult just means a metadata was returned, // it can still have a FAILED status data class Success(val metadata: RollupMetadata) : MetadataResult() + data class Failure(val message: String = "An error occurred for rollup metadata", val cause: Exception) : MetadataResult() + object NoMetadata : MetadataResult() } sealed class StartingTimeResult { data class Success(val startingTime: Instant) : StartingTimeResult() + data class 
Failure(val e: Exception) : StartingTimeResult() + object NoDocumentsFound : StartingTimeResult() } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupRunner.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupRunner.kt index e4725fb28..5b7978ef4 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupRunner.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupRunner.kt @@ -11,13 +11,13 @@ import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.SupervisorJob import kotlinx.coroutines.launch import org.apache.logging.log4j.LogManager -import org.opensearch.core.action.ActionListener import org.opensearch.action.bulk.BackoffPolicy import org.opensearch.action.support.WriteRequest import org.opensearch.client.Client import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings import org.opensearch.common.unit.TimeValue +import org.opensearch.core.action.ActionListener import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.indexmanagement.indexstatemanagement.SkipExecution import org.opensearch.indexmanagement.opensearchapi.IndexManagementSecurityContext @@ -51,12 +51,12 @@ import org.opensearch.threadpool.ThreadPool object RollupRunner : ScheduledJobRunner, CoroutineScope by CoroutineScope(SupervisorJob() + Dispatchers.Default + CoroutineName("RollupRunner")) { - private val logger = LogManager.getLogger(javaClass) - private val backoffPolicy = BackoffPolicy.exponentialBackoff( - TimeValue.timeValueMillis(RollupSettings.DEFAULT_ACQUIRE_LOCK_RETRY_DELAY), - RollupSettings.DEFAULT_ACQUIRE_LOCK_RETRY_COUNT - ) + private val backoffPolicy = + BackoffPolicy.exponentialBackoff( + TimeValue.timeValueMillis(RollupSettings.DEFAULT_ACQUIRE_LOCK_RETRY_DELAY), + RollupSettings.DEFAULT_ACQUIRE_LOCK_RETRY_COUNT, + ) private lateinit var clusterService: ClusterService private lateinit var client: Client @@ -116,7 +116,7 @@ object RollupRunner : } fun registerMetadataServices( - rollupMetadataService: RollupMetadataService + rollupMetadataService: RollupMetadataService, ): RollupRunner { this.rollupMetadataService = rollupMetadataService return this @@ -143,12 +143,13 @@ object RollupRunner : // Get Metadata does a get request to the config index which the role will not have access to. 
This is an internal // call used by the plugin to populate the metadata itself so do not run this with role's context if (job.metadataID != null) { - metadata = when (val getMetadataResult = rollupMetadataService.getExistingMetadata(job)) { - is MetadataResult.Success -> getMetadataResult.metadata - is MetadataResult.NoMetadata -> null - is MetadataResult.Failure -> - throw RollupMetadataException("Failed to get existing rollup metadata [${job.metadataID}]", getMetadataResult.cause) - } + metadata = + when (val getMetadataResult = rollupMetadataService.getExistingMetadata(job)) { + is MetadataResult.Success -> getMetadataResult.metadata + is MetadataResult.NoMetadata -> null + is MetadataResult.Failure -> + throw RollupMetadataException("Failed to get existing rollup metadata [${job.metadataID}]", getMetadataResult.cause) + } } } catch (e: RollupMetadataException) { // If the metadata was not able to be retrieved, the exception will be logged and the job run will be a no-op @@ -186,11 +187,12 @@ object RollupRunner : // TODO: Clean up runner // TODO: Scenario: The rollup job is finished, but I (the user) want to redo it all again + /* - * TODO situations: - * There is a rollup.metadataID and doc but theres no job in target index? - * -> index was deleted and recreated as rollup -> just recreate (but we would have to start over)? Or move to FAILED? - * */ + * TODO situations: + * There is a rollup.metadataID and doc but theres no job in target index? + * -> index was deleted and recreated as rollup -> just recreate (but we would have to start over)? Or move to FAILED? + * */ @Suppress("ReturnCount", "NestedBlockDepth", "ComplexMethod", "LongMethod", "ThrowsCount") private suspend fun runRollupJob(job: Rollup, context: JobExecutionContext, lock: LockModel) { logger.debug("Running rollup job [${job.id}]") @@ -211,15 +213,16 @@ object RollupRunner : } // Anything related to creating, reading, and deleting metadata should not require role's context - var metadata = when (val initMetadataResult = rollupMetadataService.init(job)) { - is MetadataResult.Success -> initMetadataResult.metadata - is MetadataResult.NoMetadata -> { - logger.info("Init metadata NoMetadata returning early") - return - } // No-op this execution - is MetadataResult.Failure -> - throw RollupMetadataException("Failed to initialize rollup metadata", initMetadataResult.cause) - } + var metadata = + when (val initMetadataResult = rollupMetadataService.init(job)) { + is MetadataResult.Success -> initMetadataResult.metadata + is MetadataResult.NoMetadata -> { + logger.info("Init metadata NoMetadata returning early") + return + } // No-op this execution + is MetadataResult.Failure -> + throw RollupMetadataException("Failed to initialize rollup metadata", initMetadataResult.cause) + } if (metadata.status == RollupMetadata.Status.FAILED) { logger.info("Metadata status is FAILED, disabling job $metadata") disableJob(job, metadata) @@ -233,18 +236,19 @@ object RollupRunner : is RollupJobResult.Success -> updatableJob = updateRollupJobResult.rollup is RollupJobResult.Failure -> { logger.error( - "Failed to update the rollup job [${updatableJob.id}] with metadata id [${metadata.id}]", updateRollupJobResult.cause + "Failed to update the rollup job [${updatableJob.id}] with metadata id [${metadata.id}]", updateRollupJobResult.cause, ) return // Exit runner early } } } - val result = withClosableContext( - IndexManagementSecurityContext(job.id, settings, threadPool.threadContext, job.user) - ) { - 
rollupMapperService.attemptCreateRollupTargetIndex(updatableJob, clusterConfigurationProvider.hasLegacyPlugin) - } + val result = + withClosableContext( + IndexManagementSecurityContext(job.id, settings, threadPool.threadContext, job.user), + ) { + rollupMapperService.attemptCreateRollupTargetIndex(updatableJob, clusterConfigurationProvider.hasLegacyPlugin) + } when (result) { is RollupJobValidationResult.Failure -> { setFailedMetadataAndDisableJob(updatableJob, result.message, metadata) @@ -260,46 +264,51 @@ object RollupRunner : while (rollupSearchService.shouldProcessRollup(updatableJob, metadata)) { do { try { - val rollupSearchResult = withClosableContext( - IndexManagementSecurityContext(job.id, settings, threadPool.threadContext, job.user) - ) { - rollupSearchService.executeCompositeSearch(updatableJob, metadata) - } - val rollupResult = when (rollupSearchResult) { - is RollupSearchResult.Success -> { - val compositeRes: InternalComposite = rollupSearchResult.searchResponse.aggregations.get(updatableJob.id) - metadata = metadata.incrementStats(rollupSearchResult.searchResponse, compositeRes) - val rollupIndexResult = withClosableContext( - IndexManagementSecurityContext(job.id, settings, threadPool.threadContext, job.user) - ) { - rollupIndexer.indexRollups(updatableJob, compositeRes) + val rollupSearchResult = + withClosableContext( + IndexManagementSecurityContext(job.id, settings, threadPool.threadContext, job.user), + ) { + rollupSearchService.executeCompositeSearch(updatableJob, metadata) + } + val rollupResult = + when (rollupSearchResult) { + is RollupSearchResult.Success -> { + val compositeRes: InternalComposite = rollupSearchResult.searchResponse.aggregations.get(updatableJob.id) + metadata = metadata.incrementStats(rollupSearchResult.searchResponse, compositeRes) + val rollupIndexResult = + withClosableContext( + IndexManagementSecurityContext(job.id, settings, threadPool.threadContext, job.user), + ) { + rollupIndexer.indexRollups(updatableJob, compositeRes) + } + when (rollupIndexResult) { + is RollupIndexResult.Success -> RollupResult.Success(compositeRes, rollupIndexResult.stats) + is RollupIndexResult.Failure -> RollupResult.Failure(rollupIndexResult.message, rollupIndexResult.cause) + } } - when (rollupIndexResult) { - is RollupIndexResult.Success -> RollupResult.Success(compositeRes, rollupIndexResult.stats) - is RollupIndexResult.Failure -> RollupResult.Failure(rollupIndexResult.message, rollupIndexResult.cause) + is RollupSearchResult.Failure -> { + RollupResult.Failure(rollupSearchResult.message, rollupSearchResult.cause) } } - is RollupSearchResult.Failure -> { - RollupResult.Failure(rollupSearchResult.message, rollupSearchResult.cause) - } - } when (rollupResult) { is RollupResult.Success -> { - metadata = rollupMetadataService.updateMetadata( - updatableJob, - metadata.mergeStats(rollupResult.stats), rollupResult.internalComposite - ) - updatableJob = withClosableContext( - IndexManagementSecurityContext(job.id, settings, threadPool.threadContext, null) - ) { - client.suspendUntil { listener: ActionListener -> - execute(GetRollupAction.INSTANCE, GetRollupRequest(updatableJob.id, null, "_local"), listener) - }.rollup ?: error("Unable to get rollup job") - } + metadata = + rollupMetadataService.updateMetadata( + updatableJob, + metadata.mergeStats(rollupResult.stats), rollupResult.internalComposite, + ) + updatableJob = + withClosableContext( + IndexManagementSecurityContext(job.id, settings, threadPool.threadContext, null), + ) { + client.suspendUntil { 
listener: ActionListener -> + execute(GetRollupAction.INSTANCE, GetRollupRequest(updatableJob.id, null, "_local"), listener) + }.rollup ?: error("Unable to get rollup job") + } } is RollupResult.Failure -> { rollupMetadataService.updateMetadata( - metadata.copy(status = RollupMetadata.Status.FAILED, failureReason = rollupResult.cause.message) + metadata.copy(status = RollupMetadata.Status.FAILED, failureReason = rollupResult.cause.message), ) } } @@ -356,7 +365,7 @@ object RollupRunner : private suspend fun updateRollupJob(job: Rollup, metadata: RollupMetadata): RollupJobResult { try { return withClosableContext( - IndexManagementSecurityContext(job.id, settings, threadPool.threadContext, null) + IndexManagementSecurityContext(job.id, settings, threadPool.threadContext, null), ) { val req = IndexRollupRequest(rollup = job, refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE) val res: IndexRollupResponse = client.suspendUntil { execute(IndexRollupAction.INSTANCE, req, it) } @@ -384,17 +393,18 @@ object RollupRunner : @Suppress("ReturnCount", "ComplexMethod") private suspend fun isJobValid(job: Rollup): RollupJobValidationResult { return withClosableContext( - IndexManagementSecurityContext(job.id, settings, threadPool.threadContext, job.user) + IndexManagementSecurityContext(job.id, settings, threadPool.threadContext, job.user), ) { var metadata: RollupMetadata? = null if (job.metadataID != null) { logger.debug("Fetching associated metadata for rollup job [${job.id}]") - metadata = when (val getMetadataResult = rollupMetadataService.getExistingMetadata(job)) { - is MetadataResult.Success -> getMetadataResult.metadata - is MetadataResult.NoMetadata -> null - is MetadataResult.Failure -> - throw RollupMetadataException("Failed to get existing rollup metadata [${job.metadataID}]", getMetadataResult.cause) - } + metadata = + when (val getMetadataResult = rollupMetadataService.getExistingMetadata(job)) { + is MetadataResult.Success -> getMetadataResult.metadata + is MetadataResult.NoMetadata -> null + is MetadataResult.Failure -> + throw RollupMetadataException("Failed to get existing rollup metadata [${job.metadataID}]", getMetadataResult.cause) + } } logger.debug("Validating source index [${job.sourceIndex}] for rollup job [${job.id}]") @@ -421,14 +431,15 @@ object RollupRunner : * is thrown to be caught by the runner. */ private suspend fun setFailedMetadataAndDisableJob(job: Rollup, reason: String, existingMetadata: RollupMetadata? 
= null): Boolean { - val updatedMetadata = when (val setFailedMetadataResult = rollupMetadataService.setFailedMetadata(job, reason, existingMetadata)) { - is MetadataResult.Success -> setFailedMetadataResult.metadata - is MetadataResult.Failure -> - throw RollupMetadataException(setFailedMetadataResult.message, setFailedMetadataResult.cause) - // Should not get NoMetadata here - is MetadataResult.NoMetadata -> - throw RollupMetadataException("Unexpected state when setting failed metadata", null) - } + val updatedMetadata = + when (val setFailedMetadataResult = rollupMetadataService.setFailedMetadata(job, reason, existingMetadata)) { + is MetadataResult.Success -> setFailedMetadataResult.metadata + is MetadataResult.Failure -> + throw RollupMetadataException(setFailedMetadataResult.message, setFailedMetadataResult.cause) + // Should not get NoMetadata here + is MetadataResult.NoMetadata -> + throw RollupMetadataException("Unexpected state when setting failed metadata", null) + } return disableJob(job, updatedMetadata) } @@ -446,11 +457,12 @@ object RollupRunner : * will not update the metadata (unless updateRollupJob job fails). */ private suspend fun disableJob(job: Rollup, metadata: RollupMetadata): Boolean { - val updatedRollupJob = if (metadata.id != job.metadataID) { - job.copy(metadataID = metadata.id, enabled = false, jobEnabledTime = null) - } else { - job.copy(enabled = false, jobEnabledTime = null) - } + val updatedRollupJob = + if (metadata.id != job.metadataID) { + job.copy(metadataID = metadata.id, enabled = false, jobEnabledTime = null) + } else { + job.copy(enabled = false, jobEnabledTime = null) + } return when (val updateRollupJobResult = updateRollupJob(updatedRollupJob, metadata)) { is RollupJobResult.Success -> true @@ -464,10 +476,12 @@ object RollupRunner : sealed class RollupJobResult { data class Success(val rollup: Rollup) : RollupJobResult() + data class Failure(val message: String = "An error occurred for rollup job", val cause: Exception) : RollupJobResult() } sealed class RollupResult { data class Success(val internalComposite: InternalComposite, val stats: RollupStats) : RollupResult() + data class Failure(val message: String = "An error occurred while rolling up", val cause: Exception) : RollupResult() } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupSearchService.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupSearchService.kt index 91b575a0b..14e4a42c9 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupSearchService.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupSearchService.kt @@ -8,16 +8,16 @@ package org.opensearch.indexmanagement.rollup import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchSecurityException -import org.opensearch.core.action.ActionListener import org.opensearch.action.bulk.BackoffPolicy import org.opensearch.action.search.SearchPhaseExecutionException import org.opensearch.action.search.SearchResponse import org.opensearch.action.search.TransportSearchAction.SEARCH_CANCEL_AFTER_TIME_INTERVAL_SETTING import org.opensearch.client.Client import org.opensearch.cluster.service.ClusterService -import org.opensearch.core.common.breaker.CircuitBreakingException import org.opensearch.common.settings.Settings import org.opensearch.common.unit.TimeValue +import org.opensearch.core.action.ActionListener +import org.opensearch.core.common.breaker.CircuitBreakingException import 
org.opensearch.indexmanagement.opensearchapi.retry import org.opensearch.indexmanagement.opensearchapi.suspendUntil import org.opensearch.indexmanagement.rollup.model.Rollup @@ -39,9 +39,8 @@ import kotlin.math.pow class RollupSearchService( settings: Settings, clusterService: ClusterService, - val client: Client + val client: Client, ) { - private val logger = LogManager.getLogger(javaClass) @Volatile private var retrySearchPolicy = @@ -108,10 +107,12 @@ class RollupSearchService( val decay = 2f.pow(retryCount++) client.suspendUntil { listener: ActionListener -> val pageSize = max(1, job.pageSize.div(decay.toInt())) - if (decay > 1) logger.warn( - "Composite search failed for rollup, retrying [#${retryCount - 1}] -" + - " reducing page size of composite aggregation from ${job.pageSize} to $pageSize" - ) + if (decay > 1) { + logger.warn( + "Composite search failed for rollup, retrying [#${retryCount - 1}] -" + + " reducing page size of composite aggregation from ${job.pageSize} to $pageSize", + ) + } val searchRequest = job.copy(pageSize = pageSize).getRollupSearchRequest(metadata) val cancelTimeoutTimeValue = TimeValue.timeValueMinutes(getCancelAfterTimeInterval(cancelAfterTimeInterval.minutes)) @@ -119,7 +120,7 @@ class RollupSearchService( search(searchRequest, listener) } - } + }, ) } catch (e: SearchPhaseExecutionException) { logger.error(e.message, e.cause) @@ -160,5 +161,6 @@ class RollupSearchService( sealed class RollupSearchResult { data class Success(val searchResponse: SearchResponse) : RollupSearchResult() + data class Failure(val message: String = "An error occurred while searching the rollup source index", val cause: Exception) : RollupSearchResult() } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/delete/DeleteRollupRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/delete/DeleteRollupRequest.kt index ab0a87b91..585043d70 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/delete/DeleteRollupRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/delete/DeleteRollupRequest.kt @@ -13,7 +13,6 @@ import org.opensearch.core.common.io.stream.StreamOutput import java.io.IOException class DeleteRollupRequest : DeleteRequest { - @Throws(IOException::class) constructor(sin: StreamInput) : super(sin) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/delete/TransportDeleteRollupAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/delete/TransportDeleteRollupAction.kt index b61f70d8b..8c8470c98 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/delete/TransportDeleteRollupAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/delete/TransportDeleteRollupAction.kt @@ -8,7 +8,6 @@ package org.opensearch.indexmanagement.rollup.action.delete import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.delete.DeleteRequest import org.opensearch.action.delete.DeleteResponse import org.opensearch.action.get.GetRequest @@ -19,32 +18,34 @@ import org.opensearch.client.Client import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.commons.ConfigConstants import org.opensearch.commons.authuser.User +import 
org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.rollup.model.Rollup import org.opensearch.indexmanagement.rollup.util.parseRollup import org.opensearch.indexmanagement.settings.IndexManagementSettings import org.opensearch.indexmanagement.util.SecurityUtils import org.opensearch.indexmanagement.util.SecurityUtils.Companion.userHasPermissionForResource -import org.opensearch.core.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService import java.lang.Exception @Suppress("ReturnCount") -class TransportDeleteRollupAction @Inject constructor( +class TransportDeleteRollupAction +@Inject +constructor( transportService: TransportService, val client: Client, val clusterService: ClusterService, val settings: Settings, actionFilters: ActionFilters, - val xContentRegistry: NamedXContentRegistry + val xContentRegistry: NamedXContentRegistry, ) : HandledTransportAction( - DeleteRollupAction.NAME, transportService, actionFilters, ::DeleteRollupRequest + DeleteRollupAction.NAME, transportService, actionFilters, ::DeleteRollupRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) private val log = LogManager.getLogger(javaClass) @@ -62,14 +63,13 @@ class TransportDeleteRollupAction @Inject constructor( private val client: Client, private val actionListener: ActionListener, private val request: DeleteRollupRequest, - private val user: User? = SecurityUtils.buildUser(client.threadPool().threadContext) + private val user: User? = SecurityUtils.buildUser(client.threadPool().threadContext), ) { - fun start() { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) client.threadPool().threadContext.stashContext().use { getRollup() @@ -104,13 +104,14 @@ class TransportDeleteRollupAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } private fun delete() { - val deleteRequest = DeleteRequest(INDEX_MANAGEMENT_INDEX, request.id()) - .setRefreshPolicy(request.refreshPolicy) + val deleteRequest = + DeleteRequest(INDEX_MANAGEMENT_INDEX, request.id()) + .setRefreshPolicy(request.refreshPolicy) client.threadPool().threadContext.stashContext().use { client.delete(deleteRequest, actionListener) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/explain/ExplainRollupRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/explain/ExplainRollupRequest.kt index e24fc5f9f..c4fd83c4e 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/explain/ExplainRollupRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/explain/ExplainRollupRequest.kt @@ -13,7 +13,6 @@ import org.opensearch.core.common.io.stream.StreamOutput import java.io.IOException class ExplainRollupRequest : ActionRequest { - val rollupIDs: List @Throws(IOException::class) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/explain/ExplainRollupResponse.kt 
b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/explain/ExplainRollupResponse.kt index 69ad4e260..ca4718401 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/explain/ExplainRollupResponse.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/explain/ExplainRollupResponse.kt @@ -27,14 +27,15 @@ class ExplainRollupResponse : ActionResponse, ToXContentObject { @Throws(IOException::class) constructor(sin: StreamInput) : this( - idsToExplain = sin.let { + idsToExplain = + sin.let { val idsToExplain = mutableMapOf() val size = it.readVInt() repeat(size) { _ -> idsToExplain[it.readString()] = if (sin.readBoolean()) ExplainRollup(it) else null } idsToExplain.toMap() - } + }, ) @Throws(IOException::class) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/explain/TransportExplainRollupAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/explain/TransportExplainRollupAction.kt index 0d90fee45..3961cee61 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/explain/TransportExplainRollupAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/explain/TransportExplainRollupAction.kt @@ -8,7 +8,6 @@ package org.opensearch.indexmanagement.rollup.action.explain import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.ResourceNotFoundException -import org.opensearch.core.action.ActionListener import org.opensearch.action.search.SearchRequest import org.opensearch.action.search.SearchResponse import org.opensearch.action.support.ActionFilters @@ -18,6 +17,7 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings import org.opensearch.commons.ConfigConstants +import org.opensearch.core.action.ActionListener import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.IdsQueryBuilder import org.opensearch.index.query.WildcardQueryBuilder @@ -37,16 +37,17 @@ import org.opensearch.transport.RemoteTransportException import org.opensearch.transport.TransportService import kotlin.Exception -class TransportExplainRollupAction @Inject constructor( +class TransportExplainRollupAction +@Inject +constructor( transportService: TransportService, val client: Client, val settings: Settings, val clusterService: ClusterService, - actionFilters: ActionFilters + actionFilters: ActionFilters, ) : HandledTransportAction( - ExplainRollupAction.NAME, transportService, actionFilters, ::ExplainRollupRequest + ExplainRollupAction.NAME, transportService, actionFilters, ::ExplainRollupRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) init { @@ -61,18 +62,19 @@ class TransportExplainRollupAction @Inject constructor( override fun doExecute(task: Task, request: ExplainRollupRequest, actionListener: ActionListener) { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) val ids = request.rollupIDs // Instantiate concrete ids to metadata map by removing wildcard matches val idsToExplain: MutableMap = ids.filter { !it.contains("*") }.map { it to null }.toMap(mutableMapOf()) // First search is for all rollup documents that match at least one of the given rollupIDs - val queryBuilder = 
BoolQueryBuilder().minimumShouldMatch(1).apply { - ids.forEach { - this.should(WildcardQueryBuilder("${Rollup.ROLLUP_TYPE}.${Rollup.ROLLUP_ID_FIELD}.keyword", "*$it*")) + val queryBuilder = + BoolQueryBuilder().minimumShouldMatch(1).apply { + ids.forEach { + this.should(WildcardQueryBuilder("${Rollup.ROLLUP_TYPE}.${Rollup.ROLLUP_ID_FIELD}.keyword", "*$it*")) + } } - } val user = buildUser(client.threadPool().threadContext) addUserFilter(user, queryBuilder, filterByEnabled, "rollup.user") @@ -94,18 +96,21 @@ class TransportExplainRollupAction @Inject constructor( } val metadataIds = idsToExplain.values.mapNotNull { it?.metadataID } - val metadataSearchRequest = SearchRequest(INDEX_MANAGEMENT_INDEX) - .source(SearchSourceBuilder().size(MAX_HITS).query(IdsQueryBuilder().addIds(*metadataIds.toTypedArray()))) + val metadataSearchRequest = + SearchRequest(INDEX_MANAGEMENT_INDEX) + .source(SearchSourceBuilder().size(MAX_HITS).query(IdsQueryBuilder().addIds(*metadataIds.toTypedArray()))) client.search( metadataSearchRequest, object : ActionListener { override fun onResponse(response: SearchResponse) { try { response.hits.hits.forEach { - val metadata = contentParser(it.sourceRef) - .parseWithType(it.id, it.seqNo, it.primaryTerm, RollupMetadata.Companion::parse) + val metadata = + contentParser(it.sourceRef) + .parseWithType(it.id, it.seqNo, it.primaryTerm, RollupMetadata.Companion::parse) idsToExplain.computeIfPresent(metadata.rollupID) { _, - explainRollup -> + explainRollup, + -> explainRollup.copy(metadata = metadata) } } @@ -124,7 +129,7 @@ class TransportExplainRollupAction @Inject constructor( else -> actionListener.onFailure(e) } } - } + }, ) } @@ -139,7 +144,7 @@ class TransportExplainRollupAction @Inject constructor( else -> actionListener.onFailure(e) } } - } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/GetRollupRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/GetRollupRequest.kt index 95f319ff1..d5b3fc5b9 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/GetRollupRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/GetRollupRequest.kt @@ -21,7 +21,7 @@ class GetRollupRequest : ActionRequest { constructor( id: String, srcContext: FetchSourceContext? = null, - preference: String? = null + preference: String? = null, ) : super() { this.id = id this.srcContext = srcContext @@ -32,7 +32,7 @@ class GetRollupRequest : ActionRequest { constructor(sin: StreamInput) : this( id = sin.readString(), srcContext = if (sin.readBoolean()) FetchSourceContext(sin) else null, - preference = sin.readOptionalString() + preference = sin.readOptionalString(), ) override fun validate(): ActionRequestValidationException? 
{ diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/GetRollupResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/GetRollupResponse.kt index 526fa15b7..ca5c21d68 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/GetRollupResponse.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/GetRollupResponse.kt @@ -8,6 +8,7 @@ package org.opensearch.indexmanagement.rollup.action.get import org.opensearch.core.action.ActionResponse import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentObject import org.opensearch.core.xcontent.XContentBuilder @@ -18,7 +19,6 @@ import org.opensearch.indexmanagement.util._ID import org.opensearch.indexmanagement.util._PRIMARY_TERM import org.opensearch.indexmanagement.util._SEQ_NO import org.opensearch.indexmanagement.util._VERSION -import org.opensearch.core.rest.RestStatus import java.io.IOException class GetRollupResponse : ActionResponse, ToXContentObject { @@ -35,7 +35,7 @@ class GetRollupResponse : ActionResponse, ToXContentObject { seqNo: Long, primaryTerm: Long, status: RestStatus, - rollup: Rollup? + rollup: Rollup?, ) : super() { this.id = id this.version = version @@ -52,7 +52,7 @@ class GetRollupResponse : ActionResponse, ToXContentObject { seqNo = sin.readLong(), primaryTerm = sin.readLong(), status = sin.readEnum(RestStatus::class.java), - rollup = if (sin.readBoolean()) Rollup(sin) else null + rollup = if (sin.readBoolean()) Rollup(sin) else null, ) @Throws(IOException::class) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/GetRollupsRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/GetRollupsRequest.kt index e5f78662b..82097a1e3 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/GetRollupsRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/GetRollupsRequest.kt @@ -13,7 +13,6 @@ import org.opensearch.indexmanagement.rollup.model.Rollup import java.io.IOException class GetRollupsRequest : ActionRequest { - val searchString: String val from: Int val size: Int @@ -25,7 +24,7 @@ class GetRollupsRequest : ActionRequest { from: Int = DEFAULT_FROM, size: Int = DEFAULT_SIZE, sortField: String = DEFAULT_SORT_FIELD, - sortDirection: String = DEFAULT_SORT_DIRECTION + sortDirection: String = DEFAULT_SORT_DIRECTION, ) : super() { this.searchString = searchString this.from = from @@ -40,7 +39,7 @@ class GetRollupsRequest : ActionRequest { from = sin.readInt(), size = sin.readInt(), sortField = sin.readString(), - sortDirection = sin.readString() + sortDirection = sin.readString(), ) override fun validate(): ActionRequestValidationException? 
= null diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/GetRollupsResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/GetRollupsResponse.kt index d4a58291a..79100e3e5 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/GetRollupsResponse.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/GetRollupsResponse.kt @@ -8,6 +8,7 @@ package org.opensearch.indexmanagement.rollup.action.get import org.opensearch.core.action.ActionResponse import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentObject import org.opensearch.core.xcontent.XContentBuilder @@ -17,7 +18,6 @@ import org.opensearch.indexmanagement.rollup.model.Rollup.Companion.ROLLUP_TYPE import org.opensearch.indexmanagement.util._ID import org.opensearch.indexmanagement.util._PRIMARY_TERM import org.opensearch.indexmanagement.util._SEQ_NO -import org.opensearch.core.rest.RestStatus import java.io.IOException class GetRollupsResponse : ActionResponse, ToXContentObject { @@ -28,7 +28,7 @@ class GetRollupsResponse : ActionResponse, ToXContentObject { constructor( rollups: List, totalRollups: Int, - status: RestStatus + status: RestStatus, ) : super() { this.rollups = rollups this.totalRollups = totalRollups @@ -39,7 +39,7 @@ class GetRollupsResponse : ActionResponse, ToXContentObject { constructor(sin: StreamInput) : this( rollups = sin.readList(::Rollup), totalRollups = sin.readInt(), - status = sin.readEnum(RestStatus::class.java) + status = sin.readEnum(RestStatus::class.java), ) @Throws(IOException::class) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/TransportGetRollupAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/TransportGetRollupAction.kt index 76e6a18b9..b0886dfaa 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/TransportGetRollupAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/TransportGetRollupAction.kt @@ -7,7 +7,6 @@ package org.opensearch.indexmanagement.rollup.action.get import org.apache.logging.log4j.LogManager import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.get.GetRequest import org.opensearch.action.get.GetResponse import org.opensearch.action.support.ActionFilters @@ -16,30 +15,32 @@ import org.opensearch.client.Client import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.commons.ConfigConstants +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.rollup.model.Rollup import org.opensearch.indexmanagement.rollup.util.parseRollup import org.opensearch.indexmanagement.settings.IndexManagementSettings import org.opensearch.indexmanagement.util.SecurityUtils import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser -import org.opensearch.core.rest.RestStatus import org.opensearch.tasks.Task import 
org.opensearch.transport.TransportService import java.lang.Exception -class TransportGetRollupAction @Inject constructor( +class TransportGetRollupAction +@Inject +constructor( transportService: TransportService, val client: Client, actionFilters: ActionFilters, val settings: Settings, val clusterService: ClusterService, - val xContentRegistry: NamedXContentRegistry + val xContentRegistry: NamedXContentRegistry, ) : HandledTransportAction ( - GetRollupAction.NAME, transportService, actionFilters, ::GetRollupRequest + GetRollupAction.NAME, transportService, actionFilters, ::GetRollupRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) private val log = LogManager.getLogger(javaClass) @@ -53,8 +54,8 @@ class TransportGetRollupAction @Inject constructor( override fun doExecute(task: Task, request: GetRollupRequest, listener: ActionListener) { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) val getRequest = GetRequest(INDEX_MANAGEMENT_INDEX, request.id).preference(request.preference) val user = buildUser(client.threadPool().threadContext) @@ -78,11 +79,12 @@ class TransportGetRollupAction @Inject constructor( return } else { // if HEAD request don't return the rollup - val rollupResponse = if (request.srcContext != null && !request.srcContext.fetchSource()) { - GetRollupResponse(response.id, response.version, response.seqNo, response.primaryTerm, RestStatus.OK, null) - } else { - GetRollupResponse(response.id, response.version, response.seqNo, response.primaryTerm, RestStatus.OK, rollup) - } + val rollupResponse = + if (request.srcContext != null && !request.srcContext.fetchSource()) { + GetRollupResponse(response.id, response.version, response.seqNo, response.primaryTerm, RestStatus.OK, null) + } else { + GetRollupResponse(response.id, response.version, response.seqNo, response.primaryTerm, RestStatus.OK, rollup) + } listener.onResponse(rollupResponse) } } @@ -90,7 +92,7 @@ class TransportGetRollupAction @Inject constructor( override fun onFailure(e: Exception) { listener.onFailure(e) } - } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/TransportGetRollupsAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/TransportGetRollupsAction.kt index ba7da1895..55e2ba7ef 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/TransportGetRollupsAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/get/TransportGetRollupsAction.kt @@ -8,7 +8,6 @@ package org.opensearch.indexmanagement.rollup.action.get import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.search.SearchRequest import org.opensearch.action.search.SearchResponse import org.opensearch.action.support.ActionFilters @@ -17,8 +16,10 @@ import org.opensearch.client.Client import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.commons.ConfigConstants +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus +import 
org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.ExistsQueryBuilder import org.opensearch.index.query.WildcardQueryBuilder @@ -29,24 +30,24 @@ import org.opensearch.indexmanagement.rollup.model.Rollup import org.opensearch.indexmanagement.settings.IndexManagementSettings import org.opensearch.indexmanagement.util.SecurityUtils.Companion.addUserFilter import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser -import org.opensearch.core.rest.RestStatus import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.search.sort.SortOrder import org.opensearch.tasks.Task import org.opensearch.transport.TransportService import kotlin.Exception -class TransportGetRollupsAction @Inject constructor( +class TransportGetRollupsAction +@Inject +constructor( transportService: TransportService, val client: Client, actionFilters: ActionFilters, val clusterService: ClusterService, val settings: Settings, - val xContentRegistry: NamedXContentRegistry + val xContentRegistry: NamedXContentRegistry, ) : HandledTransportAction ( - GetRollupsAction.NAME, transportService, actionFilters, ::GetRollupsRequest + GetRollupsAction.NAME, transportService, actionFilters, ::GetRollupsRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) private val log = LogManager.getLogger(javaClass) @@ -59,8 +60,8 @@ class TransportGetRollupsAction @Inject constructor( override fun doExecute(task: Task, request: GetRollupsRequest, listener: ActionListener) { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) val searchString = request.searchString.trim() val from = request.from @@ -74,8 +75,9 @@ class TransportGetRollupsAction @Inject constructor( } val user = buildUser(client.threadPool().threadContext) addUserFilter(user, boolQueryBuilder, filterByEnabled, "rollup.user") - val searchSourceBuilder = SearchSourceBuilder().query(boolQueryBuilder).from(from).size(size).seqNoAndPrimaryTerm(true) - .sort(sortField, SortOrder.fromString(sortDirection)) + val searchSourceBuilder = + SearchSourceBuilder().query(boolQueryBuilder).from(from).size(size).seqNoAndPrimaryTerm(true) + .sort(sortField, SortOrder.fromString(sortDirection)) val searchRequest = SearchRequest(INDEX_MANAGEMENT_INDEX).source(searchSourceBuilder) client.threadPool().threadContext.stashContext().use { client.search( @@ -89,23 +91,24 @@ class TransportGetRollupsAction @Inject constructor( listener.onFailure(OpenSearchStatusException("Get rollups failed on some shards", failure.status(), failure.cause)) } else { try { - val rollups = response.hits.hits.map { - contentParser(it.sourceRef).parseWithType(it.id, it.seqNo, it.primaryTerm, Rollup.Companion::parse) - } + val rollups = + response.hits.hits.map { + contentParser(it.sourceRef).parseWithType(it.id, it.seqNo, it.primaryTerm, Rollup.Companion::parse) + } listener.onResponse(GetRollupsResponse(rollups, totalRollups.toInt(), RestStatus.OK)) } catch (e: Exception) { listener.onFailure( OpenSearchStatusException( "Failed to parse rollups", - RestStatus.INTERNAL_SERVER_ERROR, ExceptionsHelper.unwrapCause(e) - ) + RestStatus.INTERNAL_SERVER_ERROR, ExceptionsHelper.unwrapCause(e), + ), ) } } } override fun onFailure(e: Exception) = listener.onFailure(e) 
- } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/index/IndexRollupRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/index/IndexRollupRequest.kt index 3983621f7..8896f0600 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/index/IndexRollupRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/index/IndexRollupRequest.kt @@ -27,7 +27,7 @@ class IndexRollupRequest : IndexRequest { constructor( rollup: Rollup, - refreshPolicy: WriteRequest.RefreshPolicy + refreshPolicy: WriteRequest.RefreshPolicy, ) { this.rollup = rollup if (rollup.seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO || rollup.primaryTerm == SequenceNumbers.UNASSIGNED_PRIMARY_TERM) { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/index/IndexRollupResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/index/IndexRollupResponse.kt index 30f2ecccc..1fd220815 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/index/IndexRollupResponse.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/index/IndexRollupResponse.kt @@ -8,6 +8,7 @@ package org.opensearch.indexmanagement.rollup.action.index import org.opensearch.core.action.ActionResponse import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentObject import org.opensearch.core.xcontent.XContentBuilder @@ -18,7 +19,6 @@ import org.opensearch.indexmanagement.util._ID import org.opensearch.indexmanagement.util._PRIMARY_TERM import org.opensearch.indexmanagement.util._SEQ_NO import org.opensearch.indexmanagement.util._VERSION -import org.opensearch.core.rest.RestStatus import java.io.IOException class IndexRollupResponse : ActionResponse, ToXContentObject { @@ -35,7 +35,7 @@ class IndexRollupResponse : ActionResponse, ToXContentObject { seqNo: Long, primaryTerm: Long, status: RestStatus, - rollup: Rollup + rollup: Rollup, ) : super() { this.id = id this.version = version @@ -52,7 +52,7 @@ class IndexRollupResponse : ActionResponse, ToXContentObject { seqNo = sin.readLong(), primaryTerm = sin.readLong(), status = sin.readEnum(RestStatus::class.java), - rollup = Rollup(sin) + rollup = Rollup(sin), ) @Throws(IOException::class) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/index/TransportIndexRollupAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/index/TransportIndexRollupAction.kt index ccfcfcad8..4c0c64b5d 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/index/TransportIndexRollupAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/index/TransportIndexRollupAction.kt @@ -7,7 +7,6 @@ package org.opensearch.indexmanagement.rollup.action.index import org.apache.logging.log4j.LogManager import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.DocWriteRequest import org.opensearch.action.get.GetRequest import org.opensearch.action.get.GetResponse @@ -20,11 +19,13 @@ import org.opensearch.client.Client import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings -import org.opensearch.core.xcontent.NamedXContentRegistry -import org.opensearch.core.xcontent.ToXContent 
import org.opensearch.common.xcontent.XContentFactory.jsonBuilder import org.opensearch.commons.ConfigConstants import org.opensearch.commons.authuser.User +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent import org.opensearch.indexmanagement.IndexManagementIndices import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.rollup.model.Rollup @@ -35,24 +36,24 @@ import org.opensearch.indexmanagement.util.IndexUtils import org.opensearch.indexmanagement.util.SecurityUtils import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser import org.opensearch.indexmanagement.util.SecurityUtils.Companion.validateUserConfiguration -import org.opensearch.core.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService // TODO: Field and mappings validations of source and target index, i.e. reject a histogram agg on example_field if its not possible @Suppress("LongParameterList") -class TransportIndexRollupAction @Inject constructor( +class TransportIndexRollupAction +@Inject +constructor( transportService: TransportService, val client: Client, actionFilters: ActionFilters, val indexManagementIndices: IndexManagementIndices, val clusterService: ClusterService, val settings: Settings, - val xContentRegistry: NamedXContentRegistry + val xContentRegistry: NamedXContentRegistry, ) : HandledTransportAction( - IndexRollupAction.NAME, transportService, actionFilters, ::IndexRollupRequest + IndexRollupAction.NAME, transportService, actionFilters, ::IndexRollupRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) init { @@ -71,14 +72,13 @@ class TransportIndexRollupAction @Inject constructor( private val client: Client, private val actionListener: ActionListener, private val request: IndexRollupRequest, - private val user: User? = buildUser(client.threadPool().threadContext, request.rollup.user) + private val user: User? 
= buildUser(client.threadPool().threadContext, request.rollup.user), ) { - fun start() { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) client.threadPool().threadContext.stashContext().use { if (!validateUserConfiguration(user, filterByEnabled, actionListener)) { @@ -96,8 +96,8 @@ class TransportIndexRollupAction @Inject constructor( return actionListener.onFailure( OpenSearchStatusException( "target_index value is invalid: ${request.rollup.targetIndex}", - RestStatus.BAD_REQUEST - ) + RestStatus.BAD_REQUEST, + ), ) } putRollup() @@ -141,8 +141,8 @@ class TransportIndexRollupAction @Inject constructor( return actionListener.onFailure( OpenSearchStatusException( "target_index value is invalid: ${request.rollup.targetIndex}", - RestStatus.BAD_REQUEST - ) + RestStatus.BAD_REQUEST, + ), ) } putRollup() @@ -176,8 +176,8 @@ class TransportIndexRollupAction @Inject constructor( actionListener.onResponse( IndexRollupResponse( response.id, response.version, response.seqNo, response.primaryTerm, status, - rollup.copy(seqNo = response.seqNo, primaryTerm = response.primaryTerm) - ) + rollup.copy(seqNo = response.seqNo, primaryTerm = response.primaryTerm), + ), ) } } @@ -185,7 +185,7 @@ class TransportIndexRollupAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(e) } - } + }, ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/mapping/TransportUpdateRollupMappingAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/mapping/TransportUpdateRollupMappingAction.kt index 1a3deb9f7..d156e4d66 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/mapping/TransportUpdateRollupMappingAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/mapping/TransportUpdateRollupMappingAction.kt @@ -6,7 +6,6 @@ package org.opensearch.indexmanagement.rollup.action.mapping import org.apache.logging.log4j.LogManager -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest import org.opensearch.action.support.ActionFilters import org.opensearch.action.support.clustermanager.TransportClusterManagerNodeAction @@ -17,14 +16,15 @@ import org.opensearch.cluster.block.ClusterBlockException import org.opensearch.cluster.block.ClusterBlockLevel import org.opensearch.cluster.metadata.IndexNameExpressionResolver import org.opensearch.cluster.service.ClusterService -import org.opensearch.core.common.bytes.BytesReference import org.opensearch.common.inject.Inject -import org.opensearch.core.common.io.stream.StreamInput -import org.opensearch.core.common.io.stream.Writeable -import org.opensearch.core.xcontent.MediaType import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.action.ActionListener +import org.opensearch.core.common.bytes.BytesReference +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.MediaType import org.opensearch.indexmanagement.indexstatemanagement.util.XCONTENT_WITHOUT_TYPE import org.opensearch.indexmanagement.rollup.util.RollupFieldValueExpressionResolver import 
org.opensearch.indexmanagement.util.IndexUtils.Companion._META @@ -32,13 +32,15 @@ import org.opensearch.threadpool.ThreadPool import org.opensearch.transport.TransportService import java.lang.Exception -class TransportUpdateRollupMappingAction @Inject constructor( +class TransportUpdateRollupMappingAction +@Inject +constructor( threadPool: ThreadPool, clusterService: ClusterService, transportService: TransportService, actionFilters: ActionFilters, indexNameExpressionResolver: IndexNameExpressionResolver, - val client: Client + val client: Client, ) : TransportClusterManagerNodeAction( UpdateRollupMappingAction.INSTANCE.name(), transportService, @@ -46,9 +48,8 @@ class TransportUpdateRollupMappingAction @Inject constructor( threadPool, actionFilters, Writeable.Reader { UpdateRollupMappingRequest(it) }, - indexNameExpressionResolver + indexNameExpressionResolver, ) { - private val log = LogManager.getLogger(javaClass) override fun checkBlock(request: UpdateRollupMappingRequest, state: ClusterState): ClusterBlockException? { @@ -60,7 +61,7 @@ class TransportUpdateRollupMappingAction @Inject constructor( override fun clusterManagerOperation( request: UpdateRollupMappingRequest, state: ClusterState, - listener: ActionListener + listener: ActionListener, ) { val targetIndexResolvedName = RollupFieldValueExpressionResolver.resolve(request.rollup, request.rollup.targetIndex) val index = state.metadata.index(targetIndexResolvedName) @@ -79,11 +80,12 @@ class TransportUpdateRollupMappingAction @Inject constructor( return listener.onFailure(IllegalStateException("Could not find source for index mapping [$index]")) } - val rollup = XContentHelper.convertToMap( - BytesReference.bytes(request.rollup.toXContent(XContentFactory.jsonBuilder(), XCONTENT_WITHOUT_TYPE)), - false, - XContentType.JSON as (MediaType) - ).v2() + val rollup = + XContentHelper.convertToMap( + BytesReference.bytes(request.rollup.toXContent(XContentFactory.jsonBuilder(), XCONTENT_WITHOUT_TYPE)), + false, + XContentType.JSON as (MediaType), + ).v2() val metaMappings = mutableMapOf() // TODO: Clean this up val meta = source[_META] @@ -104,7 +106,7 @@ class TransportUpdateRollupMappingAction @Inject constructor( if ((rollups as Map<*, *>).containsKey(request.rollup.id)) { log.debug("Meta rollup mappings already contain rollup ${request.rollup.id} for index [$index]") return listener.onFailure( - IllegalStateException("Meta rollup mappings already contain rollup ${request.rollup.id} for index [$index]") + IllegalStateException("Meta rollup mappings already contain rollup ${request.rollup.id} for index [$index]"), ) } @@ -128,7 +130,7 @@ class TransportUpdateRollupMappingAction @Inject constructor( override fun onFailure(e: Exception) { listener.onFailure(e) } - } + }, ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/mapping/UpdateRollupMappingAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/mapping/UpdateRollupMappingAction.kt index 2f84b9822..613ae9e79 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/mapping/UpdateRollupMappingAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/mapping/UpdateRollupMappingAction.kt @@ -10,7 +10,6 @@ import org.opensearch.action.support.master.AcknowledgedResponse import org.opensearch.core.common.io.stream.Writeable class UpdateRollupMappingAction : ActionType(NAME, reader) { - companion object { const val NAME = "cluster:admin/opendistro/rollup/mapping/update" val INSTANCE = 
UpdateRollupMappingAction() diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/start/StartRollupRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/start/StartRollupRequest.kt index 4a22b98a9..3493d5473 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/start/StartRollupRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/start/StartRollupRequest.kt @@ -13,7 +13,6 @@ import org.opensearch.core.common.io.stream.StreamOutput import java.io.IOException class StartRollupRequest : UpdateRequest { - @Throws(IOException::class) constructor(sin: StreamInput) : super(sin) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/start/TransportStartRollupAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/start/TransportStartRollupAction.kt index 47320ed4f..535c834b5 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/start/TransportStartRollupAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/start/TransportStartRollupAction.kt @@ -8,7 +8,6 @@ package org.opensearch.indexmanagement.rollup.action.start import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.DocWriteResponse import org.opensearch.action.get.GetRequest import org.opensearch.action.get.GetResponse @@ -22,11 +21,13 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.LoggingDeprecationHandler -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.ConfigConstants import org.opensearch.commons.authuser.User +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.opensearchapi.parseWithType import org.opensearch.indexmanagement.rollup.model.Rollup @@ -35,24 +36,24 @@ import org.opensearch.indexmanagement.rollup.util.parseRollup import org.opensearch.indexmanagement.settings.IndexManagementSettings import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser import org.opensearch.indexmanagement.util.SecurityUtils.Companion.userHasPermissionForResource -import org.opensearch.core.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService import java.lang.IllegalArgumentException import java.time.Instant @Suppress("ReturnCount") -class TransportStartRollupAction @Inject constructor( +class TransportStartRollupAction +@Inject +constructor( transportService: TransportService, val client: Client, val clusterService: ClusterService, val settings: Settings, actionFilters: ActionFilters, - val xContentRegistry: NamedXContentRegistry + val xContentRegistry: NamedXContentRegistry, ) : HandledTransportAction( - StartRollupAction.NAME, transportService, actionFilters, ::StartRollupRequest + StartRollupAction.NAME, transportService, actionFilters, ::StartRollupRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) init { @@ 
-66,8 +67,8 @@ class TransportStartRollupAction @Inject constructor( override fun doExecute(task: Task, request: StartRollupRequest, actionListener: ActionListener) { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) val getReq = GetRequest(INDEX_MANAGEMENT_INDEX, request.id()) val user: User? = buildUser(client.threadPool().threadContext) @@ -106,7 +107,7 @@ class TransportStartRollupAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } } @@ -116,11 +117,12 @@ class TransportStartRollupAction @Inject constructor( val now = Instant.now().toEpochMilli() request.index(INDEX_MANAGEMENT_INDEX).doc( mapOf( - Rollup.ROLLUP_TYPE to mapOf( - Rollup.ENABLED_FIELD to true, - Rollup.ENABLED_TIME_FIELD to now, Rollup.LAST_UPDATED_TIME_FIELD to now - ) - ) + Rollup.ROLLUP_TYPE to + mapOf( + Rollup.ENABLED_FIELD to true, + Rollup.ENABLED_TIME_FIELD to now, Rollup.LAST_UPDATED_TIME_FIELD to now, + ), + ), ) client.update( request, @@ -137,10 +139,11 @@ class TransportStartRollupAction @Inject constructor( actionListener.onResponse(AcknowledgedResponse(false)) } } + override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } @@ -155,13 +158,15 @@ class TransportStartRollupAction @Inject constructor( // in FAILED status which the user will need to retry from actionListener.onResponse(AcknowledgedResponse(true)) } else { - val metadata = response.sourceAsBytesRef?.let { - val xcp = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, it, XContentType.JSON - ) - xcp.parseWithType(response.id, response.seqNo, response.primaryTerm, RollupMetadata.Companion::parse) - } + val metadata = + response.sourceAsBytesRef?.let { + val xcp = + XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, it, XContentType.JSON, + ) + xcp.parseWithType(response.id, response.seqNo, response.primaryTerm, RollupMetadata.Companion::parse) + } if (metadata == null) { // If there is no metadata doc then the runner will instantiate a new one // in FAILED status which the user will need to retry from @@ -175,28 +180,31 @@ class TransportStartRollupAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } private fun updateRollupMetadata(rollup: Rollup, metadata: RollupMetadata, actionListener: ActionListener) { val now = Instant.now().toEpochMilli() - val updatedStatus = when (metadata.status) { - RollupMetadata.Status.FINISHED, RollupMetadata.Status.STOPPED -> RollupMetadata.Status.STARTED - RollupMetadata.Status.STARTED, RollupMetadata.Status.INIT, RollupMetadata.Status.RETRY -> - return actionListener.onResponse(AcknowledgedResponse(true)) - RollupMetadata.Status.FAILED -> RollupMetadata.Status.RETRY - } - val updateRequest = UpdateRequest(INDEX_MANAGEMENT_INDEX, rollup.metadataID) - .doc( - mapOf( - RollupMetadata.ROLLUP_METADATA_TYPE to mapOf( - RollupMetadata.STATUS_FIELD to updatedStatus.type, - RollupMetadata.FAILURE_REASON to null, RollupMetadata.LAST_UPDATED_FIELD to now - ) + val updatedStatus = + when (metadata.status) { + RollupMetadata.Status.FINISHED, RollupMetadata.Status.STOPPED -> 
RollupMetadata.Status.STARTED + RollupMetadata.Status.STARTED, RollupMetadata.Status.INIT, RollupMetadata.Status.RETRY -> + return actionListener.onResponse(AcknowledgedResponse(true)) + RollupMetadata.Status.FAILED -> RollupMetadata.Status.RETRY + } + val updateRequest = + UpdateRequest(INDEX_MANAGEMENT_INDEX, rollup.metadataID) + .doc( + mapOf( + RollupMetadata.ROLLUP_METADATA_TYPE to + mapOf( + RollupMetadata.STATUS_FIELD to updatedStatus.type, + RollupMetadata.FAILURE_REASON to null, RollupMetadata.LAST_UPDATED_FIELD to now, + ), + ), ) - ) - .routing(rollup.id) + .routing(rollup.id) client.update( updateRequest, object : ActionListener { @@ -207,7 +215,7 @@ class TransportStartRollupAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/stop/StopRollupRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/stop/StopRollupRequest.kt index 1b8d78e8d..1a03317a7 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/stop/StopRollupRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/stop/StopRollupRequest.kt @@ -13,7 +13,6 @@ import org.opensearch.core.common.io.stream.StreamOutput import java.io.IOException class StopRollupRequest : UpdateRequest { - @Throws(IOException::class) constructor(sin: StreamInput) : super(sin) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/stop/TransportStopRollupAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/stop/TransportStopRollupAction.kt index 4b4487c17..fdeead257 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/stop/TransportStopRollupAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/action/stop/TransportStopRollupAction.kt @@ -8,7 +8,6 @@ package org.opensearch.indexmanagement.rollup.action.stop import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.DocWriteResponse import org.opensearch.action.get.GetRequest import org.opensearch.action.get.GetResponse @@ -22,10 +21,12 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.LoggingDeprecationHandler -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.ConfigConstants +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.opensearchapi.parseWithType import org.opensearch.indexmanagement.rollup.model.Rollup @@ -34,7 +35,6 @@ import org.opensearch.indexmanagement.rollup.util.parseRollup import org.opensearch.indexmanagement.settings.IndexManagementSettings import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser import org.opensearch.indexmanagement.util.SecurityUtils.Companion.userHasPermissionForResource -import org.opensearch.core.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService import 
java.lang.IllegalArgumentException @@ -53,17 +53,18 @@ import java.time.Instant * The inverse (job: successful and metadata: fail) will end up with a disabled job and a metadata that potentially * says STARTED still which is wrong. */ -class TransportStopRollupAction @Inject constructor( +class TransportStopRollupAction +@Inject +constructor( transportService: TransportService, val client: Client, val clusterService: ClusterService, val settings: Settings, actionFilters: ActionFilters, - val xContentRegistry: NamedXContentRegistry + val xContentRegistry: NamedXContentRegistry, ) : HandledTransportAction( - StopRollupAction.NAME, transportService, actionFilters, ::StopRollupRequest + StopRollupAction.NAME, transportService, actionFilters, ::StopRollupRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) init { @@ -79,8 +80,8 @@ class TransportStopRollupAction @Inject constructor( log.debug("Executing StopRollupAction on ${request.id()}") log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) val getRequest = GetRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, request.id()) val user = buildUser(client.threadPool().threadContext) @@ -114,7 +115,7 @@ class TransportStopRollupAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } } @@ -129,13 +130,15 @@ class TransportStopRollupAction @Inject constructor( // If there is no metadata there is nothing to stop, proceed to disable job updateRollupJob(rollup, request, actionListener) } else { - val metadata = response.sourceAsBytesRef?.let { - val xcp = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, it, XContentType.JSON - ) - xcp.parseWithType(response.id, response.seqNo, response.primaryTerm, RollupMetadata.Companion::parse) - } + val metadata = + response.sourceAsBytesRef?.let { + val xcp = + XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, it, XContentType.JSON, + ) + xcp.parseWithType(response.id, response.seqNo, response.primaryTerm, RollupMetadata.Companion::parse) + } if (metadata == null) { // If there is no metadata there is nothing to stop, proceed to disable job updateRollupJob(rollup, request, actionListener) @@ -148,7 +151,7 @@ class TransportStopRollupAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } @@ -164,26 +167,33 @@ class TransportStopRollupAction @Inject constructor( rollup: Rollup, metadata: RollupMetadata, request: StopRollupRequest, - actionListener: ActionListener + actionListener: ActionListener, ) { val now = Instant.now().toEpochMilli() - val updatedStatus = when (metadata.status) { - RollupMetadata.Status.STARTED, RollupMetadata.Status.INIT, RollupMetadata.Status.STOPPED -> RollupMetadata.Status.STOPPED - RollupMetadata.Status.FINISHED, RollupMetadata.Status.FAILED -> metadata.status - RollupMetadata.Status.RETRY -> RollupMetadata.Status.FAILED - } - val failureReason = if (metadata.status == RollupMetadata.Status.RETRY) - "Stopped a rollup that was in retry, rolling back to failed status" else null - val updateRequest = 
UpdateRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, rollup.metadataID) - .doc( - mapOf( - RollupMetadata.ROLLUP_METADATA_TYPE to mapOf( - RollupMetadata.STATUS_FIELD to updatedStatus.type, - RollupMetadata.FAILURE_REASON to failureReason, RollupMetadata.LAST_UPDATED_FIELD to now - ) + val updatedStatus = + when (metadata.status) { + RollupMetadata.Status.STARTED, RollupMetadata.Status.INIT, RollupMetadata.Status.STOPPED -> RollupMetadata.Status.STOPPED + RollupMetadata.Status.FINISHED, RollupMetadata.Status.FAILED -> metadata.status + RollupMetadata.Status.RETRY -> RollupMetadata.Status.FAILED + } + val failureReason = + if (metadata.status == RollupMetadata.Status.RETRY) { + "Stopped a rollup that was in retry, rolling back to failed status" + } else { + null + } + val updateRequest = + UpdateRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, rollup.metadataID) + .doc( + mapOf( + RollupMetadata.ROLLUP_METADATA_TYPE to + mapOf( + RollupMetadata.STATUS_FIELD to updatedStatus.type, + RollupMetadata.FAILURE_REASON to failureReason, RollupMetadata.LAST_UPDATED_FIELD to now, + ), + ), ) - ) - .routing(rollup.id) + .routing(rollup.id) client.update( updateRequest, object : ActionListener { @@ -198,7 +208,7 @@ class TransportStopRollupAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } @@ -207,11 +217,12 @@ class TransportStopRollupAction @Inject constructor( request.index(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX).setIfSeqNo(rollup.seqNo).setIfPrimaryTerm(rollup.primaryTerm) .doc( mapOf( - Rollup.ROLLUP_TYPE to mapOf( - Rollup.ENABLED_FIELD to false, - Rollup.ENABLED_TIME_FIELD to null, Rollup.LAST_UPDATED_TIME_FIELD to now - ) - ) + Rollup.ROLLUP_TYPE to + mapOf( + Rollup.ENABLED_FIELD to false, + Rollup.ENABLED_TIME_FIELD to null, Rollup.LAST_UPDATED_TIME_FIELD to now, + ), + ), ) .routing(rollup.id) client.update( @@ -220,10 +231,11 @@ class TransportStopRollupAction @Inject constructor( override fun onResponse(response: UpdateResponse) { actionListener.onResponse(AcknowledgedResponse(response.result == DocWriteResponse.Result.UPDATED)) } + override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/FieldCapsFilter.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/FieldCapsFilter.kt index f870d4fb2..ff1bf5828 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/FieldCapsFilter.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/FieldCapsFilter.kt @@ -6,9 +6,7 @@ package org.opensearch.indexmanagement.rollup.actionfilter import org.apache.logging.log4j.LogManager -import org.opensearch.core.action.ActionListener import org.opensearch.action.ActionRequest -import org.opensearch.core.action.ActionResponse import org.opensearch.action.fieldcaps.FieldCapabilities import org.opensearch.action.fieldcaps.FieldCapabilitiesRequest import org.opensearch.action.fieldcaps.FieldCapabilitiesResponse @@ -18,6 +16,8 @@ import org.opensearch.action.support.IndicesOptions import org.opensearch.cluster.metadata.IndexNameExpressionResolver import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings +import org.opensearch.core.action.ActionListener +import org.opensearch.core.action.ActionResponse import org.opensearch.indexmanagement.GuiceHolder 
import org.opensearch.indexmanagement.rollup.model.Rollup import org.opensearch.indexmanagement.rollup.model.RollupFieldMapping @@ -35,9 +35,8 @@ private val logger = LogManager.getLogger(FieldCapsFilter::class.java) class FieldCapsFilter( val clusterService: ClusterService, val settings: Settings, - private val indexNameExpressionResolver: IndexNameExpressionResolver + private val indexNameExpressionResolver: IndexNameExpressionResolver, ) : ActionFilter { - @Volatile private var shouldIntercept = RollupSettings.ROLLUP_DASHBOARDS.get(settings) init { @@ -51,15 +50,16 @@ class FieldCapsFilter( action: String, request: Request, listener: ActionListener, - chain: ActionFilterChain + chain: ActionFilterChain, ) { if (request is FieldCapabilitiesRequest && shouldIntercept) { val indices = request.indices().map { it.toString() }.toTypedArray() val rollupIndices = mutableSetOf() val nonRollupIndices = mutableSetOf() - val remoteClusterIndices = GuiceHolder.remoteClusterService.groupIndices(request.indicesOptions(), indices) { idx: String? -> - indexNameExpressionResolver.hasIndexAbstraction(idx, clusterService.state()) - } + val remoteClusterIndices = + GuiceHolder.remoteClusterService.groupIndices(request.indicesOptions(), indices) { idx: String? -> + indexNameExpressionResolver.hasIndexAbstraction(idx, clusterService.state()) + } val localIndices = remoteClusterIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY) localIndices?.let { @@ -112,7 +112,7 @@ class FieldCapsFilter( override fun onFailure(e: Exception) { listener.onFailure(e) } - } + }, ) } else { chain.proceed(task, action, request, listener) @@ -168,7 +168,7 @@ class FieldCapsFilter( private fun rewriteResponse( indices: Array, fields: Map>, - rollupIndices: Set + rollupIndices: Set, ): ActionResponse { val filteredIndicesFields = expandIndicesInFields(indices, fields) val rollupIndicesFields = populateRollupIndicesFields(rollupIndices) @@ -189,12 +189,13 @@ class FieldCapsFilter( response[fieldName] = mutableMapOf() } val isSearchable = fieldMapping.fieldType == RollupFieldMapping.Companion.FieldType.DIMENSION - response[fieldName]!![type] = FieldCapabilities( - fieldName, type, isSearchable, true, - fieldMappingIndexMap.getValue(fieldMapping) - .toTypedArray(), - null, null, mapOf>() - ) + response[fieldName]!![type] = + FieldCapabilities( + fieldName, type, isSearchable, true, + fieldMappingIndexMap.getValue(fieldMapping) + .toTypedArray(), + null, null, mapOf>(), + ) } return response @@ -202,12 +203,13 @@ class FieldCapsFilter( private fun populateSourceFieldMappingsForRollupJob(rollup: Rollup): Set { val rollupFieldMappings = rollup.populateFieldMappings() - val sourceIndices = indexNameExpressionResolver.concreteIndexNames( - clusterService.state(), - IndicesOptions.lenientExpand(), - true, - rollup.sourceIndex - ) + val sourceIndices = + indexNameExpressionResolver.concreteIndexNames( + clusterService.state(), + IndicesOptions.lenientExpand(), + true, + rollup.sourceIndex, + ) sourceIndices.forEach { val mappings = clusterService.state().metadata.index(it).mapping()?.sourceAsMap ?: return rollupFieldMappings rollupFieldMappings.forEach { fieldMapping -> @@ -254,7 +256,7 @@ class FieldCapsFilter( private fun expandIndicesInFields( indices: Array, - fields: Map> + fields: Map>, ): Map> { val expandedResponse = mutableMapOf>() fields.keys.forEach { field -> @@ -264,12 +266,13 @@ class FieldCapsFilter( } val fieldCaps = fields.getValue(field).getValue(type) val rewrittenIndices = if (fieldCaps.indices() != null && 
fieldCaps.indices().isNotEmpty()) fieldCaps.indices() else indices - expandedResponse[field]!![type] = FieldCapabilities( - fieldCaps.name, fieldCaps.type, fieldCaps.isSearchable, - fieldCaps - .isAggregatable, - rewrittenIndices, fieldCaps.nonSearchableIndices(), fieldCaps.nonAggregatableIndices(), fieldCaps.meta() - ) + expandedResponse[field]!![type] = + FieldCapabilities( + fieldCaps.name, fieldCaps.type, fieldCaps.isSearchable, + fieldCaps + .isAggregatable, + rewrittenIndices, fieldCaps.nonSearchableIndices(), fieldCaps.nonAggregatableIndices(), fieldCaps.meta(), + ) } } @@ -278,7 +281,7 @@ class FieldCapsFilter( private fun mergeFields( f1: Map>, - f2: Map> + f2: Map>, ): Map> { val mergedResponses = mutableMapOf>() val fields = f1.keys.union(f2.keys) @@ -320,13 +323,14 @@ class FieldCapsFilter( val indices = fc1.indices() + fc2.indices() val nonAggregatableIndices = mergeNonAggregatableIndices(fc1, fc2) val nonSearchableIndices = mergeNonSearchableIndices(fc1, fc2) - val meta = (fc1.meta().keys + fc2.meta().keys) - .associateWith { - val data = mutableSetOf() - data.addAll(fc1.meta().getOrDefault(it, mutableSetOf())) - data.addAll(fc2.meta().getOrDefault(it, mutableSetOf())) - data - } + val meta = + (fc1.meta().keys + fc2.meta().keys) + .associateWith { + val data = mutableSetOf() + data.addAll(fc1.meta().getOrDefault(it, mutableSetOf())) + data.addAll(fc2.meta().getOrDefault(it, mutableSetOf())) + data + } return FieldCapabilities(name, type, isSearchable, isAggregatable, indices, nonSearchableIndices, nonAggregatableIndices, meta) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/SerDeHelper.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/SerDeHelper.kt index 5d75b4379..6b0fc1f25 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/SerDeHelper.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/SerDeHelper.kt @@ -23,13 +23,12 @@ import org.opensearch.core.common.io.stream.Writeable class ISMFieldCapabilitiesIndexResponse( private val indexName: String, private val responseMap: Map, - private val canMatch: Boolean + private val canMatch: Boolean, ) : Writeable { - constructor(sin: StreamInput) : this( indexName = sin.readString(), responseMap = sin.readMap({ it.readString() }, { ISMIndexFieldCapabilities(it) }), - canMatch = sin.readBoolean() + canMatch = sin.readBoolean(), ) override fun writeTo(out: StreamOutput) { @@ -37,7 +36,7 @@ class ISMFieldCapabilitiesIndexResponse( out.writeMap( responseMap, { writer, value -> writer.writeString(value) }, - { writer, value -> value.writeTo(writer) } + { writer, value -> value.writeTo(writer) }, ) out.writeBoolean(canMatch) } @@ -46,16 +45,15 @@ class ISMFieldCapabilitiesIndexResponse( class ISMFieldCapabilitiesResponse( val indices: Array, val responseMap: Map>, - val indexResponses: List + val indexResponses: List, ) { - fun toFieldCapabilitiesResponse(): FieldCapabilitiesResponse { val out = BytesStreamOutput() out.writeStringArray(indices) out.writeMap( responseMap, { writer, value -> writer.writeString(value) }, - { writer, value -> writer.writeMap(value, { w, v -> w.writeString(v) }, { w, v -> v.writeTo(w) }) } + { writer, value -> writer.writeMap(value, { w, v -> w.writeString(v) }, { w, v -> v.writeTo(w) }) }, ) out.writeList(indexResponses) val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) @@ -83,9 +81,8 @@ class ISMFieldCapabilities( private val indices: Array?, private val nonSearchableIndices: Array?, private 
val nonAggregatableIndices: Array?, - private val meta: Map> + private val meta: Map>, ) : Writeable { - override fun writeTo(out: StreamOutput) { out.writeString(name) out.writeString(type) @@ -97,7 +94,7 @@ class ISMFieldCapabilities( out.writeMap( meta, { writer, value -> writer.writeString(value) }, - { writer, value -> writer.writeCollection(value) { w, v -> w.writeString(v) } } + { writer, value -> writer.writeCollection(value) { w, v -> w.writeString(v) } }, ) } @@ -109,7 +106,7 @@ class ISMFieldCapabilities( indices = sin.readOptionalStringArray(), nonSearchableIndices = sin.readOptionalStringArray(), nonAggregatableIndices = sin.readOptionalStringArray(), - meta = sin.readMap({ it.readString() }, { it.readSet { it.readString() } }) + meta = sin.readMap({ it.readString() }, { it.readSet { it.readString() } }), ) } @@ -118,15 +115,14 @@ class ISMIndexFieldCapabilities( private val type: String, private val isSearchable: Boolean, private val isAggregatable: Boolean, - private val meta: Map + private val meta: Map, ) : Writeable { - constructor(sin: StreamInput) : this( name = sin.readString(), type = sin.readString(), isSearchable = sin.readBoolean(), isAggregatable = sin.readBoolean(), - meta = sin.readMap({ it.readString() }, { it.readString() }) + meta = sin.readMap({ it.readString() }, { it.readString() }), ) override fun writeTo(out: StreamOutput) { @@ -137,7 +133,7 @@ class ISMIndexFieldCapabilities( out.writeMap( meta, { writer, value: String -> writer.writeString(value) }, - { writer, value: String -> writer.writeString(value) } + { writer, value: String -> writer.writeString(value) }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/interceptor/RollupInterceptor.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/interceptor/RollupInterceptor.kt index ffd1e4bd7..87919c173 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/interceptor/RollupInterceptor.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/interceptor/RollupInterceptor.kt @@ -55,12 +55,12 @@ import org.opensearch.transport.TransportRequestHandler class RollupInterceptor( val clusterService: ClusterService, val settings: Settings, - val indexNameExpressionResolver: IndexNameExpressionResolver + val indexNameExpressionResolver: IndexNameExpressionResolver, ) : TransportInterceptor { - private val logger = LogManager.getLogger(javaClass) @Volatile private var searchEnabled = RollupSettings.ROLLUP_SEARCH_ENABLED.get(settings) + @Volatile private var searchAllJobs = RollupSettings.ROLLUP_SEARCH_ALL_JOBS.get(settings) init { @@ -77,7 +77,7 @@ class RollupInterceptor( action: String, executor: String, forceExecution: Boolean, - actualHandler: TransportRequestHandler + actualHandler: TransportRequestHandler, ): TransportRequestHandler { return object : TransportRequestHandler { override fun messageReceived(request: T, channel: TransportChannel, task: Task) { @@ -90,15 +90,18 @@ class RollupInterceptor( } val indices = request.indices().map { it.toString() }.toTypedArray() - val concreteIndices = indexNameExpressionResolver - .concreteIndexNames(clusterService.state(), request.indicesOptions(), *indices) + val concreteIndices = + indexNameExpressionResolver + .concreteIndexNames(clusterService.state(), request.indicesOptions(), *indices) // To extract fields from QueryStringQueryBuilder we need concrete source index name. 
- val rollupJob = clusterService.state().metadata.index(index).getRollupJobs()?.get(0) - ?: throw IllegalArgumentException("No rollup job associated with target_index") - val queryFieldMappings = getQueryMetadata( - request.source().query(), - getConcreteSourceIndex(rollupJob.sourceIndex, indexNameExpressionResolver, clusterService.state()) - ) + val rollupJob = + clusterService.state().metadata.index(index).getRollupJobs()?.get(0) + ?: throw IllegalArgumentException("No rollup job associated with target_index") + val queryFieldMappings = + getQueryMetadata( + request.source().query(), + getConcreteSourceIndex(rollupJob.sourceIndex, indexNameExpressionResolver, clusterService.state()), + ) val aggregationFieldMappings = getAggregationMetadata(request.source().aggregations()?.aggregatorFactories) val fieldMappings = queryFieldMappings + aggregationFieldMappings @@ -135,14 +138,15 @@ class RollupInterceptor( } /* - * Validate that all indices have rollup job which matches field mappings from request - * TODO return compiled list of issues here instead of just throwing exception - * */ + * Validate that all indices have rollup job which matches field mappings from request + * TODO return compiled list of issues here instead of just throwing exception + * */ private fun validateIndicies(concreteIndices: Array, fieldMappings: Set): Map> { var allMatchingRollupJobs: Map> = mapOf() for (concreteIndex in concreteIndices) { - val rollupJobs = clusterService.state().metadata.index(concreteIndex).getRollupJobs() - ?: throw IllegalArgumentException("Not all indices have rollup job") + val rollupJobs = + clusterService.state().metadata.index(concreteIndex).getRollupJobs() + ?: throw IllegalArgumentException("Not all indices have rollup job") val (matchingRollupJobs, issues) = findMatchingRollupJobs(fieldMappings, rollupJobs) if (issues.isNotEmpty() || matchingRollupJobs.isEmpty()) { @@ -156,7 +160,7 @@ class RollupInterceptor( @Suppress("ComplexMethod") private fun getAggregationMetadata( aggregationBuilders: Collection?, - fieldMappings: MutableSet = mutableSetOf() + fieldMappings: MutableSet = mutableSetOf(), ): Set { aggregationBuilders?.forEach { when (it) { @@ -197,7 +201,7 @@ class RollupInterceptor( private fun getQueryMetadata( query: QueryBuilder?, concreteSourceIndexName: String?, - fieldMappings: MutableSet = mutableSetOf() + fieldMappings: MutableSet = mutableSetOf(), ): Set { if (query == null) { return fieldMappings @@ -236,7 +240,7 @@ class RollupInterceptor( query.zeroTermsQuery() != MatchQuery.DEFAULT_ZERO_TERMS_QUERY ) { throw IllegalArgumentException( - "The ${query.name} query is currently not supported with analyzer/slop/zero_terms_query in rollups" + "The ${query.name} query is currently not supported with analyzer/slop/zero_terms_query in rollups", ) } fieldMappings.add(RollupFieldMapping(RollupFieldMapping.Companion.FieldType.DIMENSION, query.fieldName(), Dimension.Type.TERMS.type)) @@ -265,23 +269,28 @@ class RollupInterceptor( @Suppress("ComplexMethod") private fun findMatchingRollupJobs( fieldMappings: Set, - rollupJobs: List + rollupJobs: List, ): Pair>, Set> { - val rollupFieldMappings = rollupJobs.map { rollup -> - rollup to rollup.populateFieldMappings() - }.toMap() + val rollupFieldMappings = + rollupJobs.map { rollup -> + rollup to rollup.populateFieldMappings() + }.toMap() val knownFieldMappings = mutableSetOf() val unknownFields = mutableSetOf() fieldMappings.forEach { - if (it.mappingType == UNKNOWN_MAPPING) unknownFields.add(it.fieldName) - else 
knownFieldMappings.add(it) + if (it.mappingType == UNKNOWN_MAPPING) { + unknownFields.add(it.fieldName) + } else { + knownFieldMappings.add(it) + } } - val potentialRollupFieldMappings = rollupFieldMappings.filterValues { - it.containsAll(knownFieldMappings) && it.map { rollupFieldMapping -> rollupFieldMapping.fieldName }.containsAll(unknownFields) - } + val potentialRollupFieldMappings = + rollupFieldMappings.filterValues { + it.containsAll(knownFieldMappings) && it.map { rollupFieldMapping -> rollupFieldMapping.fieldName }.containsAll(unknownFields) + } val issues = mutableSetOf() if (potentialRollupFieldMappings.isEmpty()) { @@ -294,8 +303,11 @@ class RollupInterceptor( // Adding to the issue if cannot find defined field mapping or if the field is missing fieldMappings.forEach { - if (!allFields.contains(it.fieldName)) issues.add(it.toIssue(true)) - else if (it.mappingType != UNKNOWN_MAPPING && !allFieldMappings.contains(it)) issues.add(it.toIssue()) + if (!allFields.contains(it.fieldName)) { + issues.add(it.toIssue(true)) + } else if (it.mappingType != UNKNOWN_MAPPING && !allFieldMappings.contains(it)) { + issues.add(it.toIssue()) + } } } @@ -312,8 +324,11 @@ class RollupInterceptor( // Picking the job with largest rollup window for now return sortedRollups.reduce { matched, new -> - if (getEstimateRollupInterval(matched) > getEstimateRollupInterval(new)) matched - else new + if (getEstimateRollupInterval(matched) > getEstimateRollupInterval(new)) { + matched + } else { + new + } } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/ExplainRollup.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/ExplainRollup.kt index fc9763ac3..ecdf47532 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/ExplainRollup.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/ExplainRollup.kt @@ -15,13 +15,12 @@ import java.io.IOException data class ExplainRollup( val metadataID: String? = null, - val metadata: RollupMetadata? = null + val metadata: RollupMetadata? 
= null, ) : ToXContentObject, Writeable { - @Throws(IOException::class) constructor(sin: StreamInput) : this( metadataID = sin.readOptionalString(), - metadata = if (sin.readBoolean()) RollupMetadata(sin) else null + metadata = if (sin.readBoolean()) RollupMetadata(sin) else null, ) @Throws(IOException::class) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/ISMRollup.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/ISMRollup.kt index faa7b2244..2fced86ce 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/ISMRollup.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/ISMRollup.kt @@ -6,6 +6,7 @@ package org.opensearch.indexmanagement.rollup.model import org.apache.commons.codec.digest.DigestUtils +import org.opensearch.commons.authuser.User import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable @@ -14,7 +15,6 @@ import org.opensearch.core.xcontent.ToXContentObject import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParserUtils -import org.opensearch.commons.authuser.User import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.common.model.dimension.DateHistogram import org.opensearch.indexmanagement.common.model.dimension.Dimension @@ -31,9 +31,8 @@ data class ISMRollup( val targetIndex: String, val pageSize: Int, val dimensions: List, - val metrics: List + val metrics: List, ) : ToXContentObject, Writeable { - // TODO: This can be moved to a common place, since this is shared between Rollup and ISMRollup init { require(pageSize in Rollup.MINIMUM_PAGE_SIZE..Rollup.MAXIMUM_PAGE_SIZE) { @@ -80,7 +79,7 @@ data class ISMRollup( continuous = false, dimensions = dimensions, metrics = metrics, - user = user + user = user, ) } @@ -89,7 +88,8 @@ data class ISMRollup( description = sin.readString(), targetIndex = sin.readString(), pageSize = sin.readInt(), - dimensions = sin.let { + dimensions = + sin.let { val dimensionsList = mutableListOf() val size = it.readVInt() repeat(size) { _ -> @@ -99,12 +99,12 @@ data class ISMRollup( Dimension.Type.DATE_HISTOGRAM -> DateHistogram(sin) Dimension.Type.TERMS -> Terms(sin) Dimension.Type.HISTOGRAM -> Histogram(sin) - } + }, ) } dimensionsList.toList() }, - metrics = sin.readList(::RollupMetrics) + metrics = sin.readList(::RollupMetrics), ) override fun toString(): String { @@ -146,7 +146,7 @@ data class ISMRollup( @JvmStatic @Throws(IOException::class) fun parse( - xcp: XContentParser + xcp: XContentParser, ): ISMRollup { var description = "" var targetIndex = "" @@ -168,7 +168,7 @@ data class ISMRollup( XContentParserUtils.ensureExpectedToken( XContentParser.Token.START_ARRAY, xcp.currentToken(), - xcp + xcp, ) while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { dimensions.add(Dimension.parse(xcp)) @@ -178,7 +178,7 @@ data class ISMRollup( XContentParserUtils.ensureExpectedToken( XContentParser.Token.START_ARRAY, xcp.currentToken(), - xcp + xcp, ) while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { metrics.add(RollupMetrics.parse(xcp)) @@ -193,7 +193,7 @@ data class ISMRollup( pageSize = pageSize, dimensions = dimensions, metrics = metrics, - targetIndex = targetIndex + targetIndex = targetIndex, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/Rollup.kt 
b/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/Rollup.kt index 9cf07f9b8..b15145040 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/Rollup.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/Rollup.kt @@ -5,6 +5,7 @@ package org.opensearch.indexmanagement.rollup.model +import org.opensearch.commons.authuser.User import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable @@ -13,7 +14,6 @@ import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParser.Token import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.commons.authuser.User import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.common.model.dimension.DateHistogram import org.opensearch.indexmanagement.common.model.dimension.Dimension @@ -54,9 +54,8 @@ data class Rollup( val continuous: Boolean, val dimensions: List, val metrics: List, - val user: User? = null + val user: User? = null, ) : ScheduledJobParameter, Writeable { - init { if (enabled) { requireNotNull(jobEnabledTime) { "Job enabled time must be present if the job is enabled" } @@ -65,17 +64,18 @@ data class Rollup( } // Copy the delay parameter of the job into the job scheduler for continuous jobs only if (jobSchedule.delay != delay && continuous) { - jobSchedule = when (jobSchedule) { - is CronSchedule -> { - val cronSchedule = jobSchedule as CronSchedule - CronSchedule(cronSchedule.cronExpression, cronSchedule.timeZone, delay ?: 0) - } - is IntervalSchedule -> { - val intervalSchedule = jobSchedule as IntervalSchedule - IntervalSchedule(intervalSchedule.startTime, intervalSchedule.interval, intervalSchedule.unit, delay ?: 0) + jobSchedule = + when (jobSchedule) { + is CronSchedule -> { + val cronSchedule = jobSchedule as CronSchedule + CronSchedule(cronSchedule.cronExpression, cronSchedule.timeZone, delay ?: 0) + } + is IntervalSchedule -> { + val intervalSchedule = jobSchedule as IntervalSchedule + IntervalSchedule(intervalSchedule.startTime, intervalSchedule.interval, intervalSchedule.unit, delay ?: 0) + } + else -> jobSchedule } - else -> jobSchedule - } } when (jobSchedule) { is CronSchedule -> { @@ -116,7 +116,8 @@ data class Rollup( primaryTerm = sin.readLong(), enabled = sin.readBoolean(), schemaVersion = sin.readLong(), - jobSchedule = sin.let { + jobSchedule = + sin.let { when (requireNotNull(sin.readEnum(ScheduleType::class.java)) { "ScheduleType cannot be null" }) { ScheduleType.CRON -> CronSchedule(sin) ScheduleType.INTERVAL -> IntervalSchedule(sin) @@ -132,7 +133,8 @@ data class Rollup( pageSize = sin.readInt(), delay = sin.readOptionalLong(), continuous = sin.readBoolean(), - dimensions = sin.let { + dimensions = + sin.let { val dimensionsList = mutableListOf() val size = it.readVInt() repeat(size) { _ -> @@ -142,15 +144,18 @@ data class Rollup( Dimension.Type.DATE_HISTOGRAM -> DateHistogram(sin) Dimension.Type.TERMS -> Terms(sin) Dimension.Type.HISTOGRAM -> Histogram(sin) - } + }, ) } dimensionsList.toList() }, metrics = sin.readList(::RollupMetrics), - user = if (sin.readBoolean()) { + user = + if (sin.readBoolean()) { User(sin) - } else null + } else { + null + }, ) override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { @@ -216,8 +221,10 @@ data class Rollup( companion object { // TODO: 
Move this enum to Job Scheduler plugin enum class ScheduleType { - CRON, INTERVAL; + CRON, + INTERVAL, } + const val ROLLUP_LOCK_DURATION_SECONDS = 1800L // 30 minutes const val ROLLUP_TYPE = "rollup" const val ROLLUP_ID_FIELD = "rollup_id" @@ -241,9 +248,10 @@ data class Rollup( const val MINIMUM_PAGE_SIZE = 1 const val MAXIMUM_PAGE_SIZE = 10_000 const val ROLLUP_DOC_ID_FIELD = "$ROLLUP_TYPE.$_ID" + /* - * _doc_count has to be in root of document so that core's aggregator would pick it up and use it - * */ + * _doc_count has to be in root of document so that core's aggregator would pick it up and use it + * */ const val ROLLUP_DOC_COUNT_FIELD = "_doc_count" const val ROLLUP_DOC_SCHEMA_VERSION_FIELD = "$ROLLUP_TYPE._$SCHEMA_VERSION_FIELD" const val USER_FIELD = "user" @@ -256,7 +264,7 @@ data class Rollup( xcp: XContentParser, id: String = NO_ID, seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, - primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM + primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, ): Rollup { var schedule: Schedule? = null var schemaVersion: Long = IndexUtils.DEFAULT_SCHEMA_VERSION @@ -281,7 +289,9 @@ data class Rollup( xcp.nextToken() when (fieldName) { - ROLLUP_ID_FIELD -> { requireNotNull(xcp.text()) { "The rollup_id field is null" } /* Just used for searching */ } + ROLLUP_ID_FIELD -> { + requireNotNull(xcp.text()) { "The rollup_id field is null" } // Just used for searching + } ENABLED_FIELD -> enabled = xcp.booleanValue() SCHEDULE_FIELD -> schedule = ScheduleParser.parse(xcp) SCHEMA_VERSION_FIELD -> schemaVersion = xcp.longValue() @@ -351,7 +361,7 @@ data class Rollup( continuous = continuous, dimensions = dimensions, metrics = metrics, - user = user + user = user, ) } } @@ -359,6 +369,8 @@ data class Rollup( sealed class RollupJobValidationResult { object Valid : RollupJobValidationResult() + data class Invalid(val reason: String) : RollupJobValidationResult() + data class Failure(val message: String, val e: Exception? = null) : RollupJobValidationResult() } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/RollupFieldMapping.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/RollupFieldMapping.kt index 0bfe12132..5bd971c9a 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/RollupFieldMapping.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/RollupFieldMapping.kt @@ -9,7 +9,6 @@ import org.opensearch.indexmanagement.rollup.model.Rollup.Companion.DIMENSIONS_F import org.opensearch.indexmanagement.rollup.model.Rollup.Companion.METRICS_FIELD data class RollupFieldMapping(val fieldType: FieldType, val fieldName: String, val mappingType: String, var sourceType: String? = null) { - fun sourceType(type: String?) 
{ this.sourceType = type } @@ -19,18 +18,23 @@ data class RollupFieldMapping(val fieldType: FieldType, val fieldName: String, v } fun toIssue(isFieldMissing: Boolean = false): String { - return if (isFieldMissing || mappingType == UNKNOWN_MAPPING) return "missing field $fieldName" - else when (fieldType) { - FieldType.METRIC -> "missing $mappingType aggregation on $fieldName" - else -> "missing $mappingType grouping on $fieldName" + return if (isFieldMissing || mappingType == UNKNOWN_MAPPING) { + return "missing field $fieldName" + } else { + when (fieldType) { + FieldType.METRIC -> "missing $mappingType aggregation on $fieldName" + else -> "missing $mappingType grouping on $fieldName" + } } } companion object { const val UNKNOWN_MAPPING = "unknown" + enum class FieldType(val type: String) { DIMENSION(DIMENSIONS_FIELD), - METRIC(METRICS_FIELD); + METRIC(METRICS_FIELD), + ; override fun toString(): String { return type diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/RollupMetadata.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/RollupMetadata.kt index 8c8a32ee7..38521a476 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/RollupMetadata.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/RollupMetadata.kt @@ -26,13 +26,12 @@ import java.util.Locale data class ContinuousMetadata( val nextWindowStartTime: Instant, - val nextWindowEndTime: Instant + val nextWindowEndTime: Instant, ) : ToXContentObject, Writeable { - @Throws(IOException::class) constructor(sin: StreamInput) : this( nextWindowStartTime = sin.readInstant(), - nextWindowEndTime = sin.readInstant() + nextWindowEndTime = sin.readInstant(), ) override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { @@ -73,7 +72,7 @@ data class ContinuousMetadata( return ContinuousMetadata( nextWindowStartTime = requireNotNull(windowStartTime) { "Next window start time must not be null for a continuous job" }, - nextWindowEndTime = requireNotNull(windowEndTime) { "Next window end time must not be null for a continuous job" } + nextWindowEndTime = requireNotNull(windowEndTime) { "Next window end time must not be null for a continuous job" }, ) } } @@ -84,16 +83,15 @@ data class RollupStats( val documentsProcessed: Long, val rollupsIndexed: Long, val indexTimeInMillis: Long, - val searchTimeInMillis: Long + val searchTimeInMillis: Long, ) : ToXContentObject, Writeable { - @Throws(IOException::class) constructor(sin: StreamInput) : this( pagesProcessed = sin.readLong(), documentsProcessed = sin.readLong(), rollupsIndexed = sin.readLong(), indexTimeInMillis = sin.readLong(), - searchTimeInMillis = sin.readLong() + searchTimeInMillis = sin.readLong(), ) override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { @@ -150,7 +148,7 @@ data class RollupStats( documentsProcessed = requireNotNull(documentsProcessed) { "Documents processed must not be null" }, rollupsIndexed = requireNotNull(rollupsIndexed) { "Rollups indexed must not be null" }, indexTimeInMillis = requireNotNull(indexTimeInMillis) { "Index time in millis must not be null" }, - searchTimeInMillis = requireNotNull(searchTimeInMillis) { "Search time in millis must not be null" } + searchTimeInMillis = requireNotNull(searchTimeInMillis) { "Search time in millis must not be null" }, ) } } @@ -166,16 +164,16 @@ data class RollupMetadata( val continuous: ContinuousMetadata? = null, val status: Status, val failureReason: String? 
= null, - val stats: RollupStats + val stats: RollupStats, ) : ToXContentObject, Writeable { - enum class Status(val type: String) { INIT("init"), STARTED("started"), STOPPED("stopped"), FINISHED("finished"), FAILED("failed"), - RETRY("retry"); + RETRY("retry"), + ; override fun toString(): String { return type @@ -193,7 +191,7 @@ data class RollupMetadata( continuous = if (sin.readBoolean()) ContinuousMetadata(sin) else null, status = sin.readEnum(Status::class.java), failureReason = sin.readOptionalString(), - stats = RollupStats(sin) + stats = RollupStats(sin), ) override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { @@ -245,7 +243,7 @@ data class RollupMetadata( xcp: XContentParser, id: String = NO_ID, seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, - primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM + primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, ): RollupMetadata { var rollupID: String? = null var afterKey: Map? = null @@ -281,7 +279,7 @@ data class RollupMetadata( continuous = continuous, status = requireNotNull(status) { "Status must not be null" }, failureReason = failureReason, - stats = requireNotNull(stats) { "Stats must not be null" } + stats = requireNotNull(stats) { "Stats must not be null" }, ) } @@ -293,23 +291,26 @@ data class RollupMetadata( fun RollupMetadata.incrementStats(response: SearchResponse, internalComposite: InternalComposite): RollupMetadata { return this.copy( - stats = this.stats.copy( + stats = + this.stats.copy( pagesProcessed = stats.pagesProcessed + 1L, - documentsProcessed = stats.documentsProcessed + + documentsProcessed = + stats.documentsProcessed + internalComposite.buckets.fold(0L) { acc, internalBucket -> acc + internalBucket.docCount }, - searchTimeInMillis = stats.searchTimeInMillis + response.took.millis - ) + searchTimeInMillis = stats.searchTimeInMillis + response.took.millis, + ), ) } fun RollupMetadata.mergeStats(stats: RollupStats): RollupMetadata { return this.copy( - stats = this.stats.copy( + stats = + this.stats.copy( pagesProcessed = this.stats.pagesProcessed + stats.pagesProcessed, documentsProcessed = this.stats.documentsProcessed + stats.documentsProcessed, rollupsIndexed = this.stats.rollupsIndexed + stats.rollupsIndexed, indexTimeInMillis = this.stats.indexTimeInMillis + stats.indexTimeInMillis, - searchTimeInMillis = this.stats.searchTimeInMillis + stats.searchTimeInMillis - ) + searchTimeInMillis = this.stats.searchTimeInMillis + stats.searchTimeInMillis, + ), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/RollupMetrics.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/RollupMetrics.kt index 26286ae0c..2542358b2 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/RollupMetrics.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/RollupMetrics.kt @@ -25,9 +25,8 @@ import java.io.IOException data class RollupMetrics( val sourceField: String, val targetField: String, - val metrics: List + val metrics: List, ) : ToXContentObject, Writeable { - init { require(metrics.size == metrics.distinctBy { it.type }.size) { "Cannot have multiple metrics of the same type in a single rollup metric [$metrics]" @@ -40,7 +39,8 @@ data class RollupMetrics( constructor(sin: StreamInput) : this( sourceField = sin.readString(), targetField = sin.readString(), - metrics = sin.let { + metrics = + sin.let { val metricsList = mutableListOf() val size = it.readVInt() repeat(size) { _ -> @@ -52,11 +52,11 @@ 
data class RollupMetrics( Metric.Type.MIN -> Min(it) Metric.Type.SUM -> Sum(it) Metric.Type.VALUE_COUNT -> ValueCount(it) - } + }, ) } metricsList.toList() - } + }, ) override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { @@ -128,7 +128,7 @@ data class RollupMetrics( return RollupMetrics( sourceField = requireNotNull(sourceField) { "Source field must not be null" }, targetField = requireNotNull(targetField) { "Target field must not be null" }, - metrics = metrics.toList() + metrics = metrics.toList(), ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/metric/Average.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/metric/Average.kt index c337f7919..f49eeefda 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/metric/Average.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/metric/Average.kt @@ -14,7 +14,6 @@ import org.opensearch.core.xcontent.XContentParser.Token import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken class Average() : Metric(Type.AVERAGE) { - @Suppress("UNUSED_PARAMETER") constructor(sin: StreamInput) : this() diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/metric/Metric.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/metric/Metric.kt index 1fe96dcf5..0b6b15526 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/metric/Metric.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/model/metric/Metric.kt @@ -13,13 +13,13 @@ import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken import java.io.IOException abstract class Metric(val type: Type) : ToXContentObject, Writeable { - enum class Type(val type: String) { AVERAGE("avg"), SUM("sum"), MAX("max"), MIN("min"), - VALUE_COUNT("value_count"); + VALUE_COUNT("value_count"), + ; override fun toString(): String { return type @@ -37,14 +37,15 @@ abstract class Metric(val type: Type) : ToXContentObject, Writeable { val fieldName = xcp.currentName() xcp.nextToken() - metric = when (fieldName) { - Type.AVERAGE.type -> Average.parse(xcp) - Type.MAX.type -> Max.parse(xcp) - Type.MIN.type -> Min.parse(xcp) - Type.SUM.type -> Sum.parse(xcp) - Type.VALUE_COUNT.type -> ValueCount.parse(xcp) - else -> throw IllegalArgumentException("Invalid metric type [$fieldName] found in metrics") - } + metric = + when (fieldName) { + Type.AVERAGE.type -> Average.parse(xcp) + Type.MAX.type -> Max.parse(xcp) + Type.MIN.type -> Min.parse(xcp) + Type.SUM.type -> Sum.parse(xcp) + Type.VALUE_COUNT.type -> ValueCount.parse(xcp) + else -> throw IllegalArgumentException("Invalid metric type [$fieldName] found in metrics") + } } return requireNotNull(metric) { "Metric is null" } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/query/QueryStringQueryParserExt.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/query/QueryStringQueryParserExt.kt index ed1168e8f..d0fb3b701 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/query/QueryStringQueryParserExt.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/query/QueryStringQueryParserExt.kt @@ -13,7 +13,6 @@ import org.opensearch.index.search.QueryStringQueryParser const val EXISTS = "_exists_" class QueryStringQueryParserExt : QueryStringQueryParser { - val discoveredFields = mutableListOf() var hasLonelyTerms = false @@ -25,26 +24,32 @@ class QueryStringQueryParserExt : QueryStringQueryParser { handleFieldQueryDiscovered(field) 
return super.getFuzzyQuery(field, termStr, minSimilarity) } + override fun getPrefixQuery(field: String?, termStr: String?): Query { handleFieldQueryDiscovered(field) return super.getPrefixQuery(field, termStr) } + override fun getFieldQuery(field: String?, queryText: String?, quoted: Boolean): Query { handleFieldQueryDiscovered(field, queryText) return super.getFieldQuery(field, queryText, quoted) } + override fun getWildcardQuery(field: String?, termStr: String?): Query { handleFieldQueryDiscovered(field) return super.getWildcardQuery(field, termStr) } + override fun getFieldQuery(field: String?, queryText: String?, slop: Int): Query { handleFieldQueryDiscovered(field, queryText) return super.getFieldQuery(field, queryText, slop) } + override fun getRangeQuery(field: String?, part1: String?, part2: String?, startInclusive: Boolean, endInclusive: Boolean): Query { handleFieldQueryDiscovered(field) return super.getRangeQuery(field, part1, part2, startInclusive, endInclusive) } + override fun getRegexpQuery(field: String?, termStr: String?): Query { handleFieldQueryDiscovered(field) return super.getRegexpQuery(field, termStr) @@ -54,8 +59,11 @@ class QueryStringQueryParserExt : QueryStringQueryParser { if (field == null || Regex.isSimpleMatchPattern(field)) { hasLonelyTerms = true } else { - if (field == EXISTS && queryText?.isNotEmpty() == true) discoveredFields.add(queryText) - else discoveredFields.add(field) + if (field == EXISTS && queryText?.isNotEmpty() == true) { + discoveredFields.add(queryText) + } else { + discoveredFields.add(field) + } } } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/query/QueryStringQueryUtil.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/query/QueryStringQueryUtil.kt index a82197d5a..d8c0904f3 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/query/QueryStringQueryUtil.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/query/QueryStringQueryUtil.kt @@ -21,10 +21,9 @@ import org.opensearch.indexmanagement.common.model.dimension.Dimension import org.opensearch.indexmanagement.rollup.util.QueryShardContextFactory object QueryStringQueryUtil { - fun rewriteQueryStringQuery( queryBuilder: QueryBuilder, - concreteIndexName: String + concreteIndexName: String, ): QueryStringQueryBuilder { val qsqBuilder = queryBuilder as QueryStringQueryBuilder // Parse query_string query and extract all discovered fields @@ -53,28 +52,29 @@ object QueryStringQueryUtil { newFields.put("${it.key}.${Dimension.Type.TERMS.type}", it.value) } } - var retVal = QueryStringQueryBuilder(newQueryString) - .rewrite(qsqBuilder.rewrite()) - .fuzzyRewrite(qsqBuilder.fuzzyRewrite()) - .autoGenerateSynonymsPhraseQuery(qsqBuilder.autoGenerateSynonymsPhraseQuery()) - .allowLeadingWildcard(qsqBuilder.allowLeadingWildcard()) - .analyzeWildcard(qsqBuilder.analyzeWildcard()) - .defaultOperator(qsqBuilder.defaultOperator()) - .escape(qsqBuilder.escape()) - .fuzziness(qsqBuilder.fuzziness()) - .lenient(qsqBuilder.lenient()) - .enablePositionIncrements(qsqBuilder.enablePositionIncrements()) - .fuzzyMaxExpansions(qsqBuilder.fuzzyMaxExpansions()) - .fuzzyPrefixLength(qsqBuilder.fuzzyPrefixLength()) - .queryName(qsqBuilder.queryName()) - .quoteAnalyzer(qsqBuilder.quoteAnalyzer()) - .analyzer(qsqBuilder.analyzer()) - .minimumShouldMatch(qsqBuilder.minimumShouldMatch()) - .timeZone(qsqBuilder.timeZone()) - .phraseSlop(qsqBuilder.phraseSlop()) - .quoteFieldSuffix(qsqBuilder.quoteFieldSuffix()) - .boost(qsqBuilder.boost()) - 
.fuzzyTranspositions(qsqBuilder.fuzzyTranspositions()) + var retVal = + QueryStringQueryBuilder(newQueryString) + .rewrite(qsqBuilder.rewrite()) + .fuzzyRewrite(qsqBuilder.fuzzyRewrite()) + .autoGenerateSynonymsPhraseQuery(qsqBuilder.autoGenerateSynonymsPhraseQuery()) + .allowLeadingWildcard(qsqBuilder.allowLeadingWildcard()) + .analyzeWildcard(qsqBuilder.analyzeWildcard()) + .defaultOperator(qsqBuilder.defaultOperator()) + .escape(qsqBuilder.escape()) + .fuzziness(qsqBuilder.fuzziness()) + .lenient(qsqBuilder.lenient()) + .enablePositionIncrements(qsqBuilder.enablePositionIncrements()) + .fuzzyMaxExpansions(qsqBuilder.fuzzyMaxExpansions()) + .fuzzyPrefixLength(qsqBuilder.fuzzyPrefixLength()) + .queryName(qsqBuilder.queryName()) + .quoteAnalyzer(qsqBuilder.quoteAnalyzer()) + .analyzer(qsqBuilder.analyzer()) + .minimumShouldMatch(qsqBuilder.minimumShouldMatch()) + .timeZone(qsqBuilder.timeZone()) + .phraseSlop(qsqBuilder.phraseSlop()) + .quoteFieldSuffix(qsqBuilder.quoteFieldSuffix()) + .boost(qsqBuilder.boost()) + .fuzzyTranspositions(qsqBuilder.fuzzyTranspositions()) if (newDefaultField != null) { retVal = retVal.defaultField(newDefaultField) @@ -113,35 +113,40 @@ object QueryStringQueryUtil { } else if (qsqBuilder.fields().size > 0) { val resolvedFields = QueryParserHelper.resolveMappingFields(context, qsqBuilder.fields()) otherFields = resolvedFields - queryParser = if (QueryParserHelper.hasAllFieldsWildcard(qsqBuilder.fields().keys)) { - QueryStringQueryParserExt(context, resolvedFields, if (qsqBuilder.lenient() == null) true else qsqBuilder.lenient()) - } else { - QueryStringQueryParserExt(context, resolvedFields, isLenient) - } + queryParser = + if (QueryParserHelper.hasAllFieldsWildcard(qsqBuilder.fields().keys)) { + QueryStringQueryParserExt(context, resolvedFields, if (qsqBuilder.lenient() == null) true else qsqBuilder.lenient()) + } else { + QueryStringQueryParserExt(context, resolvedFields, isLenient) + } } else { val defaultFields: List = context.defaultFields() - queryParser = if (QueryParserHelper.hasAllFieldsWildcard(defaultFields)) { - otherFields = resolveMatchPatternFields(context) - QueryStringQueryParserExt(context, if (qsqBuilder.lenient() == null) true else qsqBuilder.lenient()) - } else { - val resolvedFields = QueryParserHelper.resolveMappingFields( - context, - QueryParserHelper.parseFieldsAndWeights(defaultFields) - ) - otherFields = resolvedFields - QueryStringQueryParserExt(context, resolvedFields, isLenient) - } + queryParser = + if (QueryParserHelper.hasAllFieldsWildcard(defaultFields)) { + otherFields = resolveMatchPatternFields(context) + QueryStringQueryParserExt(context, if (qsqBuilder.lenient() == null) true else qsqBuilder.lenient()) + } else { + val resolvedFields = + QueryParserHelper.resolveMappingFields( + context, + QueryParserHelper.parseFieldsAndWeights(defaultFields), + ) + otherFields = resolvedFields + QueryStringQueryParserExt(context, resolvedFields, isLenient) + } } if (qsqBuilder.analyzer() != null) { - val namedAnalyzer: NamedAnalyzer = context.getIndexAnalyzers().get(qsqBuilder.analyzer()) - ?: throw QueryShardException(context, "[query_string] analyzer [$qsqBuilder.analyzer] not found") + val namedAnalyzer: NamedAnalyzer = + context.getIndexAnalyzers().get(qsqBuilder.analyzer()) + ?: throw QueryShardException(context, "[query_string] analyzer [$qsqBuilder.analyzer] not found") queryParser.setForceAnalyzer(namedAnalyzer) } if (qsqBuilder.quoteAnalyzer() != null) { - val forceQuoteAnalyzer: NamedAnalyzer = 
context.getIndexAnalyzers().get(qsqBuilder.quoteAnalyzer()) - ?: throw QueryShardException(context, "[query_string] quote_analyzer [$qsqBuilder.quoteAnalyzer] not found") + val forceQuoteAnalyzer: NamedAnalyzer = + context.getIndexAnalyzers().get(qsqBuilder.quoteAnalyzer()) + ?: throw QueryShardException(context, "[query_string] quote_analyzer [$qsqBuilder.quoteAnalyzer] not found") queryParser.setForceQuoteAnalyzer(forceQuoteAnalyzer) } @@ -156,11 +161,17 @@ object QueryStringQueryUtil { queryParser.phraseSlop = qsqBuilder.phraseSlop() queryParser.setQuoteFieldSuffix(qsqBuilder.quoteFieldSuffix()) queryParser.allowLeadingWildcard = - if (qsqBuilder.allowLeadingWildcard() == null) context.queryStringAllowLeadingWildcard() - else qsqBuilder.allowLeadingWildcard() + if (qsqBuilder.allowLeadingWildcard() == null) { + context.queryStringAllowLeadingWildcard() + } else { + qsqBuilder.allowLeadingWildcard() + } queryParser.setAnalyzeWildcard( - if (qsqBuilder.analyzeWildcard() == null) context.queryStringAnalyzeWildcard() - else qsqBuilder.analyzeWildcard() + if (qsqBuilder.analyzeWildcard() == null) { + context.queryStringAnalyzeWildcard() + } else { + qsqBuilder.analyzeWildcard() + }, ) queryParser.enablePositionIncrements = qsqBuilder.enablePositionIncrements() queryParser.setFuzziness(qsqBuilder.fuzziness()) @@ -185,7 +196,7 @@ object QueryStringQueryUtil { @Suppress("EmptyCatchBlock", "LoopWithTooManyJumpStatements") fun resolveMatchPatternFields( context: QueryShardContext, - pattern: String = "*" + pattern: String = "*", ): Map { val allFields = context.simpleMatchToIndexNames(pattern) val fields: MutableMap = HashMap() diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestDeleteRollupAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestDeleteRollupAction.kt index a97672a26..916deb26f 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestDeleteRollupAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestDeleteRollupAction.kt @@ -21,7 +21,6 @@ import org.opensearch.rest.action.RestToXContentListener import java.io.IOException class RestDeleteRollupAction : BaseRestHandler() { - override fun routes(): List { return emptyList() } @@ -30,8 +29,8 @@ class RestDeleteRollupAction : BaseRestHandler() { return listOf( ReplacedRoute( DELETE, "$ROLLUP_JOBS_BASE_URI/{rollupID}", - DELETE, "$LEGACY_ROLLUP_JOBS_BASE_URI/{rollupID}" - ) + DELETE, "$LEGACY_ROLLUP_JOBS_BASE_URI/{rollupID}", + ), ) } @@ -43,8 +42,9 @@ class RestDeleteRollupAction : BaseRestHandler() { val refreshPolicy = RefreshPolicy.parse(request.param(REFRESH, RefreshPolicy.IMMEDIATE.value)) return RestChannelConsumer { channel -> channel.newBuilder() - val deleteRollupRequest = DeleteRollupRequest(rollupID) - .setRefreshPolicy(refreshPolicy) + val deleteRollupRequest = + DeleteRollupRequest(rollupID) + .setRefreshPolicy(refreshPolicy) client.execute(DeleteRollupAction.INSTANCE, deleteRollupRequest, RestToXContentListener(channel)) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestExplainRollupAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestExplainRollupAction.kt index bce23b1ce..84f3d447d 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestExplainRollupAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestExplainRollupAction.kt @@ -19,7 +19,6 @@ import 
org.opensearch.rest.RestRequest.Method.GET import org.opensearch.rest.action.RestToXContentListener class RestExplainRollupAction : BaseRestHandler() { - override fun routes(): List { return emptyList() } @@ -28,8 +27,8 @@ class RestExplainRollupAction : BaseRestHandler() { return listOf( ReplacedRoute( GET, "$ROLLUP_JOBS_BASE_URI/{rollupID}/_explain", - GET, "$LEGACY_ROLLUP_JOBS_BASE_URI/{rollupID}/_explain" - ) + GET, "$LEGACY_ROLLUP_JOBS_BASE_URI/{rollupID}/_explain", + ), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestGetRollupAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestGetRollupAction.kt index 013b4bfaa..40a8b1342 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestGetRollupAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestGetRollupAction.kt @@ -27,7 +27,6 @@ import org.opensearch.rest.action.RestToXContentListener import org.opensearch.search.fetch.subphase.FetchSourceContext class RestGetRollupAction : BaseRestHandler() { - override fun routes(): List { return emptyList() } @@ -36,16 +35,16 @@ class RestGetRollupAction : BaseRestHandler() { return listOf( ReplacedRoute( GET, ROLLUP_JOBS_BASE_URI, - GET, LEGACY_ROLLUP_JOBS_BASE_URI + GET, LEGACY_ROLLUP_JOBS_BASE_URI, ), ReplacedRoute( GET, "$ROLLUP_JOBS_BASE_URI/{rollupID}", - GET, "$LEGACY_ROLLUP_JOBS_BASE_URI/{rollupID}" + GET, "$LEGACY_ROLLUP_JOBS_BASE_URI/{rollupID}", ), ReplacedRoute( HEAD, "$ROLLUP_JOBS_BASE_URI/{rollupID}", - HEAD, "$LEGACY_ROLLUP_JOBS_BASE_URI/{rollupID}" - ) + HEAD, "$LEGACY_ROLLUP_JOBS_BASE_URI/{rollupID}", + ), ) } @@ -62,13 +61,14 @@ class RestGetRollupAction : BaseRestHandler() { val sortDirection = request.param("sortDirection", DEFAULT_SORT_DIRECTION) return RestChannelConsumer { channel -> if (rollupID == null || rollupID.isEmpty()) { - val req = GetRollupsRequest( - searchString, - from, - size, - sortField, - sortDirection - ) + val req = + GetRollupsRequest( + searchString, + from, + size, + sortField, + sortDirection, + ) client.execute(GetRollupsAction.INSTANCE, req, RestToXContentListener(channel)) } else { val req = GetRollupRequest(rollupID, if (request.method() == HEAD) FetchSourceContext.DO_NOT_FETCH_SOURCE else null) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestIndexRollupAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestIndexRollupAction.kt index 28e807c20..26eac68b5 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestIndexRollupAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestIndexRollupAction.kt @@ -7,6 +7,7 @@ package org.opensearch.indexmanagement.rollup.resthandler import org.opensearch.action.support.WriteRequest import org.opensearch.client.node.NodeClient +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.ToXContent import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.LEGACY_ROLLUP_JOBS_BASE_URI @@ -29,13 +30,11 @@ import org.opensearch.rest.RestHandler.Route import org.opensearch.rest.RestRequest import org.opensearch.rest.RestRequest.Method.PUT import org.opensearch.rest.RestResponse -import org.opensearch.core.rest.RestStatus import org.opensearch.rest.action.RestResponseListener import java.io.IOException import java.time.Instant class RestIndexRollupAction : BaseRestHandler() { - override fun routes(): 
List { return emptyList() } @@ -44,12 +43,12 @@ class RestIndexRollupAction : BaseRestHandler() { return listOf( ReplacedRoute( PUT, ROLLUP_JOBS_BASE_URI, - PUT, LEGACY_ROLLUP_JOBS_BASE_URI + PUT, LEGACY_ROLLUP_JOBS_BASE_URI, ), ReplacedRoute( PUT, "$ROLLUP_JOBS_BASE_URI/{rollupID}", - PUT, "$LEGACY_ROLLUP_JOBS_BASE_URI/{rollupID}" - ) + PUT, "$LEGACY_ROLLUP_JOBS_BASE_URI/{rollupID}", + ), ) } @@ -67,21 +66,22 @@ class RestIndexRollupAction : BaseRestHandler() { val seqNo = request.paramAsLong(IF_SEQ_NO, SequenceNumbers.UNASSIGNED_SEQ_NO) val primaryTerm = request.paramAsLong(IF_PRIMARY_TERM, SequenceNumbers.UNASSIGNED_PRIMARY_TERM) val xcp = request.contentParser() - val rollup = xcp.parseWithType(id = id, seqNo = seqNo, primaryTerm = primaryTerm, parse = Rollup.Companion::parse) - .copy(jobLastUpdatedTime = Instant.now()) - val refreshPolicy = if (request.hasParam(REFRESH)) { - WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) - } else { - WriteRequest.RefreshPolicy.IMMEDIATE - } + val rollup = + xcp.parseWithType(id = id, seqNo = seqNo, primaryTerm = primaryTerm, parse = Rollup.Companion::parse) + .copy(jobLastUpdatedTime = Instant.now()) + val refreshPolicy = + if (request.hasParam(REFRESH)) { + WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) + } else { + WriteRequest.RefreshPolicy.IMMEDIATE + } val indexRollupRequest = IndexRollupRequest(rollup, refreshPolicy) return RestChannelConsumer { channel -> client.execute(IndexRollupAction.INSTANCE, indexRollupRequest, indexRollupResponse(channel)) } } - private fun indexRollupResponse(channel: RestChannel): - RestResponseListener { + private fun indexRollupResponse(channel: RestChannel): RestResponseListener { return object : RestResponseListener(channel) { @Throws(Exception::class) override fun buildResponse(response: IndexRollupResponse): RestResponse { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestStartRollupAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestStartRollupAction.kt index ece151c24..057854565 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestStartRollupAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestStartRollupAction.kt @@ -21,7 +21,6 @@ import org.opensearch.rest.action.RestToXContentListener import java.io.IOException class RestStartRollupAction : BaseRestHandler() { - override fun routes(): List { return emptyList() } @@ -30,8 +29,8 @@ class RestStartRollupAction : BaseRestHandler() { return listOf( ReplacedRoute( POST, "$ROLLUP_JOBS_BASE_URI/{rollupID}/_start", - POST, "$LEGACY_ROLLUP_JOBS_BASE_URI/{rollupID}/_start" - ) + POST, "$LEGACY_ROLLUP_JOBS_BASE_URI/{rollupID}/_start", + ), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestStopRollupAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestStopRollupAction.kt index f4de4d63b..c4530371c 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestStopRollupAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestStopRollupAction.kt @@ -21,7 +21,6 @@ import org.opensearch.rest.action.RestToXContentListener import java.io.IOException class RestStopRollupAction : BaseRestHandler() { - override fun routes(): List { return emptyList() } @@ -30,8 +29,8 @@ class RestStopRollupAction : BaseRestHandler() { return listOf( ReplacedRoute( POST, "$ROLLUP_JOBS_BASE_URI/{rollupID}/_stop", - POST, 
"$LEGACY_ROLLUP_JOBS_BASE_URI/{rollupID}/_stop" - ) + POST, "$LEGACY_ROLLUP_JOBS_BASE_URI/{rollupID}/_stop", + ), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/settings/LegacyOpenDistroRollupSettings.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/settings/LegacyOpenDistroRollupSettings.kt index a0e74e81b..2ac4d2963 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/settings/LegacyOpenDistroRollupSettings.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/settings/LegacyOpenDistroRollupSettings.kt @@ -10,7 +10,6 @@ import org.opensearch.common.unit.TimeValue @Suppress("UtilityClassWithPublicConstructor") class LegacyOpenDistroRollupSettings { - companion object { const val DEFAULT_ROLLUP_ENABLED = true const val DEFAULT_ACQUIRE_LOCK_RETRY_COUNT = 3 @@ -20,68 +19,76 @@ class LegacyOpenDistroRollupSettings { const val DEFAULT_CLIENT_REQUEST_RETRY_COUNT = 3 const val DEFAULT_CLIENT_REQUEST_RETRY_DELAY = 1000L - val ROLLUP_ENABLED: Setting = Setting.boolSetting( - "opendistro.rollup.enabled", - DEFAULT_ROLLUP_ENABLED, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) + val ROLLUP_ENABLED: Setting = + Setting.boolSetting( + "opendistro.rollup.enabled", + DEFAULT_ROLLUP_ENABLED, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) - val ROLLUP_SEARCH_ENABLED: Setting = Setting.boolSetting( - "opendistro.rollup.search.enabled", - true, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) + val ROLLUP_SEARCH_ENABLED: Setting = + Setting.boolSetting( + "opendistro.rollup.search.enabled", + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) - val ROLLUP_INDEX: Setting = Setting.boolSetting( - "index.opendistro.rollup_index", - false, - Setting.Property.IndexScope, - Setting.Property.InternalIndex, - Setting.Property.Deprecated - ) + val ROLLUP_INDEX: Setting = + Setting.boolSetting( + "index.opendistro.rollup_index", + false, + Setting.Property.IndexScope, + Setting.Property.InternalIndex, + Setting.Property.Deprecated, + ) - val ROLLUP_INGEST_BACKOFF_MILLIS: Setting = Setting.positiveTimeSetting( - "opendistro.rollup.ingest.backoff_millis", - TimeValue.timeValueMillis(1000), - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) + val ROLLUP_INGEST_BACKOFF_MILLIS: Setting = + Setting.positiveTimeSetting( + "opendistro.rollup.ingest.backoff_millis", + TimeValue.timeValueMillis(1000), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) - val ROLLUP_INGEST_BACKOFF_COUNT: Setting = Setting.intSetting( - "opendistro.rollup.ingest.backoff_count", - 5, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) + val ROLLUP_INGEST_BACKOFF_COUNT: Setting = + Setting.intSetting( + "opendistro.rollup.ingest.backoff_count", + 5, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) - val ROLLUP_SEARCH_BACKOFF_MILLIS: Setting = Setting.positiveTimeSetting( - "opendistro.rollup.search.backoff_millis", - TimeValue.timeValueMillis(1000), - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) + val ROLLUP_SEARCH_BACKOFF_MILLIS: Setting = + Setting.positiveTimeSetting( + "opendistro.rollup.search.backoff_millis", + TimeValue.timeValueMillis(1000), + Setting.Property.NodeScope, + Setting.Property.Dynamic, 
+ Setting.Property.Deprecated, + ) - val ROLLUP_SEARCH_BACKOFF_COUNT: Setting = Setting.intSetting( - "opendistro.rollup.search.backoff_count", - 5, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) + val ROLLUP_SEARCH_BACKOFF_COUNT: Setting = + Setting.intSetting( + "opendistro.rollup.search.backoff_count", + 5, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) - val ROLLUP_DASHBOARDS: Setting = Setting.boolSetting( - "opendistro.rollup.dashboards.enabled", - DEFAULT_ROLLUP_ENABLED, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) + val ROLLUP_DASHBOARDS: Setting = + Setting.boolSetting( + "opendistro.rollup.dashboards.enabled", + DEFAULT_ROLLUP_ENABLED, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/settings/RollupSettings.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/settings/RollupSettings.kt index 22238fd6d..0554a7061 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/settings/RollupSettings.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/settings/RollupSettings.kt @@ -10,7 +10,6 @@ import org.opensearch.common.unit.TimeValue @Suppress("UtilityClassWithPublicConstructor") class RollupSettings { - companion object { const val DEFAULT_ROLLUP_ENABLED = true const val DEFAULT_SEARCH_ALL_JOBS = false @@ -22,67 +21,76 @@ class RollupSettings { const val DEFAULT_CLIENT_REQUEST_RETRY_DELAY = 1000L const val MINIMUM_CANCEL_AFTER_TIME_INTERVAL_MINUTES = 10L - val ROLLUP_ENABLED: Setting = Setting.boolSetting( - "plugins.rollup.enabled", - LegacyOpenDistroRollupSettings.ROLLUP_ENABLED, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) + val ROLLUP_ENABLED: Setting = + Setting.boolSetting( + "plugins.rollup.enabled", + LegacyOpenDistroRollupSettings.ROLLUP_ENABLED, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) - val ROLLUP_SEARCH_ENABLED: Setting = Setting.boolSetting( - "plugins.rollup.search.enabled", - LegacyOpenDistroRollupSettings.ROLLUP_SEARCH_ENABLED, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) + val ROLLUP_SEARCH_ENABLED: Setting = + Setting.boolSetting( + "plugins.rollup.search.enabled", + LegacyOpenDistroRollupSettings.ROLLUP_SEARCH_ENABLED, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) - val ROLLUP_INDEX: Setting = Setting.boolSetting( - "index.plugins.rollup_index", - LegacyOpenDistroRollupSettings.ROLLUP_INDEX, - Setting.Property.IndexScope, - Setting.Property.Dynamic - ) + val ROLLUP_INDEX: Setting = + Setting.boolSetting( + "index.plugins.rollup_index", + LegacyOpenDistroRollupSettings.ROLLUP_INDEX, + Setting.Property.IndexScope, + Setting.Property.Dynamic, + ) - val ROLLUP_INGEST_BACKOFF_MILLIS: Setting = Setting.positiveTimeSetting( - "plugins.rollup.ingest.backoff_millis", - LegacyOpenDistroRollupSettings.ROLLUP_INGEST_BACKOFF_MILLIS, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) + val ROLLUP_INGEST_BACKOFF_MILLIS: Setting = + Setting.positiveTimeSetting( + "plugins.rollup.ingest.backoff_millis", + LegacyOpenDistroRollupSettings.ROLLUP_INGEST_BACKOFF_MILLIS, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) - val ROLLUP_INGEST_BACKOFF_COUNT: Setting = Setting.intSetting( - "plugins.rollup.ingest.backoff_count", - LegacyOpenDistroRollupSettings.ROLLUP_INGEST_BACKOFF_COUNT, - Setting.Property.NodeScope, - 
Setting.Property.Dynamic - ) + val ROLLUP_INGEST_BACKOFF_COUNT: Setting = + Setting.intSetting( + "plugins.rollup.ingest.backoff_count", + LegacyOpenDistroRollupSettings.ROLLUP_INGEST_BACKOFF_COUNT, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) - val ROLLUP_SEARCH_BACKOFF_MILLIS: Setting = Setting.positiveTimeSetting( - "plugins.rollup.search.backoff_millis", - LegacyOpenDistroRollupSettings.ROLLUP_SEARCH_BACKOFF_MILLIS, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) + val ROLLUP_SEARCH_BACKOFF_MILLIS: Setting = + Setting.positiveTimeSetting( + "plugins.rollup.search.backoff_millis", + LegacyOpenDistroRollupSettings.ROLLUP_SEARCH_BACKOFF_MILLIS, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) - val ROLLUP_SEARCH_BACKOFF_COUNT: Setting = Setting.intSetting( - "plugins.rollup.search.backoff_count", - LegacyOpenDistroRollupSettings.ROLLUP_SEARCH_BACKOFF_COUNT, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) + val ROLLUP_SEARCH_BACKOFF_COUNT: Setting = + Setting.intSetting( + "plugins.rollup.search.backoff_count", + LegacyOpenDistroRollupSettings.ROLLUP_SEARCH_BACKOFF_COUNT, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) - val ROLLUP_SEARCH_ALL_JOBS: Setting = Setting.boolSetting( - "plugins.rollup.search.search_all_jobs", - DEFAULT_SEARCH_ALL_JOBS, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) + val ROLLUP_SEARCH_ALL_JOBS: Setting = + Setting.boolSetting( + "plugins.rollup.search.search_all_jobs", + DEFAULT_SEARCH_ALL_JOBS, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) - val ROLLUP_DASHBOARDS: Setting = Setting.boolSetting( - "plugins.rollup.dashboards.enabled", - LegacyOpenDistroRollupSettings.ROLLUP_DASHBOARDS, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) + val ROLLUP_DASHBOARDS: Setting = + Setting.boolSetting( + "plugins.rollup.dashboards.enabled", + LegacyOpenDistroRollupSettings.ROLLUP_DASHBOARDS, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/util/QueryShardContextFactory.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/util/QueryShardContextFactory.kt index fc9bac0b4..b1fe08e56 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/util/QueryShardContextFactory.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/util/QueryShardContextFactory.kt @@ -9,15 +9,15 @@ import org.opensearch.Version import org.opensearch.client.Client import org.opensearch.cluster.metadata.IndexMetadata import org.opensearch.cluster.service.ClusterService -import org.opensearch.core.common.io.stream.NamedWriteableRegistry import org.opensearch.common.regex.Regex import org.opensearch.common.settings.IndexScopedSettings import org.opensearch.common.settings.Settings import org.opensearch.common.settings.SettingsModule import org.opensearch.common.util.BigArrays +import org.opensearch.core.common.io.stream.NamedWriteableRegistry +import org.opensearch.core.index.Index import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.env.Environment -import org.opensearch.core.index.Index import org.opensearch.index.IndexSettings import org.opensearch.index.mapper.MapperService import org.opensearch.index.query.QueryShardContext @@ -48,7 +48,7 @@ object QueryShardContextFactory { scriptService: ScriptService, xContentRegistry: NamedXContentRegistry, namedWriteableRegistry: NamedWriteableRegistry, - environment: Environment + environment: Environment, ) { 
this.client = client this.clusterService = clusterService @@ -61,8 +61,9 @@ object QueryShardContextFactory { private fun getIndexSettingsAndMetadata(indexName: String?): Triple { val index: Index? val indexSettings: Settings? - val indexMetadata = clusterService.state().metadata.index(indexName) - ?: throw IllegalArgumentException("Can't find IndexMetadata for index: [$indexName]") + val indexMetadata = + clusterService.state().metadata.index(indexName) + ?: throw IllegalArgumentException("Can't find IndexMetadata for index: [$indexName]") index = indexMetadata.index indexSettings = indexMetadata.settings return Triple(index, indexSettings, indexMetadata) @@ -70,18 +71,20 @@ object QueryShardContextFactory { fun createShardContext(indexName: String?): QueryShardContext { val (index, indexSettings, indexMetadata) = getIndexSettingsAndMetadata(indexName) - val nodeSettings = Settings.builder() - .put("node.name", "dummyNodeName") - .put(Environment.PATH_HOME_SETTING.key, environment.tmpDir()) - .build() + val nodeSettings = + Settings.builder() + .put("node.name", "dummyNodeName") + .put(Environment.PATH_HOME_SETTING.key, environment.tmpDir()) + .build() val pluginsService = PluginsService(nodeSettings, null, null, null, listOf()) val additionalSettings = pluginsService.pluginSettings - val settingsModule = SettingsModule( - nodeSettings, - additionalSettings, - pluginsService.pluginSettingsFilter, emptySet() - ) + val settingsModule = + SettingsModule( + nodeSettings, + additionalSettings, + pluginsService.pluginSettingsFilter, emptySet(), + ) val indexScopedSettings: IndexScopedSettings = settingsModule.indexScopedSettings val idxSettings = newIndexSettings(index, indexSettings, indexScopedSettings) val indicesModule = IndicesModule(pluginsService.filterPlugins(MapperPlugin::class.java)) @@ -89,16 +92,17 @@ object QueryShardContextFactory { val analysisModule = AnalysisModule(environment, emptyList()) val indexAnalyzers = analysisModule.analysisRegistry.build(idxSettings) val similarityService = SimilarityService(idxSettings, null, emptyMap()) - val mapperService = MapperService( - idxSettings, - indexAnalyzers, - xContentRegistry, - similarityService, - mapperRegistry, - { createShardContext(null) }, - { false }, - scriptService - ) + val mapperService = + MapperService( + idxSettings, + indexAnalyzers, + xContentRegistry, + similarityService, + mapperRegistry, + { createShardContext(null) }, + { false }, + scriptService, + ) // In order to be able to call toQuery method on QueryBuilder, we need to setup mappings in MapperService mapperService.merge("_doc", indexMetadata?.mapping()?.source(), MapperService.MergeReason.MAPPING_UPDATE) @@ -119,17 +123,18 @@ object QueryShardContextFactory { null, { pattern -> Regex.simpleMatch(pattern, index?.name) }, { true }, - null + null, ) } private fun newIndexSettings(index: Index?, settings: Settings?, indexScopedSettings: IndexScopedSettings?): IndexSettings? 
{ - val build = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(settings) - .build() + val build = + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(settings) + .build() val metadata = IndexMetadata.builder(index?.name).settings(build).build() return IndexSettings(metadata, Settings.EMPTY, indexScopedSettings) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/util/RollupFieldValueExpressionResolver.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/util/RollupFieldValueExpressionResolver.kt index 8446be029..e7ce347d8 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/util/RollupFieldValueExpressionResolver.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/util/RollupFieldValueExpressionResolver.kt @@ -18,7 +18,6 @@ import org.opensearch.script.ScriptType import org.opensearch.script.TemplateScript object RollupFieldValueExpressionResolver { - private val validTopContextFields = setOf(Rollup.SOURCE_INDEX_FIELD) private lateinit var scriptService: ScriptService @@ -28,13 +27,15 @@ object RollupFieldValueExpressionResolver { fun resolve(rollup: Rollup, fieldValue: String): String { val script = Script(ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, fieldValue, mapOf()) - val contextMap = rollup.toXContent(XContentFactory.jsonBuilder(), XCONTENT_WITHOUT_TYPE) - .toMap() - .filterKeys { key -> key in validTopContextFields } + val contextMap = + rollup.toXContent(XContentFactory.jsonBuilder(), XCONTENT_WITHOUT_TYPE) + .toMap() + .filterKeys { key -> key in validTopContextFields } - var compiledValue = scriptService.compile(script, TemplateScript.CONTEXT) - .newInstance(script.params + mapOf("ctx" to contextMap)) - .execute() + var compiledValue = + scriptService.compile(script, TemplateScript.CONTEXT) + .newInstance(script.params + mapOf("ctx" to contextMap)) + .execute() if (indexAliasUtils.isAlias(compiledValue)) { compiledValue = indexAliasUtils.getWriteIndexNameForAlias(compiledValue) @@ -56,7 +57,6 @@ object RollupFieldValueExpressionResolver { } open class IndexAliasUtils(val clusterService: ClusterService) { - open fun hasAlias(index: String): Boolean { val aliases = this.clusterService.state().metadata().indices[index]?.aliases if (aliases != null) { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/util/RollupUtils.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/util/RollupUtils.kt index 502cc181f..16265e5f9 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/util/RollupUtils.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/util/RollupUtils.kt @@ -13,10 +13,10 @@ import org.opensearch.cluster.ClusterState import org.opensearch.cluster.metadata.IndexMetadata import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.XContentHelper -import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken import org.opensearch.common.xcontent.XContentType import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.XContentParser.Token +import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.BoostingQueryBuilder 
import org.opensearch.index.query.ConstantScoreQueryBuilder @@ -84,19 +84,21 @@ fun Rollup.isTargetIndexAlias(): Boolean { } fun Rollup.getRollupSearchRequest(metadata: RollupMetadata): SearchRequest { - val query = if (metadata.continuous != null) { - RangeQueryBuilder(this.getDateHistogram().sourceField) - .from(metadata.continuous.nextWindowStartTime.toEpochMilli(), true) - .to(metadata.continuous.nextWindowEndTime.toEpochMilli(), false) - .format(DATE_FIELD_EPOCH_MILLIS_FORMAT) - } else { - MatchAllQueryBuilder() - } - val searchSourceBuilder = SearchSourceBuilder() - .trackTotalHits(false) - .size(0) - .aggregation(this.getCompositeAggregationBuilder(metadata.afterKey)) - .query(query) + val query = + if (metadata.continuous != null) { + RangeQueryBuilder(this.getDateHistogram().sourceField) + .from(metadata.continuous.nextWindowStartTime.toEpochMilli(), true) + .to(metadata.continuous.nextWindowEndTime.toEpochMilli(), false) + .format(DATE_FIELD_EPOCH_MILLIS_FORMAT) + } else { + MatchAllQueryBuilder() + } + val searchSourceBuilder = + SearchSourceBuilder() + .trackTotalHits(false) + .size(0) + .aggregation(this.getCompositeAggregationBuilder(metadata.afterKey)) + .query(query) return SearchRequest(this.sourceIndex) .source(searchSourceBuilder) .allowPartialSearchResults(false) @@ -109,22 +111,23 @@ fun Rollup.getCompositeAggregationBuilder(afterKey: Map?): Composit return CompositeAggregationBuilder(this.id, sources).size(this.pageSize).also { compositeAgg -> afterKey?.let { compositeAgg.aggregateAfter(it) } this.metrics.forEach { metric -> - val subAggs = metric.metrics.flatMap { agg -> - when (agg) { - is Average -> { - listOf( - SumAggregationBuilder(metric.targetFieldWithType(agg) + ".sum").field(metric.sourceField), - ValueCountAggregationBuilder(metric.targetFieldWithType(agg) + ".value_count").field(metric.sourceField) - ) + val subAggs = + metric.metrics.flatMap { agg -> + when (agg) { + is Average -> { + listOf( + SumAggregationBuilder(metric.targetFieldWithType(agg) + ".sum").field(metric.sourceField), + ValueCountAggregationBuilder(metric.targetFieldWithType(agg) + ".value_count").field(metric.sourceField), + ) + } + is Sum -> listOf(SumAggregationBuilder(metric.targetFieldWithType(agg)).field(metric.sourceField)) + is Max -> listOf(MaxAggregationBuilder(metric.targetFieldWithType(agg)).field(metric.sourceField)) + is Min -> listOf(MinAggregationBuilder(metric.targetFieldWithType(agg)).field(metric.sourceField)) + is ValueCount -> listOf(ValueCountAggregationBuilder(metric.targetFieldWithType(agg)).field(metric.sourceField)) + // This shouldn't be possible as rollup will fail to initialize with an unsupported metric + else -> throw IllegalArgumentException("Found unsupported metric aggregation ${agg.type.type}") } - is Sum -> listOf(SumAggregationBuilder(metric.targetFieldWithType(agg)).field(metric.sourceField)) - is Max -> listOf(MaxAggregationBuilder(metric.targetFieldWithType(agg)).field(metric.sourceField)) - is Min -> listOf(MinAggregationBuilder(metric.targetFieldWithType(agg)).field(metric.sourceField)) - is ValueCount -> listOf(ValueCountAggregationBuilder(metric.targetFieldWithType(agg)).field(metric.sourceField)) - // This shouldn't be possible as rollup will fail to initialize with an unsupported metric - else -> throw IllegalArgumentException("Found unsupported metric aggregation ${agg.type.type}") } - } subAggs.forEach { compositeAgg.subAggregation(it) } } } @@ -160,8 +163,9 @@ inline fun Rollup.findMatchingMetricField(field: String): String { fun 
IndexMetadata.getRollupJobs(): List? { val rollupJobs = mutableListOf() val source = this.mapping()?.source() ?: return null - val xcp = XContentHelper - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, source.compressedReference(), XContentType.JSON) + val xcp = + XContentHelper + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, source.compressedReference(), XContentType.JSON) ensureExpectedToken(Token.START_OBJECT, xcp.nextToken(), xcp) // start of block ensureExpectedToken(Token.FIELD_NAME, xcp.nextToken(), xcp) // _doc ensureExpectedToken(Token.START_OBJECT, xcp.nextToken(), xcp) // start of _doc block @@ -199,11 +203,12 @@ fun IndexMetadata.getRollupJobs(): List? { // TODO: If we have to set this manually for each aggregation builder then it means we could miss new ones settings in the future @Suppress("ComplexMethod", "LongMethod") fun Rollup.rewriteAggregationBuilder(aggregationBuilder: AggregationBuilder): AggregationBuilder { - val aggFactory = AggregatorFactories.builder().also { factories -> - aggregationBuilder.subAggregations.forEach { - factories.addAggregator(this.rewriteAggregationBuilder(it)) + val aggFactory = + AggregatorFactories.builder().also { factories -> + aggregationBuilder.subAggregations.forEach { + factories.addAggregator(this.rewriteAggregationBuilder(it)) + } } - } return when (aggregationBuilder) { is TermsAggregationBuilder -> { @@ -231,20 +236,20 @@ fun Rollup.rewriteAggregationBuilder(aggregationBuilder: AggregationBuilder): Ag "state.sums += doc[\"${this.findMatchingMetricField(aggregationBuilder.field()) + ".sum"}\"].value; " + "state.counts += doc[\"${this.findMatchingMetricField(aggregationBuilder.field()) + ".value_count"}\"" + "].value", - emptyMap() - ) + emptyMap(), + ), ) .combineScript( Script( ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, - "def d = new double[2]; d[0] = state.sums; d[1] = state.counts; return d", emptyMap() - ) + "def d = new double[2]; d[0] = state.sums; d[1] = state.counts; return d", emptyMap(), + ), ) .reduceScript( Script( ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, - "double sum = 0; double count = 0; for (a in states) { sum += a[0]; count += a[1]; } return sum/count", emptyMap() - ) + "double sum = 0; double count = 0; for (a in states) { sum += a[0]; count += a[1]; } return sum/count", emptyMap(), + ), ) } is MaxAggregationBuilder -> { @@ -257,32 +262,32 @@ fun Rollup.rewriteAggregationBuilder(aggregationBuilder: AggregationBuilder): Ag } is ValueCountAggregationBuilder -> { /* - * A value count aggs of a pre-computed value count is incorrect as it just returns the number of - * pre-computed value counts instead of their sum. Unfortunately can't just use the sum aggregation - * because I was not able to find a way to cast the result of that to a long (instead of the returned float) - * and the 3893 vs 3893.0 was bothering me.. so this is the next best I can think of. Hopefully there is a better - * way and we can use that in the future. - * */ + * A value count aggs of a pre-computed value count is incorrect as it just returns the number of + * pre-computed value counts instead of their sum. Unfortunately can't just use the sum aggregation + * because I was not able to find a way to cast the result of that to a long (instead of the returned float) + * and the 3893 vs 3893.0 was bothering me.. so this is the next best I can think of. Hopefully there is a better + * way and we can use that in the future. 
+ * */ ScriptedMetricAggregationBuilder(aggregationBuilder.name) .initScript(Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "state.valueCounts = []", emptyMap())) .mapScript( Script( ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "state.valueCounts.add(doc[\"${this.findMatchingMetricField(aggregationBuilder.field())}\"].value)", - emptyMap() - ) + emptyMap(), + ), ) .combineScript( Script( ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, - "long valueCount = 0; for (vc in state.valueCounts) { valueCount += vc } return valueCount", emptyMap() - ) + "long valueCount = 0; for (vc in state.valueCounts) { valueCount += vc } return valueCount", emptyMap(), + ), ) .reduceScript( Script( ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, - "long valueCount = 0; for (vc in states) { valueCount += vc } return valueCount", emptyMap() - ) + "long valueCount = 0; for (vc in states) { valueCount += vc } return valueCount", emptyMap(), + ), ) } // We do nothing otherwise, the validation logic should have already verified so not throwing an exception @@ -294,7 +299,7 @@ fun Rollup.rewriteAggregationBuilder(aggregationBuilder: AggregationBuilder): Ag fun Rollup.rewriteQueryBuilder( queryBuilder: QueryBuilder, fieldNameMappingTypeMap: Map, - concreteIndexName: String = "" + concreteIndexName: String = "", ): QueryBuilder { return when (queryBuilder) { is TermQueryBuilder -> { @@ -409,7 +414,7 @@ fun Rollup.populateFieldMappings(): Set { fun SearchSourceBuilder.rewriteSearchSourceBuilder( jobs: Set, fieldNameMappingTypeMap: Map, - concreteIndexName: String + concreteIndexName: String, ): SearchSourceBuilder { val ssb = SearchSourceBuilder() // can use first() here as all jobs in the set will have a superset of the query's terms @@ -447,7 +452,7 @@ fun SearchSourceBuilder.rewriteSearchSourceBuilder( fun SearchSourceBuilder.rewriteSearchSourceBuilder( job: Rollup, fieldNameMappingTypeMap: Map, - concreteIndexName: String + concreteIndexName: String, ): SearchSourceBuilder { return this.rewriteSearchSourceBuilder(setOf(job), fieldNameMappingTypeMap, concreteIndexName) } @@ -456,14 +461,15 @@ fun Rollup.getInitialDocValues(docCount: Long): MutableMap = mutableMapOf( Rollup.ROLLUP_DOC_ID_FIELD to this.id, Rollup.ROLLUP_DOC_COUNT_FIELD to docCount, - Rollup.ROLLUP_DOC_SCHEMA_VERSION_FIELD to this.schemaVersion + Rollup.ROLLUP_DOC_SCHEMA_VERSION_FIELD to this.schemaVersion, ) fun parseRollup(response: GetResponse, xContentRegistry: NamedXContentRegistry = NamedXContentRegistry.EMPTY): Rollup { - val xcp = XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - response.sourceAsBytesRef, XContentType.JSON - ) + val xcp = + XContentHelper.createParser( + xContentRegistry, LoggingDeprecationHandler.INSTANCE, + response.sourceAsBytesRef, XContentType.JSON, + ) return xcp.parseWithType(response.id, response.seqNo, response.primaryTerm, Rollup.Companion::parse) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/settings/IndexManagementSettings.kt b/src/main/kotlin/org/opensearch/indexmanagement/settings/IndexManagementSettings.kt index a96442808..daf9b8edb 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/settings/IndexManagementSettings.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/settings/IndexManagementSettings.kt @@ -8,14 +8,13 @@ import org.opensearch.common.settings.Setting @Suppress("UtilityClassWithPublicConstructor") class IndexManagementSettings { - companion object { - - val FILTER_BY_BACKEND_ROLES: Setting = Setting.boolSetting( - 
"plugins.index_management.filter_by_backend_roles", - false, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) + val FILTER_BY_BACKEND_ROLES: Setting = + Setting.boolSetting( + "plugins.index_management.filter_by_backend_roles", + false, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SMRunner.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SMRunner.kt index b15491c1e..50f4dd8a5 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SMRunner.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SMRunner.kt @@ -13,32 +13,31 @@ import kotlinx.coroutines.launch import org.apache.logging.log4j.LogManager import org.opensearch.action.bulk.BackoffPolicy import org.opensearch.client.Client -import org.opensearch.common.unit.TimeValue -import org.opensearch.indexmanagement.snapshotmanagement.engine.SMStateMachine -import org.opensearch.indexmanagement.snapshotmanagement.engine.states.SMState -import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy -import org.opensearch.indexmanagement.snapshotmanagement.model.SMMetadata -import org.opensearch.cluster.service.ClusterService -import org.opensearch.common.settings.Settings import org.opensearch.cluster.health.ClusterHealthStatus import org.opensearch.cluster.health.ClusterStateHealth +import org.opensearch.cluster.service.ClusterService +import org.opensearch.common.settings.Settings +import org.opensearch.common.unit.TimeValue +import org.opensearch.core.rest.RestStatus import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.IndexManagementIndices +import org.opensearch.indexmanagement.snapshotmanagement.engine.SMStateMachine +import org.opensearch.indexmanagement.snapshotmanagement.engine.states.SMState import org.opensearch.indexmanagement.snapshotmanagement.engine.states.creationTransitions import org.opensearch.indexmanagement.snapshotmanagement.engine.states.deletionTransitions +import org.opensearch.indexmanagement.snapshotmanagement.model.SMMetadata +import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy import org.opensearch.indexmanagement.util.acquireLockForScheduledJob import org.opensearch.indexmanagement.util.releaseLockForScheduledJob import org.opensearch.jobscheduler.spi.JobExecutionContext import org.opensearch.jobscheduler.spi.ScheduledJobParameter import org.opensearch.jobscheduler.spi.ScheduledJobRunner -import org.opensearch.core.rest.RestStatus import org.opensearch.threadpool.ThreadPool import java.time.Instant.now object SMRunner : ScheduledJobRunner, CoroutineScope by CoroutineScope(SupervisorJob() + Dispatchers.Default + CoroutineName("snapshot_management_runner")) { - private val log = LogManager.getLogger(javaClass) private lateinit var client: Client @@ -65,9 +64,10 @@ object SMRunner : private const val MAX_NUMBER_OF_RETRIES = 3 private const val EXPONENTIAL_BACKOFF_MILLIS = 1000L - private val backoffPolicy: BackoffPolicy = BackoffPolicy.exponentialBackoff( - TimeValue.timeValueMillis(EXPONENTIAL_BACKOFF_MILLIS), MAX_NUMBER_OF_RETRIES - ) + private val backoffPolicy: BackoffPolicy = + BackoffPolicy.exponentialBackoff( + TimeValue.timeValueMillis(EXPONENTIAL_BACKOFF_MILLIS), MAX_NUMBER_OF_RETRIES, + ) override fun runJob(job: ScheduledJobParameter, context: JobExecutionContext) { log.debug("Snapshot management running job: {}", job) @@ -89,12 +89,13 @@ object SMRunner : } try { - 
var metadata = try { - client.getSMMetadata(job.id) - } catch (e: Exception) { - log.error("Failed to retrieve metadata before running ${job.policyName}", e) - return@launch - } + var metadata = + try { + client.getSMMetadata(job.id) + } catch (e: Exception) { + log.error("Failed to retrieve metadata before running ${job.policyName}", e) + return@launch + } if (metadata == null) { metadata = initMetadata(job) metadata ?: return@launch @@ -132,10 +133,11 @@ object SMRunner : log.info("Initializing metadata [$initMetadata] for [${job.policyName}].") try { // TODO SM more granular error checking - val res = client.indexMetadata( - initMetadata, job.id, create = true, - seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM - ) + val res = + client.indexMetadata( + initMetadata, job.id, create = true, + seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, + ) if (res.status() != RestStatus.CREATED) { log.error("Metadata initialization response status is ${res.status()}, expecting CREATED 201.") return null @@ -153,18 +155,20 @@ object SMRunner : id = smPolicyNameToMetadataDocId(smDocIdToPolicyName(job.id)), policySeqNo = job.seqNo, policyPrimaryTerm = job.primaryTerm, - creation = SMMetadata.WorkflowMetadata( + creation = + SMMetadata.WorkflowMetadata( SMState.CREATION_START, SMMetadata.Trigger( - time = job.creation.schedule.getNextExecutionTime(now) - ) + time = job.creation.schedule.getNextExecutionTime(now), + ), ), - deletion = job.deletion?.let { + deletion = + job.deletion?.let { SMMetadata.WorkflowMetadata( SMState.DELETION_START, SMMetadata.Trigger( - time = job.deletion.schedule.getNextExecutionTime(now) - ) + time = job.deletion.schedule.getNextExecutionTime(now), + ), ) }, ) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SMUtils.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SMUtils.kt index e3f381971..acb0936a0 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SMUtils.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SMUtils.kt @@ -4,6 +4,7 @@ */ @file:Suppress("TooManyFunctions") + package org.opensearch.indexmanagement.snapshotmanagement import org.apache.logging.log4j.LogManager @@ -17,15 +18,17 @@ import org.opensearch.action.get.GetResponse import org.opensearch.action.index.IndexRequest import org.opensearch.action.index.IndexResponse import org.opensearch.client.Client -import org.opensearch.core.common.Strings import org.opensearch.common.time.DateFormatter +import org.opensearch.common.time.DateFormatters import org.opensearch.common.unit.TimeValue import org.opensearch.common.xcontent.LoggingDeprecationHandler -import org.opensearch.core.xcontent.NamedXContentRegistry -import org.opensearch.core.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.common.Strings +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent import org.opensearch.index.IndexNotFoundException import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.opensearchapi.parseWithType @@ -39,11 +42,10 @@ import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy.Companio import 
org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy.Companion.SM_DOC_ID_SUFFIX import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy.Companion.SM_METADATA_ID_SUFFIX import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy.Companion.SM_TYPE -import org.opensearch.snapshots.SnapshotsService import org.opensearch.jobscheduler.spi.schedule.Schedule -import org.opensearch.core.rest.RestStatus import org.opensearch.snapshots.SnapshotInfo import org.opensearch.snapshots.SnapshotMissingException +import org.opensearch.snapshots.SnapshotsService import org.opensearch.transport.RemoteTransportException import java.time.Instant import java.time.Instant.now @@ -51,13 +53,15 @@ import java.time.ZoneId import java.time.format.DateTimeFormatter import java.time.temporal.ChronoUnit import java.util.Locale -import org.opensearch.common.time.DateFormatters private val log = LogManager.getLogger("o.i.s.SnapshotManagementHelper") fun smPolicyNameToDocId(policyName: String) = "$policyName$SM_DOC_ID_SUFFIX" + fun smDocIdToPolicyName(docId: String) = docId.substringBeforeLast(SM_DOC_ID_SUFFIX) + fun smPolicyNameToMetadataDocId(policyName: String) = "$policyName$SM_METADATA_ID_SUFFIX" + fun smMetadataDocIdToPolicyName(docId: String) = docId.substringBeforeLast(SM_METADATA_ID_SUFFIX) @Suppress("RethrowCaughtException", "ThrowsCount") @@ -128,12 +132,13 @@ suspend fun Client.indexMetadata( primaryTerm: Long, create: Boolean = false, ): IndexResponse { - val indexReq = IndexRequest(INDEX_MANAGEMENT_INDEX).create(create) - .id(smPolicyNameToMetadataDocId(smDocIdToPolicyName(id))) - .source(metadata.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) - .setIfSeqNo(seqNo) - .setIfPrimaryTerm(primaryTerm) - .routing(id) + val indexReq = + IndexRequest(INDEX_MANAGEMENT_INDEX).create(create) + .id(smPolicyNameToMetadataDocId(smDocIdToPolicyName(id))) + .source(metadata.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .setIfSeqNo(seqNo) + .setIfPrimaryTerm(primaryTerm) + .routing(id) return suspendUntil { index(indexReq, it) } } @@ -144,14 +149,15 @@ fun generateSnapshotName(policy: SMPolicy): String { if (dateFormat == null) { dateFormat = "yyyy-MM-dd'T'HH:mm:ss" } - val dateValue = if (policy.snapshotConfig[DATE_FORMAT_TIMEZONE_FIELD] != null) { - generateFormatDate( - dateFormat, - ZoneId.of(policy.snapshotConfig[DATE_FORMAT_TIMEZONE_FIELD] as String), - ) - } else { - generateFormatDate(dateFormat) - }.lowercase() + val dateValue = + if (policy.snapshotConfig[DATE_FORMAT_TIMEZONE_FIELD] != null) { + generateFormatDate( + dateFormat, + ZoneId.of(policy.snapshotConfig[DATE_FORMAT_TIMEZONE_FIELD] as String), + ) + } else { + generateFormatDate(dateFormat) + }.lowercase() result += "-$dateValue" return result + "-${getRandomString(RANDOM_STRING_LENGTH)}" } @@ -214,9 +220,10 @@ fun List.filterBySMPolicyInSnapshotMetadata(policyName: String): L */ suspend fun Client.getSnapshots(name: String, repo: String): List { try { - val req = GetSnapshotsRequest() - .snapshots(arrayOf(name)) - .repository(repo) + val req = + GetSnapshotsRequest() + .snapshots(arrayOf(name)) + .repository(repo) val res: GetSnapshotsResponse = admin().cluster().suspendUntil { getSnapshots(req, it) } return res.snapshots } catch (ex: RemoteTransportException) { @@ -237,23 +244,24 @@ suspend fun Client.getSnapshots( snapshotMissingMsg: String?, exceptionMsg: String, ): GetSnapshotsResult { - val snapshots = try { - getSnapshots( - name, - job.snapshotConfig["repository"] as 
String - ) - } catch (ex: SnapshotMissingException) { - snapshotMissingMsg?.let { log.warn(snapshotMissingMsg) } - return GetSnapshotsResult(false, emptyList(), metadataBuilder) - } catch (ex: Exception) { - log.error(exceptionMsg, ex) - metadataBuilder.setLatestExecution( - status = SMMetadata.LatestExecution.Status.RETRYING, - message = exceptionMsg, - cause = ex, - ) - return GetSnapshotsResult(true, emptyList(), metadataBuilder) - }.filterBySMPolicyInSnapshotMetadata(job.policyName) + val snapshots = + try { + getSnapshots( + name, + job.snapshotConfig["repository"] as String, + ) + } catch (ex: SnapshotMissingException) { + snapshotMissingMsg?.let { log.warn(snapshotMissingMsg) } + return GetSnapshotsResult(false, emptyList(), metadataBuilder) + } catch (ex: Exception) { + log.error(exceptionMsg, ex) + metadataBuilder.setLatestExecution( + status = SMMetadata.LatestExecution.Status.RETRYING, + message = exceptionMsg, + cause = ex, + ) + return GetSnapshotsResult(true, emptyList(), metadataBuilder) + }.filterBySMPolicyInSnapshotMetadata(job.policyName) return GetSnapshotsResult(false, snapshots, metadataBuilder) } @@ -269,7 +277,7 @@ fun tryUpdatingNextExecutionTime( nextTime: Instant, schedule: Schedule, workflowType: WorkflowType, - log: Logger + log: Logger, ): UpdateNextExecutionTimeResult { val now = now() return if (!now.isBefore(nextTime)) { @@ -322,7 +330,7 @@ fun validateSMPolicyName(policyName: String) { } if (!Strings.validFileName(policyName)) { errorMessages.add( - "Policy name must not contain the following characters " + Strings.INVALID_FILENAME_CHARS + "." + "Policy name must not contain the following characters " + Strings.INVALID_FILENAME_CHARS + ".", ) } if (errorMessages.isNotEmpty()) { @@ -352,13 +360,14 @@ fun timeLimitExceeded( } fun getTimeLimitExceededMessage(timeLimit: TimeValue, workflow: WorkflowType): String { - val workflowStr = when (workflow) { - WorkflowType.CREATION -> { - "creation" - } - WorkflowType.DELETION -> { - "deletion" + val workflowStr = + when (workflow) { + WorkflowType.CREATION -> { + "creation" + } + WorkflowType.DELETION -> { + "deletion" + } } - } return "Time limit $timeLimit exceeded during snapshot $workflowStr step" } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SnapshotManagementException.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SnapshotManagementException.kt index f553975e0..703d71e73 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SnapshotManagementException.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SnapshotManagementException.kt @@ -18,7 +18,6 @@ class SnapshotManagementException( cause: Throwable? = null, message: String? = null, ) : OpenSearchException(message, cause) { - enum class ExceptionKey { GENERAL, METADATA_INDEXING_FAILURE, @@ -33,11 +32,12 @@ class SnapshotManagementException( companion object { // Customized user facing exception messages - private val exceptionMsgMap: Map = mapOf( - ExceptionKey.GENERAL to "Caught exception while snapshot management runs. Please check the error log.", - ExceptionKey.METADATA_INDEXING_FAILURE to "Failed to update metadata.", - ExceptionKey.REPO_MISSING to "The repository provided is missing.", - ) + private val exceptionMsgMap: Map = + mapOf( + ExceptionKey.GENERAL to "Caught exception while snapshot management runs. 
Please check the error log.", + ExceptionKey.METADATA_INDEXING_FAILURE to "Failed to update metadata.", + ExceptionKey.REPO_MISSING to "The repository provided is missing.", + ) /** * Wrap an exception in SnapshotManagementException diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestBaseIndexSMPolicyHandler.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestBaseIndexSMPolicyHandler.kt index 32ca9f9c1..4344fc5c0 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestBaseIndexSMPolicyHandler.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestBaseIndexSMPolicyHandler.kt @@ -7,6 +7,7 @@ package org.opensearch.indexmanagement.snapshotmanagement.api.resthandler import org.opensearch.action.support.WriteRequest import org.opensearch.client.node.NodeClient +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.ToXContent import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.SM_POLICIES_URI @@ -23,26 +24,26 @@ import org.opensearch.rest.BaseRestHandler import org.opensearch.rest.BytesRestResponse import org.opensearch.rest.RestRequest import org.opensearch.rest.RestResponse -import org.opensearch.core.rest.RestStatus import org.opensearch.rest.action.RestResponseListener import java.time.Instant abstract class RestBaseIndexSMPolicyHandler : BaseRestHandler() { - protected fun prepareIndexRequest(request: RestRequest, client: NodeClient, create: Boolean): RestChannelConsumer { val policyName = request.getValidSMPolicyName() val seqNo = request.paramAsLong(IF_SEQ_NO, SequenceNumbers.UNASSIGNED_SEQ_NO) val primaryTerm = request.paramAsLong(IF_PRIMARY_TERM, SequenceNumbers.UNASSIGNED_PRIMARY_TERM) val xcp = request.contentParser() - val policy = SMPolicy.parse(xcp, id = smPolicyNameToDocId(policyName), seqNo = seqNo, primaryTerm = primaryTerm) - .copy(jobLastUpdateTime = Instant.now()) + val policy = + SMPolicy.parse(xcp, id = smPolicyNameToDocId(policyName), seqNo = seqNo, primaryTerm = primaryTerm) + .copy(jobLastUpdateTime = Instant.now()) - val refreshPolicy = if (request.hasParam(REFRESH)) { - WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) - } else { - WriteRequest.RefreshPolicy.IMMEDIATE - } + val refreshPolicy = + if (request.hasParam(REFRESH)) { + WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) + } else { + WriteRequest.RefreshPolicy.IMMEDIATE + } return RestChannelConsumer { client.execute( @@ -57,7 +58,7 @@ abstract class RestBaseIndexSMPolicyHandler : BaseRestHandler() { } return restResponse } - } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestCreateSMPolicyHandler.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestCreateSMPolicyHandler.kt index ee731d834..6a0a489d5 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestCreateSMPolicyHandler.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestCreateSMPolicyHandler.kt @@ -11,14 +11,13 @@ import org.opensearch.rest.RestHandler.Route import org.opensearch.rest.RestRequest class RestCreateSMPolicyHandler : RestBaseIndexSMPolicyHandler() { - override fun getName(): String { return "snapshot_management_create_policy_rest_handler" } override fun routes(): List { return listOf( - 
Route(RestRequest.Method.POST, "$SM_POLICIES_URI/{policyName}") + Route(RestRequest.Method.POST, "$SM_POLICIES_URI/{policyName}"), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestDeleteSMPolicyHandler.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestDeleteSMPolicyHandler.kt index 672a6ff73..781141090 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestDeleteSMPolicyHandler.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestDeleteSMPolicyHandler.kt @@ -18,14 +18,13 @@ import org.opensearch.rest.RestRequest import org.opensearch.rest.action.RestToXContentListener class RestDeleteSMPolicyHandler : BaseRestHandler() { - override fun getName(): String { return "snapshot_management_delete_policy_rest_handler" } override fun routes(): List { return listOf( - Route(RestRequest.Method.DELETE, "$SM_POLICIES_URI/{policyName}") + Route(RestRequest.Method.DELETE, "$SM_POLICIES_URI/{policyName}"), ) } @@ -35,17 +34,18 @@ class RestDeleteSMPolicyHandler : BaseRestHandler() { throw IllegalArgumentException("Missing policy name") } - val refreshPolicy = if (request.hasParam(REFRESH)) { - WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) - } else { - WriteRequest.RefreshPolicy.IMMEDIATE - } + val refreshPolicy = + if (request.hasParam(REFRESH)) { + WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) + } else { + WriteRequest.RefreshPolicy.IMMEDIATE + } return RestChannelConsumer { client.execute( SMActions.DELETE_SM_POLICY_ACTION_TYPE, DeleteSMPolicyRequest(smPolicyNameToDocId(policyName)).setRefreshPolicy(refreshPolicy), - RestToXContentListener(it) + RestToXContentListener(it), ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestExplainSMPolicyHandler.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestExplainSMPolicyHandler.kt index deff23cd7..0bd132997 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestExplainSMPolicyHandler.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestExplainSMPolicyHandler.kt @@ -18,7 +18,6 @@ import org.opensearch.rest.RestRequest.Method.GET import org.opensearch.rest.action.RestToXContentListener class RestExplainSMPolicyHandler : BaseRestHandler() { - private val log = LogManager.getLogger(RestExplainSMPolicyHandler::class.java) override fun getName(): String { @@ -27,7 +26,7 @@ class RestExplainSMPolicyHandler : BaseRestHandler() { override fun routes(): List { return listOf( - Route(GET, "$SM_POLICIES_URI/{policyName}/_explain") + Route(GET, "$SM_POLICIES_URI/{policyName}/_explain"), ) } @@ -40,7 +39,7 @@ class RestExplainSMPolicyHandler : BaseRestHandler() { client.execute( SMActions.EXPLAIN_SM_POLICY_ACTION_TYPE, ExplainSMPolicyRequest(policyNames), - RestToXContentListener(it) + RestToXContentListener(it), ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestGetSMPolicyHandler.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestGetSMPolicyHandler.kt index 8149c2da7..f987b1c17 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestGetSMPolicyHandler.kt +++ 
b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestGetSMPolicyHandler.kt @@ -7,8 +7,8 @@ package org.opensearch.indexmanagement.snapshotmanagement.api.resthandler import org.opensearch.client.node.NodeClient import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.SM_POLICIES_URI -import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions.GET_SM_POLICY_ACTION_TYPE import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions.GET_SM_POLICIES_ACTION_TYPE +import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions.GET_SM_POLICY_ACTION_TYPE import org.opensearch.indexmanagement.snapshotmanagement.api.transport.get.GetSMPoliciesRequest import org.opensearch.indexmanagement.snapshotmanagement.api.transport.get.GetSMPolicyRequest import org.opensearch.indexmanagement.snapshotmanagement.smPolicyNameToDocId @@ -21,7 +21,6 @@ import org.opensearch.rest.RestRequest.Method.GET import org.opensearch.rest.action.RestToXContentListener class RestGetSMPolicyHandler : BaseRestHandler() { - override fun getName(): String { return "snapshot_management_get_policy_rest_handler" } @@ -29,7 +28,7 @@ class RestGetSMPolicyHandler : BaseRestHandler() { override fun routes(): List { return listOf( Route(GET, "$SM_POLICIES_URI/{policyName}"), - Route(GET, "$SM_POLICIES_URI/") + Route(GET, "$SM_POLICIES_URI/"), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestStartSMPolicyHandler.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestStartSMPolicyHandler.kt index a5ea32102..9b6b31e6a 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestStartSMPolicyHandler.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestStartSMPolicyHandler.kt @@ -18,7 +18,6 @@ import org.opensearch.rest.RestRequest import org.opensearch.rest.action.RestToXContentListener class RestStartSMPolicyHandler : BaseRestHandler() { - private val log = LogManager.getLogger(RestStartSMPolicyHandler::class.java) override fun getName(): String { @@ -27,7 +26,7 @@ class RestStartSMPolicyHandler : BaseRestHandler() { override fun routes(): List { return listOf( - Route(RestRequest.Method.POST, "$SM_POLICIES_URI/{policyName}/_start") + Route(RestRequest.Method.POST, "$SM_POLICIES_URI/{policyName}/_start"), ) } @@ -39,7 +38,7 @@ class RestStartSMPolicyHandler : BaseRestHandler() { return RestChannelConsumer { client.execute( SMActions.START_SM_POLICY_ACTION_TYPE, - indexReq, RestToXContentListener(it) + indexReq, RestToXContentListener(it), ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestStopSMPolicyHandler.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestStopSMPolicyHandler.kt index 5bd29a1d4..d42c38a74 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestStopSMPolicyHandler.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestStopSMPolicyHandler.kt @@ -18,7 +18,6 @@ import org.opensearch.rest.RestRequest import org.opensearch.rest.action.RestToXContentListener class RestStopSMPolicyHandler : BaseRestHandler() { - private val log = LogManager.getLogger(RestStopSMPolicyHandler::class.java) override fun getName(): String { @@ -27,7 +26,7 @@ class RestStopSMPolicyHandler : BaseRestHandler() { 
override fun routes(): List { return listOf( - Route(RestRequest.Method.POST, "$SM_POLICIES_URI/{policyName}/_stop") + Route(RestRequest.Method.POST, "$SM_POLICIES_URI/{policyName}/_stop"), ) } @@ -39,7 +38,7 @@ class RestStopSMPolicyHandler : BaseRestHandler() { return RestChannelConsumer { client.execute( SMActions.STOP_SM_POLICY_ACTION_TYPE, - indexReq, RestToXContentListener(it) + indexReq, RestToXContentListener(it), ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestUpdateSMPolicyHandler.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestUpdateSMPolicyHandler.kt index 3c13076e8..1a7c2c9da 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestUpdateSMPolicyHandler.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/resthandler/RestUpdateSMPolicyHandler.kt @@ -11,14 +11,13 @@ import org.opensearch.rest.RestHandler.Route import org.opensearch.rest.RestRequest class RestUpdateSMPolicyHandler : RestBaseIndexSMPolicyHandler() { - override fun getName(): String { return "snapshot_management_update_policy_rest_handler" } override fun routes(): List { return listOf( - Route(RestRequest.Method.PUT, "$SM_POLICIES_URI/{policyName}") + Route(RestRequest.Method.PUT, "$SM_POLICIES_URI/{policyName}"), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/BaseTransportAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/BaseTransportAction.kt index 2adcbdc0f..98435d834 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/BaseTransportAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/BaseTransportAction.kt @@ -10,20 +10,20 @@ import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.launch import org.apache.logging.log4j.LogManager import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.ActionRequest -import org.opensearch.core.action.ActionResponse import org.opensearch.action.support.ActionFilters import org.opensearch.action.support.HandledTransportAction import org.opensearch.client.Client -import org.opensearch.core.common.io.stream.Writeable import org.opensearch.common.util.concurrent.ThreadContext.StoredContext import org.opensearch.commons.ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT import org.opensearch.commons.authuser.User +import org.opensearch.core.action.ActionListener +import org.opensearch.core.action.ActionResponse +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.rest.RestStatus import org.opensearch.index.engine.VersionConflictEngineException import org.opensearch.indexmanagement.util.IndexManagementException import org.opensearch.indexmanagement.util.SecurityUtils -import org.opensearch.core.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService @@ -34,20 +34,19 @@ abstract class BaseTransportAction, ) : HandledTransportAction( - name, transportService, actionFilters, requestReader + name, transportService, actionFilters, requestReader, ) { - private val log = LogManager.getLogger(javaClass) private val coroutineScope: CoroutineScope = CoroutineScope(Dispatchers.IO) override fun doExecute( task: Task, request: Request, - listener: ActionListener + listener: ActionListener, ) { 
log.debug( "user and roles string from thread context: " + - client.threadPool().threadContext.getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT) + client.threadPool().threadContext.getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT), ) val user: User? = SecurityUtils.buildUser(client.threadPool().threadContext) coroutineScope.launch { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/SMActions.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/SMActions.kt index f694c269b..7e8e54105 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/SMActions.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/SMActions.kt @@ -10,13 +10,13 @@ import org.opensearch.action.delete.DeleteResponse import org.opensearch.action.support.master.AcknowledgedResponse import org.opensearch.indexmanagement.snapshotmanagement.api.transport.delete.TransportDeleteSMPolicyAction import org.opensearch.indexmanagement.snapshotmanagement.api.transport.explain.ExplainSMPolicyResponse +import org.opensearch.indexmanagement.snapshotmanagement.api.transport.explain.TransportExplainSMAction import org.opensearch.indexmanagement.snapshotmanagement.api.transport.get.GetSMPoliciesResponse import org.opensearch.indexmanagement.snapshotmanagement.api.transport.get.GetSMPolicyResponse +import org.opensearch.indexmanagement.snapshotmanagement.api.transport.get.TransportGetSMPoliciesAction import org.opensearch.indexmanagement.snapshotmanagement.api.transport.get.TransportGetSMPolicyAction import org.opensearch.indexmanagement.snapshotmanagement.api.transport.index.IndexSMPolicyResponse import org.opensearch.indexmanagement.snapshotmanagement.api.transport.index.TransportIndexSMPolicyAction -import org.opensearch.indexmanagement.snapshotmanagement.api.transport.get.TransportGetSMPoliciesAction -import org.opensearch.indexmanagement.snapshotmanagement.api.transport.explain.TransportExplainSMAction import org.opensearch.indexmanagement.snapshotmanagement.api.transport.start.TransportStartSMAction import org.opensearch.indexmanagement.snapshotmanagement.api.transport.stop.TransportStopSMAction diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/delete/TransportDeleteSMPolicyAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/delete/TransportDeleteSMPolicyAction.kt index 03853b603..84d320123 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/delete/TransportDeleteSMPolicyAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/delete/TransportDeleteSMPolicyAction.kt @@ -15,6 +15,7 @@ import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings import org.opensearch.common.util.concurrent.ThreadContext import org.opensearch.commons.authuser.User +import org.opensearch.core.rest.RestStatus import org.opensearch.index.engine.VersionConflictEngineException import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.opensearchapi.suspendUntil @@ -23,19 +24,19 @@ import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions import org.opensearch.indexmanagement.snapshotmanagement.getSMPolicy import 
org.opensearch.indexmanagement.snapshotmanagement.settings.SnapshotManagementSettings.Companion.FILTER_BY_BACKEND_ROLES import org.opensearch.indexmanagement.util.SecurityUtils.Companion.verifyUserHasPermissionForResource -import org.opensearch.core.rest.RestStatus import org.opensearch.transport.TransportService -class TransportDeleteSMPolicyAction @Inject constructor( +class TransportDeleteSMPolicyAction +@Inject +constructor( client: Client, transportService: TransportService, actionFilters: ActionFilters, val clusterService: ClusterService, val settings: Settings, ) : BaseTransportAction( - DELETE_SM_POLICY_ACTION_NAME, transportService, client, actionFilters, ::DeleteSMPolicyRequest + DELETE_SM_POLICY_ACTION_NAME, transportService, client, actionFilters, ::DeleteSMPolicyRequest, ) { - private val log = LogManager.getLogger(javaClass) @Volatile private var filterByEnabled = FILTER_BY_BACKEND_ROLES.get(settings) @@ -49,7 +50,7 @@ class TransportDeleteSMPolicyAction @Inject constructor( override suspend fun executeRequest( request: DeleteSMPolicyRequest, user: User?, - threadContext: ThreadContext.StoredContext + threadContext: ThreadContext.StoredContext, ): DeleteResponse { val smPolicy = client.getSMPolicy(request.id()) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/explain/ExplainSMPolicyRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/explain/ExplainSMPolicyRequest.kt index 4e2d6cd7b..0b757a31c 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/explain/ExplainSMPolicyRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/explain/ExplainSMPolicyRequest.kt @@ -11,7 +11,7 @@ import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput class ExplainSMPolicyRequest( - val policyNames: Array + val policyNames: Array, ) : ActionRequest() { override fun validate(): ActionRequestValidationException? 
{ return null diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/explain/ExplainSMPolicyResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/explain/ExplainSMPolicyResponse.kt index 1bc775c6d..532cb05f8 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/explain/ExplainSMPolicyResponse.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/explain/ExplainSMPolicyResponse.kt @@ -29,14 +29,15 @@ class ExplainSMPolicyResponse : ActionResponse, ToXContentObject { @Throws(IOException::class) constructor(sin: StreamInput) : this( - policiesToExplain = sin.let { + policiesToExplain = + sin.let { val policiesToExplain = mutableMapOf() val size = it.readVInt() repeat(size) { _ -> policiesToExplain[it.readString()] = sin.readOptionalValue(::ExplainSMPolicy) } policiesToExplain.toMap() - } + }, ) @Throws(IOException::class) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/explain/TransportExplainSMAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/explain/TransportExplainSMAction.kt index dcd1e5fa1..2760d96a8 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/explain/TransportExplainSMAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/explain/TransportExplainSMAction.kt @@ -15,9 +15,10 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings import org.opensearch.common.util.concurrent.ThreadContext +import org.opensearch.commons.authuser.User +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.commons.authuser.User import org.opensearch.index.IndexNotFoundException import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.ExistsQueryBuilder @@ -39,21 +40,21 @@ import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy.Companio import org.opensearch.indexmanagement.snapshotmanagement.settings.SnapshotManagementSettings.Companion.FILTER_BY_BACKEND_ROLES import org.opensearch.indexmanagement.snapshotmanagement.smMetadataDocIdToPolicyName import org.opensearch.indexmanagement.util.SecurityUtils -import org.opensearch.core.rest.RestStatus import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.search.fetch.subphase.FetchSourceContext import org.opensearch.transport.TransportService -class TransportExplainSMAction @Inject constructor( +class TransportExplainSMAction +@Inject +constructor( client: Client, transportService: TransportService, actionFilters: ActionFilters, val clusterService: ClusterService, val settings: Settings, ) : BaseTransportAction( - SMActions.EXPLAIN_SM_POLICY_ACTION_NAME, transportService, client, actionFilters, ::ExplainSMPolicyRequest + SMActions.EXPLAIN_SM_POLICY_ACTION_NAME, transportService, client, actionFilters, ::ExplainSMPolicyRequest, ) { - private val log = LogManager.getLogger(javaClass) @Volatile private var filterByEnabled = FILTER_BY_BACKEND_ROLES.get(settings) @@ -67,7 +68,7 @@ class TransportExplainSMAction @Inject constructor( override suspend fun executeRequest( request: ExplainSMPolicyRequest, user: User?, - threadContext: ThreadContext.StoredContext 
+ threadContext: ThreadContext.StoredContext, ): ExplainSMPolicyResponse { val policyNames = request.policyNames.toSet() @@ -80,14 +81,15 @@ class TransportExplainSMAction @Inject constructor( private suspend fun getPolicyEnabledStatus(policyNames: Set, user: User?): Map { // Search the config index for SM policies val searchRequest = getPolicyEnabledSearchRequest(policyNames, user) - val searchResponse: SearchResponse = try { - client.suspendUntil { search(searchRequest, it) } - } catch (e: IndexNotFoundException) { - throw OpenSearchStatusException("Snapshot management config index not found", RestStatus.NOT_FOUND) - } catch (e: Exception) { - log.error("Failed to search for snapshot management policy", e) - throw OpenSearchStatusException("Failed to search for snapshot management policy", RestStatus.INTERNAL_SERVER_ERROR) - } + val searchResponse: SearchResponse = + try { + client.suspendUntil { search(searchRequest, it) } + } catch (e: IndexNotFoundException) { + throw OpenSearchStatusException("Snapshot management config index not found", RestStatus.NOT_FOUND) + } catch (e: Exception) { + log.error("Failed to search for snapshot management policy", e) + throw OpenSearchStatusException("Failed to search for snapshot management policy", RestStatus.INTERNAL_SERVER_ERROR) + } // Parse each returned policy to get the job enabled status return try { @@ -107,10 +109,11 @@ class TransportExplainSMAction @Inject constructor( SecurityUtils.addUserFilter(user, queryBuilder, filterByEnabled, "sm_policy.user") // Only return the name and enabled field - val includes = arrayOf( - "${SMPolicy.SM_TYPE}.$NAME_FIELD", - "${SMPolicy.SM_TYPE}.$ENABLED_FIELD" - ) + val includes = + arrayOf( + "${SMPolicy.SM_TYPE}.$NAME_FIELD", + "${SMPolicy.SM_TYPE}.$ENABLED_FIELD", + ) val fetchSourceContext = FetchSourceContext(true, includes, arrayOf()) val searchSourceBuilder = SearchSourceBuilder().size(MAX_HITS).query(queryBuilder).fetchSource(fetchSourceContext) return SearchRequest(INDEX_MANAGEMENT_INDEX).source(searchSourceBuilder) @@ -133,11 +136,12 @@ class TransportExplainSMAction @Inject constructor( private suspend fun getSMMetadata(policyNames: Set): Map { val searchRequest = getSMMetadataSearchRequest(policyNames) - val searchResponse: SearchResponse = try { - client.suspendUntil { search(searchRequest, it) } - } catch (e: IndexNotFoundException) { - throw OpenSearchStatusException("Snapshot management config index not found", RestStatus.NOT_FOUND) - } + val searchResponse: SearchResponse = + try { + client.suspendUntil { search(searchRequest, it) } + } catch (e: IndexNotFoundException) { + throw OpenSearchStatusException("Snapshot management config index not found", RestStatus.NOT_FOUND) + } return try { searchResponse.hits.hits.associate { @@ -183,9 +187,10 @@ class TransportExplainSMAction @Inject constructor( } private fun buildExplainResponse(namesToEnabled: Map, namesToMetadata: Map): ExplainSMPolicyResponse { - val policiesToExplain = namesToEnabled.entries.associate { (policyName, enabled) -> - policyName to ExplainSMPolicy(namesToMetadata[policyName], enabled) - } + val policiesToExplain = + namesToEnabled.entries.associate { (policyName, enabled) -> + policyName to ExplainSMPolicy(namesToMetadata[policyName], enabled) + } log.debug("Explain response: $policiesToExplain") return ExplainSMPolicyResponse(policiesToExplain) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/GetSMPoliciesRequest.kt 
b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/GetSMPoliciesRequest.kt
index 5464556d9..c86fcd49c 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/GetSMPoliciesRequest.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/GetSMPoliciesRequest.kt
@@ -15,7 +15,7 @@ import java.io.IOException
 class GetSMPoliciesRequest(val searchParams: SearchParams) : ActionRequest() {
     @Throws(IOException::class)
     constructor(sin: StreamInput) : this(
-        searchParams = SearchParams(sin)
+        searchParams = SearchParams(sin),
     )
 
     @Throws(IOException::class)
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/GetSMPoliciesResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/GetSMPoliciesResponse.kt
index d49b86917..64f328f4b 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/GetSMPoliciesResponse.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/GetSMPoliciesResponse.kt
@@ -20,12 +20,11 @@ import org.opensearch.indexmanagement.util._SEQ_NO
 // totalPolicies may differ from the length of the policies field if the size parameter is introduced
 class GetSMPoliciesResponse(
     val policies: List<SMPolicy>,
-    val totalPolicies: Long
+    val totalPolicies: Long,
 ) : ActionResponse(), ToXContentObject {
-
     constructor(sin: StreamInput) : this(
         policies = sin.readList(::SMPolicy),
-        totalPolicies = sin.readLong()
+        totalPolicies = sin.readLong(),
     )
 
     override fun writeTo(out: StreamOutput) {
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/GetSMPolicyRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/GetSMPolicyRequest.kt
index 439d93f2e..ee3c2e396 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/GetSMPolicyRequest.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/GetSMPolicyRequest.kt
@@ -11,7 +11,7 @@ import org.opensearch.core.common.io.stream.StreamInput
 import org.opensearch.core.common.io.stream.StreamOutput
 
 class GetSMPolicyRequest(
-    val policyID: String
+    val policyID: String,
 ) : ActionRequest() {
     override fun validate(): ActionRequestValidationException? {
         return null
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/GetSMPolicyResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/GetSMPolicyResponse.kt
index e02acfd7b..bb68b8ff2 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/GetSMPolicyResponse.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/GetSMPolicyResponse.kt
@@ -24,15 +24,14 @@ class GetSMPolicyResponse(
     val version: Long,
     val seqNo: Long,
     val primaryTerm: Long,
-    val policy: SMPolicy
+    val policy: SMPolicy,
 ) : ActionResponse(), ToXContentObject {
-
     constructor(sin: StreamInput) : this(
         id = sin.readString(),
         version = sin.readLong(),
         seqNo = sin.readLong(),
         primaryTerm = sin.readLong(),
-        policy = SMPolicy(sin)
+        policy = SMPolicy(sin),
     )
 
     override fun writeTo(out: StreamOutput) {
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/TransportGetSMPoliciesAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/TransportGetSMPoliciesAction.kt
index bcc927a09..91d747830 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/TransportGetSMPoliciesAction.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/TransportGetSMPoliciesAction.kt
@@ -6,6 +6,7 @@
 package org.opensearch.indexmanagement.snapshotmanagement.api.transport.get
 
 import org.apache.logging.log4j.LogManager
+import org.opensearch.ExceptionsHelper
 import org.opensearch.OpenSearchStatusException
 import org.opensearch.action.search.SearchRequest
 import org.opensearch.action.search.SearchResponse
@@ -16,8 +17,8 @@ import org.opensearch.common.inject.Inject
 import org.opensearch.common.settings.Settings
 import org.opensearch.common.util.concurrent.ThreadContext
 import org.opensearch.commons.authuser.User
+import org.opensearch.core.rest.RestStatus
 import org.opensearch.index.IndexNotFoundException
-import org.opensearch.ExceptionsHelper
 import org.opensearch.index.query.BoolQueryBuilder
 import org.opensearch.index.query.ExistsQueryBuilder
 import org.opensearch.index.query.Operator
@@ -33,20 +34,20 @@ import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy
 import org.opensearch.indexmanagement.snapshotmanagement.settings.SnapshotManagementSettings.Companion.FILTER_BY_BACKEND_ROLES
 import org.opensearch.indexmanagement.snapshotmanagement.util.SM_POLICY_NAME_KEYWORD
 import org.opensearch.indexmanagement.util.SecurityUtils
-import org.opensearch.core.rest.RestStatus
 import org.opensearch.search.builder.SearchSourceBuilder
 import org.opensearch.transport.TransportService
 
-class TransportGetSMPoliciesAction @Inject constructor(
+class TransportGetSMPoliciesAction
+@Inject
+constructor(
     client: Client,
     transportService: TransportService,
     actionFilters: ActionFilters,
     val clusterService: ClusterService,
     val settings: Settings,
 ) : BaseTransportAction<GetSMPoliciesRequest, GetSMPoliciesResponse>(
-    GET_SM_POLICIES_ACTION_NAME, transportService, client, actionFilters, ::GetSMPoliciesRequest
+    GET_SM_POLICIES_ACTION_NAME, transportService, client, actionFilters, ::GetSMPoliciesRequest,
 ) {
-
     private val log = LogManager.getLogger(javaClass)
 
     @Volatile private var filterByEnabled = FILTER_BY_BACKEND_ROLES.get(settings)
@@ -60,7 +61,7 @@ class TransportGetSMPoliciesAction @Inject constructor(
     override suspend fun executeRequest(
         request: GetSMPoliciesRequest,
         user: User?,
-
threadContext: ThreadContext.StoredContext + threadContext: ThreadContext.StoredContext, ): GetSMPoliciesResponse { val searchParams = request.searchParams val (policies, totalPoliciesCount) = getAllPolicies(searchParams, user) @@ -87,23 +88,25 @@ class TransportGetSMPoliciesAction @Inject constructor( private fun getAllPoliciesRequest(searchParams: SearchParams, user: User?): SearchRequest { val sortBuilder = searchParams.getSortBuilder() - val queryBuilder = BoolQueryBuilder() - .filter(ExistsQueryBuilder(SMPolicy.SM_TYPE)) - .must( - QueryBuilders.queryStringQuery(searchParams.queryString) - .defaultOperator(Operator.AND) - .field(SM_POLICY_NAME_KEYWORD) - ) + val queryBuilder = + BoolQueryBuilder() + .filter(ExistsQueryBuilder(SMPolicy.SM_TYPE)) + .must( + QueryBuilders.queryStringQuery(searchParams.queryString) + .defaultOperator(Operator.AND) + .field(SM_POLICY_NAME_KEYWORD), + ) // Add user filter if enabled SecurityUtils.addUserFilter(user, queryBuilder, filterByEnabled, "sm_policy.user") - val searchSourceBuilder = SearchSourceBuilder() - .size(searchParams.size) - .from(searchParams.from) - .sort(sortBuilder) - .query(queryBuilder) - .seqNoAndPrimaryTerm(true) + val searchSourceBuilder = + SearchSourceBuilder() + .size(searchParams.size) + .from(searchParams.from) + .sort(sortBuilder) + .query(queryBuilder) + .seqNoAndPrimaryTerm(true) return SearchRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX).source(searchSourceBuilder) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/TransportGetSMPolicyAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/TransportGetSMPolicyAction.kt index dbfc4f857..323e8dfa4 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/TransportGetSMPolicyAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/get/TransportGetSMPolicyAction.kt @@ -16,6 +16,7 @@ import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings import org.opensearch.common.util.concurrent.ThreadContext import org.opensearch.commons.authuser.User +import org.opensearch.core.rest.RestStatus import org.opensearch.index.IndexNotFoundException import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.opensearchapi.suspendUntil @@ -24,19 +25,19 @@ import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions import org.opensearch.indexmanagement.snapshotmanagement.parseSMPolicy import org.opensearch.indexmanagement.snapshotmanagement.settings.SnapshotManagementSettings.Companion.FILTER_BY_BACKEND_ROLES import org.opensearch.indexmanagement.util.SecurityUtils.Companion.verifyUserHasPermissionForResource -import org.opensearch.core.rest.RestStatus import org.opensearch.transport.TransportService -class TransportGetSMPolicyAction @Inject constructor( +class TransportGetSMPolicyAction +@Inject +constructor( client: Client, transportService: TransportService, actionFilters: ActionFilters, val clusterService: ClusterService, val settings: Settings, ) : BaseTransportAction( - GET_SM_POLICY_ACTION_NAME, transportService, client, actionFilters, ::GetSMPolicyRequest + GET_SM_POLICY_ACTION_NAME, transportService, client, actionFilters, ::GetSMPolicyRequest, ) { - private val log = LogManager.getLogger(javaClass) @Volatile private var filterByEnabled = FILTER_BY_BACKEND_ROLES.get(settings) @@ -50,23 +51,25 @@ class 
TransportGetSMPolicyAction @Inject constructor( override suspend fun executeRequest( request: GetSMPolicyRequest, user: User?, - threadContext: ThreadContext.StoredContext + threadContext: ThreadContext.StoredContext, ): GetSMPolicyResponse { val getRequest = GetRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, request.policyID) - val getResponse: GetResponse = try { - client.suspendUntil { get(getRequest, it) } - } catch (e: IndexNotFoundException) { - throw OpenSearchStatusException("Snapshot management config index not found", RestStatus.NOT_FOUND) - } + val getResponse: GetResponse = + try { + client.suspendUntil { get(getRequest, it) } + } catch (e: IndexNotFoundException) { + throw OpenSearchStatusException("Snapshot management config index not found", RestStatus.NOT_FOUND) + } if (!getResponse.isExists) { throw OpenSearchStatusException("Snapshot management policy not found", RestStatus.NOT_FOUND) } - val smPolicy = try { - parseSMPolicy(getResponse) - } catch (e: IllegalArgumentException) { - log.error("Error while parsing snapshot management policy ${request.policyID}", e) - throw OpenSearchStatusException("Snapshot management policy not found", RestStatus.INTERNAL_SERVER_ERROR) - } + val smPolicy = + try { + parseSMPolicy(getResponse) + } catch (e: IllegalArgumentException) { + log.error("Error while parsing snapshot management policy ${request.policyID}", e) + throw OpenSearchStatusException("Snapshot management policy not found", RestStatus.INTERNAL_SERVER_ERROR) + } // Check if the requested user has permission on the resource, throwing an exception if the user does not verifyUserHasPermissionForResource(user, smPolicy.user, filterByEnabled, "snapshot management policy", smPolicy.policyName) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/index/IndexSMPolicyRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/index/IndexSMPolicyRequest.kt index 03662cf14..a57337af9 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/index/IndexSMPolicyRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/index/IndexSMPolicyRequest.kt @@ -17,13 +17,12 @@ import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy import java.time.Instant.now class IndexSMPolicyRequest : IndexRequest { - var policy: SMPolicy constructor( policy: SMPolicy, create: Boolean, - refreshPolicy: WriteRequest.RefreshPolicy + refreshPolicy: WriteRequest.RefreshPolicy, ) : super() { this.policy = policy this.create(create) @@ -36,8 +35,9 @@ class IndexSMPolicyRequest : IndexRequest { override fun validate(): ActionRequestValidationException? { var validationException: ActionRequestValidationException? 
= null - val invalidSeqNumPrimaryTerm = this.ifSeqNo() == SequenceNumbers.UNASSIGNED_SEQ_NO || - this.ifPrimaryTerm() == SequenceNumbers.UNASSIGNED_PRIMARY_TERM + val invalidSeqNumPrimaryTerm = + this.ifSeqNo() == SequenceNumbers.UNASSIGNED_SEQ_NO || + this.ifPrimaryTerm() == SequenceNumbers.UNASSIGNED_PRIMARY_TERM if (this.opType() != DocWriteRequest.OpType.CREATE && invalidSeqNumPrimaryTerm) { validationException = ValidateActions.addValidationError(SEQ_NUM_PRIMARY_TERM_UPDATE_ERROR, validationException) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/index/IndexSMPolicyResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/index/IndexSMPolicyResponse.kt index 3b4fe8331..d059b7fa6 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/index/IndexSMPolicyResponse.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/index/IndexSMPolicyResponse.kt @@ -8,6 +8,7 @@ package org.opensearch.indexmanagement.snapshotmanagement.api.transport.index import org.opensearch.core.action.ActionResponse import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentObject import org.opensearch.core.xcontent.XContentBuilder @@ -18,7 +19,6 @@ import org.opensearch.indexmanagement.util._ID import org.opensearch.indexmanagement.util._PRIMARY_TERM import org.opensearch.indexmanagement.util._SEQ_NO import org.opensearch.indexmanagement.util._VERSION -import org.opensearch.core.rest.RestStatus class IndexSMPolicyResponse( val id: String, @@ -26,16 +26,15 @@ class IndexSMPolicyResponse( val seqNo: Long, val primaryTerm: Long, val policy: SMPolicy, - val status: RestStatus + val status: RestStatus, ) : ActionResponse(), ToXContentObject { - constructor(sin: StreamInput) : this( id = sin.readString(), version = sin.readLong(), seqNo = sin.readLong(), primaryTerm = sin.readLong(), policy = SMPolicy(sin), - status = sin.readEnum(RestStatus::class.java) + status = sin.readEnum(RestStatus::class.java), ) override fun writeTo(out: StreamOutput) { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/index/TransportIndexSMPolicyAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/index/TransportIndexSMPolicyAction.kt index d26e082e3..b445cf879 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/index/TransportIndexSMPolicyAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/index/TransportIndexSMPolicyAction.kt @@ -13,9 +13,9 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings import org.opensearch.common.util.concurrent.ThreadContext -import org.opensearch.core.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory import org.opensearch.commons.authuser.User +import org.opensearch.core.xcontent.ToXContent import org.opensearch.indexmanagement.IndexManagementIndices import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.opensearchapi.suspendUntil @@ -26,7 +26,9 @@ import org.opensearch.indexmanagement.util.IndexUtils import 
org.opensearch.indexmanagement.util.SecurityUtils import org.opensearch.transport.TransportService -class TransportIndexSMPolicyAction @Inject constructor( +class TransportIndexSMPolicyAction +@Inject +constructor( client: Client, transportService: TransportService, private val indexManagementIndices: IndexManagementIndices, @@ -34,9 +36,8 @@ class TransportIndexSMPolicyAction @Inject constructor( val clusterService: ClusterService, val settings: Settings, ) : BaseTransportAction( - INDEX_SM_POLICY_ACTION_NAME, transportService, client, actionFilters, ::IndexSMPolicyRequest + INDEX_SM_POLICY_ACTION_NAME, transportService, client, actionFilters, ::IndexSMPolicyRequest, ) { - private val log = LogManager.getLogger(javaClass) @Volatile private var filterByEnabled = FILTER_BY_BACKEND_ROLES.get(settings) @@ -50,7 +51,7 @@ class TransportIndexSMPolicyAction @Inject constructor( override suspend fun executeRequest( request: IndexSMPolicyRequest, user: User?, - threadContext: ThreadContext.StoredContext + threadContext: ThreadContext.StoredContext, ): IndexSMPolicyResponse { // If filterBy is enabled and security is disabled or if filter by is enabled and backend role are empty an exception will be thrown SecurityUtils.validateUserConfiguration(user, filterByEnabled) @@ -63,10 +64,11 @@ class TransportIndexSMPolicyAction @Inject constructor( private suspend fun indexSMPolicy(request: IndexSMPolicyRequest, user: User?): IndexSMPolicyResponse { val policy = request.policy.copy(schemaVersion = IndexUtils.indexManagementConfigSchemaVersion, user = user) - val indexReq = request.index(INDEX_MANAGEMENT_INDEX) - .source(policy.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) - .id(policy.id) - .routing(policy.id) // by default routed by id + val indexReq = + request.index(INDEX_MANAGEMENT_INDEX) + .source(policy.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .id(policy.id) + .routing(policy.id) // by default routed by id val indexRes: IndexResponse = client.suspendUntil { index(indexReq, it) } return IndexSMPolicyResponse(indexRes.id, indexRes.version, indexRes.seqNo, indexRes.primaryTerm, policy, indexRes.status()) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/start/TransportStartSMAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/start/TransportStartSMAction.kt index ac802a290..ac5e58ab5 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/start/TransportStartSMAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/start/TransportStartSMAction.kt @@ -18,6 +18,7 @@ import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings import org.opensearch.common.util.concurrent.ThreadContext import org.opensearch.commons.authuser.User +import org.opensearch.core.rest.RestStatus import org.opensearch.index.engine.VersionConflictEngineException import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.opensearchapi.suspendUntil @@ -27,20 +28,20 @@ import org.opensearch.indexmanagement.snapshotmanagement.getSMPolicy import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy import org.opensearch.indexmanagement.snapshotmanagement.settings.SnapshotManagementSettings.Companion.FILTER_BY_BACKEND_ROLES import 
org.opensearch.indexmanagement.util.SecurityUtils.Companion.verifyUserHasPermissionForResource -import org.opensearch.core.rest.RestStatus import org.opensearch.transport.TransportService import java.time.Instant -class TransportStartSMAction @Inject constructor( +class TransportStartSMAction +@Inject +constructor( client: Client, transportService: TransportService, actionFilters: ActionFilters, val clusterService: ClusterService, val settings: Settings, ) : BaseTransportAction( - SMActions.START_SM_POLICY_ACTION_NAME, transportService, client, actionFilters, ::StartSMRequest + SMActions.START_SM_POLICY_ACTION_NAME, transportService, client, actionFilters, ::StartSMRequest, ) { - private val log = LogManager.getLogger(javaClass) @Volatile private var filterByEnabled = FILTER_BY_BACKEND_ROLES.get(settings) @@ -54,7 +55,7 @@ class TransportStartSMAction @Inject constructor( override suspend fun executeRequest( request: StartSMRequest, user: User?, - threadContext: ThreadContext.StoredContext + threadContext: ThreadContext.StoredContext, ): AcknowledgedResponse { val smPolicy = client.getSMPolicy(request.id()) @@ -72,22 +73,24 @@ class TransportStartSMAction @Inject constructor( val now = Instant.now().toEpochMilli() updateRequest.index(INDEX_MANAGEMENT_INDEX).doc( mapOf( - SMPolicy.SM_TYPE to mapOf( - SMPolicy.ENABLED_FIELD to true, - SMPolicy.ENABLED_TIME_FIELD to now, - SMPolicy.LAST_UPDATED_TIME_FIELD to now - ) - ) + SMPolicy.SM_TYPE to + mapOf( + SMPolicy.ENABLED_FIELD to true, + SMPolicy.ENABLED_TIME_FIELD to now, + SMPolicy.LAST_UPDATED_TIME_FIELD to now, + ), + ), ) - val updateResponse: UpdateResponse = try { - client.suspendUntil { update(updateRequest, it) } - } catch (e: VersionConflictEngineException) { - log.error("VersionConflictEngineException while trying to enable snapshot management policy id [${updateRequest.id()}]: $e") - throw OpenSearchStatusException(conflictExceptionMessage, RestStatus.INTERNAL_SERVER_ERROR) - } catch (e: Exception) { - log.error("Failed trying to enable snapshot management policy id [${updateRequest.id()}]: $e") - throw OpenSearchStatusException("Failed while trying to enable SM Policy", RestStatus.INTERNAL_SERVER_ERROR) - } + val updateResponse: UpdateResponse = + try { + client.suspendUntil { update(updateRequest, it) } + } catch (e: VersionConflictEngineException) { + log.error("VersionConflictEngineException while trying to enable snapshot management policy id [${updateRequest.id()}]: $e") + throw OpenSearchStatusException(conflictExceptionMessage, RestStatus.INTERNAL_SERVER_ERROR) + } catch (e: Exception) { + log.error("Failed trying to enable snapshot management policy id [${updateRequest.id()}]: $e") + throw OpenSearchStatusException("Failed while trying to enable SM Policy", RestStatus.INTERNAL_SERVER_ERROR) + } return updateResponse.result == DocWriteResponse.Result.UPDATED } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/stop/TransportStopSMAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/stop/TransportStopSMAction.kt index 08626ef43..4e4b0b7e6 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/stop/TransportStopSMAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/api/transport/stop/TransportStopSMAction.kt @@ -18,6 +18,7 @@ import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings import org.opensearch.common.util.concurrent.ThreadContext import 
org.opensearch.commons.authuser.User +import org.opensearch.core.rest.RestStatus import org.opensearch.index.engine.VersionConflictEngineException import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.opensearchapi.suspendUntil @@ -27,20 +28,20 @@ import org.opensearch.indexmanagement.snapshotmanagement.getSMPolicy import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy import org.opensearch.indexmanagement.snapshotmanagement.settings.SnapshotManagementSettings.Companion.FILTER_BY_BACKEND_ROLES import org.opensearch.indexmanagement.util.SecurityUtils.Companion.verifyUserHasPermissionForResource -import org.opensearch.core.rest.RestStatus import org.opensearch.transport.TransportService import java.time.Instant -class TransportStopSMAction @Inject constructor( +class TransportStopSMAction +@Inject +constructor( client: Client, transportService: TransportService, actionFilters: ActionFilters, val clusterService: ClusterService, val settings: Settings, ) : BaseTransportAction( - SMActions.STOP_SM_POLICY_ACTION_NAME, transportService, client, actionFilters, ::StopSMRequest + SMActions.STOP_SM_POLICY_ACTION_NAME, transportService, client, actionFilters, ::StopSMRequest, ) { - private val log = LogManager.getLogger(javaClass) @Volatile private var filterByEnabled = FILTER_BY_BACKEND_ROLES.get(settings) @@ -54,7 +55,7 @@ class TransportStopSMAction @Inject constructor( override suspend fun executeRequest( request: StopSMRequest, user: User?, - threadContext: ThreadContext.StoredContext + threadContext: ThreadContext.StoredContext, ): AcknowledgedResponse { val smPolicy = client.getSMPolicy(request.id()) @@ -72,22 +73,24 @@ class TransportStopSMAction @Inject constructor( val now = Instant.now().toEpochMilli() updateRequest.index(INDEX_MANAGEMENT_INDEX).doc( mapOf( - SMPolicy.SM_TYPE to mapOf( - SMPolicy.ENABLED_FIELD to false, - SMPolicy.ENABLED_TIME_FIELD to null, - SMPolicy.LAST_UPDATED_TIME_FIELD to now - ) - ) + SMPolicy.SM_TYPE to + mapOf( + SMPolicy.ENABLED_FIELD to false, + SMPolicy.ENABLED_TIME_FIELD to null, + SMPolicy.LAST_UPDATED_TIME_FIELD to now, + ), + ), ) - val updateResponse: UpdateResponse = try { - client.suspendUntil { update(updateRequest, it) } - } catch (e: VersionConflictEngineException) { - log.error("VersionConflictEngineException while trying to disable snapshot management policy id [${updateRequest.id()}]: $e") - throw OpenSearchStatusException(conflictExceptionMessage, RestStatus.INTERNAL_SERVER_ERROR) - } catch (e: Exception) { - log.error("Failed trying to disable snapshot management policy id [${updateRequest.id()}]: $e") - throw OpenSearchStatusException("Failed while trying to disable SM Policy", RestStatus.INTERNAL_SERVER_ERROR) - } + val updateResponse: UpdateResponse = + try { + client.suspendUntil { update(updateRequest, it) } + } catch (e: VersionConflictEngineException) { + log.error("VersionConflictEngineException while trying to disable snapshot management policy id [${updateRequest.id()}]: $e") + throw OpenSearchStatusException(conflictExceptionMessage, RestStatus.INTERNAL_SERVER_ERROR) + } catch (e: Exception) { + log.error("Failed trying to disable snapshot management policy id [${updateRequest.id()}]: $e") + throw OpenSearchStatusException("Failed while trying to disable SM Policy", RestStatus.INTERNAL_SERVER_ERROR) + } // TODO update metadata return updateResponse.result == DocWriteResponse.Result.UPDATED } diff --git 
a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/SMStateMachine.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/SMStateMachine.kt index 058eeb7d5..99675ecd7 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/SMStateMachine.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/SMStateMachine.kt @@ -10,21 +10,21 @@ import org.apache.logging.log4j.Logger import org.opensearch.action.bulk.BackoffPolicy import org.opensearch.client.Client import org.opensearch.common.settings.Settings -import org.opensearch.commons.ConfigConstants -import org.opensearch.indexmanagement.opensearchapi.IndexManagementSecurityContext -import org.opensearch.indexmanagement.opensearchapi.withClosableContext import org.opensearch.common.unit.TimeValue +import org.opensearch.commons.ConfigConstants import org.opensearch.indexmanagement.IndexManagementIndices +import org.opensearch.indexmanagement.opensearchapi.IndexManagementSecurityContext import org.opensearch.indexmanagement.opensearchapi.retry +import org.opensearch.indexmanagement.opensearchapi.withClosableContext import org.opensearch.indexmanagement.snapshotmanagement.SnapshotManagementException import org.opensearch.indexmanagement.snapshotmanagement.SnapshotManagementException.ExceptionKey import org.opensearch.indexmanagement.snapshotmanagement.engine.states.SMResult import org.opensearch.indexmanagement.snapshotmanagement.engine.states.SMState import org.opensearch.indexmanagement.snapshotmanagement.engine.states.WorkflowType import org.opensearch.indexmanagement.snapshotmanagement.indexMetadata -import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy import org.opensearch.indexmanagement.snapshotmanagement.model.SMMetadata import org.opensearch.indexmanagement.snapshotmanagement.model.SMMetadata.LatestExecution.Status.TIME_LIMIT_EXCEEDED +import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy import org.opensearch.indexmanagement.util.OpenForTesting import org.opensearch.threadpool.ThreadPool import java.time.Instant.now @@ -38,10 +38,10 @@ class SMStateMachine( val threadPool: ThreadPool, val indicesManager: IndexManagementIndices, ) { - val log: Logger = LogManager.getLogger(javaClass) lateinit var currentState: SMState + fun currentState(currentState: SMState): SMStateMachine { this.currentState = currentState return this @@ -65,25 +65,26 @@ class SMStateMachine( log.debug("Start executing {}.", currentState) log.debug( "User and roles string from thread context: ${threadPool.threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) - result = withClosableContext( - IndexManagementSecurityContext( - job.id, settings, threadPool.threadContext, job.user - ) - ) { - log.debug( - "User and roles string from thread context: ${threadPool.threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" - ) - currentState.instance.execute(this@SMStateMachine) as SMResult - } + result = + withClosableContext( + IndexManagementSecurityContext( + job.id, settings, threadPool.threadContext, job.user, + ), + ) { + log.debug( + "User and roles string from thread context: ${threadPool.threadContext.getTransient( + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", + ) + currentState.instance.execute(this@SMStateMachine) as SMResult + } log.debug( "User and 
roles string from thread context: ${threadPool.threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) when (result) { @@ -93,7 +94,7 @@ class SMStateMachine( result.metadataToSave .setCurrentState(currentState) .resetRetry() - .build() + .build(), ) // break the nextStates loop, to avoid executing other lateral states break @@ -104,7 +105,7 @@ class SMStateMachine( result.metadataToSave .setCurrentState(prevState) .resetRetry() - .build() + .build(), ) // can still execute other lateral states if exists } @@ -145,7 +146,7 @@ class SMStateMachine( job.policyName, message, job.user, - log + log, ) } else { job.notificationConfig?.sendFailureNotification(client, job.policyName, message, job.user, log) @@ -156,14 +157,15 @@ class SMStateMachine( private fun handleRetry(result: SMResult.Fail, prevState: SMState): SMMetadata.Builder { val metadataBuilder = result.metadataToSave.setCurrentState(prevState) val metadata = result.metadataToSave.build() - val retry = when (result.workflowType) { - WorkflowType.CREATION -> { - metadata.creation.retry - } - WorkflowType.DELETION -> { - metadata.deletion?.retry + val retry = + when (result.workflowType) { + WorkflowType.CREATION -> { + metadata.creation.retry + } + WorkflowType.DELETION -> { + metadata.deletion?.retry + } } - } val retryCount: Int if (retry == null) { log.warn("Starting to retry state [$currentState], remaining count 3.") @@ -178,7 +180,7 @@ class SMStateMachine( log.warn(errorMessage) metadataBuilder.setLatestExecution( status = SMMetadata.LatestExecution.Status.FAILED, - endTime = now() + endTime = now(), ).resetWorkflow() } } @@ -197,6 +199,7 @@ class SMStateMachine( */ private var metadataSeqNo: Long = metadata.seqNo private var metadataPrimaryTerm: Long = metadata.primaryTerm + suspend fun updateMetadata(md: SMMetadata) { indicesManager.checkAndUpdateIMConfigIndex(log) try { @@ -220,9 +223,10 @@ class SMStateMachine( // TODO SM save a copy to history } - private val updateMetaDataRetryPolicy = BackoffPolicy.exponentialBackoff( - TimeValue.timeValueMillis(EXPONENTIAL_BACKOFF_MILLIS), MAX_NUMBER_OF_RETRIES - ) + private val updateMetaDataRetryPolicy = + BackoffPolicy.exponentialBackoff( + TimeValue.timeValueMillis(EXPONENTIAL_BACKOFF_MILLIS), MAX_NUMBER_OF_RETRIES, + ) /** * Handle the policy change before job running @@ -232,9 +236,10 @@ class SMStateMachine( suspend fun handlePolicyChange(): SMStateMachine { if (job.seqNo > metadata.policySeqNo || job.primaryTerm > metadata.policyPrimaryTerm) { val now = now() - val metadataToSave = SMMetadata.Builder(metadata) - .setSeqNoPrimaryTerm(job.seqNo, job.primaryTerm) - .setNextCreationTime(job.creation.schedule.getNextExecutionTime(now)) + val metadataToSave = + SMMetadata.Builder(metadata) + .setSeqNoPrimaryTerm(job.seqNo, job.primaryTerm) + .setNextCreationTime(job.creation.schedule.getNextExecutionTime(now)) val deletion = job.deletion deletion?.let { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/SMState.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/SMState.kt index 00630bd5e..2a19312c1 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/SMState.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/SMState.kt @@ -5,13 +5,13 @@ package org.opensearch.indexmanagement.snapshotmanagement.engine.states +import 
org.opensearch.indexmanagement.snapshotmanagement.engine.states.creation.CreatingState import org.opensearch.indexmanagement.snapshotmanagement.engine.states.creation.CreationConditionMetState import org.opensearch.indexmanagement.snapshotmanagement.engine.states.creation.CreationFinishedState -import org.opensearch.indexmanagement.snapshotmanagement.engine.states.creation.CreatingState import org.opensearch.indexmanagement.snapshotmanagement.engine.states.creation.CreationStartState +import org.opensearch.indexmanagement.snapshotmanagement.engine.states.deletion.DeletingState import org.opensearch.indexmanagement.snapshotmanagement.engine.states.deletion.DeletionConditionMetState import org.opensearch.indexmanagement.snapshotmanagement.engine.states.deletion.DeletionFinishedState -import org.opensearch.indexmanagement.snapshotmanagement.engine.states.deletion.DeletingState import org.opensearch.indexmanagement.snapshotmanagement.engine.states.deletion.DeletionStartState import org.opensearch.indexmanagement.snapshotmanagement.model.SMMetadata @@ -47,7 +47,9 @@ enum class WorkflowType { */ sealed class SMResult : State.Result { data class Next(val metadataToSave: SMMetadata.Builder) : SMResult() + data class Stay(val metadataToSave: SMMetadata.Builder) : SMResult() + data class Fail( val metadataToSave: SMMetadata.Builder, val workflowType: WorkflowType, @@ -56,20 +58,23 @@ sealed class SMResult : State.Result { } // TODO SM enhance transition with predicate + /** * Transitions from current to next state vertically. * If there are multiple next states in lateral, these would be executed in sequence in order. */ -val creationTransitions: Map> = mapOf( - SMState.CREATION_START to listOf(SMState.CREATION_CONDITION_MET), - SMState.CREATION_CONDITION_MET to listOf(SMState.CREATING), - SMState.CREATING to listOf(SMState.CREATION_FINISHED), - SMState.CREATION_FINISHED to listOf(SMState.CREATION_START), -) +val creationTransitions: Map> = + mapOf( + SMState.CREATION_START to listOf(SMState.CREATION_CONDITION_MET), + SMState.CREATION_CONDITION_MET to listOf(SMState.CREATING), + SMState.CREATING to listOf(SMState.CREATION_FINISHED), + SMState.CREATION_FINISHED to listOf(SMState.CREATION_START), + ) -val deletionTransitions: Map> = mapOf( - SMState.DELETION_START to listOf(SMState.DELETION_CONDITION_MET), - SMState.DELETION_CONDITION_MET to listOf(SMState.DELETING), - SMState.DELETING to listOf(SMState.DELETION_FINISHED), - SMState.DELETION_FINISHED to listOf(SMState.DELETION_START), -) +val deletionTransitions: Map> = + mapOf( + SMState.DELETION_START to listOf(SMState.DELETION_CONDITION_MET), + SMState.DELETION_CONDITION_MET to listOf(SMState.DELETING), + SMState.DELETING to listOf(SMState.DELETION_FINISHED), + SMState.DELETION_FINISHED to listOf(SMState.DELETION_START), + ) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreatingState.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreatingState.kt index bc591a486..cdaf51506 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreatingState.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreatingState.kt @@ -11,12 +11,12 @@ import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotReques import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse import org.opensearch.client.ClusterAdminClient import 
org.opensearch.indexmanagement.opensearchapi.suspendUntil -import org.opensearch.indexmanagement.snapshotmanagement.engine.SMStateMachine -import org.opensearch.indexmanagement.snapshotmanagement.generateSnapshotName import org.opensearch.indexmanagement.snapshotmanagement.addSMPolicyInSnapshotMetadata +import org.opensearch.indexmanagement.snapshotmanagement.engine.SMStateMachine import org.opensearch.indexmanagement.snapshotmanagement.engine.states.SMResult import org.opensearch.indexmanagement.snapshotmanagement.engine.states.State import org.opensearch.indexmanagement.snapshotmanagement.engine.states.WorkflowType +import org.opensearch.indexmanagement.snapshotmanagement.generateSnapshotName import org.opensearch.indexmanagement.snapshotmanagement.getSnapshots import org.opensearch.indexmanagement.snapshotmanagement.model.SMMetadata import org.opensearch.snapshots.ConcurrentSnapshotExecutionException @@ -25,7 +25,6 @@ import org.opensearch.transport.RemoteTransportException import java.time.Instant object CreatingState : State { - override val continuous: Boolean = false @Suppress("ReturnCount") @@ -35,18 +34,20 @@ object CreatingState : State { val metadata = context.metadata val log = context.log - var metadataBuilder = SMMetadata.Builder(metadata) - .workflow(WorkflowType.CREATION) + var metadataBuilder = + SMMetadata.Builder(metadata) + .workflow(WorkflowType.CREATION) var snapshotName: String? = metadata.creation.started?.first() // Check if there's already a snapshot created by SM in current execution period. // So that this State can be executed idempotent. if (snapshotName == null) { - val getSnapshotsResult = client.getSnapshots( - job, job.policyName + "*", metadataBuilder, - log, null, SNAPSHOT_ERROR_MESSAGE, - ) + val getSnapshotsResult = + client.getSnapshots( + job, job.policyName + "*", metadataBuilder, + log, null, SNAPSHOT_ERROR_MESSAGE, + ) metadataBuilder = getSnapshotsResult.metadataBuilder if (getSnapshotsResult.failed) { return SMResult.Fail(metadataBuilder, WorkflowType.CREATION) @@ -58,7 +59,7 @@ object CreatingState : State { if (snapshotName != null) { log.info("Already created snapshot [$snapshotName] during this execution period starting at $latestExecutionStartTime.") metadataBuilder.setLatestExecution( - status = SMMetadata.LatestExecution.Status.IN_PROGRESS + status = SMMetadata.LatestExecution.Status.IN_PROGRESS, ).setCreationStarted(snapshotName) return SMResult.Next(metadataBuilder) } @@ -66,9 +67,10 @@ object CreatingState : State { snapshotName = generateSnapshotName(job) try { - val req = CreateSnapshotRequest(job.snapshotConfig["repository"] as String, snapshotName) - .source(addSMPolicyInSnapshotMetadata(job.snapshotConfig, job.policyName)) - .waitForCompletion(false) + val req = + CreateSnapshotRequest(job.snapshotConfig["repository"] as String, snapshotName) + .source(addSMPolicyInSnapshotMetadata(job.snapshotConfig, job.policyName)) + .waitForCompletion(false) client.admin().cluster().suspendUntil { createSnapshot(req, it) } metadataBuilder.setLatestExecution( @@ -104,10 +106,13 @@ object CreatingState : State { } const val CONCURRENT_SNAPSHOT_MESSAGE = "Concurrent snapshot exception happened, retrying..." + private fun getSnapshotCreationStartedMessage(snapshotName: String) = "Snapshot $snapshotName creation has been started and waiting for completion." + private const val SNAPSHOT_ERROR_MESSAGE = "Caught exception while getting snapshots to decide if snapshot has been created in previous execution period." 
+ private fun getCreateSnapshotErrorMessage(snapshotName: String) = "Caught exception while creating snapshot $snapshotName." diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationConditionMetState.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationConditionMetState.kt index 60f5c0a5f..34c5414d4 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationConditionMetState.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationConditionMetState.kt @@ -13,7 +13,6 @@ import org.opensearch.indexmanagement.snapshotmanagement.model.SMMetadata import org.opensearch.indexmanagement.snapshotmanagement.tryUpdatingNextExecutionTime object CreationConditionMetState : State { - override val continuous = true override suspend fun execute(context: SMStateMachine): SMResult { @@ -21,13 +20,15 @@ object CreationConditionMetState : State { val metadata = context.metadata val log = context.log - var metadataBuilder = SMMetadata.Builder(metadata) - .workflow(WorkflowType.CREATION) + var metadataBuilder = + SMMetadata.Builder(metadata) + .workflow(WorkflowType.CREATION) val nextCreationTime = metadata.creation.trigger.time - val updateNextTimeResult = tryUpdatingNextExecutionTime( - metadataBuilder, nextCreationTime, job.creation.schedule, WorkflowType.CREATION, log - ) + val updateNextTimeResult = + tryUpdatingNextExecutionTime( + metadataBuilder, nextCreationTime, job.creation.schedule, WorkflowType.CREATION, log, + ) if (!updateNextTimeResult.updated) { return SMResult.Stay(metadataBuilder) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationFinishedState.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationFinishedState.kt index 0ad464908..d9c41f197 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationFinishedState.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationFinishedState.kt @@ -19,7 +19,6 @@ import org.opensearch.snapshots.SnapshotState import java.time.Instant.now object CreationFinishedState : State { - override val continuous = true @Suppress("ReturnCount", "LongMethod", "NestedBlockDepth") @@ -29,8 +28,9 @@ object CreationFinishedState : State { val metadata = context.metadata val log = context.log - var metadataBuilder = SMMetadata.Builder(metadata) - .workflow(WorkflowType.CREATION) + var metadataBuilder = + SMMetadata.Builder(metadata) + .workflow(WorkflowType.CREATION) metadata.creation.started?.first()?.let { snapshotName -> if (metadata.creation.latestExecution == null) { @@ -40,11 +40,12 @@ object CreationFinishedState : State { return@let } - val getSnapshotsResult = client.getSnapshots( - job, snapshotName, metadataBuilder, log, - getSnapshotMissingMessageInCreationWorkflow(snapshotName), - getSnapshotExceptionInCreationWorkflow(snapshotName), - ) + val getSnapshotsResult = + client.getSnapshots( + job, snapshotName, metadataBuilder, log, + getSnapshotMissingMessageInCreationWorkflow(snapshotName), + getSnapshotExceptionInCreationWorkflow(snapshotName), + ) metadataBuilder = getSnapshotsResult.metadataBuilder if (getSnapshotsResult.failed) { return SMResult.Fail(metadataBuilder, WorkflowType.CREATION) @@ -77,7 +78,7 @@ object CreationFinishedState : State { 
job.creation.timeLimit?.let { timeLimit -> if (timeLimit.isExceed(metadata.creation.latestExecution.startTime)) { return timeLimitExceeded( - timeLimit, metadataBuilder, WorkflowType.CREATION, log + timeLimit, metadataBuilder, WorkflowType.CREATION, log, ) } } @@ -97,10 +98,11 @@ object CreationFinishedState : State { // if now is after next creation time, update nextCreationTime to next execution schedule // TODO may want to notify user that we skipped the execution because snapshot creation time is longer than execution schedule - val result = tryUpdatingNextExecutionTime( - metadataBuilder, metadata.creation.trigger.time, job.creation.schedule, - WorkflowType.CREATION, log - ) + val result = + tryUpdatingNextExecutionTime( + metadataBuilder, metadata.creation.trigger.time, job.creation.schedule, + WorkflowType.CREATION, log, + ) if (result.updated) { metadataBuilder = result.metadataBuilder } @@ -115,8 +117,10 @@ object CreationFinishedState : State { private fun getSnapshotCreationSucceedMessage(snapshotName: String) = "Snapshot $snapshotName creation has finished successfully." + private fun getSnapshotMissingMessageInCreationWorkflow(snapshotName: String) = "Snapshot $snapshotName not found while checking if it has been created." + private fun getSnapshotExceptionInCreationWorkflow(snapshotName: String) = "Caught exception while getting started creation snapshot [$snapshotName]." } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationStartState.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationStartState.kt index 8f1f1b36b..7a729cc30 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationStartState.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationStartState.kt @@ -13,13 +13,13 @@ import org.opensearch.indexmanagement.snapshotmanagement.engine.states.WorkflowT import org.opensearch.indexmanagement.snapshotmanagement.model.SMMetadata object CreationStartState : State { - override val continuous: Boolean = true override suspend fun execute(context: SMStateMachine): SMResult { - val metadataToSave = SMMetadata.Builder(context.metadata) - .workflow(WorkflowType.CREATION) - .setCurrentState(SMState.CREATION_START) + val metadataToSave = + SMMetadata.Builder(context.metadata) + .workflow(WorkflowType.CREATION) + .setCurrentState(SMState.CREATION_START) return SMResult.Next(metadataToSave) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletingState.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletingState.kt index 69ed735b6..c3bd9e761 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletingState.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletingState.kt @@ -27,7 +27,6 @@ import java.time.Instant import java.time.Instant.now object DeletingState : State { - override val continuous: Boolean = false @Suppress("ReturnCount", "SpreadOperator") @@ -37,38 +36,43 @@ object DeletingState : State { val metadata = context.metadata val log = context.log - var metadataBuilder = SMMetadata.Builder(metadata) - .workflow(WorkflowType.DELETION) + var metadataBuilder = + SMMetadata.Builder(metadata) + .workflow(WorkflowType.DELETION) if (job.deletion == null) { log.warn("Policy 
deletion config becomes null before trying to delete old snapshots. Reset.") return SMResult.Fail( - metadataBuilder.resetDeletion(), WorkflowType.DELETION, forceReset = true + metadataBuilder.resetDeletion(), WorkflowType.DELETION, forceReset = true, ) } val snapshotsToDelete: List - val getSnapshotsRes = client.getSnapshots( - job, job.policyName + "*", metadataBuilder, log, - getSnapshotsMissingMessage(), - getSnapshotsErrorMessage(), - ) + val getSnapshotsRes = + client.getSnapshots( + job, job.policyName + "*", metadataBuilder, log, + getSnapshotsMissingMessage(), + getSnapshotsErrorMessage(), + ) metadataBuilder = getSnapshotsRes.metadataBuilder - if (getSnapshotsRes.failed) + if (getSnapshotsRes.failed) { return SMResult.Fail(metadataBuilder, WorkflowType.DELETION) + } val getSnapshots = getSnapshotsRes.snapshots - snapshotsToDelete = filterByDeleteCondition( - getSnapshots.filter { it.state() != SnapshotState.IN_PROGRESS }, - job.deletion.condition, log - ) + snapshotsToDelete = + filterByDeleteCondition( + getSnapshots.filter { it.state() != SnapshotState.IN_PROGRESS }, + job.deletion.condition, log, + ) if (snapshotsToDelete.isNotEmpty()) { try { - val req = DeleteSnapshotRequest( - job.snapshotConfig["repository"] as String, - *snapshotsToDelete.toTypedArray() - ) + val req = + DeleteSnapshotRequest( + job.snapshotConfig["repository"] as String, + *snapshotsToDelete.toTypedArray(), + ) client.admin().cluster().suspendUntil { deleteSnapshot(req, it) } metadataBuilder.setLatestExecution( @@ -105,8 +109,11 @@ object DeletingState : State { private fun getSnapshotDeletionStartedMessage(snapshotNames: List) = "Snapshots $snapshotNames deletion has been started and waiting for completion." + private fun getSnapshotsMissingMessage() = "No snapshots found under policy while getting snapshots to decide which snapshots to delete." + private fun getSnapshotsErrorMessage() = "Caught exception while getting snapshots to decide which snapshots to delete." + private fun getDeleteSnapshotErrorMessage(snapshotNames: List) = "Caught exception while deleting snapshot $snapshotNames." /** diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionConditionMetState.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionConditionMetState.kt index b0470291c..58f2cf27e 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionConditionMetState.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionConditionMetState.kt @@ -15,7 +15,6 @@ import java.time.Instant.now // check the status of creating, deleting snapshot object DeletionConditionMetState : State { - override val continuous = true @Suppress("ReturnCount") @@ -24,28 +23,31 @@ object DeletionConditionMetState : State { val metadata = context.metadata val log = context.log - var metadataBuilder = SMMetadata.Builder(metadata) - .workflow(WorkflowType.DELETION) + var metadataBuilder = + SMMetadata.Builder(metadata) + .workflow(WorkflowType.DELETION) if (job.deletion == null) { log.warn("Policy deletion config becomes null before checking if delete schedule met. 
Reset.") return SMResult.Fail( - metadataBuilder.resetDeletion(), WorkflowType.DELETION, forceReset = true + metadataBuilder.resetDeletion(), WorkflowType.DELETION, forceReset = true, ) } // if job.deletion != null, then metadata.deletion.trigger.time should already be // initialized or handled in handlePolicyChange before executing this state. - val nextDeletionTime = if (metadata.deletion == null) { - val nextTime = job.deletion.schedule.getNextExecutionTime(now()) - metadataBuilder.setNextDeletionTime(nextTime) - nextTime - } else { - metadata.deletion.trigger.time - } - val updateNextTimeResult = tryUpdatingNextExecutionTime( - metadataBuilder, nextDeletionTime, job.deletion.schedule, WorkflowType.DELETION, log - ) + val nextDeletionTime = + if (metadata.deletion == null) { + val nextTime = job.deletion.schedule.getNextExecutionTime(now()) + metadataBuilder.setNextDeletionTime(nextTime) + nextTime + } else { + metadata.deletion.trigger.time + } + val updateNextTimeResult = + tryUpdatingNextExecutionTime( + metadataBuilder, nextDeletionTime, job.deletion.schedule, WorkflowType.DELETION, log, + ) if (!updateNextTimeResult.updated) { return SMResult.Stay(metadataBuilder) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionFinishedState.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionFinishedState.kt index 8de31780c..d36f318e9 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionFinishedState.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionFinishedState.kt @@ -17,7 +17,6 @@ import org.opensearch.indexmanagement.snapshotmanagement.tryUpdatingNextExecutio import java.time.Instant.now object DeletionFinishedState : State { - override val continuous = true @Suppress("ReturnCount", "NestedBlockDepth") @@ -27,8 +26,9 @@ object DeletionFinishedState : State { val metadata = context.metadata val log = context.log - var metadataBuilder = SMMetadata.Builder(metadata) - .workflow(WorkflowType.DELETION) + var metadataBuilder = + SMMetadata.Builder(metadata) + .workflow(WorkflowType.DELETION) metadata.deletion?.started?.let { snapshotsStartedDeletion -> if (metadata.deletion.latestExecution == null) { @@ -38,14 +38,16 @@ object DeletionFinishedState : State { return@let } - val getSnapshotsRes = client.getSnapshots( - job, "${job.policyName}*", metadataBuilder, log, - getSnapshotMissingMessageInDeletionWorkflow(), - getSnapshotExceptionInDeletionWorkflow(snapshotsStartedDeletion), - ) + val getSnapshotsRes = + client.getSnapshots( + job, "${job.policyName}*", metadataBuilder, log, + getSnapshotMissingMessageInDeletionWorkflow(), + getSnapshotExceptionInDeletionWorkflow(snapshotsStartedDeletion), + ) metadataBuilder = getSnapshotsRes.metadataBuilder - if (getSnapshotsRes.failed) + if (getSnapshotsRes.failed) { return SMResult.Fail(metadataBuilder, WorkflowType.DELETION) + } val getSnapshots = getSnapshotsRes.snapshots val existingSnapshotsNameSet = getSnapshots.map { it.snapshotId().name }.toSet() @@ -71,9 +73,10 @@ object DeletionFinishedState : State { // if now is after next deletion time, update next execution schedule // TODO may want to notify user that we skipped the execution because snapshot deletion time is longer than execution schedule job.deletion?.let { - val result = tryUpdatingNextExecutionTime( - metadataBuilder, metadata.deletion.trigger.time, 
job.deletion.schedule, WorkflowType.DELETION, log - ) + val result = + tryUpdatingNextExecutionTime( + metadataBuilder, metadata.deletion.trigger.time, job.deletion.schedule, WorkflowType.DELETION, log, + ) if (result.updated) { metadataBuilder = result.metadataBuilder } @@ -89,6 +92,7 @@ object DeletionFinishedState : State { private fun getSnapshotMissingMessageInDeletionWorkflow() = "No snapshots found under policy while getting snapshots to decide if snapshots has been deleted." + private fun getSnapshotExceptionInDeletionWorkflow(startedDeleteSnapshots: List) = "Caught exception while getting snapshots to decide if snapshots [$startedDeleteSnapshots] has been deleted." } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionStartState.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionStartState.kt index 24970bb41..cda515c12 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionStartState.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionStartState.kt @@ -13,13 +13,13 @@ import org.opensearch.indexmanagement.snapshotmanagement.engine.states.WorkflowT import org.opensearch.indexmanagement.snapshotmanagement.model.SMMetadata object DeletionStartState : State { - override val continuous: Boolean = true override suspend fun execute(context: SMStateMachine): SMResult { - val metadataToSave = SMMetadata.Builder(context.metadata) - .workflow(WorkflowType.DELETION) - .setCurrentState(SMState.DELETION_START) + val metadataToSave = + SMMetadata.Builder(context.metadata) + .workflow(WorkflowType.DELETION) + .setCurrentState(SMState.DELETION_START) return SMResult.Next(metadataToSave) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/ExplainSMPolicy.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/ExplainSMPolicy.kt index b1f6288c9..d0540c974 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/ExplainSMPolicy.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/ExplainSMPolicy.kt @@ -16,13 +16,12 @@ import java.io.IOException data class ExplainSMPolicy( val metadata: SMMetadata? = null, - val enabled: Boolean? = null + val enabled: Boolean? 
= null, ) : ToXContentFragment, Writeable { - @Throws(IOException::class) constructor(sin: StreamInput) : this( metadata = if (sin.readBoolean()) SMMetadata(sin) else null, - enabled = sin.readOptionalBoolean() + enabled = sin.readOptionalBoolean(), ) @Throws(IOException::class) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/NotificationConfig.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/NotificationConfig.kt index 88b38df8d..688f29bf6 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/NotificationConfig.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/NotificationConfig.kt @@ -7,6 +7,9 @@ package org.opensearch.indexmanagement.snapshotmanagement.model import org.apache.logging.log4j.Logger import org.opensearch.client.Client +import org.opensearch.commons.authuser.User +import org.opensearch.commons.notifications.model.EventSource +import org.opensearch.commons.notifications.model.SeverityType import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable @@ -16,9 +19,6 @@ import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParser.Token import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.commons.authuser.User -import org.opensearch.commons.notifications.model.EventSource -import org.opensearch.commons.notifications.model.SeverityType import org.opensearch.indexmanagement.common.model.notification.Channel import java.io.IOException @@ -29,7 +29,6 @@ data class NotificationConfig( val channel: Channel, val conditions: Conditions, ) : ToXContentObject, Writeable { - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { return builder.startObject() .field(CHANNEL_FIELD, channel) @@ -40,7 +39,7 @@ data class NotificationConfig( @Throws(IOException::class) constructor(sin: StreamInput) : this( Channel(sin), - Conditions(sin) + Conditions(sin), ) @Throws(IOException::class) @@ -78,7 +77,7 @@ data class NotificationConfig( policyName: String, message: String, user: User?, - log: Logger + log: Logger, ) { if (this.conditions.timeLimitExceeded) { try { @@ -130,7 +129,7 @@ data class NotificationConfig( return NotificationConfig( channel = requireNotNull(channel) { "Snapshot Management notification channel must not be null" }, - conditions = conditions ?: Conditions() + conditions = conditions ?: Conditions(), ) } } @@ -139,9 +138,8 @@ data class NotificationConfig( val creation: Boolean = true, val deletion: Boolean = false, val failure: Boolean = false, - val timeLimitExceeded: Boolean = false + val timeLimitExceeded: Boolean = false, ) : Writeable, ToXContent { - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { return builder.startObject() .field(CREATION_FIELD, creation) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/SMMetadata.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/SMMetadata.kt index e42486a6f..4346b8b91 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/SMMetadata.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/SMMetadata.kt @@ -44,7 +44,6 @@ data class SMMetadata( val seqNo: Long = 
SequenceNumbers.UNASSIGNED_SEQ_NO, val primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, ) : Writeable, ToXContentObject { - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { builder.startObject() if (params.paramAsBoolean(WITH_TYPE, true)) builder.startObject(SM_METADATA_TYPE) @@ -68,7 +67,7 @@ data class SMMetadata( xcp: XContentParser, id: String = NO_ID, seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, - primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM + primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, ): SMMetadata { var policySeqNo: Long? = null var policyPrimaryTerm: Long? = null @@ -96,7 +95,7 @@ data class SMMetadata( deletion = deletion, id = id, seqNo = seqNo, - primaryTerm = primaryTerm + primaryTerm = primaryTerm, ) } @@ -138,7 +137,6 @@ data class SMMetadata( val latestExecution: LatestExecution? = null, val retry: Retry? = null, ) : Writeable, ToXContentObject { - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { return builder.startObject() .field(CURRENT_STATE_FIELD, currentState.toString()) @@ -209,7 +207,6 @@ data class SMMetadata( val endTime: Instant? = null, val info: Info? = null, ) : Writeable, ToXContentObject { - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { return builder.startObject() .field(STATUS_FIELD, status.toString()) @@ -288,7 +285,6 @@ data class SMMetadata( val message: String? = null, val cause: String? = null, ) : Writeable, ToXContentObject { - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { return builder.startObject() .optionalField(MESSAGE_FIELD, message) @@ -342,7 +338,6 @@ data class SMMetadata( data class Trigger( val time: Instant, ) : Writeable, ToXContentObject { - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { return builder.startObject() .optionalTimeField(TIME_FIELD, time) @@ -372,7 +367,7 @@ data class SMMetadata( } constructor(sin: StreamInput) : this( - time = sin.readInstant() + time = sin.readInstant(), ) override fun writeTo(out: StreamOutput) { @@ -383,7 +378,6 @@ data class SMMetadata( data class Retry( val count: Int, ) : Writeable, ToXContentObject { - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { return builder.startObject() .field(COUNT_FIELD, count) @@ -408,13 +402,13 @@ data class SMMetadata( } return Retry( - count = requireNotNull(count) { "count field in Retry must not be null." } + count = requireNotNull(count) { "count field in Retry must not be null." 
}, ) } } constructor(sin: StreamInput) : this( - count = sin.readInt() + count = sin.readInt(), ) override fun writeTo(out: StreamOutput) { @@ -430,10 +424,10 @@ data class SMMetadata( class Builder( private var metadata: SMMetadata, ) { - fun build() = metadata private lateinit var workflowType: WorkflowType + fun workflow(workflowType: WorkflowType): Builder { this.workflowType = workflowType return this @@ -442,18 +436,22 @@ data class SMMetadata( fun setCurrentState(state: SMState): Builder { when (workflowType) { WorkflowType.CREATION -> { - metadata = metadata.copy( - creation = metadata.creation.copy( - currentState = state + metadata = + metadata.copy( + creation = + metadata.creation.copy( + currentState = state, + ), ) - ) } WorkflowType.DELETION -> { - metadata = metadata.copy( - deletion = metadata.deletion?.copy( - currentState = state + metadata = + metadata.copy( + deletion = + metadata.deletion?.copy( + currentState = state, + ), ) - ) } } return this @@ -481,27 +479,31 @@ data class SMMetadata( } } - metadata = metadata.copy( - creation = metadata.creation.copy( - currentState = creationCurrentState, - started = startedCreation, - retry = creationRetry, - ), - deletion = deletionCurrentState?.let { - metadata.deletion?.copy( - currentState = it, - started = startedDeletion, - retry = deletionRetry, - ) - }, - ) + metadata = + metadata.copy( + creation = + metadata.creation.copy( + currentState = creationCurrentState, + started = startedCreation, + retry = creationRetry, + ), + deletion = + deletionCurrentState?.let { + metadata.deletion?.copy( + currentState = it, + started = startedDeletion, + retry = deletionRetry, + ) + }, + ) return this } fun resetDeletion(): Builder { - metadata = metadata.copy( - deletion = null - ) + metadata = + metadata.copy( + deletion = null, + ) return this } @@ -514,49 +516,55 @@ data class SMMetadata( message: String? = null, updateCause: Boolean = true, cause: Exception? = null, - endTime: Instant? = null + endTime: Instant? 
= null, ): Builder { val messageWithTime = if (message != null) preFixTimeStamp(message) else null val causeWithTime = if (cause != null) preFixTimeStamp(SnapshotManagementException.getUserErrorMessage(cause).message) else null + fun getUpdatedWorkflowMetadata(workflowMetadata: WorkflowMetadata): WorkflowMetadata { // if started is null, we need to override the previous latestExecution // w/ a newly initialized one if (workflowMetadata.started == null) { return workflowMetadata.copy( - latestExecution = LatestExecution.init( + latestExecution = + LatestExecution.init( status = status, - info = Info( + info = + Info( message = messageWithTime, cause = causeWithTime, - ) - ) + ), + ), ) } else { // if started is not null, latestExecution should never be null return workflowMetadata.copy( - latestExecution = workflowMetadata.latestExecution?.copy( + latestExecution = + workflowMetadata.latestExecution?.copy( status = status, - info = Info( + info = + Info( message = if (updateMessage) messageWithTime else metadata.creation.latestExecution?.info?.message, cause = if (updateCause) causeWithTime else metadata.creation.latestExecution?.info?.cause, ), endTime = endTime, - ) + ), ) } } - metadata = when (workflowType) { - WorkflowType.CREATION -> { - metadata.copy( - creation = getUpdatedWorkflowMetadata(metadata.creation) - ) - } - WorkflowType.DELETION -> { - metadata.copy( - deletion = metadata.deletion?.let { getUpdatedWorkflowMetadata(it) } - ) + metadata = + when (workflowType) { + WorkflowType.CREATION -> { + metadata.copy( + creation = getUpdatedWorkflowMetadata(metadata.creation), + ) + } + WorkflowType.DELETION -> { + metadata.copy( + deletion = metadata.deletion?.let { getUpdatedWorkflowMetadata(it) }, + ) + } } - } return this } @@ -589,18 +597,22 @@ data class SMMetadata( fun setRetry(count: Int): Builder { when (workflowType) { WorkflowType.CREATION -> { - metadata = metadata.copy( - creation = metadata.creation.copy( - retry = Retry(count = count) + metadata = + metadata.copy( + creation = + metadata.creation.copy( + retry = Retry(count = count), + ), ) - ) } WorkflowType.DELETION -> { - metadata = metadata.copy( - deletion = metadata.deletion?.copy( - retry = Retry(count = count) + metadata = + metadata.copy( + deletion = + metadata.deletion?.copy( + retry = Retry(count = count), + ), ) - ) } } return this @@ -611,20 +623,24 @@ data class SMMetadata( when (workflowType) { WorkflowType.CREATION -> { if (metadata.creation.retry != null) { - metadata = metadata.copy( - creation = metadata.creation.copy( - retry = null + metadata = + metadata.copy( + creation = + metadata.creation.copy( + retry = null, + ), ) - ) } } WorkflowType.DELETION -> { if (metadata.deletion?.retry != null) { - metadata = metadata.copy( - deletion = metadata.deletion?.copy( - retry = null + metadata = + metadata.copy( + deletion = + metadata.deletion?.copy( + retry = null, + ), ) - ) } } } @@ -632,62 +648,75 @@ data class SMMetadata( } fun setSeqNoPrimaryTerm(seqNo: Long, primaryTerm: Long): Builder { - metadata = metadata.copy( - policySeqNo = seqNo, - policyPrimaryTerm = primaryTerm, - ) + metadata = + metadata.copy( + policySeqNo = seqNo, + policyPrimaryTerm = primaryTerm, + ) return this } fun setNextCreationTime(time: Instant): Builder { - metadata = metadata.copy( - creation = metadata.creation.copy( - trigger = metadata.creation.trigger.copy( - time = time - ) + metadata = + metadata.copy( + creation = + metadata.creation.copy( + trigger = + metadata.creation.trigger.copy( + time = time, + ), + ), ) - ) 
return this } fun setCreationStarted(snapshot: String?): Builder { - metadata = metadata.copy( - creation = metadata.creation.copy( - started = if (snapshot == null) null else listOf(snapshot), + metadata = + metadata.copy( + creation = + metadata.creation.copy( + started = if (snapshot == null) null else listOf(snapshot), + ), ) - ) return this } fun setNextDeletionTime(time: Instant): Builder { val deletion = metadata.deletion if (deletion != null) { - metadata = metadata.copy( - deletion = deletion.copy( - trigger = deletion.trigger.copy( - time = time - ) + metadata = + metadata.copy( + deletion = + deletion.copy( + trigger = + deletion.trigger.copy( + time = time, + ), + ), ) - ) } else { - metadata = metadata.copy( - deletion = WorkflowMetadata( - SMState.DELETION_START, - Trigger( - time = time + metadata = + metadata.copy( + deletion = + WorkflowMetadata( + SMState.DELETION_START, + Trigger( + time = time, + ), ), ) - ) } return this } fun setDeletionStarted(snapshots: List?): Builder { - metadata = metadata.copy( - deletion = metadata.deletion?.copy( - started = snapshots, + metadata = + metadata.copy( + deletion = + metadata.deletion?.copy( + started = snapshots, + ), ) - ) return this } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/SMPolicy.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/SMPolicy.kt index 358b84c80..ccc92239e 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/SMPolicy.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/SMPolicy.kt @@ -5,16 +5,16 @@ package org.opensearch.indexmanagement.snapshotmanagement.model +import org.opensearch.common.unit.TimeValue +import org.opensearch.commons.authuser.User import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable -import org.opensearch.common.unit.TimeValue import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParser.Token import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.commons.authuser.User import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.indexstatemanagement.util.WITH_TYPE import org.opensearch.indexmanagement.indexstatemanagement.util.WITH_USER @@ -23,10 +23,10 @@ import org.opensearch.indexmanagement.opensearchapi.nullValueHandler import org.opensearch.indexmanagement.opensearchapi.optionalField import org.opensearch.indexmanagement.opensearchapi.optionalTimeField import org.opensearch.indexmanagement.opensearchapi.optionalUserField -import org.opensearch.indexmanagement.snapshotmanagement.smPolicyNameToMetadataDocId import org.opensearch.indexmanagement.snapshotmanagement.smDocIdToPolicyName -import org.opensearch.indexmanagement.util.IndexUtils +import org.opensearch.indexmanagement.snapshotmanagement.smPolicyNameToMetadataDocId import org.opensearch.indexmanagement.snapshotmanagement.validateDateFormat +import org.opensearch.indexmanagement.util.IndexUtils import org.opensearch.jobscheduler.spi.ScheduledJobParameter import org.opensearch.jobscheduler.spi.schedule.CronSchedule import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule @@ -53,7 +53,6 @@ data class SMPolicy( val notificationConfig: NotificationConfig? = null, val user: User? 
= null, ) : ScheduledJobParameter, Writeable { - init { require(snapshotConfig["repository"] != null && snapshotConfig["repository"] != "") { "Must provide the repository in snapshot config." @@ -183,9 +182,10 @@ data class SMPolicy( require(creation != null) { "Must provide the creation configuration." } // If user doesn't provide delete schedule, use the creation schedule if (deletion != null && !deletion.scheduleProvided) { - deletion = deletion.copy( - schedule = creation.schedule - ) + deletion = + deletion.copy( + schedule = creation.schedule, + ) } requireNotNull(snapshotConfig) { "$SNAPSHOT_CONFIG_FIELD field must not be null" } @@ -210,7 +210,7 @@ data class SMPolicy( seqNo = seqNo, primaryTerm = primaryTerm, notificationConfig = notificationConfig, - user = user + user = user, ) } } @@ -229,7 +229,7 @@ data class SMPolicy( seqNo = sin.readLong(), primaryTerm = sin.readLong(), notificationConfig = sin.readOptionalWriteable { NotificationConfig(it) }, - user = sin.readOptionalWriteable(::User) + user = sin.readOptionalWriteable(::User), ) override fun writeTo(out: StreamOutput) { @@ -253,7 +253,6 @@ data class SMPolicy( val schedule: Schedule, val timeLimit: TimeValue? = null, ) : Writeable, ToXContent { - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { return builder.startObject() .field(SCHEDULE_FIELD, schedule) @@ -281,7 +280,7 @@ data class SMPolicy( return Creation( schedule = requireNotNull(schedule) { "schedule field must not be null" }, - timeLimit = timeLimit + timeLimit = timeLimit, ) } } @@ -303,7 +302,6 @@ data class SMPolicy( val condition: DeleteCondition, val timeLimit: TimeValue? = null, ) : Writeable, ToXContent { - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { return builder.startObject() .field(SCHEDULE_FIELD, schedule) @@ -367,7 +365,6 @@ data class SMPolicy( val minCount: Int, val maxCount: Int? = null, ) : Writeable, ToXContent { - init { require(!(maxAge == null && maxCount == null)) { "Please provide $MAX_AGE_FIELD or $MAX_COUNT_FIELD." } require(minCount > 0) { "$MIN_COUNT_FIELD should be bigger than 0." 
} @@ -416,7 +413,7 @@ data class SMPolicy( constructor(sin: StreamInput) : this( maxCount = sin.readOptionalInt(), maxAge = sin.readOptionalTimeValue(), - minCount = sin.readInt() + minCount = sin.readInt(), ) override fun writeTo(out: StreamOutput) { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/settings/SnapshotManagementSettings.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/settings/SnapshotManagementSettings.kt index 0e86a687e..7b7179b89 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/settings/SnapshotManagementSettings.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/settings/SnapshotManagementSettings.kt @@ -8,13 +8,13 @@ import org.opensearch.common.settings.Setting @Suppress("UtilityClassWithPublicConstructor") class SnapshotManagementSettings { - companion object { - val FILTER_BY_BACKEND_ROLES: Setting = Setting.boolSetting( - "plugins.snapshot_management.filter_by_backend_roles", - false, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) + val FILTER_BY_BACKEND_ROLES: Setting = + Setting.boolSetting( + "plugins.snapshot_management.filter_by_backend_roles", + false, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/util/RestHandlerUtils.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/util/RestHandlerUtils.kt index 0efb080de..135507f53 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/util/RestHandlerUtils.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/util/RestHandlerUtils.kt @@ -4,6 +4,7 @@ */ @file:Suppress("TopLevelPropertyNaming", "MatchingDeclarationName") + package org.opensearch.indexmanagement.snapshotmanagement.util import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy.Companion.NAME_FIELD diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/TargetIndexMappingService.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/TargetIndexMappingService.kt index 0d29502a5..0dfdaded1 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/TargetIndexMappingService.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/TargetIndexMappingService.kt @@ -9,11 +9,11 @@ import org.apache.logging.log4j.LogManager import org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse import org.opensearch.client.Client -import org.opensearch.core.common.bytes.BytesReference import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.common.bytes.BytesReference import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser @@ -63,9 +63,10 @@ object TargetIndexMappingService { suspend fun getTargetMappingsForDates(transform: Transform): Map { val sourceIndex = transform.sourceIndex try { - val result: GetMappingsResponse = client.admin().indices().suspendUntil { - getMappings(GetMappingsRequest().indices(sourceIndex), it) - } ?: error("GetMappingResponse for [$transform.sourceIndex] was null") + val result: GetMappingsResponse = + client.admin().indices().suspendUntil 
{ + getMappings(GetMappingsRequest().indices(sourceIndex), it) + } ?: error("GetMappingResponse for [$transform.sourceIndex] was null") val sourceIndexMapping = result.mappings[sourceIndex]?.sourceAsMap @@ -115,12 +116,13 @@ object TargetIndexMappingService { val byteBuffer = ByteBuffer.wrap(dynamicMappings.toByteArray(StandardCharsets.UTF_8)) val bytesReference = BytesReference.fromByteBuffer(byteBuffer) - val xcp = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - bytesReference, - XContentType.JSON - ) + val xcp = + XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + bytesReference, + XContentType.JSON, + ) loop@while (!xcp.isClosed) { val token = xcp.currentToken() val fieldName = xcp.currentName() @@ -223,7 +225,6 @@ object TargetIndexMappingService { ) { val iterator = aggBuilders.iterator() while (iterator.hasNext()) { - val aggBuilder = iterator.next() val targetIdxFieldName = aggBuilder.name val fullPath = parentPath?.plus(".")?.plus(targetIdxFieldName) ?: targetIdxFieldName @@ -246,5 +247,6 @@ object TargetIndexMappingService { mapDateAggregation(aggBuilder.subAggregations, sourceIndexMapping, targetIndexMapping, fullPath) } } + private fun isFieldInMappings(fieldName: String, mappings: Map<*, *>) = IndexUtils.getFieldFromMappings(fieldName, mappings) != null } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformIndexer.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformIndexer.kt index 904c6c6cd..d35c0be6e 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformIndexer.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformIndexer.kt @@ -19,34 +19,34 @@ import org.opensearch.action.index.IndexRequest import org.opensearch.client.Client import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.opensearchapi.retry import org.opensearch.indexmanagement.opensearchapi.suspendUntil import org.opensearch.indexmanagement.transform.exceptions.TransformIndexException import org.opensearch.indexmanagement.transform.settings.TransformSettings import org.opensearch.indexmanagement.transform.util.TransformContext -import org.opensearch.core.rest.RestStatus import org.opensearch.transport.RemoteTransportException @Suppress("ComplexMethod") class TransformIndexer( settings: Settings, private val clusterService: ClusterService, - private val client: Client + private val client: Client, ) { - private val logger = LogManager.getLogger(javaClass) @Volatile - private var backoffPolicy = BackoffPolicy.constantBackoff( - TransformSettings.TRANSFORM_JOB_INDEX_BACKOFF_MILLIS.get(settings), - TransformSettings.TRANSFORM_JOB_INDEX_BACKOFF_COUNT.get(settings) - ) + private var backoffPolicy = + BackoffPolicy.constantBackoff( + TransformSettings.TRANSFORM_JOB_INDEX_BACKOFF_MILLIS.get(settings), + TransformSettings.TRANSFORM_JOB_INDEX_BACKOFF_COUNT.get(settings), + ) init { // To update the retry policy with updated settings clusterService.clusterSettings.addSettingsUpdateConsumer( TransformSettings.TRANSFORM_JOB_INDEX_BACKOFF_MILLIS, - TransformSettings.TRANSFORM_JOB_INDEX_BACKOFF_COUNT + TransformSettings.TRANSFORM_JOB_INDEX_BACKOFF_COUNT, ) { millis, count -> backoffPolicy = BackoffPolicy.constantBackoff(millis, count) } @@ -88,9 +88,10 @@ class TransformIndexer( 
nonRetryableFailures.add(failedResponse) } } - updatableDocsToIndex = retryableFailures.map { failure -> - updatableDocsToIndex[failure.itemId] as IndexRequest - } + updatableDocsToIndex = + retryableFailures.map { failure -> + updatableDocsToIndex[failure.itemId] as IndexRequest + } if (updatableDocsToIndex.isNotEmpty()) { throw ExceptionsHelper.convertToOpenSearchException(retryableFailures.first().failure.cause) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformMetadataService.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformMetadataService.kt index e03f5001a..4618c4fcb 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformMetadataService.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformMetadataService.kt @@ -17,11 +17,11 @@ import org.opensearch.action.index.IndexRequest import org.opensearch.action.index.IndexResponse import org.opensearch.client.Client import org.opensearch.common.xcontent.LoggingDeprecationHandler -import org.opensearch.core.xcontent.NamedXContentRegistry -import org.opensearch.core.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.opensearchapi.parseWithType import org.opensearch.indexmanagement.opensearchapi.suspendUntil @@ -36,7 +36,6 @@ import java.time.Instant @SuppressWarnings("ReturnCount") class TransformMetadataService(private val client: Client, val xContentRegistry: NamedXContentRegistry) { - private val logger = LogManager.getLogger(javaClass) @Suppress("BlockingMethodInNonBlockingContext") @@ -46,12 +45,13 @@ class TransformMetadataService(private val client: Client, val xContentRegistry: val getRequest = GetRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, transform.metadataId).routing(transform.id) val response: GetResponse = client.suspendUntil { get(getRequest, it) } val metadataSource = response.sourceAsBytesRef - val transformMetadata = metadataSource?.let { - withContext(Dispatchers.IO) { - val xcp = XContentHelper.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, metadataSource, XContentType.JSON) - xcp.parseWithType(response.id, response.seqNo, response.primaryTerm, TransformMetadata.Companion::parse) + val transformMetadata = + metadataSource?.let { + withContext(Dispatchers.IO) { + val xcp = XContentHelper.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, metadataSource, XContentType.JSON) + xcp.parseWithType(response.id, response.seqNo, response.primaryTerm, TransformMetadata.Companion::parse) + } } - } // TODO: Should we attempt to create a new document instead if failed to parse, the only reason this can happen is if someone deleted // the metadata doc? 
transformMetadata ?: throw TransformMetadataException("Failed to parse the existing metadata document") @@ -65,14 +65,15 @@ class TransformMetadataService(private val client: Client, val xContentRegistry: // Including timestamp in the metadata id to prevent clashes if the job was deleted but metadata is not deleted, in that case we want to // create a clean metadata doc val id = hashToFixedSize("TransformMetadata#${transform.id}#${transform.lastUpdateTime}") - val metadata = TransformMetadata( - id = id, - transformId = transform.id, - lastUpdatedAt = Instant.now(), - status = TransformMetadata.Status.INIT, - stats = TransformStats(0, 0, 0, 0, 0), - continuousStats = if (transform.continuous) ContinuousTransformStats(null, null) else null - ) + val metadata = + TransformMetadata( + id = id, + transformId = transform.id, + lastUpdatedAt = Instant.now(), + status = TransformMetadata.Status.INIT, + stats = TransformStats(0, 0, 0, 0, 0), + continuousStats = if (transform.continuous) ContinuousTransformStats(null, null) else null, + ) return writeMetadata(metadata) } @@ -81,10 +82,11 @@ class TransformMetadataService(private val client: Client, val xContentRegistry: val errorMessage = "Failed to ${if (updating) "update" else "create"} metadata doc ${metadata.id} for transform job ${metadata.transformId}" try { val builder = metadata.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS) - val indexRequest = IndexRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX) - .source(builder) - .id(metadata.id) - .routing(metadata.transformId) + val indexRequest = + IndexRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX) + .source(builder) + .id(metadata.id) + .routing(metadata.transformId) if (updating) { indexRequest.setIfSeqNo(metadata.seqNo).setIfPrimaryTerm(metadata.primaryTerm) } else { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformProcessedBucketLog.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformProcessedBucketLog.kt index adc2489ef..23da9936e 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformProcessedBucketLog.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformProcessedBucketLog.kt @@ -9,7 +9,6 @@ import java.math.BigInteger import java.security.MessageDigest class TransformProcessedBucketLog { - companion object { const val MAX_SIZE = 100_000_000 const val HEX_RADIX = 16 @@ -38,8 +37,11 @@ class TransformProcessedBucketLog { val md5Crypt = MessageDigest.getInstance("MD5") bucket.entries.sortedBy { it.key }.onEach { entry -> md5Crypt.update( - if (entry.value == null) "null".toByteArray() - else entry.value.toString().toByteArray() + if (entry.value == null) { + "null".toByteArray() + } else { + entry.value.toString().toByteArray() + }, ) } return BigInteger(1, md5Crypt.digest()).toString(HEX_RADIX) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformRunner.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformRunner.kt index 799b49e39..8ac49dce0 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformRunner.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformRunner.kt @@ -16,8 +16,8 @@ import org.opensearch.client.Client import org.opensearch.cluster.metadata.IndexNameExpressionResolver import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings -import org.opensearch.core.xcontent.NamedXContentRegistry import 
org.opensearch.core.index.shard.ShardId +import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.indexmanagement.opensearchapi.IndexManagementSecurityContext import org.opensearch.indexmanagement.opensearchapi.suspendUntil import org.opensearch.indexmanagement.opensearchapi.withClosableContext @@ -42,7 +42,6 @@ import java.time.Instant object TransformRunner : ScheduledJobRunner, CoroutineScope by CoroutineScope(SupervisorJob() + Dispatchers.Default + CoroutineName("TransformRunner")) { - private val logger = LogManager.getLogger(javaClass) private lateinit var client: Client @@ -62,7 +61,7 @@ object TransformRunner : settings: Settings, indexNameExpressionResolver: IndexNameExpressionResolver, jvmService: JvmService, - threadPool: ThreadPool + threadPool: ThreadPool, ): TransformRunner { this.clusterService = clusterService this.client = client @@ -109,9 +108,10 @@ object TransformRunner : val transformProcessedBucketLog = TransformProcessedBucketLog() var bucketsToTransform = BucketsToTransform(HashSet(), metadata) - val transformContext = TransformContext( - TransformLockManager(transform, context) - ) + val transformContext = + TransformContext( + TransformLockManager(transform, context), + ) // Acquires the lock if there is no running job execution for the given transform; Lock is acquired per transform val transformLockManager = transformContext.transformLockManager @@ -143,21 +143,24 @@ object TransformRunner : // Note the timestamp when we got the shard global checkpoints to the user may know what data is included newGlobalCheckpointTime = Instant.now() newGlobalCheckpoints = transformSearchService.getShardsGlobalCheckpoint(transform.sourceIndex) - bucketsToTransform = bucketsToTransform.initializeShardsToSearch( - metadata.shardIDToGlobalCheckpoint, - newGlobalCheckpoints - ) + bucketsToTransform = + bucketsToTransform.initializeShardsToSearch( + metadata.shardIDToGlobalCheckpoint, + newGlobalCheckpoints, + ) } // If there are shards to search do it here if (bucketsToTransform.currentShard != null) { // Computes aggregation on modified documents for current shard to get modified buckets - bucketsToTransform = getBucketsToTransformIteration(transform, bucketsToTransform, transformContext).also { - currentMetadata = it.metadata - } + bucketsToTransform = + getBucketsToTransformIteration(transform, bucketsToTransform, transformContext).also { + currentMetadata = it.metadata + } // Filter out already processed buckets - val modifiedBuckets = bucketsToTransform.modifiedBuckets.filter { - transformProcessedBucketLog.isNotProcessed(it) - }.toMutableSet() + val modifiedBuckets = + bucketsToTransform.modifiedBuckets.filter { + transformProcessedBucketLog.isNotProcessed(it) + }.toMutableSet() // Recompute modified buckets and update them in targetIndex currentMetadata = recomputeModifiedBuckets(transform, currentMetadata, modifiedBuckets, transformContext) // Add processed buckets to 'processed set' so that we don't try to reprocess them again @@ -179,19 +182,21 @@ object TransformRunner : } while (bucketsToTransform.currentShard != null || currentMetadata.afterKey != null) } catch (e: Exception) { logger.error("Failed to execute the transform job [${transform.id}] because of exception [${e.localizedMessage}]", e) - currentMetadata = currentMetadata.copy( - lastUpdatedAt = Instant.now(), - status = TransformMetadata.Status.FAILED, - failureReason = e.localizedMessage - ) + currentMetadata = + currentMetadata.copy( + lastUpdatedAt = Instant.now(), + status = 
TransformMetadata.Status.FAILED, + failureReason = e.localizedMessage, + ) } finally { transformLockManager.lock?.let { // Update the global checkpoints only after execution finishes successfully if (transform.continuous && currentMetadata.status != TransformMetadata.Status.FAILED) { - currentMetadata = currentMetadata.copy( - shardIDToGlobalCheckpoint = newGlobalCheckpoints, - continuousStats = ContinuousTransformStats(newGlobalCheckpointTime, null) - ) + currentMetadata = + currentMetadata.copy( + shardIDToGlobalCheckpoint = newGlobalCheckpoints, + continuousStats = ContinuousTransformStats(newGlobalCheckpointTime, null), + ) } transformMetadataService.writeMetadata(currentMetadata, true) if (!transform.continuous || currentMetadata.status == TransformMetadata.Status.FAILED) { @@ -206,7 +211,7 @@ object TransformRunner : private suspend fun getBucketsToTransformIteration( transform: Transform, bucketsToTransform: BucketsToTransform, - transformContext: TransformContext + transformContext: TransformContext, ): BucketsToTransform { var currentBucketsToTransform = bucketsToTransform val currentShard = bucketsToTransform.currentShard @@ -214,49 +219,58 @@ object TransformRunner : currentBucketsToTransform.modifiedBuckets.clear() if (currentShard != null) { - val shardLevelModifiedBuckets = withTransformSecurityContext(transform) { - transformSearchService.getShardLevelModifiedBuckets( - transform, - currentBucketsToTransform.metadata.afterKey, - currentShard, - transformContext - ) - } + val shardLevelModifiedBuckets = + withTransformSecurityContext(transform) { + transformSearchService.getShardLevelModifiedBuckets( + transform, + currentBucketsToTransform.metadata.afterKey, + currentShard, + transformContext, + ) + } currentBucketsToTransform.modifiedBuckets.addAll(shardLevelModifiedBuckets.modifiedBuckets) - val mergedSearchTime = currentBucketsToTransform.metadata.stats.searchTimeInMillis + - shardLevelModifiedBuckets.searchTimeInMillis - currentBucketsToTransform = currentBucketsToTransform.copy( - metadata = currentBucketsToTransform.metadata.copy( - stats = currentBucketsToTransform.metadata.stats.copy( - pagesProcessed = currentBucketsToTransform.metadata.stats.pagesProcessed + 1, - searchTimeInMillis = mergedSearchTime + val mergedSearchTime = + currentBucketsToTransform.metadata.stats.searchTimeInMillis + + shardLevelModifiedBuckets.searchTimeInMillis + currentBucketsToTransform = + currentBucketsToTransform.copy( + metadata = + currentBucketsToTransform.metadata.copy( + stats = + currentBucketsToTransform.metadata.stats.copy( + pagesProcessed = currentBucketsToTransform.metadata.stats.pagesProcessed + 1, + searchTimeInMillis = mergedSearchTime, + ), + afterKey = shardLevelModifiedBuckets.afterKey, ), - afterKey = shardLevelModifiedBuckets.afterKey - ), - currentShard = currentShard - ) + currentShard = currentShard, + ) } // If finished with this shard, go to the next if (currentBucketsToTransform.metadata.afterKey == null) { val shardsToSearch = currentBucketsToTransform.shardsToSearch - currentBucketsToTransform = if (shardsToSearch?.hasNext() == true) { - currentBucketsToTransform.copy(currentShard = shardsToSearch.next()) - } else { - currentBucketsToTransform.copy(currentShard = null) - } + currentBucketsToTransform = + if (shardsToSearch?.hasNext() == true) { + currentBucketsToTransform.copy(currentShard = shardsToSearch.next()) + } else { + currentBucketsToTransform.copy(currentShard = null) + } } return currentBucketsToTransform } private suspend fun 
validateTransform(transform: Transform, transformMetadata: TransformMetadata): TransformMetadata { - val validationResult = withTransformSecurityContext(transform) { - transformValidator.validate(transform) - } + val validationResult = + withTransformSecurityContext(transform) { + transformValidator.validate(transform) + } return if (!validationResult.isValid) { val failureMessage = "Failed validation - ${validationResult.issues}" val failureMetadata = transformMetadata.copy(status = TransformMetadata.Status.FAILED, failureReason = failureMessage) transformMetadataService.writeMetadata(failureMetadata, true) - } else transformMetadata + } else { + transformMetadata + } } /** @@ -267,31 +281,33 @@ object TransformRunner : private suspend fun computeBucketsIteration( transform: Transform, metadata: TransformMetadata, - transformContext: TransformContext + transformContext: TransformContext, ): TransformMetadata { - - val transformSearchResult = withTransformSecurityContext(transform) { - transformSearchService.executeCompositeSearch( - transform, - metadata.afterKey, - null, - transformContext - ) - } - val indexTimeInMillis = withTransformSecurityContext(transform) { - transformIndexer.index(transform.targetIndex, transformSearchResult.docsToIndex, transformContext) - } + val transformSearchResult = + withTransformSecurityContext(transform) { + transformSearchService.executeCompositeSearch( + transform, + metadata.afterKey, + null, + transformContext, + ) + } + val indexTimeInMillis = + withTransformSecurityContext(transform) { + transformIndexer.index(transform.targetIndex, transformSearchResult.docsToIndex, transformContext) + } val afterKey = transformSearchResult.afterKey val stats = transformSearchResult.stats - val updatedStats = stats.copy( - pagesProcessed = stats.pagesProcessed, - indexTimeInMillis = stats.indexTimeInMillis + indexTimeInMillis, - documentsIndexed = transformSearchResult.docsToIndex.size.toLong() - ) + val updatedStats = + stats.copy( + pagesProcessed = stats.pagesProcessed, + indexTimeInMillis = stats.indexTimeInMillis + indexTimeInMillis, + documentsIndexed = transformSearchResult.docsToIndex.size.toLong(), + ) return metadata.mergeStats(updatedStats).copy( afterKey = afterKey, lastUpdatedAt = Instant.now(), - status = if (afterKey == null) TransformMetadata.Status.FINISHED else TransformMetadata.Status.STARTED + status = if (afterKey == null) TransformMetadata.Status.FINISHED else TransformMetadata.Status.STARTED, ) } @@ -299,26 +315,32 @@ object TransformRunner : transform: Transform, metadata: TransformMetadata, modifiedBuckets: MutableSet>, - transformContext: TransformContext + transformContext: TransformContext, ): TransformMetadata { - val updatedMetadata = if (modifiedBuckets.isNotEmpty()) { - val transformSearchResult = withTransformSecurityContext(transform) { - transformSearchService.executeCompositeSearch(transform, null, modifiedBuckets, transformContext) - } - val indexTimeInMillis = withTransformSecurityContext(transform) { - transformIndexer.index(transform.targetIndex, transformSearchResult.docsToIndex, transformContext) + val updatedMetadata = + if (modifiedBuckets.isNotEmpty()) { + val transformSearchResult = + withTransformSecurityContext(transform) { + transformSearchService.executeCompositeSearch(transform, null, modifiedBuckets, transformContext) + } + val indexTimeInMillis = + withTransformSecurityContext(transform) { + transformIndexer.index(transform.targetIndex, transformSearchResult.docsToIndex, transformContext) + } + val stats = 
transformSearchResult.stats + val updatedStats = + stats.copy( + pagesProcessed = if (transform.continuous) 0 else stats.pagesProcessed, + indexTimeInMillis = stats.indexTimeInMillis + indexTimeInMillis, + documentsIndexed = transformSearchResult.docsToIndex.size.toLong(), + ) + metadata.mergeStats(updatedStats).copy( + lastUpdatedAt = Instant.now(), + status = TransformMetadata.Status.STARTED, + ) + } else { + metadata.copy(lastUpdatedAt = Instant.now(), status = TransformMetadata.Status.STARTED) } - val stats = transformSearchResult.stats - val updatedStats = stats.copy( - pagesProcessed = if (transform.continuous) 0 else stats.pagesProcessed, - indexTimeInMillis = stats.indexTimeInMillis + indexTimeInMillis, - documentsIndexed = transformSearchResult.docsToIndex.size.toLong() - ) - metadata.mergeStats(updatedStats).copy( - lastUpdatedAt = Instant.now(), - status = TransformMetadata.Status.STARTED - ) - } else metadata.copy(lastUpdatedAt = Instant.now(), status = TransformMetadata.Status.STARTED) return updatedMetadata } @@ -327,19 +349,21 @@ object TransformRunner : } private suspend fun updateTransform(transform: Transform): Transform { - val request = IndexTransformRequest( - transform = transform.copy(updatedAt = Instant.now()), - refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE - ) + val request = + IndexTransformRequest( + transform = transform.copy(updatedAt = Instant.now()), + refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE, + ) return withClosableContext( - IndexManagementSecurityContext(transform.id, settings, threadPool.threadContext, null) + IndexManagementSecurityContext(transform.id, settings, threadPool.threadContext, null), ) { - val response: IndexTransformResponse = client.suspendUntil { - execute(IndexTransformAction.INSTANCE, request, it) - } + val response: IndexTransformResponse = + client.suspendUntil { + execute(IndexTransformAction.INSTANCE, request, it) + } return@withClosableContext transform.copy( seqNo = response.seqNo, - primaryTerm = response.primaryTerm + primaryTerm = response.primaryTerm, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformSearchService.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformSearchService.kt index 58e050645..b9944e425 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformSearchService.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformSearchService.kt @@ -8,7 +8,6 @@ package org.opensearch.indexmanagement.transform import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchSecurityException -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.indices.stats.IndicesStatsAction import org.opensearch.action.admin.indices.stats.IndicesStatsRequest import org.opensearch.action.admin.indices.stats.IndicesStatsResponse @@ -23,14 +22,16 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings import org.opensearch.common.unit.TimeValue import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.action.ActionListener import org.opensearch.core.index.Index +import org.opensearch.core.index.shard.ShardId +import org.opensearch.core.rest.RestStatus import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.ExistsQueryBuilder import org.opensearch.index.query.QueryBuilder import org.opensearch.index.query.QueryBuilders import 
org.opensearch.index.query.RangeQueryBuilder import org.opensearch.index.seqno.SequenceNumbers -import org.opensearch.core.index.shard.ShardId import org.opensearch.indexmanagement.common.model.dimension.Dimension import org.opensearch.indexmanagement.opensearchapi.retry import org.opensearch.indexmanagement.opensearchapi.suspendUntil @@ -41,14 +42,13 @@ import org.opensearch.indexmanagement.transform.model.Transform import org.opensearch.indexmanagement.transform.model.TransformSearchResult import org.opensearch.indexmanagement.transform.model.TransformStats import org.opensearch.indexmanagement.transform.opensearchapi.retryTransformSearch +import org.opensearch.indexmanagement.transform.settings.TransformSettings.Companion.MINIMUM_CANCEL_AFTER_TIME_INTERVAL_SECONDS import org.opensearch.indexmanagement.transform.settings.TransformSettings.Companion.TRANSFORM_JOB_SEARCH_BACKOFF_COUNT import org.opensearch.indexmanagement.transform.settings.TransformSettings.Companion.TRANSFORM_JOB_SEARCH_BACKOFF_MILLIS import org.opensearch.indexmanagement.transform.util.TransformContext import org.opensearch.indexmanagement.util.IndexUtils.Companion.LUCENE_MAX_CLAUSES import org.opensearch.indexmanagement.util.IndexUtils.Companion.ODFE_MAGIC_NULL import org.opensearch.indexmanagement.util.IndexUtils.Companion.hashToFixedSize -import org.opensearch.core.rest.RestStatus -import org.opensearch.indexmanagement.transform.settings.TransformSettings.Companion.MINIMUM_CANCEL_AFTER_TIME_INTERVAL_SECONDS import org.opensearch.search.aggregations.Aggregation import org.opensearch.search.aggregations.bucket.composite.CompositeAggregation import org.opensearch.search.aggregations.bucket.composite.CompositeAggregationBuilder @@ -72,9 +72,8 @@ import kotlin.math.pow class TransformSearchService( val settings: Settings, val clusterService: ClusterService, - private val client: Client + private val client: Client, ) { - private var logger = LogManager.getLogger(javaClass) @Volatile private var backoffPolicy = @@ -98,14 +97,15 @@ class TransformSearchService( try { var retryAttempt = 1 // Retry on standard retry fail statuses plus NOT_FOUND in case a shard routing entry isn't ready yet - val searchResponse: IndicesStatsResponse = backoffPolicy.retry(logger, listOf(RestStatus.NOT_FOUND)) { - val request = IndicesStatsRequest().indices(index).clear() - if (retryAttempt > 1) { - logger.debug(getShardsRetryMessage(retryAttempt)) + val searchResponse: IndicesStatsResponse = + backoffPolicy.retry(logger, listOf(RestStatus.NOT_FOUND)) { + val request = IndicesStatsRequest().indices(index).clear() + if (retryAttempt > 1) { + logger.debug(getShardsRetryMessage(retryAttempt)) + } + retryAttempt++ + client.suspendUntil { execute(IndicesStatsAction.INSTANCE, request, it) } } - retryAttempt++ - client.suspendUntil { execute(IndicesStatsAction.INSTANCE, request, it) } - } if (searchResponse.status == RestStatus.OK) { return convertIndicesStatsResponse(searchResponse) } @@ -127,32 +127,33 @@ class TransformSearchService( transform: Transform, afterKey: Map?, currentShard: ShardNewDocuments, - transformContext: TransformContext + transformContext: TransformContext, ): BucketSearchResult { try { var retryAttempt = 0 var pageSize = calculateMaxPageSize(transform) val searchStart = Instant.now().epochSecond - val searchResponse = backoffPolicy.retryTransformSearch(logger, transformContext.transformLockManager) { - val pageSizeDecay = 2f.pow(retryAttempt++) - val searchRequestTimeoutInSeconds = 
transformContext.getMaxRequestTimeoutInSeconds() - client.suspendUntil { listener: ActionListener -> - // If the previous request of the current transform job execution was successful, take the page size of previous request. - // If not, calculate the page size. - pageSize = transformContext.lastSuccessfulPageSize ?: max(1, pageSize.div(pageSizeDecay.toInt())) - if (retryAttempt > 1) { - logger.debug( - "Attempt [${retryAttempt - 1}] to get modified buckets for transform [${transform.id}]. Attempting " + - "again with reduced page size [$pageSize]" - ) + val searchResponse = + backoffPolicy.retryTransformSearch(logger, transformContext.transformLockManager) { + val pageSizeDecay = 2f.pow(retryAttempt++) + val searchRequestTimeoutInSeconds = transformContext.getMaxRequestTimeoutInSeconds() + client.suspendUntil { listener: ActionListener -> + // If the previous request of the current transform job execution was successful, take the page size of previous request. + // If not, calculate the page size. + pageSize = transformContext.lastSuccessfulPageSize ?: max(1, pageSize.div(pageSizeDecay.toInt())) + if (retryAttempt > 1) { + logger.debug( + "Attempt [${retryAttempt - 1}] to get modified buckets for transform [${transform.id}]. Attempting " + + "again with reduced page size [$pageSize]", + ) + } + if (searchRequestTimeoutInSeconds == null) { + return@suspendUntil + } + val request = getShardLevelBucketsSearchRequest(transform, afterKey, pageSize, currentShard, searchRequestTimeoutInSeconds) + search(request, listener) } - if (searchRequestTimeoutInSeconds == null) { - return@suspendUntil - } - val request = getShardLevelBucketsSearchRequest(transform, afterKey, pageSize, currentShard, searchRequestTimeoutInSeconds) - search(request, listener) } - } // If the request was successful, update page size transformContext.lastSuccessfulPageSize = pageSize transformContext.renewLockForLongSearch(Instant.now().epochSecond - searchStart) @@ -182,39 +183,41 @@ class TransformSearchService( transform: Transform, afterKey: Map? = null, modifiedBuckets: MutableSet>? = null, - transformContext: TransformContext + transformContext: TransformContext, ): TransformSearchResult { try { var pageSize: Int = - if (modifiedBuckets.isNullOrEmpty()) + if (modifiedBuckets.isNullOrEmpty()) { transform.pageSize - else + } else { modifiedBuckets.size + } var retryAttempt = 0 val searchStart = Instant.now().epochSecond - val searchResponse = backoffPolicy.retryTransformSearch(logger, transformContext.transformLockManager) { - val pageSizeDecay = 2f.pow(retryAttempt++) + val searchResponse = + backoffPolicy.retryTransformSearch(logger, transformContext.transformLockManager) { + val pageSizeDecay = 2f.pow(retryAttempt++) - var searchRequestTimeoutInSeconds = transformContext.getMaxRequestTimeoutInSeconds() - if (searchRequestTimeoutInSeconds == null) { - searchRequestTimeoutInSeconds = getCancelAfterTimeIntervalSeconds(cancelAfterTimeInterval.seconds) - } + var searchRequestTimeoutInSeconds = transformContext.getMaxRequestTimeoutInSeconds() + if (searchRequestTimeoutInSeconds == null) { + searchRequestTimeoutInSeconds = getCancelAfterTimeIntervalSeconds(cancelAfterTimeInterval.seconds) + } - client.suspendUntil { listener: ActionListener -> - // If the previous request of the current transform job execution was successful, take the page size of previous request. - // If not, calculate the page size. 
- pageSize = transformContext.lastSuccessfulPageSize ?: max(1, pageSize.div(pageSizeDecay.toInt())) - if (retryAttempt > 1) { - logger.debug( - "Attempt [${retryAttempt - 1}] of composite search failed for transform [${transform.id}]. Attempting " + - "again with reduced page size [$pageSize]" - ) + client.suspendUntil { listener: ActionListener -> + // If the previous request of the current transform job execution was successful, take the page size of previous request. + // If not, calculate the page size. + pageSize = transformContext.lastSuccessfulPageSize ?: max(1, pageSize.div(pageSizeDecay.toInt())) + if (retryAttempt > 1) { + logger.debug( + "Attempt [${retryAttempt - 1}] of composite search failed for transform [${transform.id}]. Attempting " + + "again with reduced page size [$pageSize]", + ) + } + val request = getSearchServiceRequest(transform, afterKey, pageSize, modifiedBuckets, searchRequestTimeoutInSeconds) + search(request, listener) } - val request = getSearchServiceRequest(transform, afterKey, pageSize, modifiedBuckets, searchRequestTimeoutInSeconds) - search(request, listener) } - } // If the request was successful, update page size transformContext.lastSuccessfulPageSize = pageSize transformContext.renewLockForLongSearch(Instant.now().epochSecond - searchStart) @@ -222,7 +225,7 @@ class TransformSearchService( transform, searchResponse, modifiedBuckets = modifiedBuckets, - targetIndexDateFieldMappings = transformContext.getTargetIndexDateFieldMappings() + targetIndexDateFieldMappings = transformContext.getTargetIndexDateFieldMappings(), ) } catch (e: TransformSearchServiceException) { throw e @@ -250,7 +253,9 @@ class TransformSearchService( const val failedSearchErrorMessage = "Failed to search data in source indices" const val modifiedBucketsErrorMessage = "Failed to get the modified buckets in source indices" const val getShardsErrorMessage = "Failed to get the shards in the source indices" + private fun getShardsRetryMessage(attemptNumber: Int) = "Attempt [$attemptNumber] to get shard global checkpoint numbers" + private fun noTransformGroupErrorMessage(bucketField: String) = "Failed to find a transform group matching the bucket field [$bucketField]" fun getSearchServiceRequest( @@ -258,34 +263,37 @@ class TransformSearchService( afterKey: Map? = null, pageSize: Int, modifiedBuckets: MutableSet>? = null, - timeoutInSeconds: Long? = null + timeoutInSeconds: Long? 
= null, ): SearchRequest { val sources = mutableListOf>() transform.groups.forEach { group -> sources.add(group.toSourceBuilder().missingBucket(true)) } - val aggregationBuilder = CompositeAggregationBuilder(transform.id, sources) - .size(pageSize) - .subAggregations(transform.aggregations) - .apply { afterKey?.let { this.aggregateAfter(it) } } - val query = if (modifiedBuckets == null) { - transform.dataSelectionQuery - } else { - getQueryWithModifiedBuckets(transform.dataSelectionQuery, modifiedBuckets, transform.groups) - } + val aggregationBuilder = + CompositeAggregationBuilder(transform.id, sources) + .size(pageSize) + .subAggregations(transform.aggregations) + .apply { afterKey?.let { this.aggregateAfter(it) } } + val query = + if (modifiedBuckets == null) { + transform.dataSelectionQuery + } else { + getQueryWithModifiedBuckets(transform.dataSelectionQuery, modifiedBuckets, transform.groups) + } return getSearchServiceRequest(transform.sourceIndex, query, aggregationBuilder, timeoutInSeconds) } private fun getQueryWithModifiedBuckets( originalQuery: QueryBuilder, modifiedBuckets: MutableSet>, - groups: List + groups: List, ): QueryBuilder { val query: BoolQueryBuilder = QueryBuilders.boolQuery().must(originalQuery).minimumShouldMatch(1) modifiedBuckets.forEach { bucket -> val bucketQuery: BoolQueryBuilder = QueryBuilders.boolQuery() bucket.forEach { group -> // There should be a transform grouping for each bucket key, if not then throw an error - val transformGroup = groups.find { it.targetField == group.key } - ?: throw TransformSearchServiceException(noTransformGroupErrorMessage(group.key)) + val transformGroup = + groups.find { it.targetField == group.key } + ?: throw TransformSearchServiceException(noTransformGroupErrorMessage(group.key)) if (group.value as Any? == null) { val subQuery = ExistsQueryBuilder(transformGroup.sourceField) bucketQuery.mustNot(subQuery) @@ -315,16 +323,18 @@ class TransformSearchService( index: String, query: QueryBuilder, aggregationBuilder: CompositeAggregationBuilder, - timeoutInSeconds: Long? = null + timeoutInSeconds: Long? = null, ): SearchRequest { - val searchSourceBuilder = SearchSourceBuilder() - .trackTotalHits(false) - .size(0) - .aggregation(aggregationBuilder) - .query(query) - val request = SearchRequest(index) - .source(searchSourceBuilder) - .allowPartialSearchResults(false) + val searchSourceBuilder = + SearchSourceBuilder() + .trackTotalHits(false) + .size(0) + .aggregation(aggregationBuilder) + .query(query) + val request = + SearchRequest(index) + .source(searchSourceBuilder) + .allowPartialSearchResults(false) // The time after which the search request will be canceled. // Request-level parameter takes precedence over cancel_after_time_interval cluster setting. Default is -1. request.cancelAfterTimeInterval = timeoutInSeconds?.let { TimeValue(timeoutInSeconds, TimeUnit.SECONDS) } @@ -336,14 +346,15 @@ class TransformSearchService( afterKey: Map? = null, pageSize: Int, currentShard: ShardNewDocuments, - timeoutInSeconds: Long? 
+ timeoutInSeconds: Long?, ): SearchRequest { val rangeQuery = getSeqNoRangeQuery(currentShard.from, currentShard.to) val query = QueryBuilders.boolQuery().filter(rangeQuery).must(transform.dataSelectionQuery) val sources = transform.groups.map { it.toSourceBuilder().missingBucket(true) } - val aggregationBuilder = CompositeAggregationBuilder(transform.id, sources) - .size(pageSize) - .apply { afterKey?.let { this.aggregateAfter(it) } } + val aggregationBuilder = + CompositeAggregationBuilder(transform.id, sources) + .size(pageSize) + .apply { afterKey?.let { this.aggregateAfter(it) } } return getSearchServiceRequest(currentShard.shardId.indexName, query, aggregationBuilder, timeoutInSeconds) .preference("_shards:" + currentShard.shardId.id.toString()) } @@ -383,9 +394,10 @@ class TransformSearchService( document[aggregation.name] = getAggregationValue(aggregation, targetIndexDateFieldMappings) } - val indexRequest = IndexRequest(transform.targetIndex) - .id(hashedId) - .source(document, XContentType.JSON) + val indexRequest = + IndexRequest(transform.targetIndex) + .id(hashedId) + .source(document, XContentType.JSON) docsToIndex.add(indexRequest) } @@ -395,7 +407,7 @@ class TransformSearchService( // Gathers and returns from the bucket search response the modified buckets from the query, the afterkey, and the search time private fun convertBucketSearchResponse( transform: Transform, - searchResponse: SearchResponse + searchResponse: SearchResponse, ): BucketSearchResult { val aggs = searchResponse.aggregations.get(transform.id) as CompositeAggregation val modifiedBuckets = aggs.buckets.map { it.key }.toMutableSet() @@ -428,7 +440,7 @@ class TransformSearchService( aggregation.aggregation() } else -> throw TransformSearchServiceException( - "Found aggregation [${aggregation.name}] of type [${aggregation.type}] in composite result that is not currently supported" + "Found aggregation [${aggregation.name}] of type [${aggregation.type}] in composite result that is not currently supported", ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformValidator.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformValidator.kt index a7ee2586e..a94748c27 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformValidator.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformValidator.kt @@ -31,10 +31,10 @@ class TransformValidator( private val clusterService: ClusterService, private val client: Client, val settings: Settings, - private val jvmService: JvmService + private val jvmService: JvmService, ) { - @Volatile private var circuitBreakerEnabled = TransformSettings.TRANSFORM_CIRCUIT_BREAKER_ENABLED.get(settings) + @Volatile private var circuitBreakerJvmThreshold = TransformSettings.TRANSFORM_CIRCUIT_BREAKER_JVM_THRESHOLD.get(settings) init { @@ -45,6 +45,7 @@ class TransformValidator( circuitBreakerJvmThreshold = it } } + /** * // TODO: When FGAC is supported in transform should check the user has the correct permissions * Validates the provided transform. 
Validation checks include the following: @@ -63,9 +64,10 @@ class TransformValidator( indexNameExpressionResolver.concreteIndexNames(clusterService.state(), IndicesOptions.lenientExpand(), true, transform.sourceIndex) if (concreteIndices.isEmpty()) return TransformValidationResult(false, listOf("No specified source index exist in the cluster")) - val request = ClusterHealthRequest() - .indices(*concreteIndices) - .waitForYellowStatus() + val request = + ClusterHealthRequest() + .indices(*concreteIndices) + .waitForYellowStatus() val response: ClusterHealthResponse = client.suspendUntil { execute(ClusterHealthAction.INSTANCE, request, it) } if (response.isTimedOut) { issues.add("Cannot determine that the requested source indices are healthy") diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/delete/DeleteTransformsRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/delete/DeleteTransformsRequest.kt index 9f28d15cc..b7c9297c3 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/delete/DeleteTransformsRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/delete/DeleteTransformsRequest.kt @@ -14,13 +14,12 @@ import java.io.IOException class DeleteTransformsRequest( val ids: List, - val force: Boolean + val force: Boolean, ) : ActionRequest() { - @Throws(IOException::class) constructor(sin: StreamInput) : this( ids = sin.readStringList(), - force = sin.readBoolean() + force = sin.readBoolean(), ) override fun validate(): ActionRequestValidationException? { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/delete/TransportDeleteTransformsAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/delete/TransportDeleteTransformsAction.kt index 30f3d64d5..1848f7ac8 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/delete/TransportDeleteTransformsAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/delete/TransportDeleteTransformsAction.kt @@ -7,7 +7,6 @@ package org.opensearch.indexmanagement.transform.action.delete import org.apache.logging.log4j.LogManager import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.bulk.BulkRequest import org.opensearch.action.bulk.BulkResponse import org.opensearch.action.delete.DeleteRequest @@ -20,33 +19,36 @@ import org.opensearch.client.Client import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.commons.ConfigConstants import org.opensearch.commons.authuser.User +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.opensearchapi.parseFromGetResponse import org.opensearch.indexmanagement.settings.IndexManagementSettings import org.opensearch.indexmanagement.transform.model.Transform import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser import org.opensearch.indexmanagement.util.SecurityUtils.Companion.userHasPermissionForResource -import org.opensearch.core.rest.RestStatus import org.opensearch.search.fetch.subphase.FetchSourceContext import org.opensearch.tasks.Task import 
org.opensearch.transport.TransportService @Suppress("ReturnCount") -class TransportDeleteTransformsAction @Inject constructor( +class TransportDeleteTransformsAction +@Inject +constructor( transportService: TransportService, val client: Client, val settings: Settings, val clusterService: ClusterService, val xContentRegistry: NamedXContentRegistry, - actionFilters: ActionFilters + actionFilters: ActionFilters, ) : HandledTransportAction( - DeleteTransformsAction.NAME, transportService, actionFilters, ::DeleteTransformsRequest + DeleteTransformsAction.NAME, transportService, actionFilters, ::DeleteTransformsRequest, ) { - private val log = LogManager.getLogger(javaClass) + @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) init { @@ -64,14 +66,13 @@ class TransportDeleteTransformsAction @Inject constructor( val client: Client, val request: DeleteTransformsRequest, val actionListener: ActionListener, - val user: User? = buildUser(client.threadPool().threadContext) + val user: User? = buildUser(client.threadPool().threadContext), ) { - fun start() { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) // Use Multi-Get Request val getRequest = MultiGetRequest() @@ -90,8 +91,8 @@ class TransportDeleteTransformsAction @Inject constructor( if (response.responses.first().isFailed) { actionListener.onFailure( OpenSearchStatusException( - "Cluster missing system index $INDEX_MANAGEMENT_INDEX, cannot execute the request", RestStatus.BAD_REQUEST - ) + "Cluster missing system index $INDEX_MANAGEMENT_INDEX, cannot execute the request", RestStatus.BAD_REQUEST, + ), ) return } @@ -103,7 +104,7 @@ class TransportDeleteTransformsAction @Inject constructor( } override fun onFailure(e: Exception) = actionListener.onFailure(e) - } + }, ) } } @@ -135,8 +136,8 @@ class TransportDeleteTransformsAction @Inject constructor( if (noPermission.isNotEmpty()) { actionListener.onFailure( OpenSearchStatusException( - "Don't have permission to delete some/all transforms in [${request.ids}]", RestStatus.FORBIDDEN - ) + "Don't have permission to delete some/all transforms in [${request.ids}]", RestStatus.FORBIDDEN, + ), ) return } @@ -144,8 +145,8 @@ class TransportDeleteTransformsAction @Inject constructor( if (notTransform.isNotEmpty()) { actionListener.onFailure( OpenSearchStatusException( - "Cannot find transforms $notTransform", RestStatus.BAD_REQUEST - ) + "Cannot find transforms $notTransform", RestStatus.BAD_REQUEST, + ), ) return } @@ -153,8 +154,8 @@ class TransportDeleteTransformsAction @Inject constructor( if (enabledIDs.isNotEmpty()) { actionListener.onFailure( OpenSearchStatusException( - "$enabledIDs transform(s) are enabled, please disable them before deleting them or set force flag", RestStatus.CONFLICT - ) + "$enabledIDs transform(s) are enabled, please disable them before deleting them or set force flag", RestStatus.CONFLICT, + ), ) return } @@ -173,7 +174,7 @@ class TransportDeleteTransformsAction @Inject constructor( } override fun onFailure(e: Exception) = actionListener.onFailure(e) - } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/explain/ExplainTransformRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/explain/ExplainTransformRequest.kt index 7ad468f46..1f4536644 100644 --- 
a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/explain/ExplainTransformRequest.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/explain/ExplainTransformRequest.kt
@@ -13,7 +13,6 @@ import org.opensearch.core.common.io.stream.StreamOutput
 import java.io.IOException

 class ExplainTransformRequest(val transformIDs: List<String>) : ActionRequest() {
-
     @Throws(IOException::class)
     constructor(sin: StreamInput) : this(transformIDs = sin.readStringArray().toList())

diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/explain/ExplainTransformResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/explain/ExplainTransformResponse.kt
index 1035863fb..7e9e0c7be 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/explain/ExplainTransformResponse.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/explain/ExplainTransformResponse.kt
@@ -16,16 +16,16 @@ import java.io.IOException

 class ExplainTransformResponse(
     val idsToExplain: Map<String, ExplainTransform?>,
-    private val failedToExplain: Map<String, String>
+    private val failedToExplain: Map<String, String>,
 ) : ActionResponse(), ToXContentObject {
-
     internal fun getIdsToExplain(): Map<String, ExplainTransform?> {
         return this.idsToExplain
     }

     @Throws(IOException::class)
     constructor(sin: StreamInput) : this(
-        idsToExplain = sin.let {
+        idsToExplain =
+            sin.let {
             val idsToExplain = mutableMapOf<String, ExplainTransform?>()
             val size = it.readVInt()
             repeat(size) { _ ->
@@ -33,7 +33,7 @@ class ExplainTransformResponse(
             }
             idsToExplain.toMap()
         },
-        failedToExplain = sin.readMap({ it.readString() }, { it.readString() })
+        failedToExplain = sin.readMap({ it.readString() }, { it.readString() }),
     )

     @Throws(IOException::class)
@@ -47,7 +47,7 @@ class ExplainTransformResponse(
         out.writeMap(
             failedToExplain,
             { writer, value: String -> writer.writeString(value) },
-            { writer, value: String -> writer.writeString(value) }
+            { writer, value: String -> writer.writeString(value) },
         )
     }

diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/explain/TransportExplainTransformAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/explain/TransportExplainTransformAction.kt
index 591b348c9..cae1b5880 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/explain/TransportExplainTransformAction.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/explain/TransportExplainTransformAction.kt
@@ -11,22 +11,22 @@ import kotlinx.coroutines.launch
 import org.apache.logging.log4j.LogManager
 import org.opensearch.ExceptionsHelper
 import org.opensearch.ResourceNotFoundException
-import org.opensearch.core.action.ActionListener
 import org.opensearch.action.search.SearchRequest
 import org.opensearch.action.search.SearchResponse
 import org.opensearch.action.support.ActionFilters
 import org.opensearch.action.support.HandledTransportAction
 import org.opensearch.client.Client
 import org.opensearch.cluster.service.ClusterService
-import org.opensearch.core.common.bytes.BytesReference
 import org.opensearch.common.inject.Inject
 import org.opensearch.common.settings.Settings
 import org.opensearch.common.xcontent.LoggingDeprecationHandler
-import org.opensearch.core.xcontent.NamedXContentRegistry
 import org.opensearch.common.xcontent.XContentHelper
-import org.opensearch.core.xcontent.XContentParser
 import org.opensearch.common.xcontent.XContentType
 import org.opensearch.commons.ConfigConstants
+import org.opensearch.core.action.ActionListener
+import org.opensearch.core.common.bytes.BytesReference
+import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.XContentParser import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.IdsQueryBuilder import org.opensearch.index.query.WildcardQueryBuilder @@ -43,17 +43,18 @@ import org.opensearch.tasks.Task import org.opensearch.transport.RemoteTransportException import org.opensearch.transport.TransportService -class TransportExplainTransformAction @Inject constructor( +class TransportExplainTransformAction +@Inject +constructor( transportService: TransportService, val client: Client, actionFilters: ActionFilters, val clusterService: ClusterService, val settings: Settings, - val xContentRegistry: NamedXContentRegistry + val xContentRegistry: NamedXContentRegistry, ) : HandledTransportAction( - ExplainTransformAction.NAME, transportService, actionFilters, ::ExplainTransformRequest + ExplainTransformAction.NAME, transportService, actionFilters, ::ExplainTransformRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) init { @@ -68,19 +69,21 @@ class TransportExplainTransformAction @Inject constructor( override fun doExecute(task: Task, request: ExplainTransformRequest, actionListener: ActionListener) { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) val ids = request.transformIDs // Instantiate concrete ids to metadata map by removing wildcard matches - val idsToExplain: MutableMap = ids.filter { !it.contains("*") } - .map { it to null }.toMap(mutableMapOf()) + val idsToExplain: MutableMap = + ids.filter { !it.contains("*") } + .map { it to null }.toMap(mutableMapOf()) val failedToExplain: MutableMap = mutableMapOf() - val queryBuilder = BoolQueryBuilder().minimumShouldMatch(1).apply { - ids.forEach { - this.should(WildcardQueryBuilder("${ Transform.TRANSFORM_TYPE}.${Transform.TRANSFORM_ID_FIELD}.keyword", "*$it*")) + val queryBuilder = + BoolQueryBuilder().minimumShouldMatch(1).apply { + ids.forEach { + this.should(WildcardQueryBuilder("${ Transform.TRANSFORM_TYPE}.${Transform.TRANSFORM_ID_FIELD}.keyword", "*$it*")) + } } - } val user = buildUser(client.threadPool().threadContext) addUserFilter(user, queryBuilder, filterByEnabled, "transform.user") @@ -105,8 +108,9 @@ class TransportExplainTransformAction @Inject constructor( } val metadataIds = idsToExplain.values.mapNotNull { it?.metadataID } - val metadataSearchRequest = SearchRequest(INDEX_MANAGEMENT_INDEX) - .source(SearchSourceBuilder().query(IdsQueryBuilder().addIds(*metadataIds.toTypedArray()))) + val metadataSearchRequest = + SearchRequest(INDEX_MANAGEMENT_INDEX) + .source(SearchSourceBuilder().query(IdsQueryBuilder().addIds(*metadataIds.toTypedArray()))) client.search( metadataSearchRequest, object : ActionListener { @@ -114,8 +118,9 @@ class TransportExplainTransformAction @Inject constructor( CoroutineScope(Dispatchers.IO).launch { response.hits.hits.forEach { try { - val metadata = contentParser(it.sourceRef) - .parseWithType(it.id, it.seqNo, it.primaryTerm, TransformMetadata.Companion::parse) + val metadata = + contentParser(it.sourceRef) + .parseWithType(it.id, it.seqNo, it.primaryTerm, TransformMetadata.Companion::parse) val transform = metadataIdToTransform[metadata.id] // Only add continuous stats for continuous transforms which have not failed @@ -157,15 
+162,16 @@ class TransportExplainTransformAction @Inject constructor( } else { idsToExplain.computeIfPresent(metadata.transformId) { _, explainTransform -> explainTransform.copy( - metadata = metadata.copy( + metadata = + metadata.copy( shardIDToGlobalCheckpoint = null, - continuousStats = continuousStats - ) + continuousStats = continuousStats, + ), ) } } } - } + }, ) } @@ -181,7 +187,7 @@ class TransportExplainTransformAction @Inject constructor( else -> actionListener.onFailure(e) } } - } + }, ) } } @@ -189,7 +195,7 @@ class TransportExplainTransformAction @Inject constructor( private fun contentParser(bytesReference: BytesReference): XContentParser { return XContentHelper.createParser( xContentRegistry, - LoggingDeprecationHandler.INSTANCE, bytesReference, XContentType.JSON + LoggingDeprecationHandler.INSTANCE, bytesReference, XContentType.JSON, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/GetTransformRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/GetTransformRequest.kt index 80b7f2157..d2fc92bfd 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/GetTransformRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/GetTransformRequest.kt @@ -16,14 +16,13 @@ import java.io.IOException class GetTransformRequest( val id: String, val srcContext: FetchSourceContext? = null, - val preference: String? = null + val preference: String? = null, ) : ActionRequest() { - @Throws(IOException::class) constructor(sin: StreamInput) : this( id = sin.readString(), srcContext = if (sin.readBoolean()) FetchSourceContext(sin) else null, - preference = sin.readOptionalString() + preference = sin.readOptionalString(), ) override fun validate(): ActionRequestValidationException? { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/GetTransformResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/GetTransformResponse.kt index 27adaf5d3..e1454b154 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/GetTransformResponse.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/GetTransformResponse.kt @@ -8,6 +8,7 @@ package org.opensearch.indexmanagement.transform.action.get import org.opensearch.core.action.ActionResponse import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentObject import org.opensearch.core.xcontent.XContentBuilder @@ -18,7 +19,6 @@ import org.opensearch.indexmanagement.util._ID import org.opensearch.indexmanagement.util._PRIMARY_TERM import org.opensearch.indexmanagement.util._SEQ_NO import org.opensearch.indexmanagement.util._VERSION -import org.opensearch.core.rest.RestStatus import java.io.IOException class GetTransformResponse( @@ -27,9 +27,8 @@ class GetTransformResponse( val seqNo: Long, val primaryTerm: Long, val status: RestStatus, - val transform: Transform? 
+    val transform: Transform?,
 ) : ActionResponse(), ToXContentObject {
-
     @Throws(IOException::class)
     constructor(sin: StreamInput) : this(
         id = sin.readString(),
@@ -37,7 +36,7 @@ class GetTransformResponse(
         seqNo = sin.readLong(),
         primaryTerm = sin.readLong(),
         status = sin.readEnum(RestStatus::class.java),
-        transform = if (sin.readBoolean()) Transform(sin) else null
+        transform = if (sin.readBoolean()) Transform(sin) else null,
     )

     override fun writeTo(out: StreamOutput) {
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/GetTransformsRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/GetTransformsRequest.kt
index cfdaa8375..6a8aeaaeb 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/GetTransformsRequest.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/GetTransformsRequest.kt
@@ -17,16 +17,15 @@ class GetTransformsRequest(
     val from: Int = DEFAULT_FROM,
     val size: Int = DEFAULT_SIZE,
     val sortField: String = DEFAULT_SORT_FIELD,
-    val sortDirection: String = DEFAULT_SORT_DIRECTION
+    val sortDirection: String = DEFAULT_SORT_DIRECTION,
 ) : ActionRequest() {
-
     @Throws(IOException::class)
     constructor(sin: StreamInput) : this(
         searchString = sin.readString(),
         from = sin.readInt(),
         size = sin.readInt(),
         sortField = sin.readString(),
-        sortDirection = sin.readString()
+        sortDirection = sin.readString(),
     )

     override fun validate(): ActionRequestValidationException? = null
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/GetTransformsResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/GetTransformsResponse.kt
index bdef75c1d..ef3ec8cb0 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/GetTransformsResponse.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/GetTransformsResponse.kt
@@ -8,6 +8,7 @@ package org.opensearch.indexmanagement.transform.action.get
 import org.opensearch.core.action.ActionResponse
 import org.opensearch.core.common.io.stream.StreamInput
 import org.opensearch.core.common.io.stream.StreamOutput
+import org.opensearch.core.rest.RestStatus
 import org.opensearch.core.xcontent.ToXContent
 import org.opensearch.core.xcontent.ToXContentObject
 import org.opensearch.core.xcontent.XContentBuilder
@@ -17,20 +18,18 @@ import org.opensearch.indexmanagement.transform.model.Transform.Companion.TRANSF
 import org.opensearch.indexmanagement.util._ID
 import org.opensearch.indexmanagement.util._PRIMARY_TERM
 import org.opensearch.indexmanagement.util._SEQ_NO
-import org.opensearch.core.rest.RestStatus
 import java.io.IOException

 class GetTransformsResponse(
     val transforms: List<Transform>,
     val totalTransforms: Int,
-    val status: RestStatus
+    val status: RestStatus,
 ) : ActionResponse(), ToXContentObject {
-
     @Throws(IOException::class)
     constructor(sin: StreamInput) : this(
         transforms = sin.readList(::Transform),
         totalTransforms = sin.readInt(),
-        status = sin.readEnum(RestStatus::class.java)
+        status = sin.readEnum(RestStatus::class.java),
     )

     override fun writeTo(out: StreamOutput) {
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/TransportGetTransformAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/TransportGetTransformAction.kt
index 6808f58df..d0c4590ca 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/TransportGetTransformAction.kt
+++ 
b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/TransportGetTransformAction.kt @@ -8,7 +8,6 @@ package org.opensearch.indexmanagement.transform.action.get import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.get.GetRequest import org.opensearch.action.get.GetResponse import org.opensearch.action.support.ActionFilters @@ -17,29 +16,31 @@ import org.opensearch.client.Client import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.commons.ConfigConstants +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.opensearchapi.parseFromGetResponse import org.opensearch.indexmanagement.settings.IndexManagementSettings import org.opensearch.indexmanagement.transform.model.Transform import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser import org.opensearch.indexmanagement.util.SecurityUtils.Companion.userHasPermissionForResource -import org.opensearch.core.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService -class TransportGetTransformAction @Inject constructor( +class TransportGetTransformAction +@Inject +constructor( transportService: TransportService, val client: Client, val settings: Settings, val clusterService: ClusterService, actionFilters: ActionFilters, - val xContentRegistry: NamedXContentRegistry + val xContentRegistry: NamedXContentRegistry, ) : HandledTransportAction ( - GetTransformAction.NAME, transportService, actionFilters, ::GetTransformRequest + GetTransformAction.NAME, transportService, actionFilters, ::GetTransformRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) private val log = LogManager.getLogger(javaClass) @@ -53,8 +54,8 @@ class TransportGetTransformAction @Inject constructor( override fun doExecute(task: Task, request: GetTransformRequest, listener: ActionListener) { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) val user = buildUser(client.threadPool().threadContext) val getRequest = GetRequest(INDEX_MANAGEMENT_INDEX, request.id).preference(request.preference) @@ -81,19 +82,20 @@ class TransportGetTransformAction @Inject constructor( } // if HEAD request don't return the transform - val transformResponse = if (request.srcContext != null && !request.srcContext.fetchSource()) { - GetTransformResponse(response.id, response.version, response.seqNo, response.primaryTerm, RestStatus.OK, null) - } else { - GetTransformResponse(response.id, response.version, response.seqNo, response.primaryTerm, RestStatus.OK, transform) - } + val transformResponse = + if (request.srcContext != null && !request.srcContext.fetchSource()) { + GetTransformResponse(response.id, response.version, response.seqNo, response.primaryTerm, RestStatus.OK, null) + } else { + GetTransformResponse(response.id, 
response.version, response.seqNo, response.primaryTerm, RestStatus.OK, transform) + } listener.onResponse(transformResponse) } catch (e: Exception) { listener.onFailure( OpenSearchStatusException( "Failed to parse transform", RestStatus.INTERNAL_SERVER_ERROR, - ExceptionsHelper.unwrapCause(e) - ) + ExceptionsHelper.unwrapCause(e), + ), ) } } @@ -101,7 +103,7 @@ class TransportGetTransformAction @Inject constructor( override fun onFailure(e: Exception) { listener.onFailure(e) } - } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/TransportGetTransformsAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/TransportGetTransformsAction.kt index 97ee0512a..f0fc78611 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/TransportGetTransformsAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/get/TransportGetTransformsAction.kt @@ -6,21 +6,21 @@ package org.opensearch.indexmanagement.transform.action.get import org.apache.logging.log4j.LogManager -import org.opensearch.core.action.ActionListener -import org.opensearch.core.action.ActionResponse import org.opensearch.action.support.ActionFilters import org.opensearch.action.support.HandledTransportAction import org.opensearch.client.Client import org.opensearch.cluster.service.ClusterService -import org.opensearch.core.common.bytes.BytesReference import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.LoggingDeprecationHandler -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentHelper -import org.opensearch.core.xcontent.XContentParser import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.ConfigConstants +import org.opensearch.core.action.ActionListener +import org.opensearch.core.action.ActionResponse +import org.opensearch.core.common.bytes.BytesReference +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.XContentParser import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.ExistsQueryBuilder import org.opensearch.index.query.WildcardQueryBuilder @@ -34,17 +34,18 @@ import org.opensearch.search.sort.SortOrder import org.opensearch.tasks.Task import org.opensearch.transport.TransportService -class TransportGetTransformsAction @Inject constructor( +class TransportGetTransformsAction +@Inject +constructor( transportService: TransportService, val client: Client, val settings: Settings, val clusterService: ClusterService, actionFilters: ActionFilters, - val xContentRegistry: NamedXContentRegistry + val xContentRegistry: NamedXContentRegistry, ) : HandledTransportAction ( - GetTransformsAction.NAME, transportService, actionFilters, ::GetTransformsRequest + GetTransformsAction.NAME, transportService, actionFilters, ::GetTransformsRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) private val log = LogManager.getLogger(javaClass) @@ -57,8 +58,8 @@ class TransportGetTransformsAction @Inject constructor( override fun doExecute(task: Task, request: GetTransformsRequest, listener: ActionListener) { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) val 
searchString = request.searchString.trim() val from = request.from @@ -72,8 +73,9 @@ class TransportGetTransformsAction @Inject constructor( } val user = buildUser(client.threadPool().threadContext) addUserFilter(user, boolQueryBuilder, filterByEnabled, "transform.user") - val searchSourceBuilder = SearchSourceBuilder().query(boolQueryBuilder).from(from).size(size).seqNoAndPrimaryTerm(true) - .sort(sortField, SortOrder.fromString(sortDirection)) + val searchSourceBuilder = + SearchSourceBuilder().query(boolQueryBuilder).from(from).size(size).seqNoAndPrimaryTerm(true) + .sort(sortField, SortOrder.fromString(sortDirection)) client.threadPool().threadContext.stashContext().use { @Suppress("UNCHECKED_CAST") @@ -82,7 +84,7 @@ class TransportGetTransformsAction @Inject constructor( searchSourceBuilder, listener as ActionListener, Transform.TRANSFORM_TYPE, - ::contentParser + ::contentParser, ) } } @@ -90,7 +92,7 @@ class TransportGetTransformsAction @Inject constructor( private fun contentParser(bytesReference: BytesReference): XContentParser { return XContentHelper.createParser( xContentRegistry, - LoggingDeprecationHandler.INSTANCE, bytesReference, XContentType.JSON + LoggingDeprecationHandler.INSTANCE, bytesReference, XContentType.JSON, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/index/IndexTransformRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/index/IndexTransformRequest.kt index d0e26dc95..efa3480d7 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/index/IndexTransformRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/index/IndexTransformRequest.kt @@ -27,7 +27,7 @@ class IndexTransformRequest : IndexRequest { constructor( transform: Transform, - refreshPolicy: WriteRequest.RefreshPolicy + refreshPolicy: WriteRequest.RefreshPolicy, ) { this.transform = transform if (transform.seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO || transform.primaryTerm == SequenceNumbers.UNASSIGNED_PRIMARY_TERM) { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/index/IndexTransformResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/index/IndexTransformResponse.kt index f6d9fd44d..0ace0968f 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/index/IndexTransformResponse.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/index/IndexTransformResponse.kt @@ -8,6 +8,7 @@ package org.opensearch.indexmanagement.transform.action.index import org.opensearch.core.action.ActionResponse import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentObject import org.opensearch.core.xcontent.XContentBuilder @@ -18,7 +19,6 @@ import org.opensearch.indexmanagement.util._ID import org.opensearch.indexmanagement.util._PRIMARY_TERM import org.opensearch.indexmanagement.util._SEQ_NO import org.opensearch.indexmanagement.util._VERSION -import org.opensearch.core.rest.RestStatus import java.io.IOException class IndexTransformResponse( @@ -27,9 +27,8 @@ class IndexTransformResponse( val seqNo: Long, val primaryTerm: Long, val status: RestStatus, - val transform: Transform + val transform: Transform, ) : ActionResponse(), ToXContentObject { - @Throws(IOException::class) constructor(sin: StreamInput) : this( id = sin.readString(), 
@@ -37,7 +36,7 @@ class IndexTransformResponse( seqNo = sin.readLong(), primaryTerm = sin.readLong(), status = sin.readEnum(RestStatus::class.java), - transform = Transform(sin) + transform = Transform(sin), ) @Throws(IOException::class) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/index/TransportIndexTransformAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/index/TransportIndexTransformAction.kt index 708bfa1d2..05e87438e 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/index/TransportIndexTransformAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/index/TransportIndexTransformAction.kt @@ -7,7 +7,6 @@ package org.opensearch.indexmanagement.transform.action.index import org.apache.logging.log4j.LogManager import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.DocWriteRequest import org.opensearch.action.admin.indices.mapping.get.GetMappingsAction import org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest @@ -25,11 +24,13 @@ import org.opensearch.cluster.metadata.IndexNameExpressionResolver import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings -import org.opensearch.core.xcontent.NamedXContentRegistry -import org.opensearch.core.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory.jsonBuilder import org.opensearch.commons.ConfigConstants import org.opensearch.commons.authuser.User +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent import org.opensearch.indexmanagement.IndexManagementIndices import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.opensearchapi.parseFromGetResponse @@ -40,12 +41,13 @@ import org.opensearch.indexmanagement.util.IndexUtils import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser import org.opensearch.indexmanagement.util.SecurityUtils.Companion.userHasPermissionForResource import org.opensearch.indexmanagement.util.SecurityUtils.Companion.validateUserConfiguration -import org.opensearch.core.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService @Suppress("SpreadOperator", "LongParameterList") -class TransportIndexTransformAction @Inject constructor( +class TransportIndexTransformAction +@Inject +constructor( transportService: TransportService, val client: Client, actionFilters: ActionFilters, @@ -53,11 +55,10 @@ class TransportIndexTransformAction @Inject constructor( val indexNameExpressionResolver: IndexNameExpressionResolver, val clusterService: ClusterService, val settings: Settings, - val xContentRegistry: NamedXContentRegistry + val xContentRegistry: NamedXContentRegistry, ) : HandledTransportAction( - IndexTransformAction.NAME, transportService, actionFilters, ::IndexTransformRequest + IndexTransformAction.NAME, transportService, actionFilters, ::IndexTransformRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) init { @@ -76,21 +77,20 @@ class TransportIndexTransformAction @Inject constructor( private val client: Client, private val actionListener: ActionListener, private val request: 
IndexTransformRequest, - private val user: User? = buildUser(client.threadPool().threadContext, request.transform.user) + private val user: User? = buildUser(client.threadPool().threadContext, request.transform.user), ) { - fun start() { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) client.threadPool().threadContext.stashContext().use { if (!validateUserConfiguration(user, filterByEnabled, actionListener)) { return } indexManagementIndices.checkAndUpdateIMConfigIndex( - ActionListener.wrap(::onConfigIndexAcknowledgedResponse, actionListener::onFailure) + ActionListener.wrap(::onConfigIndexAcknowledgedResponse, actionListener::onFailure), ) } } @@ -168,8 +168,8 @@ class TransportIndexTransformAction @Inject constructor( actionListener.onResponse( IndexTransformResponse( response.id, response.version, response.seqNo, response.primaryTerm, status, - transform.copy(seqNo = response.seqNo, primaryTerm = response.primaryTerm) - ) + transform.copy(seqNo = response.seqNo, primaryTerm = response.primaryTerm), + ), ) } } @@ -177,7 +177,7 @@ class TransportIndexTransformAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(e) } - } + }, ) } @@ -186,7 +186,7 @@ class TransportIndexTransformAction @Inject constructor( indexNameExpressionResolver.concreteIndexNames( clusterService.state(), IndicesOptions.lenientExpand(), true, request.transform - .sourceIndex + .sourceIndex, ) if (concreteIndices.isEmpty()) { actionListener.onFailure(OpenSearchStatusException("No specified source index exist in the cluster", RestStatus.NOT_FOUND)) @@ -211,7 +211,7 @@ class TransportIndexTransformAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(e) } - } + }, ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/preview/PreviewTransformRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/preview/PreviewTransformRequest.kt index 1c65421e4..3811d1005 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/preview/PreviewTransformRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/preview/PreviewTransformRequest.kt @@ -13,11 +13,10 @@ import org.opensearch.indexmanagement.transform.model.Transform import java.io.IOException class PreviewTransformRequest( - val transform: Transform + val transform: Transform, ) : ActionRequest() { - constructor(sin: StreamInput) : this( - transform = Transform(sin) + transform = Transform(sin), ) override fun validate(): ActionRequestValidationException? 
{
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/preview/PreviewTransformResponse.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/preview/PreviewTransformResponse.kt
index e9120829c..b12bd509e 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/preview/PreviewTransformResponse.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/preview/PreviewTransformResponse.kt
@@ -8,18 +8,18 @@ package org.opensearch.indexmanagement.transform.action.preview
 import org.opensearch.core.action.ActionResponse
 import org.opensearch.core.common.io.stream.StreamInput
 import org.opensearch.core.common.io.stream.StreamOutput
+import org.opensearch.core.rest.RestStatus
 import org.opensearch.core.xcontent.ToXContent
 import org.opensearch.core.xcontent.ToXContentObject
 import org.opensearch.core.xcontent.XContentBuilder
-import org.opensearch.core.rest.RestStatus

 class PreviewTransformResponse(
     val documents: List<Map<String, Any>>,
-    val status: RestStatus
+    val status: RestStatus,
 ) : ActionResponse(), ToXContentObject {
-
     constructor(sin: StreamInput) : this(
-        documents = sin.let {
+        documents =
+            sin.let {
             val documentList = mutableListOf<Map<String, Any>>()
             val size = it.readVInt()
             repeat(size) { _ ->
@@ -27,7 +27,7 @@ class PreviewTransformResponse(
             }
             documentList.toList()
         },
-        status = sin.readEnum(RestStatus::class.java)
+        status = sin.readEnum(RestStatus::class.java),
     )

     override fun writeTo(out: StreamOutput) {
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/preview/TransportPreviewTransformAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/preview/TransportPreviewTransformAction.kt
index bb4f9219e..74f4b23c0 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/preview/TransportPreviewTransformAction.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/preview/TransportPreviewTransformAction.kt
@@ -11,7 +11,6 @@ import kotlinx.coroutines.launch
 import org.apache.logging.log4j.LogManager
 import org.opensearch.ExceptionsHelper
 import org.opensearch.OpenSearchStatusException
-import org.opensearch.core.action.ActionListener
 import org.opensearch.action.admin.indices.mapping.get.GetMappingsAction
 import org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest
 import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse
@@ -26,6 +25,8 @@ import org.opensearch.cluster.service.ClusterService
 import org.opensearch.common.inject.Inject
 import org.opensearch.common.settings.Settings
 import org.opensearch.commons.ConfigConstants
+import org.opensearch.core.action.ActionListener
+import org.opensearch.core.rest.RestStatus
 import org.opensearch.indexmanagement.opensearchapi.IndexManagementSecurityContext
 import org.opensearch.indexmanagement.opensearchapi.suspendUntil
 import org.opensearch.indexmanagement.opensearchapi.withClosableContext
@@ -34,29 +35,29 @@ import org.opensearch.indexmanagement.transform.TransformSearchService
 import org.opensearch.indexmanagement.transform.TransformValidator
 import org.opensearch.indexmanagement.transform.model.Transform
 import org.opensearch.indexmanagement.util.SecurityUtils
-import org.opensearch.core.rest.RestStatus
 import org.opensearch.tasks.Task
 import org.opensearch.transport.TransportService

-class TransportPreviewTransformAction @Inject constructor(
+class TransportPreviewTransformAction
+@Inject
+constructor(
     transportService: TransportService,
     actionFilters: ActionFilters,
     val settings: 
Settings, private val client: Client, private val clusterService: ClusterService, - private val indexNameExpressionResolver: IndexNameExpressionResolver + private val indexNameExpressionResolver: IndexNameExpressionResolver, ) : HandledTransportAction( - PreviewTransformAction.NAME, transportService, actionFilters, ::PreviewTransformRequest + PreviewTransformAction.NAME, transportService, actionFilters, ::PreviewTransformRequest, ) { - private val log = LogManager.getLogger(javaClass) @Suppress("SpreadOperator") override fun doExecute(task: Task, request: PreviewTransformRequest, listener: ActionListener) { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) val transform = request.transform @@ -83,7 +84,7 @@ class TransportPreviewTransformAction @Inject constructor( CoroutineScope(Dispatchers.IO).launch { withClosableContext( - IndexManagementSecurityContext("PreviewTransformHandler", settings, client.threadPool().threadContext, user) + IndexManagementSecurityContext("PreviewTransformHandler", settings, client.threadPool().threadContext, user), ) { executeSearch(searchRequest, transform, listener) } @@ -93,7 +94,7 @@ class TransportPreviewTransformAction @Inject constructor( override fun onFailure(e: Exception) { listener.onFailure(e) } - } + }, ) } @@ -105,30 +106,34 @@ class TransportPreviewTransformAction @Inject constructor( return issues } + suspend fun executeSearch(searchRequest: SearchRequest, transform: Transform, listener: ActionListener) { - val response = try { - val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } - searchResponse - } catch (e: Exception) { - listener.onFailure(e) - return - } + val response = + try { + val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } + searchResponse + } catch (e: Exception) { + listener.onFailure(e) + return + } try { val targetIndexDateFieldMappings = TargetIndexMappingService.getTargetMappingsForDates(transform) - val transformSearchResult = TransformSearchService.convertResponse( - transform = transform, searchResponse = response, waterMarkDocuments = false, - targetIndexDateFieldMappings = targetIndexDateFieldMappings - ) - val formattedResult = transformSearchResult.docsToIndex.map { - it.sourceAsMap() - } + val transformSearchResult = + TransformSearchService.convertResponse( + transform = transform, searchResponse = response, waterMarkDocuments = false, + targetIndexDateFieldMappings = targetIndexDateFieldMappings, + ) + val formattedResult = + transformSearchResult.docsToIndex.map { + it.sourceAsMap() + } listener.onResponse(PreviewTransformResponse(formattedResult, RestStatus.OK)) } catch (e: Exception) { listener.onFailure( OpenSearchStatusException( - "Failed to parse the transformed results", RestStatus.INTERNAL_SERVER_ERROR, ExceptionsHelper.unwrapCause(e) - ) + "Failed to parse the transformed results", RestStatus.INTERNAL_SERVER_ERROR, ExceptionsHelper.unwrapCause(e), + ), ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/start/StartTransformRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/start/StartTransformRequest.kt index 528b564a2..a6514c6df 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/start/StartTransformRequest.kt +++ 
b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/start/StartTransformRequest.kt @@ -13,7 +13,6 @@ import org.opensearch.core.common.io.stream.StreamOutput import java.io.IOException class StartTransformRequest : UpdateRequest { - @Throws(IOException::class) constructor(sin: StreamInput) : super(sin) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/start/TransportStartTransformAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/start/TransportStartTransformAction.kt index 9fe33feed..b69187d89 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/start/TransportStartTransformAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/start/TransportStartTransformAction.kt @@ -8,7 +8,6 @@ package org.opensearch.indexmanagement.transform.action.start import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.DocWriteResponse import org.opensearch.action.get.GetRequest import org.opensearch.action.get.GetResponse @@ -22,10 +21,12 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.LoggingDeprecationHandler -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.ConfigConstants +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.opensearchapi.parseFromGetResponse import org.opensearch.indexmanagement.opensearchapi.parseWithType @@ -34,23 +35,23 @@ import org.opensearch.indexmanagement.transform.model.Transform import org.opensearch.indexmanagement.transform.model.TransformMetadata import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser import org.opensearch.indexmanagement.util.SecurityUtils.Companion.userHasPermissionForResource -import org.opensearch.core.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService import java.time.Instant @Suppress("ReturnCount") -class TransportStartTransformAction @Inject constructor( +class TransportStartTransformAction +@Inject +constructor( transportService: TransportService, val client: Client, val settings: Settings, val clusterService: ClusterService, actionFilters: ActionFilters, - val xContentRegistry: NamedXContentRegistry + val xContentRegistry: NamedXContentRegistry, ) : HandledTransportAction( - StartTransformAction.NAME, transportService, actionFilters, ::StartTransformRequest + StartTransformAction.NAME, transportService, actionFilters, ::StartTransformRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) init { @@ -64,8 +65,8 @@ class TransportStartTransformAction @Inject constructor( override fun doExecute(task: Task, request: StartTransformRequest, actionListener: ActionListener) { log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + 
ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) val getRequest = GetRequest(INDEX_MANAGEMENT_INDEX, request.id()) val user = buildUser(client.threadPool().threadContext) @@ -105,7 +106,7 @@ class TransportStartTransformAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } } @@ -113,16 +114,17 @@ class TransportStartTransformAction @Inject constructor( private fun updateTransformJob( transform: Transform, request: StartTransformRequest, - actionListener: ActionListener + actionListener: ActionListener, ) { val now = Instant.now().toEpochMilli() request.index(INDEX_MANAGEMENT_INDEX).doc( mapOf( - Transform.TRANSFORM_TYPE to mapOf( - Transform.ENABLED_FIELD to true, - Transform.ENABLED_AT_FIELD to now, Transform.UPDATED_AT_FIELD to now - ) - ) + Transform.TRANSFORM_TYPE to + mapOf( + Transform.ENABLED_FIELD to true, + Transform.ENABLED_AT_FIELD to now, Transform.UPDATED_AT_FIELD to now, + ), + ), ) client.update( request, @@ -143,7 +145,7 @@ class TransportStartTransformAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } @@ -156,18 +158,20 @@ class TransportStartTransformAction @Inject constructor( if (!response.isExists || response.isSourceEmpty) { actionListener.onFailure(OpenSearchStatusException("Metadata doc missing for transform [${req.id()}]", RestStatus.NOT_FOUND)) } else { - val metadata = response.sourceAsBytesRef?.let { - val xcp = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, it, XContentType.JSON - ) - xcp.parseWithType(response.id, response.seqNo, response.primaryTerm, TransformMetadata.Companion::parse) - } + val metadata = + response.sourceAsBytesRef?.let { + val xcp = + XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, it, XContentType.JSON, + ) + xcp.parseWithType(response.id, response.seqNo, response.primaryTerm, TransformMetadata.Companion::parse) + } if (metadata == null) { actionListener.onFailure( OpenSearchStatusException( - "Metadata doc missing for transform [${req.id()}]", RestStatus.NOT_FOUND - ) + "Metadata doc missing for transform [${req.id()}]", RestStatus.NOT_FOUND, + ), ) } else { updateTransformMetadata(transform, metadata, actionListener) @@ -178,28 +182,31 @@ class TransportStartTransformAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } private fun updateTransformMetadata(transform: Transform, metadata: TransformMetadata, actionListener: ActionListener) { val now = Instant.now().toEpochMilli() - val updatedStatus = when (metadata.status) { - TransformMetadata.Status.FINISHED, TransformMetadata.Status.STOPPED -> TransformMetadata.Status.STARTED - TransformMetadata.Status.STARTED, TransformMetadata.Status.INIT -> - return actionListener.onResponse(AcknowledgedResponse(true)) - TransformMetadata.Status.FAILED -> TransformMetadata.Status.STARTED - } - val updateRequest = UpdateRequest(INDEX_MANAGEMENT_INDEX, transform.metadataId) - .doc( - mapOf( - TransformMetadata.TRANSFORM_METADATA_TYPE to mapOf( - TransformMetadata.STATUS_FIELD to updatedStatus.type, - TransformMetadata.FAILURE_REASON to null, TransformMetadata.LAST_UPDATED_AT_FIELD to now - ) + val updatedStatus = + when (metadata.status) { + TransformMetadata.Status.FINISHED, 
TransformMetadata.Status.STOPPED -> TransformMetadata.Status.STARTED + TransformMetadata.Status.STARTED, TransformMetadata.Status.INIT -> + return actionListener.onResponse(AcknowledgedResponse(true)) + TransformMetadata.Status.FAILED -> TransformMetadata.Status.STARTED + } + val updateRequest = + UpdateRequest(INDEX_MANAGEMENT_INDEX, transform.metadataId) + .doc( + mapOf( + TransformMetadata.TRANSFORM_METADATA_TYPE to + mapOf( + TransformMetadata.STATUS_FIELD to updatedStatus.type, + TransformMetadata.FAILURE_REASON to null, TransformMetadata.LAST_UPDATED_AT_FIELD to now, + ), + ), ) - ) - .routing(transform.id) + .routing(transform.id) client.update( updateRequest, object : ActionListener { @@ -210,7 +217,7 @@ class TransportStartTransformAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/stop/StopTransformRequest.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/stop/StopTransformRequest.kt index 85295560d..7d3a3c2fc 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/stop/StopTransformRequest.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/stop/StopTransformRequest.kt @@ -13,7 +13,6 @@ import org.opensearch.core.common.io.stream.StreamOutput import java.io.IOException class StopTransformRequest : UpdateRequest { - @Throws(IOException::class) constructor(sin: StreamInput) : super(sin) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/stop/TransportStopTransformAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/stop/TransportStopTransformAction.kt index 71cbb006e..4e0e480cb 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/action/stop/TransportStopTransformAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/action/stop/TransportStopTransformAction.kt @@ -8,7 +8,6 @@ package org.opensearch.indexmanagement.transform.action.stop import org.apache.logging.log4j.LogManager import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.action.DocWriteResponse import org.opensearch.action.get.GetRequest import org.opensearch.action.get.GetResponse @@ -22,10 +21,12 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.inject.Inject import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.LoggingDeprecationHandler -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.ConfigConstants +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.opensearchapi.parseFromGetResponse @@ -35,7 +36,6 @@ import org.opensearch.indexmanagement.transform.model.Transform import org.opensearch.indexmanagement.transform.model.TransformMetadata import org.opensearch.indexmanagement.util.SecurityUtils.Companion.buildUser import 
org.opensearch.indexmanagement.util.SecurityUtils.Companion.userHasPermissionForResource -import org.opensearch.core.rest.RestStatus import org.opensearch.tasks.Task import org.opensearch.transport.TransportService import java.time.Instant @@ -53,17 +53,18 @@ import java.time.Instant * The inverse (job: successful and metadata: fail) will end up with a disabled job and a metadata that potentially * says STARTED still which is wrong. */ -class TransportStopTransformAction @Inject constructor( +class TransportStopTransformAction +@Inject +constructor( transportService: TransportService, val client: Client, val settings: Settings, val clusterService: ClusterService, actionFilters: ActionFilters, - val xContentRegistry: NamedXContentRegistry + val xContentRegistry: NamedXContentRegistry, ) : HandledTransportAction( - StopTransformAction.NAME, transportService, actionFilters, ::StopTransformRequest + StopTransformAction.NAME, transportService, actionFilters, ::StopTransformRequest, ) { - @Volatile private var filterByEnabled = IndexManagementSettings.FILTER_BY_BACKEND_ROLES.get(settings) init { @@ -78,8 +79,8 @@ class TransportStopTransformAction @Inject constructor( log.debug("Executing StopTransformAction on ${request.id()}") log.debug( "User and roles string from thread context: ${client.threadPool().threadContext.getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT - )}" + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + )}", ) val getRequest = GetRequest(INDEX_MANAGEMENT_INDEX, request.id()) val user = buildUser(client.threadPool().threadContext) @@ -115,7 +116,7 @@ class TransportStopTransformAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } } @@ -123,7 +124,7 @@ class TransportStopTransformAction @Inject constructor( private fun retrieveAndUpdateTransformMetadata( transform: Transform, request: StopTransformRequest, - actionListener: ActionListener + actionListener: ActionListener, ) { val req = GetRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, transform.metadataId).routing(transform.id) client.get( @@ -134,13 +135,15 @@ class TransportStopTransformAction @Inject constructor( // If there is no metadata there is nothing to stop, proceed to disable job updateTransformJob(transform, request, actionListener) } else { - val metadata = response.sourceAsBytesRef?.let { - val xcp = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, it, XContentType.JSON - ) - xcp.parseWithType(response.id, response.seqNo, response.primaryTerm, TransformMetadata.Companion::parse) - } + val metadata = + response.sourceAsBytesRef?.let { + val xcp = + XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, it, XContentType.JSON, + ) + xcp.parseWithType(response.id, response.seqNo, response.primaryTerm, TransformMetadata.Companion::parse) + } if (metadata == null) { // If there is no metadata there is nothing to stop, proceed to disable job updateTransformJob(transform, request, actionListener) @@ -153,7 +156,7 @@ class TransportStopTransformAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } @@ -168,24 +171,27 @@ class TransportStopTransformAction @Inject constructor( transform: Transform, metadata: TransformMetadata, request: StopTransformRequest, - actionListener: ActionListener + 
actionListener: ActionListener, ) { val now = Instant.now().toEpochMilli() - val updatedStatus = when (metadata.status) { - TransformMetadata.Status.STARTED, TransformMetadata.Status.INIT, TransformMetadata.Status.STOPPED -> TransformMetadata.Status.STOPPED - TransformMetadata.Status.FINISHED, TransformMetadata.Status.FAILED -> metadata.status - } + val updatedStatus = + when (metadata.status) { + TransformMetadata.Status.STARTED, TransformMetadata.Status.INIT, TransformMetadata.Status.STOPPED -> TransformMetadata.Status.STOPPED + TransformMetadata.Status.FINISHED, TransformMetadata.Status.FAILED -> metadata.status + } - val updateRequest = UpdateRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, transform.metadataId) - .doc( - mapOf( - TransformMetadata.TRANSFORM_METADATA_TYPE to mapOf( - TransformMetadata.STATUS_FIELD to updatedStatus.type, - TransformMetadata.LAST_UPDATED_AT_FIELD to now - ) + val updateRequest = + UpdateRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, transform.metadataId) + .doc( + mapOf( + TransformMetadata.TRANSFORM_METADATA_TYPE to + mapOf( + TransformMetadata.STATUS_FIELD to updatedStatus.type, + TransformMetadata.LAST_UPDATED_AT_FIELD to now, + ), + ), ) - ) - .routing(transform.id) + .routing(transform.id) client.update( updateRequest, object : ActionListener { @@ -200,7 +206,7 @@ class TransportStopTransformAction @Inject constructor( override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } @@ -209,11 +215,12 @@ class TransportStopTransformAction @Inject constructor( request.index(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX).setIfSeqNo(transform.seqNo).setIfPrimaryTerm(transform.primaryTerm) .doc( mapOf( - Transform.TRANSFORM_TYPE to mapOf( - Transform.ENABLED_FIELD to false, - Transform.ENABLED_AT_FIELD to null, Transform.UPDATED_AT_FIELD to now - ) - ) + Transform.TRANSFORM_TYPE to + mapOf( + Transform.ENABLED_FIELD to false, + Transform.ENABLED_AT_FIELD to null, Transform.UPDATED_AT_FIELD to now, + ), + ), ) client.update( request, @@ -221,10 +228,11 @@ class TransportStopTransformAction @Inject constructor( override fun onResponse(response: UpdateResponse) { actionListener.onResponse(AcknowledgedResponse(response.result == DocWriteResponse.Result.UPDATED)) } + override fun onFailure(e: Exception) { actionListener.onFailure(ExceptionsHelper.unwrapCause(e) as Exception) } - } + }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/model/ContinuousTransformStats.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/model/ContinuousTransformStats.kt index 884a9eca5..a5113caae 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/model/ContinuousTransformStats.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/model/ContinuousTransformStats.kt @@ -19,13 +19,12 @@ import java.time.Instant data class ContinuousTransformStats( val lastTimestamp: Instant?, - val documentsBehind: Map? 
+ val documentsBehind: Map?, ) : ToXContentObject, Writeable { - @Throws(IOException::class) constructor(sin: StreamInput) : this( lastTimestamp = if (sin.readBoolean()) sin.readInstant() else null, - documentsBehind = if (sin.readBoolean()) sin.readMap({ it.readString() }, { it.readLong() }) else null + documentsBehind = if (sin.readBoolean()) sin.readMap({ it.readString() }, { it.readLong() }) else null, ) override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/model/ExplainTransform.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/model/ExplainTransform.kt index ad52a21b1..5524d246f 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/model/ExplainTransform.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/model/ExplainTransform.kt @@ -16,13 +16,12 @@ import java.io.IOException data class ExplainTransform( val metadataID: String? = null, - val metadata: TransformMetadata? = null + val metadata: TransformMetadata? = null, ) : ToXContentObject, Writeable { - @Throws(IOException::class) constructor(sin: StreamInput) : this( metadataID = sin.readOptionalString(), - metadata = if (sin.readBoolean()) TransformMetadata(sin) else null + metadata = if (sin.readBoolean()) TransformMetadata(sin) else null, ) @Throws(IOException::class) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/model/ISMTransform.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/model/ISMTransform.kt index ecc092ac9..d421a0c42 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/model/ISMTransform.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/model/ISMTransform.kt @@ -42,9 +42,8 @@ data class ISMTransform( val pageSize: Int, val dataSelectionQuery: QueryBuilder = MatchAllQueryBuilder(), val groups: List, - val aggregations: AggregatorFactories.Builder = AggregatorFactories.builder() + val aggregations: AggregatorFactories.Builder = AggregatorFactories.builder(), ) : ToXContentObject, Writeable { - init { require(pageSize in Transform.MINIMUM_PAGE_SIZE..Transform.MAXIMUM_PAGE_SIZE) { "Page size must be between ${Transform.MINIMUM_PAGE_SIZE} and ${Transform.MAXIMUM_PAGE_SIZE}" @@ -92,7 +91,7 @@ data class ISMTransform( continuous = false, groups = this.groups, aggregations = this.aggregations, - user = user + user = user, ) } @@ -102,7 +101,8 @@ data class ISMTransform( targetIndex = sin.readString(), pageSize = sin.readInt(), dataSelectionQuery = requireNotNull(sin.readOptionalNamedWriteable(QueryBuilder::class.java)) { "Query cannot be null" }, - groups = sin.let { + groups = + sin.let { val dimensionList = mutableListOf() val size = it.readVInt() repeat(size) { _ -> @@ -112,12 +112,12 @@ data class ISMTransform( Dimension.Type.DATE_HISTOGRAM -> DateHistogram(sin) Dimension.Type.TERMS -> Terms(sin) Dimension.Type.HISTOGRAM -> Histogram(sin) - } + }, ) } dimensionList.toList() }, - aggregations = requireNotNull(sin.readOptionalWriteable { AggregatorFactories.Builder(it) }) { "Aggregations cannot be null" } + aggregations = requireNotNull(sin.readOptionalWriteable { AggregatorFactories.Builder(it) }) { "Aggregations cannot be null" }, ) override fun toString(): String { @@ -177,11 +177,12 @@ data class ISMTransform( val registry = xcp.xContentRegistry val source = xcp.mapOrdered() val xContentBuilder = XContentFactory.jsonBuilder().map(source) - val sourceParser = 
XContentType.JSON.xContent().createParser( - registry, LoggingDeprecationHandler.INSTANCE, - BytesReference - .bytes(xContentBuilder).streamInput() - ) + val sourceParser = + XContentType.JSON.xContent().createParser( + registry, LoggingDeprecationHandler.INSTANCE, + BytesReference + .bytes(xContentBuilder).streamInput(), + ) dataSelectionQuery = AbstractQueryBuilder.parseInnerQueryBuilder(sourceParser) } Transform.GROUPS_FIELD -> { @@ -201,7 +202,7 @@ data class ISMTransform( pageSize = pageSize, dataSelectionQuery = dataSelectionQuery, groups = groups, - aggregations = aggregations + aggregations = aggregations, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/model/Transform.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/model/Transform.kt index db3145e08..14dd49ac0 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/model/Transform.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/model/Transform.kt @@ -9,24 +9,25 @@ import org.opensearch.action.admin.indices.stats.IndicesStatsAction import org.opensearch.action.admin.indices.stats.IndicesStatsRequest import org.opensearch.action.admin.indices.stats.IndicesStatsResponse import org.opensearch.client.Client +import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.XContentFactory +import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.authuser.User import org.opensearch.core.common.bytes.BytesReference import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput import org.opensearch.core.common.io.stream.Writeable -import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.core.index.shard.ShardId +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentFactory import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParser.Token import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.common.xcontent.XContentType -import org.opensearch.commons.authuser.User import org.opensearch.index.query.AbstractQueryBuilder import org.opensearch.index.query.MatchAllQueryBuilder import org.opensearch.index.query.QueryBuilder import org.opensearch.index.seqno.SequenceNumbers -import org.opensearch.core.index.shard.ShardId import org.opensearch.indexmanagement.common.model.dimension.DateHistogram import org.opensearch.indexmanagement.common.model.dimension.Dimension import org.opensearch.indexmanagement.common.model.dimension.Histogram @@ -45,7 +46,6 @@ import org.opensearch.jobscheduler.spi.schedule.CronSchedule import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule import org.opensearch.jobscheduler.spi.schedule.Schedule import org.opensearch.jobscheduler.spi.schedule.ScheduleParser -import org.opensearch.core.rest.RestStatus import org.opensearch.search.aggregations.AggregatorFactories import java.io.IOException import java.time.Instant @@ -70,9 +70,8 @@ data class Transform( val groups: List, val aggregations: AggregatorFactories.Builder = AggregatorFactories.builder(), val continuous: Boolean = false, - val user: User? = null + val user: User? 
= null, ) : ScheduledJobParameter, Writeable { - init { aggregations.aggregatorFactories.forEach { require(supportedAggregations.contains(it.type)) { "Unsupported aggregation [${it.type}]" } @@ -174,12 +173,12 @@ data class Transform( mutableMapOf( TRANSFORM_DOC_ID_FIELD to this.id, DOC_COUNT to docCount, - TRANSFORM_DOC_COUNT_FIELD to docCount + TRANSFORM_DOC_COUNT_FIELD to docCount, ) } else { mutableMapOf( DOC_COUNT to docCount, - TRANSFORM_DOC_COUNT_FIELD to docCount + TRANSFORM_DOC_COUNT_FIELD to docCount, ) } } @@ -187,21 +186,24 @@ data class Transform( suspend fun getContinuousStats(client: Client, metadata: TransformMetadata): ContinuousTransformStats? { val indicesStatsRequest = IndicesStatsRequest().indices(sourceIndex).clear() val response: IndicesStatsResponse = client.suspendUntil { execute(IndicesStatsAction.INSTANCE, indicesStatsRequest, it) } - val shardIDsToGlobalCheckpoint = if (response.status == RestStatus.OK) { - TransformSearchService.convertIndicesStatsResponse(response) - } else return null + val shardIDsToGlobalCheckpoint = + if (response.status == RestStatus.OK) { + TransformSearchService.convertIndicesStatsResponse(response) + } else { + return null + } return ContinuousTransformStats( metadata.continuousStats?.lastTimestamp, getDocumentsBehind( metadata.shardIDToGlobalCheckpoint, - shardIDsToGlobalCheckpoint - ) + shardIDsToGlobalCheckpoint, + ), ) } private fun getDocumentsBehind( oldShardIDsToGlobalCheckpoint: Map?, - newShardIDsToGlobalCheckpoint: Map? + newShardIDsToGlobalCheckpoint: Map?, ): MutableMap { val documentsBehind: MutableMap = HashMap() if (newShardIDsToGlobalCheckpoint == null) { @@ -225,7 +227,8 @@ data class Transform( seqNo = sin.readLong(), primaryTerm = sin.readLong(), schemaVersion = sin.readLong(), - jobSchedule = sin.let { + jobSchedule = + sin.let { when (requireNotNull(sin.readEnum(ScheduleType::class.java)) { "ScheduleType cannot be null" }) { ScheduleType.CRON -> CronSchedule(sin) ScheduleType.INTERVAL -> IntervalSchedule(sin) @@ -241,7 +244,8 @@ data class Transform( targetIndex = sin.readString(), roles = sin.readStringArray().toList(), pageSize = sin.readInt(), - groups = sin.let { + groups = + sin.let { val dimensionList = mutableListOf() val size = it.readVInt() repeat(size) { _ -> @@ -251,21 +255,25 @@ data class Transform( Dimension.Type.DATE_HISTOGRAM -> DateHistogram(sin) Dimension.Type.TERMS -> Terms(sin) Dimension.Type.HISTOGRAM -> Histogram(sin) - } + }, ) } dimensionList.toList() }, aggregations = requireNotNull(sin.readOptionalWriteable { AggregatorFactories.Builder(it) }) { "Aggregations cannot be null" }, continuous = sin.readBoolean(), - user = if (sin.readBoolean()) { + user = + if (sin.readBoolean()) { User(sin) - } else null + } else { + null + }, ) companion object { enum class ScheduleType { - CRON, INTERVAL; + CRON, + INTERVAL, } val supportedAggregations = listOf("sum", "max", "min", "value_count", "avg", "scripted_metric", "percentiles") @@ -292,6 +300,7 @@ data class Transform( const val MINIMUM_JOB_INTERVAL = 1 const val TRANSFORM_DOC_ID_FIELD = "$TRANSFORM_TYPE._id" const val DOC_COUNT = "_doc_count" + // Keeping the field in order to be backward compatible const val TRANSFORM_DOC_COUNT_FIELD = "$TRANSFORM_TYPE._doc_count" const val CONTINUOUS_FIELD = "continuous" @@ -304,7 +313,7 @@ data class Transform( xcp: XContentParser, id: String = NO_ID, seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, - primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM + primaryTerm: Long = 
SequenceNumbers.UNASSIGNED_PRIMARY_TERM, ): Transform { var schedule: Schedule? = null var schemaVersion: Long = IndexUtils.DEFAULT_SCHEMA_VERSION @@ -343,11 +352,12 @@ data class Transform( val registry = xcp.xContentRegistry val source = xcp.mapOrdered() val xContentBuilder = XContentFactory.jsonBuilder().map(source) - val sourceParser = XContentType.JSON.xContent().createParser( - registry, LoggingDeprecationHandler.INSTANCE, - BytesReference - .bytes(xContentBuilder).streamInput() - ) + val sourceParser = + XContentType.JSON.xContent().createParser( + registry, LoggingDeprecationHandler.INSTANCE, + BytesReference + .bytes(xContentBuilder).streamInput(), + ) dataSelectionQuery = AbstractQueryBuilder.parseInnerQueryBuilder(sourceParser) } TARGET_INDEX_FIELD -> targetIndex = xcp.text() @@ -412,7 +422,7 @@ data class Transform( groups = groups, aggregations = aggregations, continuous = continuous, - user = user + user = user, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/model/TransformMetadata.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/model/TransformMetadata.kt index a45505878..1c72a8186 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/model/TransformMetadata.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/model/TransformMetadata.kt @@ -33,15 +33,15 @@ data class TransformMetadata( val failureReason: String? = null, val stats: TransformStats, val shardIDToGlobalCheckpoint: Map? = null, - val continuousStats: ContinuousTransformStats? = null + val continuousStats: ContinuousTransformStats? = null, ) : ToXContentObject, Writeable { - enum class Status(val type: String) { INIT("init"), STARTED("started"), STOPPED("stopped"), FINISHED("finished"), - FAILED("failed"); + FAILED("failed"), + ; override fun toString(): String { return type @@ -60,7 +60,7 @@ data class TransformMetadata( failureReason = sin.readOptionalString(), stats = TransformStats(sin), shardIDToGlobalCheckpoint = if (sin.readBoolean()) sin.readMap({ ShardId(it) }, { it.readLong() }) else null, - continuousStats = if (sin.readBoolean()) ContinuousTransformStats(sin) else null + continuousStats = if (sin.readBoolean()) ContinuousTransformStats(sin) else null, ) override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { @@ -100,13 +100,14 @@ data class TransformMetadata( fun mergeStats(stats: TransformStats): TransformMetadata { return this.copy( - stats = this.stats.copy( + stats = + this.stats.copy( pagesProcessed = this.stats.pagesProcessed + stats.pagesProcessed, documentsIndexed = this.stats.documentsIndexed + stats.documentsIndexed, documentsProcessed = this.stats.documentsProcessed + stats.documentsProcessed, indexTimeInMillis = this.stats.indexTimeInMillis + stats.indexTimeInMillis, - searchTimeInMillis = this.stats.searchTimeInMillis + stats.searchTimeInMillis - ) + searchTimeInMillis = this.stats.searchTimeInMillis + stats.searchTimeInMillis, + ), ) } @@ -128,7 +129,7 @@ data class TransformMetadata( xcp: XContentParser, id: String, seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, - primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM + primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, ): TransformMetadata { var transformId: String? = null var afterkey: Map? 
= null @@ -152,8 +153,9 @@ data class TransformMetadata( FAILURE_REASON -> failureReason = xcp.textOrNull() STATS_FIELD -> stats = TransformStats.parse(xcp) SHARD_ID_TO_GLOBAL_CHECKPOINT_FIELD -> - shardIDToGlobalCheckpoint = xcp.map({ HashMap() }, { parser -> parser.longValue() }) - .mapKeys { ShardId.fromString(it.key) } + shardIDToGlobalCheckpoint = + xcp.map({ HashMap() }, { parser -> parser.longValue() }) + .mapKeys { ShardId.fromString(it.key) } CONTINUOUS_STATS_FIELD -> continuousStats = ContinuousTransformStats.parse(xcp) } } @@ -169,7 +171,7 @@ data class TransformMetadata( failureReason = failureReason, stats = requireNotNull(stats) { "Stats must not be null" }, shardIDToGlobalCheckpoint = shardIDToGlobalCheckpoint, - continuousStats = continuousStats + continuousStats = continuousStats, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/model/TransformSearchResult.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/model/TransformSearchResult.kt index 0b833965a..39c308d9f 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/model/TransformSearchResult.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/model/TransformSearchResult.kt @@ -19,12 +19,12 @@ data class BucketsToTransform( fun BucketsToTransform.initializeShardsToSearch( originalGlobalCheckpoints: Map?, - currentShardIdToGlobalCheckpoint: Map + currentShardIdToGlobalCheckpoint: Map, ): BucketsToTransform { val shardsToSearch = getShardsToSearch(originalGlobalCheckpoints, currentShardIdToGlobalCheckpoint).iterator() return this.copy( shardsToSearch = shardsToSearch, - currentShard = if (shardsToSearch.hasNext()) shardsToSearch.next() else null + currentShard = if (shardsToSearch.hasNext()) shardsToSearch.next() else null, ) } @@ -43,7 +43,7 @@ private fun getShardsToSearch(oldShardIDToMaxSeqNo: Map?, newShar data class BucketSearchResult( val modifiedBuckets: MutableSet>, val afterKey: Map? 
= null, - val searchTimeInMillis: Long = 0 + val searchTimeInMillis: Long = 0, ) data class ShardNewDocuments(val shardId: ShardId, val from: Long?, val to: Long) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/model/TransformStats.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/model/TransformStats.kt index 3b68a61b4..e8903a513 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/model/TransformStats.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/model/TransformStats.kt @@ -20,16 +20,15 @@ data class TransformStats( val documentsProcessed: Long, val documentsIndexed: Long, val indexTimeInMillis: Long, - val searchTimeInMillis: Long + val searchTimeInMillis: Long, ) : ToXContentObject, Writeable { - @Throws(IOException::class) constructor(sin: StreamInput) : this( pagesProcessed = sin.readLong(), documentsProcessed = sin.readLong(), documentsIndexed = sin.readLong(), indexTimeInMillis = sin.readLong(), - searchTimeInMillis = sin.readLong() + searchTimeInMillis = sin.readLong(), ) override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { @@ -86,7 +85,7 @@ data class TransformStats( documentsProcessed = requireNotNull(documentsProcessed) { "Documents processed must not be null" }, documentsIndexed = requireNotNull(documentsIndexed) { "Documents indexed must not be null" }, indexTimeInMillis = requireNotNull(indexTimeInMillis) { "Index time in millis must not be null" }, - searchTimeInMillis = requireNotNull(searchTimeInMillis) { "Search time in millis must not be null" } + searchTimeInMillis = requireNotNull(searchTimeInMillis) { "Search time in millis must not be null" }, ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/opensearchapi/OpenSearchExtensions.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/opensearchapi/OpenSearchExtensions.kt index 48897a2c7..3dbf2370c 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/opensearchapi/OpenSearchExtensions.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/opensearchapi/OpenSearchExtensions.kt @@ -10,10 +10,10 @@ import org.apache.logging.log4j.Logger import org.opensearch.OpenSearchException import org.opensearch.action.bulk.BackoffPolicy import org.opensearch.common.unit.TimeValue -import org.opensearch.indexmanagement.opensearchapi.isRetryable -import org.opensearch.indexmanagement.transform.util.TransformLockManager import org.opensearch.core.rest.RestStatus import org.opensearch.core.tasks.TaskCancelledException +import org.opensearch.indexmanagement.opensearchapi.isRetryable +import org.opensearch.indexmanagement.transform.util.TransformLockManager import java.util.regex.Pattern /** @@ -39,7 +39,7 @@ suspend fun BackoffPolicy.retryTransformSearch( logger: Logger, transformLockManager: TransformLockManager, retryOn: List = emptyList(), - block: suspend (backoff: TimeValue) -> T + block: suspend (backoff: TimeValue) -> T, ): T { val iter = iterator() var backoff: TimeValue = TimeValue.ZERO diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestDeleteTransformAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestDeleteTransformAction.kt index 34cda3053..ef7076b24 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestDeleteTransformAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestDeleteTransformAction.kt @@ -20,10 +20,9 @@ import 
org.opensearch.rest.action.RestToXContentListener import java.io.IOException class RestDeleteTransformAction : BaseRestHandler() { - override fun routes(): List { return listOf( - Route(DELETE, "$TRANSFORM_BASE_URI/{transformID}") + Route(DELETE, "$TRANSFORM_BASE_URI/{transformID}"), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestExplainTransformAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestExplainTransformAction.kt index bc3653235..095e92d71 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestExplainTransformAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestExplainTransformAction.kt @@ -18,7 +18,6 @@ import org.opensearch.rest.RestRequest.Method.GET import org.opensearch.rest.action.RestToXContentListener class RestExplainTransformAction : BaseRestHandler() { - override fun routes(): List { return listOf(Route(GET, "$TRANSFORM_BASE_URI/{transformID}/_explain")) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestGetTransformAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestGetTransformAction.kt index bf311671d..918fd0d78 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestGetTransformAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestGetTransformAction.kt @@ -26,12 +26,11 @@ import org.opensearch.rest.action.RestToXContentListener import org.opensearch.search.fetch.subphase.FetchSourceContext class RestGetTransformAction : BaseRestHandler() { - override fun routes(): List { return listOf( Route(GET, TRANSFORM_BASE_URI), Route(GET, "$TRANSFORM_BASE_URI/{transformID}"), - Route(HEAD, "$TRANSFORM_BASE_URI/{transformID}") + Route(HEAD, "$TRANSFORM_BASE_URI/{transformID}"), ) } @@ -48,13 +47,14 @@ class RestGetTransformAction : BaseRestHandler() { val sortDirection = request.param("sortDirection", DEFAULT_SORT_DIRECTION) return RestChannelConsumer { channel -> if (transformID == null || transformID.isEmpty()) { - val req = GetTransformsRequest( - searchString, - from, - size, - sortField, - sortDirection - ) + val req = + GetTransformsRequest( + searchString, + from, + size, + sortField, + sortDirection, + ) client.execute(GetTransformsAction.INSTANCE, req, RestToXContentListener(channel)) } else { val req = GetTransformRequest(transformID, if (request.method() == HEAD) FetchSourceContext.DO_NOT_FETCH_SOURCE else null) diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestIndexTransformAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestIndexTransformAction.kt index d68061809..9bad49952 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestIndexTransformAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestIndexTransformAction.kt @@ -7,6 +7,7 @@ package org.opensearch.indexmanagement.transform.resthandler import org.opensearch.action.support.WriteRequest import org.opensearch.client.node.NodeClient +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.ToXContent import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.TRANSFORM_BASE_URI @@ -27,17 +28,15 @@ import org.opensearch.rest.RestHandler import org.opensearch.rest.RestRequest import org.opensearch.rest.RestRequest.Method.PUT 
import org.opensearch.rest.RestResponse -import org.opensearch.core.rest.RestStatus import org.opensearch.rest.action.RestResponseListener import java.io.IOException import java.time.Instant class RestIndexTransformAction : BaseRestHandler() { - override fun routes(): List { return listOf( RestHandler.Route(PUT, TRANSFORM_BASE_URI), - RestHandler.Route(PUT, "$TRANSFORM_BASE_URI/{transformID}") + RestHandler.Route(PUT, "$TRANSFORM_BASE_URI/{transformID}"), ) } @@ -55,21 +54,22 @@ class RestIndexTransformAction : BaseRestHandler() { val seqNo = request.paramAsLong(IF_SEQ_NO, SequenceNumbers.UNASSIGNED_SEQ_NO) val primaryTerm = request.paramAsLong(IF_PRIMARY_TERM, SequenceNumbers.UNASSIGNED_PRIMARY_TERM) val xcp = request.contentParser() - val transform = xcp.parseWithType(id = id, seqNo = seqNo, primaryTerm = primaryTerm, parse = Transform.Companion::parse) - .copy(updatedAt = Instant.now()) - val refreshPolicy = if (request.hasParam(REFRESH)) { - WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) - } else { - WriteRequest.RefreshPolicy.IMMEDIATE - } + val transform = + xcp.parseWithType(id = id, seqNo = seqNo, primaryTerm = primaryTerm, parse = Transform.Companion::parse) + .copy(updatedAt = Instant.now()) + val refreshPolicy = + if (request.hasParam(REFRESH)) { + WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) + } else { + WriteRequest.RefreshPolicy.IMMEDIATE + } val indexTransformRequest = IndexTransformRequest(transform, refreshPolicy) return RestChannelConsumer { channel -> client.execute(IndexTransformAction.INSTANCE, indexTransformRequest, indexTransformResponse(channel)) } } - private fun indexTransformResponse(channel: RestChannel): - RestResponseListener { + private fun indexTransformResponse(channel: RestChannel): RestResponseListener { return object : RestResponseListener(channel) { @Throws(Exception::class) override fun buildResponse(response: IndexTransformResponse): RestResponse { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestPreviewTransformAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestPreviewTransformAction.kt index e96735a5f..012663559 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestPreviewTransformAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestPreviewTransformAction.kt @@ -19,11 +19,10 @@ import org.opensearch.rest.RestRequest.Method.POST import org.opensearch.rest.action.RestToXContentListener class RestPreviewTransformAction : BaseRestHandler() { - override fun routes(): List { return listOf( RestHandler.Route(POST, TRANSFORM_BASE_URI), - RestHandler.Route(POST, "$TRANSFORM_BASE_URI/_preview") + RestHandler.Route(POST, "$TRANSFORM_BASE_URI/_preview"), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestStartTransformAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestStartTransformAction.kt index 9cfc1d97c..8d3c18313 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestStartTransformAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestStartTransformAction.kt @@ -19,10 +19,9 @@ import org.opensearch.rest.action.RestToXContentListener import java.io.IOException class RestStartTransformAction : BaseRestHandler() { - override fun routes(): List { return listOf( - Route(POST, "$TRANSFORM_BASE_URI/{transformID}/_start") + Route(POST, 
"$TRANSFORM_BASE_URI/{transformID}/_start"), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestStopTransformAction.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestStopTransformAction.kt index a48034e5c..6eb005083 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestStopTransformAction.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestStopTransformAction.kt @@ -19,10 +19,9 @@ import org.opensearch.rest.action.RestToXContentListener import java.io.IOException class RestStopTransformAction : BaseRestHandler() { - override fun routes(): List { return listOf( - Route(POST, "$TRANSFORM_BASE_URI/{transformID}/_stop") + Route(POST, "$TRANSFORM_BASE_URI/{transformID}/_stop"), ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/settings/TransformSettings.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/settings/TransformSettings.kt index abfdc1b1f..71c36246e 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/settings/TransformSettings.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/settings/TransformSettings.kt @@ -10,54 +10,59 @@ import org.opensearch.common.unit.TimeValue @Suppress("UtilityClassWithPublicConstructor") class TransformSettings { - companion object { const val DEFAULT_RENEW_LOCK_RETRY_COUNT = 3 const val DEFAULT_RENEW_LOCK_RETRY_DELAY = 1000L const val MINIMUM_CANCEL_AFTER_TIME_INTERVAL_SECONDS = 600L - val TRANSFORM_JOB_SEARCH_BACKOFF_COUNT: Setting = Setting.intSetting( - "plugins.transform.internal.search.backoff_count", - 5, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val TRANSFORM_JOB_SEARCH_BACKOFF_MILLIS: Setting = Setting.positiveTimeSetting( - "plugins.transform.internal.search.backoff_millis", - TimeValue.timeValueMillis(1000), - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val TRANSFORM_JOB_INDEX_BACKOFF_COUNT: Setting = Setting.intSetting( - "plugins.transform.internal.index.backoff_count", - 5, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val TRANSFORM_JOB_INDEX_BACKOFF_MILLIS: Setting = Setting.positiveTimeSetting( - "plugins.transform.internal.index.backoff_millis", - TimeValue.timeValueMillis(1000), - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val TRANSFORM_CIRCUIT_BREAKER_ENABLED: Setting = Setting.boolSetting( - "plugins.transform.circuit_breaker.enabled", - true, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val TRANSFORM_CIRCUIT_BREAKER_JVM_THRESHOLD: Setting = Setting.intSetting( - "plugins.transform.circuit_breaker.jvm.threshold", - 85, - 0, - 100, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) + val TRANSFORM_JOB_SEARCH_BACKOFF_COUNT: Setting = + Setting.intSetting( + "plugins.transform.internal.search.backoff_count", + 5, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val TRANSFORM_JOB_SEARCH_BACKOFF_MILLIS: Setting = + Setting.positiveTimeSetting( + "plugins.transform.internal.search.backoff_millis", + TimeValue.timeValueMillis(1000), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val TRANSFORM_JOB_INDEX_BACKOFF_COUNT: Setting = + Setting.intSetting( + "plugins.transform.internal.index.backoff_count", + 5, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val TRANSFORM_JOB_INDEX_BACKOFF_MILLIS: Setting = + Setting.positiveTimeSetting( + "plugins.transform.internal.index.backoff_millis", + 
TimeValue.timeValueMillis(1000), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val TRANSFORM_CIRCUIT_BREAKER_ENABLED: Setting = + Setting.boolSetting( + "plugins.transform.circuit_breaker.enabled", + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val TRANSFORM_CIRCUIT_BREAKER_JVM_THRESHOLD: Setting = + Setting.intSetting( + "plugins.transform.circuit_breaker.jvm.threshold", + 85, + 0, + 100, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/util/TransformLockManager.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/util/TransformLockManager.kt index 0a06ef18b..12bdc8ac0 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/transform/util/TransformLockManager.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/util/TransformLockManager.kt @@ -24,18 +24,20 @@ import java.time.Instant @OpenForTesting class TransformLockManager( private val transformJob: Transform, - val context: JobExecutionContext + val context: JobExecutionContext, ) { private val logger = LogManager.getLogger(javaClass) - private val exponentialBackoffPolicy = BackoffPolicy.exponentialBackoff( - TimeValue.timeValueMillis(TransformSettings.DEFAULT_RENEW_LOCK_RETRY_DELAY), - TransformSettings.DEFAULT_RENEW_LOCK_RETRY_COUNT - ) + private val exponentialBackoffPolicy = + BackoffPolicy.exponentialBackoff( + TimeValue.timeValueMillis(TransformSettings.DEFAULT_RENEW_LOCK_RETRY_DELAY), + TransformSettings.DEFAULT_RENEW_LOCK_RETRY_COUNT, + ) var lock: LockModel? = null protected set fun lockExpirationInSeconds() = lock?.let { it.lockTime.epochSecond + it.lockDurationSeconds - Instant.now().epochSecond } + /** * Util method to attempt to get the lock on the requested scheduled job using the backoff policy. * If failed to acquire the lock using backoff policy will return a null lock otherwise returns acquired lock. 
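The TransformLockManager change above keeps its exponential backoff policy built from DEFAULT_RENEW_LOCK_RETRY_DELAY (1000 ms) and DEFAULT_RENEW_LOCK_RETRY_COUNT (3), and the trailing doc comment states the contract: try to acquire the scheduled-job lock under that policy, and hand back a null lock if every retry fails. A minimal sketch of that retry contract, in plain Kotlin rather than the plugin's LockModel/JobExecutionContext API (acquireWithBackoff and its acquire() lambda are illustrative names, and the doubling delay schedule is an assumption, not BackoffPolicy's exact formula):

    // Sketch only: try to acquire a lock, backing off between attempts,
    // and return null once the retries are exhausted.
    fun <T : Any> acquireWithBackoff(
        baseDelayMillis: Long = 1000L,   // mirrors DEFAULT_RENEW_LOCK_RETRY_DELAY above
        retries: Int = 3,                // mirrors DEFAULT_RENEW_LOCK_RETRY_COUNT above
        acquire: () -> T?,               // hypothetical stand-in for the real lock call
    ): T? {
        val delays = (0 until retries).map { baseDelayMillis shl it }  // 1000, 2000, 4000 ms
        for ((attempt, delay) in delays.withIndex()) {
            val lock = acquire()
            if (lock != null) return lock                        // acquired on this attempt
            if (attempt < delays.lastIndex) Thread.sleep(delay)  // back off, then retry
        }
        return null  // retries exhausted: caller treats null as "could not lock"
    }

    fun main() {
        var failuresLeft = 1
        val lock = acquireWithBackoff { if (failuresLeft-- > 0) null else "lock-model" }
        println(lock)  // prints "lock-model" after one backoff interval
    }

Returning null instead of throwing keeps the behaviour the comment describes: a caller can treat a null lock as "do not run this execution" rather than as an error.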
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/util/IndexManagementException.kt b/src/main/kotlin/org/opensearch/indexmanagement/util/IndexManagementException.kt index 9242c71a9..05bea82b3 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/util/IndexManagementException.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/util/IndexManagementException.kt @@ -7,14 +7,13 @@ package org.opensearch.indexmanagement.util import org.opensearch.OpenSearchException import org.opensearch.OpenSearchStatusException -import org.opensearch.core.common.Strings import org.opensearch.common.ValidationException -import org.opensearch.index.IndexNotFoundException +import org.opensearch.core.common.Strings import org.opensearch.core.rest.RestStatus +import org.opensearch.index.IndexNotFoundException import java.lang.IllegalArgumentException class IndexManagementException(message: String, val status: RestStatus, ex: Exception) : OpenSearchException(message, ex) { - override fun status(): RestStatus { return status } @@ -22,7 +21,6 @@ class IndexManagementException(message: String, val status: RestStatus, ex: Exce companion object { @JvmStatic fun wrap(ex: Exception): OpenSearchException { - var friendlyMsg = ex.message as String var status = RestStatus.INTERNAL_SERVER_ERROR when (ex) { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/util/IndexUtils.kt b/src/main/kotlin/org/opensearch/indexmanagement/util/IndexUtils.kt index 934fde0e1..cd0786e76 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/util/IndexUtils.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/util/IndexUtils.kt @@ -6,7 +6,6 @@ package org.opensearch.indexmanagement.util import org.apache.logging.log4j.LogManager -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest import org.opensearch.action.support.master.AcknowledgedResponse import org.opensearch.client.IndicesAdminClient @@ -16,6 +15,7 @@ import org.opensearch.cluster.metadata.IndexMetadata import org.opensearch.common.hash.MurmurHash3 import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.action.ActionListener import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.XContentParser.Token import org.opensearch.indexmanagement.IndexManagementIndices @@ -51,10 +51,11 @@ class IndexUtils { @Suppress("NestedBlockDepth") fun getSchemaVersion(mapping: String): Long { - val xcp = XContentType.JSON.xContent().createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, mapping - ) + val xcp = + XContentType.JSON.xContent().createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, mapping, + ) while (!xcp.isClosed) { val token = xcp.currentToken() @@ -96,7 +97,7 @@ class IndexUtils { fun checkAndUpdateConfigIndexMapping( clusterState: ClusterState, client: IndicesAdminClient, - actionListener: ActionListener + actionListener: ActionListener, ) { checkAndUpdateIndexMapping( IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, @@ -104,14 +105,14 @@ class IndexUtils { IndexManagementIndices.indexManagementMappings, clusterState, client, - actionListener + actionListener, ) } fun checkAndUpdateHistoryIndexMapping( clusterState: ClusterState, client: IndicesAdminClient, - actionListener: ActionListener + actionListener: ActionListener, ) { checkAndUpdateAliasMapping( IndexManagementIndices.HISTORY_WRITE_INDEX_ALIAS, @@ 
-119,7 +120,7 @@ class IndexUtils { IndexManagementIndices.indexStateManagementHistoryMappings, clusterState, client, - actionListener + actionListener, ) } @@ -131,7 +132,7 @@ class IndexUtils { mapping: String, clusterState: ClusterState, client: IndicesAdminClient, - actionListener: ActionListener + actionListener: ActionListener, ) { if (clusterState.metadata.indices.containsKey(index)) { if (shouldUpdateIndex(clusterState.metadata.indices[index], schemaVersion)) { @@ -154,7 +155,7 @@ class IndexUtils { mapping: String, clusterState: ClusterState, client: IndicesAdminClient, - actionListener: ActionListener + actionListener: ActionListener, ) { val result = clusterState.metadata.indicesLookup[alias] if (result == null || result.type != IndexAbstraction.Type.ALIAS) { @@ -167,8 +168,9 @@ class IndexUtils { actionListener.onResponse(AcknowledgedResponse(false)) } else { if (shouldUpdateIndex(writeIndex, schemaVersion)) { - val putMappingRequest: PutMappingRequest = PutMappingRequest(writeIndex.index.name) - .source(mapping, XContentType.JSON) + val putMappingRequest: PutMappingRequest = + PutMappingRequest(writeIndex.index.name) + .source(mapping, XContentType.JSON) client.putMapping(putMappingRequest, actionListener) } else { actionListener.onResponse(AcknowledgedResponse(true)) @@ -204,8 +206,9 @@ class IndexUtils { fun getWriteIndex(indexName: String?, clusterState: ClusterState): String? { if (isAlias(indexName, clusterState) || isDataStream(indexName, clusterState)) { - val writeIndexMetadata = clusterState.metadata - .indicesLookup[indexName]!!.writeIndex + val writeIndexMetadata = + clusterState.metadata + .indicesLookup[indexName]!!.writeIndex if (writeIndexMetadata != null) { return writeIndexMetadata.index.name } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/util/RestHandlerUtils.kt b/src/main/kotlin/org/opensearch/indexmanagement/util/RestHandlerUtils.kt index 934b3d3fb..fefb48c1f 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/util/RestHandlerUtils.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/util/RestHandlerUtils.kt @@ -4,6 +4,7 @@ */ @file:Suppress("TopLevelPropertyNaming", "MatchingDeclarationName") + package org.opensearch.indexmanagement.util import org.opensearch.indexmanagement.common.model.rest.DEFAULT_PAGINATION_FROM diff --git a/src/main/kotlin/org/opensearch/indexmanagement/util/ScheduledJobUtils.kt b/src/main/kotlin/org/opensearch/indexmanagement/util/ScheduledJobUtils.kt index 6880dd9c7..9bfff4684 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/util/ScheduledJobUtils.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/util/ScheduledJobUtils.kt @@ -7,24 +7,24 @@ package org.opensearch.indexmanagement.util import org.opensearch.ExceptionsHelper import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener -import org.opensearch.core.action.ActionResponse import org.opensearch.action.search.SearchRequest import org.opensearch.action.search.SearchResponse import org.opensearch.client.Client -import org.opensearch.core.common.bytes.BytesReference import org.opensearch.common.xcontent.LoggingDeprecationHandler -import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.common.xcontent.XContentHelper -import org.opensearch.core.xcontent.XContentParser import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.action.ActionListener +import org.opensearch.core.action.ActionResponse +import org.opensearch.core.common.bytes.BytesReference +import 
org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.XContentParser import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.opensearchapi.parseWithType import org.opensearch.indexmanagement.rollup.action.get.GetRollupsResponse import org.opensearch.indexmanagement.rollup.model.Rollup import org.opensearch.indexmanagement.transform.action.get.GetTransformsResponse import org.opensearch.indexmanagement.transform.model.Transform -import org.opensearch.core.rest.RestStatus import org.opensearch.search.builder.SearchSourceBuilder fun getJobs( @@ -32,7 +32,7 @@ fun getJobs( searchSourceBuilder: SearchSourceBuilder, listener: ActionListener, scheduledJobType: String, - contentParser: (b: BytesReference) -> XContentParser = ::contentParser + contentParser: (b: BytesReference) -> XContentParser = ::contentParser, ) { val searchRequest = SearchRequest(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX).source(searchSourceBuilder) client.search( @@ -46,23 +46,24 @@ fun getJobs( listener.onFailure(OpenSearchStatusException("Get $scheduledJobType failed on some shards", failure.status(), failure.cause)) } else { try { - val jobs = response.hits.hits.map { - contentParser(it.sourceRef).parseWithType(it.id, it.seqNo, it.primaryTerm, getParser(scheduledJobType)) - } + val jobs = + response.hits.hits.map { + contentParser(it.sourceRef).parseWithType(it.id, it.seqNo, it.primaryTerm, getParser(scheduledJobType)) + } listener.onResponse(populateResponse(scheduledJobType, jobs, RestStatus.OK, totalJobs.toInt())) } catch (e: Exception) { listener.onFailure( OpenSearchStatusException( "Failed to parse $scheduledJobType", - RestStatus.INTERNAL_SERVER_ERROR, ExceptionsHelper.unwrapCause(e) - ) + RestStatus.INTERNAL_SERVER_ERROR, ExceptionsHelper.unwrapCause(e), + ), ) } } } override fun onFailure(e: Exception) = listener.onFailure(e) - } + }, ) } @@ -71,7 +72,7 @@ private fun populateResponse( jobType: String, jobs: List, status: RestStatus, - totalJobs: Int + totalJobs: Int, ): ActionResponse { return when (jobType) { Rollup.ROLLUP_TYPE -> GetRollupsResponse(jobs as List, totalJobs, status) @@ -95,6 +96,6 @@ private fun getParser(jobType: String): (XContentParser, String, Long, Long) -> private fun contentParser(bytesReference: BytesReference): XContentParser { return XContentHelper.createParser( NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, bytesReference, XContentType.JSON + LoggingDeprecationHandler.INSTANCE, bytesReference, XContentType.JSON, ) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/util/SecurityUtils.kt b/src/main/kotlin/org/opensearch/indexmanagement/util/SecurityUtils.kt index b087a4a7a..0b03d620d 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/util/SecurityUtils.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/util/SecurityUtils.kt @@ -6,14 +6,14 @@ package org.opensearch.indexmanagement.util import org.opensearch.OpenSearchStatusException -import org.opensearch.core.action.ActionListener import org.opensearch.common.util.concurrent.ThreadContext import org.opensearch.commons.ConfigConstants import org.opensearch.commons.authuser.User +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.index.query.ExistsQueryBuilder import org.opensearch.index.query.TermsQueryBuilder -import org.opensearch.core.rest.RestStatus 
@Suppress("ReturnCount", "UtilityClassWithPublicConstructor") class SecurityUtils { @@ -44,12 +44,12 @@ class SecurityUtils { throw IndexManagementException.wrap( OpenSearchStatusException( "Filter by user backend roles in IndexManagement is not supported with security disabled", - RestStatus.FORBIDDEN - ) + RestStatus.FORBIDDEN, + ), ) } else if (user.backendRoles.isEmpty()) { throw IndexManagementException.wrap( - OpenSearchStatusException("User doesn't have backend roles configured. Contact administrator", RestStatus.FORBIDDEN) + OpenSearchStatusException("User doesn't have backend roles configured. Contact administrator", RestStatus.FORBIDDEN), ) } } @@ -66,16 +66,16 @@ class SecurityUtils { IndexManagementException.wrap( OpenSearchStatusException( "Filter by user backend roles in IndexManagement is not supported with security disabled", - RestStatus.FORBIDDEN - ) - ) + RestStatus.FORBIDDEN, + ), + ), ) return false } else if (user.backendRoles.isEmpty()) { actionListener.onFailure( IndexManagementException.wrap( - OpenSearchStatusException("User doesn't have backend roles configured. Contact administrator", RestStatus.FORBIDDEN) - ) + OpenSearchStatusException("User doesn't have backend roles configured. Contact administrator", RestStatus.FORBIDDEN), + ), ) return false } @@ -93,13 +93,13 @@ class SecurityUtils { filterEnabled: Boolean = false, resourceName: String, resourceId: String, - actionListener: ActionListener + actionListener: ActionListener, ): Boolean { if (!userHasPermissionForResource(requestedUser, resourceUser, filterEnabled)) { actionListener.onFailure( IndexManagementException.wrap( - OpenSearchStatusException("Do not have permission for $resourceName [$resourceId]", RestStatus.FORBIDDEN) - ) + OpenSearchStatusException("Do not have permission for $resourceName [$resourceId]", RestStatus.FORBIDDEN), + ), ) return false } @@ -115,11 +115,11 @@ class SecurityUtils { resourceUser: User?, filterEnabled: Boolean = false, resourceName: String, - resourceId: String + resourceId: String, ) { if (!userHasPermissionForResource(requestedUser, resourceUser, filterEnabled)) { throw IndexManagementException.wrap( - OpenSearchStatusException("Do not have permission for $resourceName [$resourceId]", RestStatus.FORBIDDEN) + OpenSearchStatusException("Do not have permission for $resourceName [$resourceId]", RestStatus.FORBIDDEN), ) } } @@ -131,7 +131,7 @@ class SecurityUtils { fun userHasPermissionForResource( requestedUser: User?, resourceUser: User?, - filterEnabled: Boolean = false + filterEnabled: Boolean = false, ): Boolean { // Will not filter if filter is not enabled or stored user is null or requested user is null or if the user is admin if (!filterEnabled || resourceUser == null || requestedUser == null || requestedUser.roles.contains(ADMIN_ROLE)) { @@ -152,13 +152,14 @@ class SecurityUtils { return } - val filterQuery = BoolQueryBuilder().should( - TermsQueryBuilder("$filterPathPrefix.backend_roles.keyword", user.backendRoles) - ).should( - BoolQueryBuilder().mustNot( - ExistsQueryBuilder(filterPathPrefix) + val filterQuery = + BoolQueryBuilder().should( + TermsQueryBuilder("$filterPathPrefix.backend_roles.keyword", user.backendRoles), + ).should( + BoolQueryBuilder().mustNot( + ExistsQueryBuilder(filterPathPrefix), + ), ) - ) queryBuilder.filter(filterQuery) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/AccessRoles.kt b/src/test/kotlin/org/opensearch/indexmanagement/AccessRoles.kt index 19682e3ac..56ca4e55e 100644 --- 
a/src/test/kotlin/org/opensearch/indexmanagement/AccessRoles.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/AccessRoles.kt @@ -36,6 +36,7 @@ const val GET_POLICY = GetPolicyAction.NAME const val EXPLAIN_INDEX = ExplainAction.NAME const val MANAGED_INDEX = ManagedIndexAction.NAME const val DELETE_POLICY = DeletePolicyAction.NAME + // Rollup const val ROLLUP_ALL = "cluster:admin/opendistro/rollup/*" const val INDEX_ROLLUP = IndexRollupAction.NAME @@ -43,6 +44,7 @@ const val GET_ROLLUP = GetRollupAction.NAME const val EXPLAIN_ROLLUP = ExplainRollupAction.NAME const val UPDATE_ROLLUP = UpdateRollupMappingAction.NAME const val DELETE_ROLLUP = DeleteRollupAction.NAME + // Transform const val TRANSFORM_ACTION = IndexTransformAction.NAME const val GET_TRANSFORM = GetTransformAction.NAME @@ -51,8 +53,10 @@ const val START_TRANSFORM = StartTransformAction.NAME const val DELETE_TRANSFORM = DeleteTransformsAction.NAME const val GET_TRANSFORMS = GetTransformsAction.NAME const val STOP_TRANSFORM = StopTransformAction.NAME + // In order to execute transform, user need to have health privilege const val HEALTH = "cluster:monitor/health" + // Index const val GET_INDEX_MAPPING = "indices:admin/mappings/get" const val PUT_INDEX_MAPPING = "indices:admin/mapping/put" @@ -60,6 +64,7 @@ const val SEARCH_INDEX = "indices:data/read/search" const val CREATE_INDEX = "indices:admin/create" const val WRITE_INDEX = "indices:data/write/index" const val BULK_WRITE_INDEX = "indices:data/write/bulk*" + // Long-running operation notification (lron) const val INDEX_LRON_CONFIG = IndexLRONConfigAction.NAME const val GET_LRON_CONFIG = GetLRONConfigAction.NAME diff --git a/src/test/kotlin/org/opensearch/indexmanagement/IndexManagementIndicesIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/IndexManagementIndicesIT.kt index 78b3a7bef..24a46ace9 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/IndexManagementIndicesIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/IndexManagementIndicesIT.kt @@ -8,8 +8,9 @@ package org.opensearch.indexmanagement import org.apache.hc.core5.http.ContentType import org.apache.hc.core5.http.io.entity.StringEntity import org.opensearch.common.settings.Settings -import org.opensearch.core.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.ToXContent import org.opensearch.indexmanagement.IndexManagementIndices.Companion.HISTORY_INDEX_BASE import org.opensearch.indexmanagement.IndexManagementIndices.Companion.HISTORY_WRITE_INDEX_ALIAS import org.opensearch.indexmanagement.IndexManagementIndices.Companion.indexManagementMappings @@ -32,31 +33,29 @@ import org.opensearch.indexmanagement.opensearchapi.string import org.opensearch.indexmanagement.refreshanalyzer.RestRefreshSearchAnalyzerAction import org.opensearch.indexmanagement.rollup.randomRollup import org.opensearch.rest.RestRequest -import org.opensearch.core.rest.RestStatus import org.opensearch.test.OpenSearchTestCase import java.util.Locale class IndexManagementIndicesIT : IndexStateManagementRestTestCase() { - private val testIndexName = javaClass.simpleName.lowercase(Locale.ROOT) /* - * If this test fails it means you changed the config mappings - * This test is to ensure you did not forget to increase the schema_version in the mappings _meta object - * The schema_version is used at runtime to check if the mappings need to be updated for the index - * Once you are sure you increased the schema_version or know 
it is not needed you can update the cached mappings with the new values - * */ + * If this test fails it means you changed the config mappings + * This test is to ensure you did not forget to increase the schema_version in the mappings _meta object + * The schema_version is used at runtime to check if the mappings need to be updated for the index + * Once you are sure you increased the schema_version or know it is not needed you can update the cached mappings with the new values + * */ fun `test config mappings schema version number`() { val cachedMappings = javaClass.classLoader.getResource("mappings/cached-opendistro-ism-config.json")!!.readText() assertEquals("I see you updated the config mappings. Did you also update the schema_version?", cachedMappings, indexManagementMappings) } /* - * If this test fails it means you changed the history mappings - * This test is to ensure you did not forget to increase the schema_version in the mappings _meta object - * The schema_version is used at runtime to check if the mappings need to be updated for the index - * Once you are sure you increased the schema_version or know it is not needed you can update the cached mappings with the new values - * */ + * If this test fails it means you changed the history mappings + * This test is to ensure you did not forget to increase the schema_version in the mappings _meta object + * The schema_version is used at runtime to check if the mappings need to be updated for the index + * Once you are sure you increased the schema_version or know it is not needed you can update the cached mappings with the new values + * */ fun `test history mappings schema version number`() { val cachedMappings = javaClass.classLoader.getResource("mappings/cached-opendistro-ism-history.json")!!.readText() assertEquals("I see you updated the history mappings. 
Did you also update the schema_version?", cachedMappings, indexStateManagementHistoryMappings) @@ -74,8 +73,9 @@ class IndexManagementIndicesIT : IndexStateManagementRestTestCase() { wipeAllIndices() assertIndexDoesNotExist(INDEX_MANAGEMENT_INDEX) - val mapping = indexManagementMappings.trim().trimStart('{').trimEnd('}') - .replace("\"schema_version\": $configSchemaVersion", "\"schema_version\": 0") + val mapping = + indexManagementMappings.trim().trimStart('{').trimEnd('}') + .replace("\"schema_version\": $configSchemaVersion", "\"schema_version\": 0") createIndex(INDEX_MANAGEMENT_INDEX, Settings.builder().put(INDEX_HIDDEN, true).build(), mapping) assertIndexExists(INDEX_MANAGEMENT_INDEX) @@ -92,8 +92,9 @@ class IndexManagementIndicesIT : IndexStateManagementRestTestCase() { fun `test update management index history mappings with new schema version`() { assertIndexDoesNotExist("$HISTORY_WRITE_INDEX_ALIAS?allow_no_indices=false") - val mapping = indexStateManagementHistoryMappings.trim().trimStart('{').trimEnd('}') - .replace("\"schema_version\": $historySchemaVersion", "\"schema_version\": 0") + val mapping = + indexStateManagementHistoryMappings.trim().trimStart('{').trimEnd('}') + .replace("\"schema_version\": $historySchemaVersion", "\"schema_version\": 0") val aliases = "\"$HISTORY_WRITE_INDEX_ALIAS\": { \"is_write_index\": true }" createIndex("$HISTORY_INDEX_BASE-1", Settings.builder().put(INDEX_HIDDEN, true).build(), mapping, aliases) @@ -128,23 +129,26 @@ class IndexManagementIndicesIT : IndexStateManagementRestTestCase() { assertNull("Change policy is not null", managedIndexConfig.changePolicy) assertEquals("Policy id does not match", policy.id, managedIndexConfig.policyID) - val mapping = "{" + indexManagementMappings.trimStart('{').trimEnd('}') - .replace("\"schema_version\": $configSchemaVersion", "\"schema_version\": 0") + val mapping = + "{" + + indexManagementMappings.trimStart('{').trimEnd('}') + .replace("\"schema_version\": $configSchemaVersion", "\"schema_version\": 0") val entity = StringEntity(mapping, ContentType.APPLICATION_JSON) client().makeRequest( RestRequest.Method.PUT.toString(), - "/$INDEX_MANAGEMENT_INDEX/_mapping", emptyMap(), entity + "/$INDEX_MANAGEMENT_INDEX/_mapping", emptyMap(), entity, ) verifyIndexSchemaVersion(INDEX_MANAGEMENT_INDEX, 0) // if we try to change policy now, it'll have no ManagedIndexMetaData yet and should succeed val changePolicy = ChangePolicy(newPolicy.id, null, emptyList(), false) - val response = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/$index", emptyMap(), changePolicy.toHttpEntity() - ) + val response = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/$index", emptyMap(), changePolicy.toHttpEntity(), + ) verifyIndexSchemaVersion(INDEX_MANAGEMENT_INDEX, configSchemaVersion) @@ -162,42 +166,48 @@ class IndexManagementIndicesIT : IndexStateManagementRestTestCase() { val indexName = "bwc_index" createIndex(indexName, null) - val addPolicyResponse = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestAddPolicyAction.LEGACY_ADD_POLICY_BASE_URI}/$indexName", - StringEntity("{ \"policy_id\": \"$policyId\" }", ContentType.APPLICATION_JSON) - ) + val addPolicyResponse = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestAddPolicyAction.LEGACY_ADD_POLICY_BASE_URI}/$indexName", + StringEntity("{ \"policy_id\": \"$policyId\" }", ContentType.APPLICATION_JSON), + ) 
assertEquals("Unexpected RestStatus", RestStatus.OK, addPolicyResponse.restStatus()) - val changePolicyResponse = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestAddPolicyAction.LEGACY_ADD_POLICY_BASE_URI}/$indexName", - StringEntity("{ \"policy_id\": \"$policyId\" }", ContentType.APPLICATION_JSON) - ) + val changePolicyResponse = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestAddPolicyAction.LEGACY_ADD_POLICY_BASE_URI}/$indexName", + StringEntity("{ \"policy_id\": \"$policyId\" }", ContentType.APPLICATION_JSON), + ) assertEquals("Unexpected RestStatus", RestStatus.OK, changePolicyResponse.restStatus()) - val retryFailedResponse = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestRetryFailedManagedIndexAction.LEGACY_RETRY_BASE_URI}/$indexName" - ) + val retryFailedResponse = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestRetryFailedManagedIndexAction.LEGACY_RETRY_BASE_URI}/$indexName", + ) assertEquals("Unexpected RestStatus", RestStatus.OK, retryFailedResponse.restStatus()) - val explainResponse = client().makeRequest( - RestRequest.Method.GET.toString(), - "${RestExplainAction.LEGACY_EXPLAIN_BASE_URI}/$indexName" - ) + val explainResponse = + client().makeRequest( + RestRequest.Method.GET.toString(), + "${RestExplainAction.LEGACY_EXPLAIN_BASE_URI}/$indexName", + ) assertEquals("Unexpected RestStatus", RestStatus.OK, explainResponse.restStatus()) - val removePolicyResponse = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestRemovePolicyAction.LEGACY_REMOVE_POLICY_BASE_URI}/$indexName" - ) + val removePolicyResponse = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestRemovePolicyAction.LEGACY_REMOVE_POLICY_BASE_URI}/$indexName", + ) assertEquals("Unexpected RestStatus", RestStatus.OK, removePolicyResponse.restStatus()) - val deletePolicyResponse = client().makeRequest( - RestRequest.Method.DELETE.toString(), - "${IndexManagementPlugin.LEGACY_POLICY_BASE_URI}/$policyId" - ) + val deletePolicyResponse = + client().makeRequest( + RestRequest.Method.DELETE.toString(), + "${IndexManagementPlugin.LEGACY_POLICY_BASE_URI}/$policyId", + ) assertEquals("Unexpected RestStatus", RestStatus.OK, deletePolicyResponse.restStatus()) val getPolicies = client().makeRequest(RestRequest.Method.GET.toString(), "${IndexManagementPlugin.LEGACY_POLICY_BASE_URI}") @@ -215,13 +225,14 @@ class IndexManagementIndicesIT : IndexStateManagementRestTestCase() { fun `test rollup backward compatibility with opendistro`() { val rollup = randomRollup() val rollupJsonString = rollup.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS).string() - val createRollupResponse = client().makeRequest( - "PUT", "${IndexManagementPlugin.LEGACY_ROLLUP_JOBS_BASE_URI}/${rollup.id}", emptyMap(), - StringEntity( - rollupJsonString, - ContentType.APPLICATION_JSON + val createRollupResponse = + client().makeRequest( + "PUT", "${IndexManagementPlugin.LEGACY_ROLLUP_JOBS_BASE_URI}/${rollup.id}", emptyMap(), + StringEntity( + rollupJsonString, + ContentType.APPLICATION_JSON, + ), ) - ) assertEquals("Create rollup failed", RestStatus.CREATED, createRollupResponse.restStatus()) val getRollupResponse = client().makeRequest("GET", "${IndexManagementPlugin.LEGACY_ROLLUP_JOBS_BASE_URI}/${rollup.id}") diff --git a/src/test/kotlin/org/opensearch/indexmanagement/IndexManagementRestTestCase.kt b/src/test/kotlin/org/opensearch/indexmanagement/IndexManagementRestTestCase.kt index 6f56a174f..bb51a1d6b 100644 --- 
a/src/test/kotlin/org/opensearch/indexmanagement/IndexManagementRestTestCase.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/IndexManagementRestTestCase.kt @@ -16,17 +16,17 @@ import org.opensearch.client.Request import org.opensearch.client.RequestOptions import org.opensearch.client.Response import org.opensearch.client.ResponseException -import org.opensearch.core.common.Strings import org.opensearch.client.RestClient import org.opensearch.client.WarningsHandler import org.opensearch.common.io.PathUtils import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.common.Strings +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.DeprecationHandler +import org.opensearch.core.xcontent.MediaType import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.indexmanagement.indexstatemanagement.util.INDEX_HIDDEN -import org.opensearch.core.rest.RestStatus -import org.opensearch.core.xcontent.MediaType import org.opensearch.indexmanagement.rollup.model.Rollup import org.opensearch.indexmanagement.transform.model.Transform import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule @@ -34,14 +34,13 @@ import java.io.IOException import java.nio.file.Files import java.time.Duration import java.time.Instant -import java.util.* +import java.util.Date import javax.management.MBeanServerInvocationHandler import javax.management.ObjectName import javax.management.remote.JMXConnectorFactory import javax.management.remote.JMXServiceURL abstract class IndexManagementRestTestCase : ODFERestTestCase() { - val configSchemaVersion = 21 val historySchemaVersion = 7 @@ -52,7 +51,7 @@ abstract class IndexManagementRestTestCase : ODFERestTestCase() { fun setAutoCreateIndex() { client().makeRequest( "PUT", "_cluster/settings", - StringEntity("""{"persistent":{"action.auto_create_index":"-.opendistro-*,*"}}""", ContentType.APPLICATION_JSON) + StringEntity("""{"persistent":{"action.auto_create_index":"-.opendistro-*,*"}}""", ContentType.APPLICATION_JSON), ) } @@ -83,8 +82,8 @@ abstract class IndexManagementRestTestCase : ODFERestTestCase() { } } """.trimIndent(), - ContentType.APPLICATION_JSON - ) + ContentType.APPLICATION_JSON, + ), ) } @@ -92,6 +91,7 @@ abstract class IndexManagementRestTestCase : ODFERestTestCase() { protected val isDebuggingRemoteCluster = System.getProperty("cluster.debug", "false")!!.toBoolean() protected val isLocalTest = clusterName() == "integTest" + private fun clusterName(): String { return System.getProperty("tests.clustername") } @@ -218,14 +218,15 @@ abstract class IndexManagementRestTestCase : ODFERestTestCase() { // During this period, this update got missed // Since from the log, this happens very fast (within 0.1~0.2s), the above cluster explain may not have the granularity to catch this. 
logger.info("Update rollup start time to $startTimeMillis") - val response = client().makeRequest( - "POST", "${IndexManagementPlugin.INDEX_MANAGEMENT_INDEX}/_update/${update.id}?wait_for_active_shards=$waitForActiveShards&refresh=true", - StringEntity( - "{\"doc\":{\"rollup\":{\"schedule\":{\"interval\":{\"start_time\":" + - "\"$startTimeMillis\"}}}}}", - ContentType.APPLICATION_JSON + val response = + client().makeRequest( + "POST", "${IndexManagementPlugin.INDEX_MANAGEMENT_INDEX}/_update/${update.id}?wait_for_active_shards=$waitForActiveShards&refresh=true", + StringEntity( + "{\"doc\":{\"rollup\":{\"schedule\":{\"interval\":{\"start_time\":" + + "\"$startTimeMillis\"}}}}}", + ContentType.APPLICATION_JSON, + ), ) - ) assertEquals("Request failed", RestStatus.OK, response.restStatus()) } @@ -247,23 +248,26 @@ abstract class IndexManagementRestTestCase : ODFERestTestCase() { val millis = Duration.of(intervalSchedule.interval.toLong(), intervalSchedule.unit).minusSeconds(2).toMillis() val startTimeMillis = desiredStartTimeMillis ?: (Instant.now().toEpochMilli() - millis) val waitForActiveShards = if (isMultiNode) "all" else "1" - val response = client().makeRequest( - "POST", "${IndexManagementPlugin.INDEX_MANAGEMENT_INDEX}/_update/${update.id}?wait_for_active_shards=$waitForActiveShards", - StringEntity( - "{\"doc\":{\"transform\":{\"schedule\":{\"interval\":{\"start_time\":" + - "\"$startTimeMillis\"}}}}}", - ContentType.APPLICATION_JSON + val response = + client().makeRequest( + "POST", "${IndexManagementPlugin.INDEX_MANAGEMENT_INDEX}/_update/${update.id}?wait_for_active_shards=$waitForActiveShards", + StringEntity( + "{\"doc\":{\"transform\":{\"schedule\":{\"interval\":{\"start_time\":" + + "\"$startTimeMillis\"}}}}}", + ContentType.APPLICATION_JSON, + ), ) - ) assertEquals("Request failed", RestStatus.OK, response.restStatus()) } override fun preserveIndicesUponCompletion(): Boolean = true + companion object { val isMultiNode = System.getProperty("cluster.number_of_nodes", "1").toInt() > 1 val isBWCTest = System.getProperty("tests.plugin_bwc_version", "0") != "0" protected val defaultKeepIndexSet = setOf(".opendistro_security") + /** * We override preserveIndicesUponCompletion to true and use this function to clean up indices * Meant to be used in @After or @AfterClass of your feature test suite @@ -289,7 +293,7 @@ abstract class IndexManagementRestTestCase : ODFERestTestCase() { val xContentType = MediaType.fromMediaType(response.entity.contentType) xContentType.xContent().createParser( NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - response.entity.content + response.entity.content, ).use { parser -> for (index in parser.list()) { val jsonObject: Map<*, *> = index as java.util.HashMap<*, *> @@ -358,7 +362,7 @@ abstract class IndexManagementRestTestCase : ODFERestTestCase() { val xContentType = MediaType.fromMediaType(response.entity.contentType) xContentType.xContent().createParser( NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - response.entity.content + response.entity.content, ).use { parser -> for (index in parser.list()) { val jsonObject: Map<*, *> = index as java.util.HashMap<*, *> @@ -378,18 +382,20 @@ abstract class IndexManagementRestTestCase : ODFERestTestCase() { var sessionId: String? fun getExecutionData(reset: Boolean): ByteArray? + fun dump(reset: Boolean) + fun reset() } /* - * We need to be able to dump the jacoco coverage before the cluster shuts down. 
- * The new internal testing framework removed some gradle tasks we were listening to, - * to choose a good time to do it. This will dump the executionData to file after each test. - * TODO: This is also currently just overwriting integTest.exec with the updated execData without - * resetting after writing each time. This can be improved to either write an exec file per test - * or by letting jacoco append to the file. - * */ + * We need to be able to dump the jacoco coverage before the cluster shuts down. + * The new internal testing framework removed some gradle tasks we were listening to, + * to choose a good time to do it. This will dump the executionData to file after each test. + * TODO: This is also currently just overwriting integTest.exec with the updated execData without + * resetting after writing each time. This can be improved to either write an exec file per test + * or by letting jacoco append to the file. + * */ @JvmStatic @AfterClass fun dumpCoverage() { @@ -398,12 +404,13 @@ abstract class IndexManagementRestTestCase : ODFERestTestCase() { val jacocoBuildPath = System.getProperty("jacoco.dir") ?: return val serverUrl = "service:jmx:rmi:///jndi/rmi://127.0.0.1:7777/jmxrmi" JMXConnectorFactory.connect(JMXServiceURL(serverUrl)).use { connector -> - val proxy = MBeanServerInvocationHandler.newProxyInstance( - connector.mBeanServerConnection, - ObjectName("org.jacoco:type=Runtime"), - IProxy::class.java, - false - ) + val proxy = + MBeanServerInvocationHandler.newProxyInstance( + connector.mBeanServerConnection, + ObjectName("org.jacoco:type=Runtime"), + IProxy::class.java, + false, + ) proxy.getExecutionData(false)?.let { val path = PathUtils.get("$jacocoBuildPath/integTest.exec") Files.write(path, it) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/IndexManagementSettingsTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/IndexManagementSettingsTests.kt index 41bb1662d..55fce2adc 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/IndexManagementSettingsTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/IndexManagementSettingsTests.kt @@ -16,7 +16,6 @@ import org.opensearch.indexmanagement.snapshotmanagement.settings.SnapshotManage import org.opensearch.test.OpenSearchTestCase class IndexManagementSettingsTests : OpenSearchTestCase() { - private lateinit var plugin: IndexManagementPlugin @Before @@ -55,9 +54,9 @@ class IndexManagementSettingsTests : OpenSearchTestCase() { LegacyOpenDistroRollupSettings.ROLLUP_INDEX, LegacyOpenDistroRollupSettings.ROLLUP_ENABLED, LegacyOpenDistroRollupSettings.ROLLUP_SEARCH_ENABLED, - LegacyOpenDistroRollupSettings.ROLLUP_DASHBOARDS - ) - ) + LegacyOpenDistroRollupSettings.ROLLUP_DASHBOARDS, + ), + ), ) } @@ -93,16 +92,16 @@ class IndexManagementSettingsTests : OpenSearchTestCase() { RollupSettings.ROLLUP_SEARCH_ENABLED, RollupSettings.ROLLUP_SEARCH_ALL_JOBS, RollupSettings.ROLLUP_DASHBOARDS, - SnapshotManagementSettings.FILTER_BY_BACKEND_ROLES - ) - ) + SnapshotManagementSettings.FILTER_BY_BACKEND_ROLES, + ), + ), ) } fun testLegacyOpenDistroSettingsFallback() { assertEquals( ManagedIndexSettings.INDEX_STATE_MANAGEMENT_ENABLED.get(Settings.EMPTY), - LegacyOpenDistroManagedIndexSettings.INDEX_STATE_MANAGEMENT_ENABLED.get(Settings.EMPTY) + LegacyOpenDistroManagedIndexSettings.INDEX_STATE_MANAGEMENT_ENABLED.get(Settings.EMPTY), ) } @@ -113,48 +112,51 @@ class IndexManagementSettingsTests : OpenSearchTestCase() { } fun testIndexSettingLegacyFallback() { - var settings = Settings.builder() - 
.put("index.opendistro.index_state_management.rollover_skip", true) - .build() + var settings = + Settings.builder() + .put("index.opendistro.index_state_management.rollover_skip", true) + .build() assertEquals(ManagedIndexSettings.ROLLOVER_SKIP.get(settings), true) - settings = Settings.builder() - .put("index.opendistro.index_state_management.rollover_skip", true) - .put("index.plugins.index_state_management.rollover_skip", false) - .build() + settings = + Settings.builder() + .put("index.opendistro.index_state_management.rollover_skip", true) + .put("index.plugins.index_state_management.rollover_skip", false) + .build() assertEquals(ManagedIndexSettings.ROLLOVER_SKIP.get(settings), false) assertSettingDeprecationsAndWarnings( - arrayOf(LegacyOpenDistroManagedIndexSettings.ROLLOVER_SKIP) + arrayOf(LegacyOpenDistroManagedIndexSettings.ROLLOVER_SKIP), ) } fun testSettingsGetValueWithLegacyFallback() { - val settings = Settings.builder() - .put("opendistro.index_state_management.enabled", false) - .put("opendistro.index_state_management.metadata_service.enabled", false) - .put("opendistro.index_state_management.job_interval", 1) - .put("opendistro.index_state_management.coordinator.sweep_period", "6m") - .put("opendistro.index_state_management.coordinator.backoff_millis", "1ms") - .put("opendistro.index_state_management.coordinator.backoff_count", 1) - .put("opendistro.index_state_management.history.enabled", false) - .put("opendistro.index_state_management.history.max_docs", 1L) - .put("opendistro.index_state_management.history.max_age", "1m") - .put("opendistro.index_state_management.history.rollover_check_period", "1m") - .put("opendistro.index_state_management.history.rollover_retention_period", "1m") - .put("opendistro.index_state_management.history.number_of_shards", 2) - .put("opendistro.index_state_management.history.number_of_replicas", 2) - .putList("opendistro.index_state_management.allow_list", listOf("1")) - .putList("opendistro.index_state_management.snapshot.deny_list", listOf("1")) - .put("opendistro.index_state_management.restricted_index_pattern", "blocked_index_pattern") - .put("opendistro.rollup.enabled", false) - .put("opendistro.rollup.search.enabled", false) - .put("opendistro.rollup.ingest.backoff_millis", "1ms") - .put("opendistro.rollup.ingest.backoff_count", 1) - .put("opendistro.rollup.search.backoff_millis", "1ms") - .put("opendistro.rollup.search.backoff_count", 1) - .put("opendistro.rollup.dashboards.enabled", false) - .build() + val settings = + Settings.builder() + .put("opendistro.index_state_management.enabled", false) + .put("opendistro.index_state_management.metadata_service.enabled", false) + .put("opendistro.index_state_management.job_interval", 1) + .put("opendistro.index_state_management.coordinator.sweep_period", "6m") + .put("opendistro.index_state_management.coordinator.backoff_millis", "1ms") + .put("opendistro.index_state_management.coordinator.backoff_count", 1) + .put("opendistro.index_state_management.history.enabled", false) + .put("opendistro.index_state_management.history.max_docs", 1L) + .put("opendistro.index_state_management.history.max_age", "1m") + .put("opendistro.index_state_management.history.rollover_check_period", "1m") + .put("opendistro.index_state_management.history.rollover_retention_period", "1m") + .put("opendistro.index_state_management.history.number_of_shards", 2) + .put("opendistro.index_state_management.history.number_of_replicas", 2) + .putList("opendistro.index_state_management.allow_list", listOf("1")) + 
.putList("opendistro.index_state_management.snapshot.deny_list", listOf("1")) + .put("opendistro.index_state_management.restricted_index_pattern", "blocked_index_pattern") + .put("opendistro.rollup.enabled", false) + .put("opendistro.rollup.search.enabled", false) + .put("opendistro.rollup.ingest.backoff_millis", "1ms") + .put("opendistro.rollup.ingest.backoff_count", 1) + .put("opendistro.rollup.search.backoff_millis", "1ms") + .put("opendistro.rollup.search.backoff_count", 1) + .put("opendistro.rollup.dashboards.enabled", false) + .build() assertEquals(ManagedIndexSettings.INDEX_STATE_MANAGEMENT_ENABLED.get(settings), false) assertEquals(ManagedIndexSettings.JOB_INTERVAL.get(settings), 1) @@ -203,8 +205,8 @@ class IndexManagementSettingsTests : OpenSearchTestCase() { LegacyOpenDistroRollupSettings.ROLLUP_INGEST_BACKOFF_COUNT, LegacyOpenDistroRollupSettings.ROLLUP_SEARCH_BACKOFF_MILLIS, LegacyOpenDistroRollupSettings.ROLLUP_SEARCH_BACKOFF_COUNT, - LegacyOpenDistroRollupSettings.ROLLUP_DASHBOARDS - ) + LegacyOpenDistroRollupSettings.ROLLUP_DASHBOARDS, + ), ) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/IndexStateManagementSecurityBehaviorIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/IndexStateManagementSecurityBehaviorIT.kt index c585761f6..9d3bdc5f4 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/IndexStateManagementSecurityBehaviorIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/IndexStateManagementSecurityBehaviorIT.kt @@ -15,6 +15,7 @@ import org.junit.After import org.junit.Before import org.opensearch.client.RestClient import org.opensearch.commons.rest.SecureRestClientBuilder +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.common.model.dimension.DateHistogram import org.opensearch.indexmanagement.common.model.dimension.Terms import org.opensearch.indexmanagement.indexstatemanagement.action.RollupAction @@ -33,7 +34,6 @@ import org.opensearch.indexmanagement.rollup.model.metric.Max import org.opensearch.indexmanagement.rollup.model.metric.Min import org.opensearch.indexmanagement.rollup.model.metric.Sum import org.opensearch.indexmanagement.rollup.model.metric.ValueCount -import org.opensearch.core.rest.RestStatus import org.opensearch.test.junit.annotations.TestLogging import java.time.Instant import java.time.temporal.ChronoUnit @@ -41,7 +41,6 @@ import java.util.Locale @TestLogging("level:DEBUG", reason = "Debug for tests.") class IndexStateManagementSecurityBehaviorIT : SecurityRestTestCase() { - private val testIndexName = javaClass.simpleName.lowercase(Locale.ROOT) private val password = "Test123sdfsdfds435346FDGDFGDFG2342&^%#$@#35!" 
@@ -56,28 +55,30 @@ class IndexStateManagementSecurityBehaviorIT : SecurityRestTestCase() { fun setupUsersAndRoles() { updateClusterSetting(ManagedIndexSettings.JITTER.key, "0.0", false) - val helpdeskClusterPermissions = listOf( - WRITE_POLICY, - DELETE_POLICY, - ADD_POLICY, - GET_POLICY, - GET_POLICIES, - EXPLAIN_INDEX, - INDEX_ROLLUP, - GET_ROLLUP, - EXPLAIN_ROLLUP, - UPDATE_ROLLUP, - ) + val helpdeskClusterPermissions = + listOf( + WRITE_POLICY, + DELETE_POLICY, + ADD_POLICY, + GET_POLICY, + GET_POLICIES, + EXPLAIN_INDEX, + INDEX_ROLLUP, + GET_ROLLUP, + EXPLAIN_ROLLUP, + UPDATE_ROLLUP, + ) - val indexPermissions = listOf( - MANAGED_INDEX, - CREATE_INDEX, - WRITE_INDEX, - BULK_WRITE_INDEX, - GET_INDEX_MAPPING, - SEARCH_INDEX, - PUT_INDEX_MAPPING - ) + val indexPermissions = + listOf( + MANAGED_INDEX, + CREATE_INDEX, + WRITE_INDEX, + BULK_WRITE_INDEX, + GET_INDEX_MAPPING, + SEARCH_INDEX, + PUT_INDEX_MAPPING, + ) // In this test suite case john is a "super-user" which has all relevant privileges createUser(superIsmUser, password, listOf(HELPDESK)) createRole(HELPDESK_ROLE, helpdeskClusterPermissions, indexPermissions, listOf(AIRLINE_INDEX_PATTERN)) @@ -85,7 +86,7 @@ class IndexStateManagementSecurityBehaviorIT : SecurityRestTestCase() { superUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), superIsmUser, password).setSocketTimeout( - 60000 + 60000, ).setConnectionRequestTimeout(180000) .build() } @@ -119,9 +120,10 @@ class IndexStateManagementSecurityBehaviorIT : SecurityRestTestCase() { val rollup = createISMRollup(targetIdxRollup) val actionConfig = RollupAction(rollup, 0) - val states = listOf( - State("rollup", listOf(actionConfig), listOf()) - ) + val states = + listOf( + State("rollup", listOf(actionConfig), listOf()), + ) val policy = createPolicyWithRollupStep(policyID, states, indexName) @@ -204,7 +206,7 @@ class IndexStateManagementSecurityBehaviorIT : SecurityRestTestCase() { fun `test delete policy`() { createTestUserWithRole( listOf(EXPLAIN_INDEX, GET_POLICY, EXPLAIN_INDEX), - listOf(GET_INDEX_MAPPING, SEARCH_INDEX) + listOf(GET_INDEX_MAPPING, SEARCH_INDEX), ) testClient = @@ -240,18 +242,20 @@ class IndexStateManagementSecurityBehaviorIT : SecurityRestTestCase() { states: List, indexName: String, ): Policy { - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states, - ismTemplate = listOf( - ISMTemplate(listOf("$indexName*"), 100, Instant.now().truncatedTo(ChronoUnit.MILLIS)) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ismTemplate = + listOf( + ISMTemplate(listOf("$indexName*"), 100, Instant.now().truncatedTo(ChronoUnit.MILLIS)), + ), ) - ) return policy } @@ -260,25 +264,28 @@ class IndexStateManagementSecurityBehaviorIT : SecurityRestTestCase() { description = "basic search test", targetIndex = targetIdxRollup, pageSize = 100, - dimensions = listOf( + dimensions = + listOf( DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") + Terms("PULocationID", "PULocationID"), ), - metrics = listOf( + metrics = + 
listOf( RollupMetrics( sourceField = "passenger_count", targetField = "passenger_count", - metrics = listOf( + metrics = + listOf( Sum(), Min(), Max(), - ValueCount(), Average() - ) + ValueCount(), Average(), + ), ), RollupMetrics( sourceField = "total_amount", targetField = "total_amount", - metrics = listOf(Max(), Min()) - ) - ) + metrics = listOf(Max(), Min()), + ), + ), ) } @@ -296,7 +303,7 @@ class IndexStateManagementSecurityBehaviorIT : SecurityRestTestCase() { waitFor { assertEquals( AttemptCreateRollupJobStep.getSuccessMessage(rollupId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } @@ -312,7 +319,7 @@ class IndexStateManagementSecurityBehaviorIT : SecurityRestTestCase() { waitFor { assertEquals( WaitForRollupCompletionStep.getJobCompletionMessage(rollupId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/MocksTestCase.kt b/src/test/kotlin/org/opensearch/indexmanagement/MocksTestCase.kt index 43f1c075d..bdfda843a 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/MocksTestCase.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/MocksTestCase.kt @@ -11,20 +11,19 @@ import com.nhaarman.mockitokotlin2.mock import com.nhaarman.mockitokotlin2.whenever import org.junit.Before import org.mockito.Mockito -import org.opensearch.core.action.ActionListener -import org.opensearch.core.action.ActionResponse import org.opensearch.action.index.IndexResponse import org.opensearch.client.AdminClient import org.opensearch.client.Client import org.opensearch.client.ClusterAdminClient import org.opensearch.common.settings.Settings import org.opensearch.common.util.concurrent.ThreadContext +import org.opensearch.core.action.ActionListener +import org.opensearch.core.action.ActionResponse import org.opensearch.indexmanagement.snapshotmanagement.mockIndexResponse import org.opensearch.test.OpenSearchTestCase import org.opensearch.threadpool.ThreadPool abstract class MocksTestCase : OpenSearchTestCase() { - val client: Client = mock() private val adminClient: AdminClient = mock() private val clusterAdminClient: ClusterAdminClient = mock() @@ -51,52 +50,61 @@ abstract class MocksTestCase : OpenSearchTestCase() { fun mockCreateSnapshotCall( response: ActionResponse? = null, - exception: Exception? = null + exception: Exception? = null, ) { assertTrue( "Must provide either a response or an exception.", - (response != null).xor(exception != null) + (response != null).xor(exception != null), ) whenever(client.admin()).thenReturn(adminClient) whenever(adminClient.cluster()).thenReturn(clusterAdminClient) doAnswer { val listener = it.getArgument>(1) - if (response != null) listener.onResponse(response) - else listener.onFailure(exception) + if (response != null) { + listener.onResponse(response) + } else { + listener.onFailure(exception) + } }.whenever(clusterAdminClient).createSnapshot(any(), any()) } fun mockDeleteSnapshotCall( response: ActionResponse? = null, - exception: Exception? = null + exception: Exception? 
= null, ) { assertTrue( "Must provide either a response or an exception.", - (response != null).xor(exception != null) + (response != null).xor(exception != null), ) whenever(client.admin()).thenReturn(adminClient) whenever(adminClient.cluster()).thenReturn(clusterAdminClient) doAnswer { val listener = it.getArgument>(1) - if (response != null) listener.onResponse(response) - else listener.onFailure(exception) + if (response != null) { + listener.onResponse(response) + } else { + listener.onFailure(exception) + } }.whenever(clusterAdminClient).deleteSnapshot(any(), any()) } fun mockGetSnapshotsCall( response: ActionResponse? = null, - exception: Exception? = null + exception: Exception? = null, ) { assertTrue( "Must provide either a response or an exception.", - (response != null).xor(exception != null) + (response != null).xor(exception != null), ) whenever(client.admin()).thenReturn(adminClient) whenever(adminClient.cluster()).thenReturn(clusterAdminClient) doAnswer { val listener = it.getArgument>(1) - if (response != null) listener.onResponse(response) - else listener.onFailure(exception) + if (response != null) { + listener.onResponse(response) + } else { + listener.onFailure(exception) + } }.whenever(clusterAdminClient).getSnapshots(any(), any()) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/ODFERestTestCase.kt b/src/test/kotlin/org/opensearch/indexmanagement/ODFERestTestCase.kt index 2be47a8ca..480e0950c 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/ODFERestTestCase.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/ODFERestTestCase.kt @@ -21,7 +21,6 @@ import org.opensearch.test.rest.OpenSearchRestTestCase import java.io.IOException abstract class ODFERestTestCase : OpenSearchRestTestCase() { - fun isHttps(): Boolean = System.getProperty("https", "false")!!.toBoolean() fun securityEnabled(): Boolean = System.getProperty("security", "false")!!.toBoolean() diff --git a/src/test/kotlin/org/opensearch/indexmanagement/PolicySecurityBehaviorIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/PolicySecurityBehaviorIT.kt index 9f898ff12..17148a7d5 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/PolicySecurityBehaviorIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/PolicySecurityBehaviorIT.kt @@ -17,13 +17,13 @@ import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest import org.opensearch.client.ResponseException import org.opensearch.client.RestClient import org.opensearch.commons.rest.SecureRestClientBuilder +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.indexstatemanagement.action.AliasAction import org.opensearch.indexmanagement.indexstatemanagement.model.Policy import org.opensearch.indexmanagement.indexstatemanagement.model.State import org.opensearch.indexmanagement.indexstatemanagement.randomErrorNotification import org.opensearch.indexmanagement.indexstatemanagement.transport.action.addpolicy.AddPolicyAction -import org.opensearch.core.rest.RestStatus import org.opensearch.test.OpenSearchTestCase import org.opensearch.test.junit.annotations.TestLogging import java.time.Instant @@ -39,23 +39,26 @@ class PolicySecurityBehaviorIT : SecurityRestTestCase() { private val permittedIndicesPrefix = "permitted-index" private val permittedIndicesPattern = "permitted-index*" + @Before fun setupUsersAndRoles() { // updateClusterSetting(ManagedIndexSettings.JITTER.key, "0.0", false) - val 
custerPermissions = listOf( - AddPolicyAction.NAME - ) - - val indexPermissions = listOf( - MANAGED_INDEX, - CREATE_INDEX, - WRITE_INDEX, - BULK_WRITE_INDEX, - GET_INDEX_MAPPING, - SEARCH_INDEX, - PUT_INDEX_MAPPING - ) + val custerPermissions = + listOf( + AddPolicyAction.NAME, + ) + + val indexPermissions = + listOf( + MANAGED_INDEX, + CREATE_INDEX, + WRITE_INDEX, + BULK_WRITE_INDEX, + GET_INDEX_MAPPING, + SEARCH_INDEX, + PUT_INDEX_MAPPING, + ) createUser(ismUser, password, listOf(HELPDESK)) createRole(HELPDESK_ROLE, custerPermissions, indexPermissions, listOf(permittedIndicesPattern)) assignRoleToUsers(HELPDESK_ROLE, listOf(ismUser)) @@ -76,7 +79,6 @@ class PolicySecurityBehaviorIT : SecurityRestTestCase() { } fun `test add policy`() { - val notPermittedIndexPrefix = OpenSearchTestCase.randomAlphaOfLength(10).lowercase(Locale.getDefault()) val policyId = OpenSearchTestCase.randomAlphaOfLength(10) @@ -94,15 +96,16 @@ class PolicySecurityBehaviorIT : SecurityRestTestCase() { val actions = listOf(IndicesAliasesRequest.AliasActions.add().alias("aaa")) val actionConfig = AliasAction(actions = actions, index = 0) val states = listOf(State("alias", listOf(actionConfig), listOf())) - val policy = Policy( - id = policyId, - description = "description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = "alias", - states = states - ) + val policy = + Policy( + id = policyId, + description = "description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = "alias", + states = states, + ) createPolicy(policy, policy.id, true, client()) // Call AddPolicyAction as user addPolicyToIndex(index = allIndicesJoined, policyId = policy.id, expectedStatus = RestStatus.OK, client = ismUserClient!!) 
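Besides the wrapping and trailing-comma changes, the MocksTestCase hunks above replace single-expression `if/else` statements spread over two lines with fully braced branches. A minimal sketch of that rule, with a hypothetical callback standing in for the mocked ActionListener used in those tests:

// Sketch only: multi-line if/else branches get braces under the new lint rules.
fun dispatch(response: String?, exception: Exception?) {
    // Before: `if (response != null) listener.onResponse(response) else listener.onFailure(exception)`
    if (response != null) {
        println("onResponse: $response")
    } else {
        println("onFailure: ${exception?.message}")
    }
}

fun main() {
    dispatch("created", null)
    dispatch(null, IllegalStateException("snapshot failed"))
}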
diff --git a/src/test/kotlin/org/opensearch/indexmanagement/RollupSecurityBehaviorIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/RollupSecurityBehaviorIT.kt index 503df5495..99c487185 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/RollupSecurityBehaviorIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/RollupSecurityBehaviorIT.kt @@ -15,6 +15,7 @@ import org.junit.After import org.junit.Before import org.opensearch.client.RestClient import org.opensearch.commons.rest.SecureRestClientBuilder +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.common.model.dimension.DateHistogram import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings import org.opensearch.indexmanagement.rollup.model.Rollup @@ -27,7 +28,6 @@ import org.opensearch.indexmanagement.rollup.model.metric.Sum import org.opensearch.indexmanagement.rollup.model.metric.ValueCount import org.opensearch.indexmanagement.rollup.randomRollup import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule -import org.opensearch.core.rest.RestStatus import org.opensearch.test.junit.annotations.TestLogging import java.time.Instant import java.time.temporal.ChronoUnit @@ -48,24 +48,25 @@ class RollupSecurityBehaviorIT : SecurityRestTestCase() { updateClusterSetting(ManagedIndexSettings.JITTER.key, "0.0", false) // Init super transform user - val helpdeskClusterPermissions = listOf( - INDEX_ROLLUP, - GET_ROLLUP, - EXPLAIN_ROLLUP, - UPDATE_ROLLUP, - DELETE_ROLLUP - - ) - - val indexPermissions = listOf( - MANAGED_INDEX, - CREATE_INDEX, - WRITE_INDEX, - BULK_WRITE_INDEX, - GET_INDEX_MAPPING, - SEARCH_INDEX, - PUT_INDEX_MAPPING - ) + val helpdeskClusterPermissions = + listOf( + INDEX_ROLLUP, + GET_ROLLUP, + EXPLAIN_ROLLUP, + UPDATE_ROLLUP, + DELETE_ROLLUP, + ) + + val indexPermissions = + listOf( + MANAGED_INDEX, + CREATE_INDEX, + WRITE_INDEX, + BULK_WRITE_INDEX, + GET_INDEX_MAPPING, + SEARCH_INDEX, + PUT_INDEX_MAPPING, + ) // In this test suite case john is a "super-user" which has all relevant privileges createUser(superRollupUser, password, listOf(HELPDESK)) createRole(HELPDESK_ROLE, helpdeskClusterPermissions, indexPermissions, listOf(AIRLINE_INDEX_PATTERN)) @@ -73,7 +74,7 @@ class RollupSecurityBehaviorIT : SecurityRestTestCase() { superUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), superRollupUser, password).setSocketTimeout( - 60000 + 60000, ).setConnectionRequestTimeout(180000) .build() } @@ -233,13 +234,14 @@ class RollupSecurityBehaviorIT : SecurityRestTestCase() { delay = 0, continuous = false, dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h")), - metrics = listOf( + metrics = + listOf( RollupMetrics( sourceField = "passenger_count", targetField = "passenger_count", - metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()) - ) - ) + metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()), + ), + ), ) private fun createTestUserWithRole(clusterPermissions: List, indexPermissions: List) { diff --git a/src/test/kotlin/org/opensearch/indexmanagement/SecurityBehaviorIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/SecurityBehaviorIT.kt index 29e77dcd8..0e01e3dcb 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/SecurityBehaviorIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/SecurityBehaviorIT.kt @@ -15,8 +15,8 @@ import org.junit.After import org.junit.Before import org.opensearch.client.RestClient import 
org.opensearch.commons.rest.SecureRestClientBuilder -import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings import org.opensearch.core.rest.RestStatus +import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings import org.opensearch.test.junit.annotations.TestLogging @TestLogging("level:DEBUG", reason = "Debug for tests.") @@ -30,22 +30,24 @@ class SecurityBehaviorIT : SecurityRestTestCase() { fun setupUsersAndRoles() { updateClusterSetting(ManagedIndexSettings.JITTER.key, "0.0", false) - val helpdeskClusterPermissions = listOf( - WRITE_POLICY, - GET_POLICY, - GET_POLICIES, - EXPLAIN_INDEX - ) - - val indexPermissions = listOf( - MANAGED_INDEX, - CREATE_INDEX, - WRITE_INDEX, - BULK_WRITE_INDEX, - GET_INDEX_MAPPING, - SEARCH_INDEX, - PUT_INDEX_MAPPING - ) + val helpdeskClusterPermissions = + listOf( + WRITE_POLICY, + GET_POLICY, + GET_POLICIES, + EXPLAIN_INDEX, + ) + + val indexPermissions = + listOf( + MANAGED_INDEX, + CREATE_INDEX, + WRITE_INDEX, + BULK_WRITE_INDEX, + GET_INDEX_MAPPING, + SEARCH_INDEX, + PUT_INDEX_MAPPING, + ) // In this test suite case john is a "super-user" which has all relevant privileges createUser(john, password, listOf(HELPDESK)) createRole(HELPDESK_ROLE, helpdeskClusterPermissions, indexPermissions, listOf(AIRLINE_INDEX_PATTERN)) @@ -71,20 +73,22 @@ class SecurityBehaviorIT : SecurityRestTestCase() { // Create jill without assigning a role createUser(jill, password, listOf(HELPDESK)) - val phoneOperatorClusterPermissions = listOf( - EXPLAIN_INDEX, - GET_POLICY, - WRITE_POLICY, - GET_POLICIES - ) - - val indexPermissions = listOf( - MANAGED_INDEX, - CREATE_INDEX, - GET_INDEX_MAPPING, - SEARCH_INDEX, - PUT_INDEX_MAPPING - ) + val phoneOperatorClusterPermissions = + listOf( + EXPLAIN_INDEX, + GET_POLICY, + WRITE_POLICY, + GET_POLICIES, + ) + + val indexPermissions = + listOf( + MANAGED_INDEX, + CREATE_INDEX, + GET_INDEX_MAPPING, + SEARCH_INDEX, + PUT_INDEX_MAPPING, + ) // Jane is phone operator; Phone operators can search availability indexes createUserWithCustomRole( jane, @@ -93,7 +97,7 @@ class SecurityBehaviorIT : SecurityRestTestCase() { phoneOperatorClusterPermissions, indexPermissions, listOf(PHONE_OPERATOR), - listOf(AVAILABILITY_INDEX_PATTERN) + listOf(AVAILABILITY_INDEX_PATTERN), ) val jillClient = diff --git a/src/test/kotlin/org/opensearch/indexmanagement/SecurityRestTestCase.kt b/src/test/kotlin/org/opensearch/indexmanagement/SecurityRestTestCase.kt index f77a9be21..d6125a7d8 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/SecurityRestTestCase.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/SecurityRestTestCase.kt @@ -19,9 +19,10 @@ import org.opensearch.client.Request import org.opensearch.client.Response import org.opensearch.client.ResponseException import org.opensearch.client.RestClient -import org.opensearch.core.common.Strings import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.common.Strings +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.indexstatemanagement.IndexStateManagementRestTestCase import org.opensearch.indexmanagement.indexstatemanagement.model.ManagedIndexConfig import org.opensearch.indexmanagement.indexstatemanagement.model.Policy @@ -39,19 +40,16 @@ import org.opensearch.indexmanagement.transform.TransformRestTestCase import org.opensearch.indexmanagement.transform.model.Transform import org.opensearch.indexmanagement.transform.toJsonString 
import org.opensearch.rest.RestRequest -import org.opensearch.core.rest.RestStatus import org.opensearch.test.OpenSearchTestCase import java.util.Locale abstract class SecurityRestTestCase : IndexManagementRestTestCase() { - private object RollupRestTestCaseSecurityExtension : RollupRestTestCase() { - fun createRollupExt( rollup: Rollup, rollupId: String, refresh: Boolean, - client: RestClient + client: RestClient, ) = super.createRollup(rollup, rollupId, refresh, client) fun getRollupMetadataExt( @@ -71,7 +69,6 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { } private object IndexStateManagementRestTestCaseExt : IndexStateManagementRestTestCase() { - fun createPolicyExt( policy: Policy, policyId: String = OpenSearchTestCase.randomAlphaOfLength(10), @@ -83,7 +80,7 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { policyString: String, policyId: String, refresh: Boolean = true, - client: RestClient + client: RestClient, ) = super.createPolicyJson(policyString, policyId, refresh, client) fun updateManagedIndexConfigStartTimeExt(update: ManagedIndexConfig, desiredStartTimeMillis: Long? = null, retryOnConflict: Int = 0) = super.updateManagedIndexConfigStartTime(update, desiredStartTimeMillis, retryOnConflict) @@ -108,7 +105,6 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { } private object TransformRestTestCaseExt : TransformRestTestCase() { - fun createTransformExt( transform: Transform, transformId: String = randomAlphaOfLength(10), @@ -119,7 +115,7 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { fun getTransformExt( transformId: String, header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), - userClient: RestClient? = null + userClient: RestClient? 
= null, ) = super.getTransform(transformId, header, userClient) fun getTransformMetadataExt(metadataId: String) = super.getTransformMetadata(metadataId) @@ -127,23 +123,25 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { protected fun updateClusterSetting(key: String, value: String, escapeValue: Boolean = true) { val formattedValue = if (escapeValue) "\"$value\"" else value - val request = """ + val request = + """ { "persistent": { "$key": $formattedValue } } - """.trimIndent() - val res = client().makeRequest( - "PUT", "_cluster/settings", emptyMap(), - StringEntity(request, ContentType.APPLICATION_JSON) - ) + """.trimIndent() + val res = + client().makeRequest( + "PUT", "_cluster/settings", emptyMap(), + StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Request failed", RestStatus.OK, res.restStatus()) } protected fun createRollup( rollup: Rollup, - client: RestClient + client: RestClient, ): Rollup { return RollupRestTestCaseSecurityExtension.createRollupExt(rollup, rollup.id, true, client) } @@ -151,9 +149,8 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { protected fun createRollupAndCheckStatus( rollup: Rollup, expectedStatus: RestStatus, - client: RestClient + client: RestClient, ): Response { - val request = Request("PUT", "${IndexManagementPlugin.ROLLUP_JOBS_BASE_URI}/${rollup.id}?refresh=true") request.setJsonEntity(rollup.toJsonString()) return executeRequest(request, expectedStatus, client) @@ -217,7 +214,7 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { policyString: String, policyId: String, refresh: Boolean = true, - client: RestClient + client: RestClient, ): Response { return IndexStateManagementRestTestCaseExt.createPolicyJsonExt(policyString, policyId, refresh, client) } @@ -228,11 +225,12 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { } protected fun addPolicyToIndex(index: String, policyId: String, expectedStatus: RestStatus, client: RestClient) { - val body = """ + val body = + """ { "policy_id": "$policyId" } - """.trimIndent() + """.trimIndent() val request = Request("POST", "/_opendistro/_ism/add/$index") request.setJsonEntity(body) executeRequest(request, expectedStatus, client) @@ -244,13 +242,14 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { protected fun createIndex(indexName: String, sourceIndexMappingString: String?, client: RestClient) { val waitForActiveShards = if (isMultiNode) "all" else "1" - val builtSettings = Settings.builder().let { - it.putNull(ManagedIndexSettings.ROLLOVER_ALIAS.key) - it.put(INDEX_NUMBER_OF_REPLICAS, "1") - it.put(INDEX_NUMBER_OF_SHARDS, "1") - it.put("index.write.wait_for_active_shards", waitForActiveShards) - it - }.build() + val builtSettings = + Settings.builder().let { + it.putNull(ManagedIndexSettings.ROLLOVER_ALIAS.key) + it.put(INDEX_NUMBER_OF_REPLICAS, "1") + it.put(INDEX_NUMBER_OF_SHARDS, "1") + it.put("index.write.wait_for_active_shards", waitForActiveShards) + it + }.build() val request = Request("PUT", "/$indexName") var entity = "{\"settings\": " + Strings.toString(XContentType.JSON, builtSettings) @@ -278,11 +277,11 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { userClient: RestClient, user: String, expectedNumberOfPolicies: Int?, - expectedStatus: RestStatus = RestStatus.OK + expectedStatus: RestStatus = RestStatus.OK, ): Response? 
{ val response = executeRequest(request = Request(RestRequest.Method.GET.name, IndexManagementPlugin.POLICY_BASE_URI), expectedStatus, userClient) assertEquals( - "User $user not able to see all policies", expectedNumberOfPolicies, response.asMap()["total_policies"] + "User $user not able to see all policies", expectedNumberOfPolicies, response.asMap()["total_policies"], ) return response } @@ -317,7 +316,7 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { protected fun createTransformAndCheckStatus( transform: Transform, expectedStatus: RestStatus, - client: RestClient + client: RestClient, ): Response { val request = Request(RestRequest.Method.PUT.name, "${IndexManagementPlugin.TRANSFORM_BASE_URI}/${transform.id}?refresh=true") request.setJsonEntity(transform.toJsonString()) @@ -327,7 +326,7 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { protected fun createPolicyAndCheckStatus( policy: Policy, expectedStatus: RestStatus, - client: RestClient + client: RestClient, ): Response { val request = Request("PUT", "${IndexManagementPlugin.POLICY_BASE_URI}/${policy.id}?refresh=true") request.setJsonEntity(policy.toJsonString()) @@ -337,7 +336,7 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { protected fun getTransform( transformId: String, header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), - client: RestClient + client: RestClient, ) = TransformRestTestCaseExt.getTransformExt(transformId, header, client) protected fun getTransformMetadata(metadataId: String) = TransformRestTestCaseExt.getTransformMetadataExt(metadataId) @@ -370,13 +369,14 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { protected fun executeRequest( request: Request, expectedRestStatus: RestStatus? 
= null, - client: RestClient + client: RestClient, ): Response { - val response = try { - client.performRequest(request) - } catch (exception: ResponseException) { - exception.response - } + val response = + try { + client.performRequest(request) + } catch (exception: ResponseException) { + exception.response + } if (expectedRestStatus != null) { assertEquals(expectedRestStatus.status, response.statusLine.statusCode) } @@ -385,13 +385,14 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { protected fun createUser(name: String, pwd: String = "Test123!", backendRoles: List = listOf()) { val backendRolesStr = backendRoles.joinToString { "\"$it\"" } - val json = """ + val json = + """ { "password": "$pwd", "backend_roles": [$backendRolesStr], "attributes":{} } - """.trimIndent() + """.trimIndent() val request = Request(RestRequest.Method.PUT.name, "_plugins/_security/api/internalusers/$name") request.setJsonEntity(json) @@ -405,7 +406,7 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { clusterPermissions: List = emptyList(), indexPermissions: List = emptyList(), backendRoles: List = emptyList(), - indexPatterns: List = emptyList() + indexPatterns: List = emptyList(), ) { createUser(user, password, backendRoles) createRole(role, clusterPermissions, indexPermissions, indexPatterns) @@ -418,11 +419,12 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { indexPermissions: List, indexPatterns: List, ) { - val response = try { - client().performRequest(Request("GET", "/_plugins/_security/api/roles/$name")) - } catch (ex: ResponseException) { - ex.response - } + val response = + try { + client().performRequest(Request("GET", "/_plugins/_security/api/roles/$name")) + } catch (ex: ResponseException) { + ex.response + } // If role already exist, do nothing if (response.statusLine.statusCode == RestStatus.OK.status) { return @@ -432,7 +434,8 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { val indexPatternsStr = indexPatterns.joinToString { "\"$it\"" } val clusterPermissionsStr = clusterPermissions.joinToString { "\"$it\"" } val indexPermissionsStr = indexPermissions.joinToString { "\"$it\"" } - val entity = """ + val entity = + """ { "cluster_permissions": [$clusterPermissionsStr], "index_permissions": [ @@ -445,7 +448,7 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { ], "tenant_permissions": [] } - """.trimIndent() + """.trimIndent() request.setJsonEntity(entity) executeRequest(request, RestStatus.CREATED, client()) @@ -454,25 +457,27 @@ abstract class SecurityRestTestCase : IndexManagementRestTestCase() { protected fun assignRoleToUsers(role: String, users: List) { val request = Request("PUT", "/_plugins/_security/api/rolesmapping/$role") val usersStr = users.joinToString { "\"$it\"" } - val entity = """ + val entity = + """ { "backend_roles": [], "hosts": [], "users": [$usersStr] } - """.trimIndent() + """.trimIndent() request.setJsonEntity(entity) client().performRequest(request) } protected fun setFilterByBackendRole(filter: Boolean) { - val setting = """ + val setting = + """ { "persistent": { "plugins.index_management.filter_by_backend_roles": "$filter" } } - """.trimIndent() + """.trimIndent() val request = Request(RestRequest.Method.PUT.name, "_cluster/settings") request.setJsonEntity(setting) executeRequest(request, RestStatus.OK, client()) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/TestHelpers.kt b/src/test/kotlin/org/opensearch/indexmanagement/TestHelpers.kt index 
4b7ad50e8..301c19ffa 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/TestHelpers.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/TestHelpers.kt @@ -72,7 +72,7 @@ fun RestClient.makeRequest( params: Map = emptyMap(), entity: HttpEntity? = null, vararg headers: Header, - strictDeprecationMode: Boolean = false + strictDeprecationMode: Boolean = false, ): Response { val request = Request(method, endpoint) val options = RequestOptions.DEFAULT.toBuilder() @@ -97,7 +97,7 @@ fun RestClient.makeRequest( endpoint: String, entity: HttpEntity? = null, vararg headers: Header, - strictDeprecationMode: Boolean = false + strictDeprecationMode: Boolean = false, ): Response { val request = Request(method, endpoint) val options = RequestOptions.DEFAULT.toBuilder() @@ -112,7 +112,7 @@ fun RestClient.makeRequest( fun waitFor( timeout: Instant = Instant.ofEpochSecond(20), - block: () -> T + block: () -> T, ): T { var to = timeout if (isMultiNode) { diff --git a/src/test/kotlin/org/opensearch/indexmanagement/TransformSecurityBehaviorIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/TransformSecurityBehaviorIT.kt index 1545b68e3..f737ab3cc 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/TransformSecurityBehaviorIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/TransformSecurityBehaviorIT.kt @@ -15,13 +15,13 @@ import org.junit.After import org.junit.Before import org.opensearch.client.RestClient import org.opensearch.commons.rest.SecureRestClientBuilder +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.common.model.dimension.Terms import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings import org.opensearch.indexmanagement.transform.model.Transform import org.opensearch.indexmanagement.transform.model.TransformMetadata import org.opensearch.indexmanagement.transform.randomTransform import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule -import org.opensearch.core.rest.RestStatus import org.opensearch.test.junit.annotations.TestLogging import java.time.Instant import java.time.temporal.ChronoUnit @@ -42,27 +42,29 @@ class TransformSecurityBehaviorIT : SecurityRestTestCase() { updateClusterSetting(ManagedIndexSettings.JITTER.key, "0.0", false) // Init super transform user - val helpdeskClusterPermissions = listOf( - STOP_TRANSFORM, - EXPLAIN_INDEX, - TRANSFORM_ACTION, - GET_TRANSFORM, - EXPLAIN_TRANSFORM, - START_TRANSFORM, - DELETE_TRANSFORM, - HEALTH, - GET_TRANSFORMS - ) + val helpdeskClusterPermissions = + listOf( + STOP_TRANSFORM, + EXPLAIN_INDEX, + TRANSFORM_ACTION, + GET_TRANSFORM, + EXPLAIN_TRANSFORM, + START_TRANSFORM, + DELETE_TRANSFORM, + HEALTH, + GET_TRANSFORMS, + ) - val indexPermissions = listOf( - MANAGED_INDEX, - CREATE_INDEX, - WRITE_INDEX, - BULK_WRITE_INDEX, - GET_INDEX_MAPPING, - SEARCH_INDEX, - PUT_INDEX_MAPPING - ) + val indexPermissions = + listOf( + MANAGED_INDEX, + CREATE_INDEX, + WRITE_INDEX, + BULK_WRITE_INDEX, + GET_INDEX_MAPPING, + SEARCH_INDEX, + PUT_INDEX_MAPPING, + ) // In this test suite case john is a "super-user" which has all relevant privileges createUser(superTransformUser, password, listOf(HELPDESK)) createRole(HELPDESK_ROLE, helpdeskClusterPermissions, indexPermissions, listOf(AIRLINE_INDEX_PATTERN)) @@ -73,7 +75,7 @@ class TransformSecurityBehaviorIT : SecurityRestTestCase() { clusterHosts.toTypedArray(), isHttps(), superTransformUser, - password + password, ).setSocketTimeout(60000).setConnectionRequestTimeout(180000) .build() } @@ -282,9 +284,10 @@ 
class TransformSecurityBehaviorIT : SecurityRestTestCase() { targetIndex = targetIndex, roles = emptyList(), pageSize = 100, - groups = listOf( - Terms(sourceField = "store_and_fwd_flag", targetField = "flag") - ) + groups = + listOf( + Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), + ), ) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/bwc/ISMBackwardsCompatibilityIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/bwc/ISMBackwardsCompatibilityIT.kt index d2e5fbedc..6b8d2d7a1 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/bwc/ISMBackwardsCompatibilityIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/bwc/ISMBackwardsCompatibilityIT.kt @@ -16,13 +16,13 @@ import org.opensearch.indexmanagement.waitFor import java.util.Locale class ISMBackwardsCompatibilityIT : IndexStateManagementRestTestCase() { - private val testIndexName = javaClass.simpleName.lowercase(Locale.ROOT) private enum class ClusterType { OLD, MIXED, - UPGRADED; + UPGRADED, + ; companion object { fun parse(value: String): ClusterType { @@ -137,7 +137,8 @@ class ISMBackwardsCompatibilityIT : IndexStateManagementRestTestCase() { } private fun createRolloverPolicy(policyID: String) { - val policy = """ + val policy = + """ { "policy": { "policy_id": "$policyID", @@ -161,7 +162,7 @@ class ISMBackwardsCompatibilityIT : IndexStateManagementRestTestCase() { ] } } - """.trimIndent() + """.trimIndent() createPolicyJson(policy, policyID) } @@ -174,12 +175,12 @@ class ISMBackwardsCompatibilityIT : IndexStateManagementRestTestCase() { val info = getExplainManagedIndexMetaData(index).info as Map assertEquals( "Index rollover before it met the condition.", - AttemptRolloverStep.getPendingMessage(index), info["message"] + AttemptRolloverStep.getPendingMessage(index), info["message"], ) val conditions = info["conditions"] as Map assertEquals( "Did not have exclusively min age and min doc count conditions", - setOf(RolloverAction.MIN_INDEX_AGE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), conditions.keys + setOf(RolloverAction.MIN_INDEX_AGE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), conditions.keys, ) val minAge = conditions[RolloverAction.MIN_INDEX_AGE_FIELD] as Map val minDocCount = conditions[RolloverAction.MIN_DOC_COUNT_FIELD] as Map @@ -202,7 +203,7 @@ class ISMBackwardsCompatibilityIT : IndexStateManagementRestTestCase() { val conditions = info["conditions"] as Map assertEquals( "Did not have exclusively min age and min doc count conditions", - setOf(RolloverAction.MIN_INDEX_AGE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), conditions.keys + setOf(RolloverAction.MIN_INDEX_AGE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), conditions.keys, ) val minAge = conditions[RolloverAction.MIN_INDEX_AGE_FIELD] as Map val minDocCount = conditions[RolloverAction.MIN_DOC_COUNT_FIELD] as Map diff --git a/src/test/kotlin/org/opensearch/indexmanagement/bwc/IndexManagementBackwardsCompatibilityIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/bwc/IndexManagementBackwardsCompatibilityIT.kt index ae3acdf79..8cd25d362 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/bwc/IndexManagementBackwardsCompatibilityIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/bwc/IndexManagementBackwardsCompatibilityIT.kt @@ -9,6 +9,7 @@ import org.apache.hc.core5.http.ContentType import org.apache.hc.core5.http.io.entity.StringEntity import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.XContentFactory +import org.opensearch.core.rest.RestStatus import 
org.opensearch.index.query.QueryBuilders import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.LEGACY_ISM_BASE_URI import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.LEGACY_POLICY_BASE_URI @@ -18,12 +19,10 @@ import org.opensearch.indexmanagement.indexstatemanagement.util.XCONTENT_WITHOUT import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.opensearchapi.string import org.opensearch.indexmanagement.util.NO_ID -import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.waitFor import org.opensearch.search.builder.SearchSourceBuilder class IndexManagementBackwardsCompatibilityIT : IndexManagementRestTestCase() { - companion object { private val CLUSTER_TYPE = ClusterType.parse(System.getProperty("tests.rest.bwcsuite")) private val CLUSTER_NAME = System.getProperty("tests.clustername") @@ -82,7 +81,8 @@ class IndexManagementBackwardsCompatibilityIT : IndexManagementRestTestCase() { private enum class ClusterType { OLD, MIXED, - UPGRADED; + UPGRADED, + ; companion object { fun parse(value: String): ClusterType { @@ -118,19 +118,21 @@ class IndexManagementBackwardsCompatibilityIT : IndexManagementRestTestCase() { createIndex(INDEX_NAME, Settings.EMPTY) - val createResponse = client().makeRequest( - method = "PUT", - endpoint = "$LEGACY_POLICY_BASE_URI/$POLICY_NAME?refresh=true", - params = emptyMap(), - entity = StringEntity(policyString, ContentType.APPLICATION_JSON) - ) - - val addResponse = client().makeRequest( - method = "POST", - endpoint = "$LEGACY_ISM_BASE_URI/add/$INDEX_NAME", - params = emptyMap(), - entity = StringEntity(policyNameString, ContentType.APPLICATION_JSON) - ) + val createResponse = + client().makeRequest( + method = "PUT", + endpoint = "$LEGACY_POLICY_BASE_URI/$POLICY_NAME?refresh=true", + params = emptyMap(), + entity = StringEntity(policyString, ContentType.APPLICATION_JSON), + ) + + val addResponse = + client().makeRequest( + method = "POST", + endpoint = "$LEGACY_ISM_BASE_URI/add/$INDEX_NAME", + params = emptyMap(), + entity = StringEntity(policyNameString, ContentType.APPLICATION_JSON), + ) assertEquals("Create policy failed", RestStatus.CREATED, createResponse.restStatus()) assertEquals("Add policy failed", RestStatus.OK, addResponse.restStatus()) @@ -145,23 +147,25 @@ class IndexManagementBackwardsCompatibilityIT : IndexManagementRestTestCase() { @Suppress("UNCHECKED_CAST") private fun verifyPolicyExists(uri: String) { val search = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).toString() - val getResponse = client().makeRequest( - "GET", - "$uri/$POLICY_NAME", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val getResponse = + client().makeRequest( + "GET", + "$uri/$POLICY_NAME", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Get policy failed", RestStatus.OK, getResponse.restStatus()) } @Throws(Exception::class) @Suppress("UNCHECKED_CAST") private fun verifyPolicyOnIndex(uri: String) { - val getResponse = client().makeRequest( - method = "GET", - endpoint = "$uri/explain/$INDEX_NAME", - params = emptyMap() - ) + val getResponse = + client().makeRequest( + method = "GET", + endpoint = "$uri/explain/$INDEX_NAME", + params = emptyMap(), + ) assertEquals("Explain Index failed", RestStatus.OK, getResponse.restStatus()) val responseBody = getResponse.asMap() diff --git a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/LRONConfigSecurityBehaviorIT.kt 
b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/LRONConfigSecurityBehaviorIT.kt index bed2e0b9f..2ec99fded 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/LRONConfigSecurityBehaviorIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/LRONConfigSecurityBehaviorIT.kt @@ -10,12 +10,12 @@ import org.junit.Before import org.opensearch.client.Request import org.opensearch.client.RestClient import org.opensearch.commons.rest.SecureRestClientBuilder +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.DELETE_LRON_CONFIG import org.opensearch.indexmanagement.GET_LRON_CONFIG import org.opensearch.indexmanagement.INDEX_LRON_CONFIG import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.SecurityRestTestCase -import org.opensearch.core.rest.RestStatus @Suppress("UNCHECKED_CAST") class LRONConfigSecurityBehaviorIT : SecurityRestTestCase() { @@ -31,25 +31,26 @@ class LRONConfigSecurityBehaviorIT : SecurityRestTestCase() { fun setupUsersAndRoles() { initNodeIdsInRestIT(client()) // Init super user - val helpdeskClusterPermissions = listOf( - INDEX_LRON_CONFIG, - GET_LRON_CONFIG, - DELETE_LRON_CONFIG - ) + val helpdeskClusterPermissions = + listOf( + INDEX_LRON_CONFIG, + GET_LRON_CONFIG, + DELETE_LRON_CONFIG, + ) // In this test suite case john is a "super-user" which has all relevant privileges createUser(name = superUser, pwd = password) createAndAssignRole(HELPDESK_ROLE, helpdeskClusterPermissions, superUser) superUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), superUser, password).setSocketTimeout( - 60000 + 60000, ).setConnectionRequestTimeout(180000) .build() createUser(name = testUser, pwd = password) testUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), testUser, password).setSocketTimeout( - 60000 + 60000, ).setConnectionRequestTimeout(180000) .build() } @@ -68,11 +69,11 @@ class LRONConfigSecurityBehaviorIT : SecurityRestTestCase() { } fun `test index LRONConfig with security using POST`() { - /* super user */ + // super user val request = Request("POST", IndexManagementPlugin.LRON_BASE_URI) request.setJsonEntity(randomLRONConfig(taskId = randomTaskId(nodeId = nodeIdsInRestIT.random())).toJsonString()) executeRequest(request, RestStatus.OK, superUserClient!!) - /* test user */ + // test user request.setJsonEntity(randomLRONConfig(taskId = randomTaskId(nodeId = nodeIdsInRestIT.random())).toJsonString()) executeRequest(request, RestStatus.FORBIDDEN, testUserClient!!) @@ -86,12 +87,12 @@ class LRONConfigSecurityBehaviorIT : SecurityRestTestCase() { } fun `test index LRONConfig dry run with security using POST`() { - /* super user */ + // super user val request = Request("POST", IndexManagementPlugin.LRON_BASE_URI) request.addParameter("dry_run", "true") request.setJsonEntity(randomLRONConfig(taskId = randomTaskId(nodeId = nodeIdsInRestIT.random())).toJsonString()) executeRequest(request, RestStatus.OK, superUserClient!!) - /* test user */ + // test user executeRequest(request, RestStatus.FORBIDDEN, testUserClient!!) 
val indexConfigRole = "index_lron_config" @@ -104,7 +105,7 @@ class LRONConfigSecurityBehaviorIT : SecurityRestTestCase() { } fun `test update LRONConfig with security using PUT`() { - /* super user */ + // super user val lronConfig = randomLRONConfig(taskId = randomTaskId(nodeId = nodeIdsInRestIT.random())) val createRequest = Request("POST", IndexManagementPlugin.LRON_BASE_URI) createRequest.setJsonEntity(lronConfig.toJsonString()) @@ -113,7 +114,7 @@ class LRONConfigSecurityBehaviorIT : SecurityRestTestCase() { updateRequest.setJsonEntity(randomLRONConfig(taskId = lronConfig.taskId, actionName = lronConfig.actionName).toJsonString()) executeRequest(updateRequest, RestStatus.OK, superUserClient!!) - /* test user */ + // test user executeRequest(updateRequest, RestStatus.FORBIDDEN, testUserClient!!) val indexConfigRole = "index_lron_config" @@ -126,7 +127,7 @@ class LRONConfigSecurityBehaviorIT : SecurityRestTestCase() { } fun `test delete LRONConfig with security`() { - /* super user */ + // super user val lronConfig = randomLRONConfig(taskId = randomTaskId(nodeId = nodeIdsInRestIT.random())) val createRequest = Request("POST", IndexManagementPlugin.LRON_BASE_URI) createRequest.setJsonEntity(lronConfig.toJsonString()) @@ -134,7 +135,7 @@ class LRONConfigSecurityBehaviorIT : SecurityRestTestCase() { val deleteRequest = Request("DELETE", getResourceURI(lronConfig.taskId, lronConfig.actionName)) executeRequest(deleteRequest, RestStatus.OK, superUserClient!!) - /* test user */ + // test user executeRequest(createRequest, RestStatus.OK, superUserClient!!) executeRequest(deleteRequest, RestStatus.FORBIDDEN, testUserClient!!) @@ -148,7 +149,7 @@ class LRONConfigSecurityBehaviorIT : SecurityRestTestCase() { } fun `test get LRONConfig with security`() { - /* super user */ + // super user val lronConfig = randomLRONConfig(taskId = randomTaskId(nodeId = nodeIdsInRestIT.random())) val createRequest = Request("POST", IndexManagementPlugin.LRON_BASE_URI) createRequest.setJsonEntity(lronConfig.toJsonString()) @@ -156,7 +157,7 @@ class LRONConfigSecurityBehaviorIT : SecurityRestTestCase() { val getRequest = Request("GET", getResourceURI(lronConfig.taskId, lronConfig.actionName)) executeRequest(getRequest, RestStatus.OK, superUserClient!!) - /* test user */ + // test user executeRequest(getRequest, RestStatus.FORBIDDEN, testUserClient!!) val getConfigRole = "get_lron_config" @@ -169,7 +170,7 @@ class LRONConfigSecurityBehaviorIT : SecurityRestTestCase() { } fun `test get LRONConfigs with security`() { - /* super user */ + // super user val createRequest = Request("POST", IndexManagementPlugin.LRON_BASE_URI) randomList(1, 15) { createRequest.setJsonEntity(randomLRONConfig(taskId = randomTaskId(nodeId = nodeIdsInRestIT.random())).toJsonString()) @@ -179,7 +180,7 @@ class LRONConfigSecurityBehaviorIT : SecurityRestTestCase() { val getRequest = Request("GET", IndexManagementPlugin.LRON_BASE_URI) executeRequest(getRequest, RestStatus.OK, superUserClient!!) - /* test user */ + // test user executeRequest(getRequest, RestStatus.FORBIDDEN, testUserClient!!) 
val getConfigRole = "get_lron_config" diff --git a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/SerializationTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/SerializationTests.kt index d8c2c6979..74d9cb7e7 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/SerializationTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/SerializationTests.kt @@ -9,14 +9,13 @@ import org.junit.Assert import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.indexmanagement.controlcenter.notification.action.delete.DeleteLRONConfigRequest import org.opensearch.indexmanagement.controlcenter.notification.action.get.GetLRONConfigRequest -import org.opensearch.indexmanagement.controlcenter.notification.model.LRONConfig import org.opensearch.indexmanagement.controlcenter.notification.action.get.GetLRONConfigResponse +import org.opensearch.indexmanagement.controlcenter.notification.model.LRONConfig import org.opensearch.indexmanagement.opensearchapi.convertToMap import org.opensearch.indexmanagement.snapshotmanagement.getRandomString import org.opensearch.test.OpenSearchTestCase class SerializationTests : OpenSearchTestCase() { - fun `test lronConfig serialization`() { val lronConfig = randomLRONConfig() val out = BytesStreamOutput() @@ -25,7 +24,7 @@ class SerializationTests : OpenSearchTestCase() { Assert.assertEquals( buildMessage("lronConfig"), lronConfig, - LRONConfig(out.bytes().streamInput()) + LRONConfig(out.bytes().streamInput()), ) } @@ -36,7 +35,7 @@ class SerializationTests : OpenSearchTestCase() { Assert.assertEquals( buildMessage("deleteLronConfigRequest"), deleteLRONConfigRequest.docId, - DeleteLRONConfigRequest(out.bytes().streamInput()).docId + DeleteLRONConfigRequest(out.bytes().streamInput()).docId, ) } @@ -47,7 +46,7 @@ class SerializationTests : OpenSearchTestCase() { Assert.assertEquals( buildMessage("getLronConfigRequest"), getLRONConfigRequest.docId, - GetLRONConfigRequest(out.bytes().streamInput()).docId + GetLRONConfigRequest(out.bytes().streamInput()).docId, ) } @@ -58,7 +57,7 @@ class SerializationTests : OpenSearchTestCase() { Assert.assertEquals( buildMessage("lronConfigResponse"), lronConfigResponse.convertToMap(), - LRONConfigResponse(out.bytes().streamInput()).convertToMap() + LRONConfigResponse(out.bytes().streamInput()).convertToMap(), ) } @@ -69,12 +68,12 @@ class SerializationTests : OpenSearchTestCase() { Assert.assertEquals( buildMessage("getLRONConfigResponse"), getLRONConfigResponse.convertToMap(), - GetLRONConfigResponse(out.bytes().streamInput()).convertToMap() + GetLRONConfigResponse(out.bytes().streamInput()).convertToMap(), ) } private fun buildMessage( - itemType: String + itemType: String, ): String { return "$itemType serialization test failed. 
" } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/TestHelpers.kt b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/TestHelpers.kt index b1ca9a2fb..d366e185d 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/TestHelpers.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/TestHelpers.kt @@ -9,26 +9,27 @@ import org.opensearch.client.RestClient import org.opensearch.common.UUIDs import org.opensearch.common.xcontent.XContentFactory import org.opensearch.commons.authuser.User +import org.opensearch.core.tasks.TaskId import org.opensearch.core.xcontent.ToXContent import org.opensearch.indexmanagement.IndexManagementPlugin +import org.opensearch.indexmanagement.common.model.notification.Channel +import org.opensearch.indexmanagement.controlcenter.notification.action.get.GetLRONConfigResponse import org.opensearch.indexmanagement.controlcenter.notification.model.LRONCondition import org.opensearch.indexmanagement.controlcenter.notification.model.LRONConfig import org.opensearch.indexmanagement.controlcenter.notification.util.getDocID import org.opensearch.indexmanagement.controlcenter.notification.util.getPriority import org.opensearch.indexmanagement.controlcenter.notification.util.supportedActions -import org.opensearch.indexmanagement.common.model.notification.Channel -import org.opensearch.indexmanagement.controlcenter.notification.action.get.GetLRONConfigResponse import org.opensearch.indexmanagement.indexstatemanagement.randomChannel import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.opensearchapi.string import org.opensearch.indexmanagement.randomUser -import org.opensearch.core.tasks.TaskId import org.opensearch.test.OpenSearchTestCase.randomBoolean import org.opensearch.test.OpenSearchTestCase.randomLong import org.opensearch.test.rest.OpenSearchRestTestCase -/* need to be initialized before used */ +// need to be initialized before used var nodeIdsInRestIT: Set = emptySet() + @Suppress("UNCHECKED_CAST") fun initNodeIdsInRestIT(client: RestClient) { if (nodeIdsInRestIT.isNotEmpty()) { @@ -45,7 +46,7 @@ fun randomLRONConfig( taskId: TaskId? = randomTaskId(), actionName: String? = randomActionName(), channels: List? = List(OpenSearchRestTestCase.randomIntBetween(1, 10)) { randomChannel() }, - user: User? = randomUser() + user: User? 
= randomUser(), ): LRONConfig { val priority = getPriority(taskId, actionName) return LRONConfig( @@ -54,20 +55,20 @@ fun randomLRONConfig( actionName = actionName, channels = channels, user = user, - priority = priority + priority = priority, ) } fun randomLRONCondition( success: Boolean = randomBoolean(), - failure: Boolean = randomBoolean() + failure: Boolean = randomBoolean(), ): LRONCondition { return LRONCondition(success, failure) } fun randomTaskId( nodeId: String = UUIDs.randomBase64UUID(), - id: Long = randomLong() + id: Long = randomLong(), ): TaskId { return TaskId(nodeId, id) } @@ -77,27 +78,28 @@ fun randomActionName(): String { } fun randomLRONConfigResponse( - lronConfig: LRONConfig = randomLRONConfig() + lronConfig: LRONConfig = randomLRONConfig(), ): LRONConfigResponse { val id = getDocID(lronConfig.taskId, lronConfig.actionName) return LRONConfigResponse( id = id, - lronConfig = lronConfig + lronConfig = lronConfig, ) } fun randomGetLRONConfigResponse( - size: Int = 10 + size: Int = 10, ): GetLRONConfigResponse { return GetLRONConfigResponse( lronConfigResponses = List(size) { randomLRONConfigResponse() }, - size + size, ) } -fun LRONConfig.toJsonString(params: ToXContent.Params = ToXContent.EMPTY_PARAMS): String = this.toXContent( - XContentFactory.jsonBuilder(), params -).string() +fun LRONConfig.toJsonString(params: ToXContent.Params = ToXContent.EMPTY_PARAMS): String = + this.toXContent( + XContentFactory.jsonBuilder(), params, + ).string() fun getResourceURI(taskId: TaskId?, actionName: String?): String { return "${IndexManagementPlugin.LRON_BASE_URI}/${getDocID(taskId, actionName)}" diff --git a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/XContentTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/XContentTests.kt index 5cd779ada..e170d64d6 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/XContentTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/XContentTests.kt @@ -11,19 +11,19 @@ import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.tasks.TaskId import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.XContentParser +import org.opensearch.indexmanagement.common.model.notification.Channel +import org.opensearch.indexmanagement.controlcenter.notification.action.get.GetLRONConfigResponse import org.opensearch.indexmanagement.controlcenter.notification.model.LRONCondition import org.opensearch.indexmanagement.controlcenter.notification.model.LRONConfig import org.opensearch.indexmanagement.controlcenter.notification.util.PRIORITY_TASK_ID import org.opensearch.indexmanagement.controlcenter.notification.util.getDocID -import org.opensearch.indexmanagement.common.model.notification.Channel -import org.opensearch.indexmanagement.controlcenter.notification.action.get.GetLRONConfigResponse import org.opensearch.indexmanagement.opensearchapi.parseWithType import org.opensearch.indexmanagement.opensearchapi.string import org.opensearch.indexmanagement.randomUser -import org.opensearch.core.tasks.TaskId import org.opensearch.test.OpenSearchTestCase class XContentTests : OpenSearchTestCase() { @@ -31,7 +31,7 @@ class XContentTests : OpenSearchTestCase() { 
Assert.assertEquals( buildMessage("lronConfig", XContentType.JSON), sampleLRONConfig, - parsedItem(sampleLRONConfig, XContentType.JSON, LRONConfig.Companion::parse) + parsedItem(sampleLRONConfig, XContentType.JSON, LRONConfig.Companion::parse), ) val xContentType = XContentType.values().random() @@ -39,12 +39,13 @@ class XContentTests : OpenSearchTestCase() { Assert.assertEquals( buildMessage("lronConfig", xContentType), lronConfig, - parsedItem(lronConfig, xContentType, LRONConfig.Companion::parse) + parsedItem(lronConfig, xContentType, LRONConfig.Companion::parse), ) } fun `test lronConfig Parsing default values`() { - val jsonString = """ + val jsonString = + """ { "lron_config": { "task_id": "node_123:456", @@ -56,18 +57,20 @@ class XContentTests : OpenSearchTestCase() { } } """.replace("\\s".toRegex(), "") - val lronConfig = XContentType.JSON.xContent().createParser( - xContentRegistry(), - LoggingDeprecationHandler.INSTANCE, - jsonString - ).parseWithType(parse = LRONConfig.Companion::parse) + val lronConfig = + XContentType.JSON.xContent().createParser( + xContentRegistry(), + LoggingDeprecationHandler.INSTANCE, + jsonString, + ).parseWithType(parse = LRONConfig.Companion::parse) assertEquals("action name should be null", null, lronConfig.actionName) assertEquals("should be true by default", true, lronConfig.lronCondition.success) assertEquals("should be true by default", true, lronConfig.lronCondition.failure) } fun `test lronConfig Parsing with no id no action fails`() { - val jsonString = """ + val jsonString = + """ { "lron_config": { "task_id": "node_123:456" @@ -78,7 +81,7 @@ class XContentTests : OpenSearchTestCase() { XContentType.JSON.xContent().createParser( xContentRegistry(), LoggingDeprecationHandler.INSTANCE, - jsonString + jsonString, ).parseWithType(parse = LRONConfig.Companion::parse) Assert.fail("expect to throw error when parsing lronConfig") } catch (e: IllegalArgumentException) { @@ -87,7 +90,8 @@ class XContentTests : OpenSearchTestCase() { } fun `test lronConfig Parsing with no channels fails`() { - val jsonString = """ + val jsonString = + """ { "lron_config": { "channels": [ @@ -102,7 +106,7 @@ class XContentTests : OpenSearchTestCase() { XContentType.JSON.xContent().createParser( xContentRegistry(), LoggingDeprecationHandler.INSTANCE, - jsonString + jsonString, ).parseWithType(parse = LRONConfig.Companion::parse) Assert.fail("expect to throw error when parsing lronConfig") } catch (e: IllegalArgumentException) { @@ -111,19 +115,22 @@ class XContentTests : OpenSearchTestCase() { } fun `test lronConfigResponse`() { - val responseString = sampleLRONConfigResponse - .toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS).string() - /* we drop the user info and priority info in rest layer */ + val responseString = + sampleLRONConfigResponse + .toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS).string() + // we drop the user info and priority info in rest layer assertEquals("lronConfigResponse toXcontent failed.", sampleExpectedJson, responseString) } fun `test getLRONConfigResponse`() { - val response = GetLRONConfigResponse( - listOf(sampleLRONConfigResponse, sampleLRONConfigResponse), - totalNumber = 2 - ) + val response = + GetLRONConfigResponse( + listOf(sampleLRONConfigResponse, sampleLRONConfigResponse), + totalNumber = 2, + ) val responseString = response.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS).string() - val expectedJSON = """ + val expectedJSON = + """ { "lron_configs": [ $sampleExpectedJson, @@ 
-138,7 +145,7 @@ class XContentTests : OpenSearchTestCase() { private fun buildMessage( itemType: String, - xContentType: XContentType + xContentType: XContentType, ): String { return "$itemType toXContent test failed. xContentType: ${xContentType.subtype()}. " } @@ -146,20 +153,22 @@ class XContentTests : OpenSearchTestCase() { private fun parsedItem( item: T, xContentType: XContentType, - parseWithTypeParser: (xcp: XContentParser, id: String, seqNo: Long, primaryTerm: Long) -> T + parseWithTypeParser: (xcp: XContentParser, id: String, seqNo: Long, primaryTerm: Long) -> T, ): T { - val bytesReference = toShuffledXContent( - item, - xContentType.xContent().mediaType(), - ToXContent.EMPTY_PARAMS, - randomBoolean() - ) - val xcp = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - bytesReference, - xContentType.xContent().mediaType() - ) + val bytesReference = + toShuffledXContent( + item, + xContentType.xContent().mediaType(), + ToXContent.EMPTY_PARAMS, + randomBoolean(), + ) + val xcp = + XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + bytesReference, + xContentType.xContent().mediaType(), + ) return xcp.parseWithType(parse = parseWithTypeParser) } @@ -171,19 +180,22 @@ class XContentTests : OpenSearchTestCase() { @BeforeClass @JvmStatic fun setup() { - sampleLRONConfig = LRONConfig( - lronCondition = LRONCondition(success = true, failure = false), - taskId = TaskId("node_123", 456L), - actionName = "indices:admin/resize", - channels = listOf(Channel("channel123"), Channel("channel456")), - user = randomUser(), - priority = PRIORITY_TASK_ID - ) - sampleLRONConfigResponse = LRONConfigResponse( - id = getDocID(sampleLRONConfig.taskId, sampleLRONConfig.actionName), - lronConfig = sampleLRONConfig - ) - sampleExpectedJson = """ + sampleLRONConfig = + LRONConfig( + lronCondition = LRONCondition(success = true, failure = false), + taskId = TaskId("node_123", 456L), + actionName = "indices:admin/resize", + channels = listOf(Channel("channel123"), Channel("channel456")), + user = randomUser(), + priority = PRIORITY_TASK_ID, + ) + sampleLRONConfigResponse = + LRONConfigResponse( + id = getDocID(sampleLRONConfig.taskId, sampleLRONConfig.actionName), + lronConfig = sampleLRONConfig, + ) + sampleExpectedJson = + """ { "_id": "LRON:node_123:456", "lron_config": { diff --git a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/IndexOperationActionFilterTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/IndexOperationActionFilterTests.kt index 8a1a13f9d..79ad291ff 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/IndexOperationActionFilterTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/IndexOperationActionFilterTests.kt @@ -8,8 +8,6 @@ package org.opensearch.indexmanagement.controlcenter.notification.filter import org.junit.Assert import org.junit.Before import org.mockito.Mockito -import org.opensearch.core.action.ActionListener -import org.opensearch.core.action.ActionResponse import org.opensearch.action.admin.indices.forcemerge.ForceMergeAction import org.opensearch.action.admin.indices.open.OpenIndexAction import org.opensearch.action.admin.indices.shrink.ResizeAction @@ -18,11 +16,13 @@ import org.opensearch.client.Client import org.opensearch.cluster.OpenSearchAllocationTestCase import 
org.opensearch.cluster.metadata.IndexNameExpressionResolver import org.opensearch.cluster.service.ClusterService +import org.opensearch.core.action.ActionListener +import org.opensearch.core.action.ActionResponse +import org.opensearch.core.tasks.TaskId import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.index.reindex.ReindexAction import org.opensearch.index.reindex.ReindexRequest import org.opensearch.tasks.Task -import org.opensearch.core.tasks.TaskId import org.opensearch.threadpool.ThreadPool class IndexOperationActionFilterTests : OpenSearchAllocationTestCase() { @@ -48,9 +48,10 @@ class IndexOperationActionFilterTests : OpenSearchAllocationTestCase() { val activeShardsObserver = ActiveShardsObserver(clusterService, client.threadPool()) - filter = IndexOperationActionFilter( - this.client, clusterService, activeShardsObserver, indexNameExpressionResolver - ) + filter = + IndexOperationActionFilter( + this.client, clusterService, activeShardsObserver, indexNameExpressionResolver, + ) } fun `test wrapped listener for long running actions`() { @@ -60,12 +61,13 @@ class IndexOperationActionFilterTests : OpenSearchAllocationTestCase() { val wrappedActions = listOf(ReindexAction.NAME, ResizeAction.NAME, ForceMergeAction.NAME, OpenIndexAction.NAME) for (action in wrappedActions) { - val newListener = filter.wrapActionListener( - task, - ReindexAction.NAME, - ReindexRequest(), - listener - ) + val newListener = + filter.wrapActionListener( + task, + ReindexAction.NAME, + ReindexRequest(), + listener, + ) Assert.assertNotSame(listener, newListener) Assert.assertTrue(newListener is NotificationActionListener<*, *>) @@ -76,12 +78,13 @@ class IndexOperationActionFilterTests : OpenSearchAllocationTestCase() { val task = Mockito.mock(Task::class.java) Mockito.`when`(task.parentTaskId).thenReturn(TaskId.EMPTY_TASK_ID) val listener = TestActionListener() - val newListener = filter.wrapActionListener( - task, - "test", - ReindexRequest(), - listener - ) + val newListener = + filter.wrapActionListener( + task, + "test", + ReindexRequest(), + listener, + ) Assert.assertSame(listener, newListener) } @@ -90,12 +93,13 @@ class IndexOperationActionFilterTests : OpenSearchAllocationTestCase() { val task = Mockito.mock(Task::class.java) Mockito.`when`(task.parentTaskId).thenReturn(TaskId("abc:1")) val listener = TestActionListener() - val newListener = filter.wrapActionListener( - task, - ReindexAction.NAME, - ReindexRequest(), - listener - ) + val newListener = + filter.wrapActionListener( + task, + ReindexAction.NAME, + ReindexRequest(), + listener, + ) Assert.assertSame(listener, newListener) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/NotificationActionListenerIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/NotificationActionListenerIT.kt index 0200da8ec..d95bc26a1 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/NotificationActionListenerIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/NotificationActionListenerIT.kt @@ -17,19 +17,18 @@ import org.opensearch.client.Response import org.opensearch.client.ResponseException import org.opensearch.client.RestClient import org.opensearch.common.settings.Settings +import org.opensearch.core.rest.RestStatus import org.opensearch.index.reindex.ReindexAction import org.opensearch.indexmanagement.IndexManagementPlugin import 
org.opensearch.indexmanagement.IndexManagementRestTestCase import org.opensearch.indexmanagement.controlcenter.notification.util.supportedActions import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.waitFor -import org.opensearch.core.rest.RestStatus import java.net.InetAddress import java.net.InetSocketAddress import java.time.Instant class NotificationActionListenerIT : IndexManagementRestTestCase() { - private val notificationConfId = "test-notification-id" private val notificationIndex = "test-notification-index" @@ -41,19 +40,21 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { server = HttpServer.create(InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0) logger.info("starting mock server at {}", server.address.hostString) - val httpHandler = HttpHandler { - val msg = String(it.requestBody.readAllBytes()) - logger.info(msg) - val res = client.makeRequest( - "POST", "$notificationIndex/_doc?refresh=true", - StringEntity("""{"msg": "${msg.replace(System.lineSeparator(), " ")}"}""", ContentType.APPLICATION_JSON) - ) - logger.info(res.restStatus()) - - it.sendResponseHeaders(200, "ack".toByteArray().size.toLong()) - it.responseBody.write("ack".toByteArray()) - it.close() - } + val httpHandler = + HttpHandler { + val msg = String(it.requestBody.readAllBytes()) + logger.info(msg) + val res = + client.makeRequest( + "POST", "$notificationIndex/_doc?refresh=true", + StringEntity("""{"msg": "${msg.replace(System.lineSeparator(), " ")}"}""", ContentType.APPLICATION_JSON), + ) + logger.info(res.restStatus()) + + it.sendResponseHeaders(200, "ack".toByteArray().size.toLong()) + it.responseBody.write("ack".toByteArray()) + it.close() + } server.createContext("/notification", httpHandler) server.createContext("/notification2", httpHandler) @@ -78,22 +79,22 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { "POST", "/_plugins/_notifications/configs", StringEntity( """ - { - "config_id": "$notificationConfId", - "name": "test-webhook", - "config": { - "name": "Sample webhook Channel", - "description": "This is a webhook channel", - "config_type": "webhook", - "is_enabled": true, - "webhook": { - "url": "http://${server.address.hostString}:${server.address.port}/notification" - } - } + { + "config_id": "$notificationConfId", + "name": "test-webhook", + "config": { + "name": "Sample webhook Channel", + "description": "This is a webhook channel", + "config_type": "webhook", + "is_enabled": true, + "webhook": { + "url": "http://${server.address.hostString}:${server.address.port}/notification" } + } + } """.trimIndent(), - ContentType.APPLICATION_JSON - ) + ContentType.APPLICATION_JSON, + ), ) supportedActions.forEach { action -> @@ -112,8 +113,8 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { } } """.trimIndent(), - ContentType.APPLICATION_JSON - ) + ContentType.APPLICATION_JSON, + ), ) } } @@ -122,9 +123,10 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { fun `test notify for force merge`() { insertSampleData("source-index", 10) - val response = client.makeRequest( - "POST", "source-index/_forcemerge" - ) + val response = + client.makeRequest( + "POST", "source-index/_forcemerge", + ) Assert.assertTrue(response.restStatus() == RestStatus.OK) @@ -135,7 +137,7 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { ( client.makeRequest("GET", "$notificationIndex/_search?q=msg:merge") .asMap() as Map>> - )["hits"]!!["total"]!!["value"] + 
)["hits"]!!["total"]!!["value"], ) } } @@ -145,19 +147,20 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { insertSampleData("source-index", 10) updateIndexSettings("source-index", Settings.builder().put("index.blocks.write", true)) - val response = client.makeRequest( - "POST", "source-index/_split/test-split", - StringEntity( - """ - { - "settings":{ - "index.number_of_shards": 2 + val response = + client.makeRequest( + "POST", "source-index/_split/test-split", + StringEntity( + """ + { + "settings":{ + "index.number_of_shards": 2 + } } - } - """.trimIndent(), - ContentType.APPLICATION_JSON + """.trimIndent(), + ContentType.APPLICATION_JSON, + ), ) - ) Assert.assertTrue(response.restStatus() == RestStatus.OK) @@ -168,7 +171,7 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { ( client.makeRequest("GET", "$notificationIndex/_search?q=msg:Split") .asMap() as Map>> - )["hits"]!!["total"]!!["value"] + )["hits"]!!["total"]!!["value"], ) } } @@ -179,9 +182,10 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { closeIndex("source-index") - val response = client.makeRequest( - "POST", "source-index/_open" - ) + val response = + client.makeRequest( + "POST", "source-index/_open", + ) Assert.assertTrue(response.restStatus() == RestStatus.OK) @@ -192,7 +196,7 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { ( client.makeRequest("GET", "$notificationIndex/_search?q=msg:Open") .asMap() as Map>> - )["hits"]!!["total"]!!["value"] + )["hits"]!!["total"]!!["value"], ) } } @@ -210,7 +214,7 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { ( client.makeRequest("GET", "$notificationIndex/_search?q=msg:reindex") .asMap() as Map>> - )["hits"]!!["total"]!!["value"] + )["hits"]!!["total"]!!["value"], ) } } @@ -221,51 +225,53 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { createIndex("reindex-dest", Settings.EMPTY) client.makeRequest( "POST", "/_plugins/_notifications/configs", - StringEntity( - """ - { - "config_id": "config_id", - "name": "test-webhook2", - "config": { - "name": "Sample webhook Channel2", - "description": "This is a webhook channel2", - "config_type": "webhook", - "is_enabled": true, - "webhook": { - "url": "http://${server.address.hostString}:${server.address.port}/notification2" - } - } - } - """.trimIndent(), - ContentType.APPLICATION_JSON - ) - ) - - val response = client.makeRequest( - "POST", "_reindex?wait_for_completion=false", StringEntity( """ { - "source": { - "index": "source-index" - }, - "dest": { - "index": "reindex-dest" + "config_id": "config_id", + "name": "test-webhook2", + "config": { + "name": "Sample webhook Channel2", + "description": "This is a webhook channel2", + "config_type": "webhook", + "is_enabled": true, + "webhook": { + "url": "http://${server.address.hostString}:${server.address.port}/notification2" + } } } """.trimIndent(), - ContentType.APPLICATION_JSON - ) + ContentType.APPLICATION_JSON, + ), ) + val response = + client.makeRequest( + "POST", "_reindex?wait_for_completion=false", + StringEntity( + """ + { + "source": { + "index": "source-index" + }, + "dest": { + "index": "reindex-dest" + } + } + """.trimIndent(), + ContentType.APPLICATION_JSON, + ), + ) + Assert.assertTrue(response.restStatus() == RestStatus.OK) val taskId = response.asMap()["task"] as String logger.info("task id {}", taskId) - val policyResponse = client.makeRequest( - "POST", "_plugins/_im/lron", - StringEntity( - """ + val policyResponse = + 
client.makeRequest( + "POST", "_plugins/_im/lron", + StringEntity( + """ { "lron_config": { "action_name": "${ReindexAction.NAME}", @@ -277,10 +283,10 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { ] } } - """.trimIndent(), - ContentType.APPLICATION_JSON + """.trimIndent(), + ContentType.APPLICATION_JSON, + ), ) - ) val id = policyResponse.asMap()["_id"] as String logger.info("policy id {}", id) @@ -291,15 +297,16 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { ( client.makeRequest("GET", "$notificationIndex/_search?q=msg:Reindex") .asMap() as Map>> - )["hits"]!!["total"]!!["value"] + )["hits"]!!["total"]!!["value"], ) // runtime policy been removed - val res = try { - client.makeRequest("GET", "_plugins/_im/lron/${id.replace("/", "%2F")}") - } catch (e: ResponseException) { - e.response - } + val res = + try { + client.makeRequest("GET", "_plugins/_im/lron/${id.replace("/", "%2F")}") + } catch (e: ResponseException) { + e.response + } assertEquals(RestStatus.NOT_FOUND.status, res.statusLine.statusCode) } } @@ -315,20 +322,20 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { "POST", "_plugins/_im/lron", StringEntity( """ - { - "lron_config": { - "action_name": "${ReindexAction.NAME}", - "task_id": "$taskId", - "channels": [ - { - "id": "$notificationConfId" - } - ] - } + { + "lron_config": { + "action_name": "${ReindexAction.NAME}", + "task_id": "$taskId", + "channels": [ + { + "id": "$notificationConfId" + } + ] } + } """.trimIndent(), - ContentType.APPLICATION_JSON - ) + ContentType.APPLICATION_JSON, + ), ) waitFor(Instant.ofEpochSecond(60)) { @@ -338,7 +345,7 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { ( client.makeRequest("GET", "$notificationIndex/_search?q=msg:Reindex") .asMap() as Map>> - )["hits"]!!["total"]!!["value"] + )["hits"]!!["total"]!!["value"], ) } } @@ -355,7 +362,7 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { ( client.makeRequest("GET", "$notificationIndex/_search?q=msg:Close") .asMap() as Map>> - )["hits"]!!["total"]!!["value"] + )["hits"]!!["total"]!!["value"], ) } } @@ -368,9 +375,10 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { // remove notification policy client.makeRequest("DELETE", "_plugins/_im/lron/LRON:${OpenIndexAction.NAME.replace("/", "%2F")}") - val response = client.makeRequest( - "POST", "source-index/_open" - ) + val response = + client.makeRequest( + "POST", "source-index/_open", + ) Assert.assertTrue(response.restStatus() == RestStatus.OK) @@ -382,7 +390,7 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { ( client.makeRequest("GET", "$notificationIndex/_search?q=msg:Open") .asMap() as Map>> - )["hits"]!!["total"]!!["value"] + )["hits"]!!["total"]!!["value"], ) } } @@ -395,9 +403,10 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { // delete system index client.makeRequest("DELETE", IndexManagementPlugin.CONTROL_CENTER_INDEX) - val response = client.makeRequest( - "POST", "source-index/_open" - ) + val response = + client.makeRequest( + "POST", "source-index/_open", + ) Assert.assertTrue(response.restStatus() == RestStatus.OK) @@ -409,7 +418,7 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { ( client.makeRequest("GET", "$notificationIndex/_search?q=msg:Open") .asMap() as Map>> - )["hits"]!!["total"]!!["value"] + )["hits"]!!["total"]!!["value"], ) } } @@ -427,23 +436,23 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() 
{ "POST", "_plugins/_im/lron", StringEntity( """ - { - "lron_config": { - "task_id": "$taskId", - "lron_condition": { - "failure": true, - "success": false - }, - "channels": [ - { - "id": "$notificationConfId" - } - ] - } + { + "lron_config": { + "task_id": "$taskId", + "lron_condition": { + "failure": true, + "success": false + }, + "channels": [ + { + "id": "$notificationConfId" + } + ] } + } """.trimIndent(), - ContentType.APPLICATION_JSON - ) + ContentType.APPLICATION_JSON, + ), ) waitFor(Instant.ofEpochSecond(60)) { @@ -453,7 +462,7 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { ( client.makeRequest("GET", "$notificationIndex/_search?q=msg:reindex") .asMap() as Map>> - )["hits"]!!["total"]!!["value"] + )["hits"]!!["total"]!!["value"], ) try { @@ -469,10 +478,11 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { insertSampleData("source-index", 10) createIndex("reindex-dest", Settings.EMPTY) - val response = client.makeRequest( - "POST", "_reindex?wait_for_completion=false", - StringEntity( - """ + val response = + client.makeRequest( + "POST", "_reindex?wait_for_completion=false", + StringEntity( + """ { "source": { "index": "source-index" @@ -481,10 +491,10 @@ class NotificationActionListenerIT : IndexManagementRestTestCase() { "index": "reindex-dest" } } - """.trimIndent(), - ContentType.APPLICATION_JSON + """.trimIndent(), + ContentType.APPLICATION_JSON, + ), ) - ) return response } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/NotificationActionListenerTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/NotificationActionListenerTests.kt index 29797757c..edd417e26 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/NotificationActionListenerTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/NotificationActionListenerTests.kt @@ -8,13 +8,13 @@ package org.opensearch.indexmanagement.controlcenter.notification.filter import org.junit.Assert import org.junit.Before import org.mockito.Mockito.mock -import org.opensearch.core.action.ActionListener import org.opensearch.action.ActionRequest -import org.opensearch.core.action.ActionResponse import org.opensearch.action.support.ActiveShardsObserver import org.opensearch.client.Client import org.opensearch.cluster.metadata.IndexNameExpressionResolver import org.opensearch.cluster.service.ClusterService +import org.opensearch.core.action.ActionListener +import org.opensearch.core.action.ActionResponse import org.opensearch.indexmanagement.controlcenter.notification.action.get.GetLRONConfigResponse import org.opensearch.indexmanagement.controlcenter.notification.model.LRONCondition import org.opensearch.indexmanagement.controlcenter.notification.randomLRONConfig @@ -23,7 +23,6 @@ import org.opensearch.tasks.Task import org.opensearch.test.OpenSearchTestCase class NotificationActionListenerTests : OpenSearchTestCase() { - private lateinit var listener: NotificationActionListener private lateinit var delegate: ActionListener private lateinit var client: Client @@ -42,24 +41,26 @@ class NotificationActionListenerTests : OpenSearchTestCase() { activeShardsObserver = mock() indexNameExpressionResolver = mock() request = mock() - listener = NotificationActionListener( - delegate, - client, - clusterService, - "open", - task, - activeShardsObserver, - request, - indexNameExpressionResolver - ) + listener = + 
NotificationActionListener( + delegate, + client, + clusterService, + "open", + task, + activeShardsObserver, + request, + indexNameExpressionResolver, + ) } fun `test all conditions are disabled`() { val lronConfig = randomLRONConfig(lronCondition = LRONCondition(false, false)) val lronConfigResponse = randomLRONConfigResponse(lronConfig = lronConfig) - val responses = GetLRONConfigResponse( - lronConfigResponses = listOf(lronConfigResponse), 1 - ) + val responses = + GetLRONConfigResponse( + lronConfigResponses = listOf(lronConfigResponse), 1, + ) Assert.assertTrue(listener.getNotificationPolices(responses, OperationResult.COMPLETE).isEmpty()) Assert.assertTrue(listener.getNotificationPolices(responses, OperationResult.FAILED).isEmpty()) @@ -67,16 +68,18 @@ class NotificationActionListenerTests : OpenSearchTestCase() { fun `test success and failed conditions`() { val lronConfigWithTaskId = randomLRONConfig(lronCondition = LRONCondition(true, false)) - val lronConfigDefault = randomLRONConfig( - lronCondition = LRONCondition(true, true), - taskId = null, - actionName = lronConfigWithTaskId.actionName - ) + val lronConfigDefault = + randomLRONConfig( + lronCondition = LRONCondition(true, true), + taskId = null, + actionName = lronConfigWithTaskId.actionName, + ) val lronConfigResponseWithTaskId = randomLRONConfigResponse(lronConfig = lronConfigWithTaskId) val lronConfigResponseDefault = randomLRONConfigResponse(lronConfig = lronConfigDefault) - val responses = GetLRONConfigResponse( - lronConfigResponses = listOf(lronConfigResponseWithTaskId, lronConfigResponseDefault), 2 - ) + val responses = + GetLRONConfigResponse( + lronConfigResponses = listOf(lronConfigResponseWithTaskId, lronConfigResponseDefault), 2, + ) Assert.assertEquals(2, listener.getNotificationPolices(responses, OperationResult.COMPLETE).size) Assert.assertEquals(1, listener.getNotificationPolices(responses, OperationResult.FAILED).size) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ForceMergeIndexRespParserTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ForceMergeIndexRespParserTests.kt index 89b61d16a..156275833 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ForceMergeIndexRespParserTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ForceMergeIndexRespParserTests.kt @@ -9,22 +9,22 @@ import org.junit.Assert import org.opensearch.OpenSearchException import org.opensearch.action.admin.indices.forcemerge.ForceMergeRequest import org.opensearch.action.admin.indices.forcemerge.ForceMergeResponse -import org.opensearch.core.action.support.DefaultShardOperationFailedException import org.opensearch.action.support.broadcast.BroadcastResponse import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.action.support.DefaultShardOperationFailedException +import org.opensearch.core.index.Index import org.opensearch.core.xcontent.DeprecationHandler import org.opensearch.core.xcontent.NamedXContentRegistry -import org.opensearch.core.index.Index import org.opensearch.indexmanagement.controlcenter.notification.filter.OperationResult import org.opensearch.indexmanagement.snapshotmanagement.toJsonString class ForceMergeIndexRespParserTests : BaseRespParserTests() { - fun `test build message for completion`() { - val xContentParser = XContentType.JSON.xContent().createParser( - NamedXContentRegistry.EMPTY, 
DeprecationHandler.IGNORE_DEPRECATIONS, - "{\"_shards\":{\"total\":10,\"successful\":10,\"failed\":0}}" - ) + val xContentParser = + XContentType.JSON.xContent().createParser( + NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, + "{\"_shards\":{\"total\":10,\"successful\":10,\"failed\":0}}", + ) val response = ForceMergeResponse.fromXContent(xContentParser) val request = ForceMergeRequest("test-index-1") @@ -35,15 +35,16 @@ class ForceMergeIndexRespParserTests : BaseRespParserTests() { Assert.assertEquals(title, "Force merge operation on [test-cluster/test-index-1] has completed") Assert.assertEquals( msg, - "The force merge operation on [test-cluster/test-index-1] has been completed." + "The force merge operation on [test-cluster/test-index-1] has been completed.", ) } fun `test build message for completion with multiple indexes`() { - val xContentParser = XContentType.JSON.xContent().createParser( - NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, - "{\"_shards\":{\"total\":10,\"successful\":10,\"failed\":0}}" - ) + val xContentParser = + XContentType.JSON.xContent().createParser( + NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, + "{\"_shards\":{\"total\":10,\"successful\":10,\"failed\":0}}", + ) val response = ForceMergeResponse.fromXContent(xContentParser) val request = ForceMergeRequest("test-index-1", "test-index-2") @@ -54,22 +55,24 @@ class ForceMergeIndexRespParserTests : BaseRespParserTests() { Assert.assertEquals(title, "Force merge operation on 2 indexes from [test-cluster] has completed") Assert.assertEquals( msg, - "[test-index-1,test-index-2] from [test-cluster] have been merged." + "[test-index-1,test-index-2] from [test-cluster] have been merged.", ) } fun `test build message for failure`() { val ex = OpenSearchException("shard is not available") ex.index = Index("test-index-1", "uuid-1") - val resp = BroadcastResponse( - 2, 1, 1, - arrayListOf(DefaultShardOperationFailedException(ex)) - ) + val resp = + BroadcastResponse( + 2, 1, 1, + arrayListOf(DefaultShardOperationFailedException(ex)), + ) - val xContentParser = XContentType.JSON.xContent().createParser( - NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, - resp.toJsonString() - ) + val xContentParser = + XContentType.JSON.xContent().createParser( + NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, + resp.toJsonString(), + ) val response = ForceMergeResponse.fromXContent(xContentParser) val request = ForceMergeRequest("test-index-1") @@ -80,7 +83,7 @@ class ForceMergeIndexRespParserTests : BaseRespParserTests() { Assert.assertEquals(ret.title, "Force merge operation on [test-cluster/test-index-1] has failed") Assert.assertEquals( ret.message, - "index [test-index-1] shard [-1] OpenSearchException[OpenSearch exception [type=exception, reason=shard is not available]]" + "index [test-index-1] shard [-1] OpenSearchException[OpenSearch exception [type=exception, reason=shard is not available]]", ) } } @@ -96,7 +99,7 @@ class ForceMergeIndexRespParserTests : BaseRespParserTests() { Assert.assertEquals(ret.title, "Force merge operation on [test-cluster/test-index-1] has failed") Assert.assertEquals( ret.message, - "index [test-index-1] index not exists." 
+ "index [test-index-1] index not exists.", ) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/OpenRespParserTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/OpenRespParserTests.kt index 85e312f3e..5019dae08 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/OpenRespParserTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/OpenRespParserTests.kt @@ -23,7 +23,6 @@ import org.opensearch.indexmanagement.controlcenter.notification.filter.Operatio import kotlin.Exception class OpenRespParserTests : BaseRespParserTests() { - private lateinit var activeShardsObserver: ActiveShardsObserver private lateinit var indexNameExpressionResolver: IndexNameExpressionResolver diff --git a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ReindexRespParserTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ReindexRespParserTests.kt index ddaed9112..e0ccab06a 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ReindexRespParserTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ReindexRespParserTests.kt @@ -11,51 +11,52 @@ import org.opensearch.OpenSearchException import org.opensearch.action.bulk.BulkItemResponse import org.opensearch.common.unit.TimeValue import org.opensearch.core.index.Index +import org.opensearch.core.tasks.TaskId import org.opensearch.index.reindex.BulkByScrollResponse import org.opensearch.index.reindex.BulkByScrollTask import org.opensearch.index.reindex.ReindexAction import org.opensearch.index.reindex.ReindexRequest import org.opensearch.indexmanagement.controlcenter.notification.filter.OperationResult import org.opensearch.tasks.Task -import org.opensearch.core.tasks.TaskId import java.lang.Exception import java.util.concurrent.TimeUnit class ReindexRespParserTests : BaseRespParserTests() { - private lateinit var task: Task private lateinit var request: ReindexRequest @Before fun setup() { task = Task(1, "transport", ReindexAction.NAME, "reindex from src to dest", TaskId.EMPTY_TASK_ID, mapOf()) - request = ReindexRequest().also { - it.searchRequest.indices("source") - it.destination.index("dest") - } + request = + ReindexRequest().also { + it.searchRequest.indices("source") + it.destination.index("dest") + } } fun `test build message for completion`() { - val response = BulkByScrollResponse( - TimeValue(1, TimeUnit.SECONDS), - BulkByScrollTask.Status( - 1, - 100, - 0, - 100, - 0, - 1, - 0, - 0, - 0, - 0, - TimeValue(0, TimeUnit.SECONDS), - 0.0f, - "", - TimeValue(0, TimeUnit.SECONDS) - ), - listOf(), listOf(), false - ) + val response = + BulkByScrollResponse( + TimeValue(1, TimeUnit.SECONDS), + BulkByScrollTask.Status( + 1, + 100, + 0, + 100, + 0, + 1, + 0, + 0, + 0, + 0, + TimeValue(0, TimeUnit.SECONDS), + 0.0f, + "", + TimeValue(0, TimeUnit.SECONDS), + ), + listOf(), listOf(), false, + ) val parser = ReindexRespParser(task, request, clusterService) val msg = parser.buildNotificationMessage(response) @@ -64,7 +65,7 @@ class ReindexRespParserTests : BaseRespParserTests() { "The reindex operation from [test-cluster/source] to [test-cluster/dest] has been completed.\n" + "\n" + "*Summary (number of documents)* \n" + - "Total: 100, Created: 100, Updated: 0, Deleted: 0, Conflicts: 0" + "Total: 100, 
Created: 100, Updated: 0, Deleted: 0, Conflicts: 0", ) val title = parser.buildNotificationTitle(OperationResult.COMPLETE) @@ -72,26 +73,27 @@ class ReindexRespParserTests : BaseRespParserTests() { } fun `test build message for cancellation`() { - val response = BulkByScrollResponse( - TimeValue(1, TimeUnit.SECONDS), - BulkByScrollTask.Status( - 1, - 100, - 0, - 20, - 0, - 1, - 0, - 0, - 0, - 0, - TimeValue(0, TimeUnit.SECONDS), - 0.0f, - "user cancelled", - TimeValue(0, TimeUnit.SECONDS) - ), - listOf(), listOf(), false - ) + val response = + BulkByScrollResponse( + TimeValue(1, TimeUnit.SECONDS), + BulkByScrollTask.Status( + 1, + 100, + 0, + 20, + 0, + 1, + 0, + 0, + 0, + 0, + TimeValue(0, TimeUnit.SECONDS), + 0.0f, + "user cancelled", + TimeValue(0, TimeUnit.SECONDS), + ), + listOf(), listOf(), false, + ) val parser = ReindexRespParser(task, request, clusterService) val msg = parser.buildNotificationMessage(response) @@ -100,7 +102,7 @@ class ReindexRespParserTests : BaseRespParserTests() { "The reindex operation from [test-cluster/source] to [test-cluster/dest] has been cancelled by user's request\n" + "\n" + "*Summary (number of documents)* \n" + - "Total: 100, Created: 20, Updated: 0, Deleted: 0, Conflicts: 0" + "Total: 100, Created: 20, Updated: 0, Deleted: 0, Conflicts: 0", ) val title = parser.buildNotificationTitle(OperationResult.CANCELLED) @@ -108,28 +110,29 @@ class ReindexRespParserTests : BaseRespParserTests() { } fun `test build message for failure`() { - val response = BulkByScrollResponse( - TimeValue(1, TimeUnit.SECONDS), - BulkByScrollTask.Status( - 1, - 100, - 0, - 99, - 0, - 1, - 1, - 0, - 0, - 0, - TimeValue(0, TimeUnit.SECONDS), - 0.0f, - "", - TimeValue( - 0, TimeUnit.SECONDS - ) - ), - listOf(BulkItemResponse.Failure("dest", "id-1", Exception("version conflicts"))), listOf(), false - ) + val response = + BulkByScrollResponse( + TimeValue(1, TimeUnit.SECONDS), + BulkByScrollTask.Status( + 1, + 100, + 0, + 99, + 0, + 1, + 1, + 0, + 0, + 0, + TimeValue(0, TimeUnit.SECONDS), + 0.0f, + "", + TimeValue( + 0, TimeUnit.SECONDS, + ), + ), + listOf(BulkItemResponse.Failure("dest", "id-1", Exception("version conflicts"))), listOf(), false, + ) val parser = ReindexRespParser(task, request, clusterService) val msg = parser.buildNotificationMessage(response) @@ -142,7 +145,7 @@ class ReindexRespParserTests : BaseRespParserTests() { "To see full errors, use `GET /_tasks/mJzoy8SBuTW12rbV8jSg:1`\n" + "\n" + "*Summary (number of documents)* \n" + - "Total: 100, Created: 99, Updated: 0, Deleted: 0, Conflicts: 1" + "Total: 100, Created: 99, Updated: 0, Deleted: 0, Conflicts: 1", ) val title = parser.buildNotificationTitle(OperationResult.FAILED) @@ -159,7 +162,7 @@ class ReindexRespParserTests : BaseRespParserTests() { Assert.assertEquals(ret.title, "Reindex operation on [test-cluster/source] has failed") Assert.assertEquals( ret.message, - "The reindex operation from [test-cluster/source] to [test-cluster/dest] has failed. index doest not exists" + "The reindex operation from [test-cluster/source] to [test-cluster/dest] has failed. 
index doest not exists", ) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ResizeIndexRespParserTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ResizeIndexRespParserTests.kt index 4456c695d..6a8cb0834 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ResizeIndexRespParserTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/filter/parser/ResizeIndexRespParserTests.kt @@ -25,7 +25,6 @@ import org.opensearch.indexmanagement.controlcenter.notification.filter.Operatio import java.lang.IllegalStateException class ResizeIndexRespParserTests : BaseRespParserTests() { - private lateinit var activeShardsObserver: ActiveShardsObserver @Before @@ -75,9 +74,10 @@ class ResizeIndexRespParserTests : BaseRespParserTests() { Mockito.eq(ActiveShardCount.DEFAULT), eq(TimeValue.timeValueMinutes(50)), any(), - any() + any(), ) } + fun `test source index not exist exception`() { val request = ResizeRequest("target", "source_index") request.resizeType = ResizeType.SHRINK @@ -117,11 +117,12 @@ class ResizeIndexRespParserTests : BaseRespParserTests() { "You must allocate a copy of every shard of the source index to the same node before split. To allocate it to same node, try use PUT /source_index/_settings\n" + "{\n" + "\"index.routing.allocation.require._name\":\"your_node_name\"\n" + - "}" + "}", ) Assert.assertEquals(ret.title, "Split operation on [test-cluster/source_index] has failed") } } + fun `test not all shards are started timeout`() { val request = ResizeRequest("target", "source") request.resizeType = ResizeType.SHRINK @@ -132,7 +133,7 @@ class ResizeIndexRespParserTests : BaseRespParserTests() { parser.parseAndSendNotification(response) { ret -> Assert.assertEquals( ret.message, - "The shrink operation from [test-cluster/source] to [test-cluster/target] has taken more than 4h to complete. To see the latest status, use `GET /target/_recovery`" + "The shrink operation from [test-cluster/source] to [test-cluster/target] has taken more than 4h to complete. To see the latest status, use `GET /target/_recovery`", ) Assert.assertEquals(ret.title, "Shrink operation on [test-cluster/source] has timed out") } @@ -163,7 +164,7 @@ class ResizeIndexRespParserTests : BaseRespParserTests() { val msg = parser.buildNotificationMessage(response, ResourceAlreadyExistsException(Index("target-index", "uuid"))) Assert.assertEquals( msg, - "The target index [test-cluster/target-index] already exists." + "The target index [test-cluster/target-index] already exists.", ) val title = parser.buildNotificationTitle(OperationResult.FAILED) @@ -179,7 +180,7 @@ class ResizeIndexRespParserTests : BaseRespParserTests() { val msg = parser.buildNotificationMessage(response, isTimeout = true) Assert.assertEquals( msg, - "The split operation from [test-cluster/source] to [test-cluster/target] has taken more than 1h to complete. To see the latest status, use `GET /target/_recovery`" + "The split operation from [test-cluster/source] to [test-cluster/target] has taken more than 1h to complete. 
To see the latest status, use `GET /target/_recovery`", ) val title = parser.buildNotificationTitle(OperationResult.TIMEOUT) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/LRONConfigRestTestCase.kt b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/LRONConfigRestTestCase.kt index e67a64bdc..00e62a2d1 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/LRONConfigRestTestCase.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/LRONConfigRestTestCase.kt @@ -13,18 +13,18 @@ import org.junit.AfterClass import org.junit.Before import org.opensearch.client.Response import org.opensearch.client.ResponseException +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.IndexManagementRestTestCase import org.opensearch.indexmanagement.controlcenter.notification.initNodeIdsInRestIT import org.opensearch.indexmanagement.controlcenter.notification.model.LRONConfig import org.opensearch.indexmanagement.controlcenter.notification.toJsonString import org.opensearch.indexmanagement.makeRequest -import org.opensearch.core.rest.RestStatus abstract class LRONConfigRestTestCase : IndexManagementRestTestCase() { @Before fun prepareForIT() { - /* init cluster node ids in integ test */ + // init cluster node ids in integ test initNodeIdsInRestIT(client()) } @@ -35,11 +35,11 @@ abstract class LRONConfigRestTestCase : IndexManagementRestTestCase() { "POST", "${IndexManagementPlugin.CONTROL_CENTER_INDEX}/_delete_by_query", mapOf("refresh" to "true"), - StringEntity("""{"query": {"match_all": {}}}""", ContentType.APPLICATION_JSON) + StringEntity("""{"query": {"match_all": {}}}""", ContentType.APPLICATION_JSON), ) } catch (e: ResponseException) { logger.info(e.response.asMap()) - /* ignore if the index has not been created */ + // ignore if the index has not been created assertEquals("Unexpected status", RestStatus.NOT_FOUND, e.response.restStatus()) } } @@ -56,7 +56,7 @@ abstract class LRONConfigRestTestCase : IndexManagementRestTestCase() { try { adminClient().makeRequest("DELETE", IndexManagementPlugin.CONTROL_CENTER_INDEX, emptyMap()) } catch (e: ResponseException) { - /* ignore if the index has not been created */ + // ignore if the index has not been created assertEquals("Unexpected status", RestStatus.NOT_FOUND, RestStatus.fromCode(e.response.statusLine.statusCode)) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestDeleteLRONConfigActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestDeleteLRONConfigActionIT.kt index e603ea886..53411a79b 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestDeleteLRONConfigActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestDeleteLRONConfigActionIT.kt @@ -7,13 +7,13 @@ package org.opensearch.indexmanagement.controlcenter.notification.resthandler import org.junit.Assert import org.opensearch.client.ResponseException +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.controlcenter.notification.getResourceURI import org.opensearch.indexmanagement.controlcenter.notification.nodeIdsInRestIT import org.opensearch.indexmanagement.controlcenter.notification.randomLRONConfig import 
org.opensearch.indexmanagement.controlcenter.notification.randomTaskId import org.opensearch.indexmanagement.controlcenter.notification.util.getDocID import org.opensearch.indexmanagement.makeRequest -import org.opensearch.core.rest.RestStatus @Suppress("UNCHECKED_CAST") class RestDeleteLRONConfigActionIT : LRONConfigRestTestCase() { @@ -39,7 +39,7 @@ class RestDeleteLRONConfigActionIT : LRONConfigRestTestCase() { } fun `test delete nonexist LRONConfig response`() { - /* index a random doc to create .opensearch-control-center index */ + // index a random doc to create .opensearch-control-center index createLRONConfig(randomLRONConfig(taskId = randomTaskId(nodeId = nodeIdsInRestIT.random()))) val lronConfig = randomLRONConfig(taskId = randomTaskId(nodeId = nodeIdsInRestIT.random())) val response = client().makeRequest("DELETE", getResourceURI(lronConfig.taskId, lronConfig.actionName)) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestGetLRONConfigActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestGetLRONConfigActionIT.kt index c2193a7b9..1e4a803b4 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestGetLRONConfigActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestGetLRONConfigActionIT.kt @@ -7,6 +7,7 @@ package org.opensearch.indexmanagement.controlcenter.notification.resthandler import org.junit.Assert import org.opensearch.client.ResponseException +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.controlcenter.notification.getResourceURI import org.opensearch.indexmanagement.controlcenter.notification.model.LRONConfig @@ -16,7 +17,6 @@ import org.opensearch.indexmanagement.controlcenter.notification.randomTaskId import org.opensearch.indexmanagement.controlcenter.notification.util.getDocID import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.opensearchapi.convertToMap -import org.opensearch.core.rest.RestStatus import org.opensearch.test.OpenSearchTestCase @Suppress("UNCHECKED_CAST") @@ -34,12 +34,12 @@ class RestGetLRONConfigActionIT : LRONConfigRestTestCase() { Assert.assertEquals( "not same LRONConfig", lronConfigMap.filterKeys { it != LRONConfig.USER_FIELD && it != LRONConfig.PRIORITY_FIELD }, - responseBody["lron_config"] as Map + responseBody["lron_config"] as Map, ) } fun `test get nonexist LRONConfig fails`() { - /* index a random doc to create .opensearch-control-center index */ + // index a random doc to create .opensearch-control-center index createLRONConfig(randomLRONConfig(taskId = randomTaskId(nodeId = nodeIdsInRestIT.random()))) try { val lronConfig = randomLRONConfig(taskId = randomTaskId(nodeId = nodeIdsInRestIT.random())) @@ -52,11 +52,12 @@ class RestGetLRONConfigActionIT : LRONConfigRestTestCase() { } fun `test get all LRONConfigs`() { - /* LRONConfigRestTestCase index a doc to auto create the index, here we wipe the index before count doc number */ + // LRONConfigRestTestCase index a doc to auto create the index, here we wipe the index before count doc number removeControlCenterIndex() - val lronConfigResponses = randomList(1, 15) { - createLRONConfig(randomLRONConfig(taskId = randomTaskId(nodeId = nodeIdsInRestIT.random()))).asMap() - } + val lronConfigResponses = + randomList(1, 15) { + createLRONConfig(randomLRONConfig(taskId = 
randomTaskId(nodeId = nodeIdsInRestIT.random()))).asMap() + } val response = client().makeRequest("GET", IndexManagementPlugin.LRON_BASE_URI) assertEquals("get LRONConfigs failed", RestStatus.OK, response.restStatus()) val responseBody = response.asMap() @@ -71,7 +72,7 @@ class RestGetLRONConfigActionIT : LRONConfigRestTestCase() { assertEquals( "different lronConfigResponse", lronConfigResponse[LRONConfig.LRON_CONFIG_FIELD], - resLRONConfigResponse!![LRONConfig.LRON_CONFIG_FIELD] + resLRONConfigResponse!![LRONConfig.LRON_CONFIG_FIELD], ) } } @@ -82,7 +83,7 @@ class RestGetLRONConfigActionIT : LRONConfigRestTestCase() { client().makeRequest( "GET", getResourceURI(lronConfig.taskId, lronConfig.actionName), - mapOf("size" to "10") + mapOf("size" to "10"), ) Assert.fail("Expected 400 BAD_REQUEST") } catch (e: ResponseException) { diff --git a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestIndexLRONConfigActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestIndexLRONConfigActionIT.kt index 00ecd594a..1e7cdd79a 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestIndexLRONConfigActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/resthandler/RestIndexLRONConfigActionIT.kt @@ -12,6 +12,7 @@ import kotlinx.coroutines.runBlocking import org.junit.Assert import org.opensearch.client.ResponseException import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.controlcenter.notification.getResourceURI import org.opensearch.indexmanagement.controlcenter.notification.model.LRONConfig @@ -24,7 +25,6 @@ import org.opensearch.indexmanagement.indexstatemanagement.randomChannel import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.opensearchapi.convertToMap import org.opensearch.indexmanagement.util.DRY_RUN -import org.opensearch.core.rest.RestStatus import java.util.concurrent.Executors @Suppress("UNCHECKED_CAST") @@ -40,7 +40,7 @@ class RestIndexLRONConfigActionIT : LRONConfigRestTestCase() { Assert.assertEquals( "not same LRONConfig", lronConfigMap.filterKeys { it != LRONConfig.USER_FIELD && it != LRONConfig.PRIORITY_FIELD }, - responseBody["lron_config"] as Map + responseBody["lron_config"] as Map, ) } @@ -51,7 +51,7 @@ class RestIndexLRONConfigActionIT : LRONConfigRestTestCase() { "POST", getResourceURI(lronConfig.taskId, lronConfig.actionName), emptyMap(), - lronConfig.toHttpEntity() + lronConfig.toHttpEntity(), ) fail("Expected 405 METHOD_NOT_ALLOWED") } catch (e: ResponseException) { @@ -74,21 +74,23 @@ class RestIndexLRONConfigActionIT : LRONConfigRestTestCase() { val lronConfig = randomLRONConfig(taskId = randomTaskId(nodeId = nodeIdsInRestIT.random())) createLRONConfig(lronConfig) - val newLRONConfig = LRONConfig( - lronCondition = randomLRONCondition(), - taskId = lronConfig.taskId, - actionName = lronConfig.actionName, - channels = List(10) { randomChannel() }, - user = null, - priority = null - ) + val newLRONConfig = + LRONConfig( + lronCondition = randomLRONCondition(), + taskId = lronConfig.taskId, + actionName = lronConfig.actionName, + channels = List(10) { randomChannel() }, + user = null, + priority = null, + ) - val response = client().makeRequest( - "PUT", - getResourceURI(lronConfig.taskId, lronConfig.actionName), - emptyMap(), - 
newLRONConfig.toHttpEntity() - ) + val response = + client().makeRequest( + "PUT", + getResourceURI(lronConfig.taskId, lronConfig.actionName), + emptyMap(), + newLRONConfig.toHttpEntity(), + ) assertEquals("update LRONConfig failed", RestStatus.OK, response.restStatus()) val responseBody = response.asMap() @@ -98,25 +100,26 @@ class RestIndexLRONConfigActionIT : LRONConfigRestTestCase() { Assert.assertEquals( "not same LRONConfig", newLRONConfigMap.filterKeys { it != LRONConfig.USER_FIELD && it != LRONConfig.PRIORITY_FIELD }, - responseBody["lron_config"] as Map + responseBody["lron_config"] as Map, ) } fun `test create LRONConfig using PUT`() { val lronConfig = randomLRONConfig(taskId = randomTaskId(nodeId = nodeIdsInRestIT.random())) - val response = client().makeRequest( - "PUT", - getResourceURI(lronConfig.taskId, lronConfig.actionName), - emptyMap(), - lronConfig.toHttpEntity() - ) + val response = + client().makeRequest( + "PUT", + getResourceURI(lronConfig.taskId, lronConfig.actionName), + emptyMap(), + lronConfig.toHttpEntity(), + ) assertEquals("autocreate LRONConfig failed", RestStatus.OK, response.restStatus()) val responseBody = response.asMap() val lronConfigMap = lronConfig.convertToMap()[LRONConfig.LRON_CONFIG_FIELD] as Map Assert.assertEquals( "not same LRONConfig", lronConfigMap.filterKeys { it != LRONConfig.USER_FIELD && it != LRONConfig.PRIORITY_FIELD }, - responseBody["lron_config"] as Map + responseBody["lron_config"] as Map, ) } @@ -127,7 +130,7 @@ class RestIndexLRONConfigActionIT : LRONConfigRestTestCase() { "PUT", IndexManagementPlugin.LRON_BASE_URI, emptyMap(), - lronConfig.toHttpEntity() + lronConfig.toHttpEntity(), ) fail("Expected 405 METHOD_NOT_ALLOWED") } catch (e: ResponseException) { @@ -137,18 +140,18 @@ class RestIndexLRONConfigActionIT : LRONConfigRestTestCase() { fun `test creating LRONConfig dryRun`() { val lronConfig = randomLRONConfig(taskId = randomTaskId(nodeId = nodeIdsInRestIT.random())) - /* first use POST and PUT to create, then try to get */ + // first use POST and PUT to create, then try to get client().makeRequest( "POST", IndexManagementPlugin.LRON_BASE_URI, mapOf(DRY_RUN to "true"), - lronConfig.toHttpEntity() + lronConfig.toHttpEntity(), ) client().makeRequest( "PUT", getResourceURI(lronConfig.taskId, lronConfig.actionName), mapOf(DRY_RUN to "true"), - lronConfig.toHttpEntity() + lronConfig.toHttpEntity(), ) try { client().makeRequest("GET", getResourceURI(lronConfig.taskId, lronConfig.actionName)) @@ -165,11 +168,12 @@ class RestIndexLRONConfigActionIT : LRONConfigRestTestCase() { var response = createLRONConfig(lronConfig) assertEquals("Create LRONConfig failed", RestStatus.OK, response.restStatus()) removeControlCenterIndex() - response = client().makeRequest( - "PUT", - getResourceURI(lronConfig.taskId, lronConfig.actionName), - lronConfig.toHttpEntity() - ) + response = + client().makeRequest( + "PUT", + getResourceURI(lronConfig.taskId, lronConfig.actionName), + lronConfig.toHttpEntity(), + ) assertEquals("Create LRONConfig failed", RestStatus.OK, response.restStatus()) } @@ -181,11 +185,12 @@ class RestIndexLRONConfigActionIT : LRONConfigRestTestCase() { val response = client().makeRequest("GET", "/${IndexManagementPlugin.CONTROL_CENTER_INDEX}/_mapping") val parserMap = createParser(XContentType.JSON.xContent(), response.entity.content).map() as Map> val mappingsMap = parserMap[IndexManagementPlugin.CONTROL_CENTER_INDEX]!!["mappings"] as Map - val expected = createParser( - XContentType.JSON.xContent(), - 
javaClass.classLoader.getResource("mappings/opensearch-control-center.json")!! - .readText() - ) + val expected = + createParser( + XContentType.JSON.xContent(), + javaClass.classLoader.getResource("mappings/opensearch-control-center.json")!! + .readText(), + ) val expectedMap = expected.map() assertEquals("Mappings are different", expectedMap, mappingsMap) @@ -199,11 +204,12 @@ class RestIndexLRONConfigActionIT : LRONConfigRestTestCase() { try { runBlocking { val dispatcher = threadPool.asCoroutineDispatcher() - val responses = lronConfigs.map { - async(dispatcher) { - createLRONConfig(it) - } - }.awaitAll() + val responses = + lronConfigs.map { + async(dispatcher) { + createLRONConfig(it) + } + }.awaitAll() responses.forEach { assertEquals("Create LRONConfig failed", RestStatus.OK, it.restStatus()) } } } finally { diff --git a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/util/LRONUtilsTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/util/LRONUtilsTests.kt index 527cf2401..b89837591 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/util/LRONUtilsTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/controlcenter/notification/util/LRONUtilsTests.kt @@ -6,10 +6,10 @@ package org.opensearch.indexmanagement.controlcenter.notification.util import org.junit.Assert +import org.opensearch.core.tasks.TaskId import org.opensearch.index.reindex.ReindexAction import org.opensearch.indexmanagement.controlcenter.notification.randomActionName import org.opensearch.indexmanagement.controlcenter.notification.randomTaskId -import org.opensearch.core.tasks.TaskId import org.opensearch.test.OpenSearchTestCase import kotlin.IllegalArgumentException diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/IndexMetadataProviderTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/IndexMetadataProviderTests.kt index 757eff33f..af79a8a61 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/IndexMetadataProviderTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/IndexMetadataProviderTests.kt @@ -20,7 +20,6 @@ import org.opensearch.test.OpenSearchTestCase import org.opensearch.test.rest.OpenSearchRestTestCase class IndexMetadataProviderTests : OpenSearchTestCase() { - private val clusterService: ClusterService = mock() private val client: Client = mock() private val settings: Settings = Settings.EMPTY @@ -42,8 +41,8 @@ class IndexMetadataProviderTests : OpenSearchTestCase() { "Should not manage index management config index", indexEvaluator.isUnManageableIndex( IndexManagementPlugin - .INDEX_MANAGEMENT_INDEX - ) + .INDEX_MANAGEMENT_INDEX, + ), ) assertTrue("Should not manage kibana index", indexEvaluator.isUnManageableIndex(".kibana_1242142_user")) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/IndexStateManagementRestTestCase.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/IndexStateManagementRestTestCase.kt index ba61a2769..21f27f5af 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/IndexStateManagementRestTestCase.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/IndexStateManagementRestTestCase.kt @@ -5,9 +5,9 @@ package org.opensearch.indexmanagement.indexstatemanagement +import org.apache.hc.core5.http.ContentType import org.apache.hc.core5.http.HttpEntity import 
org.apache.hc.core5.http.HttpHeaders -import org.apache.hc.core5.http.ContentType import org.apache.hc.core5.http.io.entity.StringEntity import org.apache.hc.core5.http.message.BasicHeader import org.junit.After @@ -23,13 +23,14 @@ import org.opensearch.cluster.ClusterModule import org.opensearch.cluster.metadata.IndexMetadata import org.opensearch.common.settings.Settings import org.opensearch.common.unit.TimeValue -import org.opensearch.core.xcontent.DeprecationHandler import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.XContentType +import org.opensearch.common.xcontent.json.JsonXContent.jsonXContent +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.DeprecationHandler import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.XContentParser.Token import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.common.xcontent.XContentType -import org.opensearch.common.xcontent.json.JsonXContent.jsonXContent import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.IndexManagementIndices import org.opensearch.indexmanagement.IndexManagementPlugin @@ -38,7 +39,13 @@ import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_STAT import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.ISM_BASE_URI import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.POLICY_BASE_URI import org.opensearch.indexmanagement.IndexManagementRestTestCase +import org.opensearch.indexmanagement.indexstatemanagement.model.ChangePolicy +import org.opensearch.indexmanagement.indexstatemanagement.model.ExplainFilter +import org.opensearch.indexmanagement.indexstatemanagement.model.ISMTemplate +import org.opensearch.indexmanagement.indexstatemanagement.model.ManagedIndexConfig +import org.opensearch.indexmanagement.indexstatemanagement.model.Policy import org.opensearch.indexmanagement.indexstatemanagement.model.Policy.Companion.POLICY_TYPE +import org.opensearch.indexmanagement.indexstatemanagement.model.StateFilter import org.opensearch.indexmanagement.indexstatemanagement.resthandler.RestExplainAction import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings import org.opensearch.indexmanagement.indexstatemanagement.util.FAILED_INDICES @@ -50,6 +57,7 @@ import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.opensearchapi.parseWithType import org.opensearch.indexmanagement.rollup.model.Rollup import org.opensearch.indexmanagement.rollup.model.RollupMetadata +import org.opensearch.indexmanagement.rollup.randomTermQuery import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ActionMetaData import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData import org.opensearch.indexmanagement.spi.indexstatemanagement.model.PolicyRetryInfoMetaData @@ -65,14 +73,6 @@ import org.opensearch.indexmanagement.waitFor import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule import org.opensearch.rest.RestRequest import org.opensearch.search.SearchModule -import org.opensearch.core.rest.RestStatus -import org.opensearch.indexmanagement.indexstatemanagement.model.ChangePolicy -import org.opensearch.indexmanagement.indexstatemanagement.model.ExplainFilter -import org.opensearch.indexmanagement.indexstatemanagement.model.ISMTemplate -import 
org.opensearch.indexmanagement.indexstatemanagement.model.ManagedIndexConfig -import org.opensearch.indexmanagement.indexstatemanagement.model.Policy -import org.opensearch.indexmanagement.indexstatemanagement.model.StateFilter -import org.opensearch.indexmanagement.rollup.randomTermQuery import org.opensearch.test.OpenSearchTestCase import java.io.IOException import java.time.Duration @@ -80,7 +80,6 @@ import java.time.Instant import java.util.Locale abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() { - @After fun clearIndicesAfterEachTest() { wipeAllIndices(skip = isBWCTest) @@ -107,22 +106,23 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() policy: Policy, policyId: String = OpenSearchTestCase.randomAlphaOfLength(10), refresh: Boolean = true, - userClient: RestClient? = null + userClient: RestClient? = null, ): Policy { val response = createPolicyJson(policy.toJsonString(), policyId, refresh, userClient) - val policyJson = jsonXContent - .createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - response.entity.content - ).map() + val policyJson = + jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.entity.content, + ).map() val createdId = policyJson["_id"] as String assertEquals("policy ids are not the same", policyId, createdId) return policy.copy( id = createdId, seqNo = (policyJson["_seq_no"] as Int).toLong(), - primaryTerm = (policyJson["_primary_term"] as Int).toLong() + primaryTerm = (policyJson["_primary_term"] as Int).toLong(), ) } @@ -130,16 +130,17 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() policyString: String, policyId: String, refresh: Boolean = true, - userClient: RestClient? = null + userClient: RestClient? = null, ): Response { val client = userClient ?: client() - val response = client - .makeRequest( - "PUT", - "$POLICY_BASE_URI/$policyId?refresh=$refresh", - emptyMap(), - StringEntity(policyString, ContentType.APPLICATION_JSON) - ) + val response = + client + .makeRequest( + "PUT", + "$POLICY_BASE_URI/$policyId?refresh=$refresh", + emptyMap(), + StringEntity(policyString, ContentType.APPLICATION_JSON), + ) assertEquals("Unable to create a new policy", RestStatus.CREATED, response.restStatus()) return response } @@ -152,7 +153,7 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() protected fun getPolicy( policyId: String, - header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ): Policy { val response = client().makeRequest("GET", "$POLICY_BASE_URI/$policyId", null, header) assertEquals("Unable to get policy $policyId", RestStatus.OK, response.restStatus()) @@ -179,8 +180,9 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() } protected fun removePolicy(index: String): Response { - val response = client() - .makeRequest("POST", "$ISM_BASE_URI/remove/$index") + val response = + client() + .makeRequest("POST", "$ISM_BASE_URI/remove/$index") assertEquals("Request failed", RestStatus.OK, response.restStatus()) return response } @@ -195,18 +197,19 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() settings: Settings? 
= null, ): Pair { val waitForActiveShards = if (isMultiNode) "all" else "1" - val builtSettings = Settings.builder().let { - if (alias == null) { - it.putNull(ManagedIndexSettings.ROLLOVER_ALIAS.key) - } else { - it.put(ManagedIndexSettings.ROLLOVER_ALIAS.key, alias) - } - it.put(INDEX_NUMBER_OF_REPLICAS, replicas ?: "1") - it.put(INDEX_NUMBER_OF_SHARDS, shards ?: "1") - it.put("index.write.wait_for_active_shards", waitForActiveShards) - if (settings != null) it.put(settings) - it - }.build() + val builtSettings = + Settings.builder().let { + if (alias == null) { + it.putNull(ManagedIndexSettings.ROLLOVER_ALIAS.key) + } else { + it.put(ManagedIndexSettings.ROLLOVER_ALIAS.key, alias) + } + it.put(INDEX_NUMBER_OF_REPLICAS, replicas ?: "1") + it.put(INDEX_NUMBER_OF_SHARDS, shards ?: "1") + it.put("index.write.wait_for_active_shards", waitForActiveShards) + if (settings != null) it.put(settings) + it + }.build() val aliases = if (alias == null) "" else "\"$alias\": { \"is_write_index\": true }" createIndex(index, builtSettings, mapping, aliases) if (policyID != null) { @@ -217,22 +220,24 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() protected fun createDataStream( dataStream: String, - template: StringEntity? = null + template: StringEntity? = null, ) { - val dataStreamTemplate = template ?: StringEntity( - """ - { - "data_stream": {}, - "index_patterns": ["$dataStream"] - } - """.trimIndent(), - ContentType.APPLICATION_JSON - ) - val res = client().makeRequest( - "PUT", - "/_index_template/transform-data-stream-template", - dataStreamTemplate - ) + val dataStreamTemplate = + template ?: StringEntity( + """ + { + "data_stream": {}, + "index_patterns": ["$dataStream"] + } + """.trimIndent(), + ContentType.APPLICATION_JSON, + ) + val res = + client().makeRequest( + "PUT", + "/_index_template/transform-data-stream-template", + dataStreamTemplate, + ) assertEquals("Unexpected RestStatus", RestStatus.OK, res.restStatus()) val response = client().makeRequest("PUT", "/_data_stream/$dataStream") assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) @@ -247,17 +252,23 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() searchRouting: Int = randomInt(), indexRouting: Int = randomInt(), filter: String = randomTermQuery().toString(), - isHidden: Boolean = randomBoolean() + isHidden: Boolean = randomBoolean(), ) { val isWriteIndexField = if (isWriteIndex) "\",\"is_write_index\": \"$isWriteIndex" else "" - val params = if (action == "add" && routing != null) """ - ,"routing": $routing, - "search_routing": $searchRouting, - "index_routing": $indexRouting, - "filter": $filter, - "is_hidden": $isHidden - """.trimIndent() else "" - val body = """ + val params = + if (action == "add" && routing != null) { + """ + ,"routing": $routing, + "search_routing": $searchRouting, + "index_routing": $indexRouting, + "filter": $filter, + "is_hidden": $isHidden + """.trimIndent() + } else { + "" + } + val body = + """ { "actions": [ { @@ -268,7 +279,7 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() } ] } - """.trimIndent() + """.trimIndent() val response = client().makeRequest("POST", "_aliases", StringEntity(body, ContentType.APPLICATION_JSON)) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) } @@ -281,13 +292,14 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() protected fun addPolicyToIndex( index: String, - policyID: String + policyID: 
String, ) { - val body = """ + val body = + """ { "policy_id": "$policyID" } - """.trimIndent() + """.trimIndent() val response = client().makeRequest("POST", "/_opendistro/_ism/add/$index", StringEntity(body, ContentType.APPLICATION_JSON)) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) } @@ -303,17 +315,19 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() protected fun updateClusterSetting(key: String, value: String, escapeValue: Boolean = true) { val formattedValue = if (escapeValue) "\"$value\"" else value - val request = """ + val request = + """ { "persistent": { "$key": $formattedValue } } - """.trimIndent() - val res = client().makeRequest( - "PUT", "_cluster/settings", emptyMap(), - StringEntity(request, ContentType.APPLICATION_JSON) - ) + """.trimIndent() + val res = + client().makeRequest( + "PUT", "_cluster/settings", emptyMap(), + StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Request failed", RestStatus.OK, res.restStatus()) } @@ -328,29 +342,33 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() protected fun updateIndexSetting( index: String, key: String, - value: String + value: String, ) { - val body = """ + val body = + """ { "$key" : "$value" } - """.trimIndent() - val res = client().makeRequest( - "PUT", "$index/_settings", emptyMap(), - StringEntity(body, ContentType.APPLICATION_JSON) - ) + """.trimIndent() + val res = + client().makeRequest( + "PUT", "$index/_settings", emptyMap(), + StringEntity(body, ContentType.APPLICATION_JSON), + ) assertEquals("Update index setting failed", RestStatus.OK, res.restStatus()) } protected fun getIndexSetting(index: String) { - val res = client().makeRequest( - "GET", "$index/_settings", emptyMap() - ) + val res = + client().makeRequest( + "GET", "$index/_settings", emptyMap(), + ) assertEquals("Update index setting failed", RestStatus.OK, res.restStatus()) } protected fun getManagedIndexConfig(index: String): ManagedIndexConfig? 
{ - val request = """ + val request = + """ { "seq_no_primary_term": true, "query": { @@ -359,11 +377,12 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() } } } - """.trimIndent() - val response = client().makeRequest( - "POST", "$INDEX_MANAGEMENT_INDEX/_search", emptyMap(), - StringEntity(request, ContentType.APPLICATION_JSON) - ) + """.trimIndent() + val response = + client().makeRequest( + "POST", "$INDEX_MANAGEMENT_INDEX/_search", emptyMap(), + StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Request failed", RestStatus.OK, response.restStatus()) val searchResponse = SearchResponse.fromXContent(createParser(jsonXContent, response.entity.content)) assertTrue("Found more than one managed index config", searchResponse.hits.hits.size < 2) @@ -387,7 +406,8 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() @Suppress("UNCHECKED_CAST") protected fun getHistorySearchResponse(index: String): SearchResponse { - val request = """ + val request = + """ { "seq_no_primary_term": true, "sort": [ @@ -399,11 +419,12 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() } } } - """.trimIndent() - val response = client().makeRequest( - "POST", "${IndexManagementIndices.HISTORY_ALL}/_search", emptyMap(), - StringEntity(request, ContentType.APPLICATION_JSON) - ) + """.trimIndent() + val response = + client().makeRequest( + "POST", "${IndexManagementIndices.HISTORY_ALL}/_search", emptyMap(), + StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Request failed", RestStatus.OK, response.restStatus()) return SearchResponse.fromXContent(createParser(jsonXContent, response.entity.content)) } @@ -434,7 +455,7 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() client().makeRequest( "GET", "_cluster/allocation/explain", - StringEntity("{ \"index\": \"$INDEX_MANAGEMENT_INDEX\" }", ContentType.APPLICATION_JSON) + StringEntity("{ \"index\": \"$INDEX_MANAGEMENT_INDEX\" }", ContentType.APPLICATION_JSON), ) fail("Expected 400 Bad Request when there are no unassigned shards to explain") } catch (e: ResponseException) { @@ -447,26 +468,28 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() val startTimeMillis = desiredStartTimeMillis ?: Instant.now().toEpochMilli() - millis val waitForActiveShards = if (isMultiNode) "all" else "1" val endpoint = "$INDEX_MANAGEMENT_INDEX/_update/${update.id}?wait_for_active_shards=$waitForActiveShards;retry_on_conflict=$retryOnConflict" - val response = client().makeRequest( - "POST", endpoint, - StringEntity( - "{\"doc\":{\"managed_index\":{\"schedule\":{\"interval\":{\"start_time\":" + - "\"$startTimeMillis\"}}}}}", - ContentType.APPLICATION_JSON + val response = + client().makeRequest( + "POST", endpoint, + StringEntity( + "{\"doc\":{\"managed_index\":{\"schedule\":{\"interval\":{\"start_time\":" + + "\"$startTimeMillis\"}}}}}", + ContentType.APPLICATION_JSON, + ), ) - ) assertEquals("Request failed", RestStatus.OK, response.restStatus()) } protected fun updateManagedIndexConfigPolicySeqNo(update: ManagedIndexConfig) { - val response = client().makeRequest( - "POST", "$INDEX_MANAGEMENT_INDEX/_update/${update.id}", - StringEntity( - "{\"doc\":{\"managed_index\":{\"policy_seq_no\":\"${update.policySeqNo}\"}}}", - ContentType.APPLICATION_JSON + val response = + client().makeRequest( + "POST", "$INDEX_MANAGEMENT_INDEX/_update/${update.id}", + StringEntity( + 
"{\"doc\":{\"managed_index\":{\"policy_seq_no\":\"${update.policySeqNo}\"}}}", + ContentType.APPLICATION_JSON, + ), ) - ) assertEquals("Request failed", RestStatus.OK, response.restStatus()) } @@ -630,12 +653,13 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() @Suppress("UNCHECKED_CAST") protected fun getNodes(): MutableSet { - val response = client() - .makeRequest( - "GET", - "_cat/nodes?format=json", - emptyMap() - ) + val response = + client() + .makeRequest( + "GET", + "_cat/nodes?format=json", + emptyMap(), + ) assertEquals("Unable to get nodes", RestStatus.OK, response.restStatus()) try { return jsonXContent @@ -708,35 +732,38 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() } protected fun rolloverIndex(alias: String) { - val response = client().performRequest( - Request( - "POST", - "/$alias/_rollover" + val response = + client().performRequest( + Request( + "POST", + "/$alias/_rollover", + ), ) - ) assertEquals(response.statusLine.statusCode, RestStatus.OK.status) } protected fun createRepository( - repository: String + repository: String, ) { - val response = client() - .makeRequest( - "PUT", - "_snapshot/$repository", - emptyMap(), - StringEntity("{\"type\":\"fs\", \"settings\": {\"location\": \"$repository\"}}", ContentType.APPLICATION_JSON) - ) + val response = + client() + .makeRequest( + "PUT", + "_snapshot/$repository", + emptyMap(), + StringEntity("{\"type\":\"fs\", \"settings\": {\"location\": \"$repository\"}}", ContentType.APPLICATION_JSON), + ) assertEquals("Unable to create a new repository", RestStatus.OK, response.restStatus()) } protected fun getShardsList(target: String = "*"): List { - val response = client() - .makeRequest( - "GET", - "_cat/shards/$target?format=json", - emptyMap() - ) + val response = + client() + .makeRequest( + "GET", + "_cat/shards/$target?format=json", + emptyMap(), + ) assertEquals("Unable to get allocation info", RestStatus.OK, response.restStatus()) try { return jsonXContent @@ -748,12 +775,13 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() } protected fun cat(endpoint: String = "indices"): List { - val response = client() - .makeRequest( - "GET", - "_cat/$endpoint", - emptyMap() - ) + val response = + client() + .makeRequest( + "GET", + "_cat/$endpoint", + emptyMap(), + ) assertEquals("Unable to get cat info", RestStatus.OK, response.restStatus()) try { return jsonXContent @@ -777,12 +805,13 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() } private fun getSnapshotsList(repository: String): List { - val response = client() - .makeRequest( - "GET", - "_cat/snapshots/$repository?format=json", - emptyMap() - ) + val response = + client() + .makeRequest( + "GET", + "_cat/snapshots/$repository?format=json", + emptyMap(), + ) assertEquals("Unable to get a snapshot", RestStatus.OK, response.restStatus()) try { return jsonXContent @@ -795,7 +824,7 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() protected fun getRollup( rollupId: String, - header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ): Rollup { val response = client().makeRequest("GET", "${IndexManagementPlugin.ROLLUP_JOBS_BASE_URI}/$rollupId", null, header) assertEquals("Unable to get rollup $rollupId", RestStatus.OK, response.restStatus()) @@ -823,7 +852,7 @@ abstract class IndexStateManagementRestTestCase 
: IndexManagementRestTestCase() protected fun getRollupMetadata( metadataId: String, - header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ): RollupMetadata { val response = client().makeRequest("GET", "$INDEX_MANAGEMENT_INDEX/_doc/$metadataId", null, header) assertEquals("Unable to get rollup metadata $metadataId", RestStatus.OK, response.restStatus()) @@ -852,7 +881,7 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() protected fun getTransform( transformId: String, - header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ): Transform { val response = client().makeRequest("GET", "${IndexManagementPlugin.TRANSFORM_BASE_URI}/$transformId", null, header) assertEquals("Unable to get transform $transformId", RestStatus.OK, response.restStatus()) @@ -880,7 +909,7 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() protected fun getTransformMetadata( metadataId: String, - header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ): TransformMetadata { val response = client().makeRequest("GET", "$INDEX_MANAGEMENT_INDEX/_doc/$metadataId", null, header) assertEquals("Unable to get transform metadata $metadataId", RestStatus.OK, response.restStatus()) @@ -915,13 +944,13 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() @Suppress("UNCHECKED_CAST") protected fun assertSnapshotExists( repository: String, - snapshot: String + snapshot: String, ) = require(getSnapshotsList(repository).any { element -> (element as Map)["id"]!!.startsWith(snapshot) }) { "No snapshot found with id: $snapshot" } @Suppress("UNCHECKED_CAST") protected fun assertSnapshotFinishedWithSuccess( repository: String, - snapshot: String + snapshot: String, ) = require(getSnapshotsList(repository).any { element -> (element as Map)["id"]!!.startsWith(snapshot) && "SUCCESS" == element["status"] }) { "Snapshot didn't finish with success." 
} /** @@ -980,7 +1009,7 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() protected fun assertPredicatesOnMetaData( indexPredicates: List Boolean>>>>, response: Map, - strict: Boolean = true + strict: Boolean = true, ) { indexPredicates.forEach { (index, predicates) -> assertTrue("The index: $index was not found in the response: $response", response.containsKey(index)) @@ -1037,7 +1066,7 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() @Suppress("UNCHECKED_CAST") protected fun assertPredicatesOnISMTemplatesMap( templatePredicates: List Boolean>>>>, // response map name: predicate - response: Map + response: Map, ) { val templates = response["ism_templates"] as ArrayList> @@ -1068,33 +1097,35 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() } protected fun createV1Template(templateName: String, indexPatterns: String, policyID: String, order: Int = 0) { - val response = client().makeRequest( - "PUT", "_template/$templateName", - StringEntity( - "{\n" + - " \"index_patterns\": [\"$indexPatterns\"],\n" + - " \"settings\": {\n" + - " \"opendistro.index_state_management.policy_id\": \"$policyID\"\n" + - " }, \n" + - " \"order\": $order\n" + - "}", - ContentType.APPLICATION_JSON + val response = + client().makeRequest( + "PUT", "_template/$templateName", + StringEntity( + "{\n" + + " \"index_patterns\": [\"$indexPatterns\"],\n" + + " \"settings\": {\n" + + " \"opendistro.index_state_management.policy_id\": \"$policyID\"\n" + + " }, \n" + + " \"order\": $order\n" + + "}", + ContentType.APPLICATION_JSON, + ), ) - ) assertEquals("Request failed", RestStatus.OK, response.restStatus()) } protected fun createV1Template2(templateName: String, indexPatterns: String, order: Int = 0) { - val response = client().makeRequest( - "PUT", "_template/$templateName", - StringEntity( - "{\n" + - " \"index_patterns\": [\"$indexPatterns\"],\n" + - " \"order\": $order\n" + - "}", - ContentType.APPLICATION_JSON + val response = + client().makeRequest( + "PUT", "_template/$templateName", + StringEntity( + "{\n" + + " \"index_patterns\": [\"$indexPatterns\"],\n" + + " \"order\": $order\n" + + "}", + ContentType.APPLICATION_JSON, + ), ) - ) assertEquals("Request failed", RestStatus.OK, response.restStatus()) } @@ -1104,20 +1135,21 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() } protected fun createV2Template(templateName: String, indexPatterns: String, policyID: String) { - val response = client().makeRequest( - "PUT", "_index_template/$templateName", - StringEntity( - "{\n" + - " \"index_patterns\": [\"$indexPatterns\"],\n" + - " \"template\": {\n" + - " \"settings\": {\n" + - " \"opendistro.index_state_management.policy_id\": \"$policyID\"\n" + - " }\n" + - " }\n" + - "}", - ContentType.APPLICATION_JSON + val response = + client().makeRequest( + "PUT", "_index_template/$templateName", + StringEntity( + "{\n" + + " \"index_patterns\": [\"$indexPatterns\"],\n" + + " \"template\": {\n" + + " \"settings\": {\n" + + " \"opendistro.index_state_management.policy_id\": \"$policyID\"\n" + + " }\n" + + " }\n" + + "}", + ContentType.APPLICATION_JSON, + ), ) - ) assertEquals("Request failed", RestStatus.OK, response.restStatus()) } @@ -1137,7 +1169,7 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() .createParser( NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - response.entity.content + response.entity.content, ) .use { parser -> 
parser.list() } } catch (e: IOException) { @@ -1149,8 +1181,8 @@ abstract class IndexStateManagementRestTestCase : IndexManagementRestTestCase() return NamedXContentRegistry( listOf( ClusterModule.getNamedXWriteables(), - SearchModule(Settings.EMPTY, emptyList()).namedXContents - ).flatten() + SearchModule(Settings.EMPTY, emptyList()).namedXContents, + ).flatten(), ) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ManagedIndexConfigTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ManagedIndexConfigTests.kt index d911f6d4c..33ad1179c 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ManagedIndexConfigTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ManagedIndexConfigTests.kt @@ -6,17 +6,15 @@ package org.opensearch.indexmanagement.indexstatemanagement import org.opensearch.common.xcontent.LoggingDeprecationHandler -import org.opensearch.core.xcontent.XContentParser import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.xcontent.XContentParser import org.opensearch.indexmanagement.indexstatemanagement.model.ManagedIndexConfig import org.opensearch.indexmanagement.opensearchapi.parseWithType import org.opensearch.test.OpenSearchTestCase import kotlin.test.assertFailsWith class ManagedIndexConfigTests : OpenSearchTestCase() { - fun `test managed index config parsing`() { - val missingIndexUuid = """{"managed_index":{"name":"edpNNwdVXG","enabled":false,"index":"DcdVHfmQUI","schedule":{"interval":{"start_time":1560402722674,"period":5,"unit":"Minutes"}},"last_updated_time":1560402722676,"enabled_time":null,"policy_id":"KumaJGCWPi","policy_seq_no":5,"policy_primary_term":17,"policy":{"name":"KumaJGCWPi","last_updated_time":1560402722676,"schema_version":348392,"error_notification":null,"default_state":"EpbLVqVhtL","states":[{"name":"EpbLVqVhtL","action":[],"transitions":[]},{"name":"IIJxQdcenu","action":[],"transitions":[]},{"name":"zSXlbLUBqG","action":[],"transitions":[]},{"name":"nYRPBojBiy","action":[],"transitions":[]}]},"change_policy":{"policy_id":"BtrDpcCBeT","state":"obxAkRuhvq"}}}""" val missingIndex = """{"managed_index":{"name":"edpNNwdVXG","enabled":false,"index_uuid":"SdcNvtdyAZYyrVkFMoQr","schedule":{"interval":{"start_time":1560402722674,"period":5,"unit":"Minutes"}},"last_updated_time":1560402722676,"enabled_time":null,"policy_id":"KumaJGCWPi","policy_seq_no":5,"policy_primary_term":17,"policy":{"name":"KumaJGCWPi","last_updated_time":1560402722676,"schema_version":348392,"error_notification":null,"default_state":"EpbLVqVhtL","states":[{"name":"EpbLVqVhtL","action":[],"transitions":[]},{"name":"IIJxQdcenu","action":[],"transitions":[]},{"name":"zSXlbLUBqG","action":[],"transitions":[]},{"name":"nYRPBojBiy","action":[],"transitions":[]}]},"change_policy":{"policy_id":"BtrDpcCBeT","state":"obxAkRuhvq"}}}""" val missingName = 
"""{"managed_index":{"enabled":false,"index":"DcdVHfmQUI","index_uuid":"SdcNvtdyAZYyrVkFMoQr","schedule":{"interval":{"start_time":1560402722674,"period":5,"unit":"Minutes"}},"last_updated_time":1560402722676,"enabled_time":null,"policy_id":"KumaJGCWPi","policy_seq_no":5,"policy_primary_term":17,"policy":{"name":"KumaJGCWPi","last_updated_time":1560402722676,"schema_version":348392,"error_notification":null,"default_state":"EpbLVqVhtL","states":[{"name":"EpbLVqVhtL","action":[],"transitions":[]},{"name":"IIJxQdcenu","action":[],"transitions":[]},{"name":"zSXlbLUBqG","action":[],"transitions":[]},{"name":"nYRPBojBiy","action":[],"transitions":[]}]},"change_policy":{"policy_id":"BtrDpcCBeT","state":"obxAkRuhvq"}}}""" diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/TestHelpers.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/TestHelpers.kt index d6fa0bed7..218ec4879 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/TestHelpers.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/TestHelpers.kt @@ -7,12 +7,13 @@ package org.opensearch.indexmanagement.indexstatemanagement import org.opensearch.action.admin.indices.alias.Alias import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest -import org.opensearch.core.common.unit.ByteSizeValue import org.opensearch.common.unit.TimeValue -import org.opensearch.core.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory +import org.opensearch.core.common.unit.ByteSizeValue +import org.opensearch.core.xcontent.ToXContent import org.opensearch.index.RandomCreateIndexGenerator.randomAlias import org.opensearch.index.seqno.SequenceNumbers +import org.opensearch.indexmanagement.common.model.notification.Channel import org.opensearch.indexmanagement.indexstatemanagement.action.AliasAction import org.opensearch.indexmanagement.indexstatemanagement.action.AllocationAction import org.opensearch.indexmanagement.indexstatemanagement.action.CloseAction @@ -28,6 +29,7 @@ import org.opensearch.indexmanagement.indexstatemanagement.action.RolloverAction import org.opensearch.indexmanagement.indexstatemanagement.action.RollupAction import org.opensearch.indexmanagement.indexstatemanagement.action.ShrinkAction import org.opensearch.indexmanagement.indexstatemanagement.action.SnapshotAction +import org.opensearch.indexmanagement.indexstatemanagement.action.TransformAction import org.opensearch.indexmanagement.indexstatemanagement.model.ChangePolicy import org.opensearch.indexmanagement.indexstatemanagement.model.Conditions import org.opensearch.indexmanagement.indexstatemanagement.model.ErrorNotification @@ -40,8 +42,6 @@ import org.opensearch.indexmanagement.indexstatemanagement.model.StateFilter import org.opensearch.indexmanagement.indexstatemanagement.model.Transition import org.opensearch.indexmanagement.indexstatemanagement.model.coordinator.ClusterStateManagedIndexConfig import org.opensearch.indexmanagement.indexstatemanagement.model.coordinator.SweptManagedIndexConfig -import org.opensearch.indexmanagement.common.model.notification.Channel -import org.opensearch.indexmanagement.indexstatemanagement.action.TransformAction import org.opensearch.indexmanagement.indexstatemanagement.model.destination.Chime import org.opensearch.indexmanagement.indexstatemanagement.model.destination.CustomWebhook import org.opensearch.indexmanagement.indexstatemanagement.model.destination.Destination @@ -75,25 +75,25 @@ fun randomPolicy( 
lastUpdatedTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), errorNotification: ErrorNotification? = randomErrorNotification(), states: List = List(OpenSearchRestTestCase.randomIntBetween(1, 10)) { randomState() }, - ismTemplate: List? = null + ismTemplate: List? = null, ): Policy { return Policy( id = id, schemaVersion = schemaVersion, lastUpdatedTime = lastUpdatedTime, - errorNotification = errorNotification, defaultState = states[0].name, states = states, description = description, ismTemplate = ismTemplate + errorNotification = errorNotification, defaultState = states[0].name, states = states, description = description, ismTemplate = ismTemplate, ) } fun randomState( name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), actions: List = listOf(), - transitions: List = listOf() + transitions: List = listOf(), ): State { return State(name = name, actions = actions, transitions = transitions) } fun randomTransition( stateName: String = OpenSearchRestTestCase.randomAlphaOfLength(10), - conditions: Conditions? = randomConditions() + conditions: Conditions? = randomConditions(), ): Transition { return Transition(stateName = stateName, conditions = conditions) } @@ -106,9 +106,8 @@ fun randomTransition( */ fun randomConditions( condition: Pair? = - OpenSearchRestTestCase.randomFrom(listOf(randomIndexAge(), randomDocCount(), randomSize(), randomRolloverAge(), null)) + OpenSearchRestTestCase.randomFrom(listOf(randomIndexAge(), randomDocCount(), randomSize(), randomRolloverAge(), null)), ): Conditions? { - if (condition == null) return null val type = condition.first @@ -135,14 +134,14 @@ fun randomRolloverActionConfig( minSize: ByteSizeValue = randomByteSizeValue(), minDocs: Long = OpenSearchRestTestCase.randomLongBetween(1, 1000), minAge: TimeValue = randomTimeValueObject(), - minPrimaryShardSize: ByteSizeValue = randomByteSizeValue() + minPrimaryShardSize: ByteSizeValue = randomByteSizeValue(), ): RolloverAction { return RolloverAction( minSize = minSize, minDocs = minDocs, minAge = minAge, minPrimaryShardSize = minPrimaryShardSize, - index = 0 + index = 0, ) } @@ -154,7 +153,7 @@ fun randomShrinkAction( targetIndexTemplate: Script? = if (randomBoolean()) randomTemplateScript(randomAlphaOfLength(10)) else null, aliases: List? = if (randomBoolean()) randomList(10) { randomAlias() } else null, switchAliases: Boolean = randomBoolean(), - forceUnsafe: Boolean? = if (randomBoolean()) randomBoolean() else null + forceUnsafe: Boolean? = if (randomBoolean()) randomBoolean() else null, ): ShrinkAction { if (numNewShards == null && maxShardSize == null && percentageOfSourceShards == null) { when (randomInt(2)) { @@ -183,7 +182,7 @@ fun randomIndexPriorityActionConfig(indexPriority: Int = OpenSearchRestTestCase. 
} fun randomForceMergeActionConfig( - maxNumSegments: Int = OpenSearchRestTestCase.randomIntBetween(1, 50) + maxNumSegments: Int = OpenSearchRestTestCase.randomIntBetween(1, 50), ): ForceMergeAction { return ForceMergeAction(maxNumSegments = maxNumSegments, index = 0) } @@ -191,7 +190,7 @@ fun randomForceMergeActionConfig( fun randomNotificationActionConfig( destination: Destination = randomDestination(), messageTemplate: Script = randomTemplateScript("random message"), - index: Int = 0 + index: Int = 0, ): NotificationAction { return NotificationAction(destination, null, messageTemplate, index) } @@ -239,7 +238,7 @@ fun randomDestination(type: DestinationType = randomDestinationType()): Destinat type = type, chime = if (type == DestinationType.CHIME) randomChime() else null, slack = if (type == DestinationType.SLACK) randomSlack() else null, - customWebhook = if (type == DestinationType.CUSTOM_WEBHOOK) randomCustomWebhook() else null + customWebhook = if (type == DestinationType.CUSTOM_WEBHOOK) randomCustomWebhook() else null, ) } @@ -266,7 +265,7 @@ fun randomCustomWebhook(): CustomWebhook { queryParams = emptyMap(), headerParams = emptyMap(), username = null, - password = null + password = null, ) } @@ -274,7 +273,7 @@ fun randomTemplateScript( source: String = OpenSearchRestTestCase.randomAlphaOfLength(10), params: Map = emptyMap(), scriptType: ScriptType = ScriptType.INLINE, - lang: String = Script.DEFAULT_TEMPLATE_LANG + lang: String = Script.DEFAULT_TEMPLATE_LANG, ): Script = Script(scriptType, lang, source, params) fun randomSnapshotActionConfig(repository: String = "repo", snapshot: String = "sp"): SnapshotAction { @@ -300,8 +299,9 @@ fun randomTimeValueObject(): TimeValue = TimeValue.parseTimeValue(OpenSearchRest fun randomByteSizeValue(): ByteSizeValue = ByteSizeValue.parseBytesSizeValue( OpenSearchRestTestCase.randomIntBetween(1, 1000).toString() + OpenSearchRestTestCase.randomFrom(listOf("b", "kb", "mb", "gb")), - "" + "", ) + /** * End - Conditions helper functions */ @@ -310,7 +310,7 @@ fun randomExplainFilter( policyID: String? = if (OpenSearchRestTestCase.randomBoolean()) OpenSearchRestTestCase.randomAlphaOfLength(10) else null, state: String? = if (OpenSearchRestTestCase.randomBoolean()) OpenSearchRestTestCase.randomAlphaOfLength(10) else null, actionType: String? = if (OpenSearchRestTestCase.randomBoolean()) OpenSearchRestTestCase.randomAlphaOfLength(10) else null, - failed: Boolean? = if (OpenSearchRestTestCase.randomBoolean()) OpenSearchRestTestCase.randomBoolean() else null + failed: Boolean? = if (OpenSearchRestTestCase.randomBoolean()) OpenSearchRestTestCase.randomBoolean() else null, ): ExplainFilter { return ExplainFilter(policyID, state, actionType, failed) } @@ -319,7 +319,7 @@ fun randomChangePolicy( policyID: String = OpenSearchRestTestCase.randomAlphaOfLength(10), state: String? = if (OpenSearchRestTestCase.randomBoolean()) OpenSearchRestTestCase.randomAlphaOfLength(10) else null, include: List = emptyList(), - isSafe: Boolean = false + isSafe: Boolean = false, ): ChangePolicy { return ChangePolicy(policyID, state, include, isSafe) } @@ -338,7 +338,7 @@ fun randomManagedIndexConfig( enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, policy: Policy = randomPolicy(), changePolicy: ChangePolicy? = randomChangePolicy(), - jitter: Double? = 0.0 + jitter: Double? 
= 0.0, ): ManagedIndexConfig { return ManagedIndexConfig( jobName = name, @@ -353,7 +353,7 @@ fun randomManagedIndexConfig( policyPrimaryTerm = policy.primaryTerm, policy = policy.copy(seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM), changePolicy = changePolicy, - jobJitter = jitter + jobJitter = jitter, ) } @@ -362,14 +362,14 @@ fun randomClusterStateManagedIndexConfig( uuid: String = OpenSearchRestTestCase.randomAlphaOfLength(20), policyID: String = OpenSearchRestTestCase.randomAlphaOfLength(10), seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, - primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM + primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, ): ClusterStateManagedIndexConfig { return ClusterStateManagedIndexConfig( index = index, uuid = uuid, policyID = policyID, seqNo = seqNo, - primaryTerm = primaryTerm + primaryTerm = primaryTerm, ) } @@ -380,7 +380,7 @@ fun randomSweptManagedIndexConfig( seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, changePolicy: ChangePolicy? = null, - policy: Policy? = null + policy: Policy? = null, ): SweptManagedIndexConfig { return SweptManagedIndexConfig( index = index, @@ -389,19 +389,19 @@ fun randomSweptManagedIndexConfig( seqNo = seqNo, primaryTerm = primaryTerm, policy = policy, - changePolicy = changePolicy + changePolicy = changePolicy, ) } fun randomISMTemplate( indexPatterns: List = listOf(OpenSearchRestTestCase.randomAlphaOfLength(10) + "*"), priority: Int = OpenSearchRestTestCase.randomIntBetween(0, 100), - lastUpdatedTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS) + lastUpdatedTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), ): ISMTemplate { return ISMTemplate( indexPatterns = indexPatterns, priority = priority, - lastUpdatedTime = lastUpdatedTime + lastUpdatedTime = lastUpdatedTime, ) } @@ -537,7 +537,7 @@ fun Channel.toJsonString(): String { @Suppress("RethrowCaughtException") fun wait( timeout: Instant = Instant.ofEpochSecond(10), - block: () -> T + block: () -> T, ) { val startTime = Instant.now().toEpochMilli() do { diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ActionRetryIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ActionRetryIT.kt index 55564f211..c95d3f046 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ActionRetryIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ActionRetryIT.kt @@ -24,12 +24,13 @@ class ActionRetryIT : IndexStateManagementRestTestCase() { * We are forcing RollOver to fail in this integration test. 
*/ fun `test failed action`() { - val testPolicy = """ - {"policy":{"description":"Default policy","default_state":"Ingest","states":[ - {"name":"Ingest","actions":[{"retry":{"count":2,"backoff":"constant","delay":"1s"},"rollover":{"min_doc_count":100}}],"transitions":[{"state_name":"Search"}]}, - {"name":"Search","actions":[],"transitions":[{"state_name":"Delete","conditions":{"min_index_age":"30d"}}]}, - {"name":"Delete","actions":[{"delete":{}}],"transitions":[]}]}} - """.trimIndent() + val testPolicy = + """ + {"policy":{"description":"Default policy","default_state":"Ingest","states":[ + {"name":"Ingest","actions":[{"retry":{"count":2,"backoff":"constant","delay":"1s"},"rollover":{"min_doc_count":100}}],"transitions":[{"state_name":"Search"}]}, + {"name":"Search","actions":[],"transitions":[{"state_name":"Delete","conditions":{"min_index_age":"30d"}}]}, + {"name":"Delete","actions":[{"delete":{}}],"transitions":[]}]}} + """.trimIndent() val indexName = "${testIndexName}_index_1" val policyID = "${testIndexName}_testPolicyName_1" @@ -53,9 +54,9 @@ class ActionRetryIT : IndexStateManagementRestTestCase() { assertEquals( ActionMetaData( "rollover", managedIndexMetaData.actionMetaData?.startTime, 0, false, 1, - managedIndexMetaData.actionMetaData?.lastRetryTime, null + managedIndexMetaData.actionMetaData?.lastRetryTime, null, ), - managedIndexMetaData.actionMetaData + managedIndexMetaData.actionMetaData, ) assertEquals(expectedInfoString, managedIndexMetaData.info.toString()) @@ -69,9 +70,9 @@ class ActionRetryIT : IndexStateManagementRestTestCase() { assertEquals( ActionMetaData( "rollover", managedIndexMetaData.actionMetaData?.startTime, 0, false, 2, - managedIndexMetaData.actionMetaData?.lastRetryTime, null + managedIndexMetaData.actionMetaData?.lastRetryTime, null, ), - managedIndexMetaData.actionMetaData + managedIndexMetaData.actionMetaData, ) assertEquals(expectedInfoString, managedIndexMetaData.info.toString()) @@ -85,9 +86,9 @@ class ActionRetryIT : IndexStateManagementRestTestCase() { assertEquals( ActionMetaData( "rollover", managedIndexMetaData.actionMetaData?.startTime, 0, true, 2, - managedIndexMetaData.actionMetaData?.lastRetryTime, null + managedIndexMetaData.actionMetaData?.lastRetryTime, null, ), - managedIndexMetaData.actionMetaData + managedIndexMetaData.actionMetaData, ) assertEquals(expectedInfoString, managedIndexMetaData.info.toString()) @@ -95,12 +96,13 @@ class ActionRetryIT : IndexStateManagementRestTestCase() { } fun `test exponential backoff`() { - val testPolicy = """ - {"policy":{"description":"Default policy","default_state":"Ingest","states":[ - {"name":"Ingest","actions":[{"retry":{"count":2,"backoff":"exponential","delay":"1m"},"rollover":{"min_doc_count":100}}],"transitions":[{"state_name":"Search"}]}, - {"name":"Search","actions":[],"transitions":[{"state_name":"Delete","conditions":{"min_index_age":"30d"}}]}, - {"name":"Delete","actions":[{"delete":{}}],"transitions":[]}]}} - """.trimIndent() + val testPolicy = + """ + {"policy":{"description":"Default policy","default_state":"Ingest","states":[ + {"name":"Ingest","actions":[{"retry":{"count":2,"backoff":"exponential","delay":"1m"},"rollover":{"min_doc_count":100}}],"transitions":[{"state_name":"Search"}]}, + {"name":"Search","actions":[],"transitions":[{"state_name":"Delete","conditions":{"min_index_age":"30d"}}]}, + {"name":"Delete","actions":[{"delete":{}}],"transitions":[]}]}} + """.trimIndent() val indexName = "${testIndexName}_index_2" val policyID = "${testIndexName}_testPolicyName_2" @@ 
-134,35 +136,48 @@ class ActionRetryIT : IndexStateManagementRestTestCase() { val expectedInfoString = mapOf("message" to AttemptRolloverStep.getFailedNoValidAliasMessage(indexName)).toString() assertPredicatesOnMetaData( listOf( - indexName to listOf( - explainResponseOpendistroPolicyIdSetting to policyID::equals, - explainResponseOpenSearchPolicyIdSetting to policyID::equals, - ManagedIndexMetaData.INDEX to managedIndexConfig.index::equals, - ManagedIndexMetaData.INDEX_UUID to managedIndexConfig.indexUuid::equals, - ManagedIndexMetaData.POLICY_ID to managedIndexConfig.policyID::equals, - ManagedIndexMetaData.POLICY_SEQ_NO to policySeq::equals, - ManagedIndexMetaData.POLICY_PRIMARY_TERM to policyPrimaryTerm::equals, - ManagedIndexMetaData.ROLLED_OVER to false::equals, - ManagedIndexMetaData.INDEX_CREATION_DATE to fun(indexCreationDate: Any?): Boolean = (indexCreationDate as Long) > 1L, - StateMetaData.STATE to fun(stateMetaDataMap: Any?): Boolean = - assertStateEquals(StateMetaData("Ingest", Instant.now().toEpochMilli()), stateMetaDataMap), - ActionMetaData.ACTION to fun(actionMetaDataMap: Any?): Boolean = - assertActionEquals( - ActionMetaData("rollover", Instant.now().toEpochMilli(), 0, false, 1, null, null), - actionMetaDataMap - ), - StepMetaData.STEP to fun(stepMetaDataMap: Any?): Boolean = - assertStepEquals( - StepMetaData("attempt_rollover", Instant.now().toEpochMilli(), Step.StepStatus.FAILED), - stepMetaDataMap - ), - PolicyRetryInfoMetaData.RETRY_INFO to fun(retryInfoMetaDataMap: Any?): Boolean = - assertRetryInfoEquals(PolicyRetryInfoMetaData(false, 0), retryInfoMetaDataMap), - ManagedIndexMetaData.INFO to fun(info: Any?): Boolean = expectedInfoString == info.toString(), - ManagedIndexMetaData.ENABLED to true::equals - ) + indexName to + listOf( + explainResponseOpendistroPolicyIdSetting to policyID::equals, + explainResponseOpenSearchPolicyIdSetting to policyID::equals, + ManagedIndexMetaData.INDEX to managedIndexConfig.index::equals, + ManagedIndexMetaData.INDEX_UUID to managedIndexConfig.indexUuid::equals, + ManagedIndexMetaData.POLICY_ID to managedIndexConfig.policyID::equals, + ManagedIndexMetaData.POLICY_SEQ_NO to policySeq::equals, + ManagedIndexMetaData.POLICY_PRIMARY_TERM to policyPrimaryTerm::equals, + ManagedIndexMetaData.ROLLED_OVER to false::equals, + ManagedIndexMetaData.INDEX_CREATION_DATE to + + fun(indexCreationDate: Any?): Boolean = (indexCreationDate as Long) > 1L, + StateMetaData.STATE to + + fun(stateMetaDataMap: Any?): Boolean = + assertStateEquals(StateMetaData("Ingest", Instant.now().toEpochMilli()), stateMetaDataMap), + ActionMetaData.ACTION to + + fun(actionMetaDataMap: Any?): Boolean = + assertActionEquals( + ActionMetaData("rollover", Instant.now().toEpochMilli(), 0, false, 1, null, null), + actionMetaDataMap, + ), + StepMetaData.STEP to + + fun(stepMetaDataMap: Any?): Boolean = + assertStepEquals( + StepMetaData("attempt_rollover", Instant.now().toEpochMilli(), Step.StepStatus.FAILED), + stepMetaDataMap, + ), + PolicyRetryInfoMetaData.RETRY_INFO to + + fun(retryInfoMetaDataMap: Any?): Boolean = + assertRetryInfoEquals(PolicyRetryInfoMetaData(false, 0), retryInfoMetaDataMap), + ManagedIndexMetaData.INFO to + + fun(info: Any?): Boolean = expectedInfoString == info.toString(), + ManagedIndexMetaData.ENABLED to true::equals, + ), ), - getExplainMap(indexName) + getExplainMap(indexName), ) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ActionTimeoutIT.kt 
b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ActionTimeoutIT.kt index 551132009..8d26405a8 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ActionTimeoutIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ActionTimeoutIT.kt @@ -20,11 +20,12 @@ class ActionTimeoutIT : IndexStateManagementRestTestCase() { fun `test failed action`() { val indexName = "${testIndexName}_index_1" val policyID = "${testIndexName}_testPolicyName_1" - val testPolicy = """ - {"policy":{"description":"Default policy","default_state":"rolloverstate","states":[ - {"name":"rolloverstate","actions":[{"timeout":"1s","rollover":{"min_doc_count":100}}], - "transitions":[]}]}} - """.trimIndent() + val testPolicy = + """ + {"policy":{"description":"Default policy","default_state":"rolloverstate","states":[ + {"name":"rolloverstate","actions":[{"timeout":"1s","rollover":{"min_doc_count":100}}], + "transitions":[]}]}} + """.trimIndent() createPolicyJson(testPolicy, policyID) @@ -43,7 +44,7 @@ class ActionTimeoutIT : IndexStateManagementRestTestCase() { assertEquals( "Should be attempting to rollover", getExplainManagedIndexMetaData(indexName).info?.get("message"), - AttemptRolloverStep.getPendingMessage(indexName) + AttemptRolloverStep.getPendingMessage(indexName), ) } @@ -52,19 +53,22 @@ class ActionTimeoutIT : IndexStateManagementRestTestCase() { waitFor { assertPredicatesOnMetaData( listOf( - indexName to listOf( - ActionMetaData.ACTION to fun(actionMetaDataMap: Any?): Boolean = - assertActionEquals( - ActionMetaData( - name = RolloverAction.name, startTime = Instant.now().toEpochMilli(), index = 0, - failed = true, consumedRetries = 0, lastRetryTime = null, actionProperties = null - ), - actionMetaDataMap - ) - ) + indexName to + listOf( + ActionMetaData.ACTION to + + fun(actionMetaDataMap: Any?): Boolean = + assertActionEquals( + ActionMetaData( + name = RolloverAction.name, startTime = Instant.now().toEpochMilli(), index = 0, + failed = true, consumedRetries = 0, lastRetryTime = null, actionProperties = null, + ), + actionMetaDataMap, + ), + ), ), getExplainMap(indexName), - strict = false + strict = false, ) } } @@ -73,11 +77,12 @@ class ActionTimeoutIT : IndexStateManagementRestTestCase() { fun `test action timeout doesn't bleed over into next action`() { val indexName = "${testIndexName}_index_2" val policyID = "${testIndexName}_testPolicyName_2" - val testPolicy = """ - {"policy":{"description":"Default policy","default_state":"rolloverstate","states":[ - {"name":"rolloverstate","actions":[{"timeout": "5s","open":{}},{"timeout":"1s","rollover":{"min_doc_count":100}}], - "transitions":[]}]}} - """.trimIndent() + val testPolicy = + """ + {"policy":{"description":"Default policy","default_state":"rolloverstate","states":[ + {"name":"rolloverstate","actions":[{"timeout": "5s","open":{}},{"timeout":"1s","rollover":{"min_doc_count":100}}], + "transitions":[]}]}} + """.trimIndent() createPolicyJson(testPolicy, policyID) @@ -96,9 +101,16 @@ class ActionTimeoutIT : IndexStateManagementRestTestCase() { val expectedOpenInfoString = mapOf("message" to AttemptOpenStep.getSuccessMessage(indexName)).toString() waitFor { assertPredicatesOnMetaData( - listOf(indexName to listOf(ManagedIndexMetaData.INFO to fun(info: Any?): Boolean = expectedOpenInfoString == info.toString())), + listOf( + indexName to + listOf( + ManagedIndexMetaData.INFO to + + fun(info: Any?): Boolean = expectedOpenInfoString == info.toString(), + ), + ), 
getExplainMap(indexName), - strict = false + strict = false, ) } @@ -112,7 +124,7 @@ class ActionTimeoutIT : IndexStateManagementRestTestCase() { assertEquals( "Should be attempting to rollover", getExplainManagedIndexMetaData(indexName).info?.get("message"), - AttemptRolloverStep.getPendingMessage(indexName) + AttemptRolloverStep.getPendingMessage(indexName), ) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AliasActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AliasActionIT.kt index 0ce641fbd..c21f091c2 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AliasActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AliasActionIT.kt @@ -27,15 +27,16 @@ class AliasActionIT : IndexStateManagementRestTestCase() { val actionConfig = AliasAction(actions = actions, index = 0) val states = listOf(State("alias", listOf(actionConfig), listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) val managedIndexConfig = getExistingManagedIndexConfig(indexName) @@ -70,15 +71,16 @@ class AliasActionIT : IndexStateManagementRestTestCase() { val actionConfig = AliasAction(actions = actions, index = 0) val states = listOf(State("alias", listOf(actionConfig), listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) val managedIndexConfig = getExistingManagedIndexConfig(indexName) @@ -113,15 +115,16 @@ class AliasActionIT : IndexStateManagementRestTestCase() { val actionConfig = AliasAction(actions = actions, index = 0) val states = listOf(State("alias", listOf(actionConfig), listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID, aliasName) val managedIndexConfig = getExistingManagedIndexConfig(indexName) diff --git 
a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AllocationActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AllocationActionIT.kt index 63b00200a..989d53f4a 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AllocationActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/AllocationActionIT.kt @@ -23,18 +23,20 @@ class AllocationActionIT : IndexStateManagementRestTestCase() { val indexName = "${testIndexName}_index_1" val policyID = "${testIndexName}_testPolicyName_1" val actionConfig = AllocationAction(require = mapOf("box_type" to "hot"), exclude = emptyMap(), include = emptyMap(), index = 0) - val states = listOf( - State("Allocate", listOf(actionConfig), listOf()) - ) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("Allocate", listOf(actionConfig), listOf()), + ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) val managedIndexConfig = getExistingManagedIndexConfig(indexName) @@ -66,18 +68,20 @@ class AllocationActionIT : IndexStateManagementRestTestCase() { availableNodes.remove(getIndexShardNodes(indexName)[0]) val actionConfig = AllocationAction(require = mapOf("_name" to availableNodes.first()), exclude = emptyMap(), include = emptyMap(), index = 0) - val states = listOf( - State("Allocate", listOf(actionConfig), listOf()) - ) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("Allocate", listOf(actionConfig), listOf()), + ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) addPolicyToIndex(policyID, indexName) val managedIndexConfig = getExistingManagedIndexConfig(indexName) @@ -112,18 +116,20 @@ class AllocationActionIT : IndexStateManagementRestTestCase() { val excludedNode = getIndexShardNodes(indexName)[0].toString() val actionConfig = AllocationAction(require = emptyMap(), exclude = mapOf("_name" to excludedNode), include = emptyMap(), index = 0) - val states = listOf( - State("Allocate", listOf(actionConfig), listOf()) - ) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("Allocate", listOf(actionConfig), listOf()), + ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + 
schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) addPolicyToIndex(policyID, indexName) val managedIndexConfig = getExistingManagedIndexConfig(indexName) @@ -159,19 +165,21 @@ class AllocationActionIT : IndexStateManagementRestTestCase() { availableNodes.remove(getIndexShardNodes(indexName)[0]) val actionConfig = AllocationAction(require = emptyMap(), exclude = emptyMap(), include = mapOf("_name" to availableNodes.first()), index = 0) - val states = listOf( - State("Allocate", listOf(actionConfig), listOf()) - ) - - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("Allocate", listOf(actionConfig), listOf()), + ) + + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) addPolicyToIndex(policyID, indexName) val managedIndexConfig = getExistingManagedIndexConfig(indexName) @@ -200,18 +208,20 @@ class AllocationActionIT : IndexStateManagementRestTestCase() { val indexName = "${testIndexName}_illegal_key" val policyID = "${testIndexName}_illegal_key" val actionConfig = AllocationAction(require = mapOf("..//" to "value"), exclude = emptyMap(), include = emptyMap(), index = 0) - val states = listOf( - State("Allocate", listOf(actionConfig), listOf()) - ) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("Allocate", listOf(actionConfig), listOf()), + ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID, null, "0") val managedIndexConfig = getExistingManagedIndexConfig(indexName) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/CloseActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/CloseActionIT.kt index 1a2760cce..8bbadc566 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/CloseActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/CloseActionIT.kt @@ -22,19 +22,21 @@ class CloseActionIT : IndexStateManagementRestTestCase() { val indexName = "${testIndexName}_index_1" val policyID = "${testIndexName}_testPolicyName_1" val actionConfig = CloseAction(0) - val states = listOf( - State("CloseState", listOf(actionConfig), listOf()) - ) - - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = 
randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("CloseState", listOf(actionConfig), listOf()), + ) + + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) @@ -57,19 +59,21 @@ class CloseActionIT : IndexStateManagementRestTestCase() { val indexName = "${testIndexName}_index_2" val policyID = "${testIndexName}_testPolicyName_2" val actionConfig = CloseAction(0) - val states = listOf( - State("CloseState", listOf(actionConfig), listOf()) - ) - - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("CloseState", listOf(actionConfig), listOf()), + ) + + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) @@ -97,15 +101,16 @@ class CloseActionIT : IndexStateManagementRestTestCase() { val firstState = State("CloseState", listOf(actionConfig), listOf(Transition(stateName = secondState.name, conditions = null))) val states = listOf(firstState, secondState) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/DeleteActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/DeleteActionIT.kt index 163e862e2..534ea6e15 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/DeleteActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/DeleteActionIT.kt @@ -21,19 +21,21 @@ class DeleteActionIT : IndexStateManagementRestTestCase() { val indexName = "${testIndexName}_index_1" val policyID = "${testIndexName}_testPolicyName_1" val actionConfig = DeleteAction(0) - val states = listOf( - State("DeleteState", listOf(actionConfig), listOf()) - ) + val states = + listOf( + State("DeleteState", listOf(actionConfig), listOf()), + ) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName 
description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ForceMergeActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ForceMergeActionIT.kt index 67567be71..849aa80e1 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ForceMergeActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ForceMergeActionIT.kt @@ -29,15 +29,16 @@ class ForceMergeActionIT : IndexStateManagementRestTestCase() { // Create a Policy with one State that only preforms a force_merge Action val forceMergeActionConfig = ForceMergeAction(maxNumSegments = 1, index = 0) val states = listOf(State("ForceMergeState", listOf(forceMergeActionConfig), listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) @@ -64,7 +65,7 @@ class ForceMergeActionIT : IndexStateManagementRestTestCase() { assertEquals( "maxNumSegments not set in ActionProperties", forceMergeActionConfig.maxNumSegments, - getExplainManagedIndexMetaData(indexName).actionMetaData?.actionProperties?.maxNumSegments + getExplainManagedIndexMetaData(indexName).actionMetaData?.actionProperties?.maxNumSegments, ) } @@ -85,15 +86,16 @@ class ForceMergeActionIT : IndexStateManagementRestTestCase() { val forceMergeActionConfig = ForceMergeAction(maxNumSegments = 1, index = 0) val states = listOf(State("ForceMergeState", listOf(forceMergeActionConfig), listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexPolicyActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexPolicyActionIT.kt index f6efc713d..ddd3b84d0 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexPolicyActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexPolicyActionIT.kt @@ -33,20 +33,21 @@ class IndexPolicyActionIT : IndexStateManagementRestTestCase() { 
updateClusterSetting(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.key, "zone") // creates a dummy policy , so that ISM index gets initialized - var policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + var policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) client().makeRequest( "PUT", "${IndexManagementPlugin.POLICY_BASE_URI}/init-index", emptyMap(), - StringEntity(policy.toJsonString(), ContentType.APPLICATION_JSON) + StringEntity(policy.toJsonString(), ContentType.APPLICATION_JSON), ) updateClusterSetting(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING.key + "zone.values", "a, b") @@ -56,28 +57,29 @@ class IndexPolicyActionIT : IndexStateManagementRestTestCase() { "PUT", "${IndexManagementPlugin.POLICY_BASE_URI}/$policyID", emptyMap(), - StringEntity(policy.toJsonString(), ContentType.APPLICATION_JSON) + StringEntity(policy.toJsonString(), ContentType.APPLICATION_JSON), ) actionConfig = ReplicaCountAction(4, 0) states = listOf(State(name = "ReplicaCountState", actions = listOf(actionConfig), transitions = listOf())) - policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) Assert.assertThrows( - ResponseException::class.java + ResponseException::class.java, ) { client().makeRequest( "PUT", "${IndexManagementPlugin.POLICY_BASE_URI}/$policyID", emptyMap(), - StringEntity(policy.toJsonString(), ContentType.APPLICATION_JSON) + StringEntity(policy.toJsonString(), ContentType.APPLICATION_JSON), ) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexPriorityActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexPriorityActionIT.kt index c77b5e8ad..fb61240ff 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexPriorityActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexPriorityActionIT.kt @@ -22,15 +22,16 @@ class IndexPriorityActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_testPolicyName_1" val actionConfig = IndexPriorityAction(50, 0) val states = listOf(State(name = "SetPriorityState", actions = listOf(actionConfig), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + 
schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexStateManagementHistoryIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexStateManagementHistoryIT.kt index deb0a22d1..66daa5c54 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexStateManagementHistoryIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/IndexStateManagementHistoryIT.kt @@ -14,10 +14,10 @@ import org.opensearch.indexmanagement.indexstatemanagement.randomErrorNotificati import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings import org.opensearch.indexmanagement.indexstatemanagement.step.readonly.SetReadOnlyStep import org.opensearch.indexmanagement.spi.indexstatemanagement.Step -import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StateMetaData import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ActionMetaData import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData import org.opensearch.indexmanagement.spi.indexstatemanagement.model.PolicyRetryInfoMetaData +import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StateMetaData import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaData import org.opensearch.indexmanagement.waitFor import java.time.Instant @@ -31,18 +31,20 @@ class IndexStateManagementHistoryIT : IndexStateManagementRestTestCase() { val indexName = "${testIndexName}_index_1" val policyID = "${testIndexName}_testPolicyName_1" val actionConfig = ReadOnlyAction(0) - val states = listOf( - State("ReadOnlyState", listOf(actionConfig), listOf()) - ) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("ReadOnlyState", listOf(actionConfig), listOf()), + ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) @@ -56,28 +58,30 @@ class IndexStateManagementHistoryIT : IndexStateManagementRestTestCase() { // Second run updateManagedIndexConfigStartTime(managedIndexConfig) - val historySearchResponse: SearchResponse = waitFor { - val historySearchResponse = getHistorySearchResponse(indexName) - assertEquals(2, historySearchResponse.hits.totalHits!!.value) - historySearchResponse - } + val historySearchResponse: SearchResponse = + waitFor { + val historySearchResponse = getHistorySearchResponse(indexName) + assertEquals(2, historySearchResponse.hits.totalHits!!.value) + historySearchResponse + } val actualHistory = getLatestHistory(historySearchResponse) - val expectedHistory = ManagedIndexMetaData( - indexName, - getUuid(indexName), - policyID, - actualHistory.policySeqNo, - policyPrimaryTerm = actualHistory.policyPrimaryTerm, - policyCompleted = null, - rolledOver = null, - 
indexCreationDate = actualHistory.indexCreationDate, - transitionTo = null, - stateMetaData = StateMetaData("ReadOnlyState", actualHistory.stateMetaData!!.startTime), - actionMetaData = ActionMetaData(ReadOnlyAction.name, actualHistory.actionMetaData!!.startTime, 0, false, 0, 0, null), - stepMetaData = StepMetaData("set_read_only", actualHistory.stepMetaData!!.startTime, Step.StepStatus.COMPLETED), - policyRetryInfo = PolicyRetryInfoMetaData(false, 0), - info = mapOf("message" to SetReadOnlyStep.getSuccessMessage(indexName)) - ) + val expectedHistory = + ManagedIndexMetaData( + indexName, + getUuid(indexName), + policyID, + actualHistory.policySeqNo, + policyPrimaryTerm = actualHistory.policyPrimaryTerm, + policyCompleted = null, + rolledOver = null, + indexCreationDate = actualHistory.indexCreationDate, + transitionTo = null, + stateMetaData = StateMetaData("ReadOnlyState", actualHistory.stateMetaData!!.startTime), + actionMetaData = ActionMetaData(ReadOnlyAction.name, actualHistory.actionMetaData!!.startTime, 0, false, 0, 0, null), + stepMetaData = StepMetaData("set_read_only", actualHistory.stepMetaData!!.startTime, Step.StepStatus.COMPLETED), + policyRetryInfo = PolicyRetryInfoMetaData(false, 0), + info = mapOf("message" to SetReadOnlyStep.getSuccessMessage(indexName)), + ) assertEquals(expectedHistory, actualHistory) waitFor { assertEquals("true", getIndexBlocksWriteSetting(indexName)) } @@ -87,18 +91,20 @@ class IndexStateManagementHistoryIT : IndexStateManagementRestTestCase() { val indexName = "${testIndexName}_index_2" val policyID = "${testIndexName}_testPolicyName_2" val actionConfig = ReadOnlyAction(0) - val states = listOf( - State("ReadOnlyState", listOf(actionConfig), listOf()) - ) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("ReadOnlyState", listOf(actionConfig), listOf()), + ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) @@ -117,28 +123,30 @@ class IndexStateManagementHistoryIT : IndexStateManagementRestTestCase() { // Second run updateManagedIndexConfigStartTime(managedIndexConfig) - val historySearchResponse: SearchResponse = waitFor { - val historySearchResponse = getHistorySearchResponse(indexName) - assertEquals(2, historySearchResponse.hits.totalHits!!.value) - historySearchResponse - } + val historySearchResponse: SearchResponse = + waitFor { + val historySearchResponse = getHistorySearchResponse(indexName) + assertEquals(2, historySearchResponse.hits.totalHits!!.value) + historySearchResponse + } val actualHistory = getLatestHistory(historySearchResponse) - val expectedHistory = ManagedIndexMetaData( - indexName, - getUuid(indexName), - policyID, - actualHistory.policySeqNo, - policyPrimaryTerm = actualHistory.policyPrimaryTerm, - policyCompleted = null, - rolledOver = null, - indexCreationDate = actualHistory.indexCreationDate, - transitionTo = null, - stateMetaData = StateMetaData("ReadOnlyState", actualHistory.stateMetaData!!.startTime), - actionMetaData = ActionMetaData(ReadOnlyAction.name, 
actualHistory.actionMetaData!!.startTime, 0, false, 0, 0, null), - stepMetaData = StepMetaData("set_read_only", actualHistory.stepMetaData!!.startTime, Step.StepStatus.COMPLETED), - policyRetryInfo = PolicyRetryInfoMetaData(false, 0), - info = mapOf("message" to SetReadOnlyStep.getSuccessMessage(indexName)) - ) + val expectedHistory = + ManagedIndexMetaData( + indexName, + getUuid(indexName), + policyID, + actualHistory.policySeqNo, + policyPrimaryTerm = actualHistory.policyPrimaryTerm, + policyCompleted = null, + rolledOver = null, + indexCreationDate = actualHistory.indexCreationDate, + transitionTo = null, + stateMetaData = StateMetaData("ReadOnlyState", actualHistory.stateMetaData!!.startTime), + actionMetaData = ActionMetaData(ReadOnlyAction.name, actualHistory.actionMetaData!!.startTime, 0, false, 0, 0, null), + stepMetaData = StepMetaData("set_read_only", actualHistory.stepMetaData!!.startTime, Step.StepStatus.COMPLETED), + policyRetryInfo = PolicyRetryInfoMetaData(false, 0), + info = mapOf("message" to SetReadOnlyStep.getSuccessMessage(indexName)), + ) assertEquals(expectedHistory, actualHistory) var historyIndexName = getIndexNamesOfPattern(".opendistro-ism-managed-index-history") @@ -161,15 +169,16 @@ class IndexStateManagementHistoryIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_shard_settings_1" val actionConfig = ReadOnlyAction(0) val states = listOf(State("ReadOnlyState", listOf(actionConfig), listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/NotificationActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/NotificationActionIT.kt index 6790d2ed6..ef6d7f175 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/NotificationActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/NotificationActionIT.kt @@ -32,34 +32,37 @@ class NotificationActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_testPolicyName" val notificationIndex = "notification_index" val clusterUri = System.getProperty("tests.rest.cluster").split(",")[0] - val destination = Destination( - type = DestinationType.CUSTOM_WEBHOOK, - chime = null, - slack = null, - customWebhook = CustomWebhook( - url = "$protocol://$clusterUri/$notificationIndex/_doc", - scheme = null, - host = null, - port = -1, - path = null, - queryParams = emptyMap(), - headerParams = mapOf("Content-Type" to "application/json"), - username = if (securityEnabled()) "admin" else null, - password = if (securityEnabled()) "admin" else null + val destination = + Destination( + type = DestinationType.CUSTOM_WEBHOOK, + chime = null, + slack = null, + customWebhook = + CustomWebhook( + url = "$protocol://$clusterUri/$notificationIndex/_doc", + scheme = null, + host = null, + port = -1, + path = null, + queryParams = emptyMap(), + 
headerParams = mapOf("Content-Type" to "application/json"), + username = if (securityEnabled()) "admin" else null, + password = if (securityEnabled()) "admin" else null, + ), ) - ) val messageTemplate = Script(ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, "{ \"testing\": 5 }", emptyMap()) val actionConfig = NotificationAction(destination = destination, channel = null, messageTemplate = messageTemplate, index = 0) val states = listOf(State(name = "NotificationState", actions = listOf(actionConfig), transitions = emptyList())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) @@ -78,7 +81,7 @@ class NotificationActionIT : IndexStateManagementRestTestCase() { ( client().makeRequest("GET", "$notificationIndex/_search") .asMap() as Map<String, Map<String, Map<String, Any>>> - )["hits"]!!["total"]!!["value"] + )["hits"]!!["total"]!!["value"], ) // Speed up to second execution where it will trigger the first execution of the action which @@ -92,7 +95,7 @@ class NotificationActionIT : IndexStateManagementRestTestCase() { ( client().makeRequest("GET", "$notificationIndex/_search") .asMap() as Map<String, Map<String, Map<String, Any>>> - )["hits"]!!["total"]!!["value"] + )["hits"]!!["total"]!!["value"], ) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/OpenActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/OpenActionIT.kt index d253806ba..4cbc07ce9 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/OpenActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/OpenActionIT.kt @@ -21,19 +21,21 @@ class OpenActionIT : IndexStateManagementRestTestCase() { val indexName = "${testIndexName}_index_1" val policyID = "${testIndexName}_testPolicyName_1" val actionConfig = OpenAction(0) - val states = listOf( - State("OpenState", listOf(actionConfig), listOf()) - ) - - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("OpenState", listOf(actionConfig), listOf()), + ) + + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) closeIndex(indexName) @@ -58,19 +60,21 @@ class OpenActionIT : IndexStateManagementRestTestCase() { val indexName = "${testIndexName}_index_2" val policyID = "${testIndexName}_testPolicyName_2" val actionConfig = OpenAction(0) - val states = listOf( - State("OpenState", listOf(actionConfig), listOf()) - ) - - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion =
1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("OpenState", listOf(actionConfig), listOf()), + ) + + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReadOnlyActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReadOnlyActionIT.kt index 8ecf52aeb..d8feb95e4 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReadOnlyActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReadOnlyActionIT.kt @@ -21,19 +21,21 @@ class ReadOnlyActionIT : IndexStateManagementRestTestCase() { val indexName = "${testIndexName}_index_1" val policyID = "${testIndexName}_testPolicyName_1" val actionConfig = ReadOnlyAction(0) - val states = listOf( - State("ReadOnlyState", listOf(actionConfig), listOf()) - ) - - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("ReadOnlyState", listOf(actionConfig), listOf()), + ) + + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReadWriteActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReadWriteActionIT.kt index e52786314..fb119a3a3 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReadWriteActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReadWriteActionIT.kt @@ -16,33 +16,34 @@ import java.time.temporal.ChronoUnit import java.util.Locale class ReadWriteActionIT : IndexStateManagementRestTestCase() { - private val testIndexName = javaClass.simpleName.lowercase(Locale.ROOT) fun `test basic workflow`() { val indexName = "${testIndexName}_index_1" val policyID = "${testIndexName}_testPolicyName_1" val actionConfig = ReadWriteAction(0) - val states = listOf( - State("ReadWriteState", listOf(actionConfig), listOf()) - ) + val states = + listOf( + State("ReadWriteState", listOf(actionConfig), listOf()), + ) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = 
randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, null) // Set index to read-only updateIndexSettings( indexName, - Settings.builder().put("index.blocks.write", true) + Settings.builder().put("index.blocks.write", true), ) assertEquals("true", getIndexBlocksWriteSetting(indexName)) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReplicaCountActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReplicaCountActionIT.kt index f88a094b8..7b8ccb015 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReplicaCountActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ReplicaCountActionIT.kt @@ -15,7 +15,6 @@ import java.time.temporal.ChronoUnit import java.util.Locale class ReplicaCountActionIT : IndexStateManagementRestTestCase() { - private val testIndexName = javaClass.simpleName.lowercase(Locale.ROOT) fun `test basic replica count`() { @@ -23,15 +22,16 @@ class ReplicaCountActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_testPolicyName_1" val actionConfig = ReplicaCountAction(10, 0) val states = listOf(State(name = "ReplicaCountState", actions = listOf(actionConfig), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) // create index defaults to 1 replica diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RolloverActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RolloverActionIT.kt index b627d2898..a45120b6c 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RolloverActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RolloverActionIT.kt @@ -10,9 +10,11 @@ import org.apache.hc.core5.http.io.entity.StringEntity import org.junit.Assert import org.opensearch.cluster.metadata.DataStream import org.opensearch.common.settings.Settings +import org.opensearch.common.unit.TimeValue import org.opensearch.core.common.unit.ByteSizeUnit import org.opensearch.core.common.unit.ByteSizeValue -import org.opensearch.common.unit.TimeValue +import org.opensearch.core.rest.RestStatus +import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.indexstatemanagement.IndexStateManagementRestTestCase import org.opensearch.indexmanagement.indexstatemanagement.model.ISMTemplate import org.opensearch.indexmanagement.indexstatemanagement.model.Policy @@ -26,15 +28,12 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ActionRetry import org.opensearch.indexmanagement.waitFor import org.opensearch.rest.RestRequest -import org.opensearch.core.rest.RestStatus -import 
org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.test.OpenSearchTestCase import java.time.Instant import java.time.temporal.ChronoUnit import java.util.Locale class RolloverActionIT : IndexStateManagementRestTestCase() { - private val testIndexName = javaClass.simpleName.lowercase(Locale.ROOT) fun `test rollover no condition`() { @@ -44,15 +43,16 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_testPolicyName_1" val actionConfig = RolloverAction(null, null, null, null, false, 0) val states = listOf(State(name = "RolloverAction", actions = listOf(actionConfig), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) // create index defaults @@ -91,22 +91,23 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { " \"$aliasName\": {\"is_write_index\": true}\n" + " }\n" + "}", - ContentType.APPLICATION_JSON - ) + ContentType.APPLICATION_JSON, + ), ) val policyID = "${testIndexName}_bwc" val actionConfig = RolloverAction(null, null, null, null, false, 0) val states = listOf(State(name = "RolloverAction", actions = listOf(actionConfig), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) addPolicyToIndex(firstIndex, policyID) @@ -135,15 +136,16 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_testPolicyName_byte_1" val actionConfig = RolloverAction(ByteSizeValue(10, ByteSizeUnit.BYTES), 1000000, null, null, false, 0) val states = listOf(State(name = "RolloverAction", actions = listOf(actionConfig), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) // create index defaults @@ -161,12 +163,12 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val info = getExplainManagedIndexMetaData(firstIndex).info as Map assertEquals( "Index rollover before it met the 
condition.", - AttemptRolloverStep.getPendingMessage(firstIndex), info["message"] + AttemptRolloverStep.getPendingMessage(firstIndex), info["message"], ) val conditions = info["conditions"] as Map assertEquals( "Did not have exclusively min size and min doc count conditions", - setOf(RolloverAction.MIN_SIZE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), conditions.keys + setOf(RolloverAction.MIN_SIZE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), conditions.keys, ) val minSize = conditions[RolloverAction.MIN_SIZE_FIELD] as Map val minDocCount = conditions[RolloverAction.MIN_DOC_COUNT_FIELD] as Map @@ -186,7 +188,7 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val conditions = info["conditions"] as Map assertEquals( "Did not have exclusively min size and min doc count conditions", - setOf(RolloverAction.MIN_SIZE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), conditions.keys + setOf(RolloverAction.MIN_SIZE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), conditions.keys, ) val minSize = conditions[RolloverAction.MIN_SIZE_FIELD] as Map val minDocCount = conditions[RolloverAction.MIN_DOC_COUNT_FIELD] as Map @@ -207,15 +209,16 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_primary_shard_1" val actionConfig = RolloverAction(null, null, null, ByteSizeValue(100, ByteSizeUnit.KB), false, 0) val states = listOf(State(name = "RolloverAction", actions = listOf(actionConfig), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) // create index defaults @@ -225,7 +228,7 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { alias = aliasName, replicas = "0", shards = "20", - settings = Settings.builder().put("store.stats_refresh_interval", "1s").build() + settings = Settings.builder().put("store.stats_refresh_interval", "1s").build(), ) val managedIndexConfig = getExistingManagedIndexConfig(firstIndex) @@ -258,12 +261,12 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val info = getExplainManagedIndexMetaData(firstIndex).info as Map assertEquals( "Index rollover before it met the condition.", - AttemptRolloverStep.getPendingMessage(firstIndex), info["message"] + AttemptRolloverStep.getPendingMessage(firstIndex), info["message"], ) val conditions = info["conditions"] as Map assertEquals( "Did not have exclusively min primary shard size condition", - setOf(RolloverAction.MIN_PRIMARY_SHARD_SIZE_FIELD), conditions.keys + setOf(RolloverAction.MIN_PRIMARY_SHARD_SIZE_FIELD), conditions.keys, ) val minPrimarySize = conditions[RolloverAction.MIN_PRIMARY_SHARD_SIZE_FIELD] as Map assertEquals("Did not have min size condition", "100kb", minPrimarySize["condition"]) @@ -281,14 +284,15 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { insertSampleData(index = firstIndex, docCount = 20, delay = 0, jsonString = "{ \"test_field\": \"${OpenSearchTestCase.randomAlphaOfLength(7000)}\" }", routing = "custom_routing") flush(firstIndex, true) forceMerge(firstIndex, "1") - val 
primaryShards = waitFor { - val primaryShards = (cat("shards/$firstIndex?format=json&bytes=b") as List>).filter { it["prirep"] == "p" } - // TODO seeing flakyness of multiple shards over 100kb, log out shards to further debug - logger.info("cat shards result: $primaryShards") - val primaryShardsOver100KB = primaryShards.filter { (it["store"] as String).toInt() > 100000 } - assertTrue("Shard over 100kb is not exactly 1", primaryShardsOver100KB.size == 1) - primaryShards - } + val primaryShards = + waitFor { + val primaryShards = (cat("shards/$firstIndex?format=json&bytes=b") as List>).filter { it["prirep"] == "p" } + // TODO seeing flakyness of multiple shards over 100kb, log out shards to further debug + logger.info("cat shards result: $primaryShards") + val primaryShardsOver100KB = primaryShards.filter { (it["store"] as String).toInt() > 100000 } + assertTrue("Shard over 100kb is not exactly 1", primaryShardsOver100KB.size == 1) + primaryShards + } primaryShardSizeBytes = primaryShards.maxOf { (it["store"] as String).toInt() } } @@ -300,7 +304,7 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val conditions = info["conditions"] as Map assertEquals( "Did not have exclusively min primary shard size conditions", - setOf(RolloverAction.MIN_PRIMARY_SHARD_SIZE_FIELD), conditions.keys + setOf(RolloverAction.MIN_PRIMARY_SHARD_SIZE_FIELD), conditions.keys, ) val minPrimaryShardSize = conditions[RolloverAction.MIN_PRIMARY_SHARD_SIZE_FIELD] as Map assertEquals("Did not have min primary shard size condition", "100kb", minPrimaryShardSize["condition"]) @@ -317,15 +321,16 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_testPolicyName_doc_1" val actionConfig = RolloverAction(null, 3, TimeValue.timeValueDays(2), null, false, 0) val states = listOf(State(name = "RolloverAction", actions = listOf(actionConfig), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) // create index defaults @@ -343,12 +348,12 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val info = getExplainManagedIndexMetaData(firstIndex).info as Map assertEquals( "Index rollover before it met the condition.", - AttemptRolloverStep.getPendingMessage(firstIndex), info["message"] + AttemptRolloverStep.getPendingMessage(firstIndex), info["message"], ) val conditions = info["conditions"] as Map assertEquals( "Did not have exclusively min age and min doc count conditions", - setOf(RolloverAction.MIN_INDEX_AGE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), conditions.keys + setOf(RolloverAction.MIN_INDEX_AGE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), conditions.keys, ) val minAge = conditions[RolloverAction.MIN_INDEX_AGE_FIELD] as Map val minDocCount = conditions[RolloverAction.MIN_DOC_COUNT_FIELD] as Map @@ -368,7 +373,7 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val conditions = info["conditions"] as Map assertEquals( "Did not have exclusively min age and min doc count conditions", - 
setOf(RolloverAction.MIN_INDEX_AGE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), conditions.keys + setOf(RolloverAction.MIN_INDEX_AGE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), conditions.keys, ) val minAge = conditions[RolloverAction.MIN_INDEX_AGE_FIELD] as Map val minDocCount = conditions[RolloverAction.MIN_DOC_COUNT_FIELD] as Map @@ -391,15 +396,16 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val actionConfig = RolloverAction(null, 3, TimeValue.timeValueDays(2), null, false, 0) actionConfig.configRetry = ActionRetry(0) val states = listOf(State(name = "RolloverAction", actions = listOf(actionConfig), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(index1, policyID) changeAlias(index1, alias1, "add") @@ -420,16 +426,17 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val info = getExplainManagedIndexMetaData(index1).info as Map assertEquals( "Index rollover not stopped by pre-check.", - AttemptRolloverStep.getFailedPreCheckMessage(index1), info["message"] + AttemptRolloverStep.getFailedPreCheckMessage(index1), info["message"], ) } updateIndexSetting(index1, ManagedIndexSettings.ROLLOVER_SKIP.key, "true") - val response = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestRetryFailedManagedIndexAction.RETRY_BASE_URI}/$index1" - ) + val response = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestRetryFailedManagedIndexAction.RETRY_BASE_URI}/$index1", + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) updateManagedIndexConfigStartTime(managedIndexConfig) @@ -437,7 +444,7 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val info = getExplainManagedIndexMetaData(index1).info as Map assertEquals( "Index rollover not skip.", - AttemptRolloverStep.getSkipRolloverMessage(index1), info["message"] + AttemptRolloverStep.getSkipRolloverMessage(index1), info["message"], ) } } @@ -449,16 +456,17 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { // Create the rollover policy val rolloverAction = RolloverAction(null, null, null, null, false, 0) val states = listOf(State(name = "default", actions = listOf(rolloverAction), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "rollover policy description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states, - ismTemplate = listOf(ISMTemplate(listOf(dataStreamName), 100, Instant.now().truncatedTo(ChronoUnit.MILLIS))) - ) + val policy = + Policy( + id = policyID, + description = "rollover policy description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ismTemplate = listOf(ISMTemplate(listOf(dataStreamName), 100, 
Instant.now().truncatedTo(ChronoUnit.MILLIS))), + ) createPolicy(policy, policyID) // Create the data stream @@ -466,7 +474,7 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { client().makeRequest( "PUT", "/_index_template/rollover-data-stream-template", - StringEntity("{ \"index_patterns\": [ \"$dataStreamName\" ], \"data_stream\": { } }", ContentType.APPLICATION_JSON) + StringEntity("{ \"index_patterns\": [ \"$dataStreamName\" ], \"data_stream\": { } }", ContentType.APPLICATION_JSON), ) client().makeRequest("PUT", "/_data_stream/$dataStreamName") @@ -483,7 +491,7 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { assertEquals( "Data stream did not rollover.", AttemptRolloverStep.getSuccessDataStreamRolloverMessage(dataStreamName, firstIndexName), - info["message"] + info["message"], ) assertNull("Should not have conditions if none specified", info["conditions"]) } @@ -508,16 +516,17 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { // Create the rollover policy val rolloverAction = RolloverAction(null, 3, TimeValue.timeValueDays(2), null, false, 0) val states = listOf(State(name = "default", actions = listOf(rolloverAction), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "rollover policy description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states, - ismTemplate = listOf(ISMTemplate(listOf(dataStreamName), 100, Instant.now().truncatedTo(ChronoUnit.MILLIS))) - ) + val policy = + Policy( + id = policyID, + description = "rollover policy description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ismTemplate = listOf(ISMTemplate(listOf(dataStreamName), 100, Instant.now().truncatedTo(ChronoUnit.MILLIS))), + ) createPolicy(policy, policyID) // Create the data stream @@ -525,7 +534,7 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { client().makeRequest( "PUT", "/_index_template/rollover-data-stream-template", - StringEntity("{ \"index_patterns\": [ \"$dataStreamName\" ], \"data_stream\": { } }", ContentType.APPLICATION_JSON) + StringEntity("{ \"index_patterns\": [ \"$dataStreamName\" ], \"data_stream\": { } }", ContentType.APPLICATION_JSON), ) client().makeRequest("PUT", "/_data_stream/$dataStreamName") @@ -543,14 +552,14 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { assertEquals( "Index rollover before it met the condition.", AttemptRolloverStep.getPendingMessage(firstIndexName), - info["message"] + info["message"], ) val conditions = info["conditions"] as Map assertEquals( "Did not have exclusively min age and min doc count conditions", setOf(RolloverAction.MIN_INDEX_AGE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), - conditions.keys + conditions.keys, ) val minAge = conditions[RolloverAction.MIN_INDEX_AGE_FIELD] as Map @@ -571,14 +580,14 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { assertEquals( "Data stream did not rollover", AttemptRolloverStep.getSuccessDataStreamRolloverMessage(dataStreamName, firstIndexName), - info["message"] + info["message"], ) val conditions = info["conditions"] as Map assertEquals( "Did not have exclusively min age and min doc count conditions", setOf(RolloverAction.MIN_INDEX_AGE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), - conditions.keys + 
conditions.keys, ) val minAge = conditions[RolloverAction.MIN_INDEX_AGE_FIELD] as Map @@ -603,15 +612,16 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_testPolicyName_1" val actionConfig = RolloverAction(null, null, null, null, false, 0) val states = listOf(State(name = "RolloverAction", actions = listOf(actionConfig), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) // create index defaults @@ -645,15 +655,16 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_testPolicyName_doc_1" val actionConfig = RolloverAction(null, 3, TimeValue.timeValueDays(2), null, true, 0) val states = listOf(State(name = "RolloverAction", actions = listOf(actionConfig), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) // create index defaults @@ -662,10 +673,11 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { // Add a bunch of aliases changeAlias( firstIndex, "test_alias1", "add", routing = 0, searchRouting = 1, indexRouting = 2, - filter = """ - { "term": { "user.id": "kimchy" } } + filter = + """ + { "term": { "user.id": "kimchy" } } """.trimIndent(), - isHidden = false + isHidden = false, ) changeAlias(firstIndex, "test_alias2", "add") changeAlias(firstIndex, "test_alias3", "add") @@ -682,12 +694,12 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val info = getExplainManagedIndexMetaData(firstIndex).info as Map assertEquals( "Index rollover before it met the condition.", - AttemptRolloverStep.getPendingMessage(firstIndex), info["message"] + AttemptRolloverStep.getPendingMessage(firstIndex), info["message"], ) val conditions = info["conditions"] as Map assertEquals( "Did not have exclusively min age and min doc count conditions", - setOf(RolloverAction.MIN_INDEX_AGE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), conditions.keys + setOf(RolloverAction.MIN_INDEX_AGE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), conditions.keys, ) val minAge = conditions[RolloverAction.MIN_INDEX_AGE_FIELD] as Map val minDocCount = conditions[RolloverAction.MIN_DOC_COUNT_FIELD] as Map @@ -709,7 +721,7 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val conditions = info["conditions"] as Map assertEquals( "Did not have exclusively min age and min doc count conditions", - setOf(RolloverAction.MIN_INDEX_AGE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), conditions.keys + 
setOf(RolloverAction.MIN_INDEX_AGE_FIELD, RolloverAction.MIN_DOC_COUNT_FIELD), conditions.keys, ) val minAge = conditions[RolloverAction.MIN_INDEX_AGE_FIELD] as Map val minDocCount = conditions[RolloverAction.MIN_DOC_COUNT_FIELD] as Map @@ -742,15 +754,16 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_testPolicyName_1" val actionConfig = RolloverAction(null, 1, null, null, false, 0) val states = listOf(State(name = "RolloverAction", actions = listOf(actionConfig), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(firstIndex, policyID, aliasName) @@ -770,21 +783,22 @@ class RolloverActionIT : IndexStateManagementRestTestCase() { assertEquals("Index did not rollover.", AttemptRolloverStep.getSuccessMessage(firstIndex), info["message"]) } // Manually produce transaction failure - val response = client().makeRequest( - "POST", "$INDEX_MANAGEMENT_INDEX/_update/${managedIndexConfig.id}%23metadata", - StringEntity( - "{\n" + - " \"script\": {\n" + - " \"source\": \"ctx._source.managed_index_metadata.step.step_status = params.step_status\",\n" + - " \"lang\": \"painless\",\n" + - " \"params\": {\n" + - " \"step_status\": \"starting\"\n" + - " }\n" + - " }\n" + - "}", - ContentType.APPLICATION_JSON + val response = + client().makeRequest( + "POST", "$INDEX_MANAGEMENT_INDEX/_update/${managedIndexConfig.id}%23metadata", + StringEntity( + "{\n" + + " \"script\": {\n" + + " \"source\": \"ctx._source.managed_index_metadata.step.step_status = params.step_status\",\n" + + " \"lang\": \"painless\",\n" + + " \"params\": {\n" + + " \"step_status\": \"starting\"\n" + + " }\n" + + " }\n" + + "}", + ContentType.APPLICATION_JSON, + ), ) - ) assertEquals("Request failed", RestStatus.OK, response.restStatus()) // Execute again to see the transaction failure diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RollupActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RollupActionIT.kt index 37b507151..26c9bae16 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RollupActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RollupActionIT.kt @@ -33,48 +33,54 @@ import java.time.temporal.ChronoUnit import java.util.Locale class RollupActionIT : IndexStateManagementRestTestCase() { - private val testIndexName = javaClass.simpleName.lowercase(Locale.ROOT) fun `test rollup action`() { val indexName = "${testIndexName}_index_basic" val policyID = "${testIndexName}_policy_basic" - val rollup = ISMRollup( - description = "basic search test", - targetIndex = "target_rollup_search", - pageSize = 100, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", targetField = 
"passenger_count", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) + val rollup = + ISMRollup( + description = "basic search test", + targetIndex = "target_rollup_search", + pageSize = 100, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), + ), + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", targetField = "passenger_count", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())), ), - RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())) ) - ) val actionConfig = RollupAction(rollup, 0) - val states = listOf( - State("rollup", listOf(actionConfig), listOf()) - ) - val sourceIndexMappingString = "\"properties\": {\"tpep_pickup_datetime\": { \"type\": \"date\" }, \"RatecodeID\": { \"type\": " + - "\"keyword\" }, \"PULocationID\": { \"type\": \"keyword\" }, \"passenger_count\": { \"type\": \"integer\" }, \"total_amount\": " + - "{ \"type\": \"double\" }}" - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("rollup", listOf(actionConfig), listOf()), + ) + val sourceIndexMappingString = + "\"properties\": {\"tpep_pickup_datetime\": { \"type\": \"date\" }, \"RatecodeID\": { \"type\": " + + "\"keyword\" }, \"PULocationID\": { \"type\": \"keyword\" }, \"passenger_count\": { \"type\": \"integer\" }, \"total_amount\": " + + "{ \"type\": \"double\" }}" + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID, mapping = sourceIndexMappingString) @@ -85,47 +91,52 @@ class RollupActionIT : IndexStateManagementRestTestCase() { val dataStreamName = "${testIndexName}_data_stream" val policyID = "${testIndexName}_rollup_policy" - val rollup = ISMRollup( - description = "data stream rollup", - targetIndex = "target_rollup_search", - pageSize = 100, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", - targetField = "passenger_count", - metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()) + val rollup = + ISMRollup( + description = "data stream rollup", + targetIndex = "target_rollup_search", + pageSize = 100, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), + ), + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", + targetField = "passenger_count", + metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()), + ), + RollupMetrics( + sourceField = "total_amount", + targetField = "total_amount", + metrics = listOf(Max(), Min()), + ), ), - RollupMetrics( - 
sourceField = "total_amount", - targetField = "total_amount", - metrics = listOf(Max(), Min()) - ) ) - ) // Create an ISM policy to rollup backing indices of a data stream. val actionConfig = RollupAction(rollup, 0) val states = listOf(State("rollup", listOf(actionConfig), listOf())) - val policy = Policy( - id = policyID, - description = "data stream rollup policy", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states, - ismTemplate = listOf(ISMTemplate(listOf(dataStreamName), 100, Instant.now().truncatedTo(ChronoUnit.MILLIS))) - ) + val policy = + Policy( + id = policyID, + description = "data stream rollup policy", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ismTemplate = listOf(ISMTemplate(listOf(dataStreamName), 100, Instant.now().truncatedTo(ChronoUnit.MILLIS))), + ) createPolicy(policy, policyID) - val sourceIndexMappingString = "\"properties\": {\"tpep_pickup_datetime\": { \"type\": \"date\" }, \"RatecodeID\": { \"type\": " + - "\"keyword\" }, \"PULocationID\": { \"type\": \"keyword\" }, \"passenger_count\": { \"type\": \"integer\" }, \"total_amount\": " + - "{ \"type\": \"double\" }}" + val sourceIndexMappingString = + "\"properties\": {\"tpep_pickup_datetime\": { \"type\": \"date\" }, \"RatecodeID\": { \"type\": " + + "\"keyword\" }, \"PULocationID\": { \"type\": \"keyword\" }, \"passenger_count\": { \"type\": \"integer\" }, \"total_amount\": " + + "{ \"type\": \"double\" }}" // Create an index template for a data stream with the given source index mapping. client().makeRequest( @@ -136,8 +147,8 @@ class RollupActionIT : IndexStateManagementRestTestCase() { "\"index_patterns\": [ \"$dataStreamName\" ], " + "\"data_stream\": { \"timestamp_field\": { \"name\": \"tpep_pickup_datetime\" } }, " + "\"template\": { \"mappings\": { $sourceIndexMappingString } } }", - ContentType.APPLICATION_JSON - ) + ContentType.APPLICATION_JSON, + ), ) client().makeRequest("PUT", "/_data_stream/$dataStreamName") @@ -149,47 +160,52 @@ class RollupActionIT : IndexStateManagementRestTestCase() { fun `test data stream rollup action with scripted targetIndex`() { val dataStreamName = "${testIndexName}_data_stream" val policyID = "${testIndexName}_rollup_policy" - val rollup = ISMRollup( - description = "data stream rollup", - targetIndex = "rollup_{{ctx.source_index}}", - pageSize = 100, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", - targetField = "passenger_count", - metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()) + val rollup = + ISMRollup( + description = "data stream rollup", + targetIndex = "rollup_{{ctx.source_index}}", + pageSize = 100, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), + ), + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", + targetField = "passenger_count", + metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()), + ), + RollupMetrics( + sourceField = "total_amount", + targetField = "total_amount", + metrics = listOf(Max(), 
Min()), + ), ), - RollupMetrics( - sourceField = "total_amount", - targetField = "total_amount", - metrics = listOf(Max(), Min()) - ) ) - ) // Create an ISM policy to rollup backing indices of a data stream. val actionConfig = RollupAction(rollup, 0) val states = listOf(State("rollup", listOf(actionConfig), listOf())) - val policy = Policy( - id = policyID, - description = "data stream rollup policy", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states, - ismTemplate = listOf(ISMTemplate(listOf(dataStreamName), 100, Instant.now().truncatedTo(ChronoUnit.MILLIS))) - ) + val policy = + Policy( + id = policyID, + description = "data stream rollup policy", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ismTemplate = listOf(ISMTemplate(listOf(dataStreamName), 100, Instant.now().truncatedTo(ChronoUnit.MILLIS))), + ) createPolicy(policy, policyID) - val sourceIndexMappingString = "\"properties\": {\"tpep_pickup_datetime\": { \"type\": \"date\" }, \"RatecodeID\": { \"type\": " + - "\"keyword\" }, \"PULocationID\": { \"type\": \"keyword\" }, \"passenger_count\": { \"type\": \"integer\" }, \"total_amount\": " + - "{ \"type\": \"double\" }}" + val sourceIndexMappingString = + "\"properties\": {\"tpep_pickup_datetime\": { \"type\": \"date\" }, \"RatecodeID\": { \"type\": " + + "\"keyword\" }, \"PULocationID\": { \"type\": \"keyword\" }, \"passenger_count\": { \"type\": \"integer\" }, \"total_amount\": " + + "{ \"type\": \"double\" }}" // Create an index template for a data stream with the given source index mapping. 
client().makeRequest( @@ -200,8 +216,8 @@ class RollupActionIT : IndexStateManagementRestTestCase() { "\"index_patterns\": [ \"$dataStreamName\" ], " + "\"data_stream\": { \"timestamp_field\": { \"name\": \"tpep_pickup_datetime\" } }, " + "\"template\": { \"mappings\": { $sourceIndexMappingString } } }", - ContentType.APPLICATION_JSON - ) + ContentType.APPLICATION_JSON, + ), ) client().makeRequest("PUT", "/_data_stream/$dataStreamName") @@ -214,43 +230,50 @@ class RollupActionIT : IndexStateManagementRestTestCase() { fun `test rollup action failure`() { val indexName = "${testIndexName}_index_failure" val policyID = "${testIndexName}_policy_failure" - val ismRollup = ISMRollup( - description = "basic search test", - targetIndex = "target_rollup_search", - pageSize = 100, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", targetField = "passenger_count", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) - ) + val ismRollup = + ISMRollup( + description = "basic search test", + targetIndex = "target_rollup_search", + pageSize = 100, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), + ), + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", targetField = "passenger_count", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + ), ) - ) val rollup = ismRollup.toRollup(indexName) val rollupId = rollup.id val actionConfig = RollupAction(ismRollup, 0) - val states = listOf( - State("rollup", listOf(actionConfig), listOf()) - ) - val sourceIndexMappingString = "\"properties\": {\"tpep_pickup_datetime\": { \"type\": \"date\" }, \"RatecodeID\": { \"type\": " + - "\"keyword\" }, \"passenger_count\": { \"type\": \"integer\" }, \"total_amount\": " + - "{ \"type\": \"double\" }}" - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("rollup", listOf(actionConfig), listOf()), + ) + val sourceIndexMappingString = + "\"properties\": {\"tpep_pickup_datetime\": { \"type\": \"date\" }, \"RatecodeID\": { \"type\": " + + "\"keyword\" }, \"passenger_count\": { \"type\": \"integer\" }, \"total_amount\": " + + "{ \"type\": \"double\" }}" + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID, mapping = sourceIndexMappingString) @@ -265,7 +288,7 @@ class RollupActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( AttemptCreateRollupJobStep.getSuccessMessage(rollupId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } @@ -275,7 +298,7 @@ class RollupActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( 
WaitForRollupCompletionStep.getJobFailedMessage(rollupId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } } @@ -283,42 +306,49 @@ class RollupActionIT : IndexStateManagementRestTestCase() { fun `test rollup action create failure due to wildcards in target_index`() { val indexName = "${testIndexName}_index_failure" val policyID = "${testIndexName}_policy_failure" - val rollup = ISMRollup( - description = "basic search test", - targetIndex = "target_with_wildcard*", - pageSize = 100, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", targetField = "passenger_count", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) - ) + val rollup = + ISMRollup( + description = "basic search test", + targetIndex = "target_with_wildcard*", + pageSize = 100, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), + ), + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", targetField = "passenger_count", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + ), ) - ) val rollupId = rollup.toRollup(indexName).id val actionConfig = RollupAction(rollup, 0) - val states = listOf( - State("rollup", listOf(actionConfig), listOf()) - ) - val sourceIndexMappingString = "\"properties\": {\"tpep_pickup_datetime\": { \"type\": \"date\" }, \"RatecodeID\": { \"type\": " + - "\"keyword\" }, \"passenger_count\": { \"type\": \"integer\" }, \"total_amount\": " + - "{ \"type\": \"double\" }}" - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("rollup", listOf(actionConfig), listOf()), + ) + val sourceIndexMappingString = + "\"properties\": {\"tpep_pickup_datetime\": { \"type\": \"date\" }, \"RatecodeID\": { \"type\": " + + "\"keyword\" }, \"passenger_count\": { \"type\": \"integer\" }, \"total_amount\": " + + "{ \"type\": \"double\" }}" + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID, mapping = sourceIndexMappingString) @@ -333,7 +363,7 @@ class RollupActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( AttemptCreateRollupJobStep.getFailedMessage(rollupId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } } @@ -341,34 +371,40 @@ class RollupActionIT : IndexStateManagementRestTestCase() { fun `test rollup action failure and retry failed step`() { val indexName = "${testIndexName}_index_retry" val policyID = "${testIndexName}_policy_retry" - val ismRollup = ISMRollup( - description = "basic search test", - targetIndex = "target_rollup_search", - pageSize = 100, - dimensions = 
listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", targetField = "passenger_count", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) - ) + val ismRollup = + ISMRollup( + description = "basic search test", + targetIndex = "target_rollup_search", + pageSize = 100, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), + ), + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", targetField = "passenger_count", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + ), ) - ) val rollup = ismRollup.toRollup(indexName) val rollupId = rollup.id - val policyString = "{\"policy\":{\"description\":\"$testIndexName description\",\"default_state\":\"rollup\",\"states\":[{\"name\":\"rollup\"," + - "\"actions\":[{\"retry\":{\"count\":2,\"backoff\":\"constant\",\"delay\":\"10ms\"},\"rollup\":{\"ism_rollup\":" + - "${ismRollup.toJsonString()}}}],\"transitions\":[]}]}}" - - val sourceIndexMappingString = "\"properties\": {\"tpep_pickup_datetime\": { \"type\": \"date\" }, \"RatecodeID\": { \"type\": " + - "\"keyword\" }, \"passenger_count\": { \"type\": \"integer\" }, \"total_amount\": " + - "{ \"type\": \"double\" }}" + val policyString = + "{\"policy\":{\"description\":\"$testIndexName description\",\"default_state\":\"rollup\",\"states\":[{\"name\":\"rollup\"," + + "\"actions\":[{\"retry\":{\"count\":2,\"backoff\":\"constant\",\"delay\":\"10ms\"},\"rollup\":{\"ism_rollup\":" + + "${ismRollup.toJsonString()}}}],\"transitions\":[]}]}}" + + val sourceIndexMappingString = + "\"properties\": {\"tpep_pickup_datetime\": { \"type\": \"date\" }, \"RatecodeID\": { \"type\": " + + "\"keyword\" }, \"passenger_count\": { \"type\": \"integer\" }, \"total_amount\": " + + "{ \"type\": \"double\" }}" createPolicyJson(policyString, policyID) createIndex(indexName, policyID, mapping = sourceIndexMappingString) @@ -383,7 +419,7 @@ class RollupActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( AttemptCreateRollupJobStep.getSuccessMessage(rollupId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } @@ -392,7 +428,7 @@ class RollupActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( WaitForRollupCompletionStep.getJobProcessingMessage(rollupId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } @@ -402,7 +438,7 @@ class RollupActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( WaitForRollupCompletionStep.getJobFailedMessage(rollupId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } } @@ -421,7 +457,7 @@ class RollupActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( AttemptCreateRollupJobStep.getSuccessMessage(rollupId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } @@ -437,7 +473,7 @@ class RollupActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( 
WaitForRollupCompletionStep.getJobCompletionMessage(rollupId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ShrinkActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ShrinkActionIT.kt index 7b08d5ad7..2c23a2e4f 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ShrinkActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/ShrinkActionIT.kt @@ -39,7 +39,6 @@ import java.time.Instant import java.time.temporal.ChronoUnit class ShrinkActionIT : IndexStateManagementRestTestCase() { - @Suppress("UnusedPrivateMember") @Before private fun disableJobIndexShardRelocation() { @@ -49,7 +48,8 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { updateIndexSetting(INDEX_MANAGEMENT_INDEX, "routing.allocation.enable", "none") // When doing remote testing, the docker image seems to keep the disk free space very low, causing the shrink action // to not be able to find a node to shrink onto. Lowering these watermarks avoids that. - val request = """ + val request = + """ { "persistent": { "${CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.key}": "5b", @@ -57,11 +57,12 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { "${CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.key}": "15b" } } - """.trimIndent() - val res = client().makeRequest( - "PUT", "_cluster/settings", emptyMap(), - StringEntity(request, ContentType.APPLICATION_JSON) - ) + """.trimIndent() + val res = + client().makeRequest( + "PUT", "_cluster/settings", emptyMap(), + StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Request failed", RestStatus.OK, res.restStatus()) } @@ -73,15 +74,16 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_testPolicyName_1" val shrinkAction = randomShrinkAction() val states = listOf(State("ShrinkState", listOf(shrinkAction), listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 11L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 11L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID, null, "0", "3", "") @@ -97,35 +99,38 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { val aliases = listOf(Alias("test-alias1"), Alias("test-alias2").filter(QueryBuilders.termQuery("foo", "bar")).writeIndex(true)) val targetIndexTemplate = Script(ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, "{{ctx.index}}$testIndexSuffix", mapOf()) return when (choice) { - 0 -> ShrinkAction( - numNewShards = newShards, - maxShardSize = null, - percentageOfSourceShards = null, - targetIndexTemplate = targetIndexTemplate, - aliases = aliases, - forceUnsafe = true, - index = 0 - ) - - 1 -> ShrinkAction( - numNewShards = null, - maxShardSize = newMaxShardSize, - percentageOfSourceShards = null, - targetIndexTemplate = targetIndexTemplate, - aliases = aliases, - forceUnsafe 
= true, - index = 0 - ) - - 2 -> ShrinkAction( - numNewShards = null, - maxShardSize = null, - percentageOfSourceShards = newPercentageOfSourceShards, - targetIndexTemplate = targetIndexTemplate, - aliases = aliases, - forceUnsafe = true, - index = 0 - ) + 0 -> + ShrinkAction( + numNewShards = newShards, + maxShardSize = null, + percentageOfSourceShards = null, + targetIndexTemplate = targetIndexTemplate, + aliases = aliases, + forceUnsafe = true, + index = 0, + ) + + 1 -> + ShrinkAction( + numNewShards = null, + maxShardSize = newMaxShardSize, + percentageOfSourceShards = null, + targetIndexTemplate = targetIndexTemplate, + aliases = aliases, + forceUnsafe = true, + index = 0, + ) + + 2 -> + ShrinkAction( + numNewShards = null, + maxShardSize = null, + percentageOfSourceShards = newPercentageOfSourceShards, + targetIndexTemplate = targetIndexTemplate, + aliases = aliases, + forceUnsafe = true, + index = 0, + ) else -> { error("Invalid choice") @@ -154,7 +159,7 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { waitFor(Instant.ofEpochSecond(60)) { assertEquals( targetIndexName, - getExplainManagedIndexMetaData(indexName).actionMetaData!!.actionProperties!!.shrinkActionProperties!!.targetIndexName + getExplainManagedIndexMetaData(indexName).actionMetaData!!.actionProperties!!.shrinkActionProperties!!.targetIndexName, ) assertEquals("true", getIndexBlocksWriteSetting(indexName)) val nodeName = getExplainManagedIndexMetaData(indexName).actionMetaData!!.actionProperties!!.shrinkActionProperties!!.nodeName @@ -167,7 +172,7 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { assertEquals(nodeToShrink, settings["index.routing.allocation.require._name"]) assertEquals( AttemptMoveShardsStep.getSuccessMessage(nodeToShrink), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } val nodeToShrink = @@ -178,7 +183,7 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { waitFor(Instant.ofEpochSecond(60)) { assertEquals( WaitForMoveShardsStep.getSuccessMessage(nodeToShrink), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } @@ -190,7 +195,7 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { assertEquals(Step.StepStatus.COMPLETED, getExplainManagedIndexMetaData(indexName).stepMetaData?.stepStatus) assertEquals( AttemptShrinkStep.getSuccessMessage(targetIndexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } @@ -201,7 +206,7 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { assertTrue(getIndexShards(targetIndexName).size == 2) assertEquals( WaitForShrinkStep.SUCCESS_MESSAGE, - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) assertEquals("Write block setting was not reset after successful shrink", "true", getIndexBlocksWriteSetting(indexName)) val aliases = getAlias(targetIndexName, "") @@ -216,15 +221,16 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_testPolicyName_4" val shrinkAction = randomShrinkAction() val states = listOf(State("ShrinkState", listOf(shrinkAction), listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 11L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - 
errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 11L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID, null, "0", "3", "") @@ -233,7 +239,7 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { logger.info("Excluded node: $excludedNode") updateIndexSettings( indexName, - Settings.builder().put("index.routing.allocation.exclude._name", excludedNode) + Settings.builder().put("index.routing.allocation.exclude._name", excludedNode), ) assertShrinkActionRun(indexName, policyID, excludedNode) @@ -242,57 +248,62 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { @Suppress("UNCHECKED_CAST") fun `test switch aliases`() { val indexName = "${testIndexName}_index_4" - val aliasToSwitch = Alias("${indexName}_alias_to_switch") - .writeIndex(false) - .isHidden(false) - .filter("""{"term":{"switch":"switch"}}""") - .routing("1") - - val aliasToOverride = Alias("${indexName}_alias_to_override") - .writeIndex(true) - .isHidden(false) - .filter("""{"term":{"overridden":"overridden"}}""") - .routing("2") - - val aliasToAdd = Alias("${indexName}_alias_to_add") - .writeIndex(false) - .isHidden(false) - .filter("""{"term":{"add":"add"}}""") - .routing("3") + val aliasToSwitch = + Alias("${indexName}_alias_to_switch") + .writeIndex(false) + .isHidden(false) + .filter("""{"term":{"switch":"switch"}}""") + .routing("1") + + val aliasToOverride = + Alias("${indexName}_alias_to_override") + .writeIndex(true) + .isHidden(false) + .filter("""{"term":{"overridden":"overridden"}}""") + .routing("2") + + val aliasToAdd = + Alias("${indexName}_alias_to_add") + .writeIndex(false) + .isHidden(false) + .filter("""{"term":{"add":"add"}}""") + .routing("3") val policyID = "${testIndexName}_testPolicyName_3" - val shrinkAction = ShrinkAction( - numNewShards = null, - maxShardSize = null, - percentageOfSourceShards = 0.5, - targetIndexTemplate = Script(ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, "{{ctx.index}}$testIndexSuffix", mapOf()), - aliases = listOf(aliasToOverride, aliasToAdd), - switchAliases = true, - forceUnsafe = true, - index = 0 - ) + val shrinkAction = + ShrinkAction( + numNewShards = null, + maxShardSize = null, + percentageOfSourceShards = 0.5, + targetIndexTemplate = Script(ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, "{{ctx.index}}$testIndexSuffix", mapOf()), + aliases = listOf(aliasToOverride, aliasToAdd), + switchAliases = true, + forceUnsafe = true, + index = 0, + ) val states = listOf(State("ShrinkState", listOf(shrinkAction), listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 11L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 11L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID, null, "0", "3", "") changeAlias( index = indexName, alias = aliasToSwitch.name(), action = 
"add", filter = aliasToSwitch.filter(), isWriteIndex = aliasToSwitch.writeIndex(), isHidden = aliasToSwitch.isHidden, - routing = aliasToSwitch.indexRouting().toInt(), indexRouting = aliasToSwitch.indexRouting().toInt(), searchRouting = aliasToSwitch.searchRouting().toInt() + routing = aliasToSwitch.indexRouting().toInt(), indexRouting = aliasToSwitch.indexRouting().toInt(), searchRouting = aliasToSwitch.searchRouting().toInt(), ) changeAlias( index = indexName, alias = aliasToOverride.name(), action = "add", filter = aliasToOverride.filter(), isWriteIndex = false, isHidden = aliasToOverride.isHidden, - routing = aliasToOverride.indexRouting().toInt(), indexRouting = aliasToOverride.indexRouting().toInt(), searchRouting = aliasToOverride.searchRouting().toInt() + routing = aliasToOverride.indexRouting().toInt(), indexRouting = aliasToOverride.indexRouting().toInt(), searchRouting = aliasToOverride.searchRouting().toInt(), ) insertSampleData(indexName, 3) @@ -317,7 +328,7 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { assertEquals(nodeToShrink, settings["index.routing.allocation.require._name"]) assertEquals( AttemptMoveShardsStep.getSuccessMessage(nodeToShrink), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } @@ -328,7 +339,7 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { waitFor(Instant.ofEpochSecond(60)) { assertEquals( WaitForMoveShardsStep.getSuccessMessage(nodeToShrink), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } // Wait for move should finish before this. Starts AttemptShrinkStep @@ -337,7 +348,7 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { assertTrue("Target index is not created", indexExists(targetIndexName)) assertEquals( AttemptShrinkStep.getSuccessMessage(targetIndexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } @@ -382,26 +393,28 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_testPolicyName_shard_noop" // Create a Policy with one State that only preforms a force_merge Action - val shrinkAction = ShrinkAction( - numNewShards = null, - maxShardSize = null, - percentageOfSourceShards = 0.5, - targetIndexTemplate = Script(ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, "{{ctx.index}}$testIndexSuffix", mapOf()), - aliases = null, - forceUnsafe = true, - index = 0 - ) + val shrinkAction = + ShrinkAction( + numNewShards = null, + maxShardSize = null, + percentageOfSourceShards = 0.5, + targetIndexTemplate = Script(ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, "{{ctx.index}}$testIndexSuffix", mapOf()), + aliases = null, + forceUnsafe = true, + index = 0, + ) val states = listOf(State("ShrinkState", listOf(shrinkAction), listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 11L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 11L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) 
createIndex(indexName, policyID, null, "0", "1", "") @@ -424,12 +437,12 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { assertEquals( "Did not get the no-op due to single primary shard message", AttemptMoveShardsStep.ONE_PRIMARY_SHARD_MESSAGE, - metadata.info?.get("message") + metadata.info?.get("message"), ) assertEquals( "Was not on the last step after no-op due to single primary shard", WaitForShrinkStep.name, - metadata.stepMetaData?.name + metadata.stepMetaData?.name, ) } } @@ -441,15 +454,16 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_with_replicas" val shrinkAction = randomShrinkAction() val states = listOf(State("ShrinkState", listOf(shrinkAction), listOf())) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 11L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 11L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID, null, "1", "3", "") @@ -457,11 +471,12 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { } fun `test retries from first step`() { - val testPolicy = """ - {"policy":{"description":"Default policy","default_state":"Shrink","states":[ - {"name":"Shrink","actions":[{"retry":{"count":2,"backoff":"constant","delay":"1s"},"shrink": - {"num_new_shards":1, "target_index_name_template":{"source": "{{ctx.index}}_shrink_test"}, "force_unsafe": "true"}}],"transitions":[]}]}} - """.trimIndent() + val testPolicy = + """ + {"policy":{"description":"Default policy","default_state":"Shrink","states":[ + {"name":"Shrink","actions":[{"retry":{"count":2,"backoff":"constant","delay":"1s"},"shrink": + {"num_new_shards":1, "target_index_name_template":{"source": "{{ctx.index}}_shrink_test"}, "force_unsafe": "true"}}],"transitions":[]}]}} + """.trimIndent() val logger = LogManager.getLogger(::ShrinkActionIT) val indexName = "${testIndexName}_retry" val policyID = "${testIndexName}_testPolicyName_retry" @@ -490,7 +505,7 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { assertEquals(nodeToShrink, settings["index.routing.allocation.require._name"]) assertEquals( AttemptMoveShardsStep.getSuccessMessage(nodeToShrink), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } var nodeToShrink = getExplainManagedIndexMetaData(indexName).actionMetaData!!.actionProperties!!.shrinkActionProperties!!.nodeName @@ -499,7 +514,7 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { waitFor(Instant.ofEpochSecond(60)) { assertEquals( WaitForMoveShardsStep.getSuccessMessage(nodeToShrink), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } // Create an index with the target index name so the AttemptShrinkStep fails @@ -516,7 +531,7 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { assertFalse("Did not clear index write block setting.", settings.containsKey("index.blocks.writes")) assertNull( "Did not clear shrink action properties", - 
getExplainManagedIndexMetaData(indexName).actionMetaData!!.actionProperties!!.shrinkActionProperties + getExplainManagedIndexMetaData(indexName).actionMetaData!!.actionProperties!!.shrinkActionProperties, ) } @@ -545,7 +560,7 @@ class ShrinkActionIT : IndexStateManagementRestTestCase() { assertEquals(nodeToShrink, settings["index.routing.allocation.require._name"]) assertEquals( AttemptMoveShardsStep.getSuccessMessage(nodeToShrink), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/SnapshotActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/SnapshotActionIT.kt index 555f8610e..7cc0776ec 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/SnapshotActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/SnapshotActionIT.kt @@ -18,7 +18,6 @@ import java.time.temporal.ChronoUnit import java.util.Locale class SnapshotActionIT : IndexStateManagementRestTestCase() { - private val testIndexName = javaClass.simpleName.lowercase(Locale.ROOT) fun `test basic`() { @@ -27,21 +26,23 @@ class SnapshotActionIT : IndexStateManagementRestTestCase() { val repository = "repository" val snapshot = "snapshot" val actionConfig = SnapshotAction(repository, snapshot, 0) - val states = listOf( - State("Snapshot", listOf(actionConfig), listOf()) - ) + val states = + listOf( + State("Snapshot", listOf(actionConfig), listOf()), + ) createRepository(repository) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) @@ -64,21 +65,23 @@ class SnapshotActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_policy_basic" val repository = "repository" val actionConfig = SnapshotAction(repository, "{{ctx.index}}", 0) - val states = listOf( - State("Snapshot", listOf(actionConfig), listOf()) - ) + val states = + listOf( + State("Snapshot", listOf(actionConfig), listOf()), + ) createRepository(repository) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) @@ -101,21 +104,23 @@ class SnapshotActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_policy_basic" val repository = "repository" val actionConfig = SnapshotAction(repository, "{{ctx.someField}}", 0) - val states = listOf( - 
State("Snapshot", listOf(actionConfig), listOf()) - ) + val states = + listOf( + State("Snapshot", listOf(actionConfig), listOf()), + ) createRepository(repository) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) @@ -139,21 +144,23 @@ class SnapshotActionIT : IndexStateManagementRestTestCase() { val repository = "repository" val snapshot = "snapshot_success_test" val actionConfig = SnapshotAction(repository, snapshot, 0) - val states = listOf( - State("Snapshot", listOf(actionConfig), listOf()) - ) + val states = + listOf( + State("Snapshot", listOf(actionConfig), listOf()), + ) createRepository(repository) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) @@ -174,7 +181,7 @@ class SnapshotActionIT : IndexStateManagementRestTestCase() { // verify we set snapshotName in action properties waitFor { assert( - getExplainManagedIndexMetaData(indexName).actionMetaData?.actionProperties?.snapshotName?.contains(snapshot) == true + getExplainManagedIndexMetaData(indexName).actionMetaData?.actionProperties?.snapshotName?.contains(snapshot) == true, ) } @@ -188,21 +195,23 @@ class SnapshotActionIT : IndexStateManagementRestTestCase() { val repository = "repository" val snapshot = "-" val actionConfig = SnapshotAction(repository, "", 0) - val states = listOf( - State("Snapshot", listOf(actionConfig), listOf()) - ) + val states = + listOf( + State("Snapshot", listOf(actionConfig), listOf()), + ) createRepository(repository) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) @@ -223,7 +232,7 @@ class SnapshotActionIT : IndexStateManagementRestTestCase() { // verify we set snapshotName in action properties waitFor { assert( - getExplainManagedIndexMetaData(indexName).actionMetaData?.actionProperties?.snapshotName?.contains(snapshot) == true + 
getExplainManagedIndexMetaData(indexName).actionMetaData?.actionProperties?.snapshotName?.contains(snapshot) == true, ) } @@ -237,21 +246,23 @@ class SnapshotActionIT : IndexStateManagementRestTestCase() { val repository = "repository" val snapshot = "snapshot_failed_test" val actionConfig = SnapshotAction(repository, snapshot, 0) - val states = listOf( - State("Snapshot", listOf(actionConfig), listOf()) - ) + val states = + listOf( + State("Snapshot", listOf(actionConfig), listOf()), + ) createRepository(repository) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) @@ -290,21 +301,23 @@ class SnapshotActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_policy_basic" val repository = "hello-world" val actionConfig = SnapshotAction(repository, "snapshot", 0) - val states = listOf( - State("Snapshot", listOf(actionConfig), listOf()) - ) + val states = + listOf( + State("Snapshot", listOf(actionConfig), listOf()), + ) createRepository(repository) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/TransformActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/TransformActionIT.kt index df1efd98b..0904a5910 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/TransformActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/TransformActionIT.kt @@ -37,7 +37,6 @@ import java.time.temporal.ChronoUnit import java.util.Locale class TransformActionIT : IndexStateManagementRestTestCase() { - private val testPrefix = javaClass.simpleName.lowercase(Locale.ROOT) companion object { @@ -90,22 +89,25 @@ class TransformActionIT : IndexStateManagementRestTestCase() { val targetIndex = "${testPrefix}_target_failure" val policyId = "${testPrefix}_policy_failure" - val ismTransform = ISMTransform( - description = "test transform", - targetIndex = targetIndex, - pageSize = 100, - dataSelectionQuery = MatchAllQueryBuilder(), - groups = listOf( - DateHistogram(sourceField = "timestamp", fixedInterval = "1d"), - Terms(sourceField = "wrong_field", targetField = "wrong_field") - ), - aggregations = AggregatorFactories.builder() - .addAggregator(sumAggregation()) - .addAggregator(maxAggregation()) - .addAggregator(minAggregation()) - .addAggregator(avgAggregation()) - 
.addAggregator(valueCountAggregation()) - ) + val ismTransform = + ISMTransform( + description = "test transform", + targetIndex = targetIndex, + pageSize = 100, + dataSelectionQuery = MatchAllQueryBuilder(), + groups = + listOf( + DateHistogram(sourceField = "timestamp", fixedInterval = "1d"), + Terms(sourceField = "wrong_field", targetField = "wrong_field"), + ), + aggregations = + AggregatorFactories.builder() + .addAggregator(sumAggregation()) + .addAggregator(maxAggregation()) + .addAggregator(minAggregation()) + .addAggregator(avgAggregation()) + .addAggregator(valueCountAggregation()), + ) val policy = preparePolicyContainingTransform(indexName, ismTransform, policyId) createPolicy(policy, policyId) createIndex(indexName, policyId, mapping = SOURCE_INDEX_MAPPING) @@ -118,22 +120,25 @@ class TransformActionIT : IndexStateManagementRestTestCase() { val targetIndex = "${testPrefix}_target_retry" val policyId = "${testPrefix}_policy_retry" - val ismTransform = ISMTransform( - description = "test transform", - targetIndex = targetIndex, - pageSize = 100, - dataSelectionQuery = MatchAllQueryBuilder(), - groups = listOf( - DateHistogram(sourceField = "timestamp", fixedInterval = "1d"), - Terms(sourceField = "wrong_field", targetField = "wrong_field") - ), - aggregations = AggregatorFactories.builder() - .addAggregator(sumAggregation()) - .addAggregator(maxAggregation()) - .addAggregator(minAggregation()) - .addAggregator(avgAggregation()) - .addAggregator(valueCountAggregation()) - ) + val ismTransform = + ISMTransform( + description = "test transform", + targetIndex = targetIndex, + pageSize = 100, + dataSelectionQuery = MatchAllQueryBuilder(), + groups = + listOf( + DateHistogram(sourceField = "timestamp", fixedInterval = "1d"), + Terms(sourceField = "wrong_field", targetField = "wrong_field"), + ), + aggregations = + AggregatorFactories.builder() + .addAggregator(sumAggregation()) + .addAggregator(maxAggregation()) + .addAggregator(minAggregation()) + .addAggregator(avgAggregation()) + .addAggregator(valueCountAggregation()), + ) val transform = ismTransform.toTransform(indexName) val policy = preparePolicyContainingTransform(indexName, ismTransform, policyId, retry = 1) createPolicy(policy, policyId) @@ -146,7 +151,7 @@ class TransformActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( AttemptCreateTransformJobStep.getFailedMessage(transform.id, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } } @@ -171,25 +176,28 @@ class TransformActionIT : IndexStateManagementRestTestCase() { targetIndex = targetIndex, pageSize = 100, dataSelectionQuery = MatchAllQueryBuilder(), - groups = listOf( + groups = + listOf( DateHistogram(sourceField = "timestamp", fixedInterval = "1d"), - Terms(sourceField = "category", targetField = "category") + Terms(sourceField = "category", targetField = "category"), ), - aggregations = AggregatorFactories.builder() + aggregations = + AggregatorFactories.builder() .addAggregator(sumAggregation()) .addAggregator(maxAggregation()) .addAggregator(minAggregation()) .addAggregator(avgAggregation()) - .addAggregator(valueCountAggregation()) + .addAggregator(valueCountAggregation()), ) } private fun preparePolicyContainingTransform(indexName: String, ismTransform: ISMTransform, policyId: String, retry: Long = 0): Policy { val actionConfig = TransformAction(ismTransform, 0) actionConfig.configRetry = ActionRetry(retry) - val states = listOf( - 
State("transform", listOf(actionConfig), listOf()) - ) + val states = + listOf( + State("transform", listOf(actionConfig), listOf()), + ) return Policy( id = policyId, description = "test description", @@ -198,13 +206,14 @@ class TransformActionIT : IndexStateManagementRestTestCase() { errorNotification = randomErrorNotification(), defaultState = states[0].name, states = states, - ismTemplate = listOf( + ismTemplate = + listOf( ISMTemplate( indexPatterns = listOf(indexName), priority = 100, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS) - ) - ) + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + ), + ), ) } @@ -212,14 +221,15 @@ class TransformActionIT : IndexStateManagementRestTestCase() { indexName: String, ismTransform: ISMTransform, policyId: String, - retry: Long = 0 + retry: Long = 0, ): Policy { val actionConfig = TransformAction(ismTransform, 0) actionConfig.configRetry = ActionRetry(retry) - val states = listOf( - State("transform1", listOf(actionConfig), listOf(Transition(stateName = "transform2", conditions = null))), - State("transform2", listOf(actionConfig), listOf()) - ) + val states = + listOf( + State("transform1", listOf(actionConfig), listOf(Transition(stateName = "transform2", conditions = null))), + State("transform2", listOf(actionConfig), listOf()), + ) return Policy( id = policyId, description = "test description", @@ -228,13 +238,14 @@ class TransformActionIT : IndexStateManagementRestTestCase() { errorNotification = randomErrorNotification(), defaultState = states[0].name, states = states, - ismTemplate = listOf( + ismTemplate = + listOf( ISMTemplate( indexPatterns = listOf(indexName), priority = 100, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS) - ) - ) + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + ), + ), ) } @@ -248,8 +259,8 @@ class TransformActionIT : IndexStateManagementRestTestCase() { "\"index_patterns\": [ \"$dataStreamName\" ], " + "\"data_stream\": { \"timestamp_field\": { \"name\": \"timestamp\" } }, " + "\"template\": { \"mappings\": { $SOURCE_INDEX_MAPPING } } }", - ContentType.APPLICATION_JSON - ) + ContentType.APPLICATION_JSON, + ), ) // create data stream client().makeRequest("PUT", "/_data_stream/$dataStreamName") @@ -269,7 +280,7 @@ class TransformActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( AttemptCreateTransformJobStep.getSuccessMessage(transformId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } @@ -282,7 +293,7 @@ class TransformActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( WaitForTransformCompletionStep.getJobCompletionMessage(transformId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } } @@ -301,7 +312,7 @@ class TransformActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( AttemptCreateTransformJobStep.getSuccessMessage(transformId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } @@ -312,7 +323,7 @@ class TransformActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( WaitForTransformCompletionStep.getJobCompletionMessage(transformId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } @@ 
-321,7 +332,7 @@ class TransformActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( AttemptTransitionStep.getSuccessMessage(indexName, "transform2"), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } @@ -330,7 +341,7 @@ class TransformActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( AttemptCreateTransformJobStep.getSuccessRestartMessage(transformId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } @@ -341,7 +352,7 @@ class TransformActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( WaitForTransformCompletionStep.getJobCompletionMessage(transformId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } } @@ -369,7 +380,7 @@ class TransformActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( AttemptCreateTransformJobStep.getFailedMessage(transformId, indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/TransitionActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/TransitionActionIT.kt index 05fb85954..18e137d8b 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/TransitionActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/TransitionActionIT.kt @@ -19,27 +19,28 @@ import java.time.temporal.ChronoUnit import java.util.Locale class TransitionActionIT : IndexStateManagementRestTestCase() { - private val testIndexName = javaClass.simpleName.lowercase(Locale.ROOT) fun `test doc count condition`() { val indexName = "${testIndexName}_index_1" val policyID = "${testIndexName}_testPolicyName_1" val secondStateName = "second" - val states = listOf( - State("first", listOf(), listOf(Transition(secondStateName, Conditions(docCount = 5L)))), - State(secondStateName, listOf(), listOf()) - ) - - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("first", listOf(), listOf(Transition(secondStateName, Conditions(docCount = 5L)))), + State(secondStateName, listOf(), listOf()), + ) + + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) @@ -71,20 +72,22 @@ class TransitionActionIT : IndexStateManagementRestTestCase() { val indexName = "${testIndexName}_rollover_age_no_rollover" val policyID = "${testIndexName}_rollover_age_no_rollover_policy" val secondStateName = "second" - val states = listOf( - State("first", listOf(), listOf(Transition(secondStateName, Conditions(rolloverAge = TimeValue.timeValueSeconds(30))))), - State(secondStateName, listOf(), listOf()) - ) - - val policy = Policy( - id = policyID, - description = 
"$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("first", listOf(), listOf(Transition(secondStateName, Conditions(rolloverAge = TimeValue.timeValueSeconds(30))))), + State(secondStateName, listOf(), listOf()), + ) + + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) @@ -108,20 +111,22 @@ class TransitionActionIT : IndexStateManagementRestTestCase() { val policyID = "${testIndexName}_rollover_age_policy" val alias = "foo-alias" val secondStateName = "second" - val states = listOf( - State("first", listOf(), listOf(Transition(secondStateName, Conditions(rolloverAge = TimeValue.timeValueMillis(1))))), - State(secondStateName, listOf(), listOf()) - ) - - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val states = + listOf( + State("first", listOf(), listOf(Transition(secondStateName, Conditions(rolloverAge = TimeValue.timeValueMillis(1))))), + State(secondStateName, listOf(), listOf()), + ) + + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID, alias) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/coordinator/ManagedIndexCoordinatorIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/coordinator/ManagedIndexCoordinatorIT.kt index f42b55a56..aa373a53a 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/coordinator/ManagedIndexCoordinatorIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/coordinator/ManagedIndexCoordinatorIT.kt @@ -25,7 +25,6 @@ import java.time.temporal.ChronoUnit import kotlin.test.assertFailsWith class ManagedIndexCoordinatorIT : IndexStateManagementRestTestCase() { - fun `test creating index with valid policy_id`() { val policy = createRandomPolicy() val (index, policyID) = createIndex(policyID = policy.id) @@ -77,7 +76,7 @@ class ManagedIndexCoordinatorIT : IndexStateManagementRestTestCase() { assertPredicatesOnMetaData( listOf(index to listOf(ManagedIndexMetaData.POLICY_ID to policy.id::equals)), getExplainMap(index), - false + false, ) } @@ -89,16 +88,23 @@ class ManagedIndexCoordinatorIT : IndexStateManagementRestTestCase() { waitFor { assertPredicatesOnMetaData( listOf( - index to listOf( - explainResponseOpendistroPolicyIdSetting to fun(policyID: Any?): Boolean = - policyID == null, - explainResponseOpenSearchPolicyIdSetting to fun(policyID: Any?): Boolean = - policyID == null, - ManagedIndexMetaData.ENABLED to fun(enabled: Any?): Boolean = enabled == null - ) + index to + listOf( + explainResponseOpendistroPolicyIdSetting to + + fun(policyID: Any?): 
Boolean = + policyID == null, + explainResponseOpenSearchPolicyIdSetting to + + fun(policyID: Any?): Boolean = + policyID == null, + ManagedIndexMetaData.ENABLED to + + fun(enabled: Any?): Boolean = enabled == null, + ), ), getExplainMap(index), - true + true, ) } } @@ -118,7 +124,7 @@ class ManagedIndexCoordinatorIT : IndexStateManagementRestTestCase() { assertPredicatesOnMetaData( listOf(index to listOf(ManagedIndexMetaData.POLICY_ID to policy.id::equals)), getExplainMap(index), - false + false, ) } @@ -141,15 +147,16 @@ class ManagedIndexCoordinatorIT : IndexStateManagementRestTestCase() { val rolloverActionConfig = RolloverAction(index = 0, minDocs = 5, minAge = null, minSize = null, minPrimaryShardSize = null) val states = listOf(State(name = "RolloverState", actions = listOf(rolloverActionConfig), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "$policyID description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$policyID description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID, "some_alias") @@ -173,11 +180,12 @@ class ManagedIndexCoordinatorIT : IndexStateManagementRestTestCase() { updateManagedIndexConfigStartTime(managedIndexConfig) // Confirm job was disabled - val disabledManagedIndexConfig: ManagedIndexConfig = waitFor { - val config = getManagedIndexConfigByDocId(managedIndexConfig.id) - assertEquals("ManagedIndexConfig was not disabled", false, config!!.enabled) - config - } + val disabledManagedIndexConfig: ManagedIndexConfig = + waitFor { + val config = getManagedIndexConfigByDocId(managedIndexConfig.id) + assertEquals("ManagedIndexConfig was not disabled", false, config!!.enabled) + config + } // Speed up to next execution and confirm that Explain API still shows information of policy initialization updateManagedIndexConfigStartTime(disabledManagedIndexConfig) @@ -186,14 +194,17 @@ class ManagedIndexCoordinatorIT : IndexStateManagementRestTestCase() { val expectedInfoString = mapOf("message" to "Successfully initialized policy: $policyID").toString() assertPredicatesOnMetaData( listOf( - indexName to listOf( - ManagedIndexMetaData.INDEX to indexName::equals, - ManagedIndexMetaData.POLICY_ID to policyID::equals, - ManagedIndexMetaData.INFO to fun(info: Any?): Boolean = expectedInfoString == info.toString() - ) + indexName to + listOf( + ManagedIndexMetaData.INDEX to indexName::equals, + ManagedIndexMetaData.POLICY_ID to policyID::equals, + ManagedIndexMetaData.INFO to + + fun(info: Any?): Boolean = expectedInfoString == info.toString(), + ), ), getExplainMap(indexName), - false + false, ) } @@ -201,18 +212,19 @@ class ManagedIndexCoordinatorIT : IndexStateManagementRestTestCase() { updateClusterSetting(ManagedIndexSettings.INDEX_STATE_MANAGEMENT_ENABLED.key, "true") // Confirm job was re-enabled - val enabledManagedIndexConfig: ManagedIndexConfig = waitFor { - val config = getManagedIndexConfigByDocId(disabledManagedIndexConfig.id) - assertEquals("ManagedIndexConfig was not re-enabled", true, config!!.enabled) - config - } + val enabledManagedIndexConfig: ManagedIndexConfig = + waitFor { + val config = 
getManagedIndexConfigByDocId(disabledManagedIndexConfig.id) + assertEquals("ManagedIndexConfig was not re-enabled", true, config!!.enabled) + config + } updateManagedIndexConfigStartTime(enabledManagedIndexConfig, retryOnConflict = 4) waitFor { assertEquals( AttemptRolloverStep.getSuccessMessage(indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } } @@ -224,24 +236,26 @@ class ManagedIndexCoordinatorIT : IndexStateManagementRestTestCase() { // Create a policy with one State that performs force_merge and another State that deletes the index val forceMergeActionConfig = ForceMergeAction(index = 0, maxNumSegments = 1) val deleteActionConfig = DeleteAction(index = 0) - val states = listOf( - State( - name = "ForceMergeState", - actions = listOf(forceMergeActionConfig), - transitions = listOf(Transition(stateName = "DeleteState", conditions = null)) - ), - State(name = "DeleteState", actions = listOf(deleteActionConfig), transitions = listOf()) - ) + val states = + listOf( + State( + name = "ForceMergeState", + actions = listOf(forceMergeActionConfig), + transitions = listOf(Transition(stateName = "DeleteState", conditions = null)), + ), + State(name = "DeleteState", actions = listOf(deleteActionConfig), transitions = listOf()), + ) - val policy = Policy( - id = policyID, - description = "$policyID description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$policyID description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) @@ -252,7 +266,7 @@ class ManagedIndexCoordinatorIT : IndexStateManagementRestTestCase() { waitFor { assertTrue( "Segment count for [$indexName] was less than expected", - validateSegmentCount(indexName, min = 2) + validateSegmentCount(indexName, min = 2), ) } @@ -279,7 +293,7 @@ class ManagedIndexCoordinatorIT : IndexStateManagementRestTestCase() { assertEquals( "maxNumSegments not set in ActionProperties", forceMergeActionConfig.maxNumSegments, - getExplainManagedIndexMetaData(indexName).actionMetaData?.actionProperties?.maxNumSegments + getExplainManagedIndexMetaData(indexName).actionMetaData?.actionProperties?.maxNumSegments, ) } @@ -293,7 +307,7 @@ class ManagedIndexCoordinatorIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( WaitForForceMergeStep.getSuccessMessage(indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } @@ -303,7 +317,7 @@ class ManagedIndexCoordinatorIT : IndexStateManagementRestTestCase() { // Validate segments were merged assertTrue( "Segment count for [$indexName] after force merge is incorrect", - validateSegmentCount(indexName, min = 1, max = 1) + validateSegmentCount(indexName, min = 1, max = 1), ) // Fifth execution: Attempt transition, which is safe to disable on, so job should be disabled @@ -313,16 +327,17 @@ class ManagedIndexCoordinatorIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( WaitForForceMergeStep.getSuccessMessage(indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + 
getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } // Confirm job was disabled - val disabledManagedIndexConfig: ManagedIndexConfig = waitFor { - val config = getExistingManagedIndexConfig(indexName) - assertEquals("ManagedIndexConfig was not disabled", false, config.enabled) - config - } + val disabledManagedIndexConfig: ManagedIndexConfig = + waitFor { + val config = getExistingManagedIndexConfig(indexName) + assertEquals("ManagedIndexConfig was not disabled", false, config.enabled) + config + } // Speed up to next execution to confirm Explain API still shows information of the last executed step (WaitForForceMergeStep) updateManagedIndexConfigStartTime(disabledManagedIndexConfig) @@ -331,14 +346,17 @@ class ManagedIndexCoordinatorIT : IndexStateManagementRestTestCase() { val expectedInfoString = mapOf("message" to WaitForForceMergeStep.getSuccessMessage(indexName)).toString() assertPredicatesOnMetaData( listOf( - indexName to listOf( - ManagedIndexMetaData.INDEX to indexName::equals, - ManagedIndexMetaData.POLICY_ID to policyID::equals, - ManagedIndexMetaData.INFO to fun(info: Any?): Boolean = expectedInfoString == info.toString() - ) + indexName to + listOf( + ManagedIndexMetaData.INDEX to indexName::equals, + ManagedIndexMetaData.POLICY_ID to policyID::equals, + ManagedIndexMetaData.INFO to + + fun(info: Any?): Boolean = expectedInfoString == info.toString(), + ), ), getExplainMap(indexName), - false + false, ) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/coordinator/ManagedIndexCoordinatorTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/coordinator/ManagedIndexCoordinatorTests.kt index 62be3917e..a99e0fee5 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/coordinator/ManagedIndexCoordinatorTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/coordinator/ManagedIndexCoordinatorTests.kt @@ -27,7 +27,6 @@ import org.opensearch.threadpool.Scheduler import org.opensearch.threadpool.ThreadPool class ManagedIndexCoordinatorTests : OpenSearchAllocationTestCase() { - private lateinit var client: Client private lateinit var clusterService: ClusterService private lateinit var xContentRegistry: NamedXContentRegistry @@ -68,10 +67,11 @@ class ManagedIndexCoordinatorTests : OpenSearchAllocationTestCase() { val originClusterService: ClusterService = ClusterServiceUtils.createClusterService(threadPool, discoveryNode, clusterSettings) clusterService = Mockito.spy(originClusterService) indexMetadataProvider = IndexMetadataProvider(settings, client, clusterService, mutableMapOf()) - coordinator = ManagedIndexCoordinator( - settings, client, clusterService, threadPool, indexManagementIndices, indexMetadataProvider, - NamedXContentRegistry(SearchModule(Settings.EMPTY, emptyList()).namedXContents) - ) + coordinator = + ManagedIndexCoordinator( + settings, client, clusterService, threadPool, indexManagementIndices, indexMetadataProvider, + NamedXContentRegistry(SearchModule(Settings.EMPTY, emptyList()).namedXContents), + ) } fun `test after start`() { diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/coordinator/SkipExecutionTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/coordinator/SkipExecutionTests.kt index 8f3ea4f71..2421ff5f1 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/coordinator/SkipExecutionTests.kt +++ 
b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/coordinator/SkipExecutionTests.kt @@ -14,7 +14,6 @@ import org.opensearch.cluster.OpenSearchAllocationTestCase import org.opensearch.indexmanagement.indexstatemanagement.SkipExecution class SkipExecutionTests : OpenSearchAllocationTestCase() { - private lateinit var client: Client private lateinit var skip: SkipExecution diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/extension/ISMActionsParserTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/extension/ISMActionsParserTests.kt index 654a4fe5a..4aa6be1eb 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/extension/ISMActionsParserTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/extension/ISMActionsParserTests.kt @@ -7,9 +7,9 @@ package org.opensearch.indexmanagement.indexstatemanagement.extension import org.junit.After import org.opensearch.common.xcontent.LoggingDeprecationHandler -import org.opensearch.core.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.xcontent.ToXContent import org.opensearch.indexmanagement.indexstatemanagement.ISMActionsParser import org.opensearch.indexmanagement.opensearchapi.convertToMap import org.opensearch.indexmanagement.opensearchapi.string @@ -17,7 +17,6 @@ import org.opensearch.test.OpenSearchTestCase import kotlin.test.assertFailsWith class ISMActionsParserTests : OpenSearchTestCase() { - val extensionName = "testExtension" /* diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/extension/SampleCustomActionParser.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/extension/SampleCustomActionParser.kt index 64b80ad31..b896ef231 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/extension/SampleCustomActionParser.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/extension/SampleCustomActionParser.kt @@ -44,8 +44,8 @@ class SampleCustomActionParser : ActionParser() { override fun getActionType(): String { return SampleCustomAction.name } - class SampleCustomAction(val someInt: Int, index: Int) : Action(name, index) { + class SampleCustomAction(val someInt: Int, index: Int) : Action(name, index) { private val sampleCustomStep = SampleCustomStep() private val steps = listOf(sampleCustomStep) @@ -69,6 +69,7 @@ class SampleCustomActionParser : ActionParser() { const val SOME_INT_FIELD = "some_int_field" } } + class SampleCustomStep : Step(name) { private var stepStatus = StepStatus.STARTING @@ -81,7 +82,7 @@ class SampleCustomActionParser : ActionParser() { return currentMetadata.copy( stepMetaData = StepMetaData(name, getStepStartTime(currentMetadata).toEpochMilli(), stepStatus), transitionTo = null, - info = null + info = null, ) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ActionPropertiesTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ActionPropertiesTests.kt index da7a55c88..3f3725c8a 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ActionPropertiesTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ActionPropertiesTests.kt @@ -10,15 +10,15 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ActionPrope import 
org.opensearch.test.OpenSearchTestCase class ActionPropertiesTests : OpenSearchTestCase() { - @Suppress("UNCHECKED_CAST") fun `test action properties exist in history index`() { // All properties inside the ActionProperties class need to be also added to the ism history mappings // This is to catch any commits/PRs that add to ActionProperties but forget to add to history mappings - val expected = createParser( - XContentType.JSON.xContent(), - javaClass.classLoader.getResource("mappings/opendistro-ism-history.json")!!.readText() - ) + val expected = + createParser( + XContentType.JSON.xContent(), + javaClass.classLoader.getResource("mappings/opendistro-ism-history.json")!!.readText(), + ) val expectedMap = expected.map() as Map<String, Map<String, Map<String, Map<String, Map<String, Map<String, Map<String, Map<String, Any>>>>>>>> val actionProperties = ActionProperties.Properties.values().map { it.key } val mappingActionProperties = expectedMap["properties"]!!["managed_index_meta_data"]!!["properties"]!!["action"]!!["properties"]!!["action_properties"]!!["properties"] diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ActionTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ActionTests.kt index 070388cd9..ef08a1651 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ActionTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ActionTests.kt @@ -6,21 +6,21 @@ package org.opensearch.indexmanagement.indexstatemanagement.model import org.opensearch.cluster.routing.allocation.DiskThresholdSettings -import org.opensearch.core.common.io.stream.InputStreamStreamInput -import org.opensearch.core.common.io.stream.OutputStreamStreamOutput import org.opensearch.common.settings.ClusterSettings import org.opensearch.common.settings.Settings -import org.opensearch.core.common.unit.ByteSizeValue import org.opensearch.common.unit.TimeValue import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.common.io.stream.InputStreamStreamInput +import org.opensearch.core.common.io.stream.OutputStreamStreamOutput +import org.opensearch.core.common.unit.ByteSizeValue import org.opensearch.indexmanagement.indexstatemanagement.ISMActionsParser import org.opensearch.indexmanagement.indexstatemanagement.action.DeleteAction import org.opensearch.indexmanagement.indexstatemanagement.action.NotificationAction import org.opensearch.indexmanagement.indexstatemanagement.randomAllocationActionConfig -import org.opensearch.indexmanagement.indexstatemanagement.randomChannel import org.opensearch.indexmanagement.indexstatemanagement.randomByteSizeValue +import org.opensearch.indexmanagement.indexstatemanagement.randomChannel import org.opensearch.indexmanagement.indexstatemanagement.randomCloseActionConfig import org.opensearch.indexmanagement.indexstatemanagement.randomDeleteActionConfig import org.opensearch.indexmanagement.indexstatemanagement.randomDestination @@ -50,7 +50,6 @@ import java.lang.Math.abs import kotlin.test.assertFailsWith class ActionTests : OpenSearchTestCase() { - fun `test invalid timeout for delete action fails`() { assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException for invalid timeout") { ActionTimeout(timeout = TimeValue.parseTimeValue("invalidTimeout", "timeout_test")) @@ -141,8 +140,8 @@ class ActionTests : OpenSearchTestCase() { randomAllocationActionConfig( require = mapOf("box_type" to "hot"), include = 
mapOf(randomAlphaOfLengthBetween(1, 10) to randomAlphaOfLengthBetween(1, 10)), - exclude = mapOf(randomAlphaOfLengthBetween(1, 10) to randomAlphaOfLengthBetween(1, 10)) - ) + exclude = mapOf(randomAlphaOfLengthBetween(1, 10) to randomAlphaOfLengthBetween(1, 10)), + ), ) } @@ -163,17 +162,18 @@ class ActionTests : OpenSearchTestCase() { } fun `test action timeout and retry round trip`() { - val builder = XContentFactory.jsonBuilder() - .startObject() - .field(ActionTimeout.TIMEOUT_FIELD, randomTimeValueObject().stringRep) - .startObject(ActionRetry.RETRY_FIELD) - .field(ActionRetry.COUNT_FIELD, 1) - .field(ActionRetry.BACKOFF_FIELD, ActionRetry.Backoff.EXPONENTIAL) - .field(ActionRetry.DELAY_FIELD, TimeValue.timeValueMinutes(1)) - .endObject() - .startObject(DeleteAction.name) - .endObject() - .endObject() + val builder = + XContentFactory.jsonBuilder() + .startObject() + .field(ActionTimeout.TIMEOUT_FIELD, randomTimeValueObject().stringRep) + .startObject(ActionRetry.RETRY_FIELD) + .field(ActionRetry.COUNT_FIELD, 1) + .field(ActionRetry.BACKOFF_FIELD, ActionRetry.Backoff.EXPONENTIAL) + .field(ActionRetry.DELAY_FIELD, TimeValue.timeValueMinutes(1)) + .endObject() + .startObject(DeleteAction.name) + .endObject() + .endObject() val parser = XContentType.JSON.xContent().createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, builder.string()) parser.nextToken() @@ -185,9 +185,10 @@ class ActionTests : OpenSearchTestCase() { fun `test shrink disk threshold percentage settings`() { val rawPercentage = randomIntBetween(0, 100) val percentage = "$rawPercentage%" - val settings = Settings.builder().put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.key, percentage) - .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.key, percentage) - .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.key, percentage).build() + val settings = + Settings.builder().put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.key, percentage) + .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.key, percentage) + .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.key, percentage).build() val clusterSettings = ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.map { it }.toSet()) val totalNodeBytes = randomByteSizeValue().bytes val thresholdBytes = getFreeBytesThresholdHigh(clusterSettings, totalNodeBytes) @@ -199,9 +200,10 @@ class ActionTests : OpenSearchTestCase() { fun `test shrink disk threshold byte settings`() { val byteValue = randomByteSizeValue() - val settings = Settings.builder().put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.key, byteValue) - .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.key, byteValue) - .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.key, byteValue).build() + val settings = + Settings.builder().put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.key, byteValue) + .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.key, byteValue) + .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.key, byteValue).build() val clusterSettings = ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.map { it }.toSet()) val thresholdBytes = 
getFreeBytesThresholdHigh(clusterSettings, randomByteSizeValue().bytes) assertEquals("Free bytes threshold not being calculated correctly for byte setting.", thresholdBytes, byteValue.bytes) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ConditionsTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ConditionsTests.kt index b3c0e6ebc..918728757 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ConditionsTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ConditionsTests.kt @@ -12,11 +12,10 @@ import org.opensearch.test.OpenSearchTestCase import kotlin.test.assertFailsWith class ConditionsTests : OpenSearchTestCase() { - fun `test supplying more than one transition condition fails`() { assertFailsWith( IllegalArgumentException::class, - "Expected IllegalArgumentException for supplying multiple transition conditions" + "Expected IllegalArgumentException for supplying multiple transition conditions", ) { Conditions(indexAge = randomTimeValueObject(), size = randomByteSizeValue()) } @@ -25,7 +24,7 @@ class ConditionsTests : OpenSearchTestCase() { fun `test doc count condition of zero fails`() { assertFailsWith( IllegalArgumentException::class, - "Expected IllegalArgumentException for doc count condition less than 1" + "Expected IllegalArgumentException for doc count condition less than 1", ) { Conditions(docCount = 0) } @@ -34,7 +33,7 @@ class ConditionsTests : OpenSearchTestCase() { fun `test size condition of zero fails`() { assertFailsWith( IllegalArgumentException::class, - "Expected IllegalArgumentException for size condition less than 1" + "Expected IllegalArgumentException for size condition less than 1", ) { Conditions(size = ByteSizeValue.parseBytesSizeValue("0", "size_test")) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/DestinationTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/DestinationTests.kt index cf6269711..77f649eed 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/DestinationTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/DestinationTests.kt @@ -15,7 +15,6 @@ import org.opensearch.test.OpenSearchTestCase * alerting so all plugins can consume and use. 
*/ class DestinationTests : OpenSearchTestCase() { - fun `test chime destination`() { val chime = Chime("http://abc.com") assertEquals("Url is manipulated", chime.url, "http://abc.com") diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ISMTemplateTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ISMTemplateTests.kt index faf200a9a..a782e1183 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ISMTemplateTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ISMTemplateTests.kt @@ -13,7 +13,6 @@ import java.io.ByteArrayInputStream import java.io.ByteArrayOutputStream class ISMTemplateTests : OpenSearchTestCase() { - fun `test basic`() { val expectedISMTemplate = randomISMTemplate() diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ManagedIndexMetaDataTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ManagedIndexMetaDataTests.kt index dc1810219..7c4671f6e 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ManagedIndexMetaDataTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/ManagedIndexMetaDataTests.kt @@ -20,87 +20,90 @@ import java.io.ByteArrayOutputStream import java.time.Instant class ManagedIndexMetaDataTests : OpenSearchTestCase() { - fun `test basic`() { - val expectedManagedIndexMetaData = ManagedIndexMetaData( - index = "movies", - indexUuid = "ahPcR4fNRrSe-Q7czV3VPQ", - policyID = "close_policy", - policySeqNo = 0, - policyPrimaryTerm = 1, - policyCompleted = null, - rolledOver = null, - indexCreationDate = Instant.now().toEpochMilli(), - transitionTo = null, - stateMetaData = StateMetaData("close-index", 1234), - actionMetaData = null, - stepMetaData = null, - policyRetryInfo = PolicyRetryInfoMetaData(false, 0), - info = mapOf("message" to "Successfully initialized policy: close_policy") - ) + val expectedManagedIndexMetaData = + ManagedIndexMetaData( + index = "movies", + indexUuid = "ahPcR4fNRrSe-Q7czV3VPQ", + policyID = "close_policy", + policySeqNo = 0, + policyPrimaryTerm = 1, + policyCompleted = null, + rolledOver = null, + indexCreationDate = Instant.now().toEpochMilli(), + transitionTo = null, + stateMetaData = StateMetaData("close-index", 1234), + actionMetaData = null, + stepMetaData = null, + policyRetryInfo = PolicyRetryInfoMetaData(false, 0), + info = mapOf("message" to "Successfully initialized policy: close_policy"), + ) roundTripManagedIndexMetaData(expectedManagedIndexMetaData) } fun `test action`() { - val expectedManagedIndexMetaData = ManagedIndexMetaData( - index = "movies", - indexUuid = "ahPcR4fNRrSe-Q7czV3VPQ", - policyID = "close_policy", - policySeqNo = 0, - policyPrimaryTerm = 1, - policyCompleted = null, - rolledOver = null, - indexCreationDate = null, - transitionTo = null, - stateMetaData = StateMetaData("close-index", 1234), - actionMetaData = ActionMetaData("close", 4321, 0, false, 0, 0, null), - stepMetaData = null, - policyRetryInfo = PolicyRetryInfoMetaData(false, 0), - info = mapOf("message" to "Successfully closed index") - ) + val expectedManagedIndexMetaData = + ManagedIndexMetaData( + index = "movies", + indexUuid = "ahPcR4fNRrSe-Q7czV3VPQ", + policyID = "close_policy", + policySeqNo = 0, + policyPrimaryTerm = 1, + policyCompleted = null, + rolledOver = null, + indexCreationDate = null, + transitionTo = null, + stateMetaData = StateMetaData("close-index", 
1234), + actionMetaData = ActionMetaData("close", 4321, 0, false, 0, 0, null), + stepMetaData = null, + policyRetryInfo = PolicyRetryInfoMetaData(false, 0), + info = mapOf("message" to "Successfully closed index"), + ) roundTripManagedIndexMetaData(expectedManagedIndexMetaData) } fun `test action property`() { - val expectedManagedIndexMetaData = ManagedIndexMetaData( - index = "movies", - indexUuid = "ahPcR4fNRrSe-Q7czV3VPQ", - policyID = "close_policy", - policySeqNo = 0, - policyPrimaryTerm = 1, - policyCompleted = null, - rolledOver = null, - indexCreationDate = null, - transitionTo = null, - stateMetaData = StateMetaData("close-index", 1234), - actionMetaData = ActionMetaData("close", 4321, 0, false, 0, 0, ActionProperties(3)), - stepMetaData = null, - policyRetryInfo = PolicyRetryInfoMetaData(false, 0), - info = mapOf("message" to "Successfully closed index") - ) + val expectedManagedIndexMetaData = + ManagedIndexMetaData( + index = "movies", + indexUuid = "ahPcR4fNRrSe-Q7czV3VPQ", + policyID = "close_policy", + policySeqNo = 0, + policyPrimaryTerm = 1, + policyCompleted = null, + rolledOver = null, + indexCreationDate = null, + transitionTo = null, + stateMetaData = StateMetaData("close-index", 1234), + actionMetaData = ActionMetaData("close", 4321, 0, false, 0, 0, ActionProperties(3)), + stepMetaData = null, + policyRetryInfo = PolicyRetryInfoMetaData(false, 0), + info = mapOf("message" to "Successfully closed index"), + ) roundTripManagedIndexMetaData(expectedManagedIndexMetaData) } fun `test step`() { - val expectedManagedIndexMetaData = ManagedIndexMetaData( - index = "movies", - indexUuid = "ahPcR4fNRrSe-Q7czV3VPQ", - policyID = "close_policy", - policySeqNo = 0, - policyPrimaryTerm = 1, - policyCompleted = null, - rolledOver = false, - indexCreationDate = null, - transitionTo = null, - stateMetaData = StateMetaData("rollover-index", 1234), - actionMetaData = ActionMetaData("rollover", 4321, 0, false, 0, 0, null), - stepMetaData = StepMetaData("attempt_rollover", 6789, Step.StepStatus.FAILED), - policyRetryInfo = PolicyRetryInfoMetaData(false, 0), - info = mapOf("message" to "There is no valid rollover_alias=null set on movies") - ) + val expectedManagedIndexMetaData = + ManagedIndexMetaData( + index = "movies", + indexUuid = "ahPcR4fNRrSe-Q7czV3VPQ", + policyID = "close_policy", + policySeqNo = 0, + policyPrimaryTerm = 1, + policyCompleted = null, + rolledOver = false, + indexCreationDate = null, + transitionTo = null, + stateMetaData = StateMetaData("rollover-index", 1234), + actionMetaData = ActionMetaData("rollover", 4321, 0, false, 0, 0, null), + stepMetaData = StepMetaData("attempt_rollover", 6789, Step.StepStatus.FAILED), + policyRetryInfo = PolicyRetryInfoMetaData(false, 0), + info = mapOf("message" to "There is no valid rollover_alias=null set on movies"), + ) roundTripManagedIndexMetaData(expectedManagedIndexMetaData) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/PolicyTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/PolicyTests.kt index feb41a9dc..b2e4b6610 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/PolicyTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/PolicyTests.kt @@ -12,7 +12,6 @@ import org.opensearch.test.OpenSearchTestCase import kotlin.test.assertFailsWith class PolicyTests : OpenSearchTestCase() { - fun `test invalid default state`() { assertFailsWith(IllegalArgumentException::class, "Expected 
IllegalArgumentException for invalid default state") { randomPolicy().copy(defaultState = "definitely not this") diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/StateTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/StateTests.kt index 4d6ec7799..91a3447b8 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/StateTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/StateTests.kt @@ -13,7 +13,6 @@ import org.opensearch.test.OpenSearchTestCase import kotlin.test.assertFailsWith class StateTests : OpenSearchTestCase() { - fun `test invalid state name`() { assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException for blank state name") { State(" ", emptyList(), emptyList()) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/XContentTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/XContentTests.kt index fda797fac..89c41d185 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/XContentTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/XContentTests.kt @@ -6,11 +6,11 @@ package org.opensearch.indexmanagement.indexstatemanagement.model import org.opensearch.common.xcontent.LoggingDeprecationHandler -import org.opensearch.core.xcontent.XContentParser import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.indexmanagement.common.model.notification.Channel import org.opensearch.indexmanagement.indexstatemanagement.ISMActionsParser import org.opensearch.indexmanagement.indexstatemanagement.action.RollupAction -import org.opensearch.indexmanagement.common.model.notification.Channel import org.opensearch.indexmanagement.indexstatemanagement.model.destination.DestinationType import org.opensearch.indexmanagement.indexstatemanagement.nonNullRandomConditions import org.opensearch.indexmanagement.indexstatemanagement.randomAliasAction @@ -44,7 +44,6 @@ import org.opensearch.test.OpenSearchTestCase import kotlin.test.assertFailsWith class XContentTests : OpenSearchTestCase() { - fun `test policy parsing`() { val policy = randomPolicy() @@ -142,21 +141,21 @@ class XContentTests : OpenSearchTestCase() { val chimeParsedNotificationAction = ISMActionsParser.instance.parse(parser(chimeNotificationActionString), 0) assertEquals( "Round tripping chime NotificationAction doesn't work", - chimeNotificationAction.convertToMap(), chimeParsedNotificationAction.convertToMap() + chimeNotificationAction.convertToMap(), chimeParsedNotificationAction.convertToMap(), ) val slackNotificationActionString = slackNotificationAction.toJsonString() val slackParsedNotificationAction = ISMActionsParser.instance.parse(parser(slackNotificationActionString), 0) assertEquals( "Round tripping slack NotificationAction doesn't work", - slackNotificationAction.convertToMap(), slackParsedNotificationAction.convertToMap() + slackNotificationAction.convertToMap(), slackParsedNotificationAction.convertToMap(), ) val customNotificationActionString = customNotificationAction.toJsonString() val customParsedNotificationAction = ISMActionsParser.instance.parse(parser(customNotificationActionString), 0) assertEquals( "Round tripping custom webhook NotificationAction doesn't work", - customNotificationAction.convertToMap(), customParsedNotificationAction.convertToMap() + 
customNotificationAction.convertToMap(), customParsedNotificationAction.convertToMap(), ) } @@ -167,16 +166,17 @@ class XContentTests : OpenSearchTestCase() { val parsedSnapshotAction = ISMActionsParser.instance.parse(parser(snapshotActionString), 0) assertEquals( "Round tripping SnapshotAction doesn't work", - snapshotAction.convertToMap(), parsedSnapshotAction.convertToMap() + snapshotAction.convertToMap(), parsedSnapshotAction.convertToMap(), ) } fun `test allocation action config parsing`() { - val allocationAction = randomAllocationActionConfig( - require = mapOf("box_type" to "hot"), - include = mapOf(randomAlphaOfLengthBetween(1, 10) to randomAlphaOfLengthBetween(1, 10)), - exclude = mapOf(randomAlphaOfLengthBetween(1, 10) to randomAlphaOfLengthBetween(1, 10)) - ) + val allocationAction = + randomAllocationActionConfig( + require = mapOf("box_type" to "hot"), + include = mapOf(randomAlphaOfLengthBetween(1, 10) to randomAlphaOfLengthBetween(1, 10)), + exclude = mapOf(randomAlphaOfLengthBetween(1, 10) to randomAlphaOfLengthBetween(1, 10)), + ) val allocationActionString = allocationAction.toJsonString() val parsedAllocationAction = ISMActionsParser.instance.parse(parser(allocationActionString), 0) @@ -238,22 +238,23 @@ class XContentTests : OpenSearchTestCase() { } fun `test managed index metadata parsing`() { - val metadata = ManagedIndexMetaData( - index = randomAlphaOfLength(10), - indexUuid = randomAlphaOfLength(10), - policyID = randomAlphaOfLength(10), - policySeqNo = randomNonNegativeLong(), - policyPrimaryTerm = randomNonNegativeLong(), - policyCompleted = null, - rolledOver = null, - indexCreationDate = null, - transitionTo = randomAlphaOfLength(10), - stateMetaData = null, - actionMetaData = null, - stepMetaData = null, - policyRetryInfo = null, - info = null - ) + val metadata = + ManagedIndexMetaData( + index = randomAlphaOfLength(10), + indexUuid = randomAlphaOfLength(10), + policyID = randomAlphaOfLength(10), + policySeqNo = randomNonNegativeLong(), + policyPrimaryTerm = randomNonNegativeLong(), + policyCompleted = null, + rolledOver = null, + indexCreationDate = null, + transitionTo = randomAlphaOfLength(10), + stateMetaData = null, + actionMetaData = null, + stepMetaData = null, + policyRetryInfo = null, + info = null, + ) val metadataString = metadata.toJsonString() val parsedMetaData = ManagedIndexMetaData.parse(parser(metadataString)) assertEquals("Round tripping ManagedIndexMetaData doesn't work", metadata, parsedMetaData) @@ -297,7 +298,7 @@ class XContentTests : OpenSearchTestCase() { val aliasAction = randomAliasAction(true) val aliasActionString = aliasAction.toJsonString() ISMActionsParser.instance.parse(parser(aliasActionString), 0) - } + }, ) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/opensearchapi/ExtensionsTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/opensearchapi/ExtensionsTests.kt index 2fd29f149..30d09edc0 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/opensearchapi/ExtensionsTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/opensearchapi/ExtensionsTests.kt @@ -11,24 +11,25 @@ import org.opensearch.cluster.metadata.IndexMetadata import org.opensearch.test.OpenSearchTestCase class ExtensionsTests : OpenSearchTestCase() { - fun `test getting oldest rollover time`() { - val noRolloverMetadata = IndexMetadata - .Builder("foo-index") - .settings(settings(Version.CURRENT)) - .numberOfShards(1) - .numberOfReplicas(1) - 
.build() + val noRolloverMetadata = + IndexMetadata + .Builder("foo-index") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(1) + .build() assertNull(noRolloverMetadata.getOldestRolloverTime()) val oldest = RolloverInfo("bar-alias", emptyList(), 17L) - val metadata = IndexMetadata - .Builder(noRolloverMetadata) - .putRolloverInfo(RolloverInfo("foo-alias", emptyList(), 42L)) - .putRolloverInfo(oldest) - .putRolloverInfo(RolloverInfo("baz-alias", emptyList(), 134345L)) - .build() + val metadata = + IndexMetadata + .Builder(noRolloverMetadata) + .putRolloverInfo(RolloverInfo("foo-alias", emptyList(), 42L)) + .putRolloverInfo(oldest) + .putRolloverInfo(RolloverInfo("baz-alias", emptyList(), 134345L)) + .build() assertEquals("Did not get the oldest rollover time", oldest.time, metadata.getOldestRolloverTime()?.toEpochMilli()) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/ISMTemplateRestAPIIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/ISMTemplateRestAPIIT.kt index 4fad90ebb..660e83562 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/ISMTemplateRestAPIIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/ISMTemplateRestAPIIT.kt @@ -7,6 +7,7 @@ package org.opensearch.indexmanagement.indexstatemanagement.resthandler import org.opensearch.client.ResponseException import org.opensearch.common.settings.Settings +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.indexstatemanagement.IndexStateManagementRestTestCase import org.opensearch.indexmanagement.indexstatemanagement.action.ReadOnlyAction import org.opensearch.indexmanagement.indexstatemanagement.model.ISMTemplate @@ -18,13 +19,11 @@ import org.opensearch.indexmanagement.indexstatemanagement.util.INDEX_HIDDEN import org.opensearch.indexmanagement.randomInstant import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData import org.opensearch.indexmanagement.waitFor -import org.opensearch.core.rest.RestStatus import java.time.Instant import java.time.temporal.ChronoUnit import java.util.Locale class ISMTemplateRestAPIIT : IndexStateManagementRestTestCase() { - private val testIndexName = javaClass.simpleName.lowercase(Locale.ROOT) private val policyID1 = "t1" @@ -90,19 +89,21 @@ class ISMTemplateRestAPIIT : IndexStateManagementRestTestCase() { val ismTemp = ISMTemplate(listOf("log*"), 100, randomInstant()) val action = ReadOnlyAction(0) - val states = listOf( - State("ReadOnlyState", listOf(action), listOf()) - ) - val policy = Policy( - id = policyID, - description = "$testIndexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states, - ismTemplate = listOf(ismTemp) - ) + val states = + listOf( + State("ReadOnlyState", listOf(action), listOf()), + ) + val policy = + Policy( + id = policyID, + description = "$testIndexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ismTemplate = listOf(ismTemp), + ) createPolicy(policy, policyID) createIndex(indexName2, null) @@ -118,28 +119,42 @@ class ISMTemplateRestAPIIT : IndexStateManagementRestTestCase() { // only index create after template 
can be managed assertPredicatesOnMetaData( listOf( - indexName1 to listOf( - explainResponseOpendistroPolicyIdSetting to fun(policyID: Any?): Boolean = policyID == null, - explainResponseOpenSearchPolicyIdSetting to fun(policyID: Any?): Boolean = policyID == null, - ManagedIndexMetaData.ENABLED to fun(enabled: Any?): Boolean = enabled == null - ) + indexName1 to + listOf( + explainResponseOpendistroPolicyIdSetting to + + fun(policyID: Any?): Boolean = policyID == null, + explainResponseOpenSearchPolicyIdSetting to + + fun(policyID: Any?): Boolean = policyID == null, + ManagedIndexMetaData.ENABLED to + + fun(enabled: Any?): Boolean = enabled == null, + ), ), getExplainMap(indexName1), - true + true, ) assertNull(getManagedIndexConfig(indexName1)) // hidden index will not be manage assertPredicatesOnMetaData( listOf( - indexName1 to listOf( - explainResponseOpendistroPolicyIdSetting to fun(policyID: Any?): Boolean = policyID == null, - explainResponseOpenSearchPolicyIdSetting to fun(policyID: Any?): Boolean = policyID == null, - ManagedIndexMetaData.ENABLED to fun(enabled: Any?): Boolean = enabled == null - ) + indexName1 to + listOf( + explainResponseOpendistroPolicyIdSetting to + + fun(policyID: Any?): Boolean = policyID == null, + explainResponseOpenSearchPolicyIdSetting to + + fun(policyID: Any?): Boolean = policyID == null, + ManagedIndexMetaData.ENABLED to + + fun(enabled: Any?): Boolean = enabled == null, + ), ), getExplainMap(indexName1), - true + true, ) assertNull(getManagedIndexConfig(indexName3)) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/IndexStateManagementRestApiIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/IndexStateManagementRestApiIT.kt index 99b43048b..a102d4e8e 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/IndexStateManagementRestApiIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/IndexStateManagementRestApiIT.kt @@ -11,6 +11,7 @@ import org.opensearch.action.search.SearchResponse import org.opensearch.client.ResponseException import org.opensearch.common.xcontent.XContentType import org.opensearch.common.xcontent.json.JsonXContent.jsonXContent +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.POLICY_BASE_URI import org.opensearch.indexmanagement.indexstatemanagement.ISMActionsParser @@ -27,14 +28,12 @@ import org.opensearch.indexmanagement.util._ID import org.opensearch.indexmanagement.util._PRIMARY_TERM import org.opensearch.indexmanagement.util._SEQ_NO import org.opensearch.rest.RestRequest -import org.opensearch.core.rest.RestStatus import org.opensearch.test.OpenSearchTestCase import org.opensearch.test.junit.annotations.TestLogging @TestLogging(value = "level:DEBUG", reason = "Debugging tests") @Suppress("UNCHECKED_CAST") class IndexStateManagementRestApiIT : IndexStateManagementRestTestCase() { - @Throws(Exception::class) fun `test plugins are loaded`() { val response = entityAsMap(client().makeRequest("GET", "_nodes/plugins")) @@ -95,9 +94,10 @@ class IndexStateManagementRestApiIT : IndexStateManagementRestTestCase() { fun `test creating a policy with a disallowed actions fails`() { try { // remove read_only from the allowlist - val allowedActions = ISMActionsParser.instance.parsers.map { it.getActionType() }.toList() - .filter { 
actionType -> actionType != ReadOnlyAction.name } - .joinToString(prefix = "[", postfix = "]") { string -> "\"$string\"" } + val allowedActions = + ISMActionsParser.instance.parsers.map { it.getActionType() }.toList() + .filter { actionType -> actionType != ReadOnlyAction.name } + .joinToString(prefix = "[", postfix = "]") { string -> "\"$string\"" } updateClusterSetting(ManagedIndexSettings.ALLOW_LIST.key, allowedActions, escapeValue = false) val policy = randomPolicy(states = listOf(randomState(actions = listOf(randomReadOnlyActionConfig())))) client().makeRequest("PUT", "$POLICY_BASE_URI/some_id", emptyMap(), policy.toHttpEntity()) @@ -111,21 +111,23 @@ class IndexStateManagementRestApiIT : IndexStateManagementRestTestCase() { fun `test updating a policy with a disallowed actions fails`() { try { // remove read_only from the allowlist - val allowedActions = ISMActionsParser.instance.parsers.map { it.getActionType() }.toList() - .filter { actionType -> actionType != ReadOnlyAction.name } - .joinToString(prefix = "[", postfix = "]") { string -> "\"$string\"" } + val allowedActions = + ISMActionsParser.instance.parsers.map { it.getActionType() }.toList() + .filter { actionType -> actionType != ReadOnlyAction.name } + .joinToString(prefix = "[", postfix = "]") { string -> "\"$string\"" } updateClusterSetting(ManagedIndexSettings.ALLOW_LIST.key, allowedActions, escapeValue = false) // createRandomPolicy currently does not create a random list of actions, so it won't accidentally create one with read_only val policy = createRandomPolicy() // update the policy to have read_only action which is not allowed - val updatedPolicy = policy.copy( - defaultState = "some_state", - states = listOf(randomState(name = "some_state", actions = listOf(randomReadOnlyActionConfig()))) - ) + val updatedPolicy = + policy.copy( + defaultState = "some_state", + states = listOf(randomState(name = "some_state", actions = listOf(randomReadOnlyActionConfig()))), + ) client().makeRequest( "PUT", "$POLICY_BASE_URI/${updatedPolicy.id}?refresh=true&if_seq_no=${updatedPolicy.seqNo}&if_primary_term=${updatedPolicy.primaryTerm}", - emptyMap(), updatedPolicy.toHttpEntity() + emptyMap(), updatedPolicy.toHttpEntity(), ) fail("Expected 403 Method FORBIDDEN response") } catch (e: ResponseException) { @@ -152,10 +154,11 @@ class IndexStateManagementRestApiIT : IndexStateManagementRestTestCase() { val parserMap = createParser(XContentType.JSON.xContent(), response.entity.content).map() as Map> val mappingsMap = parserMap[INDEX_MANAGEMENT_INDEX]!!["mappings"] as Map - val expected = createParser( - XContentType.JSON.xContent(), - javaClass.classLoader.getResource("mappings/opendistro-ism-config.json").readText() - ) + val expected = + createParser( + XContentType.JSON.xContent(), + javaClass.classLoader.getResource("mappings/opendistro-ism-config.json").readText(), + ) val expectedMap = expected.map() assertEquals("Mappings are different", expectedMap, mappingsMap) @@ -169,7 +172,7 @@ class IndexStateManagementRestApiIT : IndexStateManagementRestTestCase() { client().makeRequest( "PUT", "$POLICY_BASE_URI/${policy.id}?refresh=true&if_seq_no=10251989&if_primary_term=2342", - emptyMap(), policy.toHttpEntity() + emptyMap(), policy.toHttpEntity(), ) fail("expected 409 ResponseException") } catch (e: ResponseException) { @@ -180,11 +183,12 @@ class IndexStateManagementRestApiIT : IndexStateManagementRestTestCase() { @Throws(Exception::class) fun `test update policy with correct seq_no and primary_term`() { val policy = 
createRandomPolicy() - val updateResponse = client().makeRequest( - "PUT", - "$POLICY_BASE_URI/${policy.id}?refresh=true&if_seq_no=${policy.seqNo}&if_primary_term=${policy.primaryTerm}", - emptyMap(), policy.toHttpEntity() - ) + val updateResponse = + client().makeRequest( + "PUT", + "$POLICY_BASE_URI/${policy.id}?refresh=true&if_seq_no=${policy.seqNo}&if_primary_term=${policy.primaryTerm}", + emptyMap(), policy.toHttpEntity(), + ) assertEquals("Update policy failed", RestStatus.OK, updateResponse.restStatus()) val responseBody = updateResponse.asMap() @@ -254,7 +258,8 @@ class IndexStateManagementRestApiIT : IndexStateManagementRestTestCase() { fun `test able to fuzzy search policies`() { val policy = createRandomPolicy() - val request = """ + val request = + """ { "query": { "query_string": { @@ -264,11 +269,12 @@ class IndexStateManagementRestApiIT : IndexStateManagementRestTestCase() { } } } - """.trimIndent() - val response = client().makeRequest( - "POST", "$INDEX_MANAGEMENT_INDEX/_search", emptyMap(), - StringEntity(request, ContentType.APPLICATION_JSON) - ) + """.trimIndent() + val response = + client().makeRequest( + "POST", "$INDEX_MANAGEMENT_INDEX/_search", emptyMap(), + StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Request failed", RestStatus.OK, response.restStatus()) val searchResponse = SearchResponse.fromXContent(createParser(jsonXContent, response.entity.content)) assertTrue("Did not find policy using fuzzy search", searchResponse.hits.hits.size == 1) @@ -276,10 +282,11 @@ class IndexStateManagementRestApiIT : IndexStateManagementRestTestCase() { fun `test get policies before ism init`() { val actualResponse = client().makeRequest(RestRequest.Method.GET.toString(), POLICY_BASE_URI).asMap() - val expectedResponse = mapOf( - "policies" to emptyList(), - "total_policies" to 0 - ) + val expectedResponse = + mapOf( + "policies" to emptyList(), + "total_policies" to 0, + ) assertEquals(expectedResponse, actualResponse) } @@ -289,32 +296,36 @@ class IndexStateManagementRestApiIT : IndexStateManagementRestTestCase() { val response = client().makeRequest(RestRequest.Method.GET.toString(), POLICY_BASE_URI) val actualMessage = response.asMap() - val expectedMessage = mapOf( - "total_policies" to 1, - "policies" to listOf( - mapOf( - _SEQ_NO to policy.seqNo, - _ID to policy.id, - _PRIMARY_TERM to policy.primaryTerm, - Policy.POLICY_TYPE to mapOf( - "schema_version" to policy.schemaVersion, - "policy_id" to policy.id, - "last_updated_time" to policy.lastUpdatedTime.toEpochMilli(), - "default_state" to policy.defaultState, - "ism_template" to null, - "description" to policy.description, - "error_notification" to policy.errorNotification, - "states" to policy.states.map { - mapOf( - "name" to it.name, - "transitions" to it.transitions, - "actions" to it.actions - ) - } - ) - ) + val expectedMessage = + mapOf( + "total_policies" to 1, + "policies" to + listOf( + mapOf( + _SEQ_NO to policy.seqNo, + _ID to policy.id, + _PRIMARY_TERM to policy.primaryTerm, + Policy.POLICY_TYPE to + mapOf( + "schema_version" to policy.schemaVersion, + "policy_id" to policy.id, + "last_updated_time" to policy.lastUpdatedTime.toEpochMilli(), + "default_state" to policy.defaultState, + "ism_template" to null, + "description" to policy.description, + "error_notification" to policy.errorNotification, + "states" to + policy.states.map { + mapOf( + "name" to it.name, + "transitions" to it.transitions, + "actions" to it.actions, + ) + }, + ), + ), + ), ) - ) 
assertEquals(expectedMessage.toString(), actualMessage.toString()) } @@ -327,32 +338,36 @@ class IndexStateManagementRestApiIT : IndexStateManagementRestTestCase() { val response = client().makeRequest(RestRequest.Method.GET.toString(), "$POLICY_BASE_URI?queryString=*testing-hyphens*") val actualMessage = response.asMap() - val expectedMessage = mapOf( - "total_policies" to 1, - "policies" to listOf( - mapOf( - _SEQ_NO to policy.seqNo, - _ID to policy.id, - _PRIMARY_TERM to policy.primaryTerm, - Policy.POLICY_TYPE to mapOf( - "schema_version" to policy.schemaVersion, - "policy_id" to policy.id, - "last_updated_time" to policy.lastUpdatedTime.toEpochMilli(), - "default_state" to policy.defaultState, - "ism_template" to null, - "description" to policy.description, - "error_notification" to policy.errorNotification, - "states" to policy.states.map { - mapOf( - "name" to it.name, - "transitions" to it.transitions, - "actions" to it.actions - ) - } - ) - ) + val expectedMessage = + mapOf( + "total_policies" to 1, + "policies" to + listOf( + mapOf( + _SEQ_NO to policy.seqNo, + _ID to policy.id, + _PRIMARY_TERM to policy.primaryTerm, + Policy.POLICY_TYPE to + mapOf( + "schema_version" to policy.schemaVersion, + "policy_id" to policy.id, + "last_updated_time" to policy.lastUpdatedTime.toEpochMilli(), + "default_state" to policy.defaultState, + "ism_template" to null, + "description" to policy.description, + "error_notification" to policy.errorNotification, + "states" to + policy.states.map { + mapOf( + "name" to it.name, + "transitions" to it.transitions, + "actions" to it.actions, + ) + }, + ), + ), + ), ) - ) assertEquals(expectedMessage.toString(), actualMessage.toString()) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestAddPolicyActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestAddPolicyActionIT.kt index e863bb097..63d00caf5 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestAddPolicyActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestAddPolicyActionIT.kt @@ -11,6 +11,7 @@ import org.opensearch.client.ResponseException import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.indexstatemanagement.IndexStateManagementRestTestCase import org.opensearch.indexmanagement.indexstatemanagement.util.FAILED_INDICES @@ -20,10 +21,8 @@ import org.opensearch.indexmanagement.indexstatemanagement.util.UPDATED_INDICES import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.waitFor import org.opensearch.rest.RestRequest.Method.POST -import org.opensearch.core.rest.RestStatus class RestAddPolicyActionIT : IndexStateManagementRestTestCase() { - fun `test missing indices`() { try { client().makeRequest(POST.toString(), RestAddPolicyAction.ADD_POLICY_BASE_URI) @@ -31,16 +30,19 @@ class RestAddPolicyActionIT : IndexStateManagementRestTestCase() { } catch (e: ResponseException) { assertEquals("Unexpected RestStatus", RestStatus.BAD_REQUEST, e.response.restStatus()) val actualMessage = e.response.asMap() - val expectedErrorMessage = mapOf( - "error" to mapOf( - "root_cause" to listOf>( - mapOf("type" to "illegal_argument_exception", "reason" to "Missing indices") 
- ), - "type" to "illegal_argument_exception", - "reason" to "Missing indices" - ), - "status" to 400 - ) + val expectedErrorMessage = + mapOf( + "error" to + mapOf( + "root_cause" to + listOf>( + mapOf("type" to "illegal_argument_exception", "reason" to "Missing indices"), + ), + "type" to "illegal_argument_exception", + "reason" to "Missing indices", + ), + "status" to 400, + ) assertEquals(expectedErrorMessage, actualMessage) } } @@ -51,24 +53,27 @@ class RestAddPolicyActionIT : IndexStateManagementRestTestCase() { createIndex(index, null) closeIndex(index) - val response = client().makeRequest( - POST.toString(), - "${RestAddPolicyAction.ADD_POLICY_BASE_URI}/$index", - StringEntity("{ \"policy_id\": \"${policy.id}\" }", ContentType.APPLICATION_JSON) - ) + val response = + client().makeRequest( + POST.toString(), + "${RestAddPolicyAction.ADD_POLICY_BASE_URI}/$index", + StringEntity("{ \"policy_id\": \"${policy.id}\" }", ContentType.APPLICATION_JSON), + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) val actualMessage = response.asMap() - val expectedMessage = mapOf( - FAILURES to true, - UPDATED_INDICES to 0, - FAILED_INDICES to listOf( - mapOf( - "index_name" to index, - "index_uuid" to getUuid(index), - "reason" to "This index is closed" - ) + val expectedMessage = + mapOf( + FAILURES to true, + UPDATED_INDICES to 0, + FAILED_INDICES to + listOf( + mapOf( + "index_name" to index, + "index_uuid" to getUuid(index), + "reason" to "This index is closed", + ), + ), ) - ) assertAffectedIndicesResponseIsEqual(expectedMessage, actualMessage) } @@ -78,24 +83,27 @@ class RestAddPolicyActionIT : IndexStateManagementRestTestCase() { val policy = createRandomPolicy() createIndex(index, policy.id) - val response = client().makeRequest( - POST.toString(), - "${RestAddPolicyAction.ADD_POLICY_BASE_URI}/$index", - StringEntity("{ \"policy_id\": \"${policy.id}\" }", ContentType.APPLICATION_JSON) - ) + val response = + client().makeRequest( + POST.toString(), + "${RestAddPolicyAction.ADD_POLICY_BASE_URI}/$index", + StringEntity("{ \"policy_id\": \"${policy.id}\" }", ContentType.APPLICATION_JSON), + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) val actualMessage = response.asMap() - val expectedMessage = mapOf( - FAILURES to true, - UPDATED_INDICES to 0, - FAILED_INDICES to listOf( - mapOf( - "index_name" to index, - "index_uuid" to getUuid(index), - "reason" to "This index already has a policy, use the update policy API to update index policies" - ) + val expectedMessage = + mapOf( + FAILURES to true, + UPDATED_INDICES to 0, + FAILED_INDICES to + listOf( + mapOf( + "index_name" to index, + "index_uuid" to getUuid(index), + "reason" to "This index already has a policy, use the update policy API to update index policies", + ), + ), ) - ) assertAffectedIndicesResponseIsEqual(expectedMessage, actualMessage) } @@ -110,29 +118,32 @@ class RestAddPolicyActionIT : IndexStateManagementRestTestCase() { closeIndex(indexOne) - val response = client().makeRequest( - POST.toString(), - "${RestAddPolicyAction.ADD_POLICY_BASE_URI}/$indexOne,$indexTwo", - StringEntity("{ \"policy_id\": \"${newPolicy.id}\" }", ContentType.APPLICATION_JSON) - ) + val response = + client().makeRequest( + POST.toString(), + "${RestAddPolicyAction.ADD_POLICY_BASE_URI}/$indexOne,$indexTwo", + StringEntity("{ \"policy_id\": \"${newPolicy.id}\" }", ContentType.APPLICATION_JSON), + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) val actualMessage = 
response.asMap() - val expectedMessage = mapOf( - FAILURES to true, - UPDATED_INDICES to 0, - FAILED_INDICES to listOf( - mapOf( - "index_name" to indexOne, - "index_uuid" to getUuid(indexOne), - "reason" to "This index is closed" - ), - mapOf( - "index_name" to indexTwo, - "index_uuid" to getUuid(indexTwo), - "reason" to "This index already has a policy, use the update policy API to update index policies" - ) + val expectedMessage = + mapOf( + FAILURES to true, + UPDATED_INDICES to 0, + FAILED_INDICES to + listOf( + mapOf( + "index_name" to indexOne, + "index_uuid" to getUuid(indexOne), + "reason" to "This index is closed", + ), + mapOf( + "index_name" to indexTwo, + "index_uuid" to getUuid(indexTwo), + "reason" to "This index already has a policy, use the update policy API to update index policies", + ), + ), ) - ) assertAffectedIndicesResponseIsEqual(expectedMessage, actualMessage) } @@ -150,29 +161,32 @@ class RestAddPolicyActionIT : IndexStateManagementRestTestCase() { closeIndex(indexOne) - val response = client().makeRequest( - POST.toString(), - "${RestAddPolicyAction.ADD_POLICY_BASE_URI}/$indexPattern*", - StringEntity("{ \"policy_id\": \"${newPolicy.id}\" }", ContentType.APPLICATION_JSON) - ) + val response = + client().makeRequest( + POST.toString(), + "${RestAddPolicyAction.ADD_POLICY_BASE_URI}/$indexPattern*", + StringEntity("{ \"policy_id\": \"${newPolicy.id}\" }", ContentType.APPLICATION_JSON), + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) val actualMessage = response.asMap() - val expectedMessage = mapOf( - UPDATED_INDICES to 1, - FAILURES to true, - FAILED_INDICES to listOf( - mapOf( - "index_name" to indexOne, - "index_uuid" to getUuid(indexOne), - "reason" to "This index is closed" - ), - mapOf( - "index_name" to indexTwo, - "index_uuid" to getUuid(indexTwo), - "reason" to "This index already has a policy, use the update policy API to update index policies" - ) + val expectedMessage = + mapOf( + UPDATED_INDICES to 1, + FAILURES to true, + FAILED_INDICES to + listOf( + mapOf( + "index_name" to indexOne, + "index_uuid" to getUuid(indexOne), + "reason" to "This index is closed", + ), + mapOf( + "index_name" to indexTwo, + "index_uuid" to getUuid(indexTwo), + "reason" to "This index already has a policy, use the update policy API to update index policies", + ), + ), ) - ) assertAffectedIndicesResponseIsEqual(expectedMessage, actualMessage) @@ -195,40 +209,43 @@ class RestAddPolicyActionIT : IndexStateManagementRestTestCase() { } } - val response = client().makeRequest( - POST.toString(), - "${RestAddPolicyAction.ADD_POLICY_BASE_URI}/.*", - StringEntity("{ \"policy_id\": \"${policy.id}\" }", ContentType.APPLICATION_JSON) - ) + val response = + client().makeRequest( + POST.toString(), + "${RestAddPolicyAction.ADD_POLICY_BASE_URI}/.*", + StringEntity("{ \"policy_id\": \"${policy.id}\" }", ContentType.APPLICATION_JSON), + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) val actualMessage = response.asMap() // Not going to attach policy to ism config index or other restricted index patterns - val expectedMessage = mapOf( - UPDATED_INDICES to 1, - FAILURES to true, - FAILED_INDICES to listOf( - mapOf( - "index_name" to indexOne, - "index_uuid" to getUuidWithOutStrictChecking(indexOne), - "reason" to "Matches restricted index pattern defined in the cluster setting" - ), - mapOf( - "index_name" to indexTwo, - "index_uuid" to getUuidWithOutStrictChecking(indexTwo), - "reason" to "Matches restricted index pattern defined 
in the cluster setting" - ), - mapOf( - "index_name" to indexThree, - "index_uuid" to getUuidWithOutStrictChecking(indexThree), - "reason" to "Matches restricted index pattern defined in the cluster setting" - ), - mapOf( - "index_name" to IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, - "index_uuid" to getUuidWithOutStrictChecking(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX), - "reason" to "Matches restricted index pattern defined in the cluster setting" - ) + val expectedMessage = + mapOf( + UPDATED_INDICES to 1, + FAILURES to true, + FAILED_INDICES to + listOf( + mapOf( + "index_name" to indexOne, + "index_uuid" to getUuidWithOutStrictChecking(indexOne), + "reason" to "Matches restricted index pattern defined in the cluster setting", + ), + mapOf( + "index_name" to indexTwo, + "index_uuid" to getUuidWithOutStrictChecking(indexTwo), + "reason" to "Matches restricted index pattern defined in the cluster setting", + ), + mapOf( + "index_name" to indexThree, + "index_uuid" to getUuidWithOutStrictChecking(indexThree), + "reason" to "Matches restricted index pattern defined in the cluster setting", + ), + mapOf( + "index_name" to IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, + "index_uuid" to getUuidWithOutStrictChecking(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX), + "reason" to "Matches restricted index pattern defined in the cluster setting", + ), + ), ) - ) assertAffectedIndicesResponseIsEqual(expectedMessage, actualMessage) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestChangePolicyActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestChangePolicyActionIT.kt index eba50e92b..463b745d4 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestChangePolicyActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestChangePolicyActionIT.kt @@ -8,6 +8,7 @@ package org.opensearch.indexmanagement.indexstatemanagement.resthandler import org.junit.Before import org.opensearch.client.ResponseException import org.opensearch.common.settings.Settings +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.indexstatemanagement.IndexStateManagementRestTestCase import org.opensearch.indexmanagement.indexstatemanagement.action.DeleteAction @@ -33,12 +34,10 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedInde import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StateMetaData import org.opensearch.indexmanagement.waitFor import org.opensearch.rest.RestRequest -import org.opensearch.core.rest.RestStatus import java.time.Instant import java.util.Locale class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { - private val testIndexName = javaClass.simpleName.lowercase(Locale.ROOT) @Before @@ -56,16 +55,19 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { } catch (e: ResponseException) { assertEquals("Unexpected RestStatus.", RestStatus.BAD_REQUEST, e.response.restStatus()) val actualMessage = e.response.asMap() - val expectedErrorMessage = mapOf( - "error" to mapOf( - "root_cause" to listOf>( - mapOf("type" to "illegal_argument_exception", "reason" to "Missing index") - ), - "type" to "illegal_argument_exception", - "reason" to "Missing index" - ), - "status" to 400 - ) + val expectedErrorMessage = + mapOf( + "error" to + mapOf( + 
"root_cause" to + listOf>( + mapOf("type" to "illegal_argument_exception", "reason" to "Missing index"), + ), + "type" to "illegal_argument_exception", + "reason" to "Missing index", + ), + "status" to 400, + ) assertEquals(expectedErrorMessage, actualMessage) } } @@ -77,7 +79,7 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { createPolicy(policy, policy.id) client().makeRequest( RestRequest.Method.POST.toString(), - "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/other_index", emptyMap(), changePolicy.toHttpEntity() + "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/other_index", emptyMap(), changePolicy.toHttpEntity(), ) fail("Expected a failure.") } catch (e: ResponseException) { @@ -92,33 +94,36 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { val changePolicy = ChangePolicy("some_id", null, emptyList(), false) client().makeRequest( RestRequest.Method.POST.toString(), - "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/other_index", emptyMap(), changePolicy.toHttpEntity() + "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/other_index", emptyMap(), changePolicy.toHttpEntity(), ) fail("Expected a failure.") } catch (e: ResponseException) { assertEquals("Unexpected RestStatus.", RestStatus.NOT_FOUND, e.response.restStatus()) val actualMessage = e.response.asMap() - val expectedErrorMessage = mapOf( - "error" to mapOf( - "root_cause" to listOf>( + val expectedErrorMessage = + mapOf( + "error" to mapOf( + "root_cause" to + listOf>( + mapOf( + "type" to "index_not_found_exception", + "index_uuid" to "_na_", + "index" to ".opendistro-ism-config", + "resource.type" to "index_expression", + "resource.id" to ".opendistro-ism-config", + "reason" to "no such index [.opendistro-ism-config]", + ), + ), "type" to "index_not_found_exception", "index_uuid" to "_na_", "index" to ".opendistro-ism-config", "resource.type" to "index_expression", "resource.id" to ".opendistro-ism-config", - "reason" to "no such index [.opendistro-ism-config]" - ) - ), - "type" to "index_not_found_exception", - "index_uuid" to "_na_", - "index" to ".opendistro-ism-config", - "resource.type" to "index_expression", - "resource.id" to ".opendistro-ism-config", - "reason" to "no such index [.opendistro-ism-config]" - ), - "status" to 404 - ) + "reason" to "no such index [.opendistro-ism-config]", + ), + "status" to 404, + ) assertEquals(expectedErrorMessage, actualMessage) } } @@ -129,33 +134,36 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { val changePolicy = ChangePolicy(policy.id, null, emptyList(), false) client().makeRequest( RestRequest.Method.POST.toString(), - "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/this_does_not_exist", emptyMap(), changePolicy.toHttpEntity() + "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/this_does_not_exist", emptyMap(), changePolicy.toHttpEntity(), ) fail("Expected a failure.") } catch (e: ResponseException) { assertEquals("Unexpected RestStatus.", RestStatus.NOT_FOUND, e.response.restStatus()) val actualMessage = e.response.asMap() - val expectedErrorMessage = mapOf( - "error" to mapOf( - "root_cause" to listOf>( + val expectedErrorMessage = + mapOf( + "error" to mapOf( + "root_cause" to + listOf>( + mapOf( + "type" to "index_not_found_exception", + "index_uuid" to "_na_", + "index" to "this_does_not_exist", + "resource.type" to "index_or_alias", + "resource.id" to "this_does_not_exist", + "reason" to "no such index [this_does_not_exist]", + ), + ), "type" to "index_not_found_exception", "index_uuid" to 
"_na_", "index" to "this_does_not_exist", "resource.type" to "index_or_alias", "resource.id" to "this_does_not_exist", - "reason" to "no such index [this_does_not_exist]" - ) - ), - "type" to "index_not_found_exception", - "index_uuid" to "_na_", - "index" to "this_does_not_exist", - "resource.type" to "index_or_alias", - "resource.id" to "this_does_not_exist", - "reason" to "no such index [this_does_not_exist]" - ), - "status" to 404 - ) + "reason" to "no such index [this_does_not_exist]", + ), + "status" to 404, + ) assertEquals(expectedErrorMessage, actualMessage) } } @@ -164,21 +172,24 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { // Create a random policy to init .opendistro-ism-config index val policy = createRandomPolicy() val changePolicy = ChangePolicy(policy.id, null, emptyList(), false) - val response = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/movies", emptyMap(), changePolicy.toHttpEntity() - ) - val expectedResponse = mapOf( - FAILURES to true, - FAILED_INDICES to listOf( - mapOf( - "index_name" to "movies", - "index_uuid" to getUuid("movies"), - "reason" to INDEX_NOT_MANAGED - ) - ), - UPDATED_INDICES to 0 - ) + val response = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/movies", emptyMap(), changePolicy.toHttpEntity(), + ) + val expectedResponse = + mapOf( + FAILURES to true, + FAILED_INDICES to + listOf( + mapOf( + "index_name" to "movies", + "index_uuid" to getUuid("movies"), + "reason" to INDEX_NOT_MANAGED, + ), + ), + UPDATED_INDICES to 0, + ) assertAffectedIndicesResponseIsEqual(expectedResponse, response.asMap()) } @@ -194,10 +205,11 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { // If we try to change the policy now, it hasn't actually run and has no ManagedIndexMetaData yet so it should succeed val changePolicy = ChangePolicy(newPolicy.id, null, emptyList(), false) - val response = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/$index", emptyMap(), changePolicy.toHttpEntity() - ) + val response = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/$index", emptyMap(), changePolicy.toHttpEntity(), + ) assertAffectedIndicesResponseIsEqual(mapOf(FAILURES to false, FAILED_INDICES to emptyList(), UPDATED_INDICES to 1), response.asMap()) @@ -206,12 +218,13 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { // speed up to first execution where we initialize the policy on the job updateManagedIndexConfigStartTime(managedIndexConfig) - val updatedManagedIndexConfig = waitFor { - // TODO: get by docID could get older version of the doc which could cause flaky failure - val config = getManagedIndexConfigByDocId(managedIndexConfig.id) - assertEquals(newPolicy.id, config?.policyID) - config - } + val updatedManagedIndexConfig = + waitFor { + // TODO: get by docID could get older version of the doc which could cause flaky failure + val config = getManagedIndexConfigByDocId(managedIndexConfig.id) + assertEquals(newPolicy.id, config?.policyID) + config + } // The initialized policy should be the change policy one assertNotNull("Updated managed index config is null", updatedManagedIndexConfig) @@ -222,7 +235,7 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { // TODO: figure out why the newPolicy.lastUpdatedTime and cached 
policy lastUpdatedTime is off by a few milliseconds assertEquals( "Initialized policy is not the change policy", newPolicy.description, - updatedManagedIndexConfig.policy.description + updatedManagedIndexConfig.policy.description, ) } @@ -238,31 +251,34 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { // if we try to change policy now, it'll have no ManagedIndexMetaData yet and should go through val changePolicy = ChangePolicy(newPolicy.id, null, emptyList(), false) - val response = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/$index,movi*", emptyMap(), changePolicy.toHttpEntity() - ) - val expectedResponse = mapOf( - FAILURES to true, - FAILED_INDICES to listOf( - mapOf( - "index_name" to "movies", - "index_uuid" to getUuid("movies"), - "reason" to INDEX_NOT_MANAGED - ), - mapOf( - "index_name" to "movies_1", - "index_uuid" to getUuid("movies_1"), - "reason" to INDEX_NOT_MANAGED - ), - mapOf( - "index_name" to "movies_2", - "index_uuid" to getUuid("movies_2"), - "reason" to INDEX_NOT_MANAGED - ) - ), - UPDATED_INDICES to 1 - ) + val response = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/$index,movi*", emptyMap(), changePolicy.toHttpEntity(), + ) + val expectedResponse = + mapOf( + FAILURES to true, + FAILED_INDICES to + listOf( + mapOf( + "index_name" to "movies", + "index_uuid" to getUuid("movies"), + "reason" to INDEX_NOT_MANAGED, + ), + mapOf( + "index_name" to "movies_1", + "index_uuid" to getUuid("movies_1"), + "reason" to INDEX_NOT_MANAGED, + ), + mapOf( + "index_name" to "movies_2", + "index_uuid" to getUuid("movies_2"), + "reason" to INDEX_NOT_MANAGED, + ), + ), + UPDATED_INDICES to 1, + ) assertAffectedIndicesResponseIsEqual(expectedResponse, response.asMap()) waitFor { @@ -282,10 +298,11 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { // Creates new policy that has two states, same as before except a second state with a delete action and a transition from readonly to delete states // we will also add a new action to readonly state otherwise an immediate change policy is triggered val stateWithDeleteAction = randomState(actions = listOf(DeleteAction(index = 0))) - val updatedStateWithReadOnlyAction = stateWithReadOnlyAction.copy( - actions = listOf(stateWithReadOnlyAction.actions.first(), OpenAction(index = 1)), - transitions = listOf(Transition(stateWithDeleteAction.name, null)) - ) + val updatedStateWithReadOnlyAction = + stateWithReadOnlyAction.copy( + actions = listOf(stateWithReadOnlyAction.actions.first(), OpenAction(index = 1)), + transitions = listOf(Transition(stateWithDeleteAction.name, null)), + ) val newPolicy = createPolicy(randomPolicy(states = listOf(updatedStateWithReadOnlyAction, stateWithDeleteAction)), "new_policy", true) val indexName = "${testIndexName}_mouse" val (index) = createIndex(indexName, policy.id) @@ -293,7 +310,7 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { // Set index to read-write updateIndexSettings( index, - Settings.builder().put("index.blocks.write", false) + Settings.builder().put("index.blocks.write", false), ) val managedIndexConfig = getExistingManagedIndexConfig(index) @@ -305,45 +322,51 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { // After first execution we should expect the change policy to still be null (since we haven't called it yet) // and the initial policy should have been cached - val 
executedManagedIndexConfig: ManagedIndexConfig = waitFor { - val config = getManagedIndexConfigByDocId(managedIndexConfig.id) - assertNotNull("Executed managed index config is null", config) - assertNull("Executed change policy is not null", config!!.changePolicy) - assertNotNull("Executed policy is null", config.policy) - assertEquals("Executed saved policy does not match initial policy", policy.id, config.policyID) - assertEquals("Index writes should not be blocked", "false", getIndexBlocksWriteSetting(index)) - config - } + val executedManagedIndexConfig: ManagedIndexConfig = + waitFor { + val config = getManagedIndexConfigByDocId(managedIndexConfig.id) + assertNotNull("Executed managed index config is null", config) + assertNull("Executed change policy is not null", config!!.changePolicy) + assertNotNull("Executed policy is null", config.policy) + assertEquals("Executed saved policy does not match initial policy", policy.id, config.policyID) + assertEquals("Index writes should not be blocked", "false", getIndexBlocksWriteSetting(index)) + config + } // We should expect the explain API to show an initialized ManagedIndexMetaData with the default state from the initial policy waitFor { val explainResponseMap = getExplainMap(index) assertPredicatesOnMetaData( listOf( - index to listOf( - explainResponseOpendistroPolicyIdSetting to policy.id::equals, - explainResponseOpenSearchPolicyIdSetting to policy.id::equals, - ManagedIndexMetaData.INDEX to executedManagedIndexConfig.index::equals, - ManagedIndexMetaData.INDEX_UUID to executedManagedIndexConfig.indexUuid::equals, - ManagedIndexMetaData.POLICY_ID to executedManagedIndexConfig.policyID::equals, - StateMetaData.STATE to fun(stateMetaDataMap: Any?): Boolean = - assertStateEquals(StateMetaData(policy.defaultState, Instant.now().toEpochMilli()), stateMetaDataMap) - ) + index to + listOf( + explainResponseOpendistroPolicyIdSetting to policy.id::equals, + explainResponseOpenSearchPolicyIdSetting to policy.id::equals, + ManagedIndexMetaData.INDEX to executedManagedIndexConfig.index::equals, + ManagedIndexMetaData.INDEX_UUID to executedManagedIndexConfig.indexUuid::equals, + ManagedIndexMetaData.POLICY_ID to executedManagedIndexConfig.policyID::equals, + StateMetaData.STATE to + + fun(stateMetaDataMap: Any?): Boolean = + assertStateEquals(StateMetaData(policy.defaultState, Instant.now().toEpochMilli()), stateMetaDataMap), + ), ), - explainResponseMap, false + explainResponseMap, false, ) } val changePolicy = ChangePolicy(newPolicy.id, null, emptyList(), false) - val response = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/$index", emptyMap(), changePolicy.toHttpEntity() - ) - val expectedResponse = mapOf( - FAILURES to false, - FAILED_INDICES to emptyList(), - UPDATED_INDICES to 1 - ) + val response = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/$index", emptyMap(), changePolicy.toHttpEntity(), + ) + val expectedResponse = + mapOf( + FAILURES to false, + FAILED_INDICES to emptyList(), + UPDATED_INDICES to 1, + ) assertAffectedIndicesResponseIsEqual(expectedResponse, response.asMap()) // speed up to second execution we will have a ChangePolicy but not be in Transitions yet @@ -364,63 +387,74 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { waitFor { assertPredicatesOnMetaData( listOf( - index to listOf( - explainResponseOpendistroPolicyIdSetting to policy.id::equals, - 
explainResponseOpenSearchPolicyIdSetting to policy.id::equals, - ManagedIndexMetaData.INDEX to executedManagedIndexConfig.index::equals, - ManagedIndexMetaData.INDEX_UUID to executedManagedIndexConfig.indexUuid::equals, - ManagedIndexMetaData.POLICY_ID to executedManagedIndexConfig.policyID::equals, - StateMetaData.STATE to fun(stateMetaDataMap: Any?): Boolean = - assertStateEquals(StateMetaData(policy.defaultState, Instant.now().toEpochMilli()), stateMetaDataMap), - ActionMetaData.ACTION to fun(actionMetaDataMap: Any?): Boolean = - assertActionEquals( - ActionMetaData( - name = ReadOnlyAction.name, startTime = Instant.now().toEpochMilli(), index = 0, - failed = false, consumedRetries = 0, lastRetryTime = null, actionProperties = null - ), - actionMetaDataMap - ) - ) + index to + listOf( + explainResponseOpendistroPolicyIdSetting to policy.id::equals, + explainResponseOpenSearchPolicyIdSetting to policy.id::equals, + ManagedIndexMetaData.INDEX to executedManagedIndexConfig.index::equals, + ManagedIndexMetaData.INDEX_UUID to executedManagedIndexConfig.indexUuid::equals, + ManagedIndexMetaData.POLICY_ID to executedManagedIndexConfig.policyID::equals, + StateMetaData.STATE to + + fun(stateMetaDataMap: Any?): Boolean = + assertStateEquals(StateMetaData(policy.defaultState, Instant.now().toEpochMilli()), stateMetaDataMap), + ActionMetaData.ACTION to + + fun(actionMetaDataMap: Any?): Boolean = + assertActionEquals( + ActionMetaData( + name = ReadOnlyAction.name, startTime = Instant.now().toEpochMilli(), index = 0, + failed = false, consumedRetries = 0, lastRetryTime = null, actionProperties = null, + ), + actionMetaDataMap, + ), + ), ), - getExplainMap(index), false + getExplainMap(index), false, ) } // speed up to third execution so that we try to move to transitions and trigger a change policy updateManagedIndexConfigStartTime(managedIndexConfig) - val changedManagedIndexConfig: ManagedIndexConfig = waitFor { - val config = getManagedIndexConfigByDocId(managedIndexConfig.id) - assertNotNull("Changed managed index config is null", config) - assertNull("Changed change policy is not null", config!!.changePolicy) - assertNotNull("Changed policy is null", config.policy) - assertEquals("Changed saved policy does not match new policy", newPolicy.id, config.policyID) - assertEquals("Index writes should still be blocked", "true", getIndexBlocksWriteSetting(index)) - config - } + val changedManagedIndexConfig: ManagedIndexConfig = + waitFor { + val config = getManagedIndexConfigByDocId(managedIndexConfig.id) + assertNotNull("Changed managed index config is null", config) + assertNull("Changed change policy is not null", config!!.changePolicy) + assertNotNull("Changed policy is null", config.policy) + assertEquals("Changed saved policy does not match new policy", newPolicy.id, config.policyID) + assertEquals("Index writes should still be blocked", "true", getIndexBlocksWriteSetting(index)) + config + } // We should expect the explain API to show us with the new policy waitFor { assertPredicatesOnMetaData( listOf( - index to listOf( - explainResponseOpendistroPolicyIdSetting to newPolicy.id::equals, - explainResponseOpenSearchPolicyIdSetting to newPolicy.id::equals, - ManagedIndexMetaData.INDEX to changedManagedIndexConfig.index::equals, - ManagedIndexMetaData.INDEX_UUID to changedManagedIndexConfig.indexUuid::equals, - ManagedIndexMetaData.POLICY_ID to changedManagedIndexConfig.policyID::equals, - StateMetaData.STATE to fun(stateMetaDataMap: Any?): Boolean = - 
assertStateEquals(StateMetaData(policy.defaultState, Instant.now().toEpochMilli()), stateMetaDataMap), - ActionMetaData.ACTION to fun(actionMetaDataMap: Any?): Boolean = - assertActionEquals( - ActionMetaData( - name = TransitionsAction.name, startTime = Instant.now().toEpochMilli(), index = 0, - failed = false, consumedRetries = 0, lastRetryTime = null, actionProperties = null - ), - actionMetaDataMap - ) - ) + index to + listOf( + explainResponseOpendistroPolicyIdSetting to newPolicy.id::equals, + explainResponseOpenSearchPolicyIdSetting to newPolicy.id::equals, + ManagedIndexMetaData.INDEX to changedManagedIndexConfig.index::equals, + ManagedIndexMetaData.INDEX_UUID to changedManagedIndexConfig.indexUuid::equals, + ManagedIndexMetaData.POLICY_ID to changedManagedIndexConfig.policyID::equals, + StateMetaData.STATE to + + fun(stateMetaDataMap: Any?): Boolean = + assertStateEquals(StateMetaData(policy.defaultState, Instant.now().toEpochMilli()), stateMetaDataMap), + ActionMetaData.ACTION to + + fun(actionMetaDataMap: Any?): Boolean = + assertActionEquals( + ActionMetaData( + name = TransitionsAction.name, startTime = Instant.now().toEpochMilli(), index = 0, + failed = false, consumedRetries = 0, lastRetryTime = null, actionProperties = null, + ), + actionMetaDataMap, + ), + ), ), - getExplainMap(index), false + getExplainMap(index), false, ) } } @@ -470,15 +504,17 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { val newPolicy = createRandomPolicy() val changePolicy = ChangePolicy(newPolicy.id, null, listOf(StateFilter(state = firstState.name)), false) - val response = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/$firstIndex,$secondIndex", emptyMap(), changePolicy.toHttpEntity() - ) - val expectedResponse = mapOf( - FAILURES to false, - FAILED_INDICES to emptyList(), - UPDATED_INDICES to 1 - ) + val response = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/$firstIndex,$secondIndex", emptyMap(), changePolicy.toHttpEntity(), + ) + val expectedResponse = + mapOf( + FAILURES to false, + FAILED_INDICES to emptyList(), + UPDATED_INDICES to 1, + ) // TODO flaky part, log for more info val responseMap = response.asMap() logger.info("Change policy response: $responseMap") @@ -509,10 +545,11 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { // if we try to change policy now, it'll have no ManagedIndexMetaData yet and should succeed val changePolicy = ChangePolicy(newPolicy.id, "some_other_state", emptyList(), false) - val response = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/$index", emptyMap(), changePolicy.toHttpEntity() - ) + val response = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/$index", emptyMap(), changePolicy.toHttpEntity(), + ) assertAffectedIndicesResponseIsEqual(mapOf(FAILURES to false, FAILED_INDICES to emptyList(), UPDATED_INDICES to 1), response.asMap()) waitFor { assertNotNull(getExistingManagedIndexConfig(index).changePolicy) } @@ -520,33 +557,37 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { updateManagedIndexConfigStartTime(managedIndexConfig) // The initialized policy should be the change policy one - val updatedManagedIndexConfig: ManagedIndexConfig = waitFor { - val config = getManagedIndexConfigByDocId(managedIndexConfig.id) - 
assertNotNull("Updated managed index config is null", config) - assertNull("Updated change policy is not null", config!!.changePolicy) - assertEquals("Initialized policyId is not the change policy id", newPolicy.id, config.policyID) - // Will use the unique generated description to ensure they are the same policies, the cached policy does not have - // id, seqNo, primaryTerm on the policy itself so cannot directly compare - // TODO: figure out why the newPolicy.lastUpdatedTime and cached policy lastUpdatedTime is off by a few milliseconds - assertEquals( - "Initialized policy is not the change policy", newPolicy.description, - config.policy.description - ) - config - } + val updatedManagedIndexConfig: ManagedIndexConfig = + waitFor { + val config = getManagedIndexConfigByDocId(managedIndexConfig.id) + assertNotNull("Updated managed index config is null", config) + assertNull("Updated change policy is not null", config!!.changePolicy) + assertEquals("Initialized policyId is not the change policy id", newPolicy.id, config.policyID) + // Will use the unique generated description to ensure they are the same policies, the cached policy does not have + // id, seqNo, primaryTerm on the policy itself so cannot directly compare + // TODO: figure out why the newPolicy.lastUpdatedTime and cached policy lastUpdatedTime is off by a few milliseconds + assertEquals( + "Initialized policy is not the change policy", newPolicy.description, + config.policy.description, + ) + config + } // should expect to see us starting in the state mentioned in changepolicy waitFor { assertPredicatesOnMetaData( listOf( - index to listOf( - ManagedIndexMetaData.INDEX_UUID to updatedManagedIndexConfig.indexUuid::equals, - ManagedIndexMetaData.POLICY_ID to newPolicy.id::equals, - StateMetaData.STATE to fun(stateMetaDataMap: Any?): Boolean = - assertStateEquals(StateMetaData("some_other_state", Instant.now().toEpochMilli()), stateMetaDataMap) - ) + index to + listOf( + ManagedIndexMetaData.INDEX_UUID to updatedManagedIndexConfig.indexUuid::equals, + ManagedIndexMetaData.POLICY_ID to newPolicy.id::equals, + StateMetaData.STATE to + + fun(stateMetaDataMap: Any?): Boolean = + assertStateEquals(StateMetaData("some_other_state", Instant.now().toEpochMilli()), stateMetaDataMap), + ), ), - getExplainMap(index), false + getExplainMap(index), false, ) } } @@ -579,26 +620,29 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { assertEquals(RolloverAction.name, getExplainManagedIndexMetaData(indexName).actionMetaData?.name) assertEquals( AttemptRolloverStep.getPendingMessage(indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } - val newStateWithReadOnlyAction = randomState( - name = stateWithReadOnlyAction.name, - actions = listOf(RolloverAction(index = 0, minDocs = 5, minAge = null, minSize = null, minPrimaryShardSize = null)) - ) + val newStateWithReadOnlyAction = + randomState( + name = stateWithReadOnlyAction.name, + actions = listOf(RolloverAction(index = 0, minDocs = 5, minAge = null, minSize = null, minPrimaryShardSize = null)), + ) val newRandomPolicy = randomPolicy(states = listOf(newStateWithReadOnlyAction)) val newPolicy = createPolicy(newRandomPolicy) val changePolicy = ChangePolicy(newPolicy.id, null, emptyList(), false) - val response = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/$index", emptyMap(), changePolicy.toHttpEntity() - ) - val 
expectedResponse = mapOf( - FAILURES to false, - FAILED_INDICES to emptyList(), - UPDATED_INDICES to 1 - ) + val response = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/$index", emptyMap(), changePolicy.toHttpEntity(), + ) + val expectedResponse = + mapOf( + FAILURES to false, + FAILED_INDICES to emptyList(), + UPDATED_INDICES to 1, + ) assertAffectedIndicesResponseIsEqual(expectedResponse, response.asMap()) // the change policy REST API should of set safe to true as the policies have the same state/actions @@ -621,7 +665,7 @@ class RestChangePolicyActionIT : IndexStateManagementRestTestCase() { waitFor { assertEquals( AttemptRolloverStep.getSuccessMessage(indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestExplainActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestExplainActionIT.kt index c0842adbf..e364b60fb 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestExplainActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestExplainActionIT.kt @@ -6,18 +6,14 @@ package org.opensearch.indexmanagement.indexstatemanagement.resthandler import org.opensearch.common.xcontent.XContentFactory +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.indexstatemanagement.IndexStateManagementRestTestCase -import org.opensearch.indexmanagement.indexstatemanagement.model.ChangePolicy -import org.opensearch.indexmanagement.makeRequest -import org.opensearch.indexmanagement.opensearchapi.toMap -import org.opensearch.indexmanagement.waitFor -import org.opensearch.rest.RestRequest -import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.indexstatemanagement.action.AllocationAction import org.opensearch.indexmanagement.indexstatemanagement.action.DeleteAction import org.opensearch.indexmanagement.indexstatemanagement.action.OpenAction import org.opensearch.indexmanagement.indexstatemanagement.action.ReadOnlyAction +import org.opensearch.indexmanagement.indexstatemanagement.model.ChangePolicy import org.opensearch.indexmanagement.indexstatemanagement.model.ExplainFilter import org.opensearch.indexmanagement.indexstatemanagement.model.Transition import org.opensearch.indexmanagement.indexstatemanagement.randomPolicy @@ -25,6 +21,8 @@ import org.opensearch.indexmanagement.indexstatemanagement.randomState import org.opensearch.indexmanagement.indexstatemanagement.util.SHOW_POLICY_QUERY_PARAM import org.opensearch.indexmanagement.indexstatemanagement.util.TOTAL_MANAGED_INDICES import org.opensearch.indexmanagement.indexstatemanagement.util.XCONTENT_WITHOUT_TYPE_AND_USER +import org.opensearch.indexmanagement.makeRequest +import org.opensearch.indexmanagement.opensearchapi.toMap import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ActionMetaData import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ActionRetry @@ -32,33 +30,37 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedInde import org.opensearch.indexmanagement.spi.indexstatemanagement.model.PolicyRetryInfoMetaData import 
org.opensearch.indexmanagement.spi.indexstatemanagement.model.StateMetaData import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaData +import org.opensearch.indexmanagement.waitFor +import org.opensearch.rest.RestRequest import java.time.Instant import java.util.Locale class RestExplainActionIT : IndexStateManagementRestTestCase() { - private val testIndexName = javaClass.simpleName.lowercase(Locale.ROOT) fun `test single index`() { val indexName = "${testIndexName}_movies" createIndex(indexName, null) - val expected = mapOf( - TOTAL_MANAGED_INDICES to 0, - indexName to mapOf( - explainResponseOpendistroPolicyIdSetting to null, - explainResponseOpenSearchPolicyIdSetting to null, - ManagedIndexMetaData.ENABLED to null + val expected = + mapOf( + TOTAL_MANAGED_INDICES to 0, + indexName to + mapOf( + explainResponseOpendistroPolicyIdSetting to null, + explainResponseOpenSearchPolicyIdSetting to null, + ManagedIndexMetaData.ENABLED to null, + ), ) - ) assertResponseMap(expected, getExplainMap(indexName)) } fun `test single index explain all`() { val indexName = "${testIndexName}_movies" createIndex(indexName, null) - val expected = mapOf( - "total_managed_indices" to 0 - ) + val expected = + mapOf( + "total_managed_indices" to 0, + ) assertResponseMap(expected, getExplainMap(null)) } @@ -70,22 +72,25 @@ class RestExplainActionIT : IndexStateManagementRestTestCase() { createIndex(indexName1, policy.id) createIndex(indexName2, null) - val expected = mapOf( - TOTAL_MANAGED_INDICES to 1, - indexName1 to mapOf( - explainResponseOpendistroPolicyIdSetting to policy.id, - explainResponseOpenSearchPolicyIdSetting to policy.id, - "index" to indexName1, - "index_uuid" to getUuid(indexName1), - "policy_id" to policy.id, - ManagedIndexMetaData.ENABLED to true - ), - indexName2 to mapOf( - explainResponseOpendistroPolicyIdSetting to null, - explainResponseOpenSearchPolicyIdSetting to null, - ManagedIndexMetaData.ENABLED to null + val expected = + mapOf( + TOTAL_MANAGED_INDICES to 1, + indexName1 to + mapOf( + explainResponseOpendistroPolicyIdSetting to policy.id, + explainResponseOpenSearchPolicyIdSetting to policy.id, + "index" to indexName1, + "index_uuid" to getUuid(indexName1), + "policy_id" to policy.id, + ManagedIndexMetaData.ENABLED to true, + ), + indexName2 to + mapOf( + explainResponseOpendistroPolicyIdSetting to null, + explainResponseOpenSearchPolicyIdSetting to null, + ManagedIndexMetaData.ENABLED to null, + ), ) - ) waitFor { assertResponseMap(expected, getExplainMap("$indexName1,$indexName2")) } @@ -99,17 +104,19 @@ class RestExplainActionIT : IndexStateManagementRestTestCase() { createIndex(indexName1, policy.id) createIndex(indexName2, null) - val expected = mapOf( - indexName1 to mapOf( - explainResponseOpendistroPolicyIdSetting to policy.id, - explainResponseOpenSearchPolicyIdSetting to policy.id, - "index" to indexName1, - "index_uuid" to getUuid(indexName1), - "policy_id" to policy.id, - "enabled" to true - ), - "total_managed_indices" to 1 - ) + val expected = + mapOf( + indexName1 to + mapOf( + explainResponseOpendistroPolicyIdSetting to policy.id, + explainResponseOpenSearchPolicyIdSetting to policy.id, + "index" to indexName1, + "index_uuid" to getUuid(indexName1), + "policy_id" to policy.id, + "enabled" to true, + ), + "total_managed_indices" to 1, + ) waitFor { assertResponseMap(expected, getExplainMap(null)) } @@ -123,30 +130,34 @@ class RestExplainActionIT : IndexStateManagementRestTestCase() { createIndex(indexName1, policyID = policy.id) 
createIndex(indexName2, policyID = policy.id) createIndex(indexName3, null) - val expected = mapOf( - TOTAL_MANAGED_INDICES to 2, - indexName1 to mapOf( - explainResponseOpendistroPolicyIdSetting to policy.id, - explainResponseOpenSearchPolicyIdSetting to policy.id, - "index" to indexName1, - "index_uuid" to getUuid(indexName1), - "policy_id" to policy.id, - ManagedIndexMetaData.ENABLED to true - ), - indexName2 to mapOf( - explainResponseOpendistroPolicyIdSetting to policy.id, - explainResponseOpenSearchPolicyIdSetting to policy.id, - "index" to indexName2, - "index_uuid" to getUuid(indexName2), - "policy_id" to policy.id, - ManagedIndexMetaData.ENABLED to true - ), - indexName3 to mapOf( - explainResponseOpendistroPolicyIdSetting to null, - explainResponseOpenSearchPolicyIdSetting to null, - ManagedIndexMetaData.ENABLED to null + val expected = + mapOf( + TOTAL_MANAGED_INDICES to 2, + indexName1 to + mapOf( + explainResponseOpendistroPolicyIdSetting to policy.id, + explainResponseOpenSearchPolicyIdSetting to policy.id, + "index" to indexName1, + "index_uuid" to getUuid(indexName1), + "policy_id" to policy.id, + ManagedIndexMetaData.ENABLED to true, + ), + indexName2 to + mapOf( + explainResponseOpendistroPolicyIdSetting to policy.id, + explainResponseOpenSearchPolicyIdSetting to policy.id, + "index" to indexName2, + "index_uuid" to getUuid(indexName2), + "policy_id" to policy.id, + ManagedIndexMetaData.ENABLED to true, + ), + indexName3 to + mapOf( + explainResponseOpendistroPolicyIdSetting to null, + explainResponseOpenSearchPolicyIdSetting to null, + ManagedIndexMetaData.ENABLED to null, + ), ) - ) waitFor { assertResponseMap(expected, getExplainMap("$indexName1*")) } @@ -168,55 +179,66 @@ class RestExplainActionIT : IndexStateManagementRestTestCase() { createIndex(indexName4, policyID = policy.id) createIndex(indexName5, policyID = policy.id) addPolicyToIndex(dataStreamName, policy.id) - val indexName1Map = indexName1 to mapOf( - explainResponseOpendistroPolicyIdSetting to policy.id, - explainResponseOpenSearchPolicyIdSetting to policy.id, - "index" to indexName1, - "index_uuid" to getUuid(indexName1), - "policy_id" to policy.id, - "enabled" to true - ) - val indexName2Map = indexName2 to mapOf( - explainResponseOpendistroPolicyIdSetting to policy.id, - explainResponseOpenSearchPolicyIdSetting to policy.id, - "index" to indexName2, - "index_uuid" to getUuid(indexName2), - "policy_id" to policy.id, - "enabled" to true - ) - val indexName4Map = indexName4 to mapOf( - explainResponseOpendistroPolicyIdSetting to policy.id, - explainResponseOpenSearchPolicyIdSetting to policy.id, - "index" to indexName4, - "index_uuid" to getUuid(indexName4), - "policy_id" to policy.id, - "enabled" to true - ) - val indexName5Map = indexName5 to mapOf( - explainResponseOpendistroPolicyIdSetting to policy.id, - explainResponseOpenSearchPolicyIdSetting to policy.id, - "index" to indexName5, - "index_uuid" to getUuid(indexName5), - "policy_id" to policy.id, - "enabled" to true - ) - val datastreamMap = ".ds-$dataStreamName-000001" to mapOf( - explainResponseOpendistroPolicyIdSetting to policy.id, - explainResponseOpenSearchPolicyIdSetting to policy.id, - "index" to ".ds-$dataStreamName-000001", - "index_uuid" to getUuid(".ds-$dataStreamName-000001"), - "policy_id" to policy.id, - "enabled" to true - ) + val indexName1Map = + indexName1 to + mapOf( + explainResponseOpendistroPolicyIdSetting to policy.id, + explainResponseOpenSearchPolicyIdSetting to policy.id, + "index" to indexName1, + "index_uuid" to 
getUuid(indexName1), + "policy_id" to policy.id, + "enabled" to true, + ) + val indexName2Map = + indexName2 to + mapOf( + explainResponseOpendistroPolicyIdSetting to policy.id, + explainResponseOpenSearchPolicyIdSetting to policy.id, + "index" to indexName2, + "index_uuid" to getUuid(indexName2), + "policy_id" to policy.id, + "enabled" to true, + ) + val indexName4Map = + indexName4 to + mapOf( + explainResponseOpendistroPolicyIdSetting to policy.id, + explainResponseOpenSearchPolicyIdSetting to policy.id, + "index" to indexName4, + "index_uuid" to getUuid(indexName4), + "policy_id" to policy.id, + "enabled" to true, + ) + val indexName5Map = + indexName5 to + mapOf( + explainResponseOpendistroPolicyIdSetting to policy.id, + explainResponseOpenSearchPolicyIdSetting to policy.id, + "index" to indexName5, + "index_uuid" to getUuid(indexName5), + "policy_id" to policy.id, + "enabled" to true, + ) + val datastreamMap = + ".ds-$dataStreamName-000001" to + mapOf( + explainResponseOpendistroPolicyIdSetting to policy.id, + explainResponseOpenSearchPolicyIdSetting to policy.id, + "index" to ".ds-$dataStreamName-000001", + "index_uuid" to getUuid(".ds-$dataStreamName-000001"), + "policy_id" to policy.id, + "enabled" to true, + ) waitFor { - val expected = mapOf( - indexName1Map, - indexName2Map, - indexName4Map, - indexName5Map, - "total_managed_indices" to 4 - ) + val expected = + mapOf( + indexName1Map, + indexName2Map, + indexName4Map, + indexName5Map, + "total_managed_indices" to 4, + ) // These should match all non datastream managed indices assertResponseMap(expected, getExplainMap(indexName = null, queryParams = "queryString=$testIndexName*")) assertResponseMap(expected, getExplainMap(indexName = null, queryParams = "queryString=$testIndexName-*")) @@ -224,14 +246,15 @@ class RestExplainActionIT : IndexStateManagementRestTestCase() { } waitFor { - val expected = mapOf( - indexName1Map, - indexName2Map, - indexName4Map, - indexName5Map, - datastreamMap, - "total_managed_indices" to 5 - ) + val expected = + mapOf( + indexName1Map, + indexName2Map, + indexName4Map, + indexName5Map, + datastreamMap, + "total_managed_indices" to 5, + ) // These should match all managed indices including datastreams assertResponseMap(expected, getExplainMap(indexName = null, queryParams = "queryString=*$testIndexName-*")) assertResponseMap(expected, getExplainMap(indexName = null, queryParams = "queryString=*search*")) @@ -239,20 +262,22 @@ class RestExplainActionIT : IndexStateManagementRestTestCase() { } waitFor { - val expected = mapOf( - datastreamMap, - "total_managed_indices" to 1 - ) + val expected = + mapOf( + datastreamMap, + "total_managed_indices" to 1, + ) // These should match all datastream managed indices (and system/hidden indices) assertResponseMap(expected, getExplainMap(indexName = null, queryParams = "queryString=.*")) assertResponseMap(expected, getExplainMap(indexName = null, queryParams = "queryString=.ds-$testIndexName-*")) } waitFor { - val expected = mapOf( - indexName4Map, - "total_managed_indices" to 1 - ) + val expected = + mapOf( + indexName4Map, + "total_managed_indices" to 1, + ) // These should match all just the single index, and validates that it does not match the 15-02-2022 index // i.e. 
if it was still matching on tokens then ["2022", "02", "15"] would match both which we don't want assertResponseMap(expected, getExplainMap(indexName = null, queryParams = "queryString=*2022-02-15")) @@ -274,27 +299,36 @@ class RestExplainActionIT : IndexStateManagementRestTestCase() { val expectedInfoString = mapOf("message" to "Successfully initialized policy: ${policy.id}").toString() assertPredicatesOnMetaData( listOf( - indexName to listOf( - explainResponseOpendistroPolicyIdSetting to policy.id::equals, - explainResponseOpenSearchPolicyIdSetting to policy.id::equals, - ManagedIndexMetaData.INDEX to managedIndexConfig.index::equals, - ManagedIndexMetaData.INDEX_UUID to managedIndexConfig.indexUuid::equals, - ManagedIndexMetaData.POLICY_ID to managedIndexConfig.policyID::equals, - ManagedIndexMetaData.POLICY_SEQ_NO to policy.seqNo.toInt()::equals, - ManagedIndexMetaData.POLICY_PRIMARY_TERM to policy.primaryTerm.toInt()::equals, - ManagedIndexMetaData.INDEX_CREATION_DATE to fun(indexCreationDate: Any?): Boolean = (indexCreationDate as Long) > 1L, - StateMetaData.STATE to fun(stateMetaDataMap: Any?): Boolean = - assertStateEquals( - StateMetaData(policy.defaultState, Instant.now().toEpochMilli()), - stateMetaDataMap - ), - PolicyRetryInfoMetaData.RETRY_INFO to fun(retryInfoMetaDataMap: Any?): Boolean = - assertRetryInfoEquals(PolicyRetryInfoMetaData(false, 0), retryInfoMetaDataMap), - ManagedIndexMetaData.INFO to fun(info: Any?): Boolean = expectedInfoString == info.toString(), - ManagedIndexMetaData.ENABLED to true::equals - ) + indexName to + listOf( + explainResponseOpendistroPolicyIdSetting to policy.id::equals, + explainResponseOpenSearchPolicyIdSetting to policy.id::equals, + ManagedIndexMetaData.INDEX to managedIndexConfig.index::equals, + ManagedIndexMetaData.INDEX_UUID to managedIndexConfig.indexUuid::equals, + ManagedIndexMetaData.POLICY_ID to managedIndexConfig.policyID::equals, + ManagedIndexMetaData.POLICY_SEQ_NO to policy.seqNo.toInt()::equals, + ManagedIndexMetaData.POLICY_PRIMARY_TERM to policy.primaryTerm.toInt()::equals, + ManagedIndexMetaData.INDEX_CREATION_DATE to + + fun(indexCreationDate: Any?): Boolean = (indexCreationDate as Long) > 1L, + StateMetaData.STATE to + + fun(stateMetaDataMap: Any?): Boolean = + assertStateEquals( + StateMetaData(policy.defaultState, Instant.now().toEpochMilli()), + stateMetaDataMap, + ), + PolicyRetryInfoMetaData.RETRY_INFO to + + fun(retryInfoMetaDataMap: Any?): Boolean = + assertRetryInfoEquals(PolicyRetryInfoMetaData(false, 0), retryInfoMetaDataMap), + ManagedIndexMetaData.INFO to + + fun(info: Any?): Boolean = expectedInfoString == info.toString(), + ManagedIndexMetaData.ENABLED to true::equals, + ), ), - getExplainMap(indexName) + getExplainMap(indexName), ) } } @@ -307,12 +341,13 @@ class RestExplainActionIT : IndexStateManagementRestTestCase() { val changePolicy = ChangePolicy(newPolicy.id, null, emptyList(), false) client().makeRequest( RestRequest.Method.POST.toString(), - "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/$indexName", emptyMap(), changePolicy.toHttpEntity() - ) - val deletePolicyResponse = client().makeRequest( - RestRequest.Method.DELETE.toString(), - "${IndexManagementPlugin.LEGACY_POLICY_BASE_URI}/${changePolicy.policyID}" + "${RestChangePolicyAction.CHANGE_POLICY_BASE_URI}/$indexName", emptyMap(), changePolicy.toHttpEntity(), ) + val deletePolicyResponse = + client().makeRequest( + RestRequest.Method.DELETE.toString(), + "${IndexManagementPlugin.LEGACY_POLICY_BASE_URI}/${changePolicy.policyID}", + ) 
assertEquals("Unexpected RestStatus", RestStatus.OK, deletePolicyResponse.restStatus()) val managedIndexConfig = getExistingManagedIndexConfig(indexName) @@ -325,20 +360,27 @@ class RestExplainActionIT : IndexStateManagementRestTestCase() { val explainMap = getExplainMap(indexName) assertPredicatesOnMetaData( listOf( - indexName to listOf( - explainResponseOpendistroPolicyIdSetting to policy.id::equals, - explainResponseOpenSearchPolicyIdSetting to policy.id::equals, - ManagedIndexMetaData.INDEX to managedIndexConfig.index::equals, - ManagedIndexMetaData.INDEX_UUID to managedIndexConfig.indexUuid::equals, - ManagedIndexMetaData.POLICY_ID to newPolicy.id::equals, - PolicyRetryInfoMetaData.RETRY_INFO to fun(retryInfoMetaDataMap: Any?): Boolean = - assertRetryInfoEquals(PolicyRetryInfoMetaData(true, 0), retryInfoMetaDataMap), - ManagedIndexMetaData.INFO to fun(info: Any?): Boolean = expectedInfoString == info.toString(), - ManagedIndexMetaData.INDEX_CREATION_DATE to fun(indexCreationDate: Any?): Boolean = (indexCreationDate as Long) > 1L, - ManagedIndexMetaData.ENABLED to true::equals - ) + indexName to + listOf( + explainResponseOpendistroPolicyIdSetting to policy.id::equals, + explainResponseOpenSearchPolicyIdSetting to policy.id::equals, + ManagedIndexMetaData.INDEX to managedIndexConfig.index::equals, + ManagedIndexMetaData.INDEX_UUID to managedIndexConfig.indexUuid::equals, + ManagedIndexMetaData.POLICY_ID to newPolicy.id::equals, + PolicyRetryInfoMetaData.RETRY_INFO to + + fun(retryInfoMetaDataMap: Any?): Boolean = + assertRetryInfoEquals(PolicyRetryInfoMetaData(true, 0), retryInfoMetaDataMap), + ManagedIndexMetaData.INFO to + + fun(info: Any?): Boolean = expectedInfoString == info.toString(), + ManagedIndexMetaData.INDEX_CREATION_DATE to + + fun(indexCreationDate: Any?): Boolean = (indexCreationDate as Long) > 1L, + ManagedIndexMetaData.ENABLED to true::equals, + ), ), - explainMap + explainMap, ) } } @@ -349,18 +391,20 @@ class RestExplainActionIT : IndexStateManagementRestTestCase() { createIndex(indexName, policy.id) val expectedPolicy = policy.toXContent(XContentFactory.jsonBuilder(), XCONTENT_WITHOUT_TYPE_AND_USER).toMap() - val expected = mapOf( - indexName to mapOf( - explainResponseOpendistroPolicyIdSetting to policy.id, - explainResponseOpenSearchPolicyIdSetting to policy.id, - "index" to indexName, - "index_uuid" to getUuid(indexName), - "policy_id" to policy.id, - ManagedIndexMetaData.ENABLED to true, - "policy" to expectedPolicy, - ), - TOTAL_MANAGED_INDICES to 1, - ) + val expected = + mapOf( + indexName to + mapOf( + explainResponseOpendistroPolicyIdSetting to policy.id, + explainResponseOpenSearchPolicyIdSetting to policy.id, + "index" to indexName, + "index_uuid" to getUuid(indexName), + "policy_id" to policy.id, + ManagedIndexMetaData.ENABLED to true, + "policy" to expectedPolicy, + ), + TOTAL_MANAGED_INDICES to 1, + ) waitFor { logger.info(getExplainMap(indexName, queryParams = SHOW_POLICY_QUERY_PARAM)) assertResponseMap(expected, getExplainMap(indexName, queryParams = SHOW_POLICY_QUERY_PARAM)) @@ -375,10 +419,11 @@ class RestExplainActionIT : IndexStateManagementRestTestCase() { val policy1 = createPolicy(randomPolicy(states = listOf(stateWithReadOnlyAction))) val stateWithDeleteAction = randomState(actions = listOf(DeleteAction(index = 0))) - val updatedStateWithReadOnlyAction = stateWithReadOnlyAction.copy( - actions = listOf(stateWithReadOnlyAction.actions.first(), OpenAction(index = 1)), - transitions = listOf(Transition(stateWithDeleteAction.name, null)) - ) + 
val updatedStateWithReadOnlyAction = + stateWithReadOnlyAction.copy( + actions = listOf(stateWithReadOnlyAction.actions.first(), OpenAction(index = 1)), + transitions = listOf(Transition(stateWithDeleteAction.name, null)), + ) val policy2 = createPolicy(randomPolicy(states = listOf(stateWithDeleteAction, updatedStateWithReadOnlyAction))) createIndex(indexName1, policy1.id) @@ -400,74 +445,100 @@ class RestExplainActionIT : IndexStateManagementRestTestCase() { // speed up to execute set read only step updateManagedIndexConfigStartTime(managedIndexConfig1) - val index1Predicates = indexName1 to listOf( - explainResponseOpendistroPolicyIdSetting to policy1.id::equals, - explainResponseOpenSearchPolicyIdSetting to policy1.id::equals, - ManagedIndexMetaData.INDEX to managedIndexConfig1.index::equals, - ManagedIndexMetaData.INDEX_UUID to managedIndexConfig1.indexUuid::equals, - ManagedIndexMetaData.POLICY_ID to managedIndexConfig1.policyID::equals, - ManagedIndexMetaData.POLICY_SEQ_NO to policy1.seqNo.toInt()::equals, - ManagedIndexMetaData.POLICY_PRIMARY_TERM to policy1.primaryTerm.toInt()::equals, - ManagedIndexMetaData.INDEX_CREATION_DATE to fun(indexCreationDate: Any?): Boolean = (indexCreationDate as Long) > 1L, - StateMetaData.STATE to fun(stateMetaDataMap: Any?): Boolean = assertStateEquals( - StateMetaData(policy1.defaultState, Instant.now().toEpochMilli()), - stateMetaDataMap - ), - ActionMetaData.ACTION to fun(actionMetaDataMap: Any?): Boolean = assertActionEquals( - ActionMetaData( - name = "read_only", startTime = Instant.now().toEpochMilli(), failed = false, - index = 0, consumedRetries = 0, lastRetryTime = null, actionProperties = null - ), - actionMetaDataMap - ), - PolicyRetryInfoMetaData.RETRY_INFO to fun(retryInfoMetaDataMap: Any?): Boolean = - assertRetryInfoEquals(PolicyRetryInfoMetaData(false, 0), retryInfoMetaDataMap), - ManagedIndexMetaData.ENABLED to true::equals - ) + val index1Predicates = + indexName1 to + listOf( + explainResponseOpendistroPolicyIdSetting to policy1.id::equals, + explainResponseOpenSearchPolicyIdSetting to policy1.id::equals, + ManagedIndexMetaData.INDEX to managedIndexConfig1.index::equals, + ManagedIndexMetaData.INDEX_UUID to managedIndexConfig1.indexUuid::equals, + ManagedIndexMetaData.POLICY_ID to managedIndexConfig1.policyID::equals, + ManagedIndexMetaData.POLICY_SEQ_NO to policy1.seqNo.toInt()::equals, + ManagedIndexMetaData.POLICY_PRIMARY_TERM to policy1.primaryTerm.toInt()::equals, + ManagedIndexMetaData.INDEX_CREATION_DATE to + + fun(indexCreationDate: Any?): Boolean = (indexCreationDate as Long) > 1L, + StateMetaData.STATE to + + fun(stateMetaDataMap: Any?): Boolean = + assertStateEquals( + StateMetaData(policy1.defaultState, Instant.now().toEpochMilli()), + stateMetaDataMap, + ), + ActionMetaData.ACTION to + + fun(actionMetaDataMap: Any?): Boolean = + assertActionEquals( + ActionMetaData( + name = "read_only", startTime = Instant.now().toEpochMilli(), failed = false, + index = 0, consumedRetries = 0, lastRetryTime = null, actionProperties = null, + ), + actionMetaDataMap, + ), + PolicyRetryInfoMetaData.RETRY_INFO to - val index2Predicates = indexName2 to listOf( - explainResponseOpendistroPolicyIdSetting to policy2.id::equals, - explainResponseOpenSearchPolicyIdSetting to policy2.id::equals, - ManagedIndexMetaData.INDEX to managedIndexConfig2.index::equals, - ManagedIndexMetaData.INDEX_UUID to managedIndexConfig2.indexUuid::equals, - ManagedIndexMetaData.POLICY_ID to managedIndexConfig2.policyID::equals, - ManagedIndexMetaData.POLICY_SEQ_NO 
to policy2.seqNo.toInt()::equals, - ManagedIndexMetaData.POLICY_PRIMARY_TERM to policy2.primaryTerm.toInt()::equals, - ManagedIndexMetaData.INDEX_CREATION_DATE to fun(indexCreationDate: Any?): Boolean = (indexCreationDate as Long) > 1L, - StateMetaData.STATE to fun(stateMetaDataMap: Any?): Boolean = assertStateEquals( - StateMetaData(policy2.defaultState, Instant.now().toEpochMilli()), - stateMetaDataMap - ), - ActionMetaData.ACTION to fun(actionMetaDataMap: Any?): Boolean = assertActionEquals( - ActionMetaData( - name = "delete", startTime = Instant.now().toEpochMilli(), failed = false, - index = 0, consumedRetries = 0, lastRetryTime = null, actionProperties = null - ), - actionMetaDataMap - ), - PolicyRetryInfoMetaData.RETRY_INFO to fun(retryInfoMetaDataMap: Any?): Boolean = - assertRetryInfoEquals(PolicyRetryInfoMetaData(false, 0), retryInfoMetaDataMap), - ManagedIndexMetaData.ENABLED to true::equals - ) + fun(retryInfoMetaDataMap: Any?): Boolean = + assertRetryInfoEquals(PolicyRetryInfoMetaData(false, 0), retryInfoMetaDataMap), + ManagedIndexMetaData.ENABLED to true::equals, + ) + + val index2Predicates = + indexName2 to + listOf( + explainResponseOpendistroPolicyIdSetting to policy2.id::equals, + explainResponseOpenSearchPolicyIdSetting to policy2.id::equals, + ManagedIndexMetaData.INDEX to managedIndexConfig2.index::equals, + ManagedIndexMetaData.INDEX_UUID to managedIndexConfig2.indexUuid::equals, + ManagedIndexMetaData.POLICY_ID to managedIndexConfig2.policyID::equals, + ManagedIndexMetaData.POLICY_SEQ_NO to policy2.seqNo.toInt()::equals, + ManagedIndexMetaData.POLICY_PRIMARY_TERM to policy2.primaryTerm.toInt()::equals, + ManagedIndexMetaData.INDEX_CREATION_DATE to + + fun(indexCreationDate: Any?): Boolean = (indexCreationDate as Long) > 1L, + StateMetaData.STATE to + + fun(stateMetaDataMap: Any?): Boolean = + assertStateEquals( + StateMetaData(policy2.defaultState, Instant.now().toEpochMilli()), + stateMetaDataMap, + ), + ActionMetaData.ACTION to + + fun(actionMetaDataMap: Any?): Boolean = + assertActionEquals( + ActionMetaData( + name = "delete", startTime = Instant.now().toEpochMilli(), failed = false, + index = 0, consumedRetries = 0, lastRetryTime = null, actionProperties = null, + ), + actionMetaDataMap, + ), + PolicyRetryInfoMetaData.RETRY_INFO to + + fun(retryInfoMetaDataMap: Any?): Boolean = + assertRetryInfoEquals(PolicyRetryInfoMetaData(false, 0), retryInfoMetaDataMap), + ManagedIndexMetaData.ENABLED to true::equals, + ) // check metadata for result from filtering on the first policy and its state waitFor { - val filterPolicy = ExplainFilter( - policyID = policy1.id, - state = policy1.states[0].name, - failed = false - ) - - val resp = client().makeRequest( - RestRequest.Method.POST.toString(), - RestExplainAction.EXPLAIN_BASE_URI, emptyMap(), filterPolicy.toHttpEntity() - ) + val filterPolicy = + ExplainFilter( + policyID = policy1.id, + state = policy1.states[0].name, + failed = false, + ) + + val resp = + client().makeRequest( + RestRequest.Method.POST.toString(), + RestExplainAction.EXPLAIN_BASE_URI, emptyMap(), filterPolicy.toHttpEntity(), + ) assertEquals("Unexpected RestStatus", RestStatus.OK, resp.restStatus()) assertPredicatesOnMetaData( listOf(index1Predicates), - resp.asMap(), false + resp.asMap(), false, ) } @@ -478,23 +549,25 @@ class RestExplainActionIT : IndexStateManagementRestTestCase() { updateManagedIndexConfigStartTime(managedIndexConfig2) waitFor { - val filterPolicy = ExplainFilter( - actionType = "delete", - failed = false - ) - - val resp = 
client().makeRequest( - RestRequest.Method.POST.toString(), - RestExplainAction.EXPLAIN_BASE_URI, emptyMap(), filterPolicy.toHttpEntity() - ) + val filterPolicy = + ExplainFilter( + actionType = "delete", + failed = false, + ) + + val resp = + client().makeRequest( + RestRequest.Method.POST.toString(), + RestExplainAction.EXPLAIN_BASE_URI, emptyMap(), filterPolicy.toHttpEntity(), + ) assertEquals("Unexpected RestStatus", RestStatus.OK, resp.restStatus()) assertPredicatesOnMetaData( listOf( - index2Predicates + index2Predicates, ), - resp.asMap(), false + resp.asMap(), false, ) } @@ -512,16 +585,18 @@ class RestExplainActionIT : IndexStateManagementRestTestCase() { // for failed index val config = AllocationAction(require = mapOf("..//" to "value"), exclude = emptyMap(), include = emptyMap(), index = 0) config.configRetry = ActionRetry(0) - val states = listOf( - randomState().copy( - transitions = listOf(), - actions = listOf(config) + val states = + listOf( + randomState().copy( + transitions = listOf(), + actions = listOf(config), + ), + ) + val invalidPolicy = + randomPolicy().copy( + states = states, + defaultState = states[0].name, ) - ) - val invalidPolicy = randomPolicy().copy( - states = states, - defaultState = states[0].name - ) // for successful index val stateWithReadOnlyAction = randomState(actions = listOf(ReadOnlyAction(index = 0))) @@ -544,67 +619,83 @@ class RestExplainActionIT : IndexStateManagementRestTestCase() { // Change the start time so that we attempt allocation that is intended to fail updateManagedIndexConfigStartTime(managedIndexConfig1) waitFor { - val explainFilter = ExplainFilter( - failed = true - ) + val explainFilter = + ExplainFilter( + failed = true, + ) - val resp = client().makeRequest( - RestRequest.Method.POST.toString(), - RestExplainAction.EXPLAIN_BASE_URI, emptyMap(), explainFilter.toHttpEntity() - ) + val resp = + client().makeRequest( + RestRequest.Method.POST.toString(), + RestExplainAction.EXPLAIN_BASE_URI, emptyMap(), explainFilter.toHttpEntity(), + ) assertEquals("Unexpected RestStatus", RestStatus.OK, resp.restStatus()) assertPredicatesOnMetaData( listOf( - indexName1 to listOf( - explainResponseOpendistroPolicyIdSetting to invalidPolicy.id::equals, - explainResponseOpenSearchPolicyIdSetting to invalidPolicy.id::equals, - ManagedIndexMetaData.INDEX to managedIndexConfig1.index::equals, - ManagedIndexMetaData.INDEX_UUID to managedIndexConfig1.indexUuid::equals, - ManagedIndexMetaData.POLICY_ID to managedIndexConfig1.policyID::equals, - ManagedIndexMetaData.INDEX_CREATION_DATE to fun(indexCreationDate: Any?): Boolean = (indexCreationDate as Long) > 1L, - StepMetaData.STEP to fun(stepMetaDataMap: Any?): Boolean = assertStepEquals( - StepMetaData("attempt_allocation", Instant.now().toEpochMilli(), Step.StepStatus.FAILED), - stepMetaDataMap + indexName1 to + listOf( + explainResponseOpendistroPolicyIdSetting to invalidPolicy.id::equals, + explainResponseOpenSearchPolicyIdSetting to invalidPolicy.id::equals, + ManagedIndexMetaData.INDEX to managedIndexConfig1.index::equals, + ManagedIndexMetaData.INDEX_UUID to managedIndexConfig1.indexUuid::equals, + ManagedIndexMetaData.POLICY_ID to managedIndexConfig1.policyID::equals, + ManagedIndexMetaData.INDEX_CREATION_DATE to + + fun(indexCreationDate: Any?): Boolean = (indexCreationDate as Long) > 1L, + StepMetaData.STEP to + + fun(stepMetaDataMap: Any?): Boolean = + assertStepEquals( + StepMetaData("attempt_allocation", Instant.now().toEpochMilli(), Step.StepStatus.FAILED), + stepMetaDataMap, + ), + 
ManagedIndexMetaData.ENABLED to true::equals, ), - ManagedIndexMetaData.ENABLED to true::equals - ) ), - resp.asMap(), false + resp.asMap(), false, ) } updateManagedIndexConfigStartTime(managedIndexConfig2) waitFor { - val explainFilter = ExplainFilter( - failed = false - ) + val explainFilter = + ExplainFilter( + failed = false, + ) - val resp = client().makeRequest( - RestRequest.Method.POST.toString(), - RestExplainAction.EXPLAIN_BASE_URI, emptyMap(), explainFilter.toHttpEntity() - ) + val resp = + client().makeRequest( + RestRequest.Method.POST.toString(), + RestExplainAction.EXPLAIN_BASE_URI, emptyMap(), explainFilter.toHttpEntity(), + ) assertEquals("Unexpected RestStatus", RestStatus.OK, resp.restStatus()) assertPredicatesOnMetaData( listOf( - indexName2 to listOf( - explainResponseOpendistroPolicyIdSetting to validPolicy.id::equals, - explainResponseOpenSearchPolicyIdSetting to validPolicy.id::equals, - ManagedIndexMetaData.INDEX to managedIndexConfig2.index::equals, - ManagedIndexMetaData.INDEX_UUID to managedIndexConfig2.indexUuid::equals, - ManagedIndexMetaData.POLICY_ID to managedIndexConfig2.policyID::equals, - ManagedIndexMetaData.INDEX_CREATION_DATE to fun(indexCreationDate: Any?): Boolean = (indexCreationDate as Long) > 1L, - StepMetaData.STEP to fun(stepMetaDataMap: Any?): Boolean = assertStepEquals( - StepMetaData("set_read_only", Instant.now().toEpochMilli(), Step.StepStatus.STARTING), - stepMetaDataMap + indexName2 to + listOf( + explainResponseOpendistroPolicyIdSetting to validPolicy.id::equals, + explainResponseOpenSearchPolicyIdSetting to validPolicy.id::equals, + ManagedIndexMetaData.INDEX to managedIndexConfig2.index::equals, + ManagedIndexMetaData.INDEX_UUID to managedIndexConfig2.indexUuid::equals, + ManagedIndexMetaData.POLICY_ID to managedIndexConfig2.policyID::equals, + ManagedIndexMetaData.INDEX_CREATION_DATE to + + fun(indexCreationDate: Any?): Boolean = (indexCreationDate as Long) > 1L, + StepMetaData.STEP to + + fun(stepMetaDataMap: Any?): Boolean = + assertStepEquals( + StepMetaData("set_read_only", Instant.now().toEpochMilli(), Step.StepStatus.STARTING), + stepMetaDataMap, + ), + ManagedIndexMetaData.ENABLED to true::equals, ), - ManagedIndexMetaData.ENABLED to true::equals - ) ), - resp.asMap(), false + resp.asMap(), false, ) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestRemovePolicyActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestRemovePolicyActionIT.kt index 4449049e3..83ea5c99b 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestRemovePolicyActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestRemovePolicyActionIT.kt @@ -7,6 +7,7 @@ package org.opensearch.indexmanagement.indexstatemanagement.resthandler import org.opensearch.client.ResponseException import org.opensearch.cluster.metadata.IndexMetadata +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.indexstatemanagement.IndexStateManagementRestTestCase import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings import org.opensearch.indexmanagement.indexstatemanagement.util.FAILED_INDICES @@ -15,10 +16,8 @@ import org.opensearch.indexmanagement.indexstatemanagement.util.UPDATED_INDICES import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.waitFor import org.opensearch.rest.RestRequest.Method.POST -import 
org.opensearch.core.rest.RestStatus class RestRemovePolicyActionIT : IndexStateManagementRestTestCase() { - fun `test missing indices`() { try { client().makeRequest(POST.toString(), RestRemovePolicyAction.REMOVE_POLICY_BASE_URI) @@ -26,16 +25,19 @@ class RestRemovePolicyActionIT : IndexStateManagementRestTestCase() { } catch (e: ResponseException) { assertEquals("Unexpected RestStatus", RestStatus.BAD_REQUEST, e.response.restStatus()) val actualMessage = e.response.asMap() - val expectedErrorMessage = mapOf( - "error" to mapOf( - "root_cause" to listOf>( - mapOf("type" to "illegal_argument_exception", "reason" to "Missing indices") - ), - "type" to "illegal_argument_exception", - "reason" to "Missing indices" - ), - "status" to 400 - ) + val expectedErrorMessage = + mapOf( + "error" to + mapOf( + "root_cause" to + listOf>( + mapOf("type" to "illegal_argument_exception", "reason" to "Missing indices"), + ), + "type" to "illegal_argument_exception", + "reason" to "Missing indices", + ), + "status" to 400, + ) assertEquals(expectedErrorMessage, actualMessage) } } @@ -46,23 +48,26 @@ class RestRemovePolicyActionIT : IndexStateManagementRestTestCase() { createIndex(index, policy.id) closeIndex(index) - val response = client().makeRequest( - POST.toString(), - "${RestRemovePolicyAction.REMOVE_POLICY_BASE_URI}/$index" - ) + val response = + client().makeRequest( + POST.toString(), + "${RestRemovePolicyAction.REMOVE_POLICY_BASE_URI}/$index", + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) val actualMessage = response.asMap() - val expectedMessage = mapOf( - FAILURES to true, - UPDATED_INDICES to 0, - FAILED_INDICES to listOf( - mapOf( - "index_name" to index, - "index_uuid" to getUuid(index), - "reason" to "This index is closed" - ) + val expectedMessage = + mapOf( + FAILURES to true, + UPDATED_INDICES to 0, + FAILED_INDICES to + listOf( + mapOf( + "index_name" to index, + "index_uuid" to getUuid(index), + "reason" to "This index is closed", + ), + ), ) - ) assertAffectedIndicesResponseIsEqual(expectedMessage, actualMessage) } @@ -72,23 +77,26 @@ class RestRemovePolicyActionIT : IndexStateManagementRestTestCase() { createRandomPolicy() createIndex(index, null) - val response = client().makeRequest( - POST.toString(), - "${RestRemovePolicyAction.REMOVE_POLICY_BASE_URI}/$index" - ) + val response = + client().makeRequest( + POST.toString(), + "${RestRemovePolicyAction.REMOVE_POLICY_BASE_URI}/$index", + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) val actualMessage = response.asMap() - val expectedMessage = mapOf( - FAILURES to true, - UPDATED_INDICES to 0, - FAILED_INDICES to listOf( - mapOf( - "index_name" to index, - "index_uuid" to getUuid(index), - "reason" to "This index does not have a policy to remove" - ) + val expectedMessage = + mapOf( + FAILURES to true, + UPDATED_INDICES to 0, + FAILED_INDICES to + listOf( + mapOf( + "index_name" to index, + "index_uuid" to getUuid(index), + "reason" to "This index does not have a policy to remove", + ), + ), ) - ) assertAffectedIndicesResponseIsEqual(expectedMessage, actualMessage) } @@ -102,28 +110,31 @@ class RestRemovePolicyActionIT : IndexStateManagementRestTestCase() { closeIndex(indexOne) - val response = client().makeRequest( - POST.toString(), - "${RestRemovePolicyAction.REMOVE_POLICY_BASE_URI}/$indexOne,$indexTwo" - ) + val response = + client().makeRequest( + POST.toString(), + "${RestRemovePolicyAction.REMOVE_POLICY_BASE_URI}/$indexOne,$indexTwo", + ) assertEquals("Unexpected 
RestStatus", RestStatus.OK, response.restStatus()) val actualMessage = response.asMap() - val expectedMessage = mapOf( - FAILURES to true, - UPDATED_INDICES to 0, - FAILED_INDICES to listOf( - mapOf( - "index_name" to indexOne, - "index_uuid" to getUuid(indexOne), - "reason" to "This index is closed" - ), - mapOf( - "index_name" to indexTwo, - "index_uuid" to getUuid(indexTwo), - "reason" to "This index does not have a policy to remove" - ) + val expectedMessage = + mapOf( + FAILURES to true, + UPDATED_INDICES to 0, + FAILED_INDICES to + listOf( + mapOf( + "index_name" to indexOne, + "index_uuid" to getUuid(indexOne), + "reason" to "This index is closed", + ), + mapOf( + "index_name" to indexTwo, + "index_uuid" to getUuid(indexTwo), + "reason" to "This index does not have a policy to remove", + ), + ), ) - ) assertAffectedIndicesResponseIsEqual(expectedMessage, actualMessage) } @@ -140,28 +151,31 @@ class RestRemovePolicyActionIT : IndexStateManagementRestTestCase() { closeIndex(indexOne) - val response = client().makeRequest( - POST.toString(), - "${RestRemovePolicyAction.REMOVE_POLICY_BASE_URI}/$indexPattern*" - ) + val response = + client().makeRequest( + POST.toString(), + "${RestRemovePolicyAction.REMOVE_POLICY_BASE_URI}/$indexPattern*", + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) val actualMessage = response.asMap() - val expectedMessage = mapOf( - UPDATED_INDICES to 1, - FAILURES to true, - FAILED_INDICES to listOf( - mapOf( - "index_name" to indexOne, - "index_uuid" to getUuid(indexOne), - "reason" to "This index is closed" - ), - mapOf( - "index_name" to indexTwo, - "index_uuid" to getUuid(indexTwo), - "reason" to "This index does not have a policy to remove" - ) + val expectedMessage = + mapOf( + UPDATED_INDICES to 1, + FAILURES to true, + FAILED_INDICES to + listOf( + mapOf( + "index_name" to indexOne, + "index_uuid" to getUuid(indexOne), + "reason" to "This index is closed", + ), + mapOf( + "index_name" to indexTwo, + "index_uuid" to getUuid(indexTwo), + "reason" to "This index does not have a policy to remove", + ), + ), ) - ) assertAffectedIndicesResponseIsEqual(expectedMessage, actualMessage) @@ -186,17 +200,19 @@ class RestRemovePolicyActionIT : IndexStateManagementRestTestCase() { updateIndexSetting(index2, IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE, "true") updateIndexSetting(index4, ManagedIndexSettings.AUTO_MANAGE.key, "false") - val response = client().makeRequest( - POST.toString(), - "${RestRemovePolicyAction.REMOVE_POLICY_BASE_URI}/$indexPattern" - ) + val response = + client().makeRequest( + POST.toString(), + "${RestRemovePolicyAction.REMOVE_POLICY_BASE_URI}/$indexPattern", + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) val actualMessage = response.asMap() - val expectedMessage = mapOf( - UPDATED_INDICES to 4, - FAILURES to false, - FAILED_INDICES to emptyList() - ) + val expectedMessage = + mapOf( + UPDATED_INDICES to 4, + FAILURES to false, + FAILED_INDICES to emptyList(), + ) assertAffectedIndicesResponseIsEqual(expectedMessage, actualMessage) waitFor { diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestRetryFailedManagedIndexActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestRetryFailedManagedIndexActionIT.kt index 4425e5ffd..63d7d36f2 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestRetryFailedManagedIndexActionIT.kt +++ 
b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/resthandler/RestRetryFailedManagedIndexActionIT.kt @@ -6,11 +6,13 @@ package org.opensearch.indexmanagement.indexstatemanagement.resthandler import org.opensearch.client.ResponseException +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.indexstatemanagement.IndexStateManagementRestTestCase import org.opensearch.indexmanagement.indexstatemanagement.action.AllocationAction import org.opensearch.indexmanagement.indexstatemanagement.randomForceMergeActionConfig import org.opensearch.indexmanagement.indexstatemanagement.randomPolicy import org.opensearch.indexmanagement.indexstatemanagement.randomState +import org.opensearch.indexmanagement.indexstatemanagement.step.forcemerge.AttemptSetReadOnlyStep import org.opensearch.indexmanagement.indexstatemanagement.util.FAILED_INDICES import org.opensearch.indexmanagement.indexstatemanagement.util.FAILURES import org.opensearch.indexmanagement.indexstatemanagement.util.UPDATED_INDICES @@ -21,13 +23,10 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ActionRetry import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData import org.opensearch.indexmanagement.waitFor import org.opensearch.rest.RestRequest -import org.opensearch.core.rest.RestStatus -import org.opensearch.indexmanagement.indexstatemanagement.step.forcemerge.AttemptSetReadOnlyStep import java.time.Instant import java.util.Locale class RestRetryFailedManagedIndexActionIT : IndexStateManagementRestTestCase() { - private val testIndexName = javaClass.simpleName.lowercase(Locale.ROOT) fun `test missing indices`() { @@ -37,16 +36,19 @@ class RestRetryFailedManagedIndexActionIT : IndexStateManagementRestTestCase() { } catch (e: ResponseException) { assertEquals("Unexpected RestStatus", RestStatus.BAD_REQUEST, e.response.restStatus()) val actualMessage = e.response.asMap() - val expectedErrorMessage = mapOf( - "error" to mapOf( - "root_cause" to listOf>( - mapOf("type" to "illegal_argument_exception", "reason" to "Missing indices") - ), - "type" to "illegal_argument_exception", - "reason" to "Missing indices" - ), - "status" to 400 - ) + val expectedErrorMessage = + mapOf( + "error" to + mapOf( + "root_cause" to + listOf>( + mapOf("type" to "illegal_argument_exception", "reason" to "Missing indices"), + ), + "type" to "illegal_argument_exception", + "reason" to "Missing indices", + ), + "status" to 400, + ) assertEquals(expectedErrorMessage, actualMessage) } } @@ -62,28 +64,31 @@ class RestRetryFailedManagedIndexActionIT : IndexStateManagementRestTestCase() { createIndex(indexName2, null) createIndex(indexName3, null) - val response = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestRetryFailedManagedIndexAction.RETRY_BASE_URI}/$indexName,$indexName1" - ) + val response = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestRetryFailedManagedIndexAction.RETRY_BASE_URI}/$indexName,$indexName1", + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) val actualMessage = response.asMap() - val expectedErrorMessage = mapOf( - FAILURES to true, - UPDATED_INDICES to 0, - FAILED_INDICES to listOf( - mapOf( - "index_name" to indexName, - "index_uuid" to getUuid(indexName), - "reason" to "This index is not being managed." 
- ), - mapOf( - "index_name" to indexName1, - "index_uuid" to getUuid(indexName1), - "reason" to "This index has no metadata information" - ) + val expectedErrorMessage = + mapOf( + FAILURES to true, + UPDATED_INDICES to 0, + FAILED_INDICES to + listOf( + mapOf( + "index_name" to indexName, + "index_uuid" to getUuid(indexName), + "reason" to "This index is not being managed.", + ), + mapOf( + "index_name" to indexName1, + "index_uuid" to getUuid(indexName1), + "reason" to "This index has no metadata information", + ), + ), ) - ) assertAffectedIndicesResponseIsEqual(expectedErrorMessage, actualMessage) } @@ -98,57 +103,63 @@ class RestRetryFailedManagedIndexActionIT : IndexStateManagementRestTestCase() { createIndex(indexName2, policy.id) createIndex(indexName3, null) - val response = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestRetryFailedManagedIndexAction.RETRY_BASE_URI}/$indexName*" - ) + val response = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestRetryFailedManagedIndexAction.RETRY_BASE_URI}/$indexName*", + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) val actualMessage = response.asMap() - val expectedErrorMessage = mapOf( - FAILURES to true, - UPDATED_INDICES to 0, - FAILED_INDICES to listOf( - mapOf( - "index_name" to indexName, - "index_uuid" to getUuid(indexName), - "reason" to "This index is not being managed." - ), - mapOf( - "index_name" to indexName1, - "index_uuid" to getUuid(indexName1), - "reason" to "This index is not being managed." - ), - mapOf( - "index_name" to indexName2, - "index_uuid" to getUuid(indexName2), - "reason" to "This index has no metadata information" - ) + val expectedErrorMessage = + mapOf( + FAILURES to true, + UPDATED_INDICES to 0, + FAILED_INDICES to + listOf( + mapOf( + "index_name" to indexName, + "index_uuid" to getUuid(indexName), + "reason" to "This index is not being managed.", + ), + mapOf( + "index_name" to indexName1, + "index_uuid" to getUuid(indexName1), + "reason" to "This index is not being managed.", + ), + mapOf( + "index_name" to indexName2, + "index_uuid" to getUuid(indexName2), + "reason" to "This index has no metadata information", + ), + ), ) - ) assertAffectedIndicesResponseIsEqual(expectedErrorMessage, actualMessage) } fun `test index not being managed`() { val indexName = "${testIndexName}_games" createIndex(indexName, null) - val response = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestRetryFailedManagedIndexAction.RETRY_BASE_URI}/$indexName" - ) + val response = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestRetryFailedManagedIndexAction.RETRY_BASE_URI}/$indexName", + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) val actualMessage = response.asMap() - val expectedErrorMessage = mapOf( - FAILURES to true, - UPDATED_INDICES to 0, - FAILED_INDICES to listOf( - mapOf( - "index_name" to indexName, - "index_uuid" to getUuid(indexName), - "reason" to "This index is not being managed." 
- ) + val expectedErrorMessage = + mapOf( + FAILURES to true, + UPDATED_INDICES to 0, + FAILED_INDICES to + listOf( + mapOf( + "index_name" to indexName, + "index_uuid" to getUuid(indexName), + "reason" to "This index is not being managed.", + ), + ), ) - ) assertAffectedIndicesResponseIsEqual(expectedErrorMessage, actualMessage) } @@ -157,23 +168,26 @@ class RestRetryFailedManagedIndexActionIT : IndexStateManagementRestTestCase() { val policy = createRandomPolicy() createIndex(indexName, policy.id) - val response = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestRetryFailedManagedIndexAction.RETRY_BASE_URI}/$indexName" - ) + val response = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestRetryFailedManagedIndexAction.RETRY_BASE_URI}/$indexName", + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) val actualMessage = response.asMap() - val expectedErrorMessage = mapOf( - FAILURES to true, - UPDATED_INDICES to 0, - FAILED_INDICES to listOf( - mapOf( - "index_name" to indexName, - "index_uuid" to getUuid(indexName), - "reason" to "This index has no metadata information" - ) + val expectedErrorMessage = + mapOf( + FAILURES to true, + UPDATED_INDICES to 0, + FAILED_INDICES to + listOf( + mapOf( + "index_name" to indexName, + "index_uuid" to getUuid(indexName), + "reason" to "This index has no metadata information", + ), + ), ) - ) assertAffectedIndicesResponseIsEqual(expectedErrorMessage, actualMessage) } @@ -187,23 +201,26 @@ class RestRetryFailedManagedIndexActionIT : IndexStateManagementRestTestCase() { updateManagedIndexConfigStartTime(managedIndexConfig) waitFor { - val response = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestRetryFailedManagedIndexAction.RETRY_BASE_URI}/$indexName" - ) + val response = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestRetryFailedManagedIndexAction.RETRY_BASE_URI}/$indexName", + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) val actualMessage = response.asMap() - val expectedErrorMessage = mapOf( - FAILURES to true, - UPDATED_INDICES to 0, - FAILED_INDICES to listOf( - mapOf( - "index_name" to indexName, - "index_uuid" to getUuid(indexName), - "reason" to "This index is not in failed state." 
- ) + val expectedErrorMessage = + mapOf( + FAILURES to true, + UPDATED_INDICES to 0, + FAILED_INDICES to + listOf( + mapOf( + "index_name" to indexName, + "index_uuid" to getUuid(indexName), + "reason" to "This index is not in failed state.", + ), + ), ) - ) assertAffectedIndicesResponseIsEqual(expectedErrorMessage, actualMessage) } } @@ -212,16 +229,18 @@ class RestRetryFailedManagedIndexActionIT : IndexStateManagementRestTestCase() { val indexName = "${testIndexName}_blueberry" val config = AllocationAction(require = mapOf("..//" to "value"), exclude = emptyMap(), include = emptyMap(), index = 0) config.configRetry = ActionRetry(0) - val states = listOf( - randomState().copy( - transitions = listOf(), - actions = listOf(config) + val states = + listOf( + randomState().copy( + transitions = listOf(), + actions = listOf(config), + ), + ) + val invalidPolicy = + randomPolicy().copy( + states = states, + defaultState = states[0].name, ) - ) - val invalidPolicy = randomPolicy().copy( - states = states, - defaultState = states[0].name - ) createPolicy(invalidPolicy, invalidPolicy.id) createIndex(indexName, invalidPolicy.id) @@ -237,17 +256,19 @@ class RestRetryFailedManagedIndexActionIT : IndexStateManagementRestTestCase() { } waitFor { - val response = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestRetryFailedManagedIndexAction.RETRY_BASE_URI}/$indexName" - ) + val response = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestRetryFailedManagedIndexAction.RETRY_BASE_URI}/$indexName", + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) val actualMessage = response.asMap() - val expectedErrorMessage = mapOf( - UPDATED_INDICES to 1, - FAILURES to false, - FAILED_INDICES to emptyList>() - ) + val expectedErrorMessage = + mapOf( + UPDATED_INDICES to 1, + FAILURES to false, + FAILED_INDICES to emptyList>(), + ) assertAffectedIndicesResponseIsEqual(expectedErrorMessage, actualMessage) } } @@ -276,17 +297,21 @@ class RestRetryFailedManagedIndexActionIT : IndexStateManagementRestTestCase() { assertPredicatesOnMetaData( listOf( - indexName to listOf( - ActionMetaData.ACTION to fun(actionMetaDataMap: Any?): Boolean = assertActionEquals( - ActionMetaData( - name = "force_merge", startTime = Instant.now().toEpochMilli(), failed = false, - index = 0, consumedRetries = 0, lastRetryTime = null, actionProperties = null - ), - actionMetaDataMap - ) - ) + indexName to + listOf( + ActionMetaData.ACTION to + + fun(actionMetaDataMap: Any?): Boolean = + assertActionEquals( + ActionMetaData( + name = "force_merge", startTime = Instant.now().toEpochMilli(), failed = false, + index = 0, consumedRetries = 0, lastRetryTime = null, actionProperties = null, + ), + actionMetaDataMap, + ), + ), ), - explainMap, false + explainMap, false, ) } @@ -300,49 +325,57 @@ class RestRetryFailedManagedIndexActionIT : IndexStateManagementRestTestCase() { waitFor { assertPredicatesOnMetaData( listOf( - indexName to listOf( - ActionMetaData.ACTION to fun(actionMetaDataMap: Any?): Boolean { - @Suppress("UNCHECKED_CAST") - actionMetaDataMap as Map - firstStartTime = actionMetaDataMap[ManagedIndexMetaData.START_TIME] as Long - return assertActionEquals( - ActionMetaData( - name = "force_merge", startTime = Instant.now().toEpochMilli(), failed = true, - index = 0, consumedRetries = 0, lastRetryTime = null, actionProperties = null - ), - actionMetaDataMap - ) - } - ) + indexName to + listOf( + ActionMetaData.ACTION to + + fun(actionMetaDataMap: Any?): Boolean { + 
@Suppress("UNCHECKED_CAST") + actionMetaDataMap as Map + firstStartTime = actionMetaDataMap[ManagedIndexMetaData.START_TIME] as Long + return assertActionEquals( + ActionMetaData( + name = "force_merge", startTime = Instant.now().toEpochMilli(), failed = true, + index = 0, consumedRetries = 0, lastRetryTime = null, actionProperties = null, + ), + actionMetaDataMap, + ) + }, + ), ), - getExplainMap(indexName), false + getExplainMap(indexName), false, ) } // retry - val response = client().makeRequest( - RestRequest.Method.POST.toString(), - "${RestRetryFailedManagedIndexAction.RETRY_BASE_URI}/$indexName" - ) + val response = + client().makeRequest( + RestRequest.Method.POST.toString(), + "${RestRetryFailedManagedIndexAction.RETRY_BASE_URI}/$indexName", + ) assertEquals("Unexpected RestStatus", RestStatus.OK, response.restStatus()) - val expectedErrorMessage = mapOf( - UPDATED_INDICES to 1, - FAILURES to false, - FAILED_INDICES to emptyList>() - ) + val expectedErrorMessage = + mapOf( + UPDATED_INDICES to 1, + FAILURES to false, + FAILED_INDICES to emptyList>(), + ) assertAffectedIndicesResponseIsEqual(expectedErrorMessage, response.asMap()) // verify actionStartTime was reset to null assertPredicatesOnMetaData( listOf( - indexName to listOf( - ActionMetaData.ACTION to fun(actionMetaDataMap: Any?): Boolean { - @Suppress("UNCHECKED_CAST") - actionMetaDataMap as Map - return actionMetaDataMap[ManagedIndexMetaData.START_TIME] as Long? == null - } - ) + indexName to + listOf( + ActionMetaData.ACTION to + + fun(actionMetaDataMap: Any?): Boolean { + @Suppress("UNCHECKED_CAST") + actionMetaDataMap as Map + return actionMetaDataMap[ManagedIndexMetaData.START_TIME] as Long? == null + }, + ), ), - getExplainMap(indexName), false + getExplainMap(indexName), false, ) // should execute and set the startTime again @@ -351,19 +384,22 @@ class RestRetryFailedManagedIndexActionIT : IndexStateManagementRestTestCase() { waitFor { assertPredicatesOnMetaData( listOf( - indexName to listOf( - ActionMetaData.ACTION to fun(actionMetaDataMap: Any?): Boolean { - @Suppress("UNCHECKED_CAST") - actionMetaDataMap as Map - return actionMetaDataMap[ManagedIndexMetaData.START_TIME] as Long > firstStartTime - } - ) + indexName to + listOf( + ActionMetaData.ACTION to + + fun(actionMetaDataMap: Any?): Boolean { + @Suppress("UNCHECKED_CAST") + actionMetaDataMap as Map + return actionMetaDataMap[ManagedIndexMetaData.START_TIME] as Long > firstStartTime + }, + ), ), - getExplainMap(indexName), false + getExplainMap(indexName), false, ) assertEquals( AttemptSetReadOnlyStep.getSuccessMessage(indexName), - getExplainManagedIndexMetaData(indexName).info?.get("message") + getExplainManagedIndexMetaData(indexName).info?.get("message"), ) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/runner/ManagedIndexRunnerIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/runner/ManagedIndexRunnerIT.kt index 38d013232..428ce810d 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/runner/ManagedIndexRunnerIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/runner/ManagedIndexRunnerIT.kt @@ -29,22 +29,22 @@ import java.time.Instant import java.time.temporal.ChronoUnit class ManagedIndexRunnerIT : IndexStateManagementRestTestCase() { - fun `test version conflict fails job`() { val indexName = "version_conflict_index" val policyID = "version_conflict_policy" val actionConfig = OpenAction(0) val states = listOf(State("OpenState", 
listOf(actionConfig), listOf())) - val policy = Policy( - id = policyID, - description = "$indexName description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "$indexName description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) createPolicy(policy, policyID) createIndex(indexName, policyID) @@ -66,14 +66,19 @@ class ManagedIndexRunnerIT : IndexStateManagementRestTestCase() { waitFor { assertPredicatesOnMetaData( listOf( - indexName to listOf( - PolicyRetryInfoMetaData.RETRY_INFO to fun(retryInfoMetaDataMap: Any?): Boolean = - assertRetryInfoEquals(PolicyRetryInfoMetaData(true, 0), retryInfoMetaDataMap), - ManagedIndexMetaData.INFO to fun(info: Any?): Boolean = expectedInfoString == info.toString() - ) + indexName to + listOf( + PolicyRetryInfoMetaData.RETRY_INFO to + + fun(retryInfoMetaDataMap: Any?): Boolean = + assertRetryInfoEquals(PolicyRetryInfoMetaData(true, 0), retryInfoMetaDataMap), + ManagedIndexMetaData.INFO to + + fun(info: Any?): Boolean = expectedInfoString == info.toString(), + ), ), getExplainMap(indexName), - strict = false + strict = false, ) } } @@ -88,7 +93,7 @@ class ManagedIndexRunnerIT : IndexStateManagementRestTestCase() { assertEquals( "Created managed index did not default to ${ManagedIndexSettings.DEFAULT_JOB_INTERVAL} minutes", - ManagedIndexSettings.DEFAULT_JOB_INTERVAL, (managedIndexConfig.jobSchedule as IntervalSchedule).interval + ManagedIndexSettings.DEFAULT_JOB_INTERVAL, (managedIndexConfig.jobSchedule as IntervalSchedule).interval, ) // init policy @@ -118,7 +123,7 @@ class ManagedIndexRunnerIT : IndexStateManagementRestTestCase() { assertEquals( "New managed index did not have updated job schedule interval", - newJobInterval, (newManagedIndexConfig.jobSchedule as IntervalSchedule).interval + newJobInterval, (newManagedIndexConfig.jobSchedule as IntervalSchedule).interval, ) // init new policy @@ -133,14 +138,16 @@ class ManagedIndexRunnerIT : IndexStateManagementRestTestCase() { fun `test allow list fails execution`() { val indexName = "allow_list_index" - val firstState = randomState( - name = "first_state", actions = listOf(randomReadOnlyActionConfig()), - transitions = listOf(randomTransition(stateName = "second_state", conditions = null)) - ) - val secondState = randomState( - name = "second_state", actions = listOf(randomReadWriteActionConfig()), - transitions = listOf(randomTransition(stateName = "first_state", conditions = null)) - ) + val firstState = + randomState( + name = "first_state", actions = listOf(randomReadOnlyActionConfig()), + transitions = listOf(randomTransition(stateName = "second_state", conditions = null)), + ) + val secondState = + randomState( + name = "second_state", actions = listOf(randomReadWriteActionConfig()), + transitions = listOf(randomTransition(stateName = "first_state", conditions = null)), + ) val randomPolicy = randomPolicy(id = "allow_policy", states = listOf(firstState, secondState)) val createdPolicy = createPolicy(randomPolicy, "allow_policy") @@ -169,9 +176,10 @@ class ManagedIndexRunnerIT : IndexStateManagementRestTestCase() { waitFor { assertEquals(AttemptTransitionStep.getSuccessMessage(indexName, firstState.name), 
getExplainManagedIndexMetaData(indexName).info?.get("message")) } // remove read_only from the allowlist - val allowedActions = ISMActionsParser.instance.parsers.map { it.getActionType() }.toList() - .filter { actionType -> actionType != ReadOnlyAction.name } - .joinToString(prefix = "[", postfix = "]") { string -> "\"$string\"" } + val allowedActions = + ISMActionsParser.instance.parsers.map { it.getActionType() }.toList() + .filter { actionType -> actionType != ReadOnlyAction.name } + .joinToString(prefix = "[", postfix = "]") { string -> "\"$string\"" } updateClusterSetting(ManagedIndexSettings.ALLOW_LIST.key, allowedActions, escapeValue = false) // speed up to fifth execution that should try to set index to read only and fail because the action is not allowed @@ -187,7 +195,7 @@ class ManagedIndexRunnerIT : IndexStateManagementRestTestCase() { val managedIndexConfig = getExistingManagedIndexConfig(indexName) assertEquals( - "Created managed index did not default to 0.0", 0.0, managedIndexConfig.jitter + "Created managed index did not default to 0.0", 0.0, managedIndexConfig.jitter, ) waitFor { @@ -207,7 +215,7 @@ class ManagedIndexRunnerIT : IndexStateManagementRestTestCase() { val newManagedIndexConfig = getExistingManagedIndexConfig(newIndexName) assertEquals( - "New managed index did not have updated jitter", newJitter, newManagedIndexConfig.jitter + "New managed index did not have updated jitter", newJitter, newManagedIndexConfig.jitter, ) waitFor { diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/runner/ManagedIndexRunnerTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/runner/ManagedIndexRunnerTests.kt index 6e12caa06..1e9e745f8 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/runner/ManagedIndexRunnerTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/runner/ManagedIndexRunnerTests.kt @@ -28,7 +28,6 @@ import org.opensearch.threadpool.ThreadPool @ThreadLeakScope(ThreadLeakScope.Scope.NONE) class ManagedIndexRunnerTests : OpenSearchTestCase() { - private lateinit var client: Client private lateinit var clusterService: ClusterService private lateinit var xContentRegistry: NamedXContentRegistry @@ -68,13 +67,14 @@ class ManagedIndexRunnerTests : OpenSearchTestCase() { Mockito.`when`(environment.settings()).thenReturn(settings) - runner = ManagedIndexRunner - .registerClusterService(clusterService) - .registerNamedXContentRegistry(xContentRegistry) - .registerScriptService(scriptService) - .registerSettings(environment.settings()) - .registerConsumers() - .registerHistoryIndex(indexStateManagementHistory) - .registerSkipFlag(skipFlag) + runner = + ManagedIndexRunner + .registerClusterService(clusterService) + .registerNamedXContentRegistry(xContentRegistry) + .registerScriptService(scriptService) + .registerSettings(environment.settings()) + .registerConsumers() + .registerHistoryIndex(indexStateManagementHistory) + .registerSkipFlag(skipFlag) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptCloseStepTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptCloseStepTests.kt index 68af07ac0..a7e2851e6 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptCloseStepTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptCloseStepTests.kt @@ -11,13 +11,13 @@ import com.nhaarman.mockitokotlin2.doReturn import 
com.nhaarman.mockitokotlin2.mock import com.nhaarman.mockitokotlin2.whenever import kotlinx.coroutines.runBlocking -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.indices.close.CloseIndexResponse import org.opensearch.client.AdminClient import org.opensearch.client.Client import org.opensearch.client.IndicesAdminClient import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings +import org.opensearch.core.action.ActionListener import org.opensearch.indexmanagement.indexstatemanagement.step.close.AttemptCloseStep import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData @@ -30,7 +30,6 @@ import org.opensearch.transport.RemoteTransportException import kotlin.IllegalArgumentException class AttemptCloseStepTests : OpenSearchTestCase() { - private val clusterService: ClusterService = mock() private val scriptService: ScriptService = mock() private val settings: Settings = Settings.EMPTY @@ -122,14 +121,19 @@ class AttemptCloseStepTests : OpenSearchTestCase() { } private fun getClient(adminClient: AdminClient): Client = mock { on { admin() } doReturn adminClient } + private fun getAdminClient(indicesAdminClient: IndicesAdminClient): AdminClient = mock { on { indices() } doReturn indicesAdminClient } + private fun getIndicesAdminClient(closeIndexResponse: CloseIndexResponse?, exception: Exception?): IndicesAdminClient { assertTrue("Must provide one and only one response or exception", (closeIndexResponse != null).xor(exception != null)) return mock { doAnswer { invocationOnMock -> val listener = invocationOnMock.getArgument>(1) - if (closeIndexResponse != null) listener.onResponse(closeIndexResponse) - else listener.onFailure(exception) + if (closeIndexResponse != null) { + listener.onResponse(closeIndexResponse) + } else { + listener.onFailure(exception) + } }.whenever(this.mock).close(any(), any()) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptCreateRollupJobStepTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptCreateRollupJobStepTests.kt index f41eca7c3..a73dd9ce1 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptCreateRollupJobStepTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptCreateRollupJobStepTests.kt @@ -14,14 +14,14 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedInde import org.opensearch.test.OpenSearchTestCase class AttemptCreateRollupJobStepTests : OpenSearchTestCase() { - private val rollupAction = randomRollupActionConfig() private val indexName = "test" private val rollupId: String = rollupAction.ismRollup.toRollup(indexName).id - private val metadata = ManagedIndexMetaData( - indexName, "indexUuid", "policy_id", null, null, null, null, null, null, null, - ActionMetaData(AttemptCreateRollupJobStep.name, 1, 0, false, 0, null, ActionProperties(rollupId = rollupId)), null, null, null - ) + private val metadata = + ManagedIndexMetaData( + indexName, "indexUuid", "policy_id", null, null, null, null, null, null, null, + ActionMetaData(AttemptCreateRollupJobStep.name, 1, 0, false, 0, null, ActionProperties(rollupId = rollupId)), null, null, null, + ) private val step = AttemptCreateRollupJobStep(rollupAction) fun `test process failure`() { @@ -31,7 +31,7 @@ class AttemptCreateRollupJobStepTests : 
        assertEquals(
            "Error message is not expected",
            AttemptCreateRollupJobStep.getFailedMessage(rollupId, indexName),
-            updatedManagedIndexMetaData.info?.get("message")
+            updatedManagedIndexMetaData.info?.get("message"),
        )
    }
diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptCreateTransformJobStepTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptCreateTransformJobStepTests.kt
index 02945979e..c25437ebe 100644
--- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptCreateTransformJobStepTests.kt
+++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptCreateTransformJobStepTests.kt
@@ -15,29 +15,29 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.TransformAc
 import org.opensearch.test.OpenSearchTestCase

 class AttemptCreateTransformJobStepTests : OpenSearchTestCase() {
-
     private val transformAction = randomTransformActionConfig()
     private val indexName = "test"
     private val transformId: String = transformAction.ismTransform.toTransform(indexName).id
-    private val metadata = ManagedIndexMetaData(
-        indexName,
-        "indexUuid",
-        "policy_id",
-        null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        ActionMetaData(
-            AttemptCreateTransformJobStep.name, 1, 0, false, 0, null,
-            ActionProperties(transformActionProperties = TransformActionProperties(transformId))
-        ),
-        null,
-        null,
-        null
-    )
+    private val metadata =
+        ManagedIndexMetaData(
+            indexName,
+            "indexUuid",
+            "policy_id",
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            ActionMetaData(
+                AttemptCreateTransformJobStep.name, 1, 0, false, 0, null,
+                ActionProperties(transformActionProperties = TransformActionProperties(transformId)),
+            ),
+            null,
+            null,
+            null,
+        )
     private val step = AttemptCreateTransformJobStep(transformAction)

     fun `test process failure`() {
@@ -46,16 +46,16 @@ class AttemptCreateTransformJobStepTests : OpenSearchTestCase() {
        assertEquals(
            "Step status is not FAILED",
            Step.StepStatus.FAILED,
-            updatedManagedIndexMedaData.stepMetaData?.stepStatus
+            updatedManagedIndexMedaData.stepMetaData?.stepStatus,
        )
        assertEquals(
            "Error message is not expected",
            AttemptCreateTransformJobStep.getFailedMessage(transformId, indexName),
-            updatedManagedIndexMedaData.info?.get("message")
+            updatedManagedIndexMedaData.info?.get("message"),
        )
        assertNull(
            "TransformId in action properties is not cleaned up",
-            updatedManagedIndexMedaData.actionMetaData?.actionProperties?.transformActionProperties?.transformId
+            updatedManagedIndexMedaData.actionMetaData?.actionProperties?.transformActionProperties?.transformId,
        )
    }
diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptDeleteStepTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptDeleteStepTests.kt
index 4748e2107..4dcd6e2f1 100644
--- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptDeleteStepTests.kt
+++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptDeleteStepTests.kt
@@ -11,13 +11,13 @@ import com.nhaarman.mockitokotlin2.mock
 import com.nhaarman.mockitokotlin2.whenever
 import kotlinx.coroutines.runBlocking
 import org.mockito.Mockito.doAnswer
-import org.opensearch.core.action.ActionListener
 import org.opensearch.action.support.master.AcknowledgedResponse
 import org.opensearch.client.AdminClient
 import org.opensearch.client.Client import 
org.opensearch.client.IndicesAdminClient import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings +import org.opensearch.core.action.ActionListener import org.opensearch.indexmanagement.indexstatemanagement.step.delete.AttemptDeleteStep import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData @@ -28,7 +28,6 @@ import org.opensearch.snapshots.SnapshotInProgressException import org.opensearch.test.OpenSearchTestCase class AttemptDeleteStepTests : OpenSearchTestCase() { - private val clusterService: ClusterService = mock() private val scriptService: ScriptService = mock() private val settings: Settings = Settings.EMPTY @@ -92,14 +91,19 @@ class AttemptDeleteStepTests : OpenSearchTestCase() { } private fun getClient(adminClient: AdminClient): Client = mock { on { admin() } doReturn adminClient } + private fun getAdminClient(indicesAdminClient: IndicesAdminClient): AdminClient = mock { on { indices() } doReturn indicesAdminClient } + private fun getIndicesAdminClient(acknowledgedResponse: AcknowledgedResponse?, exception: Exception?): IndicesAdminClient { assertTrue("Must provide one and only one response or exception", (acknowledgedResponse != null).xor(exception != null)) return mock { doAnswer { invocationOnMock -> val listener = invocationOnMock.getArgument>(1) - if (acknowledgedResponse != null) listener.onResponse(acknowledgedResponse) - else listener.onFailure(exception) + if (acknowledgedResponse != null) { + listener.onResponse(acknowledgedResponse) + } else { + listener.onFailure(exception) + } }.whenever(this.mock).delete(any(), any()) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptOpenStepTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptOpenStepTests.kt index c7996be55..c0797e3d9 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptOpenStepTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptOpenStepTests.kt @@ -11,13 +11,13 @@ import com.nhaarman.mockitokotlin2.doReturn import com.nhaarman.mockitokotlin2.mock import com.nhaarman.mockitokotlin2.whenever import kotlinx.coroutines.runBlocking -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.indices.open.OpenIndexResponse import org.opensearch.client.AdminClient import org.opensearch.client.Client import org.opensearch.client.IndicesAdminClient import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings +import org.opensearch.core.action.ActionListener import org.opensearch.indexmanagement.indexstatemanagement.step.open.AttemptOpenStep import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData @@ -28,7 +28,6 @@ import org.opensearch.test.OpenSearchTestCase import org.opensearch.transport.RemoteTransportException class AttemptOpenStepTests : OpenSearchTestCase() { - private val clusterService: ClusterService = mock() private val scriptService: ScriptService = mock() private val settings: Settings = Settings.EMPTY @@ -78,14 +77,19 @@ class AttemptOpenStepTests : OpenSearchTestCase() { } private fun getClient(adminClient: AdminClient): Client = mock { on { admin() } doReturn adminClient } + private fun getAdminClient(indicesAdminClient: 
IndicesAdminClient): AdminClient = mock { on { indices() } doReturn indicesAdminClient } + private fun getIndicesAdminClient(openIndexResponse: OpenIndexResponse?, exception: Exception?): IndicesAdminClient { assertTrue("Must provide one and only one response or exception", (openIndexResponse != null).xor(exception != null)) return mock { doAnswer { invocationOnMock -> val listener = invocationOnMock.getArgument>(1) - if (openIndexResponse != null) listener.onResponse(openIndexResponse) - else listener.onFailure(exception) + if (openIndexResponse != null) { + listener.onResponse(openIndexResponse) + } else { + listener.onFailure(exception) + } }.whenever(this.mock).open(any(), any()) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptRolloverStepTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptRolloverStepTests.kt index 5c4beea89..69f674f12 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptRolloverStepTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptRolloverStepTests.kt @@ -12,7 +12,6 @@ import com.nhaarman.mockitokotlin2.mock import com.nhaarman.mockitokotlin2.whenever import kotlinx.coroutines.runBlocking import org.junit.Before -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.indices.rollover.RolloverResponse import org.opensearch.action.support.master.AcknowledgedResponse import org.opensearch.client.AdminClient @@ -20,9 +19,13 @@ import org.opensearch.client.Client import org.opensearch.client.IndicesAdminClient import org.opensearch.cluster.ClusterState import org.opensearch.cluster.metadata.IndexMetadata +import org.opensearch.cluster.metadata.Metadata import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings +import org.opensearch.core.action.ActionListener +import org.opensearch.index.IndexNotFoundException import org.opensearch.indexmanagement.indexstatemanagement.action.RolloverAction +import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings import org.opensearch.indexmanagement.indexstatemanagement.step.rollover.AttemptRolloverStep import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData @@ -30,12 +33,8 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext import org.opensearch.jobscheduler.spi.utils.LockService import org.opensearch.script.ScriptService import org.opensearch.test.OpenSearchTestCase -import org.opensearch.cluster.metadata.Metadata -import org.opensearch.index.IndexNotFoundException -import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings class AttemptRolloverStepTests : OpenSearchTestCase() { - private val clusterService: ClusterService = mock() private val scriptService: ScriptService = mock() private val settings: Settings = Settings.EMPTY @@ -50,10 +49,11 @@ class AttemptRolloverStepTests : OpenSearchTestCase() { val clusterState: ClusterState = mock() val metadata: Metadata = mock() val indexMetadata: IndexMetadata = mock() - val settings = Settings.builder() - .put(ManagedIndexSettings.ROLLOVER_ALIAS.key, alias) - .put(ManagedIndexSettings.ROLLOVER_SKIP.key, false) - .build() + val settings = + Settings.builder() + .put(ManagedIndexSettings.ROLLOVER_ALIAS.key, alias) + .put(ManagedIndexSettings.ROLLOVER_SKIP.key, 
false) + .build() whenever(clusterService.state()).thenReturn(clusterState) whenever(clusterState.metadata()).thenReturn(metadata) whenever(clusterState.metadata).thenReturn(metadata) @@ -73,13 +73,14 @@ class AttemptRolloverStepTests : OpenSearchTestCase() { runBlocking { val rolloverAction = RolloverAction(null, null, null, null, true, 0) - val managedIndexMetaData = ManagedIndexMetaData( - oldIndexName, "indexUuid", "policy_id", - null, null, null, - null, null, null, - null, null, null, - null, null, rolledOverIndexName = newIndexName - ) + val managedIndexMetaData = + ManagedIndexMetaData( + oldIndexName, "indexUuid", "policy_id", + null, null, null, + null, null, null, + null, null, null, + null, null, rolledOverIndexName = newIndexName, + ) val attemptRolloverStep = AttemptRolloverStep(rolloverAction) val context = StepContext(managedIndexMetaData, clusterService, client, null, null, scriptService, settings, lockService) attemptRolloverStep.preExecute(logger, context).execute() @@ -97,13 +98,14 @@ class AttemptRolloverStepTests : OpenSearchTestCase() { runBlocking { val rolloverAction = RolloverAction(null, null, null, null, true, 0) - val managedIndexMetaData = ManagedIndexMetaData( - oldIndexName, "indexUuid", "policy_id", - null, null, null, - null, null, null, - null, null, null, - null, null, rolledOverIndexName = newIndexName - ) + val managedIndexMetaData = + ManagedIndexMetaData( + oldIndexName, "indexUuid", "policy_id", + null, null, null, + null, null, null, + null, null, null, + null, null, rolledOverIndexName = newIndexName, + ) val attemptRolloverStep = AttemptRolloverStep(rolloverAction) val context = StepContext(managedIndexMetaData, clusterService, client, null, null, scriptService, settings, lockService) attemptRolloverStep.preExecute(logger, context).execute() @@ -121,13 +123,14 @@ class AttemptRolloverStepTests : OpenSearchTestCase() { runBlocking { val rolloverAction = RolloverAction(null, null, null, null, true, 0) - val managedIndexMetaData = ManagedIndexMetaData( - oldIndexName, "indexUuid", "policy_id", - null, null, null, - null, null, null, - null, null, null, - null, null, rolledOverIndexName = newIndexName - ) + val managedIndexMetaData = + ManagedIndexMetaData( + oldIndexName, "indexUuid", "policy_id", + null, null, null, + null, null, null, + null, null, null, + null, null, rolledOverIndexName = newIndexName, + ) val attemptRolloverStep = AttemptRolloverStep(rolloverAction) val context = StepContext(managedIndexMetaData, clusterService, client, null, null, scriptService, settings, lockService) attemptRolloverStep.preExecute(logger, context).execute() @@ -145,13 +148,14 @@ class AttemptRolloverStepTests : OpenSearchTestCase() { runBlocking { val rolloverAction = RolloverAction(null, null, null, null, true, 0) - val managedIndexMetaData = ManagedIndexMetaData( - oldIndexName, "indexUuid", "policy_id", - null, null, null, - null, null, null, - null, null, null, - null, null, rolledOverIndexName = null - ) + val managedIndexMetaData = + ManagedIndexMetaData( + oldIndexName, "indexUuid", "policy_id", + null, null, null, + null, null, null, + null, null, null, + null, null, rolledOverIndexName = null, + ) val attemptRolloverStep = AttemptRolloverStep(rolloverAction) val context = StepContext(managedIndexMetaData, clusterService, client, null, null, scriptService, settings, lockService) attemptRolloverStep.preExecute(logger, context).execute() @@ -162,7 +166,9 @@ class AttemptRolloverStepTests : OpenSearchTestCase() { } private fun getClient(adminClient: 
AdminClient): Client = mock { on { admin() } doReturn adminClient } + private fun getAdminClient(indicesAdminClient: IndicesAdminClient): AdminClient = mock { on { indices() } doReturn indicesAdminClient } + private fun getIndicesAdminClient( rolloverResponse: RolloverResponse?, aliasResponse: AcknowledgedResponse?, @@ -171,23 +177,29 @@ class AttemptRolloverStepTests : OpenSearchTestCase() { ): IndicesAdminClient { assertTrue( "Must provide one and only one response or exception", - (rolloverResponse != null).xor(rolloverException != null) + (rolloverResponse != null).xor(rolloverException != null), ) assertTrue( "Must provide one and only one response or exception", - (aliasResponse != null).xor(aliasException != null) + (aliasResponse != null).xor(aliasException != null), ) return mock { doAnswer { invocationOnMock -> val listener = invocationOnMock.getArgument>(1) - if (rolloverResponse != null) listener.onResponse(rolloverResponse) - else listener.onFailure(rolloverException) + if (rolloverResponse != null) { + listener.onResponse(rolloverResponse) + } else { + listener.onFailure(rolloverException) + } }.whenever(this.mock).rolloverIndex(any(), any()) doAnswer { invocationOnMock -> val listener = invocationOnMock.getArgument>(1) - if (aliasResponse != null) listener.onResponse(aliasResponse) - else listener.onFailure(aliasException) + if (aliasResponse != null) { + listener.onResponse(aliasResponse) + } else { + listener.onFailure(aliasException) + } }.whenever(this.mock).aliases(any(), any()) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptSetIndexPriorityStepTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptSetIndexPriorityStepTests.kt index 4a4e33ae9..6842b4dc4 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptSetIndexPriorityStepTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptSetIndexPriorityStepTests.kt @@ -11,13 +11,13 @@ import com.nhaarman.mockitokotlin2.doReturn import com.nhaarman.mockitokotlin2.mock import com.nhaarman.mockitokotlin2.whenever import kotlinx.coroutines.runBlocking -import org.opensearch.core.action.ActionListener import org.opensearch.action.support.master.AcknowledgedResponse import org.opensearch.client.AdminClient import org.opensearch.client.Client import org.opensearch.client.IndicesAdminClient import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings +import org.opensearch.core.action.ActionListener import org.opensearch.indexmanagement.indexstatemanagement.action.IndexPriorityAction import org.opensearch.indexmanagement.indexstatemanagement.step.indexpriority.AttemptSetIndexPriorityStep import org.opensearch.indexmanagement.spi.indexstatemanagement.Step @@ -29,7 +29,6 @@ import org.opensearch.test.OpenSearchTestCase import org.opensearch.transport.RemoteTransportException class AttemptSetIndexPriorityStepTests : OpenSearchTestCase() { - private val clusterService: ClusterService = mock() private val scriptService: ScriptService = mock() private val settings: Settings = Settings.EMPTY @@ -99,14 +98,19 @@ class AttemptSetIndexPriorityStepTests : OpenSearchTestCase() { } private fun getClient(adminClient: AdminClient): Client = mock { on { admin() } doReturn adminClient } + private fun getAdminClient(indicesAdminClient: IndicesAdminClient): AdminClient = mock { on { indices() } doReturn indicesAdminClient } + private fun 
getIndicesAdminClient(acknowledgedResponse: AcknowledgedResponse?, exception: Exception?): IndicesAdminClient { assertTrue("Must provide one and only one response or exception", (acknowledgedResponse != null).xor(exception != null)) return mock { doAnswer { invocationOnMock -> val listener = invocationOnMock.getArgument>(1) - if (acknowledgedResponse != null) listener.onResponse(acknowledgedResponse) - else listener.onFailure(exception) + if (acknowledgedResponse != null) { + listener.onResponse(acknowledgedResponse) + } else { + listener.onFailure(exception) + } }.whenever(this.mock).updateSettings(any(), any()) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptSetReplicaCountStepTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptSetReplicaCountStepTests.kt index 9256c5a83..30f2e6639 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptSetReplicaCountStepTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptSetReplicaCountStepTests.kt @@ -11,13 +11,13 @@ import com.nhaarman.mockitokotlin2.mock import com.nhaarman.mockitokotlin2.whenever import kotlinx.coroutines.runBlocking import org.mockito.Mockito.doAnswer -import org.opensearch.core.action.ActionListener import org.opensearch.action.support.master.AcknowledgedResponse import org.opensearch.client.AdminClient import org.opensearch.client.Client import org.opensearch.client.IndicesAdminClient import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings +import org.opensearch.core.action.ActionListener import org.opensearch.indexmanagement.indexstatemanagement.action.ReplicaCountAction import org.opensearch.indexmanagement.indexstatemanagement.step.replicacount.AttemptReplicaCountStep import org.opensearch.indexmanagement.spi.indexstatemanagement.Step @@ -29,7 +29,6 @@ import org.opensearch.test.OpenSearchTestCase import org.opensearch.transport.RemoteTransportException class AttemptSetReplicaCountStepTests : OpenSearchTestCase() { - private val clusterService: ClusterService = mock() private val scriptService: ScriptService = mock() private val settings: Settings = Settings.EMPTY @@ -82,14 +81,19 @@ class AttemptSetReplicaCountStepTests : OpenSearchTestCase() { } private fun getClient(adminClient: AdminClient): Client = mock { on { admin() } doReturn adminClient } + private fun getAdminClient(indicesAdminClient: IndicesAdminClient): AdminClient = mock { on { indices() } doReturn indicesAdminClient } + private fun getIndicesAdminClient(replicaResponse: AcknowledgedResponse?, exception: Exception?): IndicesAdminClient { assertTrue("Must provide one and only one response or exception", (replicaResponse != null).xor(exception != null)) return mock { doAnswer { invocationOnMock -> val listener = invocationOnMock.getArgument>(1) - if (replicaResponse != null) listener.onResponse(replicaResponse) - else listener.onFailure(exception) + if (replicaResponse != null) { + listener.onResponse(replicaResponse) + } else { + listener.onFailure(exception) + } }.whenever(this.mock).updateSettings(any(), any()) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptSnapshotStepTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptSnapshotStepTests.kt index 646f64964..2cc7040db 100644 --- 
a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptSnapshotStepTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptSnapshotStepTests.kt @@ -13,7 +13,6 @@ import com.nhaarman.mockitokotlin2.mock import com.nhaarman.mockitokotlin2.whenever import kotlinx.coroutines.runBlocking import org.junit.Before -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse import org.opensearch.client.AdminClient import org.opensearch.client.Client @@ -21,6 +20,8 @@ import org.opensearch.client.ClusterAdminClient import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.ClusterSettings import org.opensearch.common.settings.Settings +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.indexstatemanagement.randomSnapshotActionConfig import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings.Companion.SNAPSHOT_DENY_LIST import org.opensearch.indexmanagement.indexstatemanagement.step.snapshot.AttemptSnapshotStep @@ -31,7 +32,6 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedInde import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext import org.opensearch.ingest.TestTemplateService.MockTemplateScript import org.opensearch.jobscheduler.spi.utils.LockService -import org.opensearch.core.rest.RestStatus import org.opensearch.script.ScriptService import org.opensearch.script.TemplateScript import org.opensearch.snapshots.ConcurrentSnapshotExecutionException @@ -39,7 +39,6 @@ import org.opensearch.test.OpenSearchTestCase import org.opensearch.transport.RemoteTransportException class AttemptSnapshotStepTests : OpenSearchTestCase() { - private val clusterService: ClusterService = mock() private val scriptService: ScriptService = mock() private val settings: Settings = Settings.EMPTY @@ -138,14 +137,19 @@ class AttemptSnapshotStepTests : OpenSearchTestCase() { } private fun getClient(adminClient: AdminClient): Client = mock { on { admin() } doReturn adminClient } + private fun getAdminClient(clusterAdminClient: ClusterAdminClient): AdminClient = mock { on { cluster() } doReturn clusterAdminClient } + private fun getClusterAdminClient(createSnapshotRequest: CreateSnapshotResponse?, exception: Exception?): ClusterAdminClient { assertTrue("Must provide one and only one response or exception", (createSnapshotRequest != null).xor(exception != null)) return mock { doAnswer { invocationOnMock -> val listener = invocationOnMock.getArgument>(1) - if (createSnapshotRequest != null) listener.onResponse(createSnapshotRequest) - else listener.onFailure(exception) + if (createSnapshotRequest != null) { + listener.onResponse(createSnapshotRequest) + } else { + listener.onFailure(exception) + } }.whenever(this.mock).createSnapshot(any(), any()) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptTransitionStepTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptTransitionStepTests.kt index 158cc8d18..496c4f3b4 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptTransitionStepTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/AttemptTransitionStepTests.kt @@ -12,7 +12,6 @@ import com.nhaarman.mockitokotlin2.mock import 
com.nhaarman.mockitokotlin2.whenever import kotlinx.coroutines.runBlocking import org.junit.Before -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.indices.rollover.RolloverInfo import org.opensearch.action.admin.indices.stats.CommonStats import org.opensearch.action.admin.indices.stats.IndicesStatsResponse @@ -25,6 +24,8 @@ import org.opensearch.cluster.metadata.Metadata import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.ClusterSettings import org.opensearch.common.settings.Settings +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus import org.opensearch.index.shard.DocsStats import org.opensearch.indexmanagement.indexstatemanagement.IndexMetadataProvider import org.opensearch.indexmanagement.indexstatemanagement.action.TransitionsAction @@ -37,25 +38,26 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedInde import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepMetaData import org.opensearch.jobscheduler.spi.utils.LockService -import org.opensearch.core.rest.RestStatus import org.opensearch.script.ScriptService import org.opensearch.test.OpenSearchTestCase import org.opensearch.transport.RemoteTransportException import java.time.Instant class AttemptTransitionStepTests : OpenSearchTestCase() { - private val indexName: String = "test" private val indexUUID: String = "indexUuid" + @Suppress("UNCHECKED_CAST") - private val indexMetadata: IndexMetadata = mock { - on { rolloverInfos } doReturn mapOf() - on { indexUUID } doReturn indexUUID - } - private val metadata: Metadata = mock { - on { index(any()) } doReturn indexMetadata - on { hasIndex(indexName) } doReturn true - } + private val indexMetadata: IndexMetadata = + mock { + on { rolloverInfos } doReturn mapOf() + on { indexUUID } doReturn indexUUID + } + private val metadata: Metadata = + mock { + on { index(any()) } doReturn indexMetadata + on { hasIndex(indexName) } doReturn true + } private val clusterState: ClusterState = mock { on { metadata() } doReturn metadata } private val clusterService: ClusterService = mock { on { state() } doReturn clusterState } private val scriptService: ScriptService = mock() @@ -142,14 +144,19 @@ class AttemptTransitionStepTests : OpenSearchTestCase() { } private fun getClient(adminClient: AdminClient): Client = mock { on { admin() } doReturn adminClient } + private fun getAdminClient(indicesAdminClient: IndicesAdminClient): AdminClient = mock { on { indices() } doReturn indicesAdminClient } + private fun getIndicesAdminClient(statsResponse: IndicesStatsResponse?, exception: Exception?): IndicesAdminClient { assertTrue("Must provide one and only one response or exception", (statsResponse != null).xor(exception != null)) return mock { doAnswer { invocationOnMock -> val listener = invocationOnMock.getArgument>(1) - if (statsResponse != null) listener.onResponse(statsResponse) - else listener.onFailure(exception) + if (statsResponse != null) { + listener.onResponse(statsResponse) + } else { + listener.onFailure(exception) + } }.whenever(this.mock).stats(any(), any()) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/SetReadOnlyStepTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/SetReadOnlyStepTests.kt index f46d142b3..a3a70ca6c 100644 --- 
a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/SetReadOnlyStepTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/SetReadOnlyStepTests.kt @@ -11,13 +11,13 @@ import com.nhaarman.mockitokotlin2.doReturn import com.nhaarman.mockitokotlin2.mock import com.nhaarman.mockitokotlin2.whenever import kotlinx.coroutines.runBlocking -import org.opensearch.core.action.ActionListener import org.opensearch.action.support.master.AcknowledgedResponse import org.opensearch.client.AdminClient import org.opensearch.client.Client import org.opensearch.client.IndicesAdminClient import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings +import org.opensearch.core.action.ActionListener import org.opensearch.indexmanagement.indexstatemanagement.step.readonly.SetReadOnlyStep import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData @@ -28,7 +28,6 @@ import org.opensearch.test.OpenSearchTestCase import org.opensearch.transport.RemoteTransportException class SetReadOnlyStepTests : OpenSearchTestCase() { - private val clusterService: ClusterService = mock() private val scriptService: ScriptService = mock() private val settings: Settings = Settings.EMPTY @@ -78,14 +77,19 @@ class SetReadOnlyStepTests : OpenSearchTestCase() { } private fun getClient(adminClient: AdminClient): Client = mock { on { admin() } doReturn adminClient } + private fun getAdminClient(indicesAdminClient: IndicesAdminClient): AdminClient = mock { on { indices() } doReturn indicesAdminClient } + private fun getIndicesAdminClient(setReadOnlyResponse: AcknowledgedResponse?, exception: Exception?): IndicesAdminClient { assertTrue("Must provide one and only one response or exception", (setReadOnlyResponse != null).xor(exception != null)) return mock { doAnswer { invocationOnMock -> val listener = invocationOnMock.getArgument>(1) - if (setReadOnlyResponse != null) listener.onResponse(setReadOnlyResponse) - else listener.onFailure(exception) + if (setReadOnlyResponse != null) { + listener.onResponse(setReadOnlyResponse) + } else { + listener.onFailure(exception) + } }.whenever(this.mock).updateSettings(any(), any()) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/SetReadWriteStepTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/SetReadWriteStepTests.kt index f42f92c82..77211adde 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/SetReadWriteStepTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/SetReadWriteStepTests.kt @@ -11,13 +11,13 @@ import com.nhaarman.mockitokotlin2.doReturn import com.nhaarman.mockitokotlin2.mock import com.nhaarman.mockitokotlin2.whenever import kotlinx.coroutines.runBlocking -import org.opensearch.core.action.ActionListener import org.opensearch.action.support.master.AcknowledgedResponse import org.opensearch.client.AdminClient import org.opensearch.client.Client import org.opensearch.client.IndicesAdminClient import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings +import org.opensearch.core.action.ActionListener import org.opensearch.indexmanagement.indexstatemanagement.step.readwrite.SetReadWriteStep import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import 
org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData @@ -28,7 +28,6 @@ import org.opensearch.test.OpenSearchTestCase import org.opensearch.transport.RemoteTransportException class SetReadWriteStepTests : OpenSearchTestCase() { - private val clusterService: ClusterService = mock() private val scriptService: ScriptService = mock() private val settings: Settings = Settings.EMPTY @@ -78,14 +77,19 @@ class SetReadWriteStepTests : OpenSearchTestCase() { } private fun getClient(adminClient: AdminClient): Client = mock { on { admin() } doReturn adminClient } + private fun getAdminClient(indicesAdminClient: IndicesAdminClient): AdminClient = mock { on { indices() } doReturn indicesAdminClient } + private fun getIndicesAdminClient(setReadWriteResponse: AcknowledgedResponse?, exception: Exception?): IndicesAdminClient { assertTrue("Must provide one and only one response or exception", (setReadWriteResponse != null).xor(exception != null)) return mock { doAnswer { invocationOnMock -> val listener = invocationOnMock.getArgument>(1) - if (setReadWriteResponse != null) listener.onResponse(setReadWriteResponse) - else listener.onFailure(exception) + if (setReadWriteResponse != null) { + listener.onResponse(setReadWriteResponse) + } else { + listener.onFailure(exception) + } }.whenever(this.mock).updateSettings(any(), any()) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/WaitForRollupCompletionStepTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/WaitForRollupCompletionStepTests.kt index b769d42e2..8537a3b54 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/WaitForRollupCompletionStepTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/WaitForRollupCompletionStepTests.kt @@ -24,22 +24,23 @@ import org.opensearch.test.OpenSearchTestCase import java.time.Instant class WaitForRollupCompletionStepTests : OpenSearchTestCase() { - private val clusterService: ClusterService = mock() private val scriptService: ScriptService = mock() private val settings: Settings = Settings.EMPTY private val rollupId: String = "dummy-id" private val indexName: String = "test" - private val metadata = ManagedIndexMetaData( - indexName, "indexUuid", "policy_id", null, null, null, null, null, null, null, - ActionMetaData - (WaitForRollupCompletionStep.name, 1, 0, false, 0, null, ActionProperties(rollupId = rollupId)), - null, null, null - ) - private val rollupMetadata = RollupMetadata( - rollupID = rollupId, lastUpdatedTime = Instant.now(), status = RollupMetadata.Status.FINISHED, - stats = RollupStats(1, 1, 1, 1, 1) - ) + private val metadata = + ManagedIndexMetaData( + indexName, "indexUuid", "policy_id", null, null, null, null, null, null, null, + ActionMetaData + (WaitForRollupCompletionStep.name, 1, 0, false, 0, null, ActionProperties(rollupId = rollupId)), + null, null, null, + ) + private val rollupMetadata = + RollupMetadata( + rollupID = rollupId, lastUpdatedTime = Instant.now(), status = RollupMetadata.Status.FINISHED, + stats = RollupStats(1, 1, 1, 1, 1), + ) private val client: Client = mock() private val step = WaitForRollupCompletionStep() private val lockService: LockService = LockService(mock(), clusterService) @@ -59,7 +60,7 @@ class WaitForRollupCompletionStepTests : OpenSearchTestCase() { assertEquals( "Missing failure message", WaitForRollupCompletionStep.getMissingRollupJobMessage(indexName), - 
updatedManagedIndexMetaData.info?.get("message") + updatedManagedIndexMetaData.info?.get("message"), ) } @@ -72,7 +73,7 @@ class WaitForRollupCompletionStepTests : OpenSearchTestCase() { assertEquals( "Missing failure message", WaitForRollupCompletionStep.getJobFailedMessage(rollupId, indexName), - updateManagedIndexMetaData.info?.get("message") + updateManagedIndexMetaData.info?.get("message"), ) assertEquals("Missing rollup failed action property", true, updateManagedIndexMetaData.actionMetaData?.actionProperties?.hasRollupFailed) } @@ -86,7 +87,7 @@ class WaitForRollupCompletionStepTests : OpenSearchTestCase() { assertEquals( "Missing failure message", WaitForRollupCompletionStep.getJobFailedMessage(rollupId, indexName), - updateManagedIndexMetaData.info?.get("message") + updateManagedIndexMetaData.info?.get("message"), ) assertEquals("Missing rollup failed action property", true, updateManagedIndexMetaData.actionMetaData?.actionProperties?.hasRollupFailed) assertEquals("Mismatch in cause", WaitForRollupCompletionStep.JOB_STOPPED_MESSAGE, updateManagedIndexMetaData.info?.get("cause")) @@ -101,7 +102,7 @@ class WaitForRollupCompletionStepTests : OpenSearchTestCase() { assertEquals( "Missing processing message", WaitForRollupCompletionStep.getJobProcessingMessage(rollupId, indexName), - updateManagedIndexMetaData.info?.get("message") + updateManagedIndexMetaData.info?.get("message"), ) assertNull("rollup failed property is not null", updateManagedIndexMetaData.actionMetaData?.actionProperties?.hasRollupFailed) } @@ -115,7 +116,7 @@ class WaitForRollupCompletionStepTests : OpenSearchTestCase() { assertEquals( "Missing processing message", WaitForRollupCompletionStep.getJobProcessingMessage(rollupId, indexName), - updateManagedIndexMetaData.info?.get("message") + updateManagedIndexMetaData.info?.get("message"), ) assertNull("rollup failed property is not null", updateManagedIndexMetaData.actionMetaData?.actionProperties?.hasRollupFailed) } @@ -129,7 +130,7 @@ class WaitForRollupCompletionStepTests : OpenSearchTestCase() { assertEquals( "Missing processing message", WaitForRollupCompletionStep.getJobCompletionMessage(rollupId, indexName), - updateManagedIndexMetaData.info?.get("message") + updateManagedIndexMetaData.info?.get("message"), ) assertNull("rollup failed property is not null", updateManagedIndexMetaData.actionMetaData?.actionProperties?.hasRollupFailed) } @@ -143,7 +144,7 @@ class WaitForRollupCompletionStepTests : OpenSearchTestCase() { assertEquals( "Missing processing message", WaitForRollupCompletionStep.getJobProcessingMessage(rollupId, indexName), - updateManagedIndexMetaData.info?.get("message") + updateManagedIndexMetaData.info?.get("message"), ) assertNull("rollup failed property is not null", updateManagedIndexMetaData.actionMetaData?.actionProperties?.hasRollupFailed) } @@ -156,7 +157,7 @@ class WaitForRollupCompletionStepTests : OpenSearchTestCase() { assertEquals( "Mismatch in message", WaitForRollupCompletionStep.getFailedMessage(rollupId, indexName), - updateManagedIndexMetaData.info?.get("message") + updateManagedIndexMetaData.info?.get("message"), ) assertEquals("Step status is not FAILED", Step.StepStatus.FAILED, updateManagedIndexMetaData.stepMetaData?.stepStatus) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/WaitForShrinkStepTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/WaitForShrinkStepTests.kt index dbad3098b..fd8fd60df 100644 --- 
a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/WaitForShrinkStepTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/WaitForShrinkStepTests.kt @@ -37,7 +37,6 @@ import org.opensearch.script.ScriptService import org.opensearch.test.OpenSearchTestCase class WaitForShrinkStepTests : OpenSearchTestCase() { - private val metadata: Metadata = mock {} private val clusterState: ClusterState = mock { on { metadata() } doReturn metadata } private val clusterService: ClusterService = mock { on { state() } doReturn clusterState } @@ -71,14 +70,16 @@ class WaitForShrinkStepTests : OpenSearchTestCase() { val client = getClient(getAdminClient(getIndicesAdminClient(ackedResponse, null))) val context = StepContext(managedIndexMetaData, clusterService, client, null, null, scriptService, settings, lockService) - val targetIndexMetadata: IndexMetadata = mock { - on { aliases } doReturn mapOf("target-alias" to AliasMetadata.builder("target-alias").build()) - } + val targetIndexMetadata: IndexMetadata = + mock { + on { aliases } doReturn mapOf("target-alias" to AliasMetadata.builder("target-alias").build()) + } whenever(metadata.index("target_index_name")).doReturn(targetIndexMetadata) - val sourceIndexMetadata: IndexMetadata = mock { - on { aliases } doReturn mapOf("source-alias" to AliasMetadata.builder("source-alias").build()) - } + val sourceIndexMetadata: IndexMetadata = + mock { + on { aliases } doReturn mapOf("source-alias" to AliasMetadata.builder("source-alias").build()) + } whenever(metadata.index("source_index_name")).doReturn(sourceIndexMetadata) runBlocking { @@ -86,20 +87,23 @@ class WaitForShrinkStepTests : OpenSearchTestCase() { assertTrue(aliasesSwitched) } - val argMatcher = ArgumentMatcher { request: IndicesAliasesRequest -> - val addToTarget = request.aliasActions - .filter { it.actionType() == IndicesAliasesRequest.AliasActions.Type.ADD } - .filter { it.indices().contentEquals(arrayOf("target_index_name")) } - .filter { it.aliases().contentEquals(arrayOf("source-alias")) } - .size == 1 - val removeFromSource = request.aliasActions - .filter { it.actionType() == IndicesAliasesRequest.AliasActions.Type.REMOVE } - .filter { it.indices().contentEquals(arrayOf("source_index_name")) } - .filter { it.aliases().contentEquals(arrayOf("source-alias")) } - .size == 1 - val onlyTwoActions = request.aliasActions.size == 2 - addToTarget && removeFromSource && onlyTwoActions - } + val argMatcher = + ArgumentMatcher { request: IndicesAliasesRequest -> + val addToTarget = + request.aliasActions + .filter { it.actionType() == IndicesAliasesRequest.AliasActions.Type.ADD } + .filter { it.indices().contentEquals(arrayOf("target_index_name")) } + .filter { it.aliases().contentEquals(arrayOf("source-alias")) } + .size == 1 + val removeFromSource = + request.aliasActions + .filter { it.actionType() == IndicesAliasesRequest.AliasActions.Type.REMOVE } + .filter { it.indices().contentEquals(arrayOf("source_index_name")) } + .filter { it.aliases().contentEquals(arrayOf("source-alias")) } + .size == 1 + val onlyTwoActions = request.aliasActions.size == 2 + addToTarget && removeFromSource && onlyTwoActions + } verify(client.admin().indices()).aliases(argThat(argMatcher), any()) } @@ -107,14 +111,16 @@ class WaitForShrinkStepTests : OpenSearchTestCase() { val client = getClient(getAdminClient(getIndicesAdminClient(ackedResponse, null))) val context = StepContext(managedIndexMetaData, clusterService, client, null, null, scriptService, settings, lockService) - 
val targetIndexMetadata: IndexMetadata = mock { - on { aliases } doReturn mapOf("conflict-alias" to AliasMetadata.builder("conflict-alias").build()) - } + val targetIndexMetadata: IndexMetadata = + mock { + on { aliases } doReturn mapOf("conflict-alias" to AliasMetadata.builder("conflict-alias").build()) + } whenever(metadata.index("target_index_name")).doReturn(targetIndexMetadata) - val sourceIndexMetadata: IndexMetadata = mock { - on { aliases } doReturn mapOf("conflict-alias" to AliasMetadata.builder("conflict-alias").build()) - } + val sourceIndexMetadata: IndexMetadata = + mock { + on { aliases } doReturn mapOf("conflict-alias" to AliasMetadata.builder("conflict-alias").build()) + } whenever(metadata.index("source_index_name")).doReturn(sourceIndexMetadata) runBlocking { @@ -122,16 +128,18 @@ class WaitForShrinkStepTests : OpenSearchTestCase() { assertTrue(aliasesSwitched) } - val argMatcher = ArgumentMatcher { request: IndicesAliasesRequest -> + val argMatcher = + ArgumentMatcher { request: IndicesAliasesRequest -> - val removeFromSource = request.aliasActions - .filter { it.actionType() == IndicesAliasesRequest.AliasActions.Type.REMOVE } - .filter { it.indices().contentEquals(arrayOf("source_index_name")) } - .filter { it.aliases().contentEquals(arrayOf("conflict-alias")) } - .size == 1 - val onlyOneAction = request.aliasActions.size == 1 - removeFromSource && onlyOneAction - } + val removeFromSource = + request.aliasActions + .filter { it.actionType() == IndicesAliasesRequest.AliasActions.Type.REMOVE } + .filter { it.indices().contentEquals(arrayOf("source_index_name")) } + .filter { it.aliases().contentEquals(arrayOf("conflict-alias")) } + .size == 1 + val onlyOneAction = request.aliasActions.size == 1 + removeFromSource && onlyOneAction + } verify(client.admin().indices()).aliases(argThat(argMatcher), any()) } @@ -139,14 +147,16 @@ class WaitForShrinkStepTests : OpenSearchTestCase() { val client = getClient(getAdminClient(getIndicesAdminClient(null, Exception()))) val context = StepContext(managedIndexMetaData, clusterService, client, null, null, scriptService, settings, lockService) - val targetIndexMetadata: IndexMetadata = mock { - on { aliases } doReturn emptyMap() - } + val targetIndexMetadata: IndexMetadata = + mock { + on { aliases } doReturn emptyMap() + } whenever(metadata.index("target_index_name")).doReturn(targetIndexMetadata) - val sourceIndexMetadata: IndexMetadata = mock { - on { aliases } doReturn emptyMap() - } + val sourceIndexMetadata: IndexMetadata = + mock { + on { aliases } doReturn emptyMap() + } whenever(metadata.index("source_index_name")).doReturn(sourceIndexMetadata) runBlocking { @@ -159,14 +169,16 @@ class WaitForShrinkStepTests : OpenSearchTestCase() { val client = getClient(getAdminClient(getIndicesAdminClient(unAckedResponse, null))) val context = StepContext(managedIndexMetaData, clusterService, client, null, null, scriptService, settings, lockService) - val targetIndexMetadata: IndexMetadata = mock { - on { aliases } doReturn emptyMap() - } + val targetIndexMetadata: IndexMetadata = + mock { + on { aliases } doReturn emptyMap() + } whenever(metadata.index("target_index_name")).doReturn(targetIndexMetadata) - val sourceIndexMetadata: IndexMetadata = mock { - on { aliases } doReturn emptyMap() - } + val sourceIndexMetadata: IndexMetadata = + mock { + on { aliases } doReturn emptyMap() + } whenever(metadata.index("source_index_name")).doReturn(sourceIndexMetadata) runBlocking { @@ -176,14 +188,19 @@ class WaitForShrinkStepTests : 
OpenSearchTestCase() { } private fun getClient(adminClient: AdminClient): Client = mock { on { admin() } doReturn adminClient } + private fun getAdminClient(indicesAdminClient: IndicesAdminClient): AdminClient = mock { on { indices() } doReturn indicesAdminClient } + private fun getIndicesAdminClient(response: AcknowledgedResponse?, exception: Exception?): IndicesAdminClient { assertTrue("Must provide one and only one response or exception", (response != null).xor(exception != null)) return mock { doAnswer { invocationOnMock -> val listener = invocationOnMock.getArgument>(1) - if (response != null) listener.onResponse(response) - else listener.onFailure(exception) + if (response != null) { + listener.onResponse(response) + } else { + listener.onFailure(exception) + } }.whenever(this.mock).aliases(any(), any()) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/WaitForSnapshotStepTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/WaitForSnapshotStepTests.kt index b30084392..ae4d394df 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/WaitForSnapshotStepTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/WaitForSnapshotStepTests.kt @@ -11,7 +11,6 @@ import com.nhaarman.mockitokotlin2.doReturn import com.nhaarman.mockitokotlin2.mock import com.nhaarman.mockitokotlin2.whenever import kotlinx.coroutines.runBlocking -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.cluster.snapshots.status.SnapshotStatus import org.opensearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse import org.opensearch.client.AdminClient @@ -20,6 +19,7 @@ import org.opensearch.client.ClusterAdminClient import org.opensearch.cluster.SnapshotsInProgress import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings +import org.opensearch.core.action.ActionListener import org.opensearch.indexmanagement.indexstatemanagement.action.SnapshotAction import org.opensearch.indexmanagement.indexstatemanagement.step.snapshot.WaitForSnapshotStep import org.opensearch.indexmanagement.spi.indexstatemanagement.Step @@ -35,7 +35,6 @@ import org.opensearch.test.OpenSearchTestCase import org.opensearch.transport.RemoteTransportException class WaitForSnapshotStepTests : OpenSearchTestCase() { - private val clusterService: ClusterService = mock() private val scriptService: ScriptService = mock() private val settings: Settings = Settings.EMPTY @@ -188,14 +187,19 @@ class WaitForSnapshotStepTests : OpenSearchTestCase() { } private fun getClient(adminClient: AdminClient): Client = mock { on { admin() } doReturn adminClient } + private fun getAdminClient(clusterAdminClient: ClusterAdminClient): AdminClient = mock { on { cluster() } doReturn clusterAdminClient } + private fun getClusterAdminClient(snapshotsStatusResponse: SnapshotsStatusResponse?, exception: Exception?): ClusterAdminClient { assertTrue("Must provide one and only one response or exception", (snapshotsStatusResponse != null).xor(exception != null)) return mock { doAnswer { invocationOnMock -> val listener = invocationOnMock.getArgument>(1) - if (snapshotsStatusResponse != null) listener.onResponse(snapshotsStatusResponse) - else listener.onFailure(exception) + if (snapshotsStatusResponse != null) { + listener.onResponse(snapshotsStatusResponse) + } else { + listener.onFailure(exception) + } }.whenever(this.mock).snapshotsStatus(any(), any()) } } diff --git 
a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/WaitForTransformCompletionStepTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/WaitForTransformCompletionStepTests.kt index d536c7449..bbd0bc99e 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/WaitForTransformCompletionStepTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/WaitForTransformCompletionStepTests.kt @@ -12,14 +12,14 @@ import org.opensearch.cluster.service.ClusterService import org.opensearch.common.settings.Settings import org.opensearch.indexmanagement.indexstatemanagement.step.transform.AttemptCreateTransformJobStep import org.opensearch.indexmanagement.indexstatemanagement.step.transform.WaitForTransformCompletionStep -import org.opensearch.indexmanagement.transform.model.TransformMetadata -import org.opensearch.indexmanagement.transform.model.TransformStats import org.opensearch.indexmanagement.spi.indexstatemanagement.Step import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ActionMetaData import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ActionProperties import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ManagedIndexMetaData import org.opensearch.indexmanagement.spi.indexstatemanagement.model.StepContext import org.opensearch.indexmanagement.spi.indexstatemanagement.model.TransformActionProperties +import org.opensearch.indexmanagement.transform.model.TransformMetadata +import org.opensearch.indexmanagement.transform.model.TransformStats import org.opensearch.indexmanagement.util.NO_ID import org.opensearch.jobscheduler.spi.utils.LockService import org.opensearch.script.ScriptService @@ -27,38 +27,39 @@ import org.opensearch.test.OpenSearchTestCase import java.time.Instant class WaitForTransformCompletionStepTests : OpenSearchTestCase() { - private val clusterService: ClusterService = mock() private val scriptService: ScriptService = mock() private val settings: Settings = Settings.EMPTY private val transformId: String = "dummy-id" private val indexName: String = "test" - private val metadata = ManagedIndexMetaData( - indexName, - "indexUuid", - "policy_id", - null, - null, - null, - null, - null, - null, - null, - ActionMetaData( - AttemptCreateTransformJobStep.name, 1, 0, false, 0, null, - ActionProperties(transformActionProperties = TransformActionProperties(transformId)) - ), - null, - null, - null - ) - private val transformMetadata = TransformMetadata( - id = NO_ID, - transformId = transformId, - lastUpdatedAt = Instant.now(), - status = TransformMetadata.Status.FINISHED, - stats = TransformStats(1, 1, 1, 1, 1) - ) + private val metadata = + ManagedIndexMetaData( + indexName, + "indexUuid", + "policy_id", + null, + null, + null, + null, + null, + null, + null, + ActionMetaData( + AttemptCreateTransformJobStep.name, 1, 0, false, 0, null, + ActionProperties(transformActionProperties = TransformActionProperties(transformId)), + ), + null, + null, + null, + ) + private val transformMetadata = + TransformMetadata( + id = NO_ID, + transformId = transformId, + lastUpdatedAt = Instant.now(), + status = TransformMetadata.Status.FINISHED, + stats = TransformStats(1, 1, 1, 1, 1), + ) private val client: Client = mock() private val step = WaitForTransformCompletionStep() private val lockService: LockService = LockService(mock(), clusterService) @@ -78,7 +79,7 @@ class WaitForTransformCompletionStepTests : OpenSearchTestCase() { assertEquals( 
"Missing failure message", WaitForTransformCompletionStep.getMissingTransformJobMessage(indexName), - updatedManagedIndexMetaData.info?.get("message") + updatedManagedIndexMetaData.info?.get("message"), ) } @@ -91,7 +92,7 @@ class WaitForTransformCompletionStepTests : OpenSearchTestCase() { assertEquals( "Missing failure message", WaitForTransformCompletionStep.getJobFailedMessage(transformId, indexName), - updateManagedIndexMetaData.info?.get("message") + updateManagedIndexMetaData.info?.get("message"), ) } @@ -104,7 +105,7 @@ class WaitForTransformCompletionStepTests : OpenSearchTestCase() { assertEquals( "Missing failure message", WaitForTransformCompletionStep.getJobFailedMessage(transformId, indexName), - updateManagedIndexMetaData.info?.get("message") + updateManagedIndexMetaData.info?.get("message"), ) assertEquals("Mismatch in cause", WaitForTransformCompletionStep.JOB_STOPPED_MESSAGE, updateManagedIndexMetaData.info?.get("cause")) } @@ -117,12 +118,12 @@ class WaitForTransformCompletionStepTests : OpenSearchTestCase() { assertEquals( "Step status is not CONDITION_NOT_MET", Step.StepStatus.CONDITION_NOT_MET, - updateManagedIndexMetaData.stepMetaData?.stepStatus + updateManagedIndexMetaData.stepMetaData?.stepStatus, ) assertEquals( "Missing processing message", WaitForTransformCompletionStep.getJobProcessingMessage(transformId, indexName), - updateManagedIndexMetaData.info?.get("message") + updateManagedIndexMetaData.info?.get("message"), ) } @@ -135,7 +136,7 @@ class WaitForTransformCompletionStepTests : OpenSearchTestCase() { assertEquals( "Missing processing message", WaitForTransformCompletionStep.getJobProcessingMessage(transformId, indexName), - updateManagedIndexMetaData.info?.get("message") + updateManagedIndexMetaData.info?.get("message"), ) } @@ -148,7 +149,7 @@ class WaitForTransformCompletionStepTests : OpenSearchTestCase() { assertEquals( "Missing processing message", WaitForTransformCompletionStep.getJobCompletionMessage(transformId, indexName), - updateManagedIndexMetaData.info?.get("message") + updateManagedIndexMetaData.info?.get("message"), ) } @@ -160,7 +161,7 @@ class WaitForTransformCompletionStepTests : OpenSearchTestCase() { assertEquals( "Mismatch in message", WaitForTransformCompletionStep.getFailedMessage(transformId, indexName), - updateManagedIndexMetaData.info?.get("message") + updateManagedIndexMetaData.info?.get("message"), ) assertEquals("Step status is not FAILED", Step.StepStatus.FAILED, updateManagedIndexMetaData.stepMetaData?.stepStatus) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/ISMStatusResponseTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/ISMStatusResponseTests.kt index 480d734c9..0adf6327a 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/ISMStatusResponseTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/ISMStatusResponseTests.kt @@ -11,7 +11,6 @@ import org.opensearch.indexmanagement.indexstatemanagement.util.FailedIndex import org.opensearch.test.OpenSearchTestCase class ISMStatusResponseTests : OpenSearchTestCase() { - fun `test ISM status response`() { val updated = 1 val failedIndex = FailedIndex("index", "uuid", "reason") diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/addpolicy/AddPolicyRequestTests.kt 
b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/addpolicy/AddPolicyRequestTests.kt index bdbf69819..7e96ca2c5 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/addpolicy/AddPolicyRequestTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/addpolicy/AddPolicyRequestTests.kt @@ -11,7 +11,6 @@ import org.opensearch.indexmanagement.indexstatemanagement.util.DEFAULT_INDEX_TY import org.opensearch.test.OpenSearchTestCase class AddPolicyRequestTests : OpenSearchTestCase() { - fun `test add policy request`() { val indices = listOf("index1", "index2") val policyID = "policyID" diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/changepolicy/ChangePolicyRequestTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/changepolicy/ChangePolicyRequestTests.kt index e77141197..0638d636c 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/changepolicy/ChangePolicyRequestTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/changepolicy/ChangePolicyRequestTests.kt @@ -13,7 +13,6 @@ import org.opensearch.indexmanagement.indexstatemanagement.util.DEFAULT_INDEX_TY import org.opensearch.test.OpenSearchTestCase class ChangePolicyRequestTests : OpenSearchTestCase() { - fun `test change policy request`() { val indices = listOf("index1", "index2") val stateFilter = StateFilter("state1") diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/deletepolicy/DeletePolicyRequestTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/deletepolicy/DeletePolicyRequestTests.kt index ce6488a4a..4bc7da006 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/deletepolicy/DeletePolicyRequestTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/deletepolicy/DeletePolicyRequestTests.kt @@ -11,7 +11,6 @@ import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.test.OpenSearchTestCase class DeletePolicyRequestTests : OpenSearchTestCase() { - fun `test delete policy request`() { val policyID = "policyID" val refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/ExplainRequestTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/ExplainRequestTests.kt index 61d9d81e2..b88f3101e 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/ExplainRequestTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/ExplainRequestTests.kt @@ -6,14 +6,13 @@ package org.opensearch.indexmanagement.indexstatemanagement.transport.action.explain import org.opensearch.common.io.stream.BytesStreamOutput -import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.common.unit.TimeValue +import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.indexmanagement.common.model.rest.SearchParams import org.opensearch.indexmanagement.indexstatemanagement.util.DEFAULT_INDEX_TYPE import org.opensearch.test.OpenSearchTestCase class ExplainRequestTests : OpenSearchTestCase() { - 
fun `test explain request`() { val indices = listOf("index1", "index2") val local = true diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/ExplainResponseTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/ExplainResponseTests.kt index bbab164f3..3bbd5bd7f 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/ExplainResponseTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/explain/ExplainResponseTests.kt @@ -14,26 +14,26 @@ import org.opensearch.indexmanagement.spi.indexstatemanagement.model.ValidationR import org.opensearch.test.OpenSearchTestCase class ExplainResponseTests : OpenSearchTestCase() { - fun `test explain response`() { val indexNames = listOf("index1") val indexPolicyIDs = listOf("policyID1") - val metadata = ManagedIndexMetaData( - index = "index1", - indexUuid = randomAlphaOfLength(10), - policyID = "policyID1", - policySeqNo = randomNonNegativeLong(), - policyPrimaryTerm = randomNonNegativeLong(), - policyCompleted = null, - rolledOver = null, - indexCreationDate = null, - transitionTo = randomAlphaOfLength(10), - stateMetaData = null, - actionMetaData = null, - stepMetaData = null, - policyRetryInfo = null, - info = null - ) + val metadata = + ManagedIndexMetaData( + index = "index1", + indexUuid = randomAlphaOfLength(10), + policyID = "policyID1", + policySeqNo = randomNonNegativeLong(), + policyPrimaryTerm = randomNonNegativeLong(), + policyCompleted = null, + rolledOver = null, + indexCreationDate = null, + transitionTo = randomAlphaOfLength(10), + stateMetaData = null, + actionMetaData = null, + stepMetaData = null, + policyRetryInfo = null, + info = null, + ) val validationResult = ValidationResult("test", Validate.ValidationStatus.FAILED) val validationResults = listOf(validationResult) val indexMetadatas = listOf(metadata) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPoliciesRequestTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPoliciesRequestTests.kt index 84a2c78dc..d06cf4a1a 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPoliciesRequestTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPoliciesRequestTests.kt @@ -11,7 +11,6 @@ import org.opensearch.indexmanagement.common.model.rest.SearchParams import org.opensearch.test.OpenSearchTestCase class GetPoliciesRequestTests : OpenSearchTestCase() { - fun `test get policies request`() { val table = SearchParams(20, 0, "policy.policy_id.keyword", "desc", "*") val req = GetPoliciesRequest(table) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPoliciesResponseTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPoliciesResponseTests.kt index 939774f07..0a1825bbc 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPoliciesResponseTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPoliciesResponseTests.kt @@ -6,11 +6,11 @@ package org.opensearch.indexmanagement.indexstatemanagement.transport.action.getpolicy import 
org.opensearch.common.io.stream.BytesStreamOutput -import org.opensearch.core.common.io.stream.StreamInput -import org.opensearch.core.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.json.JsonXContent +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.xcontent.ToXContent import org.opensearch.indexmanagement.indexstatemanagement.ISMActionsParser import org.opensearch.indexmanagement.indexstatemanagement.extension.SampleCustomActionParser import org.opensearch.indexmanagement.indexstatemanagement.model.Policy @@ -24,7 +24,6 @@ import java.time.Instant import java.time.temporal.ChronoUnit class GetPoliciesResponseTests : OpenSearchTestCase() { - fun `test get policies response`() { val policy = randomPolicy() val res = GetPoliciesResponse(listOf(policy), 1) @@ -47,15 +46,16 @@ class GetPoliciesResponseTests : OpenSearchTestCase() { val policyID = "policyID" val action = SampleCustomActionParser.SampleCustomAction(someInt = randomInt(), index = 0) val states = listOf(State(name = "CustomState", actions = listOf(action), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) val res = GetPoliciesResponse(listOf(policy), 1) val responseString = res.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS).string() diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPolicyRequestTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPolicyRequestTests.kt index a05e45145..fb44ae5ec 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPolicyRequestTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPolicyRequestTests.kt @@ -11,7 +11,6 @@ import org.opensearch.search.fetch.subphase.FetchSourceContext import org.opensearch.test.OpenSearchTestCase class GetPolicyRequestTests : OpenSearchTestCase() { - fun `test get policy request`() { val policyID = "policyID" val version: Long = 123 diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPolicyResponseTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPolicyResponseTests.kt index b84b9150c..6c91207f7 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPolicyResponseTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/getpolicy/GetPolicyResponseTests.kt @@ -6,11 +6,11 @@ package org.opensearch.indexmanagement.indexstatemanagement.transport.action.getpolicy import org.opensearch.common.io.stream.BytesStreamOutput -import org.opensearch.core.common.io.stream.StreamInput -import org.opensearch.core.xcontent.ToXContent import 
org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.json.JsonXContent +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.xcontent.ToXContent import org.opensearch.indexmanagement.indexstatemanagement.ISMActionsParser import org.opensearch.indexmanagement.indexstatemanagement.action.IndexPriorityAction import org.opensearch.indexmanagement.indexstatemanagement.extension.SampleCustomActionParser @@ -24,7 +24,6 @@ import java.time.Instant import java.time.temporal.ChronoUnit class GetPolicyResponseTests : OpenSearchTestCase() { - fun `test get policy response`() { val id = "id" val version: Long = 1 @@ -32,15 +31,16 @@ class GetPolicyResponseTests : OpenSearchTestCase() { val seqNo: Long = 456 val actionConfig = IndexPriorityAction(50, 0) val states = listOf(State(name = "SetPriorityState", actions = listOf(actionConfig), transitions = listOf())) - val policy = Policy( - id = "policyID", - description = "description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = "policyID", + description = "description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) val res = GetPolicyResponse(id, version, seqNo, primaryTerm, policy) val out = BytesStreamOutput() @@ -66,15 +66,16 @@ class GetPolicyResponseTests : OpenSearchTestCase() { val policyID = "policyID" val action = SampleCustomActionParser.SampleCustomAction(someInt = randomInt(), index = 0) val states = listOf(State(name = "CustomState", actions = listOf(action), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) val res = GetPolicyResponse(id, version, seqNo, primaryTerm, policy) val responseString = res.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS).string() diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/IndexPolicyRequestTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/IndexPolicyRequestTests.kt index 3cc77a0ad..7a0c2647e 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/IndexPolicyRequestTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/IndexPolicyRequestTests.kt @@ -22,20 +22,20 @@ import java.time.Instant import java.time.temporal.ChronoUnit class IndexPolicyRequestTests : OpenSearchTestCase() { - fun `test index policy request index priority action`() { val policyID = "policyID" val action = IndexPriorityAction(50, 0) val states = listOf(State(name = "SetPriorityState", actions = listOf(action), transitions = listOf())) - val policy = Policy( - id = 
policyID, - description = "description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) val seqNo: Long = 123 val primaryTerm: Long = 456 val refreshPolicy = WriteRequest.RefreshPolicy.NONE @@ -56,15 +56,16 @@ class IndexPolicyRequestTests : OpenSearchTestCase() { val action = AllocationAction(require = mapOf("box_type" to "hot"), exclude = emptyMap(), include = emptyMap(), index = 0) val states = listOf(State("Allocate", listOf(action), listOf())) - val policy = Policy( - id = policyID, - description = "description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) val seqNo: Long = 123 val primaryTerm: Long = 456 val refreshPolicy = WriteRequest.RefreshPolicy.NONE @@ -85,15 +86,16 @@ class IndexPolicyRequestTests : OpenSearchTestCase() { val action = DeleteAction(index = 0) val states = listOf(State("Delete", listOf(action), listOf())) - val policy = Policy( - id = policyID, - description = "description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) val seqNo: Long = 123 val primaryTerm: Long = 456 val refreshPolicy = WriteRequest.RefreshPolicy.NONE @@ -117,15 +119,16 @@ class IndexPolicyRequestTests : OpenSearchTestCase() { val action = SampleCustomActionParser.SampleCustomAction(someInt = randomInt(), index = 0) val states = listOf(State("MyState", listOf(action), listOf())) - val policy = Policy( - id = policyID, - description = "description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) val seqNo: Long = 123 val primaryTerm: Long = 456 val refreshPolicy = WriteRequest.RefreshPolicy.NONE diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/IndexPolicyResponseTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/IndexPolicyResponseTests.kt index 7e68b0a0c..121820683 100644 --- 
a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/IndexPolicyResponseTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/indexpolicy/IndexPolicyResponseTests.kt @@ -6,11 +6,12 @@ package org.opensearch.indexmanagement.indexstatemanagement.transport.action.indexpolicy import org.opensearch.common.io.stream.BytesStreamOutput -import org.opensearch.core.common.io.stream.StreamInput -import org.opensearch.core.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory import org.opensearch.common.xcontent.XContentHelper import org.opensearch.common.xcontent.json.JsonXContent +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.ToXContent import org.opensearch.indexmanagement.indexstatemanagement.ISMActionsParser import org.opensearch.indexmanagement.indexstatemanagement.action.IndexPriorityAction import org.opensearch.indexmanagement.indexstatemanagement.extension.SampleCustomActionParser @@ -19,13 +20,11 @@ import org.opensearch.indexmanagement.indexstatemanagement.model.State import org.opensearch.indexmanagement.indexstatemanagement.randomErrorNotification import org.opensearch.indexmanagement.opensearchapi.convertToMap import org.opensearch.indexmanagement.opensearchapi.string -import org.opensearch.core.rest.RestStatus import org.opensearch.test.OpenSearchTestCase import java.time.Instant import java.time.temporal.ChronoUnit class IndexPolicyResponseTests : OpenSearchTestCase() { - fun `test index policy response index priority action`() { val id = "id" val version: Long = 1 @@ -34,15 +33,16 @@ class IndexPolicyResponseTests : OpenSearchTestCase() { val policyID = "policyID" val action = IndexPriorityAction(50, 0) val states = listOf(State(name = "SetPriorityState", actions = listOf(action), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) val status = RestStatus.CREATED val res = IndexPolicyResponse(id, version, primaryTerm, seqNo, policy, status) @@ -71,15 +71,16 @@ class IndexPolicyResponseTests : OpenSearchTestCase() { val policyID = "policyID" val action = SampleCustomActionParser.SampleCustomAction(someInt = randomInt(), index = 0) val states = listOf(State(name = "CustomState", actions = listOf(action), transitions = listOf())) - val policy = Policy( - id = policyID, - description = "description", - schemaVersion = 1L, - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), - errorNotification = randomErrorNotification(), - defaultState = states[0].name, - states = states - ) + val policy = + Policy( + id = policyID, + description = "description", + schemaVersion = 1L, + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorNotification = randomErrorNotification(), + defaultState = states[0].name, + states = states, + ) val status = RestStatus.CREATED val res = IndexPolicyResponse(id, version, primaryTerm, seqNo, policy, status) diff --git 
a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/removepolicy/RemovePolicyRequestTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/removepolicy/RemovePolicyRequestTests.kt index 2386c010b..6290b0ece 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/removepolicy/RemovePolicyRequestTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/removepolicy/RemovePolicyRequestTests.kt @@ -11,7 +11,6 @@ import org.opensearch.indexmanagement.indexstatemanagement.util.DEFAULT_INDEX_TY import org.opensearch.test.OpenSearchTestCase class RemovePolicyRequestTests : OpenSearchTestCase() { - fun `test remove policy request`() { val indices = listOf("index1", "index2") val req = RemovePolicyRequest(indices, DEFAULT_INDEX_TYPE) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/retryfailedmanagedindex/RetryFailedManagedIndexRequestTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/retryfailedmanagedindex/RetryFailedManagedIndexRequestTests.kt index 6cf699be6..ae436dd4b 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/retryfailedmanagedindex/RetryFailedManagedIndexRequestTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/transport/action/retryfailedmanagedindex/RetryFailedManagedIndexRequestTests.kt @@ -6,13 +6,12 @@ package org.opensearch.indexmanagement.indexstatemanagement.transport.action.retryfailedmanagedindex import org.opensearch.common.io.stream.BytesStreamOutput -import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.common.unit.TimeValue +import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.indexmanagement.indexstatemanagement.util.DEFAULT_INDEX_TYPE import org.opensearch.test.OpenSearchTestCase class RetryFailedManagedIndexRequestTests : OpenSearchTestCase() { - fun `test retry managed index request`() { val indices = listOf("index1", "index2") val startState = "state1" diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/ManagedIndexUtilsTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/ManagedIndexUtilsTests.kt index d6dfed16a..426549de5 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/ManagedIndexUtilsTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/ManagedIndexUtilsTests.kt @@ -5,15 +5,15 @@ package org.opensearch.indexmanagement.indexstatemanagement.util -import org.opensearch.core.common.bytes.BytesReference -import org.opensearch.core.common.unit.ByteSizeValue import org.opensearch.common.unit.TimeValue import org.opensearch.common.xcontent.LoggingDeprecationHandler import org.opensearch.common.xcontent.XContentHelper -import org.opensearch.core.xcontent.XContentParser import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.destination.message.LegacyBaseMessage import org.opensearch.commons.destination.message.LegacyCustomWebhookMessage +import org.opensearch.core.common.bytes.BytesReference +import org.opensearch.core.common.unit.ByteSizeValue +import org.opensearch.core.xcontent.XContentParser import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import 
org.opensearch.indexmanagement.indexstatemanagement.action.RolloverAction import org.opensearch.indexmanagement.indexstatemanagement.model.Conditions @@ -30,7 +30,6 @@ import java.time.Instant @Suppress("UnusedPrivateMember") class ManagedIndexUtilsTests : OpenSearchTestCase() { - fun `test create managed index request`() { val index = randomAlphaOfLength(10) val uuid = randomAlphaOfLength(10) @@ -63,10 +62,11 @@ class ManagedIndexUtilsTests : OpenSearchTestCase() { val index = randomAlphaOfLength(10) val uuid = randomAlphaOfLength(10) val policyID = randomAlphaOfLength(10) - val sweptManagedIndexConfig = SweptManagedIndexConfig( - index = index, uuid = uuid, policyID = policyID, - primaryTerm = 1, seqNo = 1, changePolicy = randomChangePolicy(policyID = policyID), policy = null - ) + val sweptManagedIndexConfig = + SweptManagedIndexConfig( + index = index, uuid = uuid, policyID = policyID, + primaryTerm = 1, seqNo = 1, changePolicy = randomChangePolicy(policyID = policyID), policy = null, + ) val updateRequest = updateManagedIndexRequest(sweptManagedIndexConfig) assertNotNull("UpdateRequest not created", updateRequest) @@ -77,7 +77,7 @@ class ManagedIndexUtilsTests : OpenSearchTestCase() { logger.info("source is $source") assertEquals( "Incorrect policy_id added to change_policy", sweptManagedIndexConfig.policyID, - ((source["managed_index"] as Map)["change_policy"] as Map)["policy_id"] + ((source["managed_index"] as Map)["change_policy"] as Map)["policy_id"], ) } @@ -86,28 +86,32 @@ class ManagedIndexUtilsTests : OpenSearchTestCase() { val sweptConfigToDelete = randomSweptManagedIndexConfig(policyID = "delete_me") val clusterConfigToUpdate = randomClusterStateManagedIndexConfig(policyID = "update_me") - val sweptConfigToBeUpdated = randomSweptManagedIndexConfig( - index = clusterConfigToUpdate.index, - uuid = clusterConfigToUpdate.uuid, policyID = "to_something_new", seqNo = 5, primaryTerm = 17 - ) + val sweptConfigToBeUpdated = + randomSweptManagedIndexConfig( + index = clusterConfigToUpdate.index, + uuid = clusterConfigToUpdate.uuid, policyID = "to_something_new", seqNo = 5, primaryTerm = 17, + ) val clusterConfigBeingUpdated = randomClusterStateManagedIndexConfig(policyID = "updating") - val sweptConfigBeingUpdated = randomSweptManagedIndexConfig( - index = clusterConfigBeingUpdated.index, - uuid = clusterConfigBeingUpdated.uuid, policyID = "to_something_new", seqNo = 5, primaryTerm = 17, - changePolicy = randomChangePolicy("updating") - ) + val sweptConfigBeingUpdated = + randomSweptManagedIndexConfig( + index = clusterConfigBeingUpdated.index, + uuid = clusterConfigBeingUpdated.uuid, policyID = "to_something_new", seqNo = 5, primaryTerm = 17, + changePolicy = randomChangePolicy("updating"), + ) val clusterConfig = randomClusterStateManagedIndexConfig(policyID = "do_nothing") - val sweptConfig = randomSweptManagedIndexConfig( - index = clusterConfig.index, - uuid = clusterConfig.uuid, policyID = clusterConfig.policyID, seqNo = 5, primaryTerm = 17 - ) - - val managedIndicesToDelete = getManagedIndicesToDelete( - listOf(clusterConfig.uuid, clusterConfigToUpdate.uuid, clusterConfigBeingUpdated.uuid, clusterConfigToCreate.uuid), - listOf(sweptConfig.uuid, sweptConfigToDelete.uuid, sweptConfigToBeUpdated.uuid, sweptConfigBeingUpdated.uuid) - ) + val sweptConfig = + randomSweptManagedIndexConfig( + index = clusterConfig.index, + uuid = clusterConfig.uuid, policyID = clusterConfig.policyID, seqNo = 5, primaryTerm = 17, + ) + + val managedIndicesToDelete = + getManagedIndicesToDelete( + 
listOf(clusterConfig.uuid, clusterConfigToUpdate.uuid, clusterConfigBeingUpdated.uuid, clusterConfigToCreate.uuid), + listOf(sweptConfig.uuid, sweptConfigToDelete.uuid, sweptConfigToBeUpdated.uuid, sweptConfigBeingUpdated.uuid), + ) val requests = managedIndicesToDelete.map { deleteManagedIndexRequest(it) } assertEquals("Too many requests", 1, requests.size) @@ -129,108 +133,108 @@ class ManagedIndexUtilsTests : OpenSearchTestCase() { assertTrue( "No conditions should always pass", noConditionsConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(0), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(0), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO), ) assertTrue( "No conditions should always pass", noConditionsConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(100), numDocs = 5, indexSize = ByteSizeValue(5), primaryShardSize = ByteSizeValue(5)) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(100), numDocs = 5, indexSize = ByteSizeValue(5), primaryShardSize = ByteSizeValue(5)), ) assertTrue( "No conditions should always pass", noConditionsConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(6000), numDocs = 5, indexSize = ByteSizeValue(5), primaryShardSize = ByteSizeValue(5)) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(6000), numDocs = 5, indexSize = ByteSizeValue(5), primaryShardSize = ByteSizeValue(5)), ) val minSizeConfig = RolloverAction(minSize = ByteSizeValue(5), minDocs = null, minAge = null, minPrimaryShardSize = null, index = 0) assertFalse( "Less bytes should not pass", minSizeConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO), ) assertTrue( "Equal bytes should pass", minSizeConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 0, indexSize = ByteSizeValue(5), primaryShardSize = ByteSizeValue(5)) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 0, indexSize = ByteSizeValue(5), primaryShardSize = ByteSizeValue(5)), ) assertTrue( "More bytes should pass", minSizeConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 0, indexSize = ByteSizeValue(10), primaryShardSize = ByteSizeValue(10)) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 0, indexSize = ByteSizeValue(10), primaryShardSize = ByteSizeValue(10)), ) val minPrimarySizeConfig = RolloverAction(minSize = null, minDocs = null, minAge = null, minPrimaryShardSize = ByteSizeValue(5), index = 0) assertFalse( "Less primary bytes should not pass", minPrimarySizeConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO), ) assertTrue( "Equal primary bytes should pass", minPrimarySizeConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 0, indexSize = ByteSizeValue(5), 
primaryShardSize = ByteSizeValue(5)) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 0, indexSize = ByteSizeValue(5), primaryShardSize = ByteSizeValue(5)), ) assertTrue( "More primary bytes should pass", minPrimarySizeConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 0, indexSize = ByteSizeValue(10), primaryShardSize = ByteSizeValue(10)) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 0, indexSize = ByteSizeValue(10), primaryShardSize = ByteSizeValue(10)), ) val minDocsConfig = RolloverAction(minSize = null, minDocs = 5, minAge = null, minPrimaryShardSize = null, index = 0) assertFalse( "Less docs should not pass", minDocsConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO), ) assertTrue( "Equal docs should pass", minDocsConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 5, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 5, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO), ) assertTrue( "More docs should pass", minDocsConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 10, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 10, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO), ) val minAgeConfig = RolloverAction(minSize = null, minDocs = null, minAge = TimeValue.timeValueSeconds(5), minPrimaryShardSize = null, index = 0) assertFalse( "Index age that is too young should not pass", minAgeConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(1000), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO), ) assertTrue( "Index age that is older should pass", minAgeConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(10000), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(10000), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO), ) val multiConfig = RolloverAction(minSize = ByteSizeValue(1), minDocs = 1, minAge = TimeValue.timeValueSeconds(5), minPrimaryShardSize = ByteSizeValue(1), index = 0) assertFalse( "No conditions met should not pass", multiConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(0), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(0), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO), ) assertTrue( "Multi condition, age should pass", multiConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(10000), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO) + .evaluateConditions(indexAgeTimeValue = 
TimeValue.timeValueMillis(10000), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO), ) assertTrue( "Multi condition, docs should pass", multiConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(0), numDocs = 2, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(0), numDocs = 2, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue.ZERO), ) assertTrue( "Multi condition, size should pass", multiConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(0), numDocs = 0, indexSize = ByteSizeValue(2), primaryShardSize = ByteSizeValue.ZERO) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(0), numDocs = 0, indexSize = ByteSizeValue(2), primaryShardSize = ByteSizeValue.ZERO), ) assertTrue( "Multi condition, primary size should pass", multiConfig - .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(0), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue(2)) + .evaluateConditions(indexAgeTimeValue = TimeValue.timeValueMillis(0), numDocs = 0, indexSize = ByteSizeValue.ZERO, primaryShardSize = ByteSizeValue(2)), ) } @@ -239,54 +243,56 @@ class ManagedIndexUtilsTests : OpenSearchTestCase() { assertTrue( "No conditions should pass", emptyTransition - .evaluateConditions(indexCreationDate = Instant.now(), numDocs = null, indexSize = null, transitionStartTime = Instant.now(), rolloverDate = null) + .evaluateConditions(indexCreationDate = Instant.now(), numDocs = null, indexSize = null, transitionStartTime = Instant.now(), rolloverDate = null), ) - val timeTransition = Transition( - stateName = "some_state", - conditions = Conditions(indexAge = TimeValue.timeValueSeconds(5)) - ) + val timeTransition = + Transition( + stateName = "some_state", + conditions = Conditions(indexAge = TimeValue.timeValueSeconds(5)), + ) assertFalse( "Index age that is too young should not pass", timeTransition - .evaluateConditions(indexCreationDate = Instant.now(), numDocs = null, indexSize = null, transitionStartTime = Instant.now(), rolloverDate = null) + .evaluateConditions(indexCreationDate = Instant.now(), numDocs = null, indexSize = null, transitionStartTime = Instant.now(), rolloverDate = null), ) assertTrue( "Index age that is older should pass", timeTransition - .evaluateConditions(indexCreationDate = Instant.now().minusSeconds(10), numDocs = null, indexSize = null, transitionStartTime = Instant.now(), rolloverDate = null) + .evaluateConditions(indexCreationDate = Instant.now().minusSeconds(10), numDocs = null, indexSize = null, transitionStartTime = Instant.now(), rolloverDate = null), ) assertFalse( "Index age that is -1L should not pass", timeTransition - .evaluateConditions(indexCreationDate = Instant.ofEpochMilli(-1L), numDocs = null, indexSize = null, transitionStartTime = Instant.now(), rolloverDate = null) + .evaluateConditions(indexCreationDate = Instant.ofEpochMilli(-1L), numDocs = null, indexSize = null, transitionStartTime = Instant.now(), rolloverDate = null), ) - val rolloverTimeTransition = Transition( - stateName = "some_state", - conditions = Conditions(rolloverAge = TimeValue.timeValueSeconds(5)) - ) + val rolloverTimeTransition = + Transition( + stateName = "some_state", + conditions = Conditions(rolloverAge = TimeValue.timeValueSeconds(5)), + ) assertFalse( "Rollover age that is too young should not pass", rolloverTimeTransition - .evaluateConditions(indexCreationDate = 
Instant.now(), numDocs = null, indexSize = null, transitionStartTime = Instant.now(), rolloverDate = Instant.now()) + .evaluateConditions(indexCreationDate = Instant.now(), numDocs = null, indexSize = null, transitionStartTime = Instant.now(), rolloverDate = Instant.now()), ) assertTrue( "Rollover age that is older should pass", rolloverTimeTransition - .evaluateConditions(indexCreationDate = Instant.now().minusSeconds(10), numDocs = null, indexSize = null, transitionStartTime = Instant.now(), rolloverDate = Instant.now().minusSeconds(10)) + .evaluateConditions(indexCreationDate = Instant.now().minusSeconds(10), numDocs = null, indexSize = null, transitionStartTime = Instant.now(), rolloverDate = Instant.now().minusSeconds(10)), ) assertFalse( "Rollover age that is null should not pass", rolloverTimeTransition - .evaluateConditions(indexCreationDate = Instant.ofEpochMilli(-1L), numDocs = null, indexSize = null, transitionStartTime = Instant.now(), rolloverDate = null) + .evaluateConditions(indexCreationDate = Instant.ofEpochMilli(-1L), numDocs = null, indexSize = null, transitionStartTime = Instant.now(), rolloverDate = null), ) } private fun contentParser(bytesReference: BytesReference): XContentParser { return XContentHelper.createParser( xContentRegistry(), LoggingDeprecationHandler.INSTANCE, - bytesReference, XContentType.JSON + bytesReference, XContentType.JSON, ) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/StepUtilsTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/StepUtilsTests.kt index 9eef04b65..fd52e05b5 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/StepUtilsTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/util/StepUtilsTests.kt @@ -23,18 +23,18 @@ import org.opensearch.monitor.fs.FsInfo import org.opensearch.test.OpenSearchTestCase class StepUtilsTests : OpenSearchTestCase() { - fun `test get shrink lock model`() { - val shrinkActionProperties = ShrinkActionProperties( - randomAlphaOfLength(10), - randomAlphaOfLength(10), - randomInt(), - randomNonNegativeLong(), - randomNonNegativeLong(), - randomInstant().toEpochMilli(), - randomInstant().toEpochMilli(), - mapOf() - ) + val shrinkActionProperties = + ShrinkActionProperties( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomInt(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomInstant().toEpochMilli(), + randomInstant().toEpochMilli(), + mapOf(), + ) val lockModel = getShrinkLockModel(shrinkActionProperties) assertEquals("Incorrect lock model job index name", INDEX_MANAGEMENT_INDEX, lockModel.jobIndexName) assertEquals("Incorrect lock model jobID", getShrinkJobID(shrinkActionProperties.nodeName), lockModel.jobId) @@ -47,25 +47,27 @@ class StepUtilsTests : OpenSearchTestCase() { } fun `test get updated shrink action properties`() { - val shrinkActionProperties = ShrinkActionProperties( - randomAlphaOfLength(10), - randomAlphaOfLength(10), - randomInt(), - randomNonNegativeLong(), - randomNonNegativeLong(), - randomInstant().toEpochMilli(), - randomInstant().toEpochMilli(), - mapOf() - ) - val lockModel = LockModel( - randomAlphaOfLength(10), - getShrinkLockID(shrinkActionProperties.nodeName), - randomInstant(), - randomInstant().toEpochMilli(), - false, - randomNonNegativeLong(), - randomNonNegativeLong() - ) + val shrinkActionProperties = + ShrinkActionProperties( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomInt(), + 
randomNonNegativeLong(), + randomNonNegativeLong(), + randomInstant().toEpochMilli(), + randomInstant().toEpochMilli(), + mapOf(), + ) + val lockModel = + LockModel( + randomAlphaOfLength(10), + getShrinkLockID(shrinkActionProperties.nodeName), + randomInstant(), + randomInstant().toEpochMilli(), + false, + randomNonNegativeLong(), + randomNonNegativeLong(), + ) val updatedProperties = getUpdatedShrinkActionProperties(shrinkActionProperties, lockModel) assertEquals("Node name should not have updated", updatedProperties.nodeName, shrinkActionProperties.nodeName) @@ -79,32 +81,34 @@ class StepUtilsTests : OpenSearchTestCase() { } fun `test get action start time`() { - val metadata = ManagedIndexMetaData( - "indexName", "indexUuid", "policy_id", null, null, null, null, null, null, null, - ActionMetaData("name", randomInstant().toEpochMilli(), 0, false, 0, null, null), null, null, null - ) + val metadata = + ManagedIndexMetaData( + "indexName", "indexUuid", "policy_id", null, null, null, null, null, null, null, + ActionMetaData("name", randomInstant().toEpochMilli(), 0, false, 0, null, null), null, null, null, + ) assertEquals("Action start time was not extracted correctly", metadata.actionMetaData?.startTime, getActionStartTime(metadata).toEpochMilli()) } fun `test get free bytes threshold high`() { val settings = Settings.builder() val nodeBytes = randomByteSizeValue().bytes - val expected: Long = if (randomBoolean()) { - val bytes = randomLongBetween(10, 100000000) - val highDisk = ByteSizeValue(bytes).stringRep - val lowDisk = ByteSizeValue(bytes + 1).stringRep - val floodDisk = ByteSizeValue(bytes - 1).stringRep - settings.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.key, highDisk) - settings.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.key, lowDisk) - settings.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.key, floodDisk) - bytes - } else { - val percentage = randomDoubleBetween(0.005, 0.995, false) - settings.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.key, "${percentage * 100}%") - settings.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.key, "${(percentage - 0.001) * 100}%") - settings.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.key, "${(percentage + 0.001) * 100}%") - (nodeBytes * (1 - percentage)).toLong() - } + val expected: Long = + if (randomBoolean()) { + val bytes = randomLongBetween(10, 100000000) + val highDisk = ByteSizeValue(bytes).stringRep + val lowDisk = ByteSizeValue(bytes + 1).stringRep + val floodDisk = ByteSizeValue(bytes - 1).stringRep + settings.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.key, highDisk) + settings.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.key, lowDisk) + settings.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.key, floodDisk) + bytes + } else { + val percentage = randomDoubleBetween(0.005, 0.995, false) + settings.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.key, "${percentage * 100}%") + settings.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.key, "${(percentage - 0.001) * 100}%") + settings.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.key, "${(percentage + 0.001) * 100}%") + (nodeBytes * (1 - 
percentage)).toLong() + } val clusterSettings = ClusterSettings(settings.build(), ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) val thresholdHigh = getFreeBytesThresholdHigh(clusterSettings, nodeBytes) assertEquals(expected, thresholdHigh) @@ -122,10 +126,11 @@ class StepUtilsTests : OpenSearchTestCase() { val threshold = randomLongBetween(0, totalBytes / 2) Mockito.`when`(path.free).thenReturn(ByteSizeValue(freeBytes)) Mockito.`when`(path.total).thenReturn(ByteSizeValue(totalBytes)) - val settings = Settings.builder() - .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.key, ByteSizeValue(threshold).stringRep) - .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.key, ByteSizeValue(threshold + 1).stringRep) - .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.key, ByteSizeValue(threshold - 1).stringRep) + val settings = + Settings.builder() + .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.key, ByteSizeValue(threshold).stringRep) + .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.key, ByteSizeValue(threshold + 1).stringRep) + .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.key, ByteSizeValue(threshold - 1).stringRep) val clusterSettings = ClusterSettings(settings.build(), ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) val remainingSpace = freeBytes - ((2 * indexSize) + threshold) if (remainingSpace > 0) { diff --git a/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerActionIT.kt index 3ecbe9cd1..0a60f1ae0 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerActionIT.kt @@ -42,9 +42,10 @@ class RefreshSearchAnalyzerActionIT : IndexManagementRestTestCase() { writeToFile("$buildDir/testclusters/integTest-$i/config/pacman_synonyms.txt", "hello, hola") } - val settings: Settings = Settings.builder() - .loadFromSource(getIndexAnalyzerSettings(), XContentType.JSON) - .build() + val settings: Settings = + Settings.builder() + .loadFromSource(getIndexAnalyzerSettings(), XContentType.JSON) + .build() createIndex(indexName, settings, getAnalyzerMapping()) ingestData(indexName) @@ -88,9 +89,10 @@ class RefreshSearchAnalyzerActionIT : IndexManagementRestTestCase() { writeToFile("$buildDir/testclusters/integTest-$i/config/pacman_synonyms.txt", "hello, hola") } - val settings: Settings = Settings.builder() - .loadFromSource(getSearchAnalyzerSettings(), XContentType.JSON) - .build() + val settings: Settings = + Settings.builder() + .loadFromSource(getSearchAnalyzerSettings(), XContentType.JSON) + .build() createIndex(indexName, settings, getAnalyzerMapping()) ingestData(indexName) @@ -136,9 +138,10 @@ class RefreshSearchAnalyzerActionIT : IndexManagementRestTestCase() { writeToFile("$buildDir/testclusters/integTest-$i/config/pacman_synonyms.txt", "hello, hola") } - val settings: Settings = Settings.builder() - .loadFromSource(getSearchAnalyzerSettings(), XContentType.JSON) - .build() + val settings: Settings = + Settings.builder() + .loadFromSource(getSearchAnalyzerSettings(), XContentType.JSON) + .build() createIndex(indexName, settings, getAnalyzerMapping(), aliasSettings) ingestData(indexName) @@ -184,11 +187,12 @@ class 
RefreshSearchAnalyzerActionIT : IndexManagementRestTestCase() { fun ingestData(indexName: String) { val request = Request("POST", "/$indexName/_doc?refresh=true") - val data: String = """ + val data: String = + """ { "title": "hello world..." } - """.trimIndent() + """.trimIndent() request.setJsonEntity(data) client().performRequest(request) } @@ -200,69 +204,70 @@ class RefreshSearchAnalyzerActionIT : IndexManagementRestTestCase() { } fun refreshAnalyzer(indexName: String) { - val request = Request( - "POST", - "$REFRESH_SEARCH_ANALYZER_BASE_URI/$indexName" - ) + val request = + Request( + "POST", + "$REFRESH_SEARCH_ANALYZER_BASE_URI/$indexName", + ) client().performRequest(request) } fun getSearchAnalyzerSettings(): String { return """ - { - "index" : { - "analysis" : { - "analyzer" : { - "my_synonyms" : { - "tokenizer" : "whitespace", - "filter" : ["synonym"] - } - }, - "filter" : { - "synonym" : { - "type" : "synonym_graph", - "synonyms_path" : "pacman_synonyms.txt", - "updateable" : true + { + "index" : { + "analysis" : { + "analyzer" : { + "my_synonyms" : { + "tokenizer" : "whitespace", + "filter" : ["synonym"] + } + }, + "filter" : { + "synonym" : { + "type" : "synonym_graph", + "synonyms_path" : "pacman_synonyms.txt", + "updateable" : true + } } } } } - } """.trimIndent() } fun getIndexAnalyzerSettings(): String { return """ - { - "index" : { - "analysis" : { - "analyzer" : { - "my_synonyms" : { - "tokenizer" : "whitespace", - "filter" : ["synonym"] - } - }, - "filter" : { - "synonym" : { - "type" : "synonym_graph", - "synonyms_path" : "pacman_synonyms.txt" + { + "index" : { + "analysis" : { + "analyzer" : { + "my_synonyms" : { + "tokenizer" : "whitespace", + "filter" : ["synonym"] + } + }, + "filter" : { + "synonym" : { + "type" : "synonym_graph", + "synonyms_path" : "pacman_synonyms.txt" + } } } } } - } """.trimIndent() } fun getAnalyzerMapping(): String { return """ - "properties": { - "title": { - "type": "text", - "analyzer" : "standard", - "search_analyzer": "my_synonyms" + "properties": { + "title": { + "type": "text", + "analyzer" : "standard", + "search_analyzer": "my_synonyms" + } } - } """.trimIndent() } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerResponseTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerResponseTests.kt index bfa880a5f..3f6c465f6 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerResponseTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerResponseTests.kt @@ -11,7 +11,6 @@ import org.opensearch.core.index.shard.ShardId import org.opensearch.test.OpenSearchTestCase class RefreshSearchAnalyzerResponseTests : OpenSearchTestCase() { - fun `test get successful refresh details`() { val index1 = "index1" val index2 = "index2" diff --git a/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerShardResponseTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerShardResponseTests.kt index 9c58fad81..5b10c3e16 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerShardResponseTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerShardResponseTests.kt @@ -12,7 +12,6 @@ import org.opensearch.core.index.shard.ShardId import org.opensearch.test.OpenSearchTestCase class RefreshSearchAnalyzerShardResponseTests : 
OpenSearchTestCase() { - fun `test shard refresh response parsing`() { val reloadedAnalyzers = listOf("analyzer1", "analyzer2") val refreshShardResponse = RefreshSearchAnalyzerShardResponse(ShardId(Index("testIndex", "qwerty"), 0), reloadedAnalyzers) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RestRefreshSearchAnalyzerActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RestRefreshSearchAnalyzerActionIT.kt index f523b7c6c..005754e30 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RestRefreshSearchAnalyzerActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RestRefreshSearchAnalyzerActionIT.kt @@ -8,14 +8,13 @@ package org.opensearch.indexmanagement.refreshanalyzer import org.junit.AfterClass import org.opensearch.client.ResponseException import org.opensearch.common.settings.Settings +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementRestTestCase import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.refreshanalyzer.RestRefreshSearchAnalyzerAction.Companion.REFRESH_SEARCH_ANALYZER_BASE_URI import org.opensearch.rest.RestRequest.Method.POST -import org.opensearch.core.rest.RestStatus class RestRefreshSearchAnalyzerActionIT : IndexManagementRestTestCase() { - companion object { @AfterClass @JvmStatic fun clearIndicesAfterClass() { @@ -30,16 +29,19 @@ class RestRefreshSearchAnalyzerActionIT : IndexManagementRestTestCase() { } catch (e: ResponseException) { assertEquals("Unexpected RestStatus", RestStatus.BAD_REQUEST, e.response.restStatus()) val actualMessage = e.response.asMap() - val expectedErrorMessage = mapOf( - "error" to mapOf( - "root_cause" to listOf>( - mapOf("type" to "illegal_argument_exception", "reason" to "Missing indices") - ), - "type" to "illegal_argument_exception", - "reason" to "Missing indices" - ), - "status" to 400 - ) + val expectedErrorMessage = + mapOf( + "error" to + mapOf( + "root_cause" to + listOf>( + mapOf("type" to "illegal_argument_exception", "reason" to "Missing indices"), + ), + "type" to "illegal_argument_exception", + "reason" to "Missing indices", + ), + "status" to 400, + ) assertEquals(expectedErrorMessage, actualMessage) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/RollupMapperServiceTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/RollupMapperServiceTests.kt index fdaa225aa..82ade6ae6 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/RollupMapperServiceTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/RollupMapperServiceTests.kt @@ -13,7 +13,6 @@ import com.nhaarman.mockitokotlin2.mock import com.nhaarman.mockitokotlin2.whenever import kotlinx.coroutines.runBlocking import org.mockito.ArgumentMatchers.anyBoolean -import org.opensearch.core.action.ActionListener import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse import org.opensearch.client.AdminClient import org.opensearch.client.Client @@ -22,40 +21,44 @@ import org.opensearch.cluster.metadata.IndexNameExpressionResolver import org.opensearch.cluster.metadata.MappingMetadata import org.opensearch.cluster.service.ClusterService import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.action.ActionListener import org.opensearch.indexmanagement.rollup.model.RollupJobValidationResult import org.opensearch.test.OpenSearchTestCase import java.time.Instant class RollupMapperServiceTests : 
OpenSearchTestCase() { - fun `test source index validation`() { val sourceIndex = "test-index" - val dimensions = listOf( - randomDateHistogram().copy( - sourceField = "order_date" + val dimensions = + listOf( + randomDateHistogram().copy( + sourceField = "order_date", + ), ) - ) - val metrics = listOf( - randomRollupMetrics().copy( - sourceField = "total_quantity" + val metrics = + listOf( + randomRollupMetrics().copy( + sourceField = "total_quantity", + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + dimensions = dimensions, + metrics = metrics, ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - dimensions = dimensions, - metrics = metrics - ) - val client = getClient( - getAdminClient( - getIndicesAdminClient( - getMappingsResponse = getMappingResponse(sourceIndex), - getMappingsException = null - ) + val client = + getClient( + getAdminClient( + getIndicesAdminClient( + getMappingsResponse = getMappingResponse(sourceIndex), + getMappingsException = null, + ), + ), ) - ) val clusterService = getClusterService() val indexNameExpressionResolver = getIndexNameExpressionResolver(listOf(sourceIndex)) @@ -70,31 +73,35 @@ class RollupMapperServiceTests : OpenSearchTestCase() { fun `test source index validation with custom type`() { val sourceIndex = "test-index" - val dimensions = listOf( - randomDateHistogram().copy( - sourceField = "order_date" + val dimensions = + listOf( + randomDateHistogram().copy( + sourceField = "order_date", + ), ) - ) - val metrics = listOf( - randomRollupMetrics().copy( - sourceField = "total_quantity" + val metrics = + listOf( + randomRollupMetrics().copy( + sourceField = "total_quantity", + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + dimensions = dimensions, + metrics = metrics, ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - dimensions = dimensions, - metrics = metrics - ) - val client = getClient( - getAdminClient( - getIndicesAdminClient( - getMappingsResponse = getMappingResponse(sourceIndex), - getMappingsException = null - ) + val client = + getClient( + getAdminClient( + getIndicesAdminClient( + getMappingsResponse = getMappingResponse(sourceIndex), + getMappingsException = null, + ), + ), ) - ) val clusterService = getClusterService() val indexNameExpressionResolver = getIndexNameExpressionResolver(listOf(sourceIndex)) val mapperService = RollupMapperService(client, clusterService, indexNameExpressionResolver) @@ -108,31 +115,35 @@ class RollupMapperServiceTests : OpenSearchTestCase() { fun `test source index validation with empty mappings`() { val sourceIndex = "test-index" - val dimensions = listOf( - randomDateHistogram().copy( - sourceField = "order_date" + val dimensions = + listOf( + randomDateHistogram().copy( + sourceField = "order_date", + ), ) - ) - val metrics = listOf( - randomRollupMetrics().copy( - sourceField = "total_quantity" + val metrics = + listOf( + randomRollupMetrics().copy( + sourceField = "total_quantity", + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + dimensions = dimensions, + metrics = metrics, ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - dimensions = dimensions, - metrics = metrics - ) - val client = getClient( - getAdminClient( - getIndicesAdminClient( - getMappingsResponse = getMappingResponse(sourceIndex, true), - 
getMappingsException = null - ) + val client = + getClient( + getAdminClient( + getIndicesAdminClient( + getMappingsResponse = getMappingResponse(sourceIndex, true), + getMappingsException = null, + ), + ), ) - ) val clusterService = getClusterService() val indexNameExpressionResolver = getIndexNameExpressionResolver(listOf(sourceIndex)) val mapperService = RollupMapperService(client, clusterService, indexNameExpressionResolver) @@ -146,31 +157,35 @@ class RollupMapperServiceTests : OpenSearchTestCase() { fun `test source index validation with subfield`() { val sourceIndex = "test-index" - val dimensions = listOf( - randomDateHistogram().copy( - sourceField = "category.keyword" + val dimensions = + listOf( + randomDateHistogram().copy( + sourceField = "category.keyword", + ), ) - ) - val metrics = listOf( - randomRollupMetrics().copy( - sourceField = "total_quantity" + val metrics = + listOf( + randomRollupMetrics().copy( + sourceField = "total_quantity", + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + dimensions = dimensions, + metrics = metrics, ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - dimensions = dimensions, - metrics = metrics - ) - val client = getClient( - getAdminClient( - getIndicesAdminClient( - getMappingsResponse = getMappingResponse(sourceIndex), - getMappingsException = null - ) + val client = + getClient( + getAdminClient( + getIndicesAdminClient( + getMappingsResponse = getMappingResponse(sourceIndex), + getMappingsException = null, + ), + ), ) - ) val clusterService = getClusterService() val indexNameExpressionResolver = getIndexNameExpressionResolver(listOf(sourceIndex)) @@ -185,31 +200,35 @@ class RollupMapperServiceTests : OpenSearchTestCase() { fun `test source index validation with nested field`() { val sourceIndex = "test-index" - val dimensions = listOf( - randomDateHistogram().copy( - sourceField = "order_date" + val dimensions = + listOf( + randomDateHistogram().copy( + sourceField = "order_date", + ), ) - ) - val metrics = listOf( - randomRollupMetrics().copy( - sourceField = "products.quantity" + val metrics = + listOf( + randomRollupMetrics().copy( + sourceField = "products.quantity", + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + dimensions = dimensions, + metrics = metrics, ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - dimensions = dimensions, - metrics = metrics - ) - val client = getClient( - getAdminClient( - getIndicesAdminClient( - getMappingsResponse = getMappingResponse(sourceIndex), - getMappingsException = null - ) + val client = + getClient( + getAdminClient( + getIndicesAdminClient( + getMappingsResponse = getMappingResponse(sourceIndex), + getMappingsException = null, + ), + ), ) - ) val clusterService = getClusterService() val indexNameExpressionResolver = getIndexNameExpressionResolver(listOf(sourceIndex)) @@ -224,26 +243,29 @@ class RollupMapperServiceTests : OpenSearchTestCase() { fun `test source index validation when field is not in mapping`() { val sourceIndex = "test-index" - val dimensions = listOf( - randomDateHistogram().copy( - sourceField = "nonexistent_field" + val dimensions = + listOf( + randomDateHistogram().copy( + sourceField = "nonexistent_field", + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + dimensions = dimensions, + metrics = emptyList(), ) - ) - val rollup = 
randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - dimensions = dimensions, - metrics = emptyList() - ) - val client = getClient( - getAdminClient( - getIndicesAdminClient( - getMappingsResponse = getMappingResponse(sourceIndex), - getMappingsException = null - ) + val client = + getClient( + getAdminClient( + getIndicesAdminClient( + getMappingsResponse = getMappingResponse(sourceIndex), + getMappingsException = null, + ), + ), ) - ) val clusterService = getClusterService() val indexNameExpressionResolver = getIndexNameExpressionResolver(listOf(sourceIndex)) @@ -267,18 +289,21 @@ class RollupMapperServiceTests : OpenSearchTestCase() { private fun getIndicesAdminClient( getMappingsResponse: GetMappingsResponse?, - getMappingsException: Exception? + getMappingsException: Exception?, ): IndicesAdminClient { assertTrue( "Must provide either a getMappingsResponse or getMappingsException", - (getMappingsResponse != null).xor(getMappingsException != null) + (getMappingsResponse != null).xor(getMappingsException != null), ) return mock { doAnswer { invocationOnMock -> val listener = invocationOnMock.getArgument>(1) - if (getMappingsResponse != null) listener.onResponse(getMappingsResponse) - else listener.onFailure(getMappingsException) + if (getMappingsResponse != null) { + listener.onResponse(getMappingsResponse) + } else { + listener.onFailure(getMappingsException) + } }.whenever(this.mock).getMappings(any(), any()) } } @@ -290,16 +315,18 @@ class RollupMapperServiceTests : OpenSearchTestCase() { mock { on { concreteIndexNames(any(), any(), anyBoolean(), anyVararg()) } doReturn concreteIndices.toTypedArray() } private fun getMappingResponse(indexName: String, emptyMapping: Boolean = false): GetMappingsResponse { - val mappings = if (emptyMapping) { - mapOf() - } else { - val mappingSourceMap = createParser( - XContentType.JSON.xContent(), - javaClass.classLoader.getResource("mappings/kibana-sample-data.json").readText() - ).map() - val mappingMetadata = MappingMetadata("_doc", mappingSourceMap) // it seems it still expects a type, i.e. _doc now - mapOf(indexName to mappingMetadata) - } + val mappings = + if (emptyMapping) { + mapOf() + } else { + val mappingSourceMap = + createParser( + XContentType.JSON.xContent(), + javaClass.classLoader.getResource("mappings/kibana-sample-data.json").readText(), + ).map() + val mappingMetadata = MappingMetadata("_doc", mappingSourceMap) // it seems it still expects a type, i.e. 
_doc now + mapOf(indexName to mappingMetadata) + } return GetMappingsResponse(mappings) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/RollupMetadataServiceTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/RollupMetadataServiceTests.kt index b181a9f4b..aa46ea53e 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/RollupMetadataServiceTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/RollupMetadataServiceTests.kt @@ -12,15 +12,15 @@ import com.nhaarman.mockitokotlin2.mock import com.nhaarman.mockitokotlin2.whenever import kotlinx.coroutines.runBlocking import org.junit.Before -import org.opensearch.core.action.ActionListener import org.opensearch.action.DocWriteResponse import org.opensearch.action.get.GetResponse import org.opensearch.action.index.IndexResponse import org.opensearch.action.search.SearchResponse import org.opensearch.client.Client -import org.opensearch.core.common.bytes.BytesReference import org.opensearch.common.document.DocumentField +import org.opensearch.core.action.ActionListener import org.opensearch.core.common.bytes.BytesArray +import org.opensearch.core.common.bytes.BytesReference import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.ToXContent import org.opensearch.indexmanagement.common.model.dimension.DateHistogram @@ -40,7 +40,6 @@ import java.time.temporal.ChronoUnit // TODO: Given the way these tests are mocking data, only entries that work with ZonedDateTime.parse // are being tested, should mock the data more generically to test all cases class RollupMetadataServiceTests : OpenSearchTestCase() { - private lateinit var xContentRegistry: NamedXContentRegistry @Before @@ -50,27 +49,30 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { } fun `test metadata for continuous rollup with minute calendar interval`() { - val dimensions = listOf( - randomCalendarDateHistogram().copy( - calendarInterval = "1m", - timezone = ZoneId.of(DateHistogram.UTC) - ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null, - continuous = true, - dimensions = dimensions - ) + val dimensions = + listOf( + randomCalendarDateHistogram().copy( + calendarInterval = "1m", + timezone = ZoneId.of(DateHistogram.UTC), + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + continuous = true, + dimensions = dimensions, + ) val firstDocTimestamp = "2020-10-02T05:01:15Z" - val client = getClient( - searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), - searchException = null, - indexResponse = getIndexResponse(), - indexException = null - ) + val client = + getClient( + searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), + searchException = null, + indexResponse = getIndexResponse(), + indexException = null, + ) val metadataService = RollupMetadataService(client, xContentRegistry) val expectedWindowStartTime = getInstant("2020-10-02T05:01:00Z") @@ -88,27 +90,30 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { } fun `test metadata for continuous rollup with hour calendar interval`() { - val dimensions = listOf( - randomCalendarDateHistogram().copy( - calendarInterval = "1h", - timezone = ZoneId.of(DateHistogram.UTC) - ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null, - continuous = true, - dimensions = dimensions - ) + val dimensions = 
+ listOf( + randomCalendarDateHistogram().copy( + calendarInterval = "1h", + timezone = ZoneId.of(DateHistogram.UTC), + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + continuous = true, + dimensions = dimensions, + ) val firstDocTimestamp = "2020-10-02T05:35:15Z" - val client = getClient( - searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), - searchException = null, - indexResponse = getIndexResponse(), - indexException = null - ) + val client = + getClient( + searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), + searchException = null, + indexResponse = getIndexResponse(), + indexException = null, + ) val metadataService = RollupMetadataService(client, xContentRegistry) val expectedWindowStartTime = getInstant("2020-10-02T05:00:00Z") @@ -126,27 +131,30 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { } fun `test metadata for continuous rollup with hour calendar interval and daylight savings time`() { - val dimensions = listOf( - randomCalendarDateHistogram().copy( - calendarInterval = "1h", - timezone = ZoneId.of("America/Los_Angeles") - ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null, - continuous = true, - dimensions = dimensions - ) + val dimensions = + listOf( + randomCalendarDateHistogram().copy( + calendarInterval = "1h", + timezone = ZoneId.of("America/Los_Angeles"), + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + continuous = true, + dimensions = dimensions, + ) val firstDocTimestamp = "2020-03-08T01:35:15-08:00" - val client = getClient( - searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), - searchException = null, - indexResponse = getIndexResponse(), - indexException = null - ) + val client = + getClient( + searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), + searchException = null, + indexResponse = getIndexResponse(), + indexException = null, + ) val metadataService = RollupMetadataService(client, xContentRegistry) val expectedWindowStartTime = localDateAtTimezone("2020-03-08T01:00:00", ZoneId.of("America/Los_Angeles")) @@ -165,27 +173,30 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { } fun `test metadata for continuous rollup with day calendar interval`() { - val dimensions = listOf( - randomCalendarDateHistogram().copy( - calendarInterval = "day", - timezone = ZoneId.of(DateHistogram.UTC) - ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null, - continuous = true, - dimensions = dimensions - ) + val dimensions = + listOf( + randomCalendarDateHistogram().copy( + calendarInterval = "day", + timezone = ZoneId.of(DateHistogram.UTC), + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + continuous = true, + dimensions = dimensions, + ) val firstDocTimestamp = "2020-10-02T05:35:15Z" - val client = getClient( - searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), - searchException = null, - indexResponse = getIndexResponse(), - indexException = null - ) + val client = + getClient( + searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), + searchException = null, + indexResponse = getIndexResponse(), + indexException = null, + ) val metadataService = RollupMetadataService(client, 
xContentRegistry) val expectedWindowStartTime = getInstant("2020-10-02T00:00:00Z") @@ -203,27 +214,30 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { } fun `test metadata for continuous rollup with day calendar interval for leap year`() { - val dimensions = listOf( - randomCalendarDateHistogram().copy( - calendarInterval = "1d", - timezone = ZoneId.of(DateHistogram.UTC) - ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null, - continuous = true, - dimensions = dimensions - ) + val dimensions = + listOf( + randomCalendarDateHistogram().copy( + calendarInterval = "1d", + timezone = ZoneId.of(DateHistogram.UTC), + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + continuous = true, + dimensions = dimensions, + ) val firstDocTimestamp = "2020-02-28T08:40:15Z" - val client = getClient( - searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), - searchException = null, - indexResponse = getIndexResponse(), - indexException = null - ) + val client = + getClient( + searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), + searchException = null, + indexResponse = getIndexResponse(), + indexException = null, + ) val metadataService = RollupMetadataService(client, xContentRegistry) val expectedWindowStartTime = getInstant("2020-02-28T00:00:00Z") @@ -241,27 +255,30 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { } fun `test metadata for continuous rollup with week calendar interval`() { - val dimensions = listOf( - randomCalendarDateHistogram().copy( - calendarInterval = "1w", - timezone = ZoneId.of(DateHistogram.UTC) - ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null, - continuous = true, - dimensions = dimensions - ) + val dimensions = + listOf( + randomCalendarDateHistogram().copy( + calendarInterval = "1w", + timezone = ZoneId.of(DateHistogram.UTC), + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + continuous = true, + dimensions = dimensions, + ) val firstDocTimestamp = "2020-03-22T08:40:15Z" // March 22, 2020, Sunday - val client = getClient( - searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), - searchException = null, - indexResponse = getIndexResponse(), - indexException = null - ) + val client = + getClient( + searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), + searchException = null, + indexResponse = getIndexResponse(), + indexException = null, + ) val metadataService = RollupMetadataService(client, xContentRegistry) // Since Monday is the beginning of the calendar week, the start time will be last Monday @@ -281,27 +298,30 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { } fun `test metadata for continuous rollup with month calendar interval`() { - val dimensions = listOf( - randomCalendarDateHistogram().copy( - calendarInterval = "1M", - timezone = ZoneId.of(DateHistogram.UTC) - ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null, - continuous = true, - dimensions = dimensions - ) + val dimensions = + listOf( + randomCalendarDateHistogram().copy( + calendarInterval = "1M", + timezone = ZoneId.of(DateHistogram.UTC), + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + 
continuous = true, + dimensions = dimensions, + ) val firstDocTimestamp = "2019-12-24T08:40:15Z" - val client = getClient( - searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), - searchException = null, - indexResponse = getIndexResponse(), - indexException = null - ) + val client = + getClient( + searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), + searchException = null, + indexResponse = getIndexResponse(), + indexException = null, + ) val metadataService = RollupMetadataService(client, xContentRegistry) val expectedWindowStartTime = getInstant("2019-12-01T00:00:00Z") @@ -319,27 +339,30 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { } fun `test metadata for continuous rollup with quarter calendar interval`() { - val dimensions = listOf( - randomCalendarDateHistogram().copy( - calendarInterval = "1q", - timezone = ZoneId.of(DateHistogram.UTC) - ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null, - continuous = true, - dimensions = dimensions - ) + val dimensions = + listOf( + randomCalendarDateHistogram().copy( + calendarInterval = "1q", + timezone = ZoneId.of(DateHistogram.UTC), + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + continuous = true, + dimensions = dimensions, + ) val firstDocTimestamp = "2020-04-24T08:40:15Z" - val client = getClient( - searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), - searchException = null, - indexResponse = getIndexResponse(), - indexException = null - ) + val client = + getClient( + searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), + searchException = null, + indexResponse = getIndexResponse(), + indexException = null, + ) val metadataService = RollupMetadataService(client, xContentRegistry) val expectedWindowStartTime = getInstant("2020-04-01T00:00:00Z") @@ -357,27 +380,30 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { } fun `test metadata for continuous rollup with year calendar interval`() { - val dimensions = listOf( - randomCalendarDateHistogram().copy( - calendarInterval = "1y", - timezone = ZoneId.of(DateHistogram.UTC) - ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null, - continuous = true, - dimensions = dimensions - ) + val dimensions = + listOf( + randomCalendarDateHistogram().copy( + calendarInterval = "1y", + timezone = ZoneId.of(DateHistogram.UTC), + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + continuous = true, + dimensions = dimensions, + ) val firstDocTimestamp = "2020-04-24T08:40:15Z" - val client = getClient( - searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), - searchException = null, - indexResponse = getIndexResponse(), - indexException = null - ) + val client = + getClient( + searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), + searchException = null, + indexResponse = getIndexResponse(), + indexException = null, + ) val metadataService = RollupMetadataService(client, xContentRegistry) val expectedWindowStartTime = getInstant("2020-01-01T00:00:00Z") @@ -395,27 +421,30 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { } fun `test metadata for continuous rollup with time offset for document`() { - val dimensions = listOf( - randomCalendarDateHistogram().copy( - calendarInterval 
= "1h", - timezone = ZoneId.of(DateHistogram.UTC) - ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null, - continuous = true, - dimensions = dimensions - ) + val dimensions = + listOf( + randomCalendarDateHistogram().copy( + calendarInterval = "1h", + timezone = ZoneId.of(DateHistogram.UTC), + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + continuous = true, + dimensions = dimensions, + ) val firstDocTimestamp = "2020-04-24T08:40:15-07:00" // UTC-07:00 for America/Los_Angeles - val client = getClient( - searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), - searchException = null, - indexResponse = getIndexResponse(), - indexException = null - ) + val client = + getClient( + searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), + searchException = null, + indexResponse = getIndexResponse(), + indexException = null, + ) val metadataService = RollupMetadataService(client, xContentRegistry) val expectedWindowStartTime = localDateAtTimezone("2020-04-24T08:00:00", ZoneId.of("America/Los_Angeles")) @@ -433,27 +462,30 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { } fun `test metadata for continuous rollup with timezone for date histogram`() { - val dimensions = listOf( - randomCalendarDateHistogram().copy( - calendarInterval = "1h", - timezone = ZoneId.of("America/Los_Angeles") - ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null, - continuous = true, - dimensions = dimensions - ) + val dimensions = + listOf( + randomCalendarDateHistogram().copy( + calendarInterval = "1h", + timezone = ZoneId.of("America/Los_Angeles"), + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + continuous = true, + dimensions = dimensions, + ) val firstDocTimestamp = "2020-04-24T08:40:15Z" - val client = getClient( - searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), - searchException = null, - indexResponse = getIndexResponse(), - indexException = null - ) + val client = + getClient( + searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), + searchException = null, + indexResponse = getIndexResponse(), + indexException = null, + ) val metadataService = RollupMetadataService(client, xContentRegistry) val expectedWindowStartTime = localDateAtTimezone("2020-04-24T01:00:00", ZoneId.of("America/Los_Angeles")) @@ -471,27 +503,30 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { } fun `test metadata for continuous rollup with hour fixed interval`() { - val dimensions = listOf( - randomFixedDateHistogram().copy( - fixedInterval = "3h", - timezone = ZoneId.of(DateHistogram.UTC) - ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null, - continuous = true, - dimensions = dimensions - ) + val dimensions = + listOf( + randomFixedDateHistogram().copy( + fixedInterval = "3h", + timezone = ZoneId.of(DateHistogram.UTC), + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + continuous = true, + dimensions = dimensions, + ) val firstDocTimestamp = "2020-04-24T22:40:15Z" - val client = getClient( - searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), - searchException = null, - indexResponse = getIndexResponse(), - 
indexException = null - ) + val client = + getClient( + searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), + searchException = null, + indexResponse = getIndexResponse(), + indexException = null, + ) val metadataService = RollupMetadataService(client, xContentRegistry) val expectedWindowStartTime = getInstant("2020-04-24T21:00:00Z") @@ -509,27 +544,30 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { } fun `test metadata for continuous rollup with hour fixed interval and daylight savings time`() { - val dimensions = listOf( - randomFixedDateHistogram().copy( - fixedInterval = "3h", - timezone = ZoneId.of("America/Los_Angeles") - ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null, - continuous = true, - dimensions = dimensions - ) + val dimensions = + listOf( + randomFixedDateHistogram().copy( + fixedInterval = "3h", + timezone = ZoneId.of("America/Los_Angeles"), + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + continuous = true, + dimensions = dimensions, + ) val firstDocTimestamp = "2020-03-08T00:40:15-08:00" - val client = getClient( - searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), - searchException = null, - indexResponse = getIndexResponse(), - indexException = null - ) + val client = + getClient( + searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), + searchException = null, + indexResponse = getIndexResponse(), + indexException = null, + ) val metadataService = RollupMetadataService(client, xContentRegistry) val expectedWindowStartTime = localDateAtTimezone("2020-03-08T00:00:00", ZoneId.of("America/Los_Angeles")) @@ -549,27 +587,30 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { } fun `test metadata for continuous rollup with day fixed interval and leap year`() { - val dimensions = listOf( - randomFixedDateHistogram().copy( - fixedInterval = "30d", - timezone = ZoneId.of(DateHistogram.UTC) - ) - ) - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null, - continuous = true, - dimensions = dimensions - ) + val dimensions = + listOf( + randomFixedDateHistogram().copy( + fixedInterval = "30d", + timezone = ZoneId.of(DateHistogram.UTC), + ), + ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + continuous = true, + dimensions = dimensions, + ) val firstDocTimestamp = "2020-02-01T22:40:15Z" - val client = getClient( - searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), - searchException = null, - indexResponse = getIndexResponse(), - indexException = null - ) + val client = + getClient( + searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), + searchException = null, + indexResponse = getIndexResponse(), + indexException = null, + ) val metadataService = RollupMetadataService(client, xContentRegistry) // 30 days (24 hours * 30) increments since epoch will land us on 2020-01-09 as the nearest bucket @@ -588,19 +629,21 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { } fun `test metadata init when getting existing metadata fails`() { - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = randomAlphaOfLength(10) - ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = randomAlphaOfLength(10), + 
) val getException = Exception("Test failure") - val client: Client = mock { - doAnswer { invocationOnMock -> - val listener = invocationOnMock.getArgument>(1) - listener.onFailure(getException) - }.whenever(this.mock).get(any(), any()) - } + val client: Client = + mock { + doAnswer { invocationOnMock -> + val listener = invocationOnMock.getArgument>(1) + listener.onFailure(getException) + }.whenever(this.mock).get(any(), any()) + } val metadataService = RollupMetadataService(client, xContentRegistry) runBlocking { @@ -612,20 +655,22 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { } fun `test metadata init when indexing new metadata fails`() { - val rollup = randomRollup().copy( - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null - ) + val rollup = + randomRollup().copy( + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + ) val indexException = Exception("Test failure") val firstDocTimestamp = Instant.now().toString() - val client = getClient( - searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), - searchException = null, - indexResponse = null, - indexException = indexException - ) + val client = + getClient( + searchResponse = getSearchResponseForTimestamp(rollup, firstDocTimestamp), + searchException = null, + indexResponse = null, + indexException = indexException, + ) val metadataService = RollupMetadataService(client, xContentRegistry) runBlocking { @@ -639,16 +684,17 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { // TODO: This test is failing with a thread leak error: "There are still zombie threads that couldn't be terminated" // May be due to the use of BytesArray as it didn't start until after that was added fun `skip test get existing metadata`() { - val metadata = RollupMetadata( - id = randomAlphaOfLength(10), - seqNo = 0, - primaryTerm = 1, - rollupID = randomAlphaOfLength(10), - // Truncating to seconds since not doing so causes milliseconds mismatch when comparing results - lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.SECONDS), - status = RollupMetadata.Status.INIT, - stats = RollupStats(0, 0, 0, 0, 0) - ) + val metadata = + RollupMetadata( + id = randomAlphaOfLength(10), + seqNo = 0, + primaryTerm = 1, + rollupID = randomAlphaOfLength(10), + // Truncating to seconds since not doing so causes milliseconds mismatch when comparing results + lastUpdatedTime = Instant.now().truncatedTo(ChronoUnit.SECONDS), + status = RollupMetadata.Status.INIT, + stats = RollupStats(0, 0, 0, 0, 0), + ) val getResponse: GetResponse = mock() val source: BytesReference = BytesArray(metadata.toJsonString(params = ToXContent.MapParams(mapOf(WITH_TYPE to "true")))) @@ -658,12 +704,13 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { whenever(getResponse.id).doReturn(metadata.id) whenever(getResponse.sourceAsBytesRef).doReturn(source) - val client: Client = mock { - doAnswer { invocationOnMock -> - val listener = invocationOnMock.getArgument>(1) - listener.onResponse(getResponse) - }.whenever(this.mock).get(any(), any()) - } + val client: Client = + mock { + doAnswer { invocationOnMock -> + val listener = invocationOnMock.getArgument>(1) + listener.onResponse(getResponse) + }.whenever(this.mock).get(any(), any()) + } RollupMetadataService(client, xContentRegistry) // runBlocking { @@ -680,19 +727,21 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { val getResponse: GetResponse = mock() whenever(getResponse.isExists).doReturn(true) - val client: Client = mock { - doAnswer { 
invocationOnMock -> - val listener = invocationOnMock.getArgument>(1) - listener.onResponse(getResponse) - }.whenever(this.mock).get(any(), any()) - } + val client: Client = + mock { + doAnswer { invocationOnMock -> + val listener = invocationOnMock.getArgument>(1) + listener.onResponse(getResponse) + }.whenever(this.mock).get(any(), any()) + } val metadataService = RollupMetadataService(client, xContentRegistry) runBlocking { - val getExistingMetadataResult = metadataService.getExistingMetadata( - randomRollup() - .copy(id = randomAlphaOfLength(10), metadataID = randomAlphaOfLength(10)) - ) + val getExistingMetadataResult = + metadataService.getExistingMetadata( + randomRollup() + .copy(id = randomAlphaOfLength(10), metadataID = randomAlphaOfLength(10)), + ) require(getExistingMetadataResult is MetadataResult.NoMetadata) { "Getting existing metadata returned unexpected results" } @@ -704,19 +753,21 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { fun `test get existing metadata fails`() { val getException = Exception("Test failure") - val client: Client = mock { - doAnswer { invocationOnMock -> - val listener = invocationOnMock.getArgument>(1) - listener.onFailure(getException) - }.whenever(this.mock).get(any(), any()) - } + val client: Client = + mock { + doAnswer { invocationOnMock -> + val listener = invocationOnMock.getArgument>(1) + listener.onFailure(getException) + }.whenever(this.mock).get(any(), any()) + } val metadataService = RollupMetadataService(client, xContentRegistry) runBlocking { - val getExistingMetadataResult = metadataService.getExistingMetadata( - randomRollup() - .copy(id = randomAlphaOfLength(10), metadataID = randomAlphaOfLength(10)) - ) + val getExistingMetadataResult = + metadataService.getExistingMetadata( + randomRollup() + .copy(id = randomAlphaOfLength(10), metadataID = randomAlphaOfLength(10)), + ) require(getExistingMetadataResult is MetadataResult.Failure) { "Getting existing metadata returned unexpected results" } @@ -761,21 +812,27 @@ class RollupMetadataServiceTests : OpenSearchTestCase() { searchResponse: SearchResponse?, searchException: Exception?, indexResponse: IndexResponse?, - indexException: Exception? 
+ indexException: Exception?, ): Client { assertTrue("Must provide either a searchResponse or searchException", (searchResponse != null).xor(searchException != null)) assertTrue("Must provide either an indexResponse or indexException", (indexResponse != null).xor(indexException != null)) return mock { doAnswer { invocationOnMock -> val listener = invocationOnMock.getArgument>(1) - if (searchResponse != null) listener.onResponse(searchResponse) - else listener.onFailure(searchException) + if (searchResponse != null) { + listener.onResponse(searchResponse) + } else { + listener.onFailure(searchException) + } }.whenever(this.mock).search(any(), any()) doAnswer { invocationOnMock -> val listener = invocationOnMock.getArgument>(1) - if (indexResponse != null) listener.onResponse(indexResponse) - else listener.onFailure(indexException) + if (indexResponse != null) { + listener.onResponse(indexResponse) + } else { + listener.onFailure(indexException) + } }.whenever(this.mock).index(any(), any()) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/RollupRestTestCase.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/RollupRestTestCase.kt index 1fb7df7fe..a1fec9755 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/RollupRestTestCase.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/RollupRestTestCase.kt @@ -5,24 +5,25 @@ package org.opensearch.indexmanagement.rollup -import org.junit.After +import org.apache.hc.core5.http.ContentType import org.apache.hc.core5.http.HttpEntity import org.apache.hc.core5.http.HttpHeaders -import org.apache.hc.core5.http.ContentType import org.apache.hc.core5.http.HttpStatus import org.apache.hc.core5.http.io.entity.StringEntity import org.apache.hc.core5.http.message.BasicHeader +import org.junit.After import org.junit.AfterClass import org.opensearch.client.Request import org.opensearch.client.Response import org.opensearch.client.RestClient import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.XContentType +import org.opensearch.common.xcontent.json.JsonXContent +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.XContentParser.Token import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.common.xcontent.XContentType -import org.opensearch.common.xcontent.json.JsonXContent import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.ROLLUP_JOBS_BASE_URI @@ -36,11 +37,9 @@ import org.opensearch.indexmanagement.util._ID import org.opensearch.indexmanagement.util._PRIMARY_TERM import org.opensearch.indexmanagement.util._SEQ_NO import org.opensearch.indexmanagement.waitFor -import org.opensearch.core.rest.RestStatus import java.time.Instant abstract class RollupRestTestCase : IndexManagementRestTestCase() { - companion object { @AfterClass @JvmStatic fun clearIndicesAfterClass() { @@ -91,19 +90,20 @@ abstract class RollupRestTestCase : IndexManagementRestTestCase() { rollup: Rollup, rollupId: String, refresh: Boolean = true, - client: RestClient? = null + client: RestClient? 
= null, ): Rollup { val response = createRollupJson(rollup.toJsonString(), rollupId, refresh, client) - val rollupJson = JsonXContent.jsonXContent - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, response.entity.content) - .map() + val rollupJson = + JsonXContent.jsonXContent + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, response.entity.content) + .map() val createdId = rollupJson["_id"] as String assertEquals("Rollup ids are not the same", rollupId, createdId) return rollup.copy( id = createdId, seqNo = (rollupJson["_seq_no"] as Int).toLong(), - primaryTerm = (rollupJson["_primary_term"] as Int).toLong() + primaryTerm = (rollupJson["_primary_term"] as Int).toLong(), ) } @@ -111,16 +111,17 @@ abstract class RollupRestTestCase : IndexManagementRestTestCase() { rollupString: String, rollupId: String, refresh: Boolean = true, - userClient: RestClient? = null + userClient: RestClient? = null, ): Response { val client = userClient ?: client() - val response = client - .makeRequest( - "PUT", - "$ROLLUP_JOBS_BASE_URI/$rollupId?refresh=$refresh", - emptyMap(), - StringEntity(rollupString, ContentType.APPLICATION_JSON) - ) + val response = + client + .makeRequest( + "PUT", + "$ROLLUP_JOBS_BASE_URI/$rollupId?refresh=$refresh", + emptyMap(), + StringEntity(rollupString, ContentType.APPLICATION_JSON), + ) assertEquals("Unable to create a new rollup", RestStatus.CREATED, response.restStatus()) return response } @@ -136,11 +137,12 @@ abstract class RollupRestTestCase : IndexManagementRestTestCase() { var mappingString = "" var addCommaPrefix = false rollup.dimensions.forEach { - val fieldType = when (it.type) { - Dimension.Type.DATE_HISTOGRAM -> "date" - Dimension.Type.HISTOGRAM -> "long" - Dimension.Type.TERMS -> "keyword" - } + val fieldType = + when (it.type) { + Dimension.Type.DATE_HISTOGRAM -> "date" + Dimension.Type.HISTOGRAM -> "long" + Dimension.Type.TERMS -> "keyword" + } val string = "${if (addCommaPrefix) "," else ""}\"${it.sourceField}\":{\"type\": \"$fieldType\"}" addCommaPrefix = true mappingString += string @@ -160,15 +162,17 @@ abstract class RollupRestTestCase : IndexManagementRestTestCase() { protected fun putDateDocumentInSourceIndex(rollup: Rollup) { val dateHistogram = rollup.dimensions.first() - val request = """ + val request = + """ { "${dateHistogram.sourceField}" : "${Instant.now()}" } - """.trimIndent() - val response = client().makeRequest( - "POST", "${rollup.sourceIndex}/_doc?refresh=true", - emptyMap(), StringEntity(request, ContentType.APPLICATION_JSON) - ) + """.trimIndent() + val response = + client().makeRequest( + "POST", "${rollup.sourceIndex}/_doc?refresh=true", + emptyMap(), StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Request failed", RestStatus.CREATED, response.restStatus()) } @@ -249,22 +253,25 @@ abstract class RollupRestTestCase : IndexManagementRestTestCase() { protected fun updateSearchAllJobsClusterSetting(value: Boolean) { val formattedValue = "\"${value}\"" - val request = """ + val request = + """ { "persistent": { "${RollupSettings.ROLLUP_SEARCH_ALL_JOBS.key}": $formattedValue } } - """.trimIndent() - val res = client().makeRequest( - "PUT", "_cluster/settings", emptyMap(), - StringEntity(request, ContentType.APPLICATION_JSON) - ) + """.trimIndent() + val res = + client().makeRequest( + "PUT", "_cluster/settings", emptyMap(), + StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Request failed", RestStatus.OK, res.restStatus()) } protected 
fun createSampleIndexForQSQTest(index: String) { - val mapping = """ + val mapping = + """ "properties": { "event_ts": { "type": "date" @@ -299,11 +306,12 @@ abstract class RollupRestTestCase : IndexManagementRestTestCase() { } } - """.trimIndent() + """.trimIndent() createIndex(index, Settings.EMPTY, mapping) for (i in 1..5) { - val doc = """ + val doc = + """ { "event_ts": "2019-01-01T12:10:30Z", "test.fff": "12345", @@ -315,11 +323,12 @@ abstract class RollupRestTestCase : IndexManagementRestTestCase() { "state_ordinal": ${i % 3}, "earnings": $i } - """.trimIndent() + """.trimIndent() indexDoc(index, "$i", doc) } for (i in 6..8) { - val doc = """ + val doc = + """ { "event_ts": "2019-01-01T12:10:30Z", "state": "TA", @@ -329,11 +338,12 @@ abstract class RollupRestTestCase : IndexManagementRestTestCase() { "abc test": 123, "earnings": $i } - """.trimIndent() + """.trimIndent() indexDoc(index, "$i", doc) } for (i in 9..11) { - val doc = """ + val doc = + """ { "event_ts": "2019-01-02T12:10:30Z", "state": "CA", @@ -343,7 +353,7 @@ abstract class RollupRestTestCase : IndexManagementRestTestCase() { "abc test": 123, "earnings": $i } - """.trimIndent() + """.trimIndent() indexDoc(index, "$i", doc) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/TestHelpers.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/TestHelpers.kt index 8d650714c..f914b5a72 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/TestHelpers.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/TestHelpers.kt @@ -5,8 +5,8 @@ package org.opensearch.indexmanagement.rollup -import org.opensearch.core.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory +import org.opensearch.core.xcontent.ToXContent import org.opensearch.index.query.TermQueryBuilder import org.opensearch.indexmanagement.common.model.dimension.DateHistogram import org.opensearch.indexmanagement.common.model.dimension.Dimension @@ -44,22 +44,26 @@ fun randomCalendarInterval(): String = "1d" @Suppress("FunctionOnlyReturningConstant") fun randomFixedInterval(): String = "30m" -fun randomFixedDateHistogram(): DateHistogram = OpenSearchRestTestCase.randomAlphaOfLength(10).let { - DateHistogram(sourceField = it, targetField = it, fixedInterval = randomFixedInterval(), calendarInterval = null, timezone = OpenSearchRestTestCase.randomZone()) -} -fun randomCalendarDateHistogram(): DateHistogram = OpenSearchRestTestCase.randomAlphaOfLength(10).let { - DateHistogram(sourceField = it, targetField = it, fixedInterval = null, calendarInterval = randomCalendarInterval(), timezone = OpenSearchRestTestCase.randomZone()) -} +fun randomFixedDateHistogram(): DateHistogram = + OpenSearchRestTestCase.randomAlphaOfLength(10).let { + DateHistogram(sourceField = it, targetField = it, fixedInterval = randomFixedInterval(), calendarInterval = null, timezone = OpenSearchRestTestCase.randomZone()) + } + +fun randomCalendarDateHistogram(): DateHistogram = + OpenSearchRestTestCase.randomAlphaOfLength(10).let { + DateHistogram(sourceField = it, targetField = it, fixedInterval = null, calendarInterval = randomCalendarInterval(), timezone = OpenSearchRestTestCase.randomZone()) + } fun randomDateHistogram(): DateHistogram = if (OpenSearchRestTestCase.randomBoolean()) randomFixedDateHistogram() else randomCalendarDateHistogram() -fun randomHistogram(): Histogram = OpenSearchRestTestCase.randomAlphaOfLength(10).let { - Histogram( - sourceField = it, - targetField = it, - interval = 
OpenSearchRestTestCase.randomDoubleBetween(0.0, Double.MAX_VALUE, false) // start, end, lowerInclusive - ) -} +fun randomHistogram(): Histogram = + OpenSearchRestTestCase.randomAlphaOfLength(10).let { + Histogram( + sourceField = it, + targetField = it, + interval = OpenSearchRestTestCase.randomDoubleBetween(0.0, Double.MAX_VALUE, false), // start, end, lowerInclusive + ) + } fun randomTerms(): Terms = OpenSearchRestTestCase.randomAlphaOfLength(10).let { Terms(sourceField = it, targetField = it) } @@ -81,9 +85,10 @@ fun randomMetric(): Metric = fun randomMetrics(): List = OpenSearchRestTestCase.randomList(1, metrics.size, ::randomMetric).distinctBy { it.type } -fun randomRollupMetrics(): RollupMetrics = OpenSearchRestTestCase.randomAlphaOfLength(10).let { - RollupMetrics(sourceField = it, targetField = it, metrics = randomMetrics()) -} +fun randomRollupMetrics(): RollupMetrics = + OpenSearchRestTestCase.randomAlphaOfLength(10).let { + RollupMetrics(sourceField = it, targetField = it, metrics = randomMetrics()) + } fun randomRollupDimensions(): List { val dimensions = mutableListOf(randomDateHistogram()) @@ -114,7 +119,7 @@ fun randomRollup(): Rollup { continuous = OpenSearchRestTestCase.randomBoolean(), dimensions = randomRollupDimensions(), metrics = OpenSearchRestTestCase.randomList(20, ::randomRollupMetrics).distinctBy { it.targetField }, - user = randomUser() + user = randomUser(), ) } @@ -124,7 +129,7 @@ fun randomRollupStats(): RollupStats { documentsProcessed = OpenSearchRestTestCase.randomNonNegativeLong(), rollupsIndexed = OpenSearchRestTestCase.randomNonNegativeLong(), indexTimeInMillis = OpenSearchRestTestCase.randomNonNegativeLong(), - searchTimeInMillis = OpenSearchRestTestCase.randomNonNegativeLong() + searchTimeInMillis = OpenSearchRestTestCase.randomNonNegativeLong(), ) } @@ -137,7 +142,7 @@ fun randomContinuousMetadata(): ContinuousMetadata { val two = randomInstant() return ContinuousMetadata( nextWindowEndTime = if (one.isAfter(two)) one else two, - nextWindowStartTime = if (one.isAfter(two)) two else one + nextWindowStartTime = if (one.isAfter(two)) two else one, ) } @@ -161,7 +166,7 @@ fun randomRollupMetadata(): RollupMetadata { continuous = randomContinuousMetadata(), status = status, failureReason = if (status == RollupMetadata.Status.FAILED) OpenSearchRestTestCase.randomAlphaOfLength(10) else null, - stats = randomRollupStats() + stats = randomRollupStats(), ) } @@ -176,7 +181,7 @@ fun randomISMRollup(): ISMRollup { targetIndex = OpenSearchRestTestCase.randomAlphaOfLength(10).lowercase(Locale.ROOT), pageSize = OpenSearchRestTestCase.randomIntBetween(1, 10000), dimensions = randomRollupDimensions(), - metrics = OpenSearchRestTestCase.randomList(20, ::randomRollupMetrics).distinctBy { it.targetField } + metrics = OpenSearchRestTestCase.randomList(20, ::randomRollupMetrics).distinctBy { it.targetField }, ) } @@ -189,7 +194,7 @@ fun randomISMFieldCapabilities(): ISMFieldCapabilities { indices = OpenSearchRestTestCase.generateRandomStringArray(10, 10, true, true), nonSearchableIndices = OpenSearchRestTestCase.generateRandomStringArray(10, 10, true, true), nonAggregatableIndices = OpenSearchRestTestCase.generateRandomStringArray(10, 10, true, true), - meta = mapOf(OpenSearchRestTestCase.randomAlphaOfLength(10) to setOf(OpenSearchRestTestCase.randomAlphaOfLength(10))) + meta = mapOf(OpenSearchRestTestCase.randomAlphaOfLength(10) to setOf(OpenSearchRestTestCase.randomAlphaOfLength(10))), ) } @@ -199,7 +204,7 @@ fun randomISMIndexFieldCapabilities(): 
ISMIndexFieldCapabilities { type = OpenSearchRestTestCase.randomAlphaOfLength(10), isSearchable = OpenSearchRestTestCase.randomBoolean(), isAggregatable = OpenSearchRestTestCase.randomBoolean(), - meta = mapOf(OpenSearchRestTestCase.randomAlphaOfLength(10) to OpenSearchRestTestCase.randomAlphaOfLength(10)) + meta = mapOf(OpenSearchRestTestCase.randomAlphaOfLength(10) to OpenSearchRestTestCase.randomAlphaOfLength(10)), ) } @@ -207,7 +212,7 @@ fun randomISMFieldCapabilitiesIndexResponse(): ISMFieldCapabilitiesIndexResponse return ISMFieldCapabilitiesIndexResponse( indexName = OpenSearchRestTestCase.randomAlphaOfLength(10), responseMap = mapOf(OpenSearchRestTestCase.randomAlphaOfLength(10) to randomISMIndexFieldCapabilities()), - canMatch = OpenSearchRestTestCase.randomBoolean() + canMatch = OpenSearchRestTestCase.randomBoolean(), ) } @@ -215,7 +220,7 @@ fun randomISMFieldCaps(): ISMFieldCapabilitiesResponse { return ISMFieldCapabilitiesResponse( indices = OpenSearchRestTestCase.generateRandomStringArray(10, 10, false), responseMap = mapOf(OpenSearchRestTestCase.randomAlphaOfLength(10) to mapOf(OpenSearchRestTestCase.randomAlphaOfLength(10) to randomISMFieldCapabilities())), - indexResponses = OpenSearchRestTestCase.randomList(4, ::randomISMFieldCapabilitiesIndexResponse) + indexResponses = OpenSearchRestTestCase.randomList(4, ::randomISMFieldCapabilitiesIndexResponse), ) } @@ -224,7 +229,9 @@ fun randomDimension(): Dimension { return OpenSearchRestTestCase.randomSubsetOf(1, dimensions).first() } -fun randomTermQuery(): TermQueryBuilder { return TermQueryBuilder(OpenSearchRestTestCase.randomAlphaOfLength(5), OpenSearchRestTestCase.randomAlphaOfLength(5)) } +fun randomTermQuery(): TermQueryBuilder { + return TermQueryBuilder(OpenSearchRestTestCase.randomAlphaOfLength(5), OpenSearchRestTestCase.randomAlphaOfLength(5)) +} fun DateHistogram.toJsonString(): String = this.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS).string() diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/action/ActionTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/action/ActionTests.kt index a351618b9..e3fd81463 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/action/ActionTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/action/ActionTests.kt @@ -14,7 +14,6 @@ import org.opensearch.indexmanagement.rollup.action.stop.StopRollupAction import org.opensearch.test.OpenSearchTestCase class ActionTests : OpenSearchTestCase() { - fun `test delete action name`() { assertNotNull(DeleteRollupAction.INSTANCE.name()) assertEquals(DeleteRollupAction.INSTANCE.name(), DeleteRollupAction.NAME) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/action/RequestTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/action/RequestTests.kt index 6389e6f91..3786a20f2 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/action/RequestTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/action/RequestTests.kt @@ -24,7 +24,6 @@ import org.opensearch.search.fetch.subphase.FetchSourceContext import org.opensearch.test.OpenSearchTestCase class RequestTests : OpenSearchTestCase() { - fun `test delete rollup request`() { val id = "some_id" val req = DeleteRollupRequest(id).index(INDEX_MANAGEMENT_INDEX) @@ -99,10 +98,11 @@ class RequestTests : OpenSearchTestCase() { fun `test index rollup post request`() { val rollup = randomRollup().copy(seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm = 
SequenceNumbers.UNASSIGNED_PRIMARY_TERM) - val req = IndexRollupRequest( - rollup = rollup, - refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE - ).index(INDEX_MANAGEMENT_INDEX) + val req = + IndexRollupRequest( + rollup = rollup, + refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE, + ).index(INDEX_MANAGEMENT_INDEX) val out = BytesStreamOutput().apply { req.writeTo(this) } val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) @@ -116,10 +116,11 @@ class RequestTests : OpenSearchTestCase() { fun `test index rollup put request`() { val rollup = randomRollup().copy(seqNo = 1L, primaryTerm = 2L) - val req = IndexRollupRequest( - rollup = rollup, - refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE - ).index(INDEX_MANAGEMENT_INDEX) + val req = + IndexRollupRequest( + rollup = rollup, + refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE, + ).index(INDEX_MANAGEMENT_INDEX) val out = BytesStreamOutput().apply { req.writeTo(this) } val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/action/ResponseTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/action/ResponseTests.kt index 19508dd28..51db9cb0e 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/action/ResponseTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/action/ResponseTests.kt @@ -7,17 +7,16 @@ package org.opensearch.indexmanagement.rollup.action import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.rollup.action.explain.ExplainRollupResponse import org.opensearch.indexmanagement.rollup.action.get.GetRollupResponse import org.opensearch.indexmanagement.rollup.action.get.GetRollupsResponse import org.opensearch.indexmanagement.rollup.action.index.IndexRollupResponse import org.opensearch.indexmanagement.rollup.randomExplainRollup import org.opensearch.indexmanagement.rollup.randomRollup -import org.opensearch.core.rest.RestStatus import org.opensearch.test.OpenSearchTestCase class ResponseTests : OpenSearchTestCase() { - fun `test explain rollup response`() { val idsToExplain = randomList(10) { randomAlphaOfLength(10) to randomExplainRollup() }.toMap() val res = ExplainRollupResponse(idsToExplain) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/FieldCapsFilterIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/FieldCapsFilterIT.kt index 3566c367d..1c4057dbe 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/FieldCapsFilterIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/FieldCapsFilterIT.kt @@ -7,15 +7,14 @@ package org.opensearch.indexmanagement.rollup.actionfilter import org.opensearch.client.ResponseException import org.opensearch.common.settings.Settings +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.rollup.RollupRestTestCase import org.opensearch.indexmanagement.rollup.settings.RollupSettings -import org.opensearch.core.rest.RestStatus // TODO: Add assertions on fields @Suppress("UNCHECKED_CAST") class FieldCapsFilterIT : RollupRestTestCase() { - fun `test field caps interception`() { createIndex("raw-data", Settings.EMPTY, 
""""properties":{"field-1":{"type":"boolean"},"field-2":{"type":"integer"},"field-3":{"type":"float"},"field-4":{"type":"keyword"},"field-5":{"type":"date","format":"yyyy-MM-dd HH:mm:ss"},"field-6":{"type":"text","fields":{"field-6-1":{"type":"keyword"}}},"field-7":{"properties":{"field-7-1":{"type":"geo_point"}}}}""") createIndex("rollup-data", Settings.builder().put(RollupSettings.ROLLUP_INDEX.key, true).build(), """"properties":{"field-1":{"type":"keyword"}}""") diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/FieldCapsFilterTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/FieldCapsFilterTests.kt index 49f5e2e14..eddd6bfd9 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/FieldCapsFilterTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/FieldCapsFilterTests.kt @@ -55,8 +55,9 @@ class FieldCapsFilterTests : OpenSearchTestCase() { fun `test rewrite unmerged response discarding existing response`() { val fieldCapFilter = FieldCapsFilter(clusterService, settings, indexNameExpressionResolver) val originalIsmResponse = ISMFieldCapabilitiesResponse(arrayOf(), mapOf(), listOf(randomISMFieldCapabilitiesIndexResponse())) - val rewrittenResponse = fieldCapFilter.rewriteResponse(originalIsmResponse.toFieldCapabilitiesResponse(), setOf(rollupIndex), true) as - FieldCapabilitiesResponse + val rewrittenResponse = + fieldCapFilter.rewriteResponse(originalIsmResponse.toFieldCapabilitiesResponse(), setOf(rollupIndex), true) as + FieldCapabilitiesResponse val rewrittenIsmResponse = ISMFieldCapabilitiesResponse.fromFieldCapabilitiesResponse(rewrittenResponse) assertEquals("Expected merged response to be empty, indices not empty", 0, rewrittenResponse.indices.size) assertEquals("Expected merged response to be empty, map is empty", 0, rewrittenResponse.get().size) @@ -67,8 +68,9 @@ class FieldCapsFilterTests : OpenSearchTestCase() { val fieldCapFilter = FieldCapsFilter(clusterService, settings, indexNameExpressionResolver) val ismResponse = randomISMFieldCaps() val originalIsmResponse = ISMFieldCapabilitiesResponse(ismResponse.indices, ismResponse.responseMap, listOf()) - val rewrittenResponse = fieldCapFilter.rewriteResponse(originalIsmResponse.toFieldCapabilitiesResponse(), setOf(rollupIndex), true) as - FieldCapabilitiesResponse + val rewrittenResponse = + fieldCapFilter.rewriteResponse(originalIsmResponse.toFieldCapabilitiesResponse(), setOf(rollupIndex), true) as + FieldCapabilitiesResponse val rewrittenIsmResponse = ISMFieldCapabilitiesResponse.fromFieldCapabilitiesResponse(rewrittenResponse) assertTrue("Expected unmerged response to be empty", rewrittenIsmResponse.indexResponses.isEmpty()) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/SerDeTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/SerDeTests.kt index cdf6f637f..38a331f9f 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/SerDeTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/actionfilter/SerDeTests.kt @@ -10,7 +10,6 @@ import org.opensearch.indexmanagement.rollup.randomISMFieldCaps import org.opensearch.test.OpenSearchTestCase class SerDeTests : OpenSearchTestCase() { - fun `test round trip empty`() { val fieldCaps = FieldCapabilitiesResponse(arrayOf(), mapOf()) val roundTripFromFieldCaps = ISMFieldCapabilitiesResponse.fromFieldCapabilitiesResponse(fieldCaps).toFieldCapabilitiesResponse() diff --git 
a/src/test/kotlin/org/opensearch/indexmanagement/rollup/interceptor/RollupInterceptorIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/interceptor/RollupInterceptorIT.kt index 9d38220cd..b87fe55ae 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/interceptor/RollupInterceptorIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/interceptor/RollupInterceptorIT.kt @@ -9,6 +9,7 @@ import org.apache.hc.core5.http.ContentType import org.apache.hc.core5.http.io.entity.StringEntity import org.junit.Assert import org.opensearch.client.ResponseException +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.common.model.dimension.DateHistogram import org.opensearch.indexmanagement.common.model.dimension.Terms import org.opensearch.indexmanagement.makeRequest @@ -23,46 +24,48 @@ import org.opensearch.indexmanagement.rollup.model.metric.Sum import org.opensearch.indexmanagement.rollup.model.metric.ValueCount import org.opensearch.indexmanagement.waitFor import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule -import org.opensearch.core.rest.RestStatus import java.time.Instant import java.time.temporal.ChronoUnit @Suppress("UNCHECKED_CAST") class RollupInterceptorIT : RollupRestTestCase() { - fun `test roll up search`() { generateNYCTaxiData("source_rollup_search") - val rollup = Rollup( - id = "basic_term_query_rollup_search", - enabled = true, - schemaVersion = 1L, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = "source_rollup_search", - targetIndex = "target_rollup_search", - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", targetField = "passenger_count", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) + val rollup = + Rollup( + id = "basic_term_query_rollup_search", + enabled = true, + schemaVersion = 1L, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = "source_rollup_search", + targetIndex = "target_rollup_search", + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), ), - RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())) - ) - ).let { createRollup(it, it.id) } + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", targetField = "passenger_count", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollup) @@ -76,7 +79,8 @@ class RollupInterceptorIT : RollupRestTestCase() { refreshAllIndices() // Term query - var req = """ + var req = + """ { "size": 0, "query": { @@ -92,7 +96,7 @@ class RollupInterceptorIT : 
RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() var rawRes = client().makeRequest("POST", "/source_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) var rollupRes = client().makeRequest("POST", "/target_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -102,11 +106,12 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("min_passenger_count")["value"], - rollupAggRes.getValue("min_passenger_count")["value"] + rollupAggRes.getValue("min_passenger_count")["value"], ) // Terms query - req = """ + req = + """ { "size": 0, "query": { @@ -122,7 +127,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", "/source_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", "/target_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -132,11 +137,12 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("min_passenger_count")["value"], - rollupAggRes.getValue("min_passenger_count")["value"] + rollupAggRes.getValue("min_passenger_count")["value"], ) // Range query - req = """ + req = + """ { "size": 0, "query": { @@ -152,7 +158,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", "/source_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", "/target_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -162,11 +168,12 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("min_passenger_count")["value"], - rollupAggRes.getValue("min_passenger_count")["value"] + rollupAggRes.getValue("min_passenger_count")["value"], ) // Bool query - req = """ + req = + """ { "size": 0, "query": { @@ -185,7 +192,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", "/source_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", "/target_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -195,11 +202,12 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("min_passenger_count")["value"], - rollupAggRes.getValue("min_passenger_count")["value"] + rollupAggRes.getValue("min_passenger_count")["value"], ) // Boost query - req = """ + req = + """ { "size": 0, "query": { @@ -217,7 +225,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", "/source_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", 
"/target_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -227,11 +235,12 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("min_passenger_count")["value"], - rollupAggRes.getValue("min_passenger_count")["value"] + rollupAggRes.getValue("min_passenger_count")["value"], ) // Const score query - req = """ + req = + """ { "size": 0, "query": { @@ -248,7 +257,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", "/source_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", "/target_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -258,11 +267,12 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("min_passenger_count")["value"], - rollupAggRes.getValue("min_passenger_count")["value"] + rollupAggRes.getValue("min_passenger_count")["value"], ) // Dis max query - req = """ + req = + """ { "size": 0, "query": { @@ -282,7 +292,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", "/source_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", "/target_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -292,11 +302,12 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("min_passenger_count")["value"], - rollupAggRes.getValue("min_passenger_count")["value"] + rollupAggRes.getValue("min_passenger_count")["value"], ) // Match phrase query - req = """ + req = + """ { "size": 0, "query": { @@ -312,7 +323,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", "/source_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", "/target_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -322,11 +333,12 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("min_passenger_count")["value"], - rollupAggRes.getValue("min_passenger_count")["value"] + rollupAggRes.getValue("min_passenger_count")["value"], ) // Unsupported query - req = """ + req = + """ { "size": 0, "query": { @@ -342,7 +354,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() try { client().makeRequest("POST", "/target_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) fail("Expected 400 Method BAD_REQUEST response") @@ -350,13 +362,14 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Wrong error message", "The match query is currently not supported in rollups", - (e.response.asMap() as Map>>)["error"]!!["caused_by"]!!["reason"] + (e.response.asMap() as Map>>)["error"]!!["caused_by"]!!["reason"], ) assertEquals("Unexpected status", 
RestStatus.BAD_REQUEST, e.response.restStatus()) } // No valid job for rollup search - req = """ + req = + """ { "size": 0, "query": { @@ -373,7 +386,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() try { client().makeRequest("POST", "/target_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) fail("Expected 400 Method BAD_REQUEST response") @@ -382,13 +395,14 @@ class RollupInterceptorIT : RollupRestTestCase() { "Wrong error message", "Could not find a rollup job that can answer this query because [missing field RateCodeID, missing field timestamp, " + "missing sum aggregation on total_amount]", - (e.response.asMap() as Map>>)["error"]!!["caused_by"]!!["reason"] + (e.response.asMap() as Map>>)["error"]!!["caused_by"]!!["reason"], ) assertEquals("Unexpected status", RestStatus.BAD_REQUEST, e.response.restStatus()) } // No query just aggregations - req = """ + req = + """ { "size": 0, "aggs": { @@ -419,7 +433,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", "/source_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", "/target_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -433,12 +447,13 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals("Source and rollup index did not return same avg results", rawAggRes["avg"]!!["value"], rollupAggRes["avg"]!!["value"]) // Invalid size in search - size > 0 - req = """ + req = + """ { "size": 3, "aggs": { "sum": { "sum": { "field": "passenger_count" } } } } - """.trimIndent() + """.trimIndent() try { client().makeRequest("POST", "/target_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) fail("Expected 400 Method BAD_REQUEST response") @@ -446,7 +461,7 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Wrong error message", "Rollup search must have size explicitly set to 0, but found 3", - (e.response.asMap() as Map>>)["error"]!!["caused_by"]!!["reason"] + (e.response.asMap() as Map>>)["error"]!!["caused_by"]!!["reason"], ) assertEquals("Unexpected status", RestStatus.BAD_REQUEST, e.response.restStatus()) } @@ -460,7 +475,7 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Wrong error message", "Rollup search must have size explicitly set to 0, but found -1", - (e.response.asMap() as Map>>)["error"]!!["caused_by"]!!["reason"] + (e.response.asMap() as Map>>)["error"]!!["caused_by"]!!["reason"], ) assertEquals("Unexpected status", RestStatus.BAD_REQUEST, e.response.restStatus()) } @@ -468,37 +483,41 @@ class RollupInterceptorIT : RollupRestTestCase() { fun `test bucket and sub aggregations have correct values`() { generateNYCTaxiData("source_rollup_bucket_and_sub") - val rollup = Rollup( - id = "basic_term_query_rollup_bucket_and_sub", - enabled = true, - schemaVersion = 1L, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = "source_rollup_bucket_and_sub", - targetIndex = "target_rollup_bucket_and_sub", - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), - 
Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", targetField = "passenger_count", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) + val rollup = + Rollup( + id = "basic_term_query_rollup_bucket_and_sub", + enabled = true, + schemaVersion = 1L, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = "source_rollup_bucket_and_sub", + targetIndex = "target_rollup_bucket_and_sub", + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), ), - RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())) - ) - ).let { createRollup(it, it.id) } + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", targetField = "passenger_count", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollup) @@ -512,7 +531,8 @@ class RollupInterceptorIT : RollupRestTestCase() { refreshAllIndices() // No query just bucket and sub metric aggregations - val req = """ + val req = + """ { "size": 0, "aggs": { @@ -528,7 +548,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() val rawRes = client().makeRequest("POST", "/source_rollup_bucket_and_sub/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) val rollupRes = client().makeRequest("POST", "/target_rollup_bucket_and_sub/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -541,60 +561,64 @@ class RollupInterceptorIT : RollupRestTestCase() { val rollupAggBucket = rollupAggBuckets[idx] assertEquals( "The sum aggregation had a different value raw[$rawAggBucket] rollup[$rollupAggBucket]", - rawAggBucket["sum"]!!["value"], rollupAggBucket["sum"]!!["value"] + rawAggBucket["sum"]!!["value"], rollupAggBucket["sum"]!!["value"], ) assertEquals( "The max aggregation had a different value raw[$rawAggBucket] rollup[$rollupAggBucket]", - rawAggBucket["max"]!!["value"], rollupAggBucket["max"]!!["value"] + rawAggBucket["max"]!!["value"], rollupAggBucket["max"]!!["value"], ) assertEquals( "The min aggregation had a different value raw[$rawAggBucket] rollup[$rollupAggBucket]", - rawAggBucket["min"]!!["value"], rollupAggBucket["min"]!!["value"] + rawAggBucket["min"]!!["value"], rollupAggBucket["min"]!!["value"], ) assertEquals( "The value_count aggregation had a different value raw[$rawAggBucket] rollup[$rollupAggBucket]", - rawAggBucket["value_count"]!!["value"], rollupAggBucket["value_count"]!!["value"] + rawAggBucket["value_count"]!!["value"], rollupAggBucket["value_count"]!!["value"], ) assertEquals( "The avg aggregation had a different value raw[$rawAggBucket] rollup[$rollupAggBucket]", - rawAggBucket["avg"]!!["value"], rollupAggBucket["avg"]!!["value"] + rawAggBucket["avg"]!!["value"], rollupAggBucket["avg"]!!["value"], ) } } fun `test continuous rollup search`() { 
generateNYCTaxiData("source_continuous_rollup_search") - val rollup = Rollup( - id = "basic_term_query_continuous_rollup_search", - enabled = true, - schemaVersion = 1L, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = "source_continuous_rollup_search", - targetIndex = "target_continuous_rollup_search", - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = true, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "7d"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", targetField = "passenger_count", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) + val rollup = + Rollup( + id = "basic_term_query_continuous_rollup_search", + enabled = true, + schemaVersion = 1L, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = "source_continuous_rollup_search", + targetIndex = "target_continuous_rollup_search", + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = true, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "7d"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), ), - RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())) - ) - ).let { createRollup(it, it.id) } + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", targetField = "passenger_count", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollup) @@ -607,14 +631,15 @@ class RollupInterceptorIT : RollupRestTestCase() { // if the nextWindowStartTime is after 2019-01-02T00:00:00Z then all data has been rolled up assertTrue( "Rollup has not caught up yet, docs processed: ${rollupMetadata.stats.documentsProcessed}", - rollupMetadata.continuous!!.nextWindowStartTime.isAfter(Instant.parse("2019-01-02T00:00:00Z")) + rollupMetadata.continuous!!.nextWindowStartTime.isAfter(Instant.parse("2019-01-02T00:00:00Z")), ) } refreshAllIndices() // Term query - val req = """ + val req = + """ { "size": 0, "query": { @@ -630,7 +655,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() val rawRes = client().makeRequest("POST", "/source_continuous_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) val rollupRes = client().makeRequest("POST", "/target_continuous_rollup_search/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -640,7 +665,7 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("min_passenger_count")["value"], - rollupAggRes.getValue("min_passenger_count")["value"] + rollupAggRes.getValue("min_passenger_count")["value"], ) } @@ -648,37 +673,41 @@ class RollupInterceptorIT : RollupRestTestCase() { 
generateNYCTaxiData("source_rollup_search_all_jobs_1") generateNYCTaxiData("source_rollup_search_all_jobs_2") val targetIndex = "target_rollup_search_all_jobs" - val rollupHourly = Rollup( - id = "hourly_basic_term_query_rollup_search_all", - enabled = true, - schemaVersion = 1L, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = "source_rollup_search_all_jobs_1", - targetIndex = targetIndex, - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", targetField = "passenger_count", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) + val rollupHourly = + Rollup( + id = "hourly_basic_term_query_rollup_search_all", + enabled = true, + schemaVersion = 1L, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = "source_rollup_search_all_jobs_1", + targetIndex = targetIndex, + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), ), - RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())) - ) - ).let { createRollup(it, it.id) } + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", targetField = "passenger_count", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollupHourly) @@ -689,36 +718,40 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) } - val rollupMinutely = Rollup( - id = "minutely_basic_term_query_rollup_search_all", - enabled = true, - schemaVersion = 1L, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = "source_rollup_search_all_jobs_2", - targetIndex = targetIndex, - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1m"), - Terms("RatecodeID", "RatecodeID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", targetField = "passenger_count", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) + val rollupMinutely = + Rollup( + id = "minutely_basic_term_query_rollup_search_all", + enabled = true, + schemaVersion = 1L, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = "source_rollup_search_all_jobs_2", + targetIndex = targetIndex, + metadataID 
= null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1m"), + Terms("RatecodeID", "RatecodeID"), ), - RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())) - ) - ).let { createRollup(it, it.id) } + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", targetField = "passenger_count", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollupMinutely) @@ -731,7 +764,8 @@ class RollupInterceptorIT : RollupRestTestCase() { refreshAllIndices() - val req = """ + val req = + """ { "size": 0, "query": { @@ -743,7 +777,7 @@ class RollupInterceptorIT : RollupRestTestCase() { "value_count_passenger_count": { "value_count": { "field": "passenger_count" } } } } - """.trimIndent() + """.trimIndent() val rawRes1 = client().makeRequest("POST", "/source_rollup_search_all_jobs_1/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes1.restStatus() == RestStatus.OK) val rawRes2 = client().makeRequest("POST", "/source_rollup_search_all_jobs_2/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -757,16 +791,16 @@ class RollupInterceptorIT : RollupRestTestCase() { // When the cluster setting to search all jobs is off, the aggregations will be the same for searching a single job as for searching both assertEquals( "Searching single rollup job and rollup target index did not return the same max results", - rawAgg1Res.getValue("max_passenger_count")["value"], rollupAggResSingle.getValue("max_passenger_count")["value"] + rawAgg1Res.getValue("max_passenger_count")["value"], rollupAggResSingle.getValue("max_passenger_count")["value"], ) assertEquals( "Searching single rollup job and rollup target index did not return the same sum results", - rawAgg1Res.getValue("sum_passenger_count")["value"], rollupAggResSingle.getValue("sum_passenger_count")["value"] + rawAgg1Res.getValue("sum_passenger_count")["value"], rollupAggResSingle.getValue("sum_passenger_count")["value"], ) val trueAggCount = rawAgg1Res.getValue("value_count_passenger_count")["value"] as Int + rawAgg2Res.getValue("value_count_passenger_count")["value"] as Int assertEquals( "Searching single rollup job and rollup target index did not return the same value count results", - rawAgg1Res.getValue("value_count_passenger_count")["value"], rollupAggResSingle.getValue("value_count_passenger_count")["value"] + rawAgg1Res.getValue("value_count_passenger_count")["value"], rollupAggResSingle.getValue("value_count_passenger_count")["value"], ) val trueAggSum = rawAgg1Res.getValue("sum_passenger_count")["value"] as Double + rawAgg2Res.getValue("sum_passenger_count")["value"] as Double @@ -779,15 +813,15 @@ class RollupInterceptorIT : RollupRestTestCase() { // With search all jobs setting on, the sum, and value_count will now be equal to the sum of the single job search results assertEquals( "Searching single rollup job and rollup target index did not return the same sum results", - rawAgg1Res.getValue("max_passenger_count")["value"], rollupAggResAll.getValue("max_passenger_count")["value"] + rawAgg1Res.getValue("max_passenger_count")["value"], rollupAggResAll.getValue("max_passenger_count")["value"], ) assertEquals( "Searching 
rollup target index did not return the sum for all of the rollup jobs on the index", - trueAggSum, rollupAggResAll.getValue("sum_passenger_count")["value"] + trueAggSum, rollupAggResAll.getValue("sum_passenger_count")["value"], ) assertEquals( "Searching rollup target index did not return the value count for all of the rollup jobs on the index", - trueAggCount, rollupAggResAll.getValue("value_count_passenger_count")["value"] + trueAggCount, rollupAggResAll.getValue("value_count_passenger_count")["value"], ) } @@ -798,37 +832,41 @@ class RollupInterceptorIT : RollupRestTestCase() { generateNYCTaxiData(sourceIndex2) val targetIndex1 = "target_rollup_search_multi_jobs1" val targetIndex2 = "target_rollup_search_multi_jobs2" - val rollupHourly1 = Rollup( - id = "hourly_basic_term_query_rollup_search_multi_1", - enabled = true, - schemaVersion = 1L, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = sourceIndex1, - targetIndex = targetIndex1, - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", targetField = "passenger_count", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) + val rollupHourly1 = + Rollup( + id = "hourly_basic_term_query_rollup_search_multi_1", + enabled = true, + schemaVersion = 1L, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = sourceIndex1, + targetIndex = targetIndex1, + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), ), - RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())) - ) - ).let { createRollup(it, it.id) } + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", targetField = "passenger_count", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollupHourly1) @@ -839,37 +877,41 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) } - val rollupHourly2 = Rollup( - id = "hourly_basic_term_query_rollup_search_multi_2", - enabled = true, - schemaVersion = 1L, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = sourceIndex2, - targetIndex = targetIndex2, - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = 
listOf( - RollupMetrics( - sourceField = "passenger_count", targetField = "passenger_count", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) + val rollupHourly2 = + Rollup( + id = "hourly_basic_term_query_rollup_search_multi_2", + enabled = true, + schemaVersion = 1L, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = sourceIndex2, + targetIndex = targetIndex2, + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), ), - RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())) - ) - ).let { createRollup(it, it.id) } + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", targetField = "passenger_count", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollupHourly2) @@ -882,7 +924,8 @@ class RollupInterceptorIT : RollupRestTestCase() { refreshAllIndices() - val req = """ + val req = + """ { "size": 0, "query": { @@ -894,7 +937,7 @@ class RollupInterceptorIT : RollupRestTestCase() { "value_count_passenger_count": { "value_count": { "field": "passenger_count" } } } } - """.trimIndent() + """.trimIndent() val rawRes1 = client().makeRequest("POST", "/$sourceIndex1/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes1.restStatus() == RestStatus.OK) val rawRes2 = client().makeRequest("POST", "/$sourceIndex2/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -908,16 +951,16 @@ class RollupInterceptorIT : RollupRestTestCase() { // When the cluster setting to search all jobs is off, the aggregations will be the same for searching a single job as for searching both assertEquals( "Searching single rollup job and rollup target index did not return the same max results", - rawAgg1Res.getValue("max_passenger_count")["value"], rollupAggResMulti.getValue("max_passenger_count")["value"] + rawAgg1Res.getValue("max_passenger_count")["value"], rollupAggResMulti.getValue("max_passenger_count")["value"], ) assertEquals( "Searching single rollup job and rollup target index did not return the same sum results", - rawAgg1Res.getValue("sum_passenger_count")["value"], rollupAggResMulti.getValue("sum_passenger_count")["value"] + rawAgg1Res.getValue("sum_passenger_count")["value"], rollupAggResMulti.getValue("sum_passenger_count")["value"], ) val trueAggCount = rawAgg1Res.getValue("value_count_passenger_count")["value"] as Int + rawAgg2Res.getValue("value_count_passenger_count")["value"] as Int assertEquals( "Searching single rollup job and rollup target index did not return the same value count results", - rawAgg1Res.getValue("value_count_passenger_count")["value"], rollupAggResMulti.getValue("value_count_passenger_count")["value"] + rawAgg1Res.getValue("value_count_passenger_count")["value"], rollupAggResMulti.getValue("value_count_passenger_count")["value"], ) val trueAggSum = rawAgg1Res.getValue("sum_passenger_count")["value"] as Double + rawAgg2Res.getValue("sum_passenger_count")["value"] as Double @@ 
-930,15 +973,15 @@ class RollupInterceptorIT : RollupRestTestCase() { // With search all jobs setting on, the sum, and value_count will now be equal to the sum of the single job search results assertEquals( "Searching single rollup job and rollup target index did not return the same sum results", - rawAgg1Res.getValue("max_passenger_count")["value"], rollupAggResAll.getValue("max_passenger_count")["value"] + rawAgg1Res.getValue("max_passenger_count")["value"], rollupAggResAll.getValue("max_passenger_count")["value"], ) assertEquals( "Searching rollup target index did not return the sum for all of the rollup jobs on the index", - trueAggSum, rollupAggResAll.getValue("sum_passenger_count")["value"] + trueAggSum, rollupAggResAll.getValue("sum_passenger_count")["value"], ) assertEquals( "Searching rollup target index did not return the value count for all of the rollup jobs on the index", - trueAggCount, rollupAggResAll.getValue("value_count_passenger_count")["value"] + trueAggCount, rollupAggResAll.getValue("value_count_passenger_count")["value"], ) } @@ -949,36 +992,40 @@ class RollupInterceptorIT : RollupRestTestCase() { generateNYCTaxiData(sourceIndex2) val targetIndex1 = "target_rollup_search_multi_failed_jobs1" val targetIndex2 = "target_rollup_search_multi_failed_jobs2" - val rollupJob1 = Rollup( - id = "hourly_basic_term_query_rollup_search_failed_1", - enabled = true, - schemaVersion = 1L, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = sourceIndex1, - targetIndex = targetIndex1, - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), - Terms("VendorID", "VendorID"), - ), - metrics = listOf( - RollupMetrics( - sourceField = "fare_amount", targetField = "fare_amount", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) + val rollupJob1 = + Rollup( + id = "hourly_basic_term_query_rollup_search_failed_1", + enabled = true, + schemaVersion = 1L, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = sourceIndex1, + targetIndex = targetIndex1, + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), + Terms("VendorID", "VendorID"), ), - RollupMetrics(sourceField = "improvement_surcharge", targetField = "improvement_surcharge", metrics = listOf(Max(), Min())) - ) - ).let { createRollup(it, it.id) } + metrics = + listOf( + RollupMetrics( + sourceField = "fare_amount", targetField = "fare_amount", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + RollupMetrics(sourceField = "improvement_surcharge", targetField = "improvement_surcharge", metrics = listOf(Max(), Min())), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollupJob1) @@ -989,37 +1036,41 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) } - val rollupJob2 = Rollup( - id = "hourly_basic_term_query_rollup_search_failed_2", - enabled = true, - schemaVersion = 1L, - jobSchedule = IntervalSchedule(Instant.now(), 
1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = sourceIndex2, - targetIndex = targetIndex2, - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "tpep_dropoff_datetime", fixedInterval = "1h"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", targetField = "passenger_count", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) + val rollupJob2 = + Rollup( + id = "hourly_basic_term_query_rollup_search_failed_2", + enabled = true, + schemaVersion = 1L, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = sourceIndex2, + targetIndex = targetIndex2, + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_dropoff_datetime", fixedInterval = "1h"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), ), - RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())) - ) - ).let { createRollup(it, it.id) } + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", targetField = "passenger_count", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollupJob2) @@ -1032,7 +1083,8 @@ class RollupInterceptorIT : RollupRestTestCase() { refreshAllIndices() - val req = """ + val req = + """ { "size": 0, "query": { @@ -1044,7 +1096,7 @@ class RollupInterceptorIT : RollupRestTestCase() { "value_count_passenger_count": { "value_count": { "field": "passenger_count" } } } } - """.trimIndent() + """.trimIndent() // Search 1 non-rollup index and 1 rollup val searchResult1 = client().makeRequest("POST", "/$sourceIndex2,$targetIndex2/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(searchResult1.restStatus() == RestStatus.OK) @@ -1053,12 +1105,11 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals(1, failures?.size) assertEquals( "Searching multiple indices where one is rollup and other is not, didn't return failure", - "illegal_argument_exception", failures?.get(0)?.get("type") ?: "Didn't find failure type in search response" - + "illegal_argument_exception", failures?.get(0)?.get("type") ?: "Didn't find failure type in search response", ) assertEquals( "Searching multiple indices where one is rollup and other is not, didn't return failure", - "Not all indices have rollup job", failures?.get(0)?.get("reason") ?: "Didn't find failure reason in search response" + "Not all indices have rollup job", failures?.get(0)?.get("reason") ?: "Didn't find failure reason in search response", ) // Search 2 rollups with different mappings @@ -1067,13 +1118,13 @@ class RollupInterceptorIT : RollupRestTestCase() { "POST", "/$targetIndex1,$targetIndex2/_search", emptyMap(), - StringEntity(req, ContentType.APPLICATION_JSON) + StringEntity(req, ContentType.APPLICATION_JSON), ) } catch (e: ResponseException) { assertEquals( "Searching multiple rollup 
indices which weren't created by same rollup job, didn't return failure", "Could not find a rollup job that can answer this query because [missing field RatecodeID, missing field passenger_count]", - (e.response.asMap() as Map>>)["error"]!!["caused_by"]!!["reason"] + (e.response.asMap() as Map>>)["error"]!!["caused_by"]!!["reason"], ) assertEquals("Unexpected status", RestStatus.BAD_REQUEST, e.response.restStatus()) } @@ -1085,39 +1136,43 @@ class RollupInterceptorIT : RollupRestTestCase() { createSampleIndexForQSQTest(sourceIndex) - val rollup = Rollup( - id = "basic_query_string_query_rollup_search111", - enabled = true, - schemaVersion = 1L, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = sourceIndex, - targetIndex = targetIndex, - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "event_ts", fixedInterval = "1h"), - Terms("state", "state"), - Terms("state_ext", "state_ext"), - Terms("state_ext2", "state_ext2"), - Terms("state_ordinal", "state_ordinal"), - Terms("abc test", "abc test"), - ), - metrics = listOf( - RollupMetrics( - sourceField = "earnings", targetField = "earnings", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) - ) - ) - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "basic_query_string_query_rollup_search111", + enabled = true, + schemaVersion = 1L, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = sourceIndex, + targetIndex = targetIndex, + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "event_ts", fixedInterval = "1h"), + Terms("state", "state"), + Terms("state_ext", "state_ext"), + Terms("state_ext2", "state_ext2"), + Terms("state_ordinal", "state_ordinal"), + Terms("abc test", "abc test"), + ), + metrics = + listOf( + RollupMetrics( + sourceField = "earnings", targetField = "earnings", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollup) @@ -1131,7 +1186,8 @@ class RollupInterceptorIT : RollupRestTestCase() { refreshAllIndices() // Term query - var req = """ + var req = + """ { "size": 0, "query": { @@ -1149,7 +1205,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() var rawRes = client().makeRequest("POST", "/$sourceIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) var rollupRes = client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -1159,11 +1215,12 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("earnings_total")["value"], - rollupAggRes.getValue("earnings_total")["value"] + rollupAggRes.getValue("earnings_total")["value"], ) // Fuzzy query - req = """ + req = + """ { "size": 0, "query": { @@ -1179,7 +1236,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", 
"/$sourceIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -1189,10 +1246,11 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("earnings_total")["value"], - rollupAggRes.getValue("earnings_total")["value"] + rollupAggRes.getValue("earnings_total")["value"], ) // Prefix query - req = """ + req = + """ { "size": 0, "query": { @@ -1208,7 +1266,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", "/$sourceIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -1218,10 +1276,11 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("earnings_total")["value"], - rollupAggRes.getValue("earnings_total")["value"] + rollupAggRes.getValue("earnings_total")["value"], ) // Regex query - req = """ + req = + """ { "size": 0, "query": { @@ -1237,7 +1296,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", "/$sourceIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -1247,10 +1306,11 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("earnings_total")["value"], - rollupAggRes.getValue("earnings_total")["value"] + rollupAggRes.getValue("earnings_total")["value"], ) // Range query - req = """ + req = + """ { "size": 0, "query": { @@ -1266,7 +1326,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", "/$sourceIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -1276,10 +1336,11 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("earnings_total")["value"], - rollupAggRes.getValue("earnings_total")["value"] + rollupAggRes.getValue("earnings_total")["value"], ) // Query with field prefix - req = """ + req = + """ { "size": 0, "query": { @@ -1296,7 +1357,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", "/$sourceIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -1306,11 +1367,12 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", 
rawAggRes.getValue("earnings_total")["value"], - rollupAggRes.getValue("earnings_total")["value"] + rollupAggRes.getValue("earnings_total")["value"], ) // Using ALL_MATCH_PATTERN for default_field but rollup job didn't include all fields - req = """ + req = + """ { "size": 0, "query": { @@ -1328,19 +1390,20 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() try { client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) } catch (e: ResponseException) { assertTrue( e.message?.contains( - "[missing terms grouping on earnings, missing terms grouping on event_ts, missing field test.vvv, missing field test.fff]" - ) ?: false + "[missing terms grouping on earnings, missing terms grouping on event_ts, missing field test.vvv, missing field test.fff]", + ) ?: false, ) } // Using ALL_MATCH_PATTERN in one of fields in "fields" array but rollup job didn't include all fields - req = """ + req = + """ { "size": 0, "query": { @@ -1358,19 +1421,20 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() try { client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) } catch (e: ResponseException) { assertTrue( e.message?.contains( - "[missing terms grouping on earnings, missing terms grouping on event_ts, missing field test.vvv, missing field test.fff]" - ) ?: false + "[missing terms grouping on earnings, missing terms grouping on event_ts, missing field test.vvv, missing field test.fff]", + ) ?: false, ) } // field from "fields" list is missing in rollup - req = """ + req = + """ { "size": 0, "query": { @@ -1388,7 +1452,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() try { client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) } catch (e: ResponseException) { @@ -1396,7 +1460,8 @@ class RollupInterceptorIT : RollupRestTestCase() { } // no fields or default_field present. 
Fallback on index setting [index.query.default_field] default value: "*" - req = """ + req = + """ { "size": 0, "query": { @@ -1413,14 +1478,14 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() try { client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) } catch (e: ResponseException) { assertTrue( e.message?.contains( - "[missing terms grouping on earnings, missing terms grouping on event_ts, missing field test.vvv, missing field test.fff]" - ) ?: false + "[missing terms grouping on earnings, missing terms grouping on event_ts, missing field test.vvv, missing field test.fff]", + ) ?: false, ) } @@ -1437,11 +1502,12 @@ class RollupInterceptorIT : RollupRestTestCase() { } } """.trimIndent(), - ContentType.APPLICATION_JSON - ) + ContentType.APPLICATION_JSON, + ), ) // - req = """ + req = + """ { "size": 0, "query": { @@ -1458,7 +1524,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", "/$sourceIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -1468,11 +1534,12 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("earnings_total")["value"], - rollupAggRes.getValue("earnings_total")["value"] + rollupAggRes.getValue("earnings_total")["value"], ) // prefix pattern in "default_field" field - req = """ + req = + """ { "size": 0, "query": { @@ -1490,7 +1557,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", "/$sourceIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -1500,11 +1567,12 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("earnings_total")["value"], - rollupAggRes.getValue("earnings_total")["value"] + rollupAggRes.getValue("earnings_total")["value"], ) // field with space in query: - req = """ + req = + """ { "size": 0, "query": { @@ -1521,7 +1589,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", "/$sourceIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -1531,11 +1599,12 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("earnings_total")["value"], - rollupAggRes.getValue("earnings_total")["value"] + rollupAggRes.getValue("earnings_total")["value"], ) // _exists_:field - req = """ + req = + """ { "size": 0, "query": { @@ -1552,7 +1621,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() rawRes = client().makeRequest("POST", "/$sourceIndex/_search", emptyMap(), StringEntity(req, 
ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) rollupRes = client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -1562,7 +1631,7 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("earnings_total")["value"], - rollupAggRes.getValue("earnings_total")["value"] + rollupAggRes.getValue("earnings_total")["value"], ) } @@ -1570,37 +1639,41 @@ class RollupInterceptorIT : RollupRestTestCase() { val sourceIndex = "source_rollup_search_qsq_2" val targetIndex = "target_rollup_qsq_search_2" generateNYCTaxiData(sourceIndex) - val rollup = Rollup( - id = "basic_query_string_query_rollup_search_2", - enabled = true, - schemaVersion = 1L, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = sourceIndex, - targetIndex = targetIndex, - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", targetField = "passenger_count", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) + val rollup = + Rollup( + id = "basic_query_string_query_rollup_search_2", + enabled = true, + schemaVersion = 1L, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = sourceIndex, + targetIndex = targetIndex, + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), ), - RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())) - ) - ).let { createRollup(it, it.id) } + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", targetField = "passenger_count", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollup) @@ -1614,7 +1687,8 @@ class RollupInterceptorIT : RollupRestTestCase() { refreshAllIndices() // Invalid query - var req = """ + var req = + """ { "size": 0, "query": { @@ -1630,7 +1704,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() try { client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) fail("search should've failed due to incorrect query") @@ -1643,37 +1717,41 @@ class RollupInterceptorIT : RollupRestTestCase() { val sourceIndex = "source_rollup_search_qsq" val targetIndex = "target_rollup_qsq_search" generateNYCTaxiData(sourceIndex) - val rollup = Rollup( - id = "basic_query_string_query_rollup_search", - enabled = true, - schemaVersion = 1L, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - 
jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = sourceIndex, - targetIndex = targetIndex, - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", targetField = "passenger_count", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) + val rollup = + Rollup( + id = "basic_query_string_query_rollup_search", + enabled = true, + schemaVersion = 1L, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = sourceIndex, + targetIndex = targetIndex, + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), ), - RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())) - ) - ).let { createRollup(it, it.id) } + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", targetField = "passenger_count", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + RollupMetrics(sourceField = "total_amount", targetField = "total_amount", metrics = listOf(Max(), Min())), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollup) @@ -1687,7 +1765,8 @@ class RollupInterceptorIT : RollupRestTestCase() { refreshAllIndices() // Term query - var req = """ + var req = + """ { "size": 0, "query": { @@ -1703,7 +1782,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() try { client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) fail("search should've failed due to incorrect query") @@ -1718,39 +1797,43 @@ class RollupInterceptorIT : RollupRestTestCase() { createSampleIndexForQSQTest(sourceIndex) - val rollup = Rollup( - id = "basic_query_string_query_rollup_search98243", - enabled = true, - schemaVersion = 1L, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = "source_111*", - targetIndex = targetIndex, - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "event_ts", fixedInterval = "1h"), - Terms("state", "state"), - Terms("state_ext", "state_ext"), - Terms("state_ext2", "state_ext2"), - Terms("state_ordinal", "state_ordinal"), - Terms("abc test", "abc test"), - ), - metrics = listOf( - RollupMetrics( - sourceField = "earnings", targetField = "earnings", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) - ) - ) - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "basic_query_string_query_rollup_search98243", + enabled = true, + schemaVersion = 1L, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = 
"source_111*", + targetIndex = targetIndex, + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "event_ts", fixedInterval = "1h"), + Terms("state", "state"), + Terms("state_ext", "state_ext"), + Terms("state_ext2", "state_ext2"), + Terms("state_ordinal", "state_ordinal"), + Terms("abc test", "abc test"), + ), + metrics = + listOf( + RollupMetrics( + sourceField = "earnings", targetField = "earnings", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollup) @@ -1764,7 +1847,8 @@ class RollupInterceptorIT : RollupRestTestCase() { refreshAllIndices() // Term query - var req = """ + var req = + """ { "size": 0, "query": { @@ -1782,7 +1866,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() var rawRes = client().makeRequest("POST", "/$sourceIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) var rollupRes = client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -1792,7 +1876,7 @@ class RollupInterceptorIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same min results", rawAggRes.getValue("earnings_total")["value"], - rollupAggRes.getValue("earnings_total")["value"] + rollupAggRes.getValue("earnings_total")["value"], ) } @@ -1802,39 +1886,43 @@ class RollupInterceptorIT : RollupRestTestCase() { createSampleIndexForQSQTest(sourceIndex) - val rollup = Rollup( - id = "basic_query_string_query_rollup_search982499", - enabled = true, - schemaVersion = 1L, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = "source_999*", - targetIndex = targetIndex, - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "event_ts", fixedInterval = "1h"), - Terms("state", "state"), - Terms("state_ext", "state_ext"), - Terms("state_ext2", "state_ext2"), - Terms("state_ordinal", "state_ordinal"), - Terms("abc test", "abc test"), - ), - metrics = listOf( - RollupMetrics( - sourceField = "earnings", targetField = "earnings", - metrics = listOf( - Sum(), Min(), Max(), - ValueCount(), Average() - ) - ) - ) - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "basic_query_string_query_rollup_search982499", + enabled = true, + schemaVersion = 1L, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = "source_999*", + targetIndex = targetIndex, + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "event_ts", fixedInterval = "1h"), + Terms("state", "state"), + Terms("state_ext", "state_ext"), + Terms("state_ext2", "state_ext2"), + Terms("state_ordinal", "state_ordinal"), + Terms("abc test", "abc test"), + ), + metrics = + listOf( + RollupMetrics( + sourceField = "earnings", targetField = "earnings", + metrics = + listOf( + Sum(), Min(), Max(), + ValueCount(), Average(), + ), + ), + ), + ).let { createRollup(it, it.id) } 
updateRollupStartTime(rollup) @@ -1850,7 +1938,8 @@ class RollupInterceptorIT : RollupRestTestCase() { deleteIndex(sourceIndex) // Term query - var req = """ + var req = + """ { "size": 0, "query": { @@ -1868,7 +1957,7 @@ class RollupInterceptorIT : RollupRestTestCase() { } } } - """.trimIndent() + """.trimIndent() try { client().makeRequest("POST", "/$targetIndex/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) fail("Failure was expected when searching rollup index using qsq query when sourceIndex does not exist!") diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/DimensionTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/DimensionTests.kt index 00d72f0ca..65b5277b0 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/DimensionTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/DimensionTests.kt @@ -12,7 +12,6 @@ import org.opensearch.test.OpenSearchTestCase import kotlin.test.assertFailsWith class DimensionTests : OpenSearchTestCase() { - fun `test date histogram empty fields`() { assertFailsWith(IllegalArgumentException::class, "Source and target field must not be empty") { randomDateHistogram().copy(sourceField = "", targetField = "") diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/ISMRollupTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/ISMRollupTests.kt index 0a2c15ee5..89fbf6e04 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/ISMRollupTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/ISMRollupTests.kt @@ -17,47 +17,46 @@ import java.time.temporal.ChronoUnit import kotlin.test.assertFailsWith class ISMRollupTests : OpenSearchTestCase() { - fun `test ism rollup requires only one date histogram and it should be first dimension`() { - assertFailsWith(IllegalArgumentException:: class, "The first dimension must be a date histogram") { + assertFailsWith(IllegalArgumentException::class, "The first dimension must be a date histogram") { randomISMRollup().copy(dimensions = listOf(randomTerms(), randomDateHistogram())) } - assertFailsWith(IllegalArgumentException:: class, "Requires one date histogram in dimensions") { + assertFailsWith(IllegalArgumentException::class, "Requires one date histogram in dimensions") { randomISMRollup().copy(dimensions = listOf()) } - assertFailsWith(IllegalArgumentException:: class, "Requires one date histogram in dimensions") { + assertFailsWith(IllegalArgumentException::class, "Requires one date histogram in dimensions") { randomISMRollup().copy(dimensions = listOf(randomTerms())) } - assertFailsWith(IllegalArgumentException:: class, "Requires only one date histogram in dimensions") { + assertFailsWith(IllegalArgumentException::class, "Requires only one date histogram in dimensions") { randomISMRollup().copy(dimensions = listOf(randomDateHistogram(), randomDateHistogram())) } } fun `test ism rollup requires non empty description`() { - assertFailsWith(IllegalArgumentException:: class, "Requires non empty description") { + assertFailsWith(IllegalArgumentException::class, "Requires non empty description") { randomISMRollup().copy(description = "") } } fun `test ism rollup requires non empty target index`() { - assertFailsWith(IllegalArgumentException:: class, "Requires non empty target index") { + assertFailsWith(IllegalArgumentException::class, "Requires non empty target index") { randomISMRollup().copy(targetIndex = "") } } fun `test ism rollup requires page 
size to be between 1 and 10K`() { - assertFailsWith(IllegalArgumentException:: class, "Page size cannot be less than 1") { + assertFailsWith(IllegalArgumentException::class, "Page size cannot be less than 1") { randomISMRollup().copy(pageSize = -1) } - assertFailsWith(IllegalArgumentException:: class, "Page size cannot be less than 1") { + assertFailsWith(IllegalArgumentException::class, "Page size cannot be less than 1") { randomISMRollup().copy(pageSize = 0) } - assertFailsWith(IllegalArgumentException:: class, "Page size cannot be greater than 10000") { + assertFailsWith(IllegalArgumentException::class, "Page size cannot be greater than 10000") { randomISMRollup().copy(pageSize = 10001) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/RollupFieldMappingTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/RollupFieldMappingTests.kt index cf7d8fcea..5cdaa6b02 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/RollupFieldMappingTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/RollupFieldMappingTests.kt @@ -9,7 +9,6 @@ import org.opensearch.indexmanagement.rollup.model.RollupFieldMapping.Companion. import org.opensearch.test.OpenSearchTestCase class RollupFieldMappingTests : OpenSearchTestCase() { - fun `test toIssue`() { var fieldMapping = RollupFieldMapping(RollupFieldMapping.Companion.FieldType.DIMENSION, "dummy-field", "terms") var actual = fieldMapping.toIssue() diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/RollupTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/RollupTests.kt index 174e52685..e13f1d6e7 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/RollupTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/RollupTests.kt @@ -88,17 +88,19 @@ class RollupTests : OpenSearchTestCase() { fun `test delay applies to continuous rollups only`() { // Continuous rollup schedule matches delay val newDelay: Long = 500 - val continuousRollup = randomRollup().copy( - delay = newDelay, - continuous = true - ) + val continuousRollup = + randomRollup().copy( + delay = newDelay, + continuous = true, + ) assertEquals(newDelay, continuousRollup.jobSchedule.delay) // Non continuous rollup schedule should have null delay - val nonContinuousRollup = randomRollup().copy( - jobSchedule = randomSchedule(), - delay = newDelay, - continuous = false - ) + val nonContinuousRollup = + randomRollup().copy( + jobSchedule = randomSchedule(), + delay = newDelay, + continuous = false, + ) assertNull(nonContinuousRollup.jobSchedule.delay) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/WriteableTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/WriteableTests.kt index 216b16d6f..374fcf15c 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/WriteableTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/WriteableTests.kt @@ -33,7 +33,6 @@ import org.opensearch.indexmanagement.rollup.randomValueCount import org.opensearch.test.OpenSearchTestCase class WriteableTests : OpenSearchTestCase() { - fun `test date histogram dimension as stream`() { val dateHistogram = randomDateHistogram() val out = BytesStreamOutput().also { dateHistogram.writeTo(it) } @@ -115,10 +114,11 @@ class WriteableTests : OpenSearchTestCase() { } fun `test rollup roles field deprecation`() { - val rollup = randomRollup().copy( - delay = randomLongBetween(0, 60000000), - roles = 
listOf("I am deprecated") - ) + val rollup = + randomRollup().copy( + delay = randomLongBetween(0, 60000000), + roles = listOf("I am deprecated"), + ) val out = BytesStreamOutput().also { rollup.writeTo(it) } val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) val streamedRollup = Rollup(sin) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/XContentTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/XContentTests.kt index 8e230013a..65c5dfa84 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/XContentTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/model/XContentTests.kt @@ -6,8 +6,8 @@ package org.opensearch.indexmanagement.rollup.model import org.opensearch.common.xcontent.LoggingDeprecationHandler -import org.opensearch.core.xcontent.XContentParser import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.xcontent.XContentParser import org.opensearch.indexmanagement.common.model.dimension.Dimension import org.opensearch.indexmanagement.indexstatemanagement.util.XCONTENT_WITHOUT_TYPE import org.opensearch.indexmanagement.opensearchapi.parseWithType @@ -28,7 +28,6 @@ import org.opensearch.test.OpenSearchTestCase import kotlin.test.assertFailsWith class XContentTests : OpenSearchTestCase() { - fun `test invalid dimension parsing`() { assertFailsWith(IllegalArgumentException::class, "Invalid dimension type [invalid_dimension] found in dimensions") { Dimension.parse(parser("{\"invalid_dimension\":{}}")) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestDeleteRollupActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestDeleteRollupActionIT.kt index 67bab487c..f35e6ac1b 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestDeleteRollupActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestDeleteRollupActionIT.kt @@ -6,15 +6,14 @@ package org.opensearch.indexmanagement.rollup.resthandler import org.opensearch.client.ResponseException -import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.ROLLUP_JOBS_BASE_URI -import org.opensearch.indexmanagement.makeRequest import org.opensearch.core.rest.RestStatus +import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.ROLLUP_JOBS_BASE_URI import org.opensearch.indexmanagement.indexstatemanagement.wait +import org.opensearch.indexmanagement.makeRequest import org.opensearch.test.junit.annotations.TestLogging @TestLogging(value = "level:DEBUG", reason = "Debugging tests") class RestDeleteRollupActionIT : RollupRestAPITestCase() { - @Throws(Exception::class) fun `test deleting a rollup`() { val rollup = createRandomRollup() diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestExplainRollupActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestExplainRollupActionIT.kt index 8674b0445..42d4f27d1 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestExplainRollupActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestExplainRollupActionIT.kt @@ -6,13 +6,13 @@ package org.opensearch.indexmanagement.rollup.resthandler import org.opensearch.client.ResponseException +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.ROLLUP_JOBS_BASE_URI import org.opensearch.indexmanagement.makeRequest import 
org.opensearch.indexmanagement.rollup.model.RollupMetadata import org.opensearch.indexmanagement.rollup.randomRollup import org.opensearch.indexmanagement.waitFor import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule -import org.opensearch.core.rest.RestStatus import org.opensearch.test.junit.annotations.TestLogging import java.time.Instant import java.time.temporal.ChronoUnit @@ -20,17 +20,17 @@ import java.time.temporal.ChronoUnit @TestLogging(value = "level:DEBUG", reason = "Debugging tests") @Suppress("UNCHECKED_CAST") class RestExplainRollupActionIT : RollupRestAPITestCase() { - @Throws(Exception::class) fun `test explain rollup`() { - val rollup = randomRollup().copy( - id = "test_explain_rollup", - continuous = false, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null - ).let { createRollup(it, it.id) } + val rollup = + randomRollup().copy( + id = "test_explain_rollup", + continuous = false, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + ).let { createRollup(it, it.id) } createRollupSourceIndex(rollup) updateRollupStartTime(rollup) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestGetRollupActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestGetRollupActionIT.kt index e02b7eafb..2d05b39f6 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestGetRollupActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestGetRollupActionIT.kt @@ -6,18 +6,17 @@ package org.opensearch.indexmanagement.rollup.resthandler import org.opensearch.client.ResponseException +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.ROLLUP_JOBS_BASE_URI import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.rollup.action.get.GetRollupsRequest.Companion.DEFAULT_SIZE import org.opensearch.indexmanagement.rollup.randomRollup -import org.opensearch.core.rest.RestStatus import org.opensearch.test.junit.annotations.TestLogging import java.util.Locale @TestLogging(value = "level:DEBUG", reason = "Debugging tests") @Suppress("UNCHECKED_CAST") class RestGetRollupActionIT : RollupRestAPITestCase() { - private val testName = javaClass.simpleName.lowercase(Locale.ROOT) @Throws(Exception::class) @@ -26,15 +25,16 @@ class RestGetRollupActionIT : RollupRestAPITestCase() { val indexedRollup = getRollup(rollup.id) // Schema version and last updated time are updated during the creation so we need to update the original too for comparison // Job schedule interval will have a dynamic start time - rollup = rollup.copy( - schemaVersion = indexedRollup.schemaVersion, - jobLastUpdatedTime = indexedRollup.jobLastUpdatedTime, - jobSchedule = indexedRollup.jobSchedule, - // roles are deprecated and will not be stored or returned - roles = listOf(), - // user information is hidden and not returned - user = null - ) + rollup = + rollup.copy( + schemaVersion = indexedRollup.schemaVersion, + jobLastUpdatedTime = indexedRollup.jobLastUpdatedTime, + jobSchedule = indexedRollup.jobSchedule, + // roles are deprecated and will not be stored or returned + roles = listOf(), + // user information is hidden and not returned + user = null, + ) assertEquals("Indexed and retrieved rollup differ", rollup, indexedRollup) } diff --git 
a/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestIndexRollupActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestIndexRollupActionIT.kt index 4ae34eab6..c759d4f06 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestIndexRollupActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestIndexRollupActionIT.kt @@ -7,6 +7,7 @@ package org.opensearch.indexmanagement.rollup.resthandler import org.opensearch.client.ResponseException import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.ROLLUP_JOBS_BASE_URI import org.opensearch.indexmanagement.common.model.dimension.DateHistogram @@ -27,7 +28,6 @@ import org.opensearch.indexmanagement.rollup.randomRollupMetrics import org.opensearch.indexmanagement.util.NO_ID import org.opensearch.indexmanagement.util._ID import org.opensearch.indexmanagement.util._SEQ_NO -import org.opensearch.core.rest.RestStatus import org.opensearch.test.OpenSearchTestCase import org.opensearch.test.junit.annotations.TestLogging import java.util.Locale @@ -35,7 +35,6 @@ import java.util.Locale @TestLogging(value = "level:DEBUG", reason = "Debugging tests") @Suppress("UNCHECKED_CAST") class RestIndexRollupActionIT : RollupRestAPITestCase() { - private val testName = javaClass.simpleName.lowercase(Locale.ROOT) @Throws(Exception::class) @@ -79,11 +78,12 @@ class RestIndexRollupActionIT : RollupRestAPITestCase() { val response = client().makeRequest("GET", "/$INDEX_MANAGEMENT_INDEX/_mapping") val parserMap = createParser(XContentType.JSON.xContent(), response.entity.content).map() as Map<String, Map<String, Any>> val mappingsMap = parserMap[INDEX_MANAGEMENT_INDEX]!!["mappings"] as Map<String, Any> - val expected = createParser( - XContentType.JSON.xContent(), - javaClass.classLoader.getResource("mappings/opendistro-ism-config.json") - .readText() - ) + val expected = + createParser( + XContentType.JSON.xContent(), + javaClass.classLoader.getResource("mappings/opendistro-ism-config.json") + .readText(), + ) val expectedMap = expected.map() assertEquals("Mappings are different", expectedMap, mappingsMap) @@ -97,7 +97,7 @@ class RestIndexRollupActionIT : RollupRestAPITestCase() { client().makeRequest( "PUT", "$ROLLUP_JOBS_BASE_URI/${rollup.id}?refresh=true&if_seq_no=10251989&if_primary_term=2342", - emptyMap(), rollup.toHttpEntity() + emptyMap(), rollup.toHttpEntity(), ) fail("expected 409 ResponseException") } catch (e: ResponseException) { @@ -108,11 +108,12 @@ class RestIndexRollupActionIT : RollupRestAPITestCase() { @Throws(Exception::class) fun `test update rollup with correct seq_no and primary_term`() { val rollup = createRandomRollup() - val updateResponse = client().makeRequest( - "PUT", - "$ROLLUP_JOBS_BASE_URI/${rollup.id}?refresh=true&if_seq_no=${rollup.seqNo}&if_primary_term=${rollup.primaryTerm}", - emptyMap(), rollup.toHttpEntity() - ) + val updateResponse = + client().makeRequest( + "PUT", + "$ROLLUP_JOBS_BASE_URI/${rollup.id}?refresh=true&if_seq_no=${rollup.seqNo}&if_primary_term=${rollup.primaryTerm}", + emptyMap(), rollup.toHttpEntity(), + ) assertEquals("Update rollup failed", RestStatus.OK, updateResponse.restStatus()) val responseBody = updateResponse.asMap() @@ -130,22 +131,25 @@ class RestIndexRollupActionIT : RollupRestAPITestCase() { client().makeRequest( "PUT",
"$ROLLUP_JOBS_BASE_URI/${rollup.id}?refresh=true&if_seq_no=${rollup.seqNo}&if_primary_term=${rollup.primaryTerm}", - emptyMap(), rollup.copy(sourceIndex = "something_different").toHttpEntity() + emptyMap(), rollup.copy(sourceIndex = "something_different").toHttpEntity(), ) fail("Expected 400 Method BAD_REQUEST response") } catch (e: ResponseException) { assertEquals("Unexpected status", RestStatus.BAD_REQUEST, e.response.restStatus()) val actualMessage = e.response.asMap() - val expectedErrorMessage = mapOf( - "error" to mapOf( - "root_cause" to listOf>( - mapOf("type" to "status_exception", "reason" to "Not allowed to modify [source_index]") - ), - "type" to "status_exception", - "reason" to "Not allowed to modify [source_index]" - ), - "status" to 400 - ) + val expectedErrorMessage = + mapOf( + "error" to + mapOf( + "root_cause" to + listOf>( + mapOf("type" to "status_exception", "reason" to "Not allowed to modify [source_index]"), + ), + "type" to "status_exception", + "reason" to "Not allowed to modify [source_index]", + ), + "status" to 400, + ) assertEquals(expectedErrorMessage, actualMessage) } } @@ -157,22 +161,25 @@ class RestIndexRollupActionIT : RollupRestAPITestCase() { client().makeRequest( "PUT", "$ROLLUP_JOBS_BASE_URI/${rollup.id}?refresh=true&if_seq_no=${rollup.seqNo}&if_primary_term=${rollup.primaryTerm}", - emptyMap(), rollup.copy(targetIndex = "something_different").toHttpEntity() + emptyMap(), rollup.copy(targetIndex = "something_different").toHttpEntity(), ) fail("Expected 400 Method BAD_REQUEST response") } catch (e: ResponseException) { assertEquals("Unexpected status", RestStatus.BAD_REQUEST, e.response.restStatus()) val actualMessage = e.response.asMap() - val expectedErrorMessage = mapOf( - "error" to mapOf( - "root_cause" to listOf>( - mapOf("type" to "status_exception", "reason" to "Not allowed to modify [target_index]") - ), - "type" to "status_exception", - "reason" to "Not allowed to modify [target_index]" - ), - "status" to 400 - ) + val expectedErrorMessage = + mapOf( + "error" to + mapOf( + "root_cause" to + listOf>( + mapOf("type" to "status_exception", "reason" to "Not allowed to modify [target_index]"), + ), + "type" to "status_exception", + "reason" to "Not allowed to modify [target_index]", + ), + "status" to 400, + ) assertEquals(expectedErrorMessage, actualMessage) } } @@ -184,22 +191,25 @@ class RestIndexRollupActionIT : RollupRestAPITestCase() { client().makeRequest( "PUT", "$ROLLUP_JOBS_BASE_URI/${rollup.id}?refresh=true&if_seq_no=${rollup.seqNo}&if_primary_term=${rollup.primaryTerm}", - emptyMap(), rollup.copy(continuous = !rollup.continuous).toHttpEntity() + emptyMap(), rollup.copy(continuous = !rollup.continuous).toHttpEntity(), ) fail("Expected 400 Method BAD_REQUEST response") } catch (e: ResponseException) { assertEquals("Unexpected status", RestStatus.BAD_REQUEST, e.response.restStatus()) val actualMessage = e.response.asMap() - val expectedErrorMessage = mapOf( - "error" to mapOf( - "root_cause" to listOf>( - mapOf("type" to "status_exception", "reason" to "Not allowed to modify [continuous]") - ), - "type" to "status_exception", - "reason" to "Not allowed to modify [continuous]" - ), - "status" to 400 - ) + val expectedErrorMessage = + mapOf( + "error" to + mapOf( + "root_cause" to + listOf>( + mapOf("type" to "status_exception", "reason" to "Not allowed to modify [continuous]"), + ), + "type" to "status_exception", + "reason" to "Not allowed to modify [continuous]", + ), + "status" to 400, + ) assertEquals(expectedErrorMessage, 
actualMessage) } } @@ -208,33 +218,37 @@ fun `test updating rollup dimensions`() { try { val dimensions = randomRollupDimensions() - val newDimensions: List<Dimension> = dimensions.map { - when (it.type) { - Dimension.Type.DATE_HISTOGRAM -> (it as DateHistogram).copy(timezone = OpenSearchTestCase.randomZone()) - Dimension.Type.HISTOGRAM -> (it as Histogram).copy(interval = 5.5) - Dimension.Type.TERMS -> (it as Terms).copy(targetField = "some_other_target_field") + val newDimensions: List<Dimension> = + dimensions.map { + when (it.type) { + Dimension.Type.DATE_HISTOGRAM -> (it as DateHistogram).copy(timezone = OpenSearchTestCase.randomZone()) + Dimension.Type.HISTOGRAM -> (it as Histogram).copy(interval = 5.5) + Dimension.Type.TERMS -> (it as Terms).copy(targetField = "some_other_target_field") + } } - } val rollup = createRollup(rollup = randomRollup().copy(dimensions = dimensions), rollupId = "$testName-1") client().makeRequest( "PUT", "$ROLLUP_JOBS_BASE_URI/${rollup.id}?refresh=true&if_seq_no=${rollup.seqNo}&if_primary_term=${rollup.primaryTerm}", - emptyMap(), rollup.copy(dimensions = newDimensions).toHttpEntity() + emptyMap(), rollup.copy(dimensions = newDimensions).toHttpEntity(), ) fail("Expected 400 Method BAD_REQUEST response") } catch (e: ResponseException) { assertEquals("Unexpected status", RestStatus.BAD_REQUEST, e.response.restStatus()) val actualMessage = e.response.asMap() - val expectedErrorMessage = mapOf( - "error" to mapOf( - "root_cause" to listOf<Map<String, Any>>( - mapOf("type" to "status_exception", "reason" to "Not allowed to modify [dimensions]") - ), - "type" to "status_exception", - "reason" to "Not allowed to modify [dimensions]" - ), - "status" to 400 - ) + val expectedErrorMessage = + mapOf( + "error" to + mapOf( + "root_cause" to + listOf<Map<String, Any>>( + mapOf("type" to "status_exception", "reason" to "Not allowed to modify [dimensions]"), + ), + "type" to "status_exception", + "reason" to "Not allowed to modify [dimensions]", + ), + "status" to 400, + ) assertEquals(expectedErrorMessage, actualMessage) } } @@ -243,39 +257,44 @@ fun `test updating rollup metrics`() { try { val metrics = listOf(randomRollupMetrics(), randomRollupMetrics()) - val newMetrics: List<RollupMetrics> = metrics.map { - it.copy( - metrics = it.metrics.map { - when (it.type) { - Metric.Type.AVERAGE -> Max() - Metric.Type.MAX -> Min() - Metric.Type.MIN -> Sum() - Metric.Type.SUM -> ValueCount() - Metric.Type.VALUE_COUNT -> Average() - } - } - ) - } + val newMetrics: List<RollupMetrics> = + metrics.map { + it.copy( + metrics = + it.metrics.map { + when (it.type) { + Metric.Type.AVERAGE -> Max() + Metric.Type.MAX -> Min() + Metric.Type.MIN -> Sum() + Metric.Type.SUM -> ValueCount() + Metric.Type.VALUE_COUNT -> Average() + } + }, + ) + } val rollup = createRollup(rollup = randomRollup().copy(metrics = metrics), rollupId = "$testName-2") client().makeRequest( "PUT", "$ROLLUP_JOBS_BASE_URI/${rollup.id}?refresh=true&if_seq_no=${rollup.seqNo}&if_primary_term=${rollup.primaryTerm}", - emptyMap(), rollup.copy(metrics = newMetrics).toHttpEntity() + emptyMap(), rollup.copy(metrics = newMetrics).toHttpEntity(), ) fail("Expected 400 Method BAD_REQUEST response") } catch (e: ResponseException) { assertEquals("Unexpected status", RestStatus.BAD_REQUEST, e.response.restStatus()) val actualMessage = e.response.asMap() - val expectedErrorMessage = mapOf( - "error" to mapOf( - "root_cause" to listOf<Map<String, Any>>( - mapOf("type" to "status_exception", "reason" to "Not allowed to
modify [metrics]") - ), - "type" to "status_exception", - "reason" to "Not allowed to modify [metrics]" - ), - "status" to 400 - ) + val expectedErrorMessage = + mapOf( + "error" to + mapOf( + "root_cause" to + listOf>( + mapOf("type" to "status_exception", "reason" to "Not allowed to modify [metrics]"), + ), + "type" to "status_exception", + "reason" to "Not allowed to modify [metrics]", + ), + "status" to 400, + ) assertEquals(expectedErrorMessage, actualMessage) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestStartRollupActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestStartRollupActionIT.kt index d2442995d..45a82ef39 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestStartRollupActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestStartRollupActionIT.kt @@ -7,6 +7,7 @@ package org.opensearch.indexmanagement.rollup.resthandler import org.opensearch.client.ResponseException import org.opensearch.common.settings.Settings +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementIndices import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.ROLLUP_JOBS_BASE_URI @@ -19,13 +20,11 @@ import org.opensearch.indexmanagement.rollup.model.RollupMetadata import org.opensearch.indexmanagement.rollup.randomRollup import org.opensearch.indexmanagement.waitFor import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule -import org.opensearch.core.rest.RestStatus import java.time.Instant import java.time.temporal.ChronoUnit import java.util.Locale class RestStartRollupActionIT : RollupRestAPITestCase() { - private val testName = javaClass.simpleName.lowercase(Locale.ROOT) @Throws(Exception::class) @@ -81,24 +80,25 @@ class RestStartRollupActionIT : RollupRestAPITestCase() { @Throws(Exception::class) fun `test starting a failed rollup`() { - val rollup = Rollup( - id = "restart_failed_rollup", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = "source_restart_failed_rollup", - targetIndex = "target_restart_failed_rollup", - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = false, - dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h")), - metrics = emptyList() - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "restart_failed_rollup", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = "source_restart_failed_rollup", + targetIndex = "target_restart_failed_rollup", + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = false, + dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h")), + metrics = emptyList(), + ).let { createRollup(it, it.id) } // This should fail because we did not create a source index updateRollupStartTime(rollup) @@ -143,24 +143,25 @@ class RestStartRollupActionIT : RollupRestAPITestCase() { @Throws(Exception::class) fun `test starting a finished rollup`() { 
generateNYCTaxiData("source_restart_finished_rollup") - val rollup = Rollup( - id = "restart_finished_rollup", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = "source_restart_finished_rollup", - targetIndex = "target_restart_finished_rollup", - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = false, - dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h")), - metrics = emptyList() - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "restart_finished_rollup", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = "source_restart_finished_rollup", + targetIndex = "target_restart_finished_rollup", + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = false, + dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h")), + metrics = emptyList(), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollup) var firstRollupsIndexed = 0L @@ -201,31 +202,33 @@ class RestStartRollupActionIT : RollupRestAPITestCase() { // setup ism-config index with multiple primary shards deleteIndex(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX) val mapping = IndexManagementIndices.indexManagementMappings.trim().trimStart('{').trimEnd('}') - val settings = Settings.builder() - .put(INDEX_HIDDEN, true) - .put(INDEX_NUMBER_OF_SHARDS, 5) - .build() + val settings = + Settings.builder() + .put(INDEX_HIDDEN, true) + .put(INDEX_NUMBER_OF_SHARDS, 5) + .build() createIndex(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, settings, mapping) assertIndexExists(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX) - val rollup = Rollup( - id = "multi_shard_start", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = "source_multi_shard_start", - targetIndex = "target_multi_shard_start", - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = false, - dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h")), - metrics = emptyList() - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "multi_shard_start", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = "source_multi_shard_start", + targetIndex = "target_multi_shard_start", + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = false, + dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h")), + metrics = emptyList(), + ).let { createRollup(it, it.id) } // The updateRollupStartTime call can be missed if the job scheduler hasn't started listening to the new index yet, // sleep a bit to let it initialize diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestStopRollupActionIT.kt 
b/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestStopRollupActionIT.kt index 4c6fa79a2..d4eada87d 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestStopRollupActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/resthandler/RestStopRollupActionIT.kt @@ -7,6 +7,7 @@ package org.opensearch.indexmanagement.rollup.resthandler import org.opensearch.client.ResponseException import org.opensearch.common.settings.Settings +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementIndices import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.ROLLUP_JOBS_BASE_URI @@ -21,13 +22,11 @@ import org.opensearch.indexmanagement.rollup.model.RollupMetadata import org.opensearch.indexmanagement.rollup.randomRollup import org.opensearch.indexmanagement.waitFor import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule -import org.opensearch.core.rest.RestStatus import java.time.Instant import java.time.temporal.ChronoUnit import java.util.Locale class RestStopRollupActionIT : RollupRestAPITestCase() { - private val testName = javaClass.simpleName.lowercase(Locale.ROOT) @Throws(Exception::class) @@ -69,17 +68,18 @@ class RestStopRollupActionIT : RollupRestAPITestCase() { @Throws(Exception::class) fun `test stopping a finished rollup`() { // Create a rollup that finishes - val rollup = createRollup( - randomRollup() - .copy( - continuous = false, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null - ), - rollupId = "$testName-3" - ) + val rollup = + createRollup( + randomRollup() + .copy( + continuous = false, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + ), + rollupId = "$testName-3", + ) createRollupSourceIndex(rollup) updateRollupStartTime(rollup) @@ -109,14 +109,15 @@ class RestStopRollupActionIT : RollupRestAPITestCase() { @Throws(Exception::class) fun `test stopping a failed rollup`() { // Create a rollup that will fail because no source index - val rollup = randomRollup().copy( - id = "test_stopping_a_failed_rollup", - continuous = false, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null - ).let { createRollup(it, it.id) } + val rollup = + randomRollup().copy( + id = "test_stopping_a_failed_rollup", + continuous = false, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + ).let { createRollup(it, it.id) } updateRollupStartTime(rollup) // Assert its in failed @@ -143,17 +144,18 @@ class RestStopRollupActionIT : RollupRestAPITestCase() { @Throws(Exception::class) fun `test stopping a retry rollup`() { // Create a rollup job - val rollup = createRollup( - randomRollup() - .copy( - continuous = false, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - enabled = true, - jobEnabledTime = Instant.now(), - metadataID = null - ), - rollupId = "$testName-4" - ) + val rollup = + createRollup( + randomRollup() + .copy( + continuous = false, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + enabled = true, + jobEnabledTime = Instant.now(), + metadataID = null, + ), + rollupId = "$testName-4", + ) // Force 
rollup to execute which should fail as we did not create a source index updateRollupStartTime(rollup) @@ -195,28 +197,30 @@ class RestStopRollupActionIT : RollupRestAPITestCase() { @Throws(Exception::class) fun `test stopping rollup with metadata`() { generateNYCTaxiData("source") - val rollup = Rollup( - id = "basic_term_query", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = "source", - targetIndex = "target", - metadataID = null, - roles = emptyList(), - pageSize = 10, - delay = 0, - continuous = true, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = emptyList() - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "basic_term_query", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = "source", + targetIndex = "target", + metadataID = null, + roles = emptyList(), + pageSize = 10, + delay = 0, + continuous = true, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), + ), + metrics = emptyList(), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollup) @@ -254,36 +258,39 @@ class RestStopRollupActionIT : RollupRestAPITestCase() { // setup ism-config index with multiple primary shards deleteIndex(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX) val mapping = IndexManagementIndices.indexManagementMappings.trim().trimStart('{').trimEnd('}') - val settings = Settings.builder() - .put(INDEX_HIDDEN, true) - .put(INDEX_NUMBER_OF_SHARDS, 5) - .build() + val settings = + Settings.builder() + .put(INDEX_HIDDEN, true) + .put(INDEX_NUMBER_OF_SHARDS, 5) + .build() createIndex(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX, settings, mapping) assertIndexExists(IndexManagementPlugin.INDEX_MANAGEMENT_INDEX) generateNYCTaxiData("source_multi_shard_stop") - val rollup = Rollup( - id = "multi_shard_stop", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic search test", - sourceIndex = "source_multi_shard_stop", - targetIndex = "target_multi_shard_stop", - metadataID = null, - roles = emptyList(), - pageSize = 1, - delay = 0, - continuous = true, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = emptyList() - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "multi_shard_stop", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic search test", + sourceIndex = "source_multi_shard_stop", + targetIndex = "target_multi_shard_stop", + metadataID = null, + roles = emptyList(), + pageSize = 1, + delay = 0, + continuous = true, + dimensions = + listOf( + DateHistogram(sourceField = 
"tpep_pickup_datetime", fixedInterval = "1h"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), + ), + metrics = emptyList(), + ).let { createRollup(it, it.id) } // The updateRollupStartTime call can be missed if the job scheduler hasn't started listening to the new index yet, // sleep a bit to let it initialize diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/runner/RollupRunnerIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/runner/RollupRunnerIT.kt index 73e1ca97b..09dedecb8 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/runner/RollupRunnerIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/runner/RollupRunnerIT.kt @@ -8,6 +8,7 @@ package org.opensearch.indexmanagement.rollup.runner import org.apache.hc.core5.http.ContentType import org.apache.hc.core5.http.io.entity.StringEntity import org.opensearch.common.settings.Settings +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.ROLLUP_JOBS_BASE_URI import org.opensearch.indexmanagement.common.model.dimension.DateHistogram @@ -30,28 +31,27 @@ import org.opensearch.indexmanagement.rollup.settings.RollupSettings.Companion.R import org.opensearch.indexmanagement.waitFor import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule import org.opensearch.rest.RestRequest -import org.opensearch.core.rest.RestStatus import java.time.Instant import java.time.temporal.ChronoUnit import java.util.Collections.emptyMap import java.util.Locale class RollupRunnerIT : RollupRestTestCase() { - private val testName = javaClass.simpleName.lowercase(Locale.ROOT) fun `test metadata is created for rollup job when none exists`() { val indexName = "test_index_runner_first" // Define rollup - var rollup = randomRollup().copy( - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobEnabledTime = Instant.now(), - sourceIndex = indexName, - metadataID = null, - continuous = false - ) + var rollup = + randomRollup().copy( + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobEnabledTime = Instant.now(), + sourceIndex = indexName, + metadataID = null, + continuous = false, + ) // Create source index createRollupSourceIndex(rollup) @@ -85,26 +85,28 @@ class RollupRunnerIT : RollupRestTestCase() { generateNYCTaxiData(sourceIdxTestName) - val rollup = Rollup( - id = "rollup_test", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic stats test", - sourceIndex = sourceIdxTestName, - targetIndex = targetIdxTestName, - metadataID = null, - roles = emptyList(), - pageSize = 100, - delay = 0, - continuous = false, - dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h")), - metrics = listOf( - RollupMetrics(sourceField = propertyName, targetField = propertyName, metrics = listOf(Average())) - ) - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "rollup_test", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic stats test", + sourceIndex = sourceIdxTestName, + targetIndex = targetIdxTestName, + metadataID = null, + roles = 
emptyList(), + pageSize = 100, + delay = 0, + continuous = false, + dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h")), + metrics = + listOf( + RollupMetrics(sourceField = propertyName, targetField = propertyName, metrics = listOf(Average())), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollup) @@ -117,21 +119,22 @@ class RollupRunnerIT : RollupRestTestCase() { assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) // Term query - val req = """ - { - "size": 0, - "query": { - "match_all": {} - }, - "aggs": { - "$avgMetricName": { - "avg": { - "field": "$propertyName" + val req = + """ + { + "size": 0, + "query": { + "match_all": {} + }, + "aggs": { + "$avgMetricName": { + "avg": { + "field": "$propertyName" + } } } } - } - """.trimIndent() + """.trimIndent() var rawRes = client().makeRequest(RestRequest.Method.POST.name, "/$sourceIdxTestName/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) var rollupRes = client().makeRequest(RestRequest.Method.POST.name, "/$targetIdxTestName/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -141,7 +144,7 @@ class RollupRunnerIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same avg results", rawAggRes.getValue(avgMetricName)["value"], - rollupAggRes.getValue(avgMetricName)["value"] + rollupAggRes.getValue(avgMetricName)["value"], ) } } @@ -150,16 +153,17 @@ class RollupRunnerIT : RollupRestTestCase() { val dataStreamName = "test-data-stream" // Define the rollup job - var rollup = randomRollup().copy( - id = "$testName-1", - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobEnabledTime = Instant.now(), - sourceIndex = dataStreamName, - targetIndex = "$dataStreamName-rollup", - metadataID = null, - continuous = false - ) + var rollup = + randomRollup().copy( + id = "$testName-1", + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobEnabledTime = Instant.now(), + sourceIndex = dataStreamName, + targetIndex = "$dataStreamName-rollup", + metadataID = null, + continuous = false, + ) // Create the source data stream client().makeRequest( @@ -170,8 +174,8 @@ class RollupRunnerIT : RollupRestTestCase() { "\"index_patterns\": [ \"$dataStreamName\" ], " + "\"data_stream\": { }, " + "\"template\": { \"mappings\": { ${createRollupMappingString(rollup)} } } }", - ContentType.APPLICATION_JSON - ) + ContentType.APPLICATION_JSON, + ), ) client().makeRequest("PUT", "/_data_stream/$dataStreamName") @@ -201,16 +205,17 @@ class RollupRunnerIT : RollupRestTestCase() { val indexName = "test_index_runner_second" // Define rollup - var rollup = randomRollup().copy( - id = "metadata_set_failed_id_doc_not_exist", - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobEnabledTime = Instant.now(), - sourceIndex = indexName, - targetIndex = "${indexName}_target", - metadataID = null, - continuous = false - ) + var rollup = + randomRollup().copy( + id = "metadata_set_failed_id_doc_not_exist", + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobEnabledTime = Instant.now(), + sourceIndex = indexName, + targetIndex = "${indexName}_target", + metadataID = null, + continuous = false, + ) // Create source index createRollupSourceIndex(rollup) @@ -227,17 +232,18 @@ class RollupRunnerIT : 
RollupRestTestCase() { updateRollupStartTime(rollup) var previousRollupMetadata: RollupMetadata? = null - rollup = waitFor { - val rollupJob = getRollup(rollupId = rollup.id) - assertNotNull("Rollup job not found", rollupJob) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - assertFalse("Rollup job is still enabled", rollupJob.enabled) - - previousRollupMetadata = getRollupMetadata(rollupJob.metadataID!!) - assertNotNull("Rollup metadata not found", previousRollupMetadata) - assertEquals("Unexpected metadata status", RollupMetadata.Status.FINISHED, previousRollupMetadata!!.status) - rollupJob - } + rollup = + waitFor { + val rollupJob = getRollup(rollupId = rollup.id) + assertNotNull("Rollup job not found", rollupJob) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + assertFalse("Rollup job is still enabled", rollupJob.enabled) + + previousRollupMetadata = getRollupMetadata(rollupJob.metadataID!!) + assertNotNull("Rollup metadata not found", previousRollupMetadata) + assertEquals("Unexpected metadata status", RollupMetadata.Status.FINISHED, previousRollupMetadata!!.status) + rollupJob + } // Delete rollup metadata assertNotNull("Previous rollup metadata was not saved", previousRollupMetadata) deleteRollupMetadata(previousRollupMetadata!!.id) @@ -246,7 +252,7 @@ class RollupRunnerIT : RollupRestTestCase() { client().makeRequest( "PUT", "$ROLLUP_JOBS_BASE_URI/${rollup.id}?if_seq_no=${rollup.seqNo}&if_primary_term=${rollup.primaryTerm}", - emptyMap(), rollup.copy(enabled = true, jobEnabledTime = Instant.now()).toHttpEntity() + emptyMap(), rollup.copy(enabled = true, jobEnabledTime = Instant.now()).toHttpEntity(), ) updateRollupStartTime(rollup) @@ -271,20 +277,22 @@ class RollupRunnerIT : RollupRestTestCase() { // Setting the interval to something large to minimize this scenario. 
fun `test no-op execution when a full window of time to rollup is not available`() { val indexName = "test_index_runner_third" - var rollup = randomRollup().copy( - id = "$testName-2", - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobEnabledTime = Instant.now(), - sourceIndex = indexName, - metadataID = null, - continuous = true, - dimensions = listOf( - randomCalendarDateHistogram().copy( - calendarInterval = "1y" - ) + var rollup = + randomRollup().copy( + id = "$testName-2", + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobEnabledTime = Instant.now(), + sourceIndex = indexName, + metadataID = null, + continuous = true, + dimensions = + listOf( + randomCalendarDateHistogram().copy( + calendarInterval = "1y", + ), + ), ) - ) // Create source index createRollupSourceIndex(rollup) @@ -328,15 +336,16 @@ class RollupRunnerIT : RollupRestTestCase() { val indexName = "test_index_runner_fourth" // Define rollup - var rollup = randomRollup().copy( - id = "$testName-3", - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobEnabledTime = Instant.now(), - sourceIndex = indexName, - metadataID = null, - continuous = true - ) + var rollup = + randomRollup().copy( + id = "$testName-3", + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobEnabledTime = Instant.now(), + sourceIndex = indexName, + metadataID = null, + continuous = true, + ) // Create rollup job rollup = createRollup(rollup = rollup, rollupId = rollup.id) @@ -367,100 +376,109 @@ class RollupRunnerIT : RollupRestTestCase() { generateNYCTaxiData("source_runner_fifth") - val rollup = Rollup( - id = "basic_stats_check_runner_fifth", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic stats test", - sourceIndex = "source_runner_fifth", - targetIndex = "target_runner_fifth", - metadataID = null, - roles = emptyList(), - pageSize = 100, - delay = 0, - continuous = false, - dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h")), - metrics = listOf( - RollupMetrics(sourceField = "passenger_count", targetField = "passenger_count", metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average())) - ) - ).let { createRollup(it, it.id) } - - val secondRollup = Rollup( - id = "all_inclusive_intervals_runner_fifth", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic stats test", - sourceIndex = "source_runner_fifth", - targetIndex = "target_runner_fifth", - metadataID = null, - roles = emptyList(), - pageSize = 100, - delay = 0, - continuous = false, - dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "100d")), - metrics = listOf( - RollupMetrics(sourceField = "passenger_count", targetField = "passenger_count", metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average())) - ) - ).let { createRollup(it, it.id) } - - val thirdRollup = Rollup( - id = "second_interval_runner_fifth", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic 1s test", - 
sourceIndex = "source_runner_fifth", - targetIndex = "target_runner_fifth", - metadataID = null, - roles = emptyList(), - pageSize = 100, - delay = 0, - continuous = false, - dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1s")), - metrics = listOf( - RollupMetrics(sourceField = "passenger_count", targetField = "passenger_count", metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average())) - ) - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "basic_stats_check_runner_fifth", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic stats test", + sourceIndex = "source_runner_fifth", + targetIndex = "target_runner_fifth", + metadataID = null, + roles = emptyList(), + pageSize = 100, + delay = 0, + continuous = false, + dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h")), + metrics = + listOf( + RollupMetrics(sourceField = "passenger_count", targetField = "passenger_count", metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average())), + ), + ).let { createRollup(it, it.id) } + + val secondRollup = + Rollup( + id = "all_inclusive_intervals_runner_fifth", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic stats test", + sourceIndex = "source_runner_fifth", + targetIndex = "target_runner_fifth", + metadataID = null, + roles = emptyList(), + pageSize = 100, + delay = 0, + continuous = false, + dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "100d")), + metrics = + listOf( + RollupMetrics(sourceField = "passenger_count", targetField = "passenger_count", metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average())), + ), + ).let { createRollup(it, it.id) } + + val thirdRollup = + Rollup( + id = "second_interval_runner_fifth", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic 1s test", + sourceIndex = "source_runner_fifth", + targetIndex = "target_runner_fifth", + metadataID = null, + roles = emptyList(), + pageSize = 100, + delay = 0, + continuous = false, + dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1s")), + metrics = + listOf( + RollupMetrics(sourceField = "passenger_count", targetField = "passenger_count", metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average())), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollup) waitFor { assertTrue("Target rollup index was not created", indexExists(rollup.targetIndex)) } - val finishedRollup = waitFor() { - val rollupJob = getRollup(rollupId = rollup.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) - assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) - rollupJob - } + val finishedRollup = + waitFor { + val rollupJob = getRollup(rollupId = rollup.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) 
+ assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) + rollupJob + } updateRollupStartTime(secondRollup) - val secondFinishedRollup = waitFor() { - val rollupJob = getRollup(rollupId = secondRollup.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) - assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) - rollupJob - } + val secondFinishedRollup = + waitFor { + val rollupJob = getRollup(rollupId = secondRollup.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) + assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) + rollupJob + } updateRollupStartTime(thirdRollup) - val thirdFinishedRollup = waitFor() { - val rollupJob = getRollup(rollupId = thirdRollup.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) - assertEquals("Rollup is not finished $rollupMetadata", RollupMetadata.Status.FINISHED, rollupMetadata.status) - rollupJob - } + val thirdFinishedRollup = + waitFor { + val rollupJob = getRollup(rollupId = thirdRollup.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) + assertEquals("Rollup is not finished $rollupMetadata", RollupMetadata.Status.FINISHED, rollupMetadata.status) + rollupJob + } refreshAllIndices() @@ -510,52 +528,56 @@ class RollupRunnerIT : RollupRestTestCase() { // to rollup a single document per execution which gives us enough time to change the pageSize to something large generateNYCTaxiData("source_runner_sixth") - val rollup = Rollup( - id = "page_size_runner_sixth", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic change of page size", - sourceIndex = "source_runner_sixth", - targetIndex = "target_runner_sixth", - metadataID = null, - roles = emptyList(), - pageSize = 1, - delay = 0, - continuous = false, - dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1s")), - metrics = listOf( - RollupMetrics(sourceField = "passenger_count", targetField = "passenger_count", metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average())) - ) - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "page_size_runner_sixth", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic change of page size", + sourceIndex = "source_runner_sixth", + targetIndex = "target_runner_sixth", + metadataID = null, + roles = emptyList(), + pageSize = 1, + delay = 0, + continuous = false, + dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1s")), + metrics = + listOf( + RollupMetrics(sourceField = "passenger_count", targetField = "passenger_count", metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average())), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollup) waitFor { assertTrue("Target rollup index was not created", indexExists(rollup.targetIndex)) } - val startedRollup = waitFor { - 
val rollupJob = getRollup(rollupId = rollup.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) - assertEquals("Rollup is not started", RollupMetadata.Status.STARTED, rollupMetadata.status) - rollupJob - } + val startedRollup = + waitFor { + val rollupJob = getRollup(rollupId = rollup.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) + assertEquals("Rollup is not started", RollupMetadata.Status.STARTED, rollupMetadata.status) + rollupJob + } client().makeRequest( "PUT", "$ROLLUP_JOBS_BASE_URI/${startedRollup.id}?if_seq_no=${startedRollup.seqNo}&if_primary_term=${startedRollup.primaryTerm}", - emptyMap(), rollup.copy(pageSize = 1000).toHttpEntity() + emptyMap(), rollup.copy(pageSize = 1000).toHttpEntity(), ) - val finishedRollup = waitFor { - val rollupJob = getRollup(rollupId = rollup.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) - assertEquals("Rollup is not started", RollupMetadata.Status.FINISHED, rollupMetadata.status) - rollupJob - } + val finishedRollup = + waitFor { + val rollupJob = getRollup(rollupId = rollup.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) + assertEquals("Rollup is not started", RollupMetadata.Status.FINISHED, rollupMetadata.status) + rollupJob + } val rollupMetadataID = finishedRollup.metadataID!! val rollupMetadata = getRollupMetadata(rollupMetadataID) @@ -569,24 +591,25 @@ class RollupRunnerIT : RollupRestTestCase() { // Set the search max buckets to 50 and rollup search retry count to 0 so it won't retry on failure. This is to confirm first that yes we do get an error and moved into failed state. 
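The comment above sets up a deliberate failure: search.max_buckets is shrunk to 50 and the rollup search retry count is zeroed so the oversized aggregation page fails instead of being retried. A minimal standalone sketch of that settings call using the JDK HTTP client rather than the test framework's client(); the base URL and the literal key behind ROLLUP_SEARCH_BACKOFF_COUNT.key are assumptions supplied by the caller.

import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse

// Lowers search.max_buckets and disables rollup search retries so an
// over-sized aggregation page fails fast instead of being retried.
// backoffCountKey stands in for ROLLUP_SEARCH_BACKOFF_COUNT.key from the plugin.
fun putFailFastSettings(baseUrl: String, backoffCountKey: String): Int {
    val body = """{"persistent":{"search.max_buckets":"50","$backoffCountKey":0}}"""
    val request = HttpRequest.newBuilder(URI.create("$baseUrl/_cluster/settings"))
        .header("Content-Type", "application/json")
        .PUT(HttpRequest.BodyPublishers.ofString(body))
        .build()
    val response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString())
    return response.statusCode() // 200 is expected when the settings are accepted
}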
client().makeRequest("PUT", "/_cluster/settings", StringEntity("""{"persistent":{"search.max_buckets":"50", "${ROLLUP_SEARCH_BACKOFF_COUNT.key}": 0 }}""", ContentType.APPLICATION_JSON)) - val rollup = Rollup( - id = "page_size_no_retry_first_runner_seventh", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic page size", - sourceIndex = "source_runner_seventh", - targetIndex = "target_runner_seventh", - metadataID = null, - roles = emptyList(), - pageSize = 100, - delay = 0, - continuous = false, - dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1m")), - metrics = listOf(RollupMetrics(sourceField = "passenger_count", targetField = "passenger_count", metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()))) - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "page_size_no_retry_first_runner_seventh", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic page size", + sourceIndex = "source_runner_seventh", + targetIndex = "target_runner_seventh", + metadataID = null, + roles = emptyList(), + pageSize = 100, + delay = 0, + continuous = false, + dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1m")), + metrics = listOf(RollupMetrics(sourceField = "passenger_count", targetField = "passenger_count", metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()))), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollup) @@ -604,24 +627,25 @@ class RollupRunnerIT : RollupRestTestCase() { client().makeRequest("PUT", "/_cluster/settings", StringEntity("""{"persistent":{"search.max_buckets":"50", "${ROLLUP_SEARCH_BACKOFF_COUNT.key}": 5 }}""", ContentType.APPLICATION_JSON)) - val secondRollup = Rollup( - id = "page_size_with_retry_second_runner_seventh", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic page size", - sourceIndex = "source_runner_seventh", - targetIndex = "new_target_runner_seventh", - metadataID = null, - roles = emptyList(), - pageSize = 100, - delay = 0, - continuous = false, - dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1m")), - metrics = listOf(RollupMetrics(sourceField = "passenger_count", targetField = "passenger_count", metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()))) - ).let { createRollup(it, it.id) } + val secondRollup = + Rollup( + id = "page_size_with_retry_second_runner_seventh", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic page size", + sourceIndex = "source_runner_seventh", + targetIndex = "new_target_runner_seventh", + metadataID = null, + roles = emptyList(), + pageSize = 100, + delay = 0, + continuous = false, + dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1m")), + metrics = listOf(RollupMetrics(sourceField = "passenger_count", targetField = "passenger_count", metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()))), + ).let { 
createRollup(it, it.id) } updateRollupStartTime(secondRollup) @@ -641,21 +665,23 @@ class RollupRunnerIT : RollupRestTestCase() { val indexName = "test_index_runner_eighth" val delay: Long = 7_500 // Define rollup - var rollup = randomRollup().copy( - id = "$testName-4", - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobEnabledTime = Instant.now(), - sourceIndex = indexName, - metadataID = null, - continuous = true, - delay = delay, - dimensions = listOf( - randomCalendarDateHistogram().copy( - calendarInterval = "5s" - ) + var rollup = + randomRollup().copy( + id = "$testName-4", + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobEnabledTime = Instant.now(), + sourceIndex = indexName, + metadataID = null, + continuous = true, + delay = delay, + dimensions = + listOf( + randomCalendarDateHistogram().copy( + calendarInterval = "5s", + ), + ), ) - ) // Create source index createRollupSourceIndex(rollup) @@ -664,16 +690,17 @@ class RollupRunnerIT : RollupRestTestCase() { // Create rollup job val jobStartTime = Instant.now() - val rollupNow = rollup.copy( - jobSchedule = IntervalSchedule(jobStartTime, 1, ChronoUnit.MINUTES), - jobEnabledTime = jobStartTime - ) + val rollupNow = + rollup.copy( + jobSchedule = IntervalSchedule(jobStartTime, 1, ChronoUnit.MINUTES), + jobEnabledTime = jobStartTime, + ) rollup = createRollup(rollup = rollupNow, rollupId = rollupNow.id) val expectedFirstExecutionTime = rollup.jobSchedule.getNextExecutionTime(null).toEpochMilli() assertTrue("The first job execution time should be equal [job start time] + [delay].", expectedFirstExecutionTime == jobStartTime.toEpochMilli() + delay) - waitFor() { + waitFor { assertTrue("Target rollup index was not created", indexExists(rollup.targetIndex)) val rollupJob = getRollup(rollupId = rollup.id) assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) @@ -694,26 +721,28 @@ class RollupRunnerIT : RollupRestTestCase() { // Setting the delay to this time so most of the data records would be excluded if delay were applied val goalDateMS: Long = Instant.parse("2018-11-30T00:00:00Z").toEpochMilli() val testDelay: Long = Instant.now().toEpochMilli() - goalDateMS - val rollup = Rollup( - id = "non_continuous_delay_stats_check", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic delay test", - sourceIndex = "source_runner_ninth", - targetIndex = "target_runner_ninth", - metadataID = null, - roles = emptyList(), - pageSize = 100, - delay = testDelay, - continuous = false, - dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h")), - metrics = listOf( - RollupMetrics(sourceField = "passenger_count", targetField = "passenger_count", metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average())) - ) - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "non_continuous_delay_stats_check", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic delay test", + sourceIndex = "source_runner_ninth", + targetIndex = "target_runner_ninth", + metadataID = null, + roles = emptyList(), + pageSize = 100, + delay = testDelay, + continuous = false, + dimensions = listOf(DateHistogram(sourceField 
= "tpep_pickup_datetime", fixedInterval = "1h")), + metrics = + listOf( + RollupMetrics(sourceField = "passenger_count", targetField = "passenger_count", metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average())), + ), + ).let { createRollup(it, it.id) } val now = Instant.now() val intervalMillis = (rollup.schedule as IntervalSchedule).interval * 60 * 1000 @@ -725,13 +754,14 @@ class RollupRunnerIT : RollupRestTestCase() { updateRollupStartTime(rollup) - val finishedRollup = waitFor { - val rollupJob = getRollup(rollupId = rollup.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) - assertEquals("Rollup is not finished $rollupMetadata", RollupMetadata.Status.FINISHED, rollupMetadata.status) - rollupJob - } + val finishedRollup = + waitFor { + val rollupJob = getRollup(rollupId = rollup.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) + assertEquals("Rollup is not finished $rollupMetadata", RollupMetadata.Status.FINISHED, rollupMetadata.status) + rollupJob + } refreshAllIndices() @@ -757,37 +787,40 @@ class RollupRunnerIT : RollupRestTestCase() { // Setting the delay to this time so most of the data records are excluded val goalDateMS: Long = Instant.parse("2018-11-30T00:00:00Z").toEpochMilli() val testDelay: Long = Instant.now().toEpochMilli() - goalDateMS - val rollup = Rollup( - id = "continuous_delay_stats_check", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic delay test", - sourceIndex = "source_runner_tenth", - targetIndex = "target_runner_tenth", - metadataID = null, - roles = emptyList(), - pageSize = 100, - delay = testDelay, - continuous = true, - dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h")), - metrics = listOf( - RollupMetrics(sourceField = "passenger_count", targetField = "passenger_count", metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average())) - ) - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "continuous_delay_stats_check", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic delay test", + sourceIndex = "source_runner_tenth", + targetIndex = "target_runner_tenth", + metadataID = null, + roles = emptyList(), + pageSize = 100, + delay = testDelay, + continuous = true, + dimensions = listOf(DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h")), + metrics = + listOf( + RollupMetrics(sourceField = "passenger_count", targetField = "passenger_count", metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average())), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollup, Instant.now().minusMillis(testDelay).minusMillis(55000).toEpochMilli()) - val finishedRollup = waitFor { - val rollupJob = getRollup(rollupId = rollup.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) 
- assertEquals("Rollup is not started $rollupMetadata", RollupMetadata.Status.STARTED, rollupMetadata.status) - assertTrue("Continuous rollup did not process history", rollupMetadata.continuous!!.nextWindowEndTime.toEpochMilli() > goalDateMS) - rollupJob - } + val finishedRollup = + waitFor { + val rollupJob = getRollup(rollupId = rollup.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) + assertEquals("Rollup is not started $rollupMetadata", RollupMetadata.Status.STARTED, rollupMetadata.status) + assertTrue("Continuous rollup did not process history", rollupMetadata.continuous!!.nextWindowEndTime.toEpochMilli() > goalDateMS) + rollupJob + } refreshAllIndices() @@ -812,58 +845,63 @@ class RollupRunnerIT : RollupRestTestCase() { // Create index with alias, without mappings val indexAlias = "alias_as_target_index" val backingIndex = "backing_target_index" - val builtSettings = Settings.builder().let { - it.put(INDEX_NUMBER_OF_REPLICAS, "1") - it.put(INDEX_NUMBER_OF_SHARDS, "1") - it - }.build() + val builtSettings = + Settings.builder().let { + it.put(INDEX_NUMBER_OF_REPLICAS, "1") + it.put(INDEX_NUMBER_OF_SHARDS, "1") + it + }.build() val aliases = "\"$indexAlias\": { \"is_write_index\": true }" createIndex(backingIndex, builtSettings, null, aliases) refreshAllIndices() - val rollup = Rollup( - id = "runner_with_alias_as_target", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic change of page size", - sourceIndex = "source_runner_sixth_eleventh_1", - targetIndex = indexAlias, - metadataID = null, - roles = emptyList(), - pageSize = 1000, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1s"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", - targetField = "passenger_count", - metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()) - ) - ) - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "runner_with_alias_as_target", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic change of page size", + sourceIndex = "source_runner_sixth_eleventh_1", + targetIndex = indexAlias, + metadataID = null, + roles = emptyList(), + pageSize = 1000, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1s"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), + ), + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", + targetField = "passenger_count", + metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()), + ), + ), + ).let { createRollup(it, it.id) } // First run, backing index is empty: no mappings, no rollup_index setting, no rollupjobs in _META updateRollupStartTime(rollup) waitFor { assertTrue("Target rollup index was not created", indexExists(backingIndex)) } - var startedRollup = waitFor { - val rollupJob = getRollup(rollupId = rollup.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata = 
getRollupMetadata(rollupJob.metadataID!!) - assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) - assertTrue("Rollup is not disabled", !rollupJob.enabled) - rollupJob - } + var startedRollup = + waitFor { + val rollupJob = getRollup(rollupId = rollup.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) + assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) + assertTrue("Rollup is not disabled", !rollupJob.enabled) + rollupJob + } var rollupMetadataID = startedRollup.metadataID!! var rollupMetadata = getRollupMetadata(rollupMetadataID) assertTrue("Did not process any doc during rollup", rollupMetadata.stats.documentsProcessed > 0) @@ -872,18 +910,19 @@ class RollupRunnerIT : RollupRestTestCase() { client().makeRequest( "PUT", "$ROLLUP_JOBS_BASE_URI/${startedRollup.id}?if_seq_no=${startedRollup.seqNo}&if_primary_term=${startedRollup.primaryTerm}", - emptyMap(), rollup.copy(enabled = true).toHttpEntity() + emptyMap(), rollup.copy(enabled = true).toHttpEntity(), ) // Second run, backing index is setup just like any other rollup index updateRollupStartTime(rollup) - startedRollup = waitFor { - val rollupJob = getRollup(rollupId = rollup.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata1 = getRollupMetadata(rollupJob.metadataID!!) - assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata1.status) - rollupJob - } + startedRollup = + waitFor { + val rollupJob = getRollup(rollupId = rollup.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata1 = getRollupMetadata(rollupJob.metadataID!!) + assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata1.status) + rollupJob + } rollupMetadataID = startedRollup.metadataID!! 
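The re-enable calls in this hunk PUT the job back with its last known seq_no and primary_term, so the update is rejected with a 409 conflict if the job document changed in between. A small sketch of how that conditional-update URL is assembled; the names and the example base path are illustrative, the test itself uses the ROLLUP_JOBS_BASE_URI constant.

// Builds the conditional-update URL used when re-enabling a finished job.
// if_seq_no / if_primary_term make the write fail if another writer has
// modified the job document since it was last read.
fun conditionalUpdateUrl(jobsBaseUri: String, jobId: String, seqNo: Long, primaryTerm: Long): String {
    require(seqNo >= 0 && primaryTerm >= 1) { "seq_no and primary_term must come from an indexed document" }
    return "$jobsBaseUri/$jobId?if_seq_no=$seqNo&if_primary_term=$primaryTerm"
}

fun main() {
    // The literal base path is only an example; the tests use ROLLUP_JOBS_BASE_URI.
    println(conditionalUpdateUrl("_plugins/_rollup/jobs", "page_size_runner_sixth_2", seqNo = 3, primaryTerm = 1))
}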
rollupMetadata = getRollupMetadata(rollupMetadataID) @@ -898,58 +937,63 @@ class RollupRunnerIT : RollupRestTestCase() { val indexAlias = "alias_as_target_index_2" val backingIndex1 = "backing_target_index1-000001" val backingIndex2 = "backing_target_index1-000002" - val builtSettings = Settings.builder().let { - it.put(INDEX_NUMBER_OF_REPLICAS, "1") - it.put(INDEX_NUMBER_OF_SHARDS, "1") - it - }.build() + val builtSettings = + Settings.builder().let { + it.put(INDEX_NUMBER_OF_REPLICAS, "1") + it.put(INDEX_NUMBER_OF_SHARDS, "1") + it + }.build() val aliases = "\"$indexAlias\": { \"is_write_index\": true }" createIndex(backingIndex1, builtSettings, null, aliases) refreshAllIndices() - val rollup = Rollup( - id = "page_size_runner_sixth_2", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic change of page size", - sourceIndex = "source_runner_sixth_29932", - targetIndex = indexAlias, - metadataID = null, - roles = emptyList(), - pageSize = 1000, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1s"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", - targetField = "passenger_count", - metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()) - ) - ) - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "page_size_runner_sixth_2", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic change of page size", + sourceIndex = "source_runner_sixth_29932", + targetIndex = indexAlias, + metadataID = null, + roles = emptyList(), + pageSize = 1000, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1s"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), + ), + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", + targetField = "passenger_count", + metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()), + ), + ), + ).let { createRollup(it, it.id) } // First run, backing index is empty: no mappings, no rollup_index setting, no rollupjobs in _META updateRollupStartTime(rollup) waitFor { assertTrue("Target rollup index was not created", indexExists(backingIndex1)) } - var startedRollup = waitFor { - val rollupJob = getRollup(rollupId = rollup.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) - assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) - assertTrue("Rollup is not disabled", !rollupJob.enabled) - rollupJob - } + var startedRollup = + waitFor { + val rollupJob = getRollup(rollupId = rollup.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) + assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) + assertTrue("Rollup is not disabled", !rollupJob.enabled) + rollupJob + } var rollupMetadataID = startedRollup.metadataID!! 
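The hunks in this file repeat the same mechanical formatting changes: an initializer that spans multiple lines is wrapped onto the line after the equals sign, call-site arguments gain trailing commas, and redundant empty parentheses such as waitFor() { ... } become waitFor { ... }. A self-contained illustration of the resulting shape with made-up types; the real calls are to Rollup, randomRollup().copy and the test helpers.

data class JobSketch(
    val id: String,
    val enabled: Boolean,
    val pageSize: Int,
    val dimensions: List<String>,
)

fun main() {
    // Multiline initializer starts on the line after "=", and arguments keep trailing commas.
    val job =
        JobSketch(
            id = "example-job",
            enabled = true,
            pageSize = 100,
            dimensions =
                listOf(
                    "tpep_pickup_datetime",
                ),
        )
    // Lambda-only calls drop the empty parentheses: job.id.let { ... } rather than let() { ... }.
    println(job.id.let { "job:$it" })
}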
var rollupMetadata = getRollupMetadata(rollupMetadataID) assertTrue("Did not process any doc during rollup", rollupMetadata.stats.documentsProcessed > 0) @@ -963,18 +1007,19 @@ class RollupRunnerIT : RollupRestTestCase() { client().makeRequest( "PUT", "$ROLLUP_JOBS_BASE_URI/${startedRollup.id}?if_seq_no=${startedRollup.seqNo}&if_primary_term=${startedRollup.primaryTerm}", - emptyMap(), rollup.copy(enabled = true).toHttpEntity() + emptyMap(), rollup.copy(enabled = true).toHttpEntity(), ) // Second run, backing index is setup just like any other rollup index updateRollupStartTime(rollup) - startedRollup = waitFor { - val rollupJob = getRollup(rollupId = rollup.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata1 = getRollupMetadata(rollupJob.metadataID!!) - assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata1.status) - rollupJob - } + startedRollup = + waitFor { + val rollupJob = getRollup(rollupId = rollup.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata1 = getRollupMetadata(rollupJob.metadataID!!) + assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata1.status) + rollupJob + } rollupMetadataID = startedRollup.metadataID!! rollupMetadata = getRollupMetadata(rollupMetadataID) @@ -993,77 +1038,84 @@ class RollupRunnerIT : RollupRestTestCase() { val indexAlias = "alias_as_target_index_failed" val backingIndex1 = "backing_target_index1_f-000001" val backingIndex2 = "backing_target_index1_f-000002" - val builtSettings = Settings.builder().let { - it.put(INDEX_NUMBER_OF_REPLICAS, "1") - it.put(INDEX_NUMBER_OF_SHARDS, "1") - it - }.build() + val builtSettings = + Settings.builder().let { + it.put(INDEX_NUMBER_OF_REPLICAS, "1") + it.put(INDEX_NUMBER_OF_SHARDS, "1") + it + }.build() val aliases = "\"$indexAlias\": { \"is_write_index\": true }" createIndex(backingIndex1, builtSettings, null, aliases) refreshAllIndices() - val job1 = Rollup( - id = "rollup_with1_alias_1", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic change of page size", - sourceIndex = "source_runner_sixth_2123", - targetIndex = indexAlias, - metadataID = null, - roles = emptyList(), - pageSize = 1000, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1s"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", - targetField = "passenger_count", - metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()) - ) - ) - ).let { createRollup(it, it.id) } + val job1 = + Rollup( + id = "rollup_with1_alias_1", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic change of page size", + sourceIndex = "source_runner_sixth_2123", + targetIndex = indexAlias, + metadataID = null, + roles = emptyList(), + pageSize = 1000, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1s"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), + ), + metrics = + listOf( + RollupMetrics( + sourceField = 
"passenger_count", + targetField = "passenger_count", + metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()), + ), + ), + ).let { createRollup(it, it.id) } // First run, backing index is empty: no mappings, no rollup_index setting, no rollupjobs in _META updateRollupStartTime(job1) waitFor { assertTrue("Target rollup index was not created", indexExists(backingIndex1)) } - var startedRollup1 = waitFor { - val rollupJob = getRollup(rollupId = job1.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) - assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) - assertTrue("Rollup is not disabled", !rollupJob.enabled) - rollupJob - } + var startedRollup1 = + waitFor { + val rollupJob = getRollup(rollupId = job1.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) + assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) + assertTrue("Rollup is not disabled", !rollupJob.enabled) + rollupJob + } var rollupMetadataID = startedRollup1.metadataID!! var rollupMetadata = getRollupMetadata(rollupMetadataID) assertTrue("Did not process any doc during rollup", rollupMetadata.stats.documentsProcessed > 0) // Run job #2 on same target_index - val job2 = job1.copy(id = "some_other_job_999", targetIndex = backingIndex1) - .let { createRollup(it, it.id) } + val job2 = + job1.copy(id = "some_other_job_999", targetIndex = backingIndex1) + .let { createRollup(it, it.id) } // Job2 First run, it should add itself to _meta in the same index job1 did. updateRollupStartTime(job2) - var startedRollup2 = waitFor { - val rollupJob = getRollup(rollupId = job2.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata1 = getRollupMetadata(rollupJob.metadataID!!) - assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata1.status) - assertTrue("Rollup is not disabled", !rollupJob.enabled) - rollupJob - } + var startedRollup2 = + waitFor { + val rollupJob = getRollup(rollupId = job2.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata1 = getRollupMetadata(rollupJob.metadataID!!) + assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata1.status) + assertTrue("Rollup is not disabled", !rollupJob.enabled) + rollupJob + } rollupMetadataID = startedRollup2.metadataID!! rollupMetadata = getRollupMetadata(rollupMetadataID) assertTrue("Did not process any doc during rollup", rollupMetadata.stats.documentsProcessed > 0) @@ -1079,18 +1131,19 @@ class RollupRunnerIT : RollupRestTestCase() { client().makeRequest( "PUT", "$ROLLUP_JOBS_BASE_URI/${startedRollup1.id}?if_seq_no=${startedRollup1.seqNo}&if_primary_term=${startedRollup1.primaryTerm}", - emptyMap(), job1.copy(enabled = true).toHttpEntity() + emptyMap(), job1.copy(enabled = true).toHttpEntity(), ) // Second run, backing index is setup just like any other rollup index updateRollupStartTime(job1) - startedRollup1 = waitFor { - val rollupJob = getRollup(rollupId = job1.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata1 = getRollupMetadata(rollupJob.metadataID!!) 
- assertEquals("Rollup is not finished", RollupMetadata.Status.FAILED, rollupMetadata1.status) - rollupJob - } + startedRollup1 = + waitFor { + val rollupJob = getRollup(rollupId = job1.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata1 = getRollupMetadata(rollupJob.metadataID!!) + assertEquals("Rollup is not finished", RollupMetadata.Status.FAILED, rollupMetadata1.status) + rollupJob + } rollupMetadataID = startedRollup1.metadataID!! rollupMetadata = getRollupMetadata(rollupMetadataID) @@ -1104,77 +1157,84 @@ class RollupRunnerIT : RollupRestTestCase() { // Create index with alias, without mappings val indexAlias = "alias_as_target_index_failed_1" val backingIndex1 = "backing-000001" - val builtSettings = Settings.builder().let { - it.put(INDEX_NUMBER_OF_REPLICAS, "1") - it.put(INDEX_NUMBER_OF_SHARDS, "1") - it - }.build() + val builtSettings = + Settings.builder().let { + it.put(INDEX_NUMBER_OF_REPLICAS, "1") + it.put(INDEX_NUMBER_OF_SHARDS, "1") + it + }.build() val aliases = "\"$indexAlias\": { \"is_write_index\": true }" createIndex(backingIndex1, builtSettings, null, aliases) refreshAllIndices() - val job1 = Rollup( - id = "rollup_with_alias_11", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic change of page size", - sourceIndex = "source_runner_sixth_2209", - targetIndex = indexAlias, - metadataID = null, - roles = emptyList(), - pageSize = 1000, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1s"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", - targetField = "passenger_count", - metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()) - ) - ) - ).let { createRollup(it, it.id) } + val job1 = + Rollup( + id = "rollup_with_alias_11", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic change of page size", + sourceIndex = "source_runner_sixth_2209", + targetIndex = indexAlias, + metadataID = null, + roles = emptyList(), + pageSize = 1000, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1s"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), + ), + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", + targetField = "passenger_count", + metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()), + ), + ), + ).let { createRollup(it, it.id) } // First run, backing index is empty: no mappings, no rollup_index setting, no rollupjobs in _META updateRollupStartTime(job1) waitFor { assertTrue("Target rollup index was not created", indexExists(backingIndex1)) } - val startedRollup1 = waitFor { - val rollupJob = getRollup(rollupId = job1.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) 
- assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) - assertTrue("Rollup is not disabled", !rollupJob.enabled) - rollupJob - } + val startedRollup1 = + waitFor { + val rollupJob = getRollup(rollupId = job1.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) + assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) + assertTrue("Rollup is not disabled", !rollupJob.enabled) + rollupJob + } var rollupMetadataID = startedRollup1.metadataID!! var rollupMetadata = getRollupMetadata(rollupMetadataID) assertTrue("Did not process any doc during rollup", rollupMetadata.stats.documentsProcessed > 0) // Run job #2 on same target_index alias - val job2 = job1.copy(id = "some_other_job_9991", targetIndex = indexAlias) - .let { createRollup(it, it.id) } + val job2 = + job1.copy(id = "some_other_job_9991", targetIndex = indexAlias) + .let { createRollup(it, it.id) } // Job2 First run, it should fail because job1 already wrote to backing index updateRollupStartTime(job2) - val startedRollup2 = waitFor { - val rollupJob = getRollup(rollupId = job2.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata1 = getRollupMetadata(rollupJob.metadataID!!) - assertEquals("Rollup is not finished", RollupMetadata.Status.FAILED, rollupMetadata1.status) - assertTrue("Rollup is not disabled", !rollupJob.enabled) - rollupJob - } + val startedRollup2 = + waitFor { + val rollupJob = getRollup(rollupId = job2.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata1 = getRollupMetadata(rollupJob.metadataID!!) + assertEquals("Rollup is not finished", RollupMetadata.Status.FAILED, rollupMetadata1.status) + assertTrue("Rollup is not disabled", !rollupJob.enabled) + rollupJob + } rollupMetadataID = startedRollup2.metadataID!! 
rollupMetadata = getRollupMetadata(rollupMetadataID) assertEquals("More than one rollup jobs present on the backing index of the target alias, cannot perform rollup to this target alias [$indexAlias].", rollupMetadata.failureReason) @@ -1187,11 +1247,12 @@ class RollupRunnerIT : RollupRestTestCase() { val indexAlias = "alias_as_target_index_failed_19941" val backingIndex1 = "backing-99000001" val backingIndex2 = "backing-99000002" - val builtSettings = Settings.builder().let { - it.put(INDEX_NUMBER_OF_REPLICAS, "1") - it.put(INDEX_NUMBER_OF_SHARDS, "1") - it - }.build() + val builtSettings = + Settings.builder().let { + it.put(INDEX_NUMBER_OF_REPLICAS, "1") + it.put(INDEX_NUMBER_OF_SHARDS, "1") + it + }.build() var aliases = "\"$indexAlias\": { \"is_write_index\": true }" createIndex(backingIndex1, builtSettings, null, aliases) aliases = "\"$indexAlias\": {}" @@ -1199,48 +1260,52 @@ class RollupRunnerIT : RollupRestTestCase() { refreshAllIndices() - val job1 = Rollup( - id = "rollup_with_alias_99243411", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic change of page size", - sourceIndex = "source_runner_sixth_1532209", - targetIndex = indexAlias, - metadataID = null, - roles = emptyList(), - pageSize = 1000, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1s"), - Terms("RatecodeID", "RatecodeID"), - Terms("PULocationID", "PULocationID") - ), - metrics = listOf( - RollupMetrics( - sourceField = "passenger_count", - targetField = "passenger_count", - metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()) - ) - ) - ).let { createRollup(it, it.id) } + val job1 = + Rollup( + id = "rollup_with_alias_99243411", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic change of page size", + sourceIndex = "source_runner_sixth_1532209", + targetIndex = indexAlias, + metadataID = null, + roles = emptyList(), + pageSize = 1000, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1s"), + Terms("RatecodeID", "RatecodeID"), + Terms("PULocationID", "PULocationID"), + ), + metrics = + listOf( + RollupMetrics( + sourceField = "passenger_count", + targetField = "passenger_count", + metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()), + ), + ), + ).let { createRollup(it, it.id) } // First run, backing index is empty: no mappings, no rollup_index setting, no rollupjobs in _META updateRollupStartTime(job1) waitFor { assertTrue("Target rollup index was not created", indexExists(backingIndex1)) } - var startedRollup1 = waitFor { - val rollupJob = getRollup(rollupId = job1.id) - assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) - val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) - assertEquals("Rollup is not finished", RollupMetadata.Status.FAILED, rollupMetadata.status) - assertTrue("Rollup is not disabled", !rollupJob.enabled) - rollupJob - } + var startedRollup1 = + waitFor { + val rollupJob = getRollup(rollupId = job1.id) + assertNotNull("Rollup job doesn't have metadata set", rollupJob.metadataID) + val rollupMetadata = getRollupMetadata(rollupJob.metadataID!!) 
+ assertEquals("Rollup is not finished", RollupMetadata.Status.FAILED, rollupMetadata.status) + assertTrue("Rollup is not disabled", !rollupJob.enabled) + rollupJob + } var rollupMetadataID = startedRollup1.metadataID!! var rollupMetadata = getRollupMetadata(rollupMetadataID) assertEquals("Backing index [$backingIndex2] has to have owner rollup job with id:[${startedRollup1.id}]", rollupMetadata.failureReason) @@ -1262,7 +1327,7 @@ class RollupRunnerIT : RollupRestTestCase() { "itemPrice": { "type": "float" } - }""" + }""", ) indexDoc(index, "1", """{"purchaseDate": 1683149130000.6497, "itemName": "shoes", "itemPrice": 100.5}""".trimIndent()) @@ -1271,33 +1336,36 @@ class RollupRunnerIT : RollupRestTestCase() { refreshAllIndices() - val job = Rollup( - id = "rollup_with_alias_992434131", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.DAYS), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic change of page size", - sourceIndex = index, - targetIndex = rollupIndex, - metadataID = null, - roles = emptyList(), - pageSize = 1000, - delay = 0, - continuous = true, - dimensions = listOf( - DateHistogram(sourceField = "purchaseDate", fixedInterval = "5d"), - Terms("itemName", "itemName"), - ), - metrics = listOf( - RollupMetrics( - sourceField = "itemPrice", - targetField = "itemPrice", - metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()) - ) - ) - ).let { createRollup(it, it.id) } + val job = + Rollup( + id = "rollup_with_alias_992434131", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.DAYS), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic change of page size", + sourceIndex = index, + targetIndex = rollupIndex, + metadataID = null, + roles = emptyList(), + pageSize = 1000, + delay = 0, + continuous = true, + dimensions = + listOf( + DateHistogram(sourceField = "purchaseDate", fixedInterval = "5d"), + Terms("itemName", "itemName"), + ), + metrics = + listOf( + RollupMetrics( + sourceField = "itemPrice", + targetField = "itemPrice", + metrics = listOf(Sum(), Min(), Max(), ValueCount(), Average()), + ), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(job) @@ -1319,29 +1387,32 @@ class RollupRunnerIT : RollupRestTestCase() { val maxMetricName = "min_message_bytes_in" generateMessageLogsData(sourceIdxTestName) - val rollup = Rollup( - id = "rollup_test_max", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - jobLastUpdatedTime = Instant.now(), - jobEnabledTime = Instant.now(), - description = "basic stats test", - sourceIndex = sourceIdxTestName, - targetIndex = targetIdxTestName, - metadataID = null, - roles = emptyList(), - pageSize = 100, - delay = 0, - continuous = false, - dimensions = listOf( - DateHistogram(sourceField = "message.timestamp_received", targetField = "message.timestamp_received", fixedInterval = "10m"), - Terms("message.plugin", "message.plugin") - ), - metrics = listOf( - RollupMetrics(sourceField = propertyName, targetField = propertyName, metrics = listOf(Max())) - ) - ).let { createRollup(it, it.id) } + val rollup = + Rollup( + id = "rollup_test_max", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + jobLastUpdatedTime = Instant.now(), + jobEnabledTime = Instant.now(), + description = "basic stats test", + sourceIndex = 
sourceIdxTestName, + targetIndex = targetIdxTestName, + metadataID = null, + roles = emptyList(), + pageSize = 100, + delay = 0, + continuous = false, + dimensions = + listOf( + DateHistogram(sourceField = "message.timestamp_received", targetField = "message.timestamp_received", fixedInterval = "10m"), + Terms("message.plugin", "message.plugin"), + ), + metrics = + listOf( + RollupMetrics(sourceField = propertyName, targetField = propertyName, metrics = listOf(Max())), + ), + ).let { createRollup(it, it.id) } updateRollupStartTime(rollup) @@ -1354,21 +1425,22 @@ class RollupRunnerIT : RollupRestTestCase() { assertEquals("Rollup is not finished", RollupMetadata.Status.FINISHED, rollupMetadata.status) // Term query - val req = """ - { - "size": 0, - "query": { - "match_all": {} - }, - "aggs": { - "$maxMetricName": { - "max": { - "field": "$propertyName" + val req = + """ + { + "size": 0, + "query": { + "match_all": {} + }, + "aggs": { + "$maxMetricName": { + "max": { + "field": "$propertyName" + } } } } - } - """.trimIndent() + """.trimIndent() var rawRes = client().makeRequest(RestRequest.Method.POST.name, "/$sourceIdxTestName/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) var rollupRes = client().makeRequest(RestRequest.Method.POST.name, "/$targetIdxTestName/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) @@ -1378,7 +1450,7 @@ class RollupRunnerIT : RollupRestTestCase() { assertEquals( "Source and rollup index did not return same max results", rawAggRes.getValue(maxMetricName)["value"], - rollupAggRes.getValue(maxMetricName)["value"] + rollupAggRes.getValue(maxMetricName)["value"], ) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/util/RollupFieldValueExpressionResolverTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/util/RollupFieldValueExpressionResolverTests.kt index 66a905573..8f1a97ae9 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/util/RollupFieldValueExpressionResolverTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/util/RollupFieldValueExpressionResolverTests.kt @@ -6,10 +6,10 @@ package org.opensearch.indexmanagement.rollup.util import com.nhaarman.mockitokotlin2.any +import com.nhaarman.mockitokotlin2.doReturn import com.nhaarman.mockitokotlin2.eq import com.nhaarman.mockitokotlin2.mock import com.nhaarman.mockitokotlin2.whenever -import com.nhaarman.mockitokotlin2.doReturn import org.junit.Before import org.mockito.ArgumentMatchers.anyString import org.opensearch.cluster.service.ClusterService @@ -20,7 +20,6 @@ import org.opensearch.script.TemplateScript import org.opensearch.test.OpenSearchTestCase class RollupFieldValueExpressionResolverTests : OpenSearchTestCase() { - private val scriptService: ScriptService = mock() private val clusterService: ClusterService = mock() private var indexAliasUtils: RollupFieldValueExpressionResolver.IndexAliasUtils = mock() diff --git a/src/test/kotlin/org/opensearch/indexmanagement/rollup/util/RollupUtilsTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/rollup/util/RollupUtilsTests.kt index d535f1fb7..4fe289ca5 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/rollup/util/RollupUtilsTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/rollup/util/RollupUtilsTests.kt @@ -40,7 +40,6 @@ import org.opensearch.test.OpenSearchTestCase import org.opensearch.test.rest.OpenSearchRestTestCase class RollupUtilsTests : OpenSearchTestCase() { - fun `test 
rewriteQueryBuilder term query`() { val termQuery = randomTermQuery() termQuery.queryName("dummy-query") @@ -217,12 +216,13 @@ class RollupUtilsTests : OpenSearchTestCase() { val newDims = mutableListOf() // Make rollup dimensions and metrics contain the aggregation field name and aggregation metrics rollup.dimensions.forEach { - val dimToAdd = when (it) { - is DateHistogram -> it.copy(sourceField = aggField, targetField = aggField) - is Terms -> it.copy(sourceField = aggField, targetField = aggField) - is Histogram -> it.copy(sourceField = aggField, targetField = aggField) - else -> it - } + val dimToAdd = + when (it) { + is DateHistogram -> it.copy(sourceField = aggField, targetField = aggField) + is Terms -> it.copy(sourceField = aggField, targetField = aggField) + is Histogram -> it.copy(sourceField = aggField, targetField = aggField) + else -> it + } newDims.add(dimToAdd) } val newMetrics = mutableListOf(RollupMetrics(aggField, aggField, listOf(randomAverage(), randomMax(), randomMin(), randomSum(), randomValueCount()))) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SMRunnerIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SMRunnerIT.kt index 85a9d98bb..45a151e21 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SMRunnerIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SMRunnerIT.kt @@ -5,27 +5,27 @@ package org.opensearch.indexmanagement.snapshotmanagement +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.SM_POLICIES_URI import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.snapshotmanagement.model.SMMetadata import org.opensearch.indexmanagement.waitFor import org.opensearch.jobscheduler.spi.schedule.CronSchedule import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule -import org.opensearch.core.rest.RestStatus import java.time.Instant.now import java.time.temporal.ChronoUnit class SMRunnerIT : SnapshotManagementRestTestCase() { - fun `test overall workflow`() { createRepository("repo") - val smPolicy = randomSMPolicy( - creationSchedule = CronSchedule("* * * * *", randomZone()), - jobSchedule = IntervalSchedule(now(), 1, ChronoUnit.MINUTES), - jobEnabled = true, - jobEnabledTime = now(), - ) + val smPolicy = + randomSMPolicy( + creationSchedule = CronSchedule("* * * * *", randomZone()), + jobSchedule = IntervalSchedule(now(), 1, ChronoUnit.MINUTES), + jobEnabled = true, + jobEnabledTime = now(), + ) val policyName = smPolicy.policyName val response = client().makeRequest("POST", "$SM_POLICIES_URI/$policyName", emptyMap(), smPolicy.toHttpEntity()) assertEquals("Create SM policy failed", RestStatus.CREATED, response.restStatus()) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SnapshotManagementRestTestCase.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SnapshotManagementRestTestCase.kt index a2bbf7162..5c98260fa 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SnapshotManagementRestTestCase.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SnapshotManagementRestTestCase.kt @@ -5,18 +5,19 @@ package org.opensearch.indexmanagement.snapshotmanagement +import org.apache.hc.core5.http.ContentType import org.apache.hc.core5.http.HttpEntity import org.apache.hc.core5.http.HttpHeaders -import org.apache.hc.core5.http.ContentType import 
org.apache.hc.core5.http.io.entity.StringEntity import org.apache.hc.core5.http.message.BasicHeader import org.junit.After import org.junit.Before import org.opensearch.client.Response import org.opensearch.client.ResponseException +import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.common.xcontent.XContentType import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX @@ -29,14 +30,12 @@ import org.opensearch.indexmanagement.util._PRIMARY_TERM import org.opensearch.indexmanagement.util._SEQ_NO import org.opensearch.indexmanagement.waitFor import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule -import org.opensearch.core.rest.RestStatus import java.io.InputStream import java.time.Duration import java.time.Instant import java.time.Instant.now abstract class SnapshotManagementRestTestCase : IndexManagementRestTestCase() { - @After fun clearIndicesAfterEachTest() { wipeAllIndices() @@ -68,13 +67,14 @@ abstract class SnapshotManagementRestTestCase : IndexManagementRestTestCase() { smPolicyName: String, refresh: Boolean = true, ): Response { - val response = client() - .makeRequest( - "POST", - "${IndexManagementPlugin.SM_POLICIES_URI}/$smPolicyName?refresh=$refresh", - emptyMap(), - StringEntity(smPolicyString, ContentType.APPLICATION_JSON) - ) + val response = + client() + .makeRequest( + "POST", + "${IndexManagementPlugin.SM_POLICIES_URI}/$smPolicyName?refresh=$refresh", + emptyMap(), + StringEntity(smPolicyString, ContentType.APPLICATION_JSON), + ) assertEquals("Unable to create a new snapshot management policy", RestStatus.CREATED, response.restStatus()) return response } @@ -138,13 +138,14 @@ abstract class SnapshotManagementRestTestCase : IndexManagementRestTestCase() { val millis = Duration.of(intervalSchedule.interval.toLong(), intervalSchedule.unit).minusSeconds(2).toMillis() val startTimeMillis = desiredStartTimeMillis ?: (now().toEpochMilli() - millis) val waitForActiveShards = if (isMultiNode) "all" else "1" - val response = client().makeRequest( - "POST", "$INDEX_MANAGEMENT_INDEX/_update/${update.id}?wait_for_active_shards=$waitForActiveShards", - StringEntity( - "{\"doc\":{\"sm_policy\":{\"schedule\":{\"interval\":{\"start_time\":\"$startTimeMillis\"}}}}}", - ContentType.APPLICATION_JSON + val response = + client().makeRequest( + "POST", "$INDEX_MANAGEMENT_INDEX/_update/${update.id}?wait_for_active_shards=$waitForActiveShards", + StringEntity( + "{\"doc\":{\"sm_policy\":{\"schedule\":{\"interval\":{\"start_time\":\"$startTimeMillis\"}}}}}", + ContentType.APPLICATION_JSON, + ), ) - ) assertEquals("Request failed", RestStatus.OK, response.restStatus()) } @@ -169,10 +170,11 @@ abstract class SnapshotManagementRestTestCase : IndexManagementRestTestCase() { val millis = Duration.of(intervalSchedule.interval.toLong(), intervalSchedule.unit).minusSeconds(2).toMillis() val startTimeMillis = desiredStartTimeMillis ?: (now().toEpochMilli() - millis) val waitForActiveShards = if (isMultiNode) "all" else "1" - val response = client().makeRequest( - "POST", "$INDEX_MANAGEMENT_INDEX/_update/${update.metadataID}?wait_for_active_shards=$waitForActiveShards", - StringEntity( - """ + val response = + client().makeRequest( + "POST", 
"$INDEX_MANAGEMENT_INDEX/_update/${update.metadataID}?wait_for_active_shards=$waitForActiveShards", + StringEntity( + """ { "doc": { "sm_metadata": { @@ -186,10 +188,10 @@ abstract class SnapshotManagementRestTestCase : IndexManagementRestTestCase() { } } } - """.trimIndent(), - ContentType.APPLICATION_JSON + """.trimIndent(), + ContentType.APPLICATION_JSON, + ), ) - ) assertEquals("Request failed", RestStatus.OK, response.restStatus()) } @@ -259,15 +261,16 @@ abstract class SnapshotManagementRestTestCase : IndexManagementRestTestCase() { } protected fun createRepository( - repository: String + repository: String, ) { - val response = client() - .makeRequest( - "PUT", - "_snapshot/$repository", - emptyMap(), - StringEntity("{\"type\":\"fs\", \"settings\": {\"location\": \"$repository\"}}", ContentType.APPLICATION_JSON) - ) + val response = + client() + .makeRequest( + "PUT", + "_snapshot/$repository", + emptyMap(), + StringEntity("{\"type\":\"fs\", \"settings\": {\"location\": \"$repository\"}}", ContentType.APPLICATION_JSON), + ) assertEquals("Unable to create a new repository", RestStatus.OK, response.restStatus()) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/TestUtils.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/TestUtils.kt index 9b924c2b9..82344ac8c 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/TestUtils.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/TestUtils.kt @@ -16,14 +16,15 @@ import org.opensearch.cluster.SnapshotsInProgress import org.opensearch.common.UUIDs import org.opensearch.common.unit.TimeValue import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.XContentFactory +import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentFactory import org.opensearch.core.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentType -import org.opensearch.indexmanagement.opensearchapi.string import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.indexstatemanagement.randomChannel +import org.opensearch.indexmanagement.opensearchapi.string import org.opensearch.indexmanagement.opensearchapi.toMap import org.opensearch.indexmanagement.randomCronSchedule import org.opensearch.indexmanagement.randomInstant @@ -33,7 +34,6 @@ import org.opensearch.indexmanagement.snapshotmanagement.model.SMMetadata import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy import org.opensearch.jobscheduler.spi.schedule.CronSchedule import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule -import org.opensearch.core.rest.RestStatus import org.opensearch.snapshots.Snapshot import org.opensearch.snapshots.SnapshotId import org.opensearch.snapshots.SnapshotInfo @@ -62,19 +62,23 @@ fun randomSMMetadata( return SMMetadata( policySeqNo = policySeqNo, policyPrimaryTerm = policyPrimaryTerm, - creation = SMMetadata.WorkflowMetadata( + creation = + SMMetadata.WorkflowMetadata( currentState = creationCurrentState, - trigger = SMMetadata.Trigger( - time = nextCreationTime + trigger = + SMMetadata.Trigger( + time = nextCreationTime, ), started = if (startedCreation != null) listOf(startedCreation) else null, latestExecution = creationLatestExecution, retry = creationRetryCount?.let { 
SMMetadata.Retry(it) }, ), - deletion = SMMetadata.WorkflowMetadata( + deletion = + SMMetadata.WorkflowMetadata( currentState = deletionCurrentState, - trigger = SMMetadata.Trigger( - time = nextDeletionTime + trigger = + SMMetadata.Trigger( + time = nextDeletionTime, ), started = startedDeletion, latestExecution = deletionLatestExecution, @@ -104,15 +108,16 @@ fun randomSMPolicy( deletionMaxAge: TimeValue? = null, deletionMinCount: Int = randomIntBetween(1, 5), deletionNull: Boolean = false, - snapshotConfig: MutableMap = mutableMapOf( - "repository" to "repo", - ), + snapshotConfig: MutableMap = + mutableMapOf( + "repository" to "repo", + ), dateFormat: String? = null, jobEnabledTime: Instant? = randomInstant(), jobSchedule: IntervalSchedule = IntervalSchedule(randomInstant(), 1, ChronoUnit.MINUTES), seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, - notificationConfig: NotificationConfig? = null + notificationConfig: NotificationConfig? = null, ): SMPolicy { if (dateFormat != null) { snapshotConfig["date_format"] = dateFormat @@ -122,11 +127,13 @@ fun randomSMPolicy( schemaVersion = schemaVersion, jobEnabled = jobEnabled, jobLastUpdateTime = jobLastUpdateTime, - creation = SMPolicy.Creation( + creation = + SMPolicy.Creation( schedule = creationSchedule, timeLimit = creationTimeLimit, ), - deletion = randomPolicyDeletion( + deletion = + randomPolicyDeletion( deletionSchedule, deletionTimeLimit, deletionMaxCount, @@ -155,11 +162,12 @@ fun randomPolicyDeletion( return SMPolicy.Deletion( schedule = deletionSchedule, timeLimit = deletionTimeLimit, - condition = SMPolicy.DeleteCondition( + condition = + SMPolicy.DeleteCondition( maxCount = deletionMaxCount, maxAge = deletionMaxAge, minCount = deletionMinCount, - ) + ), ) } @@ -210,19 +218,20 @@ fun mockSnapshotInfo( policyName: String = "daily-snapshot", remoteStoreIndexShallowCopy: Boolean = randomBoolean(), ): SnapshotInfo { - val result = SnapshotInfo( - SnapshotId(name, UUIDs.randomBase64UUID()), - listOf("index1"), - listOf("ds-1"), - startTime, - reason, - endTime, - 5, - emptyList(), - false, - mapOf("sm_policy" to policyName), - remoteStoreIndexShallowCopy - ) + val result = + SnapshotInfo( + SnapshotId(name, UUIDs.randomBase64UUID()), + listOf("index1"), + listOf("ds-1"), + startTime, + reason, + endTime, + 5, + emptyList(), + false, + mapOf("sm_policy" to policyName), + remoteStoreIndexShallowCopy, + ) return result } @@ -235,21 +244,22 @@ fun mockInProgressSnapshotInfo( name: String = randomAlphaOfLength(10), remoteStoreIndexShallowCopy: Boolean = randomBoolean(), ): SnapshotInfo { - val entry = SnapshotsInProgress.Entry( - Snapshot("repo", SnapshotId(name, UUIDs.randomBase64UUID())), - false, - false, - SnapshotsInProgress.State.SUCCESS, - emptyList(), - emptyList(), - randomNonNegativeLong(), - randomNonNegativeLong(), - mapOf(), - "", - mapOf("sm_policy" to "daily-snapshot"), - Version.CURRENT, - remoteStoreIndexShallowCopy - ) + val entry = + SnapshotsInProgress.Entry( + Snapshot("repo", SnapshotId(name, UUIDs.randomBase64UUID())), + false, + false, + SnapshotsInProgress.State.SUCCESS, + emptyList(), + emptyList(), + randomNonNegativeLong(), + randomNonNegativeLong(), + mapOf(), + "", + mapOf("sm_policy" to "daily-snapshot"), + Version.CURRENT, + remoteStoreIndexShallowCopy, + ) return SnapshotInfo(entry) } @@ -264,8 +274,8 @@ fun mockSnapshotInfoList(num: Int, namePrefix: String = randomAlphaOfLength(10)) for (i in 1..num) { result.add( mockSnapshotInfo( - name = 
namePrefix + i - ) + name = namePrefix + i, + ), ) } return result.toList() diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/action/ActionTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/action/ActionTests.kt index 24a5949aa..fd658427f 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/action/ActionTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/action/ActionTests.kt @@ -9,10 +9,10 @@ import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions.DELETE_SM_POLICY_ACTION_TYPE import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions.EXPLAIN_SM_POLICY_ACTION_NAME import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions.EXPLAIN_SM_POLICY_ACTION_TYPE -import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions.GET_SM_POLICY_ACTION_NAME -import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions.GET_SM_POLICY_ACTION_TYPE import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions.GET_SM_POLICIES_ACTION_NAME import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions.GET_SM_POLICIES_ACTION_TYPE +import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions.GET_SM_POLICY_ACTION_NAME +import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions.GET_SM_POLICY_ACTION_TYPE import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions.INDEX_SM_POLICY_ACTION_NAME import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions.INDEX_SM_POLICY_ACTION_TYPE import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions.START_SM_POLICY_ACTION_NAME @@ -22,7 +22,6 @@ import org.opensearch.indexmanagement.snapshotmanagement.api.transport.SMActions import org.opensearch.test.OpenSearchTestCase class ActionTests : OpenSearchTestCase() { - fun `test delete action name`() { assertNotNull(DELETE_SM_POLICY_ACTION_TYPE.name()) assertEquals(DELETE_SM_POLICY_ACTION_TYPE.name(), DELETE_SM_POLICY_ACTION_NAME) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/action/RequestTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/action/RequestTests.kt index e9cf78c09..440aca087 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/action/RequestTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/action/RequestTests.kt @@ -23,7 +23,6 @@ import org.opensearch.indexmanagement.snapshotmanagement.randomSMPolicy import org.opensearch.test.OpenSearchTestCase class RequestTests : OpenSearchTestCase() { - fun `test delete sm policy request`() { val id = "some_id" val req = DeleteSMPolicyRequest(id).index(INDEX_MANAGEMENT_INDEX) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/action/ResponseTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/action/ResponseTests.kt index 18987f768..409f3b29a 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/action/ResponseTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/action/ResponseTests.kt @@ -7,6 +7,7 @@ package org.opensearch.indexmanagement.snapshotmanagement.action import org.opensearch.common.io.stream.BytesStreamOutput import 
org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.indexstatemanagement.util.XCONTENT_WITHOUT_TYPE_AND_USER import org.opensearch.indexmanagement.snapshotmanagement.api.transport.explain.ExplainSMPolicyResponse import org.opensearch.indexmanagement.snapshotmanagement.api.transport.get.GetSMPoliciesResponse @@ -17,11 +18,9 @@ import org.opensearch.indexmanagement.snapshotmanagement.randomSMMetadata import org.opensearch.indexmanagement.snapshotmanagement.randomSMPolicy import org.opensearch.indexmanagement.snapshotmanagement.smDocIdToPolicyName import org.opensearch.indexmanagement.snapshotmanagement.toMap -import org.opensearch.core.rest.RestStatus import org.opensearch.test.OpenSearchTestCase class ResponseTests : OpenSearchTestCase() { - fun `test index sm policy response`() { val smPolicy = randomSMPolicy() val res = IndexSMPolicyResponse("someid", 1L, 2L, 3L, smPolicy, RestStatus.OK) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/SMStateMachineTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/SMStateMachineTests.kt index d7af63575..843b441b8 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/SMStateMachineTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/SMStateMachineTests.kt @@ -25,186 +25,209 @@ import org.opensearch.indexmanagement.snapshotmanagement.randomSMPolicy import java.time.Instant.now open class SMStateMachineTests : MocksTestCase() { - - fun `test sm result Next save the current state`() = runBlocking { - val currentState = SMState.CREATION_CONDITION_MET - val nextStates = creationTransitions[currentState] - mockGetSnapshotsCall(response = mockGetSnapshotResponse(0)) - mockCreateSnapshotCall(response = mockCreateSnapshotResponse()) - - val metadata = randomSMMetadata( - creationCurrentState = currentState - ) - val job = randomSMPolicy() - val stateMachineSpy = spy(SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)) - - stateMachineSpy.currentState(currentState).next(creationTransitions) - argumentCaptor().apply { - verify(stateMachineSpy).updateMetadata(capture()) - assertEquals(nextStates!!.first(), firstValue.creation.currentState) + fun `test sm result Next save the current state`() = + runBlocking { + val currentState = SMState.CREATION_CONDITION_MET + val nextStates = creationTransitions[currentState] + mockGetSnapshotsCall(response = mockGetSnapshotResponse(0)) + mockCreateSnapshotCall(response = mockCreateSnapshotResponse()) + + val metadata = + randomSMMetadata( + creationCurrentState = currentState, + ) + val job = randomSMPolicy() + val stateMachineSpy = spy(SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)) + + stateMachineSpy.currentState(currentState).next(creationTransitions) + argumentCaptor().apply { + verify(stateMachineSpy).updateMetadata(capture()) + assertEquals(nextStates!!.first(), firstValue.creation.currentState) + } } - } - - fun `test sm result Stay save the previous state`() = runBlocking { - val currentState = SMState.DELETION_START - // both creation and deletion conditions are not met - val metadata = randomSMMetadata( - deletionCurrentState = currentState, - nextCreationTime = now().plusSeconds(60), - nextDeletionTime = now().plusSeconds(60), - ) - val job = randomSMPolicy() - val stateMachineSpy = spy(SMStateMachine(client, job, metadata, settings, threadPool, 
indicesManager)) - - stateMachineSpy.currentState(currentState).next(deletionTransitions) - argumentCaptor().apply { - verify(stateMachineSpy, times(1)).updateMetadata(capture()) - assertEquals(currentState, firstValue.deletion!!.currentState) + + fun `test sm result Stay save the previous state`() = + runBlocking { + val currentState = SMState.DELETION_START + // both creation and deletion conditions are not met + val metadata = + randomSMMetadata( + deletionCurrentState = currentState, + nextCreationTime = now().plusSeconds(60), + nextDeletionTime = now().plusSeconds(60), + ) + val job = randomSMPolicy() + val stateMachineSpy = spy(SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)) + + stateMachineSpy.currentState(currentState).next(deletionTransitions) + argumentCaptor().apply { + verify(stateMachineSpy, times(1)).updateMetadata(capture()) + assertEquals(currentState, firstValue.deletion!!.currentState) + } } - } - - fun `test sm result Fail starts retry for creation workflow`() = runBlocking { - val currentState = SMState.CREATION_CONDITION_MET - val ex = Exception() - mockGetSnapshotsCall(response = mockGetSnapshotResponse(0)) - mockCreateSnapshotCall(exception = ex) - - val metadata = randomSMMetadata( - creationCurrentState = currentState, - creationLatestExecution = randomLatestExecution( - status = SMMetadata.LatestExecution.Status.RETRYING, - ) - ) - val job = randomSMPolicy() - val stateMachineSpy = spy(SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)) - stateMachineSpy.currentState(currentState).next(creationTransitions) - argumentCaptor().apply { - verify(stateMachineSpy).updateMetadata(capture()) - assertEquals(currentState, firstValue.creation.currentState) - assertNull(firstValue.creation.started) - assertEquals(3, firstValue.creation.retry!!.count) + + fun `test sm result Fail starts retry for creation workflow`() = + runBlocking { + val currentState = SMState.CREATION_CONDITION_MET + val ex = Exception() + mockGetSnapshotsCall(response = mockGetSnapshotResponse(0)) + mockCreateSnapshotCall(exception = ex) + + val metadata = + randomSMMetadata( + creationCurrentState = currentState, + creationLatestExecution = + randomLatestExecution( + status = SMMetadata.LatestExecution.Status.RETRYING, + ), + ) + val job = randomSMPolicy() + val stateMachineSpy = spy(SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)) + stateMachineSpy.currentState(currentState).next(creationTransitions) + argumentCaptor().apply { + verify(stateMachineSpy).updateMetadata(capture()) + assertEquals(currentState, firstValue.creation.currentState) + assertNull(firstValue.creation.started) + assertEquals(3, firstValue.creation.retry!!.count) + } } - } - - fun `test sm result Fail starts retry for deletion workflow`() = runBlocking { - val currentState = SMState.DELETION_CONDITION_MET - val ex = Exception() - mockGetSnapshotsCall(response = mockGetSnapshotResponse(11)) - mockDeleteSnapshotCall(exception = ex) - - val metadata = randomSMMetadata( - deletionCurrentState = currentState, - deletionLatestExecution = randomLatestExecution( - status = SMMetadata.LatestExecution.Status.RETRYING, - ), - ) - val job = randomSMPolicy( - policyName = "daily-snapshot", - deletionMaxCount = 10, - ) - val stateMachineSpy = spy(SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)) - stateMachineSpy.currentState(currentState).next(deletionTransitions) - argumentCaptor().apply { - 
verify(stateMachineSpy).updateMetadata(capture()) - assertEquals(currentState, firstValue.deletion!!.currentState) - assertNull(firstValue.deletion!!.started) - assertEquals(3, firstValue.deletion!!.retry!!.count) + + fun `test sm result Fail starts retry for deletion workflow`() = + runBlocking { + val currentState = SMState.DELETION_CONDITION_MET + val ex = Exception() + mockGetSnapshotsCall(response = mockGetSnapshotResponse(11)) + mockDeleteSnapshotCall(exception = ex) + + val metadata = + randomSMMetadata( + deletionCurrentState = currentState, + deletionLatestExecution = + randomLatestExecution( + status = SMMetadata.LatestExecution.Status.RETRYING, + ), + ) + val job = + randomSMPolicy( + policyName = "daily-snapshot", + deletionMaxCount = 10, + ) + val stateMachineSpy = spy(SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)) + stateMachineSpy.currentState(currentState).next(deletionTransitions) + argumentCaptor().apply { + verify(stateMachineSpy).updateMetadata(capture()) + assertEquals(currentState, firstValue.deletion!!.currentState) + assertNull(firstValue.deletion!!.started) + assertEquals(3, firstValue.deletion!!.retry!!.count) + } } - } - - fun `test sm result Fail retry count remaining 2`() = runBlocking { - val currentState = SMState.DELETION_CONDITION_MET - - val ex = Exception() - mockGetSnapshotsCall(exception = ex) - val metadata = randomSMMetadata( - deletionCurrentState = currentState, - deletionRetryCount = 2, - deletionLatestExecution = randomLatestExecution( - status = SMMetadata.LatestExecution.Status.RETRYING, - ) - ) - val job = randomSMPolicy() - - val stateMachineSpy = spy(SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)) - stateMachineSpy.currentState(currentState).next(deletionTransitions) - argumentCaptor().apply { - verify(stateMachineSpy).updateMetadata(capture()) - assertEquals(currentState, firstValue.deletion!!.currentState) - assertEquals(1, firstValue.deletion?.retry!!.count) + + fun `test sm result Fail retry count remaining 2`() = + runBlocking { + val currentState = SMState.DELETION_CONDITION_MET + + val ex = Exception() + mockGetSnapshotsCall(exception = ex) + val metadata = + randomSMMetadata( + deletionCurrentState = currentState, + deletionRetryCount = 2, + deletionLatestExecution = + randomLatestExecution( + status = SMMetadata.LatestExecution.Status.RETRYING, + ), + ) + val job = randomSMPolicy() + + val stateMachineSpy = spy(SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)) + stateMachineSpy.currentState(currentState).next(deletionTransitions) + argumentCaptor().apply { + verify(stateMachineSpy).updateMetadata(capture()) + assertEquals(currentState, firstValue.deletion!!.currentState) + assertEquals(1, firstValue.deletion?.retry!!.count) + } } - } - - fun `test sm result Fail retry count has exhausted and reset workflow`() = runBlocking { - val currentState = SMState.DELETION_CONDITION_MET - val resetState = SMState.DELETION_START - - val ex = Exception() - mockGetSnapshotsCall(exception = ex) - val metadata = randomSMMetadata( - deletionCurrentState = currentState, - deletionRetryCount = 1, - deletionLatestExecution = randomLatestExecution( - status = SMMetadata.LatestExecution.Status.RETRYING, - ) - ) - val job = randomSMPolicy() - - val stateMachineSpy = spy(SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)) - stateMachineSpy.currentState(currentState).next(deletionTransitions) - argumentCaptor().apply { - 
verify(stateMachineSpy).updateMetadata(capture()) - assertEquals(resetState, firstValue.deletion!!.currentState) - assertNull(firstValue.deletion!!.retry) - assertNull(firstValue.deletion!!.started) + + fun `test sm result Fail retry count has exhausted and reset workflow`() = + runBlocking { + val currentState = SMState.DELETION_CONDITION_MET + val resetState = SMState.DELETION_START + + val ex = Exception() + mockGetSnapshotsCall(exception = ex) + val metadata = + randomSMMetadata( + deletionCurrentState = currentState, + deletionRetryCount = 1, + deletionLatestExecution = + randomLatestExecution( + status = SMMetadata.LatestExecution.Status.RETRYING, + ), + ) + val job = randomSMPolicy() + + val stateMachineSpy = spy(SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)) + stateMachineSpy.currentState(currentState).next(deletionTransitions) + argumentCaptor().apply { + verify(stateMachineSpy).updateMetadata(capture()) + assertEquals(resetState, firstValue.deletion!!.currentState) + assertNull(firstValue.deletion!!.retry) + assertNull(firstValue.deletion!!.started) + } } - } - - fun `test sm result Fail time limit exceed reset workflow`() = runBlocking { - val currentState = SMState.DELETING - val resetState = SMState.DELETION_START - - val snapshotName = "test_state_machine_deletion_time_exceed" - val snapshotInfo = mockSnapshotInfo(name = snapshotName) - mockGetSnapshotsCall(response = mockGetSnapshotResponse(snapshotInfo)) - val metadata = randomSMMetadata( - deletionCurrentState = currentState, - startedDeletion = listOf(snapshotName), - deletionLatestExecution = randomLatestExecution( - startTime = now().minusSeconds(50), - ) - ) - val job = randomSMPolicy( - policyName = "daily-snapshot", - deletionTimeLimit = TimeValue.timeValueSeconds(5) - ) - - val stateMachineSpy = spy(SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)) - stateMachineSpy.currentState(currentState).next(deletionTransitions) - argumentCaptor().apply { - // first execute DELETE_CONDITION_MET state and return Stay - // second execute FINISHED state and return Fail because of deletion time_limit_exceed - verify(stateMachineSpy, times(1)).updateMetadata(capture()) - assertEquals(resetState, firstValue.deletion!!.currentState) + + fun `test sm result Fail time limit exceed reset workflow`() = + runBlocking { + val currentState = SMState.DELETING + val resetState = SMState.DELETION_START + + val snapshotName = "test_state_machine_deletion_time_exceed" + val snapshotInfo = mockSnapshotInfo(name = snapshotName) + mockGetSnapshotsCall(response = mockGetSnapshotResponse(snapshotInfo)) + val metadata = + randomSMMetadata( + deletionCurrentState = currentState, + startedDeletion = listOf(snapshotName), + deletionLatestExecution = + randomLatestExecution( + startTime = now().minusSeconds(50), + ), + ) + val job = + randomSMPolicy( + policyName = "daily-snapshot", + deletionTimeLimit = TimeValue.timeValueSeconds(5), + ) + + val stateMachineSpy = spy(SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)) + stateMachineSpy.currentState(currentState).next(deletionTransitions) + argumentCaptor().apply { + // first execute DELETE_CONDITION_MET state and return Stay + // second execute FINISHED state and return Fail because of deletion time_limit_exceed + verify(stateMachineSpy, times(1)).updateMetadata(capture()) + assertEquals(resetState, firstValue.deletion!!.currentState) + } } - } - - fun `test handlePolicyChange`() = runBlocking { - val metadata = 
randomSMMetadata( - policySeqNo = 0, - policyPrimaryTerm = 0, - ) - val job = randomSMPolicy( - seqNo = 1, - primaryTerm = 1, - ) - - val stateMachineSpy = spy(SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)) - stateMachineSpy.handlePolicyChange() - argumentCaptor().apply { - verify(stateMachineSpy).updateMetadata(capture()) - assertEquals(1, firstValue.policySeqNo) - assertEquals(1, firstValue.policyPrimaryTerm) + + fun `test handlePolicyChange`() = + runBlocking { + val metadata = + randomSMMetadata( + policySeqNo = 0, + policyPrimaryTerm = 0, + ) + val job = + randomSMPolicy( + seqNo = 1, + primaryTerm = 1, + ) + + val stateMachineSpy = spy(SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)) + stateMachineSpy.handlePolicyChange() + argumentCaptor().apply { + verify(stateMachineSpy).updateMetadata(capture()) + assertEquals(1, firstValue.policySeqNo) + assertEquals(1, firstValue.policyPrimaryTerm) + } } - } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreatingStateTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreatingStateTests.kt index 7281ad22e..59aaab726 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreatingStateTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreatingStateTests.kt @@ -10,106 +10,115 @@ import org.opensearch.indexmanagement.MocksTestCase import org.opensearch.indexmanagement.snapshotmanagement.engine.SMStateMachine import org.opensearch.indexmanagement.snapshotmanagement.engine.states.SMResult import org.opensearch.indexmanagement.snapshotmanagement.engine.states.SMState -import org.opensearch.indexmanagement.snapshotmanagement.randomSMMetadata -import org.opensearch.indexmanagement.snapshotmanagement.randomSMPolicy import org.opensearch.indexmanagement.snapshotmanagement.mockCreateSnapshotResponse import org.opensearch.indexmanagement.snapshotmanagement.mockGetSnapshotResponse import org.opensearch.indexmanagement.snapshotmanagement.mockSnapshotInfo import org.opensearch.indexmanagement.snapshotmanagement.model.SMMetadata +import org.opensearch.indexmanagement.snapshotmanagement.randomSMMetadata +import org.opensearch.indexmanagement.snapshotmanagement.randomSMPolicy import java.time.Instant.now import java.time.temporal.ChronoUnit class CreatingStateTests : MocksTestCase() { - - fun `test create snapshot succeed`() = runBlocking { - mockGetSnapshotsCall(response = mockGetSnapshotResponse(0)) - mockCreateSnapshotCall(response = mockCreateSnapshotResponse()) - - val metadata = randomSMMetadata( - creationCurrentState = SMState.CREATION_CONDITION_MET, - ) - val job = randomSMPolicy() - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.CREATING.instance.execute(context) - assertTrue("Execution result should be Next.", result is SMResult.Next) - result as SMResult.Next - assertNotNull("Creation started field is initialized.", result.metadataToSave.build().creation.started) - assertEquals("Latest execution status is in_progress", SMMetadata.LatestExecution.Status.IN_PROGRESS, result.metadataToSave.build().creation.latestExecution!!.status) - } - - fun `test create snapshot exception`() = runBlocking { - val ex = Exception() - mockGetSnapshotsCall(response = mockGetSnapshotResponse(0)) - mockCreateSnapshotCall(exception = ex) - - val 
metadata = randomSMMetadata( - creationCurrentState = SMState.CREATION_CONDITION_MET, - ) - val job = randomSMPolicy() - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.CREATING.instance.execute(context) - assertTrue("Execution result should be Failure.", result is SMResult.Fail) - result as SMResult.Fail - assertNull("Creation started field should not be initialized.", result.metadataToSave.build().creation.started) - assertEquals("Latest execution status is retrying", SMMetadata.LatestExecution.Status.RETRYING, result.metadataToSave.build().creation.latestExecution!!.status) - assertNotNull("Latest execution info should not be null", result.metadataToSave.build().creation.latestExecution!!.info) - } - - fun `test snapshot already created in previous schedule`() = runBlocking { - val mockSnapshotInfo = mockSnapshotInfo(startTime = now().minusMillis(30).toEpochMilli()) - val mockGetSnapshotResponse = mockGetSnapshotResponse(mockSnapshotInfo) - val snapshotName = mockGetSnapshotResponse.snapshots.first().snapshotId().name - mockGetSnapshotsCall(response = mockGetSnapshotResponse) - - val metadata = randomSMMetadata( - creationCurrentState = SMState.CREATION_CONDITION_MET, - ) - val job = randomSMPolicy(policyName = "daily-snapshot") - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.CREATING.instance.execute(context) - assertTrue("Execution result should be Next.", result is SMResult.Next) - result as SMResult.Next - assertEquals("Started create snapshot name is $snapshotName.", snapshotName, result.metadataToSave.build().creation.started!!.first()) - assertEquals("Latest execution status is in_progress", SMMetadata.LatestExecution.Status.IN_PROGRESS, result.metadataToSave.build().creation.latestExecution!!.status) - } - - fun `test snapshot already created but not in previous schedule`() = runBlocking { - val mockSnapshotInfo = mockSnapshotInfo(startTime = now().minus(370, ChronoUnit.DAYS).toEpochMilli()) - val mockGetSnapshotResponse = mockGetSnapshotResponse(mockSnapshotInfo) - val snapshotName = mockGetSnapshotResponse.snapshots.first().snapshotId().name - mockGetSnapshotsCall(response = mockGetSnapshotResponse) - mockCreateSnapshotCall(response = mockCreateSnapshotResponse()) - - val metadata = randomSMMetadata( - creationCurrentState = SMState.CREATION_CONDITION_MET, - ) - val job = randomSMPolicy(policyName = "daily-snapshot") - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.CREATING.instance.execute(context) - assertTrue("Execution result should be Next.", result is SMResult.Next) - result as SMResult.Next - assertNotEquals("Started create snapshot name should not be $snapshotName.", snapshotName, result.metadataToSave.build().creation.started!!.first()) - } - - fun `test get snapshots exception while checking if snapshot already created`() = runBlocking { - val ex = Exception() - mockGetSnapshotsCall(exception = ex) - - val metadata = randomSMMetadata( - creationCurrentState = SMState.CREATION_CONDITION_MET, - ) - val job = randomSMPolicy() - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.CREATING.instance.execute(context) - assertTrue("Execution result should be Failure.", result is SMResult.Fail) - result as SMResult.Fail - assertNull("Creation started field should not be initialized.", 
result.metadataToSave.build().creation.started) - assertEquals("Latest execution status is retrying", SMMetadata.LatestExecution.Status.RETRYING, result.metadataToSave.build().creation.latestExecution!!.status) - assertNotNull("Latest execution info should not be null", result.metadataToSave.build().creation.latestExecution!!.info) - } + fun `test create snapshot succeed`() = + runBlocking { + mockGetSnapshotsCall(response = mockGetSnapshotResponse(0)) + mockCreateSnapshotCall(response = mockCreateSnapshotResponse()) + + val metadata = + randomSMMetadata( + creationCurrentState = SMState.CREATION_CONDITION_MET, + ) + val job = randomSMPolicy() + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.CREATING.instance.execute(context) + assertTrue("Execution result should be Next.", result is SMResult.Next) + result as SMResult.Next + assertNotNull("Creation started field is initialized.", result.metadataToSave.build().creation.started) + assertEquals("Latest execution status is in_progress", SMMetadata.LatestExecution.Status.IN_PROGRESS, result.metadataToSave.build().creation.latestExecution!!.status) + } + + fun `test create snapshot exception`() = + runBlocking { + val ex = Exception() + mockGetSnapshotsCall(response = mockGetSnapshotResponse(0)) + mockCreateSnapshotCall(exception = ex) + + val metadata = + randomSMMetadata( + creationCurrentState = SMState.CREATION_CONDITION_MET, + ) + val job = randomSMPolicy() + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.CREATING.instance.execute(context) + assertTrue("Execution result should be Failure.", result is SMResult.Fail) + result as SMResult.Fail + assertNull("Creation started field should not be initialized.", result.metadataToSave.build().creation.started) + assertEquals("Latest execution status is retrying", SMMetadata.LatestExecution.Status.RETRYING, result.metadataToSave.build().creation.latestExecution!!.status) + assertNotNull("Latest execution info should not be null", result.metadataToSave.build().creation.latestExecution!!.info) + } + + fun `test snapshot already created in previous schedule`() = + runBlocking { + val mockSnapshotInfo = mockSnapshotInfo(startTime = now().minusMillis(30).toEpochMilli()) + val mockGetSnapshotResponse = mockGetSnapshotResponse(mockSnapshotInfo) + val snapshotName = mockGetSnapshotResponse.snapshots.first().snapshotId().name + mockGetSnapshotsCall(response = mockGetSnapshotResponse) + + val metadata = + randomSMMetadata( + creationCurrentState = SMState.CREATION_CONDITION_MET, + ) + val job = randomSMPolicy(policyName = "daily-snapshot") + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.CREATING.instance.execute(context) + assertTrue("Execution result should be Next.", result is SMResult.Next) + result as SMResult.Next + assertEquals("Started create snapshot name is $snapshotName.", snapshotName, result.metadataToSave.build().creation.started!!.first()) + assertEquals("Latest execution status is in_progress", SMMetadata.LatestExecution.Status.IN_PROGRESS, result.metadataToSave.build().creation.latestExecution!!.status) + } + + fun `test snapshot already created but not in previous schedule`() = + runBlocking { + val mockSnapshotInfo = mockSnapshotInfo(startTime = now().minus(370, ChronoUnit.DAYS).toEpochMilli()) + val mockGetSnapshotResponse = mockGetSnapshotResponse(mockSnapshotInfo) + val snapshotName = 
mockGetSnapshotResponse.snapshots.first().snapshotId().name + mockGetSnapshotsCall(response = mockGetSnapshotResponse) + mockCreateSnapshotCall(response = mockCreateSnapshotResponse()) + + val metadata = + randomSMMetadata( + creationCurrentState = SMState.CREATION_CONDITION_MET, + ) + val job = randomSMPolicy(policyName = "daily-snapshot") + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.CREATING.instance.execute(context) + assertTrue("Execution result should be Next.", result is SMResult.Next) + result as SMResult.Next + assertNotEquals("Started create snapshot name should not be $snapshotName.", snapshotName, result.metadataToSave.build().creation.started!!.first()) + } + + fun `test get snapshots exception while checking if snapshot already created`() = + runBlocking { + val ex = Exception() + mockGetSnapshotsCall(exception = ex) + + val metadata = + randomSMMetadata( + creationCurrentState = SMState.CREATION_CONDITION_MET, + ) + val job = randomSMPolicy() + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.CREATING.instance.execute(context) + assertTrue("Execution result should be Failure.", result is SMResult.Fail) + result as SMResult.Fail + assertNull("Creation started field should not be initialized.", result.metadataToSave.build().creation.started) + assertEquals("Latest execution status is retrying", SMMetadata.LatestExecution.Status.RETRYING, result.metadataToSave.build().creation.latestExecution!!.status) + assertNotNull("Latest execution info should not be null", result.metadataToSave.build().creation.latestExecution!!.info) + } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationConditionMetStateTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationConditionMetStateTests.kt index 3b5d0a3d7..64ca24639 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationConditionMetStateTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationConditionMetStateTests.kt @@ -15,32 +15,35 @@ import org.opensearch.indexmanagement.snapshotmanagement.randomSMPolicy import java.time.Instant.now class CreationConditionMetStateTests : MocksTestCase() { + fun `test next creation time met`() = + runBlocking { + val metadata = + randomSMMetadata( + creationCurrentState = SMState.CREATION_START, + nextCreationTime = now().minusSeconds(60), + ) + val job = randomSMPolicy() + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - fun `test next creation time met`() = runBlocking { - val metadata = randomSMMetadata( - creationCurrentState = SMState.CREATION_START, - nextCreationTime = now().minusSeconds(60), - ) - val job = randomSMPolicy() - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + val result = SMState.CREATION_CONDITION_MET.instance.execute(context) + assertTrue("Execution result should be Next.", result is SMResult.Next) + result as SMResult.Next + assertNotEquals("Next execution time should be updated.", metadata.creation.trigger.time, result.metadataToSave.build().creation.trigger.time) + } - val result = SMState.CREATION_CONDITION_MET.instance.execute(context) - assertTrue("Execution result should be Next.", result is SMResult.Next) - result as SMResult.Next - 
assertNotEquals("Next execution time should be updated.", metadata.creation.trigger.time, result.metadataToSave.build().creation.trigger.time) - } + fun `test next creation time has not met`() = + runBlocking { + val metadata = + randomSMMetadata( + creationCurrentState = SMState.CREATION_START, + nextCreationTime = now().plusSeconds(60), + ) + val job = randomSMPolicy() + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - fun `test next creation time has not met`() = runBlocking { - val metadata = randomSMMetadata( - creationCurrentState = SMState.CREATION_START, - nextCreationTime = now().plusSeconds(60), - ) - val job = randomSMPolicy() - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.CREATION_CONDITION_MET.instance.execute(context) - assertTrue("Execution result should be Stay.", result is SMResult.Stay) - result as SMResult.Stay - assertEquals("Next execution time should not be updated.", metadata, result.metadataToSave.build()) - } + val result = SMState.CREATION_CONDITION_MET.instance.execute(context) + assertTrue("Execution result should be Stay.", result is SMResult.Stay) + result as SMResult.Stay + assertEquals("Next execution time should not be updated.", metadata, result.metadataToSave.build()) + } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationFinishedStateTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationFinishedStateTests.kt index 27ef430e7..0db5accff 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationFinishedStateTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationFinishedStateTests.kt @@ -21,149 +21,168 @@ import org.opensearch.indexmanagement.snapshotmanagement.randomSMPolicy import java.time.Instant class CreationFinishedStateTests : MocksTestCase() { - fun `test creation end successful`() = runBlocking { - val snapshotName = "test_creation_succeed" - val snapshotInfo = mockSnapshotInfo(name = snapshotName) - mockGetSnapshotsCall(response = mockGetSnapshotResponse(snapshotInfo)) - - val metadata = randomSMMetadata( - creationCurrentState = SMState.CREATING, - startedCreation = snapshotName, - creationLatestExecution = randomLatestExecution( - startTime = Instant.now(), - ), - ) - val job = randomSMPolicy(policyName = "daily-snapshot") - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.CREATION_FINISHED.instance.execute(context) - assertTrue("Execution results should be Next.", result is SMResult.Next) - result as SMResult.Next - val metadataToSave = result.metadataToSave.build() - assertNull("Started creation should be reset to null.", metadataToSave.creation.started) - assertEquals("Latest execution status is success", SMMetadata.LatestExecution.Status.SUCCESS, metadataToSave.creation.latestExecution!!.status) - assertNotNull("Latest execution status end_time should not be null", metadataToSave.creation.latestExecution!!.endTime) - assertNotNull("Latest execution status message should not be null", metadataToSave.creation.latestExecution!!.info!!.message) - } - - fun `test creation still in progress`() = runBlocking { - val snapshotName = "test_creation_in_progress" - val snapshotInfo = mockInProgressSnapshotInfo(name = snapshotName) - 
mockGetSnapshotsCall(response = mockGetSnapshotResponse(snapshotInfo)) - - val metadata = randomSMMetadata( - creationCurrentState = SMState.CREATING, - startedCreation = snapshotName, - creationLatestExecution = randomLatestExecution( - startTime = Instant.now(), - ), - ) - val job = randomSMPolicy(policyName = "daily-snapshot") - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.CREATION_FINISHED.instance.execute(context) - assertTrue("Execution results should be Stay.", result is SMResult.Stay) - result as SMResult.Stay - val metadataToSave = result.metadataToSave.build() - assertEquals("Started creation should not be reset.", snapshotName, metadataToSave.creation.started!!.first()) - } - - fun `test creation end not successful`() = runBlocking { - val snapshotName = "test_creation_end_with_failed_state" - val snapshotInfo = mockSnapshotInfo(name = snapshotName, reason = "failed state") - mockGetSnapshotsCall(response = mockGetSnapshotResponse(snapshotInfo)) - - val metadata = randomSMMetadata( - creationCurrentState = SMState.CREATING, - startedCreation = snapshotName, - creationLatestExecution = randomLatestExecution( - startTime = Instant.now(), - ), - ) - val job = randomSMPolicy(policyName = "daily-snapshot") - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.CREATION_FINISHED.instance.execute(context) - assertTrue("Execution results should be Fail.", result is SMResult.Fail) - result as SMResult.Fail - val metadataToSave = result.metadataToSave.build() - assertNull("Started creation should be reset to null.", metadataToSave.creation.started) - assertEquals("Latest execution status is failed", SMMetadata.LatestExecution.Status.FAILED, metadataToSave.creation.latestExecution!!.status) - assertNotNull("Latest execution status end_time should not be null", metadataToSave.creation.latestExecution!!.endTime) - assertNotNull("Latest execution status cause should not be null", metadataToSave.creation.latestExecution!!.info!!.cause) - } - - fun `test get snapshots exception in creation`() = runBlocking { - val snapshotName = "test_creation_get_snapshots_exception" - mockGetSnapshotsCall(exception = Exception()) - - val metadata = randomSMMetadata( - creationCurrentState = SMState.CREATING, - startedCreation = snapshotName, - creationLatestExecution = randomLatestExecution( - startTime = Instant.now(), - ), - ) - val job = randomSMPolicy(policyName = "daily-snapshot") - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.CREATION_FINISHED.instance.execute(context) - assertTrue("Execution results should be Failure.", result is SMResult.Fail) - result as SMResult.Fail - val metadataToSave = result.metadataToSave.build() - assertEquals("Latest execution status is retrying", SMMetadata.LatestExecution.Status.RETRYING, metadataToSave.creation.latestExecution!!.status) - assertNull("Latest execution status end_time should be null", metadataToSave.creation.latestExecution!!.endTime) - assertNotNull("Latest execution status info should not be null", metadataToSave.creation.latestExecution!!.info) - } - - fun `test get snapshots empty in creation`() = runBlocking { - val snapshotName = "test_creation_get_snapshots_empty" - mockGetSnapshotsCall(response = mockGetSnapshotResponse(0)) - - val metadata = randomSMMetadata( - creationCurrentState = SMState.CREATING, - startedCreation = snapshotName, - 
creationLatestExecution = randomLatestExecution( - startTime = Instant.now(), - ), - ) - val job = randomSMPolicy(policyName = "daily-snapshot") - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.CREATION_FINISHED.instance.execute(context) - assertTrue("Execution results should be Next.", result is SMResult.Next) - result as SMResult.Next - val metadataToSave = result.metadataToSave.build() - assertEquals("Latest execution status is success", SMMetadata.LatestExecution.Status.SUCCESS, metadataToSave.creation.latestExecution!!.status) - assertNotNull("Latest execution status end_time should not be null", metadataToSave.creation.latestExecution!!.endTime) - assertNotNull("Latest execution status message should not be null", metadataToSave.creation.latestExecution!!.info!!.message) - } - - fun `test creation time limit exceed`() = runBlocking { - val snapshotName = "test_creation_time_exceed" - val snapshotInfo = mockInProgressSnapshotInfo(name = snapshotName) - mockGetSnapshotsCall(response = mockGetSnapshotResponse(snapshotInfo)) - - val metadata = randomSMMetadata( - creationCurrentState = SMState.CREATING, - startedCreation = snapshotName, - creationLatestExecution = randomLatestExecution( - startTime = Instant.now().minusSeconds(10), - ), - ) - val job = randomSMPolicy( - policyName = "daily-snapshot", - creationTimeLimit = TimeValue.timeValueSeconds(5) - ) - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.CREATION_FINISHED.instance.execute(context) - assertTrue("Execution results should be Failure.", result is SMResult.Fail) - result as SMResult.Fail - val metadataToSave = result.metadataToSave.build() - assertTrue(result.forceReset!!) 
- assertEquals("Latest execution status is time limit exceed", SMMetadata.LatestExecution.Status.TIME_LIMIT_EXCEEDED, metadataToSave.creation.latestExecution!!.status) - assertNotNull("Latest execution status end_time should not be null", metadataToSave.creation.latestExecution!!.endTime) - assertNotNull("Latest execution status cause should not be null", metadataToSave.creation.latestExecution!!.info!!.cause) - } + fun `test creation end successful`() = + runBlocking { + val snapshotName = "test_creation_succeed" + val snapshotInfo = mockSnapshotInfo(name = snapshotName) + mockGetSnapshotsCall(response = mockGetSnapshotResponse(snapshotInfo)) + + val metadata = + randomSMMetadata( + creationCurrentState = SMState.CREATING, + startedCreation = snapshotName, + creationLatestExecution = + randomLatestExecution( + startTime = Instant.now(), + ), + ) + val job = randomSMPolicy(policyName = "daily-snapshot") + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.CREATION_FINISHED.instance.execute(context) + assertTrue("Execution results should be Next.", result is SMResult.Next) + result as SMResult.Next + val metadataToSave = result.metadataToSave.build() + assertNull("Started creation should be reset to null.", metadataToSave.creation.started) + assertEquals("Latest execution status is success", SMMetadata.LatestExecution.Status.SUCCESS, metadataToSave.creation.latestExecution!!.status) + assertNotNull("Latest execution status end_time should not be null", metadataToSave.creation.latestExecution!!.endTime) + assertNotNull("Latest execution status message should not be null", metadataToSave.creation.latestExecution!!.info!!.message) + } + + fun `test creation still in progress`() = + runBlocking { + val snapshotName = "test_creation_in_progress" + val snapshotInfo = mockInProgressSnapshotInfo(name = snapshotName) + mockGetSnapshotsCall(response = mockGetSnapshotResponse(snapshotInfo)) + + val metadata = + randomSMMetadata( + creationCurrentState = SMState.CREATING, + startedCreation = snapshotName, + creationLatestExecution = + randomLatestExecution( + startTime = Instant.now(), + ), + ) + val job = randomSMPolicy(policyName = "daily-snapshot") + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.CREATION_FINISHED.instance.execute(context) + assertTrue("Execution results should be Stay.", result is SMResult.Stay) + result as SMResult.Stay + val metadataToSave = result.metadataToSave.build() + assertEquals("Started creation should not be reset.", snapshotName, metadataToSave.creation.started!!.first()) + } + + fun `test creation end not successful`() = + runBlocking { + val snapshotName = "test_creation_end_with_failed_state" + val snapshotInfo = mockSnapshotInfo(name = snapshotName, reason = "failed state") + mockGetSnapshotsCall(response = mockGetSnapshotResponse(snapshotInfo)) + + val metadata = + randomSMMetadata( + creationCurrentState = SMState.CREATING, + startedCreation = snapshotName, + creationLatestExecution = + randomLatestExecution( + startTime = Instant.now(), + ), + ) + val job = randomSMPolicy(policyName = "daily-snapshot") + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.CREATION_FINISHED.instance.execute(context) + assertTrue("Execution results should be Fail.", result is SMResult.Fail) + result as SMResult.Fail + val metadataToSave = result.metadataToSave.build() + assertNull("Started 
creation should be reset to null.", metadataToSave.creation.started) + assertEquals("Latest execution status is failed", SMMetadata.LatestExecution.Status.FAILED, metadataToSave.creation.latestExecution!!.status) + assertNotNull("Latest execution status end_time should not be null", metadataToSave.creation.latestExecution!!.endTime) + assertNotNull("Latest execution status cause should not be null", metadataToSave.creation.latestExecution!!.info!!.cause) + } + + fun `test get snapshots exception in creation`() = + runBlocking { + val snapshotName = "test_creation_get_snapshots_exception" + mockGetSnapshotsCall(exception = Exception()) + + val metadata = + randomSMMetadata( + creationCurrentState = SMState.CREATING, + startedCreation = snapshotName, + creationLatestExecution = + randomLatestExecution( + startTime = Instant.now(), + ), + ) + val job = randomSMPolicy(policyName = "daily-snapshot") + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.CREATION_FINISHED.instance.execute(context) + assertTrue("Execution results should be Failure.", result is SMResult.Fail) + result as SMResult.Fail + val metadataToSave = result.metadataToSave.build() + assertEquals("Latest execution status is retrying", SMMetadata.LatestExecution.Status.RETRYING, metadataToSave.creation.latestExecution!!.status) + assertNull("Latest execution status end_time should be null", metadataToSave.creation.latestExecution!!.endTime) + assertNotNull("Latest execution status info should not be null", metadataToSave.creation.latestExecution!!.info) + } + + fun `test get snapshots empty in creation`() = + runBlocking { + val snapshotName = "test_creation_get_snapshots_empty" + mockGetSnapshotsCall(response = mockGetSnapshotResponse(0)) + + val metadata = + randomSMMetadata( + creationCurrentState = SMState.CREATING, + startedCreation = snapshotName, + creationLatestExecution = + randomLatestExecution( + startTime = Instant.now(), + ), + ) + val job = randomSMPolicy(policyName = "daily-snapshot") + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.CREATION_FINISHED.instance.execute(context) + assertTrue("Execution results should be Next.", result is SMResult.Next) + result as SMResult.Next + val metadataToSave = result.metadataToSave.build() + assertEquals("Latest execution status is success", SMMetadata.LatestExecution.Status.SUCCESS, metadataToSave.creation.latestExecution!!.status) + assertNotNull("Latest execution status end_time should not be null", metadataToSave.creation.latestExecution!!.endTime) + assertNotNull("Latest execution status message should not be null", metadataToSave.creation.latestExecution!!.info!!.message) + } + + fun `test creation time limit exceed`() = + runBlocking { + val snapshotName = "test_creation_time_exceed" + val snapshotInfo = mockInProgressSnapshotInfo(name = snapshotName) + mockGetSnapshotsCall(response = mockGetSnapshotResponse(snapshotInfo)) + + val metadata = + randomSMMetadata( + creationCurrentState = SMState.CREATING, + startedCreation = snapshotName, + creationLatestExecution = + randomLatestExecution( + startTime = Instant.now().minusSeconds(10), + ), + ) + val job = + randomSMPolicy( + policyName = "daily-snapshot", + creationTimeLimit = TimeValue.timeValueSeconds(5), + ) + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.CREATION_FINISHED.instance.execute(context) + 
assertTrue("Execution results should be Failure.", result is SMResult.Fail) + result as SMResult.Fail + val metadataToSave = result.metadataToSave.build() + assertTrue(result.forceReset!!) + assertEquals("Latest execution status is time limit exceed", SMMetadata.LatestExecution.Status.TIME_LIMIT_EXCEEDED, metadataToSave.creation.latestExecution!!.status) + assertNotNull("Latest execution status end_time should not be null", metadataToSave.creation.latestExecution!!.endTime) + assertNotNull("Latest execution status cause should not be null", metadataToSave.creation.latestExecution!!.info!!.cause) + } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationStartStateTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationStartStateTests.kt index 1eeee7e06..3877fe717 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationStartStateTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreationStartStateTests.kt @@ -14,15 +14,16 @@ import org.opensearch.indexmanagement.snapshotmanagement.randomSMMetadata import org.opensearch.indexmanagement.snapshotmanagement.randomSMPolicy class CreationStartStateTests : MocksTestCase() { + fun `test start state execution`() = + runBlocking { + val metadata = + randomSMMetadata( + creationCurrentState = SMState.CREATION_FINISHED, + ) + val job = randomSMPolicy() + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - fun `test start state execution`() = runBlocking { - val metadata = randomSMMetadata( - creationCurrentState = SMState.CREATION_FINISHED - ) - val job = randomSMPolicy() - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.CREATION_START.instance.execute(context) - assertTrue("Execution result should be Next.", result is SMResult.Next) - } + val result = SMState.CREATION_START.instance.execute(context) + assertTrue("Execution result should be Next.", result is SMResult.Next) + } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletingStateTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletingStateTests.kt index f9eaa7137..3d76ed6d4 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletingStateTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletingStateTests.kt @@ -23,156 +23,175 @@ import org.opensearch.indexmanagement.snapshotmanagement.randomSnapshotName import java.time.Instant.now class DeletingStateTests : MocksTestCase() { - - fun `test snapshots exceed max count`() = runBlocking { - mockGetSnapshotsCall(response = mockGetSnapshotResponse(11)) - mockDeleteSnapshotCall(response = AcknowledgedResponse(true)) - - val metadata = randomSMMetadata( - deletionCurrentState = SMState.DELETION_CONDITION_MET, - ) - val job = randomSMPolicy( - policyName = "daily-snapshot", - deletionMaxCount = 10, - ) - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.DELETING.instance.execute(context) - assertTrue("Execution result should be Next.", result is SMResult.Next) - result as SMResult.Next - val metadataToSave = result.metadataToSave.build() - assertNotNull("Deletion 
started field is initialized.", metadataToSave.deletion!!.started) - assertEquals(1, metadataToSave.deletion?.started!!.size) - assertEquals("Latest execution status is in_progress", SMMetadata.LatestExecution.Status.IN_PROGRESS, metadataToSave.deletion!!.latestExecution!!.status) - } - - fun `test snapshots exceed max age`() = runBlocking { - val oldSnapshot = mockSnapshotInfo(name = "old_snapshot", startTime = now().minusSeconds(2 * 60).toEpochMilli()) - val newSnapshot = mockSnapshotInfo(name = "new_snapshot", startTime = now().toEpochMilli()) - mockGetSnapshotsCall(response = mockGetSnapshotsResponse(listOf(oldSnapshot, newSnapshot, newSnapshot))) - mockDeleteSnapshotCall(response = AcknowledgedResponse(true)) - - val metadata = randomSMMetadata( - deletionCurrentState = SMState.DELETION_CONDITION_MET, - ) - val job = randomSMPolicy( - policyName = "daily-snapshot", - deletionMaxAge = TimeValue.timeValueMinutes(1), - deletionMinCount = 2, - ) - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.DELETING.instance.execute(context) - assertTrue("Execution result should be Next.", result is SMResult.Next) - result as SMResult.Next - val metadataToSave = result.metadataToSave.build() - assertNotNull("Deletion started field is initialized.", metadataToSave.deletion!!.started) - assertEquals(1, metadataToSave.deletion?.started!!.size) - assertEquals("old_snapshot", metadataToSave.deletion!!.started!!.first()) - assertEquals("Latest execution status is in_progress", SMMetadata.LatestExecution.Status.IN_PROGRESS, metadataToSave.deletion!!.latestExecution!!.status) - } - - fun `test snapshots exceed max age but need to remain min count`() = runBlocking { - val oldSnapshot = mockSnapshotInfo(name = "old_snapshot", startTime = now().minusSeconds(2 * 60).toEpochMilli()) - val newSnapshot = mockSnapshotInfo(name = "new_snapshot", startTime = now().toEpochMilli()) - mockGetSnapshotsCall(response = mockGetSnapshotsResponse(listOf(oldSnapshot, newSnapshot))) - - val metadata = randomSMMetadata( - deletionCurrentState = SMState.DELETION_CONDITION_MET, - ) - val job = randomSMPolicy( - policyName = "daily-snapshot", - deletionMaxAge = TimeValue.timeValueMinutes(1), - deletionMinCount = 2, - ) - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.DELETING.instance.execute(context) - assertTrue("Execution result should be Next.", result is SMResult.Next) - result as SMResult.Next - val metadataToSave = result.metadataToSave.build() - assertNull("Deletion started field should not be initialized.", metadataToSave.deletion!!.started) - assertNull("Latest execution should not be initialized", metadataToSave.deletion!!.latestExecution) - } - - fun `test snapshots min count check won't get negative deletion count`() = runBlocking { - val oldSnapshot = mockSnapshotInfo(name = "old_snapshot", startTime = now().minusSeconds(2 * 60).toEpochMilli()) - mockGetSnapshotsCall(response = mockGetSnapshotsResponse(listOf(oldSnapshot))) - - val metadata = randomSMMetadata( - deletionCurrentState = SMState.DELETION_CONDITION_MET, - ) - val job = randomSMPolicy( - policyName = "daily-snapshot", - deletionMaxAge = TimeValue.timeValueMinutes(1), - deletionMinCount = 2, - ) - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.DELETING.instance.execute(context) - assertTrue("Execution result should be Next.", result is SMResult.Next) - result 
as SMResult.Next - val metadataToSave = result.metadataToSave.build() - assertNull("Deletion started field should not be initialized.", metadataToSave.deletion!!.started) - assertNull("Latest execution should not be initialized", metadataToSave.deletion!!.latestExecution) - } - - fun `test delete snapshot exception`() = runBlocking { - val ex = Exception() - mockGetSnapshotsCall(response = mockGetSnapshotResponse(11)) - mockDeleteSnapshotCall(exception = ex) - - val metadata = randomSMMetadata( - deletionCurrentState = SMState.DELETION_CONDITION_MET, - ) - val job = randomSMPolicy(policyName = "daily-snapshot") - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.DELETING.instance.execute(context) - assertTrue("Execution result should be Failure.", result is SMResult.Fail) - result as SMResult.Fail - val metadataToSave = result.metadataToSave.build() - assertNull("Deletion started field should not be initialized.", metadataToSave.deletion!!.started) - assertEquals("Latest execution status is retrying", SMMetadata.LatestExecution.Status.RETRYING, metadataToSave.deletion!!.latestExecution!!.status) - assertNotNull("Latest execution info should not be null", metadataToSave.deletion!!.latestExecution!!.info) - } - - fun `test get snapshots exception`() = runBlocking { - val ex = Exception() - mockGetSnapshotsCall(exception = ex) - - val metadata = randomSMMetadata( - deletionCurrentState = SMState.DELETION_CONDITION_MET, - deletionLatestExecution = randomLatestExecution( - startTime = now().minusSeconds(10), - ) - ) - val job = randomSMPolicy(policyName = "daily-snapshot") - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.DELETING.instance.execute(context) - assertTrue("Execution result should be Fail.", result is SMResult.Fail) - result as SMResult.Fail - val metadataToSave = result.metadataToSave.build() - assertNull("Deletion started field should not be initialized.", metadataToSave.deletion!!.started) - assertEquals("Latest execution status is retrying", SMMetadata.LatestExecution.Status.RETRYING, metadataToSave.deletion!!.latestExecution!!.status) - assertNotNull("Latest execution info should not be null", metadataToSave.deletion!!.latestExecution!!.info) - } - - fun `test policy deletion is null`() = runBlocking { - val metadata = randomSMMetadata( - deletionCurrentState = SMState.DELETION_CONDITION_MET, - startedDeletion = listOf(randomSnapshotName()), - ) - val job = randomSMPolicy( - deletionNull = true - ) - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.DELETING.instance.execute(context) - assertTrue("Execution result should be Fail.", result is SMResult.Fail) - result as SMResult.Fail - val metadataToSave = result.metadataToSave.build() - assertNull("Deletion metadata should be null.", metadataToSave.deletion) - } + fun `test snapshots exceed max count`() = + runBlocking { + mockGetSnapshotsCall(response = mockGetSnapshotResponse(11)) + mockDeleteSnapshotCall(response = AcknowledgedResponse(true)) + + val metadata = + randomSMMetadata( + deletionCurrentState = SMState.DELETION_CONDITION_MET, + ) + val job = + randomSMPolicy( + policyName = "daily-snapshot", + deletionMaxCount = 10, + ) + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.DELETING.instance.execute(context) + assertTrue("Execution result should be Next.", 
result is SMResult.Next) + result as SMResult.Next + val metadataToSave = result.metadataToSave.build() + assertNotNull("Deletion started field is initialized.", metadataToSave.deletion!!.started) + assertEquals(1, metadataToSave.deletion?.started!!.size) + assertEquals("Latest execution status is in_progress", SMMetadata.LatestExecution.Status.IN_PROGRESS, metadataToSave.deletion!!.latestExecution!!.status) + } + + fun `test snapshots exceed max age`() = + runBlocking { + val oldSnapshot = mockSnapshotInfo(name = "old_snapshot", startTime = now().minusSeconds(2 * 60).toEpochMilli()) + val newSnapshot = mockSnapshotInfo(name = "new_snapshot", startTime = now().toEpochMilli()) + mockGetSnapshotsCall(response = mockGetSnapshotsResponse(listOf(oldSnapshot, newSnapshot, newSnapshot))) + mockDeleteSnapshotCall(response = AcknowledgedResponse(true)) + + val metadata = + randomSMMetadata( + deletionCurrentState = SMState.DELETION_CONDITION_MET, + ) + val job = + randomSMPolicy( + policyName = "daily-snapshot", + deletionMaxAge = TimeValue.timeValueMinutes(1), + deletionMinCount = 2, + ) + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.DELETING.instance.execute(context) + assertTrue("Execution result should be Next.", result is SMResult.Next) + result as SMResult.Next + val metadataToSave = result.metadataToSave.build() + assertNotNull("Deletion started field is initialized.", metadataToSave.deletion!!.started) + assertEquals(1, metadataToSave.deletion?.started!!.size) + assertEquals("old_snapshot", metadataToSave.deletion!!.started!!.first()) + assertEquals("Latest execution status is in_progress", SMMetadata.LatestExecution.Status.IN_PROGRESS, metadataToSave.deletion!!.latestExecution!!.status) + } + + fun `test snapshots exceed max age but need to remain min count`() = + runBlocking { + val oldSnapshot = mockSnapshotInfo(name = "old_snapshot", startTime = now().minusSeconds(2 * 60).toEpochMilli()) + val newSnapshot = mockSnapshotInfo(name = "new_snapshot", startTime = now().toEpochMilli()) + mockGetSnapshotsCall(response = mockGetSnapshotsResponse(listOf(oldSnapshot, newSnapshot))) + + val metadata = + randomSMMetadata( + deletionCurrentState = SMState.DELETION_CONDITION_MET, + ) + val job = + randomSMPolicy( + policyName = "daily-snapshot", + deletionMaxAge = TimeValue.timeValueMinutes(1), + deletionMinCount = 2, + ) + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.DELETING.instance.execute(context) + assertTrue("Execution result should be Next.", result is SMResult.Next) + result as SMResult.Next + val metadataToSave = result.metadataToSave.build() + assertNull("Deletion started field should not be initialized.", metadataToSave.deletion!!.started) + assertNull("Latest execution should not be initialized", metadataToSave.deletion!!.latestExecution) + } + + fun `test snapshots min count check won't get negative deletion count`() = + runBlocking { + val oldSnapshot = mockSnapshotInfo(name = "old_snapshot", startTime = now().minusSeconds(2 * 60).toEpochMilli()) + mockGetSnapshotsCall(response = mockGetSnapshotsResponse(listOf(oldSnapshot))) + + val metadata = + randomSMMetadata( + deletionCurrentState = SMState.DELETION_CONDITION_MET, + ) + val job = + randomSMPolicy( + policyName = "daily-snapshot", + deletionMaxAge = TimeValue.timeValueMinutes(1), + deletionMinCount = 2, + ) + val context = SMStateMachine(client, job, metadata, settings, threadPool, 
indicesManager) + + val result = SMState.DELETING.instance.execute(context) + assertTrue("Execution result should be Next.", result is SMResult.Next) + result as SMResult.Next + val metadataToSave = result.metadataToSave.build() + assertNull("Deletion started field should not be initialized.", metadataToSave.deletion!!.started) + assertNull("Latest execution should not be initialized", metadataToSave.deletion!!.latestExecution) + } + + fun `test delete snapshot exception`() = + runBlocking { + val ex = Exception() + mockGetSnapshotsCall(response = mockGetSnapshotResponse(11)) + mockDeleteSnapshotCall(exception = ex) + + val metadata = + randomSMMetadata( + deletionCurrentState = SMState.DELETION_CONDITION_MET, + ) + val job = randomSMPolicy(policyName = "daily-snapshot") + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.DELETING.instance.execute(context) + assertTrue("Execution result should be Failure.", result is SMResult.Fail) + result as SMResult.Fail + val metadataToSave = result.metadataToSave.build() + assertNull("Deletion started field should not be initialized.", metadataToSave.deletion!!.started) + assertEquals("Latest execution status is retrying", SMMetadata.LatestExecution.Status.RETRYING, metadataToSave.deletion!!.latestExecution!!.status) + assertNotNull("Latest execution info should not be null", metadataToSave.deletion!!.latestExecution!!.info) + } + + fun `test get snapshots exception`() = + runBlocking { + val ex = Exception() + mockGetSnapshotsCall(exception = ex) + + val metadata = + randomSMMetadata( + deletionCurrentState = SMState.DELETION_CONDITION_MET, + deletionLatestExecution = + randomLatestExecution( + startTime = now().minusSeconds(10), + ), + ) + val job = randomSMPolicy(policyName = "daily-snapshot") + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.DELETING.instance.execute(context) + assertTrue("Execution result should be Fail.", result is SMResult.Fail) + result as SMResult.Fail + val metadataToSave = result.metadataToSave.build() + assertNull("Deletion started field should not be initialized.", metadataToSave.deletion!!.started) + assertEquals("Latest execution status is retrying", SMMetadata.LatestExecution.Status.RETRYING, metadataToSave.deletion!!.latestExecution!!.status) + assertNotNull("Latest execution info should not be null", metadataToSave.deletion!!.latestExecution!!.info) + } + + fun `test policy deletion is null`() = + runBlocking { + val metadata = + randomSMMetadata( + deletionCurrentState = SMState.DELETION_CONDITION_MET, + startedDeletion = listOf(randomSnapshotName()), + ) + val job = + randomSMPolicy( + deletionNull = true, + ) + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.DELETING.instance.execute(context) + assertTrue("Execution result should be Fail.", result is SMResult.Fail) + result as SMResult.Fail + val metadataToSave = result.metadataToSave.build() + assertNull("Deletion metadata should be null.", metadataToSave.deletion) + } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionConditionMetStateTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionConditionMetStateTests.kt index 3b1cbc6d5..1c5330f58 100644 --- 
a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionConditionMetStateTests.kt
+++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionConditionMetStateTests.kt
@@ -16,48 +16,54 @@ import org.opensearch.indexmanagement.snapshotmanagement.randomSnapshotName
 import java.time.Instant
 
 class DeletionConditionMetStateTests : MocksTestCase() {
+    fun `test next deletion time met`() =
+        runBlocking {
+            val metadata =
+                randomSMMetadata(
+                    deletionCurrentState = SMState.DELETION_START,
+                    nextDeletionTime = Instant.now().minusSeconds(60),
+                )
+            val job = randomSMPolicy()
+            val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)
-    fun `test next deletion time met`() = runBlocking {
-        val metadata = randomSMMetadata(
-            deletionCurrentState = SMState.DELETION_START,
-            nextDeletionTime = Instant.now().minusSeconds(60),
-        )
-        val job = randomSMPolicy()
-        val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)
-
-        val result = SMState.DELETION_CONDITION_MET.instance.execute(context)
-        assertTrue("Execution result should be Next.", result is SMResult.Next)
-        result as SMResult.Next
-        assertNotEquals("Next execution time should be updated.", metadata.deletion!!.trigger.time, result.metadataToSave.build().deletion!!.trigger.time)
-    }
-
-    fun `test next deletion time has not met`() = runBlocking {
-        val metadata = randomSMMetadata(
-            deletionCurrentState = SMState.DELETION_START,
-            nextDeletionTime = Instant.now().plusSeconds(60),
-        )
-        val job = randomSMPolicy()
-        val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)
-
-        val result = SMState.DELETION_CONDITION_MET.instance.execute(context)
-        assertTrue("Execution result should be Stay.", result is SMResult.Stay)
-        result as SMResult.Stay
-        assertEquals("Next execution time should not be updated.", metadata, result.metadataToSave.build())
-    }
-
-    fun `test job deletion config is null`() = runBlocking {
-        val metadata = randomSMMetadata(
-            deletionCurrentState = SMState.DELETION_START,
-            startedDeletion = listOf(randomSnapshotName()),
-        )
-        val job = randomSMPolicy(
-            deletionNull = true
-        )
-        val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)
-
-        val result = SMState.DELETION_CONDITION_MET.instance.execute(context)
-        assertTrue("Execution result should be Fail.", result is SMResult.Fail)
-        result as SMResult.Fail
-        assertNull("Deletion metadata should be null.", result.metadataToSave.build().deletion)
-    }
+            val result = SMState.DELETION_CONDITION_MET.instance.execute(context)
+            assertTrue("Execution result should be Next.", result is SMResult.Next)
+            result as SMResult.Next
+            assertNotEquals("Next execution time should be updated.", metadata.deletion!!.trigger.time, result.metadataToSave.build().deletion!!.trigger.time)
+        }
+
+    fun `test next deletion time has not met`() =
+        runBlocking {
+            val metadata =
+                randomSMMetadata(
+                    deletionCurrentState = SMState.DELETION_START,
+                    nextDeletionTime = Instant.now().plusSeconds(60),
+                )
+            val job = randomSMPolicy()
+            val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager)
+
+            val result = SMState.DELETION_CONDITION_MET.instance.execute(context)
+            assertTrue("Execution result should be Stay.", result is SMResult.Stay)
+            result as SMResult.Stay
+            assertEquals("Next execution time should not be updated.", metadata, result.metadataToSave.build())
+        }
+
+    fun
`test job deletion config is null`() = + runBlocking { + val metadata = + randomSMMetadata( + deletionCurrentState = SMState.DELETION_START, + startedDeletion = listOf(randomSnapshotName()), + ) + val job = + randomSMPolicy( + deletionNull = true, + ) + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + + val result = SMState.DELETION_CONDITION_MET.instance.execute(context) + assertTrue("Execution result should be Fail.", result is SMResult.Fail) + result as SMResult.Fail + assertNull("Deletion metadata should be null.", result.metadataToSave.build().deletion) + } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionFinishedStateTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionFinishedStateTests.kt index e3646c765..c3334f7ba 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionFinishedStateTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionFinishedStateTests.kt @@ -20,99 +20,111 @@ import org.opensearch.indexmanagement.snapshotmanagement.randomSMPolicy import java.time.Instant class DeletionFinishedStateTests : MocksTestCase() { - fun `test deletion succeed`() = runBlocking { - val snapshotName = "test_deletion_success" - mockGetSnapshotsCall(response = mockGetSnapshotResponse(0)) + fun `test deletion succeed`() = + runBlocking { + val snapshotName = "test_deletion_success" + mockGetSnapshotsCall(response = mockGetSnapshotResponse(0)) - val metadata = randomSMMetadata( - deletionCurrentState = SMState.DELETING, - startedDeletion = listOf(snapshotName), - deletionLatestExecution = randomLatestExecution( - startTime = Instant.now().minusSeconds(50), - ), - ) - val job = randomSMPolicy(policyName = "daily-snapshot") - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + val metadata = + randomSMMetadata( + deletionCurrentState = SMState.DELETING, + startedDeletion = listOf(snapshotName), + deletionLatestExecution = + randomLatestExecution( + startTime = Instant.now().minusSeconds(50), + ), + ) + val job = randomSMPolicy(policyName = "daily-snapshot") + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - val result = SMState.DELETION_FINISHED.instance.execute(context) - assertTrue("Execution results should be Next.", result is SMResult.Next) - result as SMResult.Next - val metadataToSave = result.metadataToSave.build() - assertNull("Started deletion should be reset to null.", metadataToSave.deletion!!.started) - assertEquals("Latest execution status is success", SMMetadata.LatestExecution.Status.SUCCESS, metadataToSave.deletion!!.latestExecution!!.status) - assertNotNull("Latest execution status end_time should not be null", metadataToSave.deletion!!.latestExecution!!.endTime) - assertNotNull("Latest execution status message should not be null", metadataToSave.deletion!!.latestExecution!!.info!!.message) - } + val result = SMState.DELETION_FINISHED.instance.execute(context) + assertTrue("Execution results should be Next.", result is SMResult.Next) + result as SMResult.Next + val metadataToSave = result.metadataToSave.build() + assertNull("Started deletion should be reset to null.", metadataToSave.deletion!!.started) + assertEquals("Latest execution status is success", SMMetadata.LatestExecution.Status.SUCCESS, 
metadataToSave.deletion!!.latestExecution!!.status) + assertNotNull("Latest execution status end_time should not be null", metadataToSave.deletion!!.latestExecution!!.endTime) + assertNotNull("Latest execution status message should not be null", metadataToSave.deletion!!.latestExecution!!.info!!.message) + } - fun `test deletion has not finished`() = runBlocking { - val snapshotName = "test_deletion_not_finished" - val snapshotInfo = mockSnapshotInfo(name = snapshotName) - mockGetSnapshotsCall(response = mockGetSnapshotResponse(snapshotInfo)) + fun `test deletion has not finished`() = + runBlocking { + val snapshotName = "test_deletion_not_finished" + val snapshotInfo = mockSnapshotInfo(name = snapshotName) + mockGetSnapshotsCall(response = mockGetSnapshotResponse(snapshotInfo)) - val metadata = randomSMMetadata( - deletionCurrentState = SMState.DELETING, - startedDeletion = listOf(snapshotName), - deletionLatestExecution = randomLatestExecution( - startTime = Instant.now().minusSeconds(50), - ), - ) - val job = randomSMPolicy(policyName = "daily-snapshot") - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + val metadata = + randomSMMetadata( + deletionCurrentState = SMState.DELETING, + startedDeletion = listOf(snapshotName), + deletionLatestExecution = + randomLatestExecution( + startTime = Instant.now().minusSeconds(50), + ), + ) + val job = randomSMPolicy(policyName = "daily-snapshot") + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - val result = SMState.DELETION_FINISHED.instance.execute(context) - assertTrue("Execution results should be Stay.", result is SMResult.Stay) - result as SMResult.Stay - val metadataToSave = result.metadataToSave.build() - assertNotNull("Started deletion should not be reset.", metadataToSave.deletion!!.started) - assertEquals("Latest execution status is in_progress", SMMetadata.LatestExecution.Status.IN_PROGRESS, metadataToSave.deletion!!.latestExecution!!.status) - assertNull("Latest execution status end_time should be null", metadataToSave.deletion!!.latestExecution!!.endTime) - } + val result = SMState.DELETION_FINISHED.instance.execute(context) + assertTrue("Execution results should be Stay.", result is SMResult.Stay) + result as SMResult.Stay + val metadataToSave = result.metadataToSave.build() + assertNotNull("Started deletion should not be reset.", metadataToSave.deletion!!.started) + assertEquals("Latest execution status is in_progress", SMMetadata.LatestExecution.Status.IN_PROGRESS, metadataToSave.deletion!!.latestExecution!!.status) + assertNull("Latest execution status end_time should be null", metadataToSave.deletion!!.latestExecution!!.endTime) + } - fun `test get snapshots exception in deletion`() = runBlocking { - val snapshotName = "test_deletion_get_snapshots_exception" - mockGetSnapshotsCall(exception = Exception()) + fun `test get snapshots exception in deletion`() = + runBlocking { + val snapshotName = "test_deletion_get_snapshots_exception" + mockGetSnapshotsCall(exception = Exception()) - val metadata = randomSMMetadata( - deletionCurrentState = SMState.DELETING, - startedDeletion = listOf(snapshotName), - deletionLatestExecution = randomLatestExecution( - startTime = Instant.now().minusSeconds(50), - ), - ) - val job = randomSMPolicy(policyName = "daily-snapshot") - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + val metadata = + randomSMMetadata( + deletionCurrentState = SMState.DELETING, + startedDeletion 
= listOf(snapshotName), + deletionLatestExecution = + randomLatestExecution( + startTime = Instant.now().minusSeconds(50), + ), + ) + val job = randomSMPolicy(policyName = "daily-snapshot") + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - val result = SMState.DELETION_FINISHED.instance.execute(context) - assertTrue("Execution results should be Failure.", result is SMResult.Fail) - result as SMResult.Fail - val metadataToSave = result.metadataToSave.build() - assertEquals("Latest execution status is retrying", SMMetadata.LatestExecution.Status.RETRYING, metadataToSave.deletion!!.latestExecution!!.status) - assertNull("Latest execution status end_time should be null", metadataToSave.deletion!!.latestExecution!!.endTime) - assertNotNull("Latest execution status info should not be null", metadataToSave.deletion!!.latestExecution!!.info) - } + val result = SMState.DELETION_FINISHED.instance.execute(context) + assertTrue("Execution results should be Failure.", result is SMResult.Fail) + result as SMResult.Fail + val metadataToSave = result.metadataToSave.build() + assertEquals("Latest execution status is retrying", SMMetadata.LatestExecution.Status.RETRYING, metadataToSave.deletion!!.latestExecution!!.status) + assertNull("Latest execution status end_time should be null", metadataToSave.deletion!!.latestExecution!!.endTime) + assertNotNull("Latest execution status info should not be null", metadataToSave.deletion!!.latestExecution!!.info) + } - fun `test deletion time limit exceed`() = runBlocking { - val snapshotName = "test_deletion_time_exceed" - val snapshotInfo = mockSnapshotInfo(name = snapshotName) - mockGetSnapshotsCall(response = mockGetSnapshotResponse(snapshotInfo)) + fun `test deletion time limit exceed`() = + runBlocking { + val snapshotName = "test_deletion_time_exceed" + val snapshotInfo = mockSnapshotInfo(name = snapshotName) + mockGetSnapshotsCall(response = mockGetSnapshotResponse(snapshotInfo)) - val metadata = randomSMMetadata( - deletionCurrentState = SMState.DELETING, - startedDeletion = listOf(snapshotName), - deletionLatestExecution = randomLatestExecution( - startTime = Instant.now().minusSeconds(50), - ), - ) - val job = randomSMPolicy(policyName = "daily-snapshot", deletionTimeLimit = TimeValue.timeValueSeconds(5)) - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) + val metadata = + randomSMMetadata( + deletionCurrentState = SMState.DELETING, + startedDeletion = listOf(snapshotName), + deletionLatestExecution = + randomLatestExecution( + startTime = Instant.now().minusSeconds(50), + ), + ) + val job = randomSMPolicy(policyName = "daily-snapshot", deletionTimeLimit = TimeValue.timeValueSeconds(5)) + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - val result = SMState.DELETION_FINISHED.instance.execute(context) - assertTrue("Execution results should be Failure.", result is SMResult.Fail) - result as SMResult.Fail - val metadataToSave = result.metadataToSave.build() - assertTrue(result.forceReset!!) 
- assertEquals("Latest execution status is time limit exceed", SMMetadata.LatestExecution.Status.TIME_LIMIT_EXCEEDED, metadataToSave.deletion!!.latestExecution!!.status) - assertNotNull("Latest execution status end_time should not be null", metadataToSave.deletion!!.latestExecution!!.endTime) - assertNotNull("Latest execution status cause should not be null", metadataToSave.deletion!!.latestExecution!!.info!!.cause) - } + val result = SMState.DELETION_FINISHED.instance.execute(context) + assertTrue("Execution results should be Failure.", result is SMResult.Fail) + result as SMResult.Fail + val metadataToSave = result.metadataToSave.build() + assertTrue(result.forceReset!!) + assertEquals("Latest execution status is time limit exceed", SMMetadata.LatestExecution.Status.TIME_LIMIT_EXCEEDED, metadataToSave.deletion!!.latestExecution!!.status) + assertNotNull("Latest execution status end_time should not be null", metadataToSave.deletion!!.latestExecution!!.endTime) + assertNotNull("Latest execution status cause should not be null", metadataToSave.deletion!!.latestExecution!!.info!!.cause) + } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionStartStateTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionStartStateTests.kt index 7e512e912..f87491ebc 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionStartStateTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletionStartStateTests.kt @@ -14,15 +14,16 @@ import org.opensearch.indexmanagement.snapshotmanagement.randomSMMetadata import org.opensearch.indexmanagement.snapshotmanagement.randomSMPolicy class DeletionStartStateTests : MocksTestCase() { + fun `test start state execution`() = + runBlocking { + val metadata = + randomSMMetadata( + deletionCurrentState = SMState.DELETION_FINISHED, + ) + val job = randomSMPolicy() + val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - fun `test start state execution`() = runBlocking { - val metadata = randomSMMetadata( - deletionCurrentState = SMState.DELETION_FINISHED - ) - val job = randomSMPolicy() - val context = SMStateMachine(client, job, metadata, settings, threadPool, indicesManager) - - val result = SMState.DELETION_START.instance.execute(context) - assertTrue("Execution result should be Next.", result is SMResult.Next) - } + val result = SMState.DELETION_START.instance.execute(context) + assertTrue("Execution result should be Next.", result is SMResult.Next) + } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/WriteableTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/WriteableTests.kt index b197e4cdb..cf23eb5d0 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/WriteableTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/WriteableTests.kt @@ -13,7 +13,6 @@ import org.opensearch.indexmanagement.snapshotmanagement.randomSMPolicy import org.opensearch.test.OpenSearchTestCase class WriteableTests : OpenSearchTestCase() { - fun `test sm policy as stream`() { val smPolicy = randomSMPolicy(notificationConfig = randomNotificationConfig()) val out = BytesStreamOutput().also { smPolicy.writeTo(it) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/XContentTests.kt 
b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/XContentTests.kt index 8af4a8e17..6e822294b 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/XContentTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/XContentTests.kt @@ -15,7 +15,6 @@ import org.opensearch.indexmanagement.snapshotmanagement.toJsonString import org.opensearch.test.OpenSearchTestCase class XContentTests : OpenSearchTestCase() { - fun `test sm policy parsing`() { val smPolicy = randomSMPolicy(notificationConfig = randomNotificationConfig()) val smPolicyString = smPolicy.toJsonString() diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestDeleteSnapshotManagementIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestDeleteSnapshotManagementIT.kt index 130e8bf4b..7464c0e64 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestDeleteSnapshotManagementIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestDeleteSnapshotManagementIT.kt @@ -6,15 +6,14 @@ package org.opensearch.indexmanagement.snapshotmanagement.resthandler import org.opensearch.client.ResponseException +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.snapshotmanagement.SnapshotManagementRestTestCase import org.opensearch.indexmanagement.snapshotmanagement.randomSMPolicy -import org.opensearch.core.rest.RestStatus class RestDeleteSnapshotManagementIT : SnapshotManagementRestTestCase() { - fun `test deleting a snapshot management policy`() { val smPolicy = createSMPolicy(randomSMPolicy()) val deleteResponse = client().makeRequest("DELETE", "${IndexManagementPlugin.SM_POLICIES_URI}/${smPolicy.policyName}?refresh=true") diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestExplainSnapshotManagementIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestExplainSnapshotManagementIT.kt index ba31ad552..1e8d19848 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestExplainSnapshotManagementIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestExplainSnapshotManagementIT.kt @@ -7,6 +7,7 @@ package org.opensearch.indexmanagement.snapshotmanagement.resthandler import org.opensearch.client.ResponseException import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.snapshotmanagement.SnapshotManagementRestTestCase import org.opensearch.indexmanagement.snapshotmanagement.api.transport.explain.ExplainSMPolicyResponse @@ -16,21 +17,20 @@ import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy import org.opensearch.indexmanagement.snapshotmanagement.randomSMPolicy import org.opensearch.indexmanagement.waitFor import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule -import org.opensearch.core.rest.RestStatus import java.time.Instant.now import java.time.temporal.ChronoUnit @Suppress("UNCHECKED_CAST") class RestExplainSnapshotManagementIT : 
SnapshotManagementRestTestCase() { - fun `test explaining a snapshot management policy`() { - val smPolicy = createSMPolicy( - randomSMPolicy().copy( - jobEnabled = true, - jobEnabledTime = now(), - jobSchedule = IntervalSchedule(now(), 1, ChronoUnit.MINUTES), + val smPolicy = + createSMPolicy( + randomSMPolicy().copy( + jobEnabled = true, + jobEnabledTime = now(), + jobSchedule = IntervalSchedule(now(), 1, ChronoUnit.MINUTES), + ), ) - ) updateSMPolicyStartTime(smPolicy) waitFor(timeout = timeout) { val explainResponse = explainSMPolicy(smPolicy.policyName) @@ -67,13 +67,14 @@ class RestExplainSnapshotManagementIT : SnapshotManagementRestTestCase() { } fun `test explain all with list of policy names`() { - val smPolicies = randomList(2, 3) { - createSMPolicy( - randomSMPolicy( - jobEnabled = true, + val smPolicies = + randomList(2, 3) { + createSMPolicy( + randomSMPolicy( + jobEnabled = true, + ), ) - ) - } + } // if this proves to be flaky, just index the metadata directly instead of executing to generate metadata smPolicies.forEach { updateSMPolicyStartTime(it) } waitFor(timeout = timeout) { @@ -95,13 +96,14 @@ class RestExplainSnapshotManagementIT : SnapshotManagementRestTestCase() { } fun `test explain all with empty policy name`() { - val smPolicies = randomList(2, 3) { - createSMPolicy( - randomSMPolicy( - jobEnabled = true, + val smPolicies = + randomList(2, 3) { + createSMPolicy( + randomSMPolicy( + jobEnabled = true, + ), ) - ) - } + } // if this proves to be flaky, just index the metadata directly instead of executing to generate metadata smPolicies.forEach { updateSMPolicyStartTime(it) } waitFor(timeout = timeout) { diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestGetSnapshotManagementIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestGetSnapshotManagementIT.kt index b467d2a74..d65e2a5fa 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestGetSnapshotManagementIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestGetSnapshotManagementIT.kt @@ -8,6 +8,7 @@ package org.opensearch.indexmanagement.snapshotmanagement.resthandler import org.apache.hc.core5.http.HttpHeaders import org.apache.hc.core5.http.message.BasicHeader import org.opensearch.client.ResponseException +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.opensearchapi.convertToMap @@ -16,22 +17,21 @@ import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy.Companion.ENABLED_TIME_FIELD import org.opensearch.indexmanagement.snapshotmanagement.model.SMPolicy.Companion.SM_TYPE import org.opensearch.indexmanagement.snapshotmanagement.randomSMPolicy -import org.opensearch.core.rest.RestStatus class RestGetSnapshotManagementIT : SnapshotManagementRestTestCase() { - fun `test getting a snapshot management policy`() { var smPolicy = createSMPolicy(randomSMPolicy().copy(jobEnabled = false, jobEnabledTime = null)) val indexedSMPolicy = getSMPolicy(smPolicy.policyName) // Schema version and last updated time are updated during the creation so we need to update the original too for comparison // Job schedule interval will have a dynamic start time - smPolicy = smPolicy.copy( - id = indexedSMPolicy.id, - seqNo = indexedSMPolicy.seqNo, - 
primaryTerm = indexedSMPolicy.primaryTerm, - jobLastUpdateTime = indexedSMPolicy.jobLastUpdateTime, - jobSchedule = indexedSMPolicy.jobSchedule - ) + smPolicy = + smPolicy.copy( + id = indexedSMPolicy.id, + seqNo = indexedSMPolicy.seqNo, + primaryTerm = indexedSMPolicy.primaryTerm, + jobLastUpdateTime = indexedSMPolicy.jobLastUpdateTime, + jobSchedule = indexedSMPolicy.jobSchedule, + ) assertEquals("Indexed and retrieved snapshot management policies differ", smPolicy, indexedSMPolicy) } @@ -60,10 +60,11 @@ class RestGetSnapshotManagementIT : SnapshotManagementRestTestCase() { @Suppress("UNCHECKED_CAST") fun `test getting all snapshot management policies`() { val smPolicies = randomList(1, 15) { createSMPolicy(randomSMPolicy()) } - val response = client().makeRequest( - "GET", IndexManagementPlugin.SM_POLICIES_URI, null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val response = + client().makeRequest( + "GET", IndexManagementPlugin.SM_POLICIES_URI, null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) val map = response.asMap() val totalPolicies = map["total_policies"] as Int val responsePolicies = map["policies"] as List> @@ -83,10 +84,11 @@ class RestGetSnapshotManagementIT : SnapshotManagementRestTestCase() { @Throws(Exception::class) @Suppress("UNCHECKED_CAST") fun `test getting all snapshot management policies when config index doesn't exist`() { - val response = client().makeRequest( - "GET", IndexManagementPlugin.SM_POLICIES_URI, null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val response = + client().makeRequest( + "GET", IndexManagementPlugin.SM_POLICIES_URI, null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) val map = response.asMap() val totalPolicies = map["total_policies"] as Int val responsePolicies = map["policies"] as List> diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestIndexSnapshotManagementIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestIndexSnapshotManagementIT.kt index d260f5868..6a7975dd0 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestIndexSnapshotManagementIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestIndexSnapshotManagementIT.kt @@ -7,6 +7,7 @@ package org.opensearch.indexmanagement.snapshotmanagement.resthandler import org.opensearch.client.ResponseException import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.SM_POLICIES_URI import org.opensearch.indexmanagement.indexstatemanagement.util.XCONTENT_WITHOUT_TYPE_AND_USER @@ -20,11 +21,9 @@ import org.opensearch.indexmanagement.util.IndexUtils import org.opensearch.indexmanagement.util.NO_ID import org.opensearch.indexmanagement.util._ID import org.opensearch.indexmanagement.util._SEQ_NO -import org.opensearch.core.rest.RestStatus import java.time.Instant class RestIndexSnapshotManagementIT : SnapshotManagementRestTestCase() { - @Suppress("UNCHECKED_CAST") fun `test creating a snapshot management policy`() { var smPolicy = randomSMPolicy() @@ -37,21 +36,23 @@ class RestIndexSnapshotManagementIT : SnapshotManagementRestTestCase() { assertEquals("Incorrect Location header", "$SM_POLICIES_URI/${smPolicy.policyName}", response.getHeader("Location")) val 
responseSMPolicy = responseBody[SM_TYPE] as Map // During indexing, we update these two fields so we need to override them here before the equality check - smPolicy = smPolicy.copy( - jobLastUpdateTime = Instant.ofEpochMilli(responseSMPolicy[SMPolicy.LAST_UPDATED_TIME_FIELD] as Long), - schemaVersion = IndexUtils.indexManagementConfigSchemaVersion - ) + smPolicy = + smPolicy.copy( + jobLastUpdateTime = Instant.ofEpochMilli(responseSMPolicy[SMPolicy.LAST_UPDATED_TIME_FIELD] as Long), + schemaVersion = IndexUtils.indexManagementConfigSchemaVersion, + ) assertEquals("Created and returned snapshot management policies differ", smPolicy.toMap(XCONTENT_WITHOUT_TYPE_AND_USER), responseSMPolicy) } @Suppress("UNCHECKED_CAST") fun `test updating a snapshot management policy with correct seq_no and primary_term`() { val smPolicy = createSMPolicy(randomSMPolicy()) - val updateResponse = client().makeRequest( - "PUT", - "$SM_POLICIES_URI/${smPolicy.policyName}?refresh=true&if_seq_no=${smPolicy.seqNo}&if_primary_term=${smPolicy.primaryTerm}", - emptyMap(), smPolicy.toHttpEntity() - ) + val updateResponse = + client().makeRequest( + "PUT", + "$SM_POLICIES_URI/${smPolicy.policyName}?refresh=true&if_seq_no=${smPolicy.seqNo}&if_primary_term=${smPolicy.primaryTerm}", + emptyMap(), smPolicy.toHttpEntity(), + ) assertEquals("Update snapshot management policy failed", RestStatus.OK, updateResponse.restStatus()) val responseBody = updateResponse.asMap() @@ -72,7 +73,7 @@ class RestIndexSnapshotManagementIT : SnapshotManagementRestTestCase() { client().makeRequest( "PUT", "$SM_POLICIES_URI/${smPolicy.policyName}?refresh=true&if_seq_no=10251989&if_primary_term=2342", - emptyMap(), smPolicy.toHttpEntity() + emptyMap(), smPolicy.toHttpEntity(), ) fail("expected 409 ResponseException") } catch (e: ResponseException) { @@ -82,7 +83,7 @@ class RestIndexSnapshotManagementIT : SnapshotManagementRestTestCase() { client().makeRequest( "PUT", "$SM_POLICIES_URI/${smPolicy.policyName}?refresh=true", - emptyMap(), smPolicy.toHttpEntity() + emptyMap(), smPolicy.toHttpEntity(), ) fail("expected exception") } catch (e: ResponseException) { @@ -96,7 +97,7 @@ class RestIndexSnapshotManagementIT : SnapshotManagementRestTestCase() { client().makeRequest( "PUT", "$SM_POLICIES_URI/${smPolicy.policyName}?refresh=true&if_seq_no=10251989&if_primary_term=2342", - emptyMap(), smPolicy.toHttpEntity() + emptyMap(), smPolicy.toHttpEntity(), ) fail("expected exception") } catch (e: ResponseException) { @@ -135,10 +136,11 @@ class RestIndexSnapshotManagementIT : SnapshotManagementRestTestCase() { val response = client().makeRequest("GET", "/$INDEX_MANAGEMENT_INDEX/_mapping") val parserMap = createParser(XContentType.JSON.xContent(), response.entity.content).map() as Map> val mappingsMap = parserMap[INDEX_MANAGEMENT_INDEX]!!["mappings"] as Map - val expected = createParser( - XContentType.JSON.xContent(), - javaClass.classLoader.getResource("mappings/opendistro-ism-config.json").readText() - ) + val expected = + createParser( + XContentType.JSON.xContent(), + javaClass.classLoader.getResource("mappings/opendistro-ism-config.json").readText(), + ) val expectedMap = expected.map() assertEquals("Mappings are different", expectedMap, mappingsMap) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestStartSnapshotManagementIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestStartSnapshotManagementIT.kt index df58d309b..4ec67011a 100644 --- 
a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestStartSnapshotManagementIT.kt
+++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestStartSnapshotManagementIT.kt
@@ -6,15 +6,14 @@
 package org.opensearch.indexmanagement.snapshotmanagement.resthandler
 
 import org.opensearch.client.ResponseException
+import org.opensearch.core.rest.RestStatus
 import org.opensearch.indexmanagement.IndexManagementPlugin
 import org.opensearch.indexmanagement.makeRequest
 import org.opensearch.indexmanagement.snapshotmanagement.SnapshotManagementRestTestCase
 import org.opensearch.indexmanagement.snapshotmanagement.randomSMPolicy
-import org.opensearch.core.rest.RestStatus
 import java.time.Instant
 
 class RestStartSnapshotManagementIT : SnapshotManagementRestTestCase() {
-
     fun `test starting a stopped snapshot management policy`() {
         val smPolicy = createSMPolicy(randomSMPolicy().copy(jobEnabled = false, jobEnabledTime = null))
         assertFalse("Snapshot management policy was not disabled", smPolicy.jobEnabled)
diff --git a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestStopSnapshotManagementIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestStopSnapshotManagementIT.kt
index e0711beb6..77569632f 100644
--- a/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestStopSnapshotManagementIT.kt
+++ b/src/test/kotlin/org/opensearch/indexmanagement/snapshotmanagement/resthandler/RestStopSnapshotManagementIT.kt
@@ -6,15 +6,14 @@
 package org.opensearch.indexmanagement.snapshotmanagement.resthandler
 
 import org.opensearch.client.ResponseException
+import org.opensearch.core.rest.RestStatus
 import org.opensearch.indexmanagement.IndexManagementPlugin
 import org.opensearch.indexmanagement.makeRequest
 import org.opensearch.indexmanagement.snapshotmanagement.SnapshotManagementRestTestCase
 import org.opensearch.indexmanagement.snapshotmanagement.randomSMPolicy
-import org.opensearch.core.rest.RestStatus
 import java.time.Instant
 
 class RestStopSnapshotManagementIT : SnapshotManagementRestTestCase() {
-
     fun `test stopping an enabled snapshot management policy`() {
         val smPolicy = createSMPolicy(randomSMPolicy().copy(jobEnabled = true, jobEnabledTime = Instant.now()))
         assertTrue("Snapshot management policy was not enabled", smPolicy.jobEnabled)
diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/TargetIndexMappingServiceTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/TargetIndexMappingServiceTests.kt
index 46416b4e1..ff3467b58 100644
--- a/src/test/kotlin/org/opensearch/indexmanagement/transform/TargetIndexMappingServiceTests.kt
+++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/TargetIndexMappingServiceTests.kt
@@ -9,7 +9,6 @@ import org.junit.Assert
 import org.opensearch.test.OpenSearchTestCase
 
 class TargetIndexMappingServiceTests : OpenSearchTestCase() {
-
     fun `test create target index mapping fields mapped correctly`() {
         val expectedResult = """{"_meta":{"schema_version":1},"dynamic_templates":[{"strings":{"match_mapping_type":"string","mapping":{"type":"keyword"}}}],"properties":{"tpep_pickup_datetime":{"type":"date"}}}"""
         val dateFieldMap = mapOf("tpep_pickup_datetime" to mapOf("type" to "date"))
diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/TestHelpers.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/TestHelpers.kt
index 415e44718..6ea41b56a 100644
---
a/src/test/kotlin/org/opensearch/indexmanagement/transform/TestHelpers.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/TestHelpers.kt @@ -7,13 +7,13 @@ package org.opensearch.indexmanagement.transform import org.opensearch.cluster.metadata.IndexMetadata import org.opensearch.common.io.stream.BytesStreamOutput -import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput -import org.opensearch.core.common.io.stream.NamedWriteableRegistry import org.opensearch.common.settings.Settings -import org.opensearch.core.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory +import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput +import org.opensearch.core.common.io.stream.NamedWriteableRegistry import org.opensearch.core.index.Index import org.opensearch.core.index.shard.ShardId +import org.opensearch.core.xcontent.ToXContent import org.opensearch.indexmanagement.common.model.dimension.Dimension import org.opensearch.indexmanagement.opensearchapi.string import org.opensearch.indexmanagement.randomInstant @@ -43,21 +43,30 @@ fun randomGroups(): List { return dimensions } -fun sumAggregation(): AggregationBuilder = AggregationBuilders.sum( - OpenSearchRestTestCase.randomAlphaOfLength(10) -).field(OpenSearchRestTestCase.randomAlphaOfLength(10)) -fun maxAggregation(): AggregationBuilder = AggregationBuilders.max( - OpenSearchRestTestCase.randomAlphaOfLength(10) -).field(OpenSearchRestTestCase.randomAlphaOfLength(10)) -fun minAggregation(): AggregationBuilder = AggregationBuilders.min( - OpenSearchRestTestCase.randomAlphaOfLength(10) -).field(OpenSearchRestTestCase.randomAlphaOfLength(10)) -fun valueCountAggregation(): AggregationBuilder = AggregationBuilders.count( - OpenSearchRestTestCase.randomAlphaOfLength(10) -).field(OpenSearchRestTestCase.randomAlphaOfLength(10)) -fun avgAggregation(): AggregationBuilder = AggregationBuilders.avg( - OpenSearchRestTestCase.randomAlphaOfLength(10) -).field(OpenSearchRestTestCase.randomAlphaOfLength(10)) +fun sumAggregation(): AggregationBuilder = + AggregationBuilders.sum( + OpenSearchRestTestCase.randomAlphaOfLength(10), + ).field(OpenSearchRestTestCase.randomAlphaOfLength(10)) + +fun maxAggregation(): AggregationBuilder = + AggregationBuilders.max( + OpenSearchRestTestCase.randomAlphaOfLength(10), + ).field(OpenSearchRestTestCase.randomAlphaOfLength(10)) + +fun minAggregation(): AggregationBuilder = + AggregationBuilders.min( + OpenSearchRestTestCase.randomAlphaOfLength(10), + ).field(OpenSearchRestTestCase.randomAlphaOfLength(10)) + +fun valueCountAggregation(): AggregationBuilder = + AggregationBuilders.count( + OpenSearchRestTestCase.randomAlphaOfLength(10), + ).field(OpenSearchRestTestCase.randomAlphaOfLength(10)) + +fun avgAggregation(): AggregationBuilder = + AggregationBuilders.avg( + OpenSearchRestTestCase.randomAlphaOfLength(10), + ).field(OpenSearchRestTestCase.randomAlphaOfLength(10)) fun randomAggregationBuilder(): AggregationBuilder { val aggregations = listOf(sumAggregation(), maxAggregation(), minAggregation(), valueCountAggregation(), avgAggregation()) @@ -93,7 +102,7 @@ fun randomTransform(): Transform { groups = randomGroups(), aggregations = randomAggregationFactories(), continuous = isContinuous, - user = randomUser() + user = randomUser(), ) } @@ -111,7 +120,7 @@ fun randomTransformMetadata(): TransformMetadata { failureReason = if (status == TransformMetadata.Status.FAILED) OpenSearchRestTestCase.randomAlphaOfLength(10) else null, stats = randomTransformStats(), 
shardIDToGlobalCheckpoint = if (isContinuous) randomShardIDToGlobalCheckpoint() else null, - continuousStats = if (isContinuous) randomContinuousStats() else null + continuousStats = if (isContinuous) randomContinuousStats() else null, ) } @@ -121,7 +130,7 @@ fun randomTransformStats(): TransformStats { documentsProcessed = OpenSearchRestTestCase.randomNonNegativeLong(), documentsIndexed = OpenSearchRestTestCase.randomNonNegativeLong(), indexTimeInMillis = OpenSearchRestTestCase.randomNonNegativeLong(), - searchTimeInMillis = OpenSearchRestTestCase.randomNonNegativeLong() + searchTimeInMillis = OpenSearchRestTestCase.randomNonNegativeLong(), ) } @@ -142,7 +151,7 @@ fun randomShardID(): ShardId { fun randomContinuousStats(): ContinuousTransformStats { return ContinuousTransformStats( lastTimestamp = randomInstant(), - documentsBehind = randomDocumentsBehind() + documentsBehind = randomDocumentsBehind(), ) } @@ -168,14 +177,15 @@ fun randomISMTransform(): ISMTransform { pageSize = OpenSearchRestTestCase.randomIntBetween(1, 10000), groups = randomGroups(), dataSelectionQuery = randomTermQuery(), - aggregations = randomAggregationFactories() + aggregations = randomAggregationFactories(), ) } fun Transform.toJsonString(params: ToXContent.Params = ToXContent.EMPTY_PARAMS): String = this.toXContent(XContentFactory.jsonBuilder(), params).string() -fun TransformMetadata.toJsonString(params: ToXContent.Params = ToXContent.EMPTY_PARAMS): String = this.toXContent(XContentFactory.jsonBuilder(), params) - .string() +fun TransformMetadata.toJsonString(params: ToXContent.Params = ToXContent.EMPTY_PARAMS): String = + this.toXContent(XContentFactory.jsonBuilder(), params) + .string() // Builds the required stream input for transforms by wrapping the stream input with required NamedWriteableRegistry. 
fun buildStreamInputForTransforms(out: BytesStreamOutput): NamedWriteableAwareStreamInput { diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/TransformRestTestCase.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/TransformRestTestCase.kt index aaba661c3..e62f7321d 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/transform/TransformRestTestCase.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/TransformRestTestCase.kt @@ -5,19 +5,20 @@ package org.opensearch.indexmanagement.transform +import org.apache.hc.core5.http.ContentType import org.apache.hc.core5.http.HttpEntity import org.apache.hc.core5.http.HttpHeaders -import org.apache.hc.core5.http.ContentType import org.apache.hc.core5.http.io.entity.StringEntity import org.apache.hc.core5.http.message.BasicHeader import org.junit.AfterClass import org.opensearch.client.Response import org.opensearch.client.RestClient import org.opensearch.common.settings.Settings +import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.rest.RestStatus import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken -import org.opensearch.common.xcontent.XContentType import org.opensearch.index.seqno.SequenceNumbers import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.TRANSFORM_BASE_URI @@ -29,11 +30,9 @@ import org.opensearch.indexmanagement.transform.model.TransformMetadata import org.opensearch.indexmanagement.util._ID import org.opensearch.indexmanagement.util._PRIMARY_TERM import org.opensearch.indexmanagement.util._SEQ_NO -import org.opensearch.core.rest.RestStatus import org.opensearch.search.SearchModule abstract class TransformRestTestCase : IndexManagementRestTestCase() { - companion object { @AfterClass @JvmStatic fun clearIndicesAfterClass() { @@ -45,21 +44,22 @@ abstract class TransformRestTestCase : IndexManagementRestTestCase() { transform: Transform, transformId: String = randomAlphaOfLength(10), refresh: Boolean = true, - client: RestClient? = null + client: RestClient? = null, ): Transform { if (!indexExists(transform.sourceIndex)) { createTransformSourceIndex(transform) } val response = createTransformJson(transform.toJsonString(), transformId, refresh, client) - val transformJson = createParser(XContentType.JSON.xContent(), response.entity.content) - .map() + val transformJson = + createParser(XContentType.JSON.xContent(), response.entity.content) + .map() val createdId = transformJson["_id"] as String assertEquals("Transform ids are not the same", transformId, createdId) return transform.copy( id = createdId, seqNo = (transformJson["_seq_no"] as Int).toLong(), - primaryTerm = (transformJson["_primary_term"] as Int).toLong() + primaryTerm = (transformJson["_primary_term"] as Int).toLong(), ) } @@ -67,27 +67,29 @@ abstract class TransformRestTestCase : IndexManagementRestTestCase() { transformString: String, transformId: String, refresh: Boolean = true, - userClient: RestClient? = null + userClient: RestClient? 
= null, ): Response { val client = userClient ?: client() - val response = client - .makeRequest( - "PUT", - "$TRANSFORM_BASE_URI/$transformId?refresh=$refresh", - emptyMap(), - StringEntity(transformString, ContentType.APPLICATION_JSON) - ) + val response = + client + .makeRequest( + "PUT", + "$TRANSFORM_BASE_URI/$transformId?refresh=$refresh", + emptyMap(), + StringEntity(transformString, ContentType.APPLICATION_JSON), + ) assertEquals("Unable to create a new transform", RestStatus.CREATED, response.restStatus()) return response } protected fun disableTransform(transformId: String) { - val response = client() - .makeRequest( - "POST", - "$TRANSFORM_BASE_URI/$transformId/_stop", - emptyMap() - ) + val response = + client() + .makeRequest( + "POST", + "$TRANSFORM_BASE_URI/$transformId/_stop", + emptyMap(), + ) assertEquals("Unable to disable transform $transformId", RestStatus.OK, response.restStatus()) } @@ -101,11 +103,12 @@ abstract class TransformRestTestCase : IndexManagementRestTestCase() { var mappingString = "" var addCommaPrefix = false transform.groups.forEach { - val fieldType = when (it.type) { - Dimension.Type.DATE_HISTOGRAM -> "date" - Dimension.Type.HISTOGRAM -> "long" - Dimension.Type.TERMS -> "keyword" - } + val fieldType = + when (it.type) { + Dimension.Type.DATE_HISTOGRAM -> "date" + Dimension.Type.HISTOGRAM -> "long" + Dimension.Type.TERMS -> "keyword" + } val string = "${if (addCommaPrefix) "," else ""}\"${it.sourceField}\":{\"type\": \"$fieldType\"}" addCommaPrefix = true mappingString += string @@ -115,9 +118,10 @@ abstract class TransformRestTestCase : IndexManagementRestTestCase() { } protected fun getTransformMetadata(metadataId: String): TransformMetadata { - val response = client().makeRequest( - "GET", "$INDEX_MANAGEMENT_INDEX/_doc/$metadataId", null, BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val response = + client().makeRequest( + "GET", "$INDEX_MANAGEMENT_INDEX/_doc/$metadataId", null, BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Unable to get transform metadata $metadataId", RestStatus.OK, response.restStatus()) val parser = createParser(XContentType.JSON.xContent(), response.entity.content) @@ -145,7 +149,7 @@ abstract class TransformRestTestCase : IndexManagementRestTestCase() { protected fun getTransform( transformId: String, header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), - userClient: RestClient? = null + userClient: RestClient? 
= null, ): Transform { val client = userClient ?: client() val response = client.makeRequest("GET", "$TRANSFORM_BASE_URI/$transformId", null, header) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/TransformRunnerIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/TransformRunnerIT.kt index a948899ff..6a7edba49 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/transform/TransformRunnerIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/TransformRunnerIT.kt @@ -11,6 +11,7 @@ import org.opensearch.client.Request import org.opensearch.client.RequestOptions import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.rest.RestStatus import org.opensearch.index.query.TermQueryBuilder import org.opensearch.indexmanagement.common.model.dimension.DateHistogram import org.opensearch.indexmanagement.common.model.dimension.Histogram @@ -21,7 +22,6 @@ import org.opensearch.indexmanagement.transform.model.TransformMetadata import org.opensearch.indexmanagement.waitFor import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule import org.opensearch.rest.RestRequest -import org.opensearch.core.rest.RestStatus import org.opensearch.script.Script import org.opensearch.script.ScriptType import org.opensearch.search.aggregations.AggregationBuilders @@ -34,39 +34,41 @@ import java.time.temporal.ChronoUnit import kotlin.test.assertFailsWith class TransformRunnerIT : TransformRestTestCase() { - fun `test transform`() { validateSourceIndex("transform-source-index") - val transform = Transform( - id = "id_1", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform", - metadataId = null, - sourceIndex = "transform-source-index", - targetIndex = "transform-target-index", - roles = emptyList(), - pageSize = 1, - groups = listOf( - Terms(sourceField = "store_and_fwd_flag", targetField = "flag") - ) - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_1", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform", + metadataId = null, + sourceIndex = "transform-source-index", + targetIndex = "transform-target-index", + roles = emptyList(), + pageSize = 1, + groups = + listOf( + Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), + ), + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) waitFor { assertTrue("Target transform index was not created", indexExists(transform.targetIndex)) } - val metadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) - assertEquals("Transform has not finished", TransformMetadata.Status.FINISHED, transformMetadata.status) - transformMetadata - } + val metadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) 
+ assertEquals("Transform has not finished", TransformMetadata.Status.FINISHED, transformMetadata.status) + transformMetadata + } assertEquals("More than expected pages processed", 3L, metadata.stats.pagesProcessed) assertEquals("More than expected documents indexed", 2L, metadata.stats.documentsIndexed) @@ -79,34 +81,37 @@ class TransformRunnerIT : TransformRestTestCase() { fun `test transform with data filter`() { validateSourceIndex("transform-source-index") - val transform = Transform( - id = "id_2", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform", - metadataId = null, - sourceIndex = "transform-source-index", - targetIndex = "transform-target-index", - roles = emptyList(), - pageSize = 1, - groups = listOf( - Terms(sourceField = "store_and_fwd_flag", targetField = "flag") - ), - dataSelectionQuery = TermQueryBuilder("store_and_fwd_flag", "N") - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_2", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform", + metadataId = null, + sourceIndex = "transform-source-index", + targetIndex = "transform-target-index", + roles = emptyList(), + pageSize = 1, + groups = + listOf( + Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), + ), + dataSelectionQuery = TermQueryBuilder("store_and_fwd_flag", "N"), + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) - val metadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) - assertEquals("Transform has not finished", TransformMetadata.Status.FINISHED, transformMetadata.status) - transformMetadata - } + val metadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) + assertEquals("Transform has not finished", TransformMetadata.Status.FINISHED, transformMetadata.status) + transformMetadata + } assertEquals("More than expected pages processed", 2L, metadata.stats.pagesProcessed) assertEquals("More than expected documents indexed", 1L, metadata.stats.documentsIndexed) @@ -124,13 +129,14 @@ class TransformRunnerIT : TransformRestTestCase() { updateTransformStartTime(transform) - val metadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) - assertEquals("Transform has not failed", TransformMetadata.Status.FAILED, transformMetadata.status) - transformMetadata - } + val metadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) 
+ assertEquals("Transform has not failed", TransformMetadata.Status.FAILED, transformMetadata.status) + transformMetadata + } assertTrue("Expected failure message to be present", !metadata.failureReason.isNullOrBlank()) } @@ -153,55 +159,58 @@ class TransformRunnerIT : TransformRestTestCase() { ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "state.sum += doc[\"total_amount\"].value; state.count += doc[\"passenger_count\"].value", - emptyMap() - ) + emptyMap(), + ), ) .combineScript( Script( ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "def d = new long[2]; d[0] = state.sum; d[1] = state.count; return d", - emptyMap() - ) + emptyMap(), + ), ) .reduceScript( Script( ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "double sum = 0; double count = 0; for (a in states) { sum += a[0]; count += a[1]; } return sum/count", - emptyMap() - ) - ) + emptyMap(), + ), + ), ) - val transform = Transform( - id = "id_4", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform", - metadataId = null, - sourceIndex = "transform-source-index", - targetIndex = "transform-target-index", - roles = emptyList(), - pageSize = 1, - groups = listOf( - Terms(sourceField = "store_and_fwd_flag", targetField = "flag") - ), - aggregations = aggregatorFactories - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_4", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform", + metadataId = null, + sourceIndex = "transform-source-index", + targetIndex = "transform-target-index", + roles = emptyList(), + pageSize = 1, + groups = + listOf( + Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), + ), + aggregations = aggregatorFactories, + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) - val metadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) - assertEquals("Transform has not finished", TransformMetadata.Status.FINISHED, transformMetadata.status) - transformMetadata - } + val metadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) 
+ assertEquals("Transform has not finished", TransformMetadata.Status.FINISHED, transformMetadata.status) + transformMetadata + } assertEquals("More than expected pages processed", 3L, metadata.stats.pagesProcessed) assertEquals("More than expected documents indexed", 2L, metadata.stats.documentsIndexed) @@ -221,24 +230,26 @@ class TransformRunnerIT : TransformRestTestCase() { validateSourceIndex(sourceIdxTestName) - val transform = Transform( - id = "id_13", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform doc values must be the same", - metadataId = null, - sourceIndex = sourceIdxTestName, - targetIndex = targetIdxTestName, - roles = emptyList(), - pageSize = 1, - groups = listOf( - Terms(sourceField = storeAndForwardTerm, targetField = storeAndForwardTerm) - ), - aggregations = AggregatorFactories.builder().addAggregator(AggregationBuilders.avg(fareAmount).field(fareAmount)) - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_13", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform doc values must be the same", + metadataId = null, + sourceIndex = sourceIdxTestName, + targetIndex = targetIdxTestName, + roles = emptyList(), + pageSize = 1, + groups = + listOf( + Terms(sourceField = storeAndForwardTerm, targetField = storeAndForwardTerm), + ), + aggregations = AggregatorFactories.builder().addAggregator(AggregationBuilders.avg(fareAmount).field(fareAmount)), + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) @@ -252,20 +263,21 @@ class TransformRunnerIT : TransformRestTestCase() { val transformMetadata = getTransformMetadata(transformJob.metadataId!!) assertEquals("Transform is not finished", TransformMetadata.Status.FINISHED, transformMetadata.status) - val req = """ - { - "size": 0, - "aggs": { - "$avgAmountPerFlag": { - "terms": { - "field": "$storeAndForwardTerm", "order": { "_key": "asc" } - }, - "aggs": { - "avg": { "avg": { "field": "$fareAmount" } } } + val req = + """ + { + "size": 0, + "aggs": { + "$avgAmountPerFlag": { + "terms": { + "field": "$storeAndForwardTerm", "order": { "_key": "asc" } + }, + "aggs": { + "avg": { "avg": { "field": "$fareAmount" } } } + } } } - } - """.trimIndent() + """.trimIndent() var rawRes = client().makeRequest(RestRequest.Method.POST.name, "/$sourceIdxTestName/_search", emptyMap(), StringEntity(req, ContentType.APPLICATION_JSON)) assertTrue(rawRes.restStatus() == RestStatus.OK) @@ -281,7 +293,7 @@ class TransformRunnerIT : TransformRestTestCase() { val transformAggBucket = transformAggBuckets[idx] assertEquals( "The doc_count had a different value raw[$rawAggBucket] transform[$transformAggBucket]", - rawAggBucket["doc_count"]!!, transformAggBucket["doc_count"]!! 
+ rawAggBucket["doc_count"]!!, transformAggBucket["doc_count"]!!, ) } } @@ -298,24 +310,26 @@ class TransformRunnerIT : TransformRestTestCase() { validateSourceIndex(sourceIdxTestName) - val transform = Transform( - id = "id_14", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform doc values must be the same", - metadataId = null, - sourceIndex = sourceIdxTestName, - targetIndex = targetIdxTestName, - roles = emptyList(), - pageSize = 1, - groups = listOf( - Terms(sourceField = pickupDateTime, targetField = pickupDateTime) - ), - aggregations = AggregatorFactories.builder().addAggregator(AggregationBuilders.avg(fareAmount).field(fareAmount)) - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_14", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform doc values must be the same", + metadataId = null, + sourceIndex = sourceIdxTestName, + targetIndex = targetIdxTestName, + roles = emptyList(), + pageSize = 1, + groups = + listOf( + Terms(sourceField = pickupDateTime, targetField = pickupDateTime), + ), + aggregations = AggregatorFactories.builder().addAggregator(AggregationBuilders.avg(fareAmount).field(fareAmount)), + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) @@ -389,24 +403,26 @@ class TransformRunnerIT : TransformRestTestCase() { val avgFareAmountAgg = AggregationBuilders.avg(fareAmount).field(fareAmount) val maxDateAggBuilder = AggregationBuilders.max(pickupDateTime).field(pickupDateTime) - val transform = Transform( - id = "id_15", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform doc values must be the same", - metadataId = null, - sourceIndex = sourceIdxTestName, - targetIndex = targetIdxTestName, - roles = emptyList(), - pageSize = 1, - groups = listOf( - Terms(sourceField = storeAndForward, targetField = storeAndForward) - ), - aggregations = AggregatorFactories.builder().addAggregator(avgFareAmountAgg).addAggregator(maxDateAggBuilder) - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_15", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform doc values must be the same", + metadataId = null, + sourceIndex = sourceIdxTestName, + targetIndex = targetIdxTestName, + roles = emptyList(), + pageSize = 1, + groups = + listOf( + Terms(sourceField = storeAndForward, targetField = storeAndForward), + ), + aggregations = AggregatorFactories.builder().addAggregator(avgFareAmountAgg).addAggregator(maxDateAggBuilder), + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) waitFor { @@ -482,24 +498,26 @@ class TransformRunnerIT : TransformRestTestCase() { val avgFareAmountAgg = AggregationBuilders.avg(fareAmount).field(fareAmount) val countDateAggBuilder = AggregationBuilders.count(pickupDateTime).field(pickupDateTime) - val transform = Transform( - id = "id_16", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = 
IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform doc values must be the same", - metadataId = null, - sourceIndex = sourceIdxTestName, - targetIndex = targetIdxTestName, - roles = emptyList(), - pageSize = 1, - groups = listOf( - Terms(sourceField = pickupDateTime, targetField = pickupDateTimeTerm) - ), - aggregations = AggregatorFactories.builder().addAggregator(avgFareAmountAgg).addAggregator(countDateAggBuilder) - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_16", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform doc values must be the same", + metadataId = null, + sourceIndex = sourceIdxTestName, + targetIndex = targetIdxTestName, + roles = emptyList(), + pageSize = 1, + groups = + listOf( + Terms(sourceField = pickupDateTime, targetField = pickupDateTimeTerm), + ), + aggregations = AggregatorFactories.builder().addAggregator(avgFareAmountAgg).addAggregator(countDateAggBuilder), + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) waitFor { @@ -592,33 +610,36 @@ class TransformRunnerIT : TransformRestTestCase() { waitFor { assertTrue("Strict target index not created", indexExists("transform-target-strict-index")) } - val transform = Transform( - id = "id_5", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform", - metadataId = null, - sourceIndex = "transform-source-index", - targetIndex = "transform-target-strict-index", - roles = emptyList(), - pageSize = 1, - groups = listOf( - Terms(sourceField = "store_and_fwd_flag", targetField = "flag") - ) - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_5", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform", + metadataId = null, + sourceIndex = "transform-source-index", + targetIndex = "transform-target-strict-index", + roles = emptyList(), + pageSize = 1, + groups = + listOf( + Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), + ), + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) - val metadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) - assertEquals("Transform has not failed", TransformMetadata.Status.FAILED, transformMetadata.status) - transformMetadata - } + val metadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) 
+ assertEquals("Transform has not failed", TransformMetadata.Status.FAILED, transformMetadata.status) + transformMetadata + } assertTrue("Expected failure message to be present", !metadata.failureReason.isNullOrBlank()) } @@ -635,57 +656,60 @@ class TransformRunnerIT : TransformRestTestCase() { ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "state.sum += doc[\"random_field\"].value; state.count += doc[\"passenger_count\"].value", - emptyMap() - ) + emptyMap(), + ), ) .combineScript( Script( ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "def d = new long[2]; d[0] = state.sum; d[1] = state.count; return d", - emptyMap() - ) + emptyMap(), + ), ) .reduceScript( Script( ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "double sum = 0; double count = 0; for (a in states) { sum += a[0]; count += a[1]; } return sum/count", - emptyMap() - ) - ) + emptyMap(), + ), + ), ) - val transform = Transform( - id = "id_6", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform", - metadataId = null, - sourceIndex = "transform-source-index", - targetIndex = "transform-target-index", - roles = emptyList(), - pageSize = 1, - groups = listOf( - Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), - Histogram(sourceField = "passenger_count", targetField = "count", interval = 2.0), - DateHistogram(sourceField = "tpep_pickup_datetime", targetField = "date", fixedInterval = "1d") - ), - aggregations = aggregatorFactories - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_6", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform", + metadataId = null, + sourceIndex = "transform-source-index", + targetIndex = "transform-target-index", + roles = emptyList(), + pageSize = 1, + groups = + listOf( + Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), + Histogram(sourceField = "passenger_count", targetField = "count", interval = 2.0), + DateHistogram(sourceField = "tpep_pickup_datetime", targetField = "date", fixedInterval = "1d"), + ), + aggregations = aggregatorFactories, + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) - val metadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) - assertEquals("Transform has not failed", TransformMetadata.Status.FAILED, transformMetadata.status) - transformMetadata - } + val metadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) 
+ assertEquals("Transform has not failed", TransformMetadata.Status.FAILED, transformMetadata.status) + transformMetadata + } assertTrue("Expected failure message to be present", !metadata.failureReason.isNullOrBlank()) } @@ -698,30 +722,32 @@ class TransformRunnerIT : TransformRestTestCase() { aggregatorFactories.addPipelineAggregator( BucketScriptPipelineAggregationBuilder( "test_pipeline_aggregation", - Script("1") - ) + Script("1"), + ), ) - val transform = Transform( - id = "id_17", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform", - metadataId = null, - sourceIndex = "transform-source-index", - targetIndex = "transform-target-index", - roles = emptyList(), - pageSize = 1, - groups = listOf( - Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), - Histogram(sourceField = "passenger_count", targetField = "count", interval = 2.0), - DateHistogram(sourceField = "tpep_pickup_datetime", targetField = "date", fixedInterval = "1d") - ), - aggregations = aggregatorFactories - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_17", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform", + metadataId = null, + sourceIndex = "transform-source-index", + targetIndex = "transform-target-index", + roles = emptyList(), + pageSize = 1, + groups = + listOf( + Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), + Histogram(sourceField = "passenger_count", targetField = "count", interval = 2.0), + DateHistogram(sourceField = "tpep_pickup_datetime", targetField = "date", fixedInterval = "1d"), + ), + aggregations = aggregatorFactories, + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) } } @@ -744,8 +770,8 @@ class TransformRunnerIT : TransformRestTestCase() { } } """.trimIndent(), - ContentType.APPLICATION_JSON - ) + ContentType.APPLICATION_JSON, + ), ) client().makeRequest("PUT", "/_data_stream/$dataStreamName") @@ -756,35 +782,38 @@ class TransformRunnerIT : TransformRestTestCase() { client().makeRequest("POST", "/$dataStreamName/_rollover") // Create the transform job. 
- val transform = Transform( - id = "id_7", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform", - metadataId = null, - sourceIndex = dataStreamName, - targetIndex = "transform-target-index", - roles = emptyList(), - pageSize = 100, - groups = listOf( - Terms(sourceField = "store_and_fwd_flag", targetField = "flag") - ) - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_7", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform", + metadataId = null, + sourceIndex = dataStreamName, + targetIndex = "transform-target-index", + roles = emptyList(), + pageSize = 100, + groups = + listOf( + Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), + ), + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) waitFor { assertTrue("Target transform index was not created", indexExists(transform.targetIndex)) } - val metadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) - assertEquals("Transform has not finished", TransformMetadata.Status.FINISHED, transformMetadata.status) - transformMetadata - } + val metadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) + assertEquals("Transform has not finished", TransformMetadata.Status.FINISHED, transformMetadata.status) + transformMetadata + } assertEquals("More than expected pages processed", 2L, metadata.stats.pagesProcessed) assertEquals("More than expected documents indexed", 2L, metadata.stats.documentsIndexed) @@ -797,40 +826,43 @@ class TransformRunnerIT : TransformRestTestCase() { fun `test no-op execution when no buckets have been modified`() { validateSourceIndex("transform-no-op-source-index") - val transform = Transform( - id = "id_8", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform", - metadataId = null, - sourceIndex = "transform-no-op-source-index", - targetIndex = "transform-no-op-target-index", - roles = emptyList(), - pageSize = 100, - groups = listOf( - Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), - Histogram(sourceField = "trip_distance", targetField = "distance", interval = 0.1) - ), - continuous = true - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_8", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform", + metadataId = null, + sourceIndex = "transform-no-op-source-index", + targetIndex = "transform-no-op-target-index", + roles = emptyList(), + pageSize = 100, + groups = + listOf( + Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), + Histogram(sourceField = "trip_distance", targetField = "distance", interval = 0.1), + ), + continuous = true, + ).let { createTransform(it, it.id) } 
updateTransformStartTime(transform) waitFor { assertTrue("Target transform index was not created", indexExists(transform.targetIndex)) } - val firstIterationMetadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) - assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 5000, transformMetadata.stats.documentsProcessed) - assertEquals("Transform did not complete iteration", null, transformMetadata.afterKey) - assertNotNull("Continuous stats were not updated", transformMetadata.continuousStats) - assertNotNull("Continuous stats were set, but lastTimestamp was not", transformMetadata.continuousStats!!.lastTimestamp) - transformMetadata - } + val firstIterationMetadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) + assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 5000, transformMetadata.stats.documentsProcessed) + assertEquals("Transform did not complete iteration", null, transformMetadata.afterKey) + assertNotNull("Continuous stats were not updated", transformMetadata.continuousStats) + assertNotNull("Continuous stats were set, but lastTimestamp was not", transformMetadata.continuousStats!!.lastTimestamp) + transformMetadata + } assertEquals("Not the expected transform status", TransformMetadata.Status.STARTED, firstIterationMetadata.status) assertEquals("Not the expected pages processed", 3L, firstIterationMetadata.stats.pagesProcessed) @@ -841,13 +873,14 @@ class TransformRunnerIT : TransformRestTestCase() { updateTransformStartTime(transform) - val secondIterationMetadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) - assertTrue("Transform did not complete iteration or update timestamp", transformMetadata.continuousStats!!.lastTimestamp!! > firstIterationMetadata.continuousStats!!.lastTimestamp) - transformMetadata - } + val secondIterationMetadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) + assertTrue("Transform did not complete iteration or update timestamp", transformMetadata.continuousStats!!.lastTimestamp!! 
> firstIterationMetadata.continuousStats!!.lastTimestamp) + transformMetadata + } assertEquals("Transform did not have null afterKey after iteration", null, secondIterationMetadata.afterKey) assertEquals("Not the expected transform status", TransformMetadata.Status.STARTED, firstIterationMetadata.status) @@ -866,38 +899,41 @@ class TransformRunnerIT : TransformRestTestCase() { val aggregatorFactories = AggregatorFactories.builder() aggregatorFactories.addAggregator(AggregationBuilders.sum("revenue").field("total_amount")) - val transform = Transform( - id = "id_9", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform", - metadataId = null, - sourceIndex = "continuous-transform-source-index", - targetIndex = "continuous-transform-target-index", - roles = emptyList(), - pageSize = 100, - groups = listOf( - Terms(sourceField = "store_and_fwd_flag", targetField = "flag") - ), - continuous = true, - aggregations = aggregatorFactories - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_9", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform", + metadataId = null, + sourceIndex = "continuous-transform-source-index", + targetIndex = "continuous-transform-target-index", + roles = emptyList(), + pageSize = 100, + groups = + listOf( + Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), + ), + continuous = true, + aggregations = aggregatorFactories, + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) waitFor { assertTrue("Target transform index was not created", indexExists(transform.targetIndex)) } - val firstIterationMetadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) - assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 5000, transformMetadata.stats.documentsProcessed) - assertEquals("Transform did not complete iteration", null, transformMetadata.afterKey) - transformMetadata - } + val firstIterationMetadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) 
+ assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 5000, transformMetadata.stats.documentsProcessed) + assertEquals("Transform did not complete iteration", null, transformMetadata.afterKey) + transformMetadata + } assertEquals("Not the expected transform status", TransformMetadata.Status.STARTED, firstIterationMetadata.status) assertEquals("Not the expected pages processed", 2L, firstIterationMetadata.stats.pagesProcessed) @@ -906,21 +942,24 @@ class TransformRunnerIT : TransformRestTestCase() { assertTrue("Doesn't capture indexed time", firstIterationMetadata.stats.indexTimeInMillis > 0) assertTrue("Didn't capture search time", firstIterationMetadata.stats.searchTimeInMillis > 0) - var hits = waitFor { - val response = client().makeRequest( - "GET", "continuous-transform-target-index/_search", - StringEntity("{}", ContentType.APPLICATION_JSON) - ) - assertEquals("Request failed", RestStatus.OK, response.restStatus()) - val responseHits = response.asMap().getValue("hits") as Map<*, *> - val totalDocs = (responseHits["hits"] as ArrayList<*>).fold(0) { sum, bucket -> - val docCount = ((bucket as Map<*, *>)["_source"] as Map<*, *>)["_doc_count"] as Int - sum + docCount - } - assertEquals("Not all documents included in the transform target index", 5000, totalDocs) + var hits = + waitFor { + val response = + client().makeRequest( + "GET", "continuous-transform-target-index/_search", + StringEntity("{}", ContentType.APPLICATION_JSON), + ) + assertEquals("Request failed", RestStatus.OK, response.restStatus()) + val responseHits = response.asMap().getValue("hits") as Map<*, *> + val totalDocs = + (responseHits["hits"] as ArrayList<*>).fold(0) { sum, bucket -> + val docCount = ((bucket as Map<*, *>)["_source"] as Map<*, *>)["_doc_count"] as Int + sum + docCount + } + assertEquals("Not all documents included in the transform target index", 5000, totalDocs) - responseHits["hits"] as ArrayList<*> - } + responseHits["hits"] as ArrayList<*> + } hits.forEach { val bucket = ((it as Map<*, *>)["_source"] as Map<*, *>) if (bucket["flag"] == "N") { @@ -940,15 +979,16 @@ class TransformRunnerIT : TransformRestTestCase() { updateTransformStartTime(transform) - val secondIterationMetadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) - // As the new documents all fall into the same buckets as the last, all of the documents are processed again - assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 15000, transformMetadata.stats.documentsProcessed) - assertEquals("Transform did not have null afterKey after iteration", null, transformMetadata.afterKey) - transformMetadata - } + val secondIterationMetadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) 
+ // As the new documents all fall into the same buckets as the last, all of the documents are processed again + assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 15000, transformMetadata.stats.documentsProcessed) + assertEquals("Transform did not have null afterKey after iteration", null, transformMetadata.afterKey) + transformMetadata + } assertEquals("Not the expected transform status", TransformMetadata.Status.STARTED, secondIterationMetadata.status) assertEquals("More than expected pages processed", 4L, secondIterationMetadata.stats.pagesProcessed) @@ -957,21 +997,24 @@ class TransformRunnerIT : TransformRestTestCase() { assertTrue("Doesn't capture indexed time", secondIterationMetadata.stats.indexTimeInMillis > firstIterationMetadata.stats.indexTimeInMillis) assertTrue("Didn't capture search time", secondIterationMetadata.stats.searchTimeInMillis > firstIterationMetadata.stats.searchTimeInMillis) - hits = waitFor { - val response = client().makeRequest( - "GET", "continuous-transform-target-index/_search", - StringEntity("{}", ContentType.APPLICATION_JSON) - ) - assertEquals("Request failed", RestStatus.OK, response.restStatus()) - val responseHits = response.asMap().getValue("hits") as Map<*, *> - val totalDocs = (responseHits["hits"] as ArrayList<*>).fold(0) { sum, bucket -> - val docCount = ((bucket as Map<*, *>)["_source"] as Map<*, *>)["_doc_count"] as Int - sum + docCount - } - assertEquals("Not all documents included in the transform target index", 10000, totalDocs) + hits = + waitFor { + val response = + client().makeRequest( + "GET", "continuous-transform-target-index/_search", + StringEntity("{}", ContentType.APPLICATION_JSON), + ) + assertEquals("Request failed", RestStatus.OK, response.restStatus()) + val responseHits = response.asMap().getValue("hits") as Map<*, *> + val totalDocs = + (responseHits["hits"] as ArrayList<*>).fold(0) { sum, bucket -> + val docCount = ((bucket as Map<*, *>)["_source"] as Map<*, *>)["_doc_count"] as Int + sum + docCount + } + assertEquals("Not all documents included in the transform target index", 10000, totalDocs) - responseHits["hits"] as ArrayList<*> - } + responseHits["hits"] as ArrayList<*> + } hits.forEach { val bucket = ((it as Map<*, *>)["_source"] as Map<*, *>) if (bucket["flag"] == "N") { @@ -994,38 +1037,41 @@ class TransformRunnerIT : TransformRestTestCase() { val aggregatorFactories = AggregatorFactories.builder() aggregatorFactories.addAggregator(AggregationBuilders.sum("twice_id_sum").field("twice_id")) - val transform = Transform( - id = "id_10", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform", - metadataId = null, - sourceIndex = sourceIndex, - targetIndex = "modified-bucket-target-index", - roles = emptyList(), - pageSize = 100, - groups = listOf( - Histogram(sourceField = "iterating_id", targetField = "id_group", interval = 5.0) - ), - continuous = true, - aggregations = aggregatorFactories - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_10", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform", + metadataId = null, + sourceIndex = sourceIndex, + targetIndex = "modified-bucket-target-index", + roles = emptyList(), + pageSize = 100, + groups = 
+ listOf( + Histogram(sourceField = "iterating_id", targetField = "id_group", interval = 5.0), + ), + continuous = true, + aggregations = aggregatorFactories, + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) waitFor { assertTrue("Target transform index was not created", indexExists(transform.targetIndex)) } - val firstIterationMetadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) - assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 48, transformMetadata.stats.documentsProcessed) - assertEquals("Transform did not complete iteration", null, transformMetadata.afterKey) - transformMetadata - } + val firstIterationMetadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) + assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 48, transformMetadata.stats.documentsProcessed) + assertEquals("Transform did not complete iteration", null, transformMetadata.afterKey) + transformMetadata + } assertEquals("Not the expected transform status", TransformMetadata.Status.STARTED, firstIterationMetadata.status) assertEquals("Not the expected pages processed", 2L, firstIterationMetadata.stats.pagesProcessed) @@ -1035,21 +1081,24 @@ class TransformRunnerIT : TransformRestTestCase() { assertTrue("Didn't capture search time", firstIterationMetadata.stats.searchTimeInMillis > 0) // Get all of the buckets - var hits = waitFor { - val response = client().makeRequest( - "GET", "${transform.targetIndex}/_search", - StringEntity("{\"size\": 25}", ContentType.APPLICATION_JSON) - ) - assertEquals("Request failed", RestStatus.OK, response.restStatus()) - val responseHits = response.asMap().getValue("hits") as Map<*, *> - val totalDocs = (responseHits["hits"] as ArrayList<*>).fold(0) { sum, bucket -> - val docCount = ((bucket as Map<*, *>)["_source"] as Map<*, *>)["_doc_count"] as Int - sum + docCount - } - assertEquals("Not all documents included in the transform target index", 48, totalDocs) + var hits = + waitFor { + val response = + client().makeRequest( + "GET", "${transform.targetIndex}/_search", + StringEntity("{\"size\": 25}", ContentType.APPLICATION_JSON), + ) + assertEquals("Request failed", RestStatus.OK, response.restStatus()) + val responseHits = response.asMap().getValue("hits") as Map<*, *> + val totalDocs = + (responseHits["hits"] as ArrayList<*>).fold(0) { sum, bucket -> + val docCount = ((bucket as Map<*, *>)["_source"] as Map<*, *>)["_doc_count"] as Int + sum + docCount + } + assertEquals("Not all documents included in the transform target index", 48, totalDocs) - responseHits["hits"] as ArrayList<*> - } + responseHits["hits"] as ArrayList<*> + } // Validate the buckets include the correct information hits.forEach { @@ -1076,15 +1125,16 @@ class TransformRunnerIT : TransformRestTestCase() { updateTransformStartTime(transform) - val secondIterationMetadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) 
- // As the ids 45-47 will be processed a second time when the bucket is recalculated, this number is greater than 100 - assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 103L, transformMetadata.stats.documentsProcessed) - assertEquals("Transform did not have null afterKey after iteration", null, transformMetadata.afterKey) - transformMetadata - } + val secondIterationMetadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) + // As the ids 45-47 will be processed a second time when the bucket is recalculated, this number is greater than 100 + assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 103L, transformMetadata.stats.documentsProcessed) + assertEquals("Transform did not have null afterKey after iteration", null, transformMetadata.afterKey) + transformMetadata + } assertEquals("Not the expected transform status", TransformMetadata.Status.STARTED, secondIterationMetadata.status) assertEquals("More than expected pages processed", 4L, secondIterationMetadata.stats.pagesProcessed) @@ -1095,21 +1145,24 @@ class TransformRunnerIT : TransformRestTestCase() { disableTransform(transform.id) - hits = waitFor { - val response = client().makeRequest( - "GET", "${transform.targetIndex}/_search", - StringEntity("{\"size\": 25}", ContentType.APPLICATION_JSON) - ) - assertEquals("Request failed", RestStatus.OK, response.restStatus()) - val responseHits = response.asMap().getValue("hits") as Map<*, *> - val totalDocs = (responseHits["hits"] as ArrayList<*>).fold(0) { sum, bucket -> - val docCount = ((bucket as Map<*, *>)["_source"] as Map<*, *>)["_doc_count"] as Int - sum + docCount - } - assertEquals("Not all documents included in the transform target index", 100, totalDocs) + hits = + waitFor { + val response = + client().makeRequest( + "GET", "${transform.targetIndex}/_search", + StringEntity("{\"size\": 25}", ContentType.APPLICATION_JSON), + ) + assertEquals("Request failed", RestStatus.OK, response.restStatus()) + val responseHits = response.asMap().getValue("hits") as Map<*, *> + val totalDocs = + (responseHits["hits"] as ArrayList<*>).fold(0) { sum, bucket -> + val docCount = ((bucket as Map<*, *>)["_source"] as Map<*, *>)["_doc_count"] as Int + sum + docCount + } + assertEquals("Not all documents included in the transform target index", 100, totalDocs) - responseHits["hits"] as ArrayList<*> - } + responseHits["hits"] as ArrayList<*> + } hits.forEach { val bucket = ((it as Map<*, *>)["_source"] as Map<*, *>) @@ -1124,45 +1177,48 @@ class TransformRunnerIT : TransformRestTestCase() { validateSourceIndex("wildcard-source-2") validateSourceIndex("wildcard-source-3") - val transform = Transform( - id = "id_11", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform", - metadataId = null, - sourceIndex = "wildcard-s*e-*", - targetIndex = "wildcard-target-index", - roles = emptyList(), - pageSize = 100, - groups = listOf( - Terms(sourceField = "store_and_fwd_flag", targetField = "flag") - ), - continuous = true - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_11", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + 
jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform", + metadataId = null, + sourceIndex = "wildcard-s*e-*", + targetIndex = "wildcard-target-index", + roles = emptyList(), + pageSize = 100, + groups = + listOf( + Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), + ), + continuous = true, + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) waitFor { assertTrue("Target transform index was not created", indexExists(transform.targetIndex)) } - val firstIterationMetadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) - assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 15000, transformMetadata.stats.documentsProcessed) - assertEquals("Transform did not complete iteration", null, transformMetadata.afterKey) - assertNotNull("Continuous stats were not updated", transformMetadata.continuousStats) - assertNotNull("Continuous stats were set, but lastTimestamp was not", transformMetadata.continuousStats!!.lastTimestamp) - assertEquals("Not the expected transform status", TransformMetadata.Status.STARTED, transformMetadata.status) - assertEquals("Not the expected pages processed", 6L, transformMetadata.stats.pagesProcessed) - assertEquals("Not the expected documents indexed", 2L, transformMetadata.stats.documentsIndexed) - assertEquals("Not the expected documents processed", 15000L, transformMetadata.stats.documentsProcessed) - assertTrue("Doesn't capture indexed time", transformMetadata.stats.indexTimeInMillis > 0) - assertTrue("Didn't capture search time", transformMetadata.stats.searchTimeInMillis > 0) - transformMetadata - } + val firstIterationMetadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) 
+ assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 15000, transformMetadata.stats.documentsProcessed) + assertEquals("Transform did not complete iteration", null, transformMetadata.afterKey) + assertNotNull("Continuous stats were not updated", transformMetadata.continuousStats) + assertNotNull("Continuous stats were set, but lastTimestamp was not", transformMetadata.continuousStats!!.lastTimestamp) + assertEquals("Not the expected transform status", TransformMetadata.Status.STARTED, transformMetadata.status) + assertEquals("Not the expected pages processed", 6L, transformMetadata.stats.pagesProcessed) + assertEquals("Not the expected documents indexed", 2L, transformMetadata.stats.documentsIndexed) + assertEquals("Not the expected documents processed", 15000L, transformMetadata.stats.documentsProcessed) + assertTrue("Doesn't capture indexed time", transformMetadata.stats.indexTimeInMillis > 0) + assertTrue("Didn't capture search time", transformMetadata.stats.searchTimeInMillis > 0) + transformMetadata + } waitFor { val documentsBehind = getTransformDocumentsBehind(transform.id) @@ -1210,39 +1266,42 @@ class TransformRunnerIT : TransformRestTestCase() { val aggregatorFactories = AggregatorFactories.builder() aggregatorFactories.addAggregator(AggregationBuilders.sum("twice_id_sum").field("twice_id")) - val transform = Transform( - id = "id_12", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform", - metadataId = null, - sourceIndex = sourceIndex, - targetIndex = "null-bucket-target-index", - roles = emptyList(), - pageSize = 100, - groups = listOf( - Histogram(sourceField = "iterating_id", targetField = "id_group", interval = 5.0), - Terms(sourceField = "term_id", targetField = "id_term") - ), - continuous = true, - aggregations = aggregatorFactories - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_12", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform", + metadataId = null, + sourceIndex = sourceIndex, + targetIndex = "null-bucket-target-index", + roles = emptyList(), + pageSize = 100, + groups = + listOf( + Histogram(sourceField = "iterating_id", targetField = "id_group", interval = 5.0), + Terms(sourceField = "term_id", targetField = "id_term"), + ), + continuous = true, + aggregations = aggregatorFactories, + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) waitFor { assertTrue("Target transform index was not created", indexExists(transform.targetIndex)) } - val firstIterationMetadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) - assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 52, transformMetadata.stats.documentsProcessed) - assertEquals("Transform did not complete iteration", null, transformMetadata.afterKey) - transformMetadata - } + val firstIterationMetadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) 
+ assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 52, transformMetadata.stats.documentsProcessed) + assertEquals("Transform did not complete iteration", null, transformMetadata.afterKey) + transformMetadata + } assertEquals("Not the expected transform status", TransformMetadata.Status.STARTED, firstIterationMetadata.status) assertEquals("Not the expected pages processed", 2L, firstIterationMetadata.stats.pagesProcessed) @@ -1252,21 +1311,24 @@ class TransformRunnerIT : TransformRestTestCase() { assertTrue("Didn't capture search time", firstIterationMetadata.stats.searchTimeInMillis > 0) // Get all the buckets - var hits = waitFor { - val response = client().makeRequest( - "GET", "${transform.targetIndex}/_search", - StringEntity("{\"size\": 25}", ContentType.APPLICATION_JSON) - ) - assertEquals("Request failed", RestStatus.OK, response.restStatus()) - val responseHits = response.asMap().getValue("hits") as Map<*, *> - val totalDocs = (responseHits["hits"] as ArrayList<*>).fold(0) { sum, bucket -> - val docCount = ((bucket as Map<*, *>)["_source"] as Map<*, *>)["_doc_count"] as Int - sum + docCount - } - assertEquals("Not all documents included in the transform target index", 52, totalDocs) + var hits = + waitFor { + val response = + client().makeRequest( + "GET", "${transform.targetIndex}/_search", + StringEntity("{\"size\": 25}", ContentType.APPLICATION_JSON), + ) + assertEquals("Request failed", RestStatus.OK, response.restStatus()) + val responseHits = response.asMap().getValue("hits") as Map<*, *> + val totalDocs = + (responseHits["hits"] as ArrayList<*>).fold(0) { sum, bucket -> + val docCount = ((bucket as Map<*, *>)["_source"] as Map<*, *>)["_doc_count"] as Int + sum + docCount + } + assertEquals("Not all documents included in the transform target index", 52, totalDocs) - responseHits["hits"] as ArrayList<*> - } + responseHits["hits"] as ArrayList<*> + } // Validate the buckets include the correct information hits.forEach { @@ -1308,14 +1370,15 @@ class TransformRunnerIT : TransformRestTestCase() { updateTransformStartTime(transform) - val secondIterationMetadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) - assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 104L, transformMetadata.stats.documentsProcessed) - assertEquals("Transform did not have null afterKey after iteration", null, transformMetadata.afterKey) - transformMetadata - } + val secondIterationMetadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) 
+ assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 104L, transformMetadata.stats.documentsProcessed) + assertEquals("Transform did not have null afterKey after iteration", null, transformMetadata.afterKey) + transformMetadata + } assertEquals("Not the expected transform status", TransformMetadata.Status.STARTED, secondIterationMetadata.status) assertEquals("More than expected pages processed", 4L, secondIterationMetadata.stats.pagesProcessed) @@ -1326,21 +1389,24 @@ class TransformRunnerIT : TransformRestTestCase() { disableTransform(transform.id) - hits = waitFor { - val response = client().makeRequest( - "GET", "${transform.targetIndex}/_search", - StringEntity("{\"size\": 40}", ContentType.APPLICATION_JSON) - ) - assertEquals("Request failed", RestStatus.OK, response.restStatus()) - val responseHits = response.asMap().getValue("hits") as Map<*, *> - val totalDocs = (responseHits["hits"] as ArrayList<*>).fold(0) { sum, bucket -> - val docCount = ((bucket as Map<*, *>)["_source"] as Map<*, *>)["_doc_count"] as Int - sum + docCount - } - assertEquals("Not all documents included in the transform target index", 88, totalDocs) + hits = + waitFor { + val response = + client().makeRequest( + "GET", "${transform.targetIndex}/_search", + StringEntity("{\"size\": 40}", ContentType.APPLICATION_JSON), + ) + assertEquals("Request failed", RestStatus.OK, response.restStatus()) + val responseHits = response.asMap().getValue("hits") as Map<*, *> + val totalDocs = + (responseHits["hits"] as ArrayList<*>).fold(0) { sum, bucket -> + val docCount = ((bucket as Map<*, *>)["_source"] as Map<*, *>)["_doc_count"] as Int + sum + docCount + } + assertEquals("Not all documents included in the transform target index", 88, totalDocs) - responseHits["hits"] as ArrayList<*> - } + responseHits["hits"] as ArrayList<*> + } // Validate the buckets include the correct information hits.forEach { @@ -1367,58 +1433,61 @@ class TransformRunnerIT : TransformRestTestCase() { } fun `test continuous transform with a lot of buckets`() { - // Create index with high cardinality fields val sourceIndex = "index_with_lots_of_buckets" val requestBody: StringBuilder = StringBuilder(100000) for (i in 1..2000) { - val docPayload: String = """ - { - "id1": "$i", - "id2": "${i + 1}" - } - """.trimIndent().replace(Regex("[\n\r\\s]"), "") + val docPayload: String = + """ + { + "id1": "$i", + "id2": "${i + 1}" + } + """.trimIndent().replace(Regex("[\n\r\\s]"), "") requestBody.append("{\"create\":{}}\n").append(docPayload).append('\n') } createIndexAndBulkInsert(sourceIndex, Settings.EMPTY, null, null, requestBody.toString()) // Source index will have total of 2000 buckets - val transform = Transform( - id = "transform_index_with_lots_of_buckets", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform", - metadataId = null, - sourceIndex = "index_with_lots_of_buckets", - targetIndex = "index_with_lots_of_buckets_transformed", - roles = emptyList(), - pageSize = 1000, - groups = listOf( - Terms(sourceField = "id1.keyword", targetField = "id1"), - Terms(sourceField = "id2.keyword", targetField = "id2") - ), - continuous = true - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "transform_index_with_lots_of_buckets", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = 
IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + description = "test transform", + metadataId = null, + sourceIndex = "index_with_lots_of_buckets", + targetIndex = "index_with_lots_of_buckets_transformed", + roles = emptyList(), + pageSize = 1000, + groups = + listOf( + Terms(sourceField = "id1.keyword", targetField = "id1"), + Terms(sourceField = "id2.keyword", targetField = "id2"), + ), + continuous = true, + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) waitFor { assertTrue("Target transform index was not created", indexExists(transform.targetIndex)) } - val firstIterationMetadata = waitFor { - val job = getTransform(transformId = transform.id) - assertNotNull("Transform job doesn't have metadata set", job.metadataId) - val transformMetadata = getTransformMetadata(job.metadataId!!) - assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 2000, transformMetadata.stats.documentsProcessed) - assertEquals("Transform did not complete iteration", null, transformMetadata.afterKey) - assertNotNull("Continuous stats were not updated", transformMetadata.continuousStats) - assertNotNull("Continuous stats were set, but lastTimestamp was not", transformMetadata.continuousStats!!.lastTimestamp) - transformMetadata - } + val firstIterationMetadata = + waitFor { + val job = getTransform(transformId = transform.id) + assertNotNull("Transform job doesn't have metadata set", job.metadataId) + val transformMetadata = getTransformMetadata(job.metadataId!!) + assertEquals("Transform did not complete iteration or had incorrect number of documents processed", 2000, transformMetadata.stats.documentsProcessed) + assertEquals("Transform did not complete iteration", null, transformMetadata.afterKey) + assertNotNull("Continuous stats were not updated", transformMetadata.continuousStats) + assertNotNull("Continuous stats were set, but lastTimestamp was not", transformMetadata.continuousStats!!.lastTimestamp) + transformMetadata + } assertEquals("Not the expected transform status", TransformMetadata.Status.STARTED, firstIterationMetadata.status) assertEquals("Not the expected pages processed", 7, firstIterationMetadata.stats.pagesProcessed) @@ -1449,7 +1518,6 @@ class TransformRunnerIT : TransformRestTestCase() { } private fun createIndexAndBulkInsert(name: String, settings: Settings?, mapping: String?, aliases: String?, bulkData: String) { - if (settings != null || mapping != null || aliases != null) { createIndex(name, settings, mapping, aliases) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/action/ActionTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/action/ActionTests.kt index e8a20e416..21ca67efb 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/transform/action/ActionTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/action/ActionTests.kt @@ -16,7 +16,6 @@ import org.opensearch.indexmanagement.transform.action.stop.StopTransformAction import org.opensearch.test.OpenSearchTestCase class ActionTests : OpenSearchTestCase() { - fun `test delete transform name`() { assertNotNull(DeleteTransformsAction.INSTANCE.name()) assertEquals(DeleteTransformsAction.INSTANCE.name(), DeleteTransformsAction.NAME) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/action/RequestTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/action/RequestTests.kt index 61023eea5..e2166ccf1 100644 --- 
a/src/test/kotlin/org/opensearch/indexmanagement/transform/action/RequestTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/action/RequestTests.kt @@ -25,7 +25,6 @@ import org.opensearch.search.fetch.subphase.FetchSourceContext import org.opensearch.test.OpenSearchTestCase class RequestTests : OpenSearchTestCase() { - fun `test delete single transform request`() { val id = "some_id" val req = DeleteTransformsRequest(listOf(id), false) @@ -55,10 +54,11 @@ class RequestTests : OpenSearchTestCase() { fun `test index transform create request`() { val transform = randomTransform().copy(seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM) - val req = IndexTransformRequest( - transform = transform, - refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE - ).index(INDEX_MANAGEMENT_INDEX) + val req = + IndexTransformRequest( + transform = transform, + refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE, + ).index(INDEX_MANAGEMENT_INDEX) val out = BytesStreamOutput().apply { req.writeTo(this) } val streamedReq = IndexTransformRequest(buildStreamInputForTransforms(out)) @@ -71,10 +71,11 @@ class RequestTests : OpenSearchTestCase() { fun `test index transform update request`() { val transform = randomTransform().copy(seqNo = 1L, primaryTerm = 2L) - val req = IndexTransformRequest( - transform = transform, - refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE - ).index(INDEX_MANAGEMENT_INDEX) + val req = + IndexTransformRequest( + transform = transform, + refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE, + ).index(INDEX_MANAGEMENT_INDEX) val out = BytesStreamOutput().apply { req.writeTo(this) } val streamedReq = IndexTransformRequest(buildStreamInputForTransforms(out)) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/action/ResponseTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/action/ResponseTests.kt index 9605495ba..d441de697 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/transform/action/ResponseTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/action/ResponseTests.kt @@ -7,6 +7,7 @@ package org.opensearch.indexmanagement.transform.action import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.transform.action.explain.ExplainTransformResponse import org.opensearch.indexmanagement.transform.action.get.GetTransformResponse import org.opensearch.indexmanagement.transform.action.get.GetTransformsResponse @@ -15,12 +16,10 @@ import org.opensearch.indexmanagement.transform.action.preview.PreviewTransformR import org.opensearch.indexmanagement.transform.buildStreamInputForTransforms import org.opensearch.indexmanagement.transform.randomExplainTransform import org.opensearch.indexmanagement.transform.randomTransform -import org.opensearch.core.rest.RestStatus import org.opensearch.test.OpenSearchTestCase import org.opensearch.test.OpenSearchTestCase.randomList class ResponseTests : OpenSearchTestCase() { - fun `test explain transform response`() { val idsToExplain = randomList(10) { randomAlphaOfLength(10) to randomExplainTransform() }.toMap() val failedToExplain = randomList(10) { randomAlphaOfLength(10) to randomAlphaOfLength(10) }.toMap() @@ -45,10 +44,11 @@ class ResponseTests : OpenSearchTestCase() { } fun `test preview transform response`() { - val documents = listOf( - mapOf("a" to mapOf("90.0" to 100), "b" to "id1", 
"c" to 100), - mapOf("a" to mapOf("90.0" to 50), "b" to "id2", "c" to 20) - ) + val documents = + listOf( + mapOf("a" to mapOf("90.0" to 100), "b" to "id1", "c" to 100), + mapOf("a" to mapOf("90.0" to 50), "b" to "id2", "c" to 20), + ) val res = PreviewTransformResponse(documents, RestStatus.OK) val out = BytesStreamOutput().apply { res.writeTo(this) } val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/model/ISMTransformTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/model/ISMTransformTests.kt index 6129f453f..47624dde3 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/transform/model/ISMTransformTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/model/ISMTransformTests.kt @@ -10,7 +10,6 @@ import org.opensearch.test.OpenSearchTestCase import kotlin.test.assertFailsWith class ISMTransformTests : OpenSearchTestCase() { - fun `test ism transform requires non empty description`() { assertFailsWith(IllegalArgumentException::class, "Requires non empty description") { randomISMTransform().copy(description = "") @@ -30,15 +29,15 @@ class ISMTransformTests : OpenSearchTestCase() { } fun `test ism transform requires page size between 1 and 10K`() { - assertFailsWith(IllegalArgumentException:: class, "Page size cannot be less than 1") { + assertFailsWith(IllegalArgumentException::class, "Page size cannot be less than 1") { randomISMTransform().copy(pageSize = -1) } - assertFailsWith(IllegalArgumentException:: class, "Page size cannot be less than 1") { + assertFailsWith(IllegalArgumentException::class, "Page size cannot be less than 1") { randomISMTransform().copy(pageSize = 0) } - assertFailsWith(IllegalArgumentException:: class, "Page size cannot be greater than 10000") { + assertFailsWith(IllegalArgumentException::class, "Page size cannot be greater than 10000") { randomISMTransform().copy(pageSize = 10001) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/model/TransformTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/model/TransformTests.kt index 54292224a..d3090093c 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/transform/model/TransformTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/model/TransformTests.kt @@ -13,7 +13,6 @@ import java.time.temporal.ChronoUnit import kotlin.test.assertFailsWith class TransformTests : OpenSearchTestCase() { - fun `test transform same indices`() { assertFailsWith(IllegalArgumentException::class, "Source and target index cannot be the same") { randomTransform().copy(sourceIndex = "dummy-index", targetIndex = "dummy-index") diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/model/WriteableTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/model/WriteableTests.kt index af1053641..d4f7474cb 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/transform/model/WriteableTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/model/WriteableTests.kt @@ -14,7 +14,6 @@ import org.opensearch.indexmanagement.transform.randomTransformMetadata import org.opensearch.test.OpenSearchTestCase class WriteableTests : OpenSearchTestCase() { - fun `test transform metadata as stream`() { val transformMetadata = randomTransformMetadata() val out = BytesStreamOutput().also { transformMetadata.writeTo(it) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/model/XContentTests.kt 
b/src/test/kotlin/org/opensearch/indexmanagement/transform/model/XContentTests.kt index e714d56ae..f21413aac 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/transform/model/XContentTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/model/XContentTests.kt @@ -7,9 +7,9 @@ package org.opensearch.indexmanagement.transform.model import org.opensearch.common.settings.Settings import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.XContentType import org.opensearch.core.xcontent.NamedXContentRegistry import org.opensearch.core.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentType import org.opensearch.indexmanagement.indexstatemanagement.util.XCONTENT_WITHOUT_TYPE import org.opensearch.indexmanagement.opensearchapi.parseWithType import org.opensearch.indexmanagement.transform.randomTransform @@ -19,13 +19,13 @@ import org.opensearch.search.SearchModule import org.opensearch.test.OpenSearchTestCase class XContentTests : OpenSearchTestCase() { - fun `test transform metadata parsing without type`() { val transformMetadata = randomTransformMetadata() val transformMetadataString = transformMetadata.toJsonString(XCONTENT_WITHOUT_TYPE) - val parsedTransformMetadata = TransformMetadata.parse( - parser(transformMetadataString), transformMetadata.id, transformMetadata.seqNo, transformMetadata.primaryTerm - ) + val parsedTransformMetadata = + TransformMetadata.parse( + parser(transformMetadataString), transformMetadata.id, transformMetadata.seqNo, transformMetadata.primaryTerm, + ) assertEquals("Round tripping Transform metadata without type doesn't work", transformMetadata, parsedTransformMetadata) } @@ -33,9 +33,10 @@ class XContentTests : OpenSearchTestCase() { val transformMetadata = randomTransformMetadata() val transformMetadataString = transformMetadata.toJsonString() val parser = parserWithType(transformMetadataString) - val parsedTransformMetadata = parser.parseWithType( - transformMetadata.id, transformMetadata.seqNo, transformMetadata.primaryTerm, TransformMetadata.Companion::parse - ) + val parsedTransformMetadata = + parser.parseWithType( + transformMetadata.id, transformMetadata.seqNo, transformMetadata.primaryTerm, TransformMetadata.Companion::parse, + ) assertEquals("Round tripping Transform metadata with type doesn't work", transformMetadata, parsedTransformMetadata) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/opensearchapi/ExtensionsTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/opensearchapi/ExtensionsTests.kt index 605b49b5b..56c60bfc4 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/transform/opensearchapi/ExtensionsTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/opensearchapi/ExtensionsTests.kt @@ -7,38 +7,40 @@ package org.opensearch.indexmanagement.transform.opensearchapi import org.junit.Assert import org.opensearch.OpenSearchException -import org.opensearch.indexmanagement.util.IndexManagementException import org.opensearch.core.rest.RestStatus import org.opensearch.core.tasks.TaskCancelledException +import org.opensearch.indexmanagement.util.IndexManagementException import org.opensearch.test.OpenSearchTestCase class ExtensionsTests : OpenSearchTestCase() { - fun `test is transform operation timeout`() { - val ex = OpenSearchException( - "opensearch test exception", - TaskCancelledException("cancelled task with reason: Cancellation timeout of 100s is expired") - ) + val ex = + OpenSearchException( 
+ "opensearch test exception", + TaskCancelledException("cancelled task with reason: Cancellation timeout of 100s is expired"), + ) val result = isTransformOperationTimedOut(ex) Assert.assertTrue(result) } fun `test is transform operation timeout bad message`() { - val result = isTransformOperationTimedOut( - OpenSearchException( - "opensearch test exception", - TaskCancelledException("some test msg") + val result = + isTransformOperationTimedOut( + OpenSearchException( + "opensearch test exception", + TaskCancelledException("some test msg"), + ), ) - ) Assert.assertFalse(result) } fun `test is retryable`() { Assert.assertTrue(isRetryable(IndexManagementException("502", RestStatus.BAD_GATEWAY, RuntimeException()), emptyList())) - val ex = OpenSearchException( - "opensearch test exception", - TaskCancelledException("cancelled task with reason: Cancellation timeout of 100s is expired") - ) + val ex = + OpenSearchException( + "opensearch test exception", + TaskCancelledException("cancelled task with reason: Cancellation timeout of 100s is expired"), + ) Assert.assertTrue(isRetryable(ex, emptyList())) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestDeleteTransformActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestDeleteTransformActionIT.kt index 93b5da251..0aa807970 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestDeleteTransformActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestDeleteTransformActionIT.kt @@ -6,12 +6,12 @@ package org.opensearch.indexmanagement.transform.resthandler import org.opensearch.client.ResponseException +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.TRANSFORM_BASE_URI import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.transform.TransformRestTestCase import org.opensearch.indexmanagement.transform.randomTransform -import org.opensearch.core.rest.RestStatus import org.opensearch.test.junit.annotations.TestLogging @TestLogging(value = "level:DEBUG", reason = "Debugging tests") @@ -22,10 +22,11 @@ class RestDeleteTransformActionIT : TransformRestTestCase() { val transform = randomTransform().copy(enabled = false) createTransform(transform, transform.id, refresh = true) - val deleteResponse = client().makeRequest( - "DELETE", - "$TRANSFORM_BASE_URI/${transform.id}" - ) + val deleteResponse = + client().makeRequest( + "DELETE", + "$TRANSFORM_BASE_URI/${transform.id}", + ) assertEquals("Delete failed", RestStatus.OK, deleteResponse.restStatus()) val itemList = deleteResponse.asMap()["items"] as ArrayList>> val deleteMap = itemList[0]["delete"] @@ -43,7 +44,7 @@ class RestDeleteTransformActionIT : TransformRestTestCase() { try { client().makeRequest( "DELETE", - "$TRANSFORM_BASE_URI/${transform.id}" + "$TRANSFORM_BASE_URI/${transform.id}", ) fail("Expected an Exception") } catch (e: Exception) { diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestExplainTransformActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestExplainTransformActionIT.kt index badc26d7a..2a66e30a5 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestExplainTransformActionIT.kt +++ 
b/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestExplainTransformActionIT.kt @@ -7,6 +7,7 @@ package org.opensearch.indexmanagement.transform.resthandler import org.junit.Assert import org.opensearch.client.ResponseException +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.TRANSFORM_BASE_URI import org.opensearch.indexmanagement.makeRequest @@ -15,7 +16,6 @@ import org.opensearch.indexmanagement.transform.model.TransformMetadata import org.opensearch.indexmanagement.transform.randomTransform import org.opensearch.indexmanagement.waitFor import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule -import org.opensearch.core.rest.RestStatus import org.opensearch.test.junit.annotations.TestLogging import java.time.Instant import java.time.temporal.ChronoUnit @@ -23,18 +23,18 @@ import java.time.temporal.ChronoUnit @TestLogging(value = "level:DEBUG", reason = "Debugging tests") @Suppress("UNCHECKED_CAST") class RestExplainTransformActionIT : TransformRestTestCase() { - @Throws(Exception::class) fun `test explain transform`() { - val transform = randomTransform().copy( - id = "test_explain_transform", - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - enabled = true, - enabledAt = Instant.now(), - metadataId = null, - sourceIndex = "test_source", - targetIndex = "test_target" - ).let { createTransform(it, it.id) } + val transform = + randomTransform().copy( + id = "test_explain_transform", + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + enabled = true, + enabledAt = Instant.now(), + metadataId = null, + sourceIndex = "test_source", + targetIndex = "test_target", + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) waitFor { @@ -100,24 +100,26 @@ class RestExplainTransformActionIT : TransformRestTestCase() { @Throws(Exception::class) fun `test explain continuous transform with wildcard id`() { - val transform1 = randomTransform().copy( - id = "continuous_wildcard_1", - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - enabled = true, - enabledAt = Instant.now(), - metadataId = null, - continuous = true, - pageSize = 50 - ).let { createTransform(it, it.id) } - val transform2 = randomTransform().copy( - id = "continuous_wildcard_2", - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - enabled = true, - enabledAt = Instant.now(), - metadataId = null, - continuous = true, - pageSize = 50 - ).let { createTransform(it, it.id) } + val transform1 = + randomTransform().copy( + id = "continuous_wildcard_1", + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + enabled = true, + enabledAt = Instant.now(), + metadataId = null, + continuous = true, + pageSize = 50, + ).let { createTransform(it, it.id) } + val transform2 = + randomTransform().copy( + id = "continuous_wildcard_2", + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + enabled = true, + enabledAt = Instant.now(), + metadataId = null, + continuous = true, + pageSize = 50, + ).let { createTransform(it, it.id) } updateTransformStartTime(transform1) updateTransformStartTime(transform2) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestGetTransformActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestGetTransformActionIT.kt index 63264d68c..797e41302 100644 --- 
a/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestGetTransformActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestGetTransformActionIT.kt @@ -6,35 +6,35 @@ package org.opensearch.indexmanagement.transform.resthandler import org.opensearch.client.ResponseException +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.TRANSFORM_BASE_URI import org.opensearch.indexmanagement.common.model.dimension.Dimension import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.transform.TransformRestTestCase import org.opensearch.indexmanagement.transform.action.get.GetTransformsRequest.Companion.DEFAULT_SIZE import org.opensearch.indexmanagement.transform.randomTransform -import org.opensearch.core.rest.RestStatus import org.opensearch.test.OpenSearchTestCase import org.opensearch.test.junit.annotations.TestLogging @TestLogging(value = "level:DEBUG", reason = "Debugging tests") @Suppress("UNCHECKED_CAST") class RestGetTransformActionIT : TransformRestTestCase() { - @Throws(Exception::class) fun `test getting a transform`() { var transform = createTransform(randomTransform()) val indexedTransform = getTransform(transform.id) - transform = transform.copy( - schemaVersion = indexedTransform.schemaVersion, - updatedAt = indexedTransform.updatedAt, - jobSchedule = indexedTransform.jobSchedule, - metadataId = null, - // Roles are deprecated and will not be returned - roles = listOf(), - // User information is not returned as part of REST output - user = null - ) + transform = + transform.copy( + schemaVersion = indexedTransform.schemaVersion, + updatedAt = indexedTransform.updatedAt, + jobSchedule = indexedTransform.jobSchedule, + metadataId = null, + // Roles are deprecated and will not be returned + roles = listOf(), + // User information is not returned as part of REST output + user = null, + ) assertEquals("Indexed and retrieved transform differ", transform, indexedTransform) } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestIndexTransformActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestIndexTransformActionIT.kt index df424ac86..7d9518826 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestIndexTransformActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestIndexTransformActionIT.kt @@ -7,13 +7,13 @@ package org.opensearch.indexmanagement.transform.resthandler import org.opensearch.client.ResponseException import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.INDEX_MANAGEMENT_INDEX import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.TRANSFORM_BASE_URI import org.opensearch.indexmanagement.makeRequest import org.opensearch.indexmanagement.transform.TransformRestTestCase import org.opensearch.indexmanagement.transform.randomTransform import org.opensearch.indexmanagement.util.NO_ID -import org.opensearch.core.rest.RestStatus import org.opensearch.test.junit.annotations.TestLogging @TestLogging(value = "level:DEBUG", reason = "Debugging tests") @@ -25,12 +25,13 @@ class RestIndexTransformActionIT : TransformRestTestCase() { fun `test creating a transform`() { val transform = randomTransform() createTransformSourceIndex(transform) - val response = client().makeRequest( - "PUT", - 
"$TRANSFORM_BASE_URI/${transform.id}", - emptyMap(), - transform.toHttpEntity() - ) + val response = + client().makeRequest( + "PUT", + "$TRANSFORM_BASE_URI/${transform.id}", + emptyMap(), + transform.toHttpEntity(), + ) assertEquals("Create transform failed", RestStatus.CREATED, response.restStatus()) val responseBody = response.asMap() val createdId = responseBody["_id"] as String @@ -47,7 +48,7 @@ class RestIndexTransformActionIT : TransformRestTestCase() { "PUT", TRANSFORM_BASE_URI, emptyMap(), - transform.toHttpEntity() + transform.toHttpEntity(), ) fail("Expected 400 Method BAD_REQUEST response") } catch (e: ResponseException) { @@ -63,7 +64,7 @@ class RestIndexTransformActionIT : TransformRestTestCase() { "POST", "$TRANSFORM_BASE_URI/some_transform", emptyMap(), - transform.toHttpEntity() + transform.toHttpEntity(), ) fail("Expected 405 Method Not Allowed response") } catch (e: ResponseException) { @@ -78,11 +79,12 @@ class RestIndexTransformActionIT : TransformRestTestCase() { val response = client().makeRequest("GET", "/$INDEX_MANAGEMENT_INDEX/_mapping") val parserMap = createParser(XContentType.JSON.xContent(), response.entity.content).map() as Map> val mappingsMap = parserMap[INDEX_MANAGEMENT_INDEX]!!["mappings"] as Map - val expected = createParser( - XContentType.JSON.xContent(), - javaClass.classLoader.getResource("mappings/opendistro-ism-config.json") - .readText() - ) + val expected = + createParser( + XContentType.JSON.xContent(), + javaClass.classLoader.getResource("mappings/opendistro-ism-config.json") + .readText(), + ) val expectedMap = expected.map() assertEquals("Mappings are different", expectedMap, mappingsMap) @@ -96,22 +98,25 @@ class RestIndexTransformActionIT : TransformRestTestCase() { "PUT", "$TRANSFORM_BASE_URI/${transform.id}?refresh=true&if_seq_no=${transform.seqNo}&if_primary_term=${transform.primaryTerm}", emptyMap(), - transform.copy(continuous = !transform.continuous, pageSize = 50).toHttpEntity() // Lower page size to make sure that doesn't throw an error first + transform.copy(continuous = !transform.continuous, pageSize = 50).toHttpEntity(), // Lower page size to make sure that doesn't throw an error first ) fail("Expected 405 Method Not Allowed response") } catch (e: ResponseException) { assertEquals("Unexpected status", RestStatus.BAD_REQUEST, e.response.restStatus()) val actualMessage = e.response.asMap() - val expectedErrorMessage = mapOf( - "error" to mapOf( - "root_cause" to listOf>( - mapOf("type" to "status_exception", "reason" to "Not allowed to modify [continuous]") - ), - "type" to "status_exception", - "reason" to "Not allowed to modify [continuous]" - ), - "status" to 400 - ) + val expectedErrorMessage = + mapOf( + "error" to + mapOf( + "root_cause" to + listOf>( + mapOf("type" to "status_exception", "reason" to "Not allowed to modify [continuous]"), + ), + "type" to "status_exception", + "reason" to "Not allowed to modify [continuous]", + ), + "status" to 400, + ) assertEquals(expectedErrorMessage, actualMessage) } } diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestPreviewTransformActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestPreviewTransformActionIT.kt index 35272c75d..b32d5c023 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestPreviewTransformActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestPreviewTransformActionIT.kt @@ -9,6 +9,7 @@ import org.junit.AfterClass import 
org.junit.Before import org.opensearch.client.ResponseException import org.opensearch.common.time.DateFormatter +import org.opensearch.core.rest.RestStatus import org.opensearch.index.IndexNotFoundException import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.TRANSFORM_BASE_URI import org.opensearch.indexmanagement.common.model.dimension.Terms @@ -17,7 +18,6 @@ import org.opensearch.indexmanagement.transform.TransformRestTestCase import org.opensearch.indexmanagement.transform.model.Transform import org.opensearch.indexmanagement.transform.randomTransform import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule -import org.opensearch.core.rest.RestStatus import org.opensearch.search.aggregations.AggregationBuilders import org.opensearch.search.aggregations.AggregatorFactories import java.time.Instant @@ -27,17 +27,19 @@ import java.time.temporal.ChronoUnit @Suppress("UNCHECKED_CAST") class RestPreviewTransformActionIT : TransformRestTestCase() { - - private val factories = AggregatorFactories.builder() - .addAggregator(AggregationBuilders.sum("revenue").field("total_amount")) - .addAggregator(AggregationBuilders.percentiles("passengerCount").field("passenger_count").percentiles(90.0, 95.0)) - private val transform = randomTransform().copy( - sourceIndex = sourceIndex, - groups = listOf( - Terms(sourceField = "store_and_fwd_flag", targetField = "flag") - ), - aggregations = factories - ) + private val factories = + AggregatorFactories.builder() + .addAggregator(AggregationBuilders.sum("revenue").field("total_amount")) + .addAggregator(AggregationBuilders.percentiles("passengerCount").field("passenger_count").percentiles(90.0, 95.0)) + private val transform = + randomTransform().copy( + sourceIndex = sourceIndex, + groups = + listOf( + Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), + ), + aggregations = factories, + ) @Before fun setupData() { @@ -63,12 +65,13 @@ class RestPreviewTransformActionIT : TransformRestTestCase() { } fun `test preview`() { - val response = client().makeRequest( - "POST", - "$TRANSFORM_BASE_URI/_preview", - emptyMap(), - transform.toHttpEntity() - ) + val response = + client().makeRequest( + "POST", + "$TRANSFORM_BASE_URI/_preview", + emptyMap(), + transform.toHttpEntity(), + ) val expectedKeys = setOf("revenue", "passengerCount", "flag", "transform._doc_count", "_doc_count") assertEquals("Preview transform failed", RestStatus.OK, response.restStatus()) val transformedDocs = response.asMap()["documents"] as List> @@ -80,31 +83,34 @@ class RestPreviewTransformActionIT : TransformRestTestCase() { val pickupDateTime = "tpep_pickup_datetime" val fareAmount = "fare_amount" - val transform = Transform( - id = "id_14", - schemaVersion = 1L, - enabled = true, - enabledAt = Instant.now(), - updatedAt = Instant.now(), - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - description = "test transform doc values must be the same", - metadataId = null, - sourceIndex = sourceIndex, - targetIndex = targetIdxTestName, - roles = emptyList(), - pageSize = 1, - groups = listOf( - Terms(sourceField = pickupDateTime, targetField = pickupDateTime) - ), - aggregations = AggregatorFactories.builder().addAggregator(AggregationBuilders.avg(fareAmount).field(fareAmount)) - ).let { createTransform(it, it.id) } + val transform = + Transform( + id = "id_14", + schemaVersion = 1L, + enabled = true, + enabledAt = Instant.now(), + updatedAt = Instant.now(), + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + 
description = "test transform doc values must be the same", + metadataId = null, + sourceIndex = sourceIndex, + targetIndex = targetIdxTestName, + roles = emptyList(), + pageSize = 1, + groups = + listOf( + Terms(sourceField = pickupDateTime, targetField = pickupDateTime), + ), + aggregations = AggregatorFactories.builder().addAggregator(AggregationBuilders.avg(fareAmount).field(fareAmount)), + ).let { createTransform(it, it.id) } - val response = client().makeRequest( - "POST", - "$TRANSFORM_BASE_URI/_preview", - emptyMap(), - transform.toHttpEntity() - ) + val response = + client().makeRequest( + "POST", + "$TRANSFORM_BASE_URI/_preview", + emptyMap(), + transform.toHttpEntity(), + ) val expectedKeys = setOf("fare_amount", "tpep_pickup_datetime", "transform._doc_count", "_doc_count") assertEquals("Preview transform failed", RestStatus.OK, response.restStatus()) val transformedDocs = response.asMap()["documents"] as List> @@ -116,18 +122,20 @@ class RestPreviewTransformActionIT : TransformRestTestCase() { } fun `test mismatched columns`() { - val factories = AggregatorFactories.builder() - .addAggregator(AggregationBuilders.sum("revenue").field("total_amountdzdfd")) - val transform = transform.copy( - groups = listOf(Terms(sourceField = "non-existent", targetField = "non-existent")), - aggregations = factories - ) + val factories = + AggregatorFactories.builder() + .addAggregator(AggregationBuilders.sum("revenue").field("total_amountdzdfd")) + val transform = + transform.copy( + groups = listOf(Terms(sourceField = "non-existent", targetField = "non-existent")), + aggregations = factories, + ) try { client().makeRequest( "POST", "$TRANSFORM_BASE_URI/_preview", emptyMap(), - transform.toHttpEntity() + transform.toHttpEntity(), ) fail("expected exception") } catch (e: ResponseException) { @@ -142,7 +150,7 @@ class RestPreviewTransformActionIT : TransformRestTestCase() { "POST", "$TRANSFORM_BASE_URI/_preview", emptyMap(), - transform.toHttpEntity() + transform.toHttpEntity(), ) fail("expected exception") } catch (e: ResponseException) { diff --git a/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestStartTransformActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestStartTransformActionIT.kt index 5022a0768..1296a0851 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestStartTransformActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestStartTransformActionIT.kt @@ -6,6 +6,7 @@ package org.opensearch.indexmanagement.transform.resthandler import org.opensearch.client.ResponseException +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.TRANSFORM_BASE_URI import org.opensearch.indexmanagement.common.model.dimension.DateHistogram import org.opensearch.indexmanagement.common.model.dimension.Terms @@ -15,7 +16,6 @@ import org.opensearch.indexmanagement.transform.model.TransformMetadata import org.opensearch.indexmanagement.transform.randomTransform import org.opensearch.indexmanagement.waitFor import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule -import org.opensearch.core.rest.RestStatus import org.opensearch.search.aggregations.AggregatorFactories import org.opensearch.test.junit.annotations.TestLogging import java.time.Instant @@ -24,7 +24,6 @@ import java.time.temporal.ChronoUnit @TestLogging(value = "level:DEBUG", reason = "Debugging tests") @Suppress("UNCHECKED_CAST") class 
RestStartTransformActionIT : TransformRestTestCase() { - @Throws(Exception::class) fun `test starting a stopped transform`() { val transform = createTransform(randomTransform().copy(enabled = false, enabledAt = null, metadataId = null)) @@ -77,25 +76,27 @@ class RestStartTransformActionIT : TransformRestTestCase() { @Throws(Exception::class) fun `test starting a failed transform`() { - val transform = randomTransform().copy( - id = "restart_failed_transform", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - updatedAt = Instant.now(), - enabledAt = Instant.now(), - description = "basic search test", - sourceIndex = "source_restart_failed_transform", - targetIndex = "target_restart_failed_transform", - metadataId = null, - roles = emptyList(), - pageSize = 10, - groups = listOf( - Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h") - ), - aggregations = AggregatorFactories.builder() - ).let { createTransform(it, it.id) } + val transform = + randomTransform().copy( + id = "restart_failed_transform", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + updatedAt = Instant.now(), + enabledAt = Instant.now(), + description = "basic search test", + sourceIndex = "source_restart_failed_transform", + targetIndex = "target_restart_failed_transform", + metadataId = null, + roles = emptyList(), + pageSize = 10, + groups = + listOf( + Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1h"), + ), + aggregations = AggregatorFactories.builder(), + ).let { createTransform(it, it.id) } // This should fail because source index is deleted deleteIndex(transform.sourceIndex) @@ -148,25 +149,27 @@ class RestStartTransformActionIT : TransformRestTestCase() { fun `test starting a finished transform`() { generateNYCTaxiData("source_restart_finished_transform") assertIndexExists("source_restart_finished_transform") - val transform = randomTransform().copy( - id = "restart_finished_transform", - schemaVersion = 1L, - enabled = true, - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - updatedAt = Instant.now(), - enabledAt = Instant.now(), - description = "basic search test", - sourceIndex = "source_restart_finished_transform", - targetIndex = "target_restart_finished_transform", - metadataId = null, - roles = emptyList(), - pageSize = 10, - groups = listOf( - Terms(sourceField = "store_and_fwd_flag", targetField = "flag") - ), - aggregations = AggregatorFactories.builder(), - continuous = false - ).let { createTransform(it, it.id) } + val transform = + randomTransform().copy( + id = "restart_finished_transform", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + updatedAt = Instant.now(), + enabledAt = Instant.now(), + description = "basic search test", + sourceIndex = "source_restart_finished_transform", + targetIndex = "target_restart_finished_transform", + metadataId = null, + roles = emptyList(), + pageSize = 10, + groups = + listOf( + Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), + ), + aggregations = AggregatorFactories.builder(), + continuous = false, + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) var firstTransformsIndexed = 0L diff --git 
a/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestStopTransformActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestStopTransformActionIT.kt index c1de752fc..bd269db51 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestStopTransformActionIT.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/transform/resthandler/RestStopTransformActionIT.kt @@ -6,6 +6,7 @@ package org.opensearch.indexmanagement.transform.resthandler import org.opensearch.client.ResponseException +import org.opensearch.core.rest.RestStatus import org.opensearch.indexmanagement.IndexManagementPlugin.Companion.TRANSFORM_BASE_URI import org.opensearch.indexmanagement.common.model.dimension.DateHistogram import org.opensearch.indexmanagement.common.model.dimension.Terms @@ -16,7 +17,6 @@ import org.opensearch.indexmanagement.transform.model.TransformMetadata import org.opensearch.indexmanagement.transform.randomTransform import org.opensearch.indexmanagement.waitFor import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule -import org.opensearch.core.rest.RestStatus import org.opensearch.search.aggregations.AggregatorFactories import org.opensearch.test.junit.annotations.TestLogging import java.time.Instant @@ -25,7 +25,6 @@ import java.time.temporal.ChronoUnit @TestLogging(value = "level:DEBUG", reason = "Debugging tests") @Suppress("UNCHECKED_CAST") class RestStopTransformActionIT : TransformRestTestCase() { - @Throws(Exception::class) fun `test stopping a stopped Transform`() { val transform = createTransform(randomTransform().copy(enabled = true, enabledAt = randomInstant(), metadataId = null)) @@ -51,16 +50,17 @@ class RestStopTransformActionIT : TransformRestTestCase() { @Throws(Exception::class) fun `test stopping a finished transform`() { // Create a transform that finishes - val transform = createTransform( - randomTransform() - .copy( - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - enabled = true, - enabledAt = Instant.now(), - metadataId = null, - continuous = false - ) - ) + val transform = + createTransform( + randomTransform() + .copy( + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + enabled = true, + enabledAt = Instant.now(), + metadataId = null, + continuous = false, + ), + ) updateTransformStartTime(transform) // Assert it finished @@ -89,13 +89,14 @@ class RestStopTransformActionIT : TransformRestTestCase() { @Throws(Exception::class) fun `test stopping a failed transform`() { // Create a transform that will fail because no source index - val transform = randomTransform().copy( - id = "test_stopping_a_failed_transform", - jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - enabled = true, - enabledAt = Instant.now(), - metadataId = null - ).let { createTransform(it, it.id) } + val transform = + randomTransform().copy( + id = "test_stopping_a_failed_transform", + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + enabled = true, + enabledAt = Instant.now(), + metadataId = null, + ).let { createTransform(it, it.id) } deleteIndex(transform.sourceIndex) updateTransformStartTime(transform) @@ -127,25 +128,27 @@ class RestStopTransformActionIT : TransformRestTestCase() { @Throws(Exception::class) fun `test stopping a running transform`() { generateNYCTaxiData("source_test_stop_running_transform") - val transform = randomTransform().copy( - id = "test_stop_running_transform", - schemaVersion = 1L, - enabled = true, - 
jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), - updatedAt = Instant.now(), - enabledAt = Instant.now(), - description = "basic search test", - sourceIndex = "source_test_stop_running_transform", - targetIndex = "target_test_stop_running_transform", - metadataId = null, - roles = emptyList(), - pageSize = 1, - groups = listOf( - DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1m"), - Terms(sourceField = "store_and_fwd_flag", targetField = "flag") - ), - aggregations = AggregatorFactories.builder() - ).let { createTransform(it, it.id) } + val transform = + randomTransform().copy( + id = "test_stop_running_transform", + schemaVersion = 1L, + enabled = true, + jobSchedule = IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES), + updatedAt = Instant.now(), + enabledAt = Instant.now(), + description = "basic search test", + sourceIndex = "source_test_stop_running_transform", + targetIndex = "target_test_stop_running_transform", + metadataId = null, + roles = emptyList(), + pageSize = 1, + groups = + listOf( + DateHistogram(sourceField = "tpep_pickup_datetime", fixedInterval = "1m"), + Terms(sourceField = "store_and_fwd_flag", targetField = "flag"), + ), + aggregations = AggregatorFactories.builder(), + ).let { createTransform(it, it.id) } updateTransformStartTime(transform) diff --git a/src/test/kotlin/org/opensearch/indexmanagement/util/IndexUtilsTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/util/IndexUtilsTests.kt index dee9376c7..add9a191d 100644 --- a/src/test/kotlin/org/opensearch/indexmanagement/util/IndexUtilsTests.kt +++ b/src/test/kotlin/org/opensearch/indexmanagement/util/IndexUtilsTests.kt @@ -11,7 +11,6 @@ import org.opensearch.test.OpenSearchTestCase import kotlin.test.assertFailsWith class IndexUtilsTests : OpenSearchTestCase() { - fun `test get schema version`() { val message = "{\"user\":{ \"name\":\"test\"},\"_meta\":{\"schema_version\": 1}}" @@ -50,7 +49,8 @@ class IndexUtilsTests : OpenSearchTestCase() { } fun `test should update index without original version`() { - val indexContent = """ + val indexContent = + """ { "testIndex": { "settings": { @@ -69,7 +69,7 @@ class IndexUtilsTests : OpenSearchTestCase() { "aliases_version": 1 } } - """.trimIndent() + """.trimIndent() val parser = createParser(XContentType.JSON.xContent(), indexContent) val index: IndexMetadata = IndexMetadata.fromXContent(parser) @@ -79,7 +79,8 @@ class IndexUtilsTests : OpenSearchTestCase() { } fun `test should update index with lagged version`() { - val indexContent = """ + val indexContent = + """ { "testIndex": { "settings": { @@ -103,7 +104,7 @@ class IndexUtilsTests : OpenSearchTestCase() { "aliases_version": 1 } } - """.trimIndent() + """.trimIndent() val parser = createParser(XContentType.JSON.xContent(), indexContent) val index: IndexMetadata = IndexMetadata.fromXContent(parser) @@ -113,7 +114,8 @@ class IndexUtilsTests : OpenSearchTestCase() { } fun `test should update index with same version`() { - val indexContent = """ + val indexContent = + """ { "testIndex": { "settings": { @@ -137,7 +139,7 @@ class IndexUtilsTests : OpenSearchTestCase() { "aliases_version": 1 } } - """.trimIndent() + """.trimIndent() val parser = createParser(XContentType.JSON.xContent(), indexContent) val index: IndexMetadata = IndexMetadata.fromXContent(parser)
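
Note for reviewers: the hunks in this patch apply one mechanical pattern throughout the test sources: a multiline initializer is wrapped onto the line below the `=`, and the final argument of a multiline call or `listOf(...)` gets a trailing comma. A minimal sketch of that pattern, using a hypothetical `Example` data class rather than code from this repository:

    // Illustrative only; `Example`, `name`, and `size` are made-up names.
    data class Example(val name: String, val size: Int)

    // Before formatting, the constructor call stayed on the `=` line and the last
    // argument had no trailing comma. After formatting, the initializer moves below
    // the `=` and a trailing comma is added, so appending an argument later touches
    // only one line of the diff.
    val example =
        Example(
            name = "demo",
            size = 10,
        )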