diff --git a/src/main/java/org/opensearch/flowframework/model/Template.java b/src/main/java/org/opensearch/flowframework/model/Template.java index f4d2ae600..4c8c2c9ef 100644 --- a/src/main/java/org/opensearch/flowframework/model/Template.java +++ b/src/main/java/org/opensearch/flowframework/model/Template.java @@ -8,12 +8,12 @@ */ package org.opensearch.flowframework.model; -import org.apache.logging.log4j.util.Strings; import org.opensearch.Version; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.common.xcontent.yaml.YamlXContent; import org.opensearch.commons.authuser.User; +import org.opensearch.core.common.Strings; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContentObject; @@ -372,10 +372,10 @@ public static Template updateExistingTemplate(Template existingTemplate, Templat if (templateWithNewFields.name() != null) { builder.name(templateWithNewFields.name()); } - if (!Strings.isBlank(templateWithNewFields.description())) { + if (Strings.hasText(templateWithNewFields.description())) { builder.description(templateWithNewFields.description()); } - if (!Strings.isBlank(templateWithNewFields.useCase())) { + if (Strings.hasText(templateWithNewFields.useCase())) { builder.useCase(templateWithNewFields.useCase()); } if (templateWithNewFields.templateVersion() != null) { diff --git a/src/main/java/org/opensearch/flowframework/rest/RestCreateWorkflowAction.java b/src/main/java/org/opensearch/flowframework/rest/RestCreateWorkflowAction.java index 032b4b898..8acfab16a 100644 --- a/src/main/java/org/opensearch/flowframework/rest/RestCreateWorkflowAction.java +++ b/src/main/java/org/opensearch/flowframework/rest/RestCreateWorkflowAction.java @@ -138,6 +138,13 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli ); return processError(ffe, params, 
request); } + if (reprovision && !params.isEmpty()) { + FlowFrameworkException ffe = new FlowFrameworkException( + "Only the parameters " + request.consumedParams() + " are permitted unless the provision parameter is set to true.", + RestStatus.BAD_REQUEST + ); + return processError(ffe, params, request); + } try { Template template; Map useCaseDefaultsMap = Collections.emptyMap(); diff --git a/src/main/java/org/opensearch/flowframework/util/ParseUtils.java b/src/main/java/org/opensearch/flowframework/util/ParseUtils.java index e20b2ed3b..16e8b25e1 100644 --- a/src/main/java/org/opensearch/flowframework/util/ParseUtils.java +++ b/src/main/java/org/opensearch/flowframework/util/ParseUtils.java @@ -533,4 +533,21 @@ public static void flattenSettings(String prefix, Map settings, } } } + + /** + * Ensures index is prepended to flattened setting keys + * @param originalSettings the original settings map + * @return new map with keys prepended with index + */ + public static Map prependIndexToSettings(Map originalSettings) { + Map newSettings = new HashMap<>(); + originalSettings.entrySet().stream().forEach(x -> { + if (!x.getKey().startsWith("index.")) { + newSettings.put("index." + x.getKey(), x.getValue()); + } else { + newSettings.put(x.getKey(), x.getValue()); + } + }); + return newSettings; + } } diff --git a/src/main/java/org/opensearch/flowframework/workflow/UpdateIndexStep.java b/src/main/java/org/opensearch/flowframework/workflow/UpdateIndexStep.java index 9d35a32ce..719ef7237 100644 --- a/src/main/java/org/opensearch/flowframework/workflow/UpdateIndexStep.java +++ b/src/main/java/org/opensearch/flowframework/workflow/UpdateIndexStep.java @@ -113,7 +113,10 @@ public PlainActionFuture execute( if (updatedSettings.containsKey("index")) { ParseUtils.flattenSettings("", updatedSettings, flattenedSettings); } else { - flattenedSettings.putAll(updatedSettings); + // Create index setting configuration can be a mix of flattened or expanded settings + // prepend index. 
to ensure successful setting comparison + + flattenedSettings.putAll(ParseUtils.prependIndexToSettings(updatedSettings)); } Map filteredSettings = new HashMap<>(); @@ -133,35 +136,39 @@ public PlainActionFuture execute( filteredSettings.put(e.getKey(), e.getValue()); } } + + // Create and send the update settings request + updateSettingsRequest.settings(filteredSettings); + if (updateSettingsRequest.settings().size() == 0) { + String errorMessage = "Failed to update index settings for index " + + indexName + + ", no settings have been updated"; + updateIndexFuture.onFailure(new WorkflowStepException(errorMessage, RestStatus.BAD_REQUEST)); + } else { + client.admin().indices().updateSettings(updateSettingsRequest, ActionListener.wrap(acknowledgedResponse -> { + String resourceName = getResourceByWorkflowStep(getName()); + logger.info("Updated index settings for index {}", indexName); + updateIndexFuture.onResponse( + new WorkflowData(Map.of(resourceName, indexName), currentNodeInputs.getWorkflowId(), currentNodeId) + ); + + }, ex -> { + Exception e = getSafeException(ex); + String errorMessage = (e == null + ? "Failed to update the index settings for index " + indexName + : e.getMessage()); + logger.error(errorMessage, e); + updateIndexFuture.onFailure(new WorkflowStepException(errorMessage, ExceptionsHelper.status(e))); + })); + } }, ex -> { Exception e = getSafeException(ex); String errorMessage = (e == null ? 
"Failed to retrieve the index settings for index " + indexName : e.getMessage()); logger.error(errorMessage, e); updateIndexFuture.onFailure(new WorkflowStepException(errorMessage, ExceptionsHelper.status(e))); })); - - updateSettingsRequest.settings(filteredSettings); } } - - if (updateSettingsRequest.settings().size() == 0) { - String errorMessage = "Failed to update index settings for index " + indexName + ", no settings have been updated"; - throw new FlowFrameworkException(errorMessage, RestStatus.BAD_REQUEST); - } else { - client.admin().indices().updateSettings(updateSettingsRequest, ActionListener.wrap(acknowledgedResponse -> { - String resourceName = getResourceByWorkflowStep(getName()); - logger.info("Updated index settings for index {}", indexName); - updateIndexFuture.onResponse( - new WorkflowData(Map.of(resourceName, indexName), currentNodeInputs.getWorkflowId(), currentNodeId) - ); - - }, ex -> { - Exception e = getSafeException(ex); - String errorMessage = (e == null ? "Failed to update the index settings for index " + indexName : e.getMessage()); - logger.error(errorMessage, e); - updateIndexFuture.onFailure(new WorkflowStepException(errorMessage, ExceptionsHelper.status(e))); - })); - } } catch (Exception e) { updateIndexFuture.onFailure(new WorkflowStepException(e.getMessage(), ExceptionsHelper.status(e))); } diff --git a/src/test/java/org/opensearch/flowframework/FlowFrameworkRestTestCase.java b/src/test/java/org/opensearch/flowframework/FlowFrameworkRestTestCase.java index dc25f44fa..877b6292a 100644 --- a/src/test/java/org/opensearch/flowframework/FlowFrameworkRestTestCase.java +++ b/src/test/java/org/opensearch/flowframework/FlowFrameworkRestTestCase.java @@ -415,6 +415,25 @@ protected Response createWorkflowValidation(RestClient client, Template template return TestHelpers.makeRequest(client, "POST", WORKFLOW_URI, Collections.emptyMap(), template.toJson(), null); } + /** + * Helper method to invoke the Reprovision Workflow API + * @param 
client the rest client + * @param workflowId the document id + * @param template the template to reprovision + * @throws Exception if the request fails + * @return a rest response + */ + protected Response reprovisionWorkflow(RestClient client, String workflowId, Template template) throws Exception { + return TestHelpers.makeRequest( + client, + "PUT", + String.format(Locale.ROOT, "%s/%s?reprovision=true", WORKFLOW_URI, workflowId), + Collections.emptyMap(), + template.toJson(), + null + ); + } + /** * Helper method to invoke the Update Workflow API * @param client the rest client diff --git a/src/test/java/org/opensearch/flowframework/TestHelpers.java b/src/test/java/org/opensearch/flowframework/TestHelpers.java index 6c4f3534b..6d136f7a6 100644 --- a/src/test/java/org/opensearch/flowframework/TestHelpers.java +++ b/src/test/java/org/opensearch/flowframework/TestHelpers.java @@ -11,7 +11,6 @@ import org.apache.hc.core5.http.Header; import org.apache.hc.core5.http.HttpEntity; import org.apache.hc.core5.http.io.entity.StringEntity; -import org.apache.logging.log4j.util.Strings; import org.opensearch.client.Request; import org.opensearch.client.RequestOptions; import org.opensearch.client.Response; @@ -24,6 +23,7 @@ import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; import org.opensearch.commons.authuser.User; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.ToXContent; @@ -74,7 +74,7 @@ public static Response makeRequest( String jsonEntity, List
headers ) throws IOException { - HttpEntity httpEntity = Strings.isBlank(jsonEntity) ? null : new StringEntity(jsonEntity, APPLICATION_JSON); + HttpEntity httpEntity = !Strings.hasText(jsonEntity) ? null : new StringEntity(jsonEntity, APPLICATION_JSON); return makeRequest(client, method, endpoint, params, httpEntity, headers); } diff --git a/src/test/java/org/opensearch/flowframework/rest/FlowFrameworkRestApiIT.java b/src/test/java/org/opensearch/flowframework/rest/FlowFrameworkRestApiIT.java index 6a454dc75..d176adc3b 100644 --- a/src/test/java/org/opensearch/flowframework/rest/FlowFrameworkRestApiIT.java +++ b/src/test/java/org/opensearch/flowframework/rest/FlowFrameworkRestApiIT.java @@ -37,9 +37,7 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; import static org.opensearch.flowframework.common.CommonValue.CREATE_CONNECTOR_CREDENTIAL_KEY; @@ -48,15 +46,12 @@ import static org.opensearch.flowframework.common.CommonValue.WORKFLOW_ID; public class FlowFrameworkRestApiIT extends FlowFrameworkRestTestCase { - private static AtomicBoolean waitToStart = new AtomicBoolean(true); @Before public void waitToStart() throws Exception { // ML Commons cron job runs every 10 seconds and takes 20+ seconds to initialize .plugins-ml-config index - // Delay on the first attempt for 25 seconds to allow this initialization and prevent flaky tests - if (waitToStart.getAndSet(false)) { - CountDownLatch latch = new CountDownLatch(1); - latch.await(25, TimeUnit.SECONDS); + if (!indexExistsWithAdminClient(".plugins-ml-config")) { + assertBusy(() -> assertTrue(indexExistsWithAdminClient(".plugins-ml-config")), 40, TimeUnit.SECONDS); } } @@ -93,14 +88,7 @@ public void testFailedUpdateWorkflow() throws Exception { Map responseMap = entityAsMap(response); String workflowId = (String) 
responseMap.get(WORKFLOW_ID); - // Ensure Ml config index is initialized as creating a connector requires this, then hit Provision API and assert status - Response provisionResponse; - if (!indexExistsWithAdminClient(".plugins-ml-config")) { - assertBusy(() -> assertTrue(indexExistsWithAdminClient(".plugins-ml-config")), 40, TimeUnit.SECONDS); - provisionResponse = provisionWorkflow(client(), workflowId); - } else { - provisionResponse = provisionWorkflow(client(), workflowId); - } + Response provisionResponse = provisionWorkflow(client(), workflowId); assertEquals(RestStatus.OK, TestHelpers.restStatus(provisionResponse)); getAndAssertWorkflowStatus(client(), workflowId, State.PROVISIONING, ProvisioningProgress.IN_PROGRESS); @@ -122,14 +110,7 @@ public void testUpdateWorkflowUsingFields() throws Exception { Map responseMap = entityAsMap(response); String workflowId = (String) responseMap.get(WORKFLOW_ID); - // Ensure Ml config index is initialized as creating a connector requires this, then hit Provision API and assert status - Response provisionResponse; - if (!indexExistsWithAdminClient(".plugins-ml-config")) { - assertBusy(() -> assertTrue(indexExistsWithAdminClient(".plugins-ml-config")), 40, TimeUnit.SECONDS); - provisionResponse = provisionWorkflow(client(), workflowId); - } else { - provisionResponse = provisionWorkflow(client(), workflowId); - } + Response provisionResponse = provisionWorkflow(client(), workflowId); assertEquals(RestStatus.OK, TestHelpers.restStatus(provisionResponse)); getAndAssertWorkflowStatus(client(), workflowId, State.PROVISIONING, ProvisioningProgress.IN_PROGRESS); @@ -259,14 +240,7 @@ public void testCreateAndProvisionRemoteModelWorkflow() throws Exception { String workflowId = (String) responseMap.get(WORKFLOW_ID); getAndAssertWorkflowStatus(client(), workflowId, State.NOT_STARTED, ProvisioningProgress.NOT_STARTED); - // Ensure Ml config index is initialized as creating a connector requires this, then hit 
Provision API and assert status - if (!indexExistsWithAdminClient(".plugins-ml-config")) { - assertBusy(() -> assertTrue(indexExistsWithAdminClient(".plugins-ml-config")), 40, TimeUnit.SECONDS); - response = provisionWorkflow(client(), workflowId); - } else { - response = provisionWorkflow(client(), workflowId); - } - + response = provisionWorkflow(client(), workflowId); assertEquals(RestStatus.OK, TestHelpers.restStatus(response)); getAndAssertWorkflowStatus(client(), workflowId, State.PROVISIONING, ProvisioningProgress.IN_PROGRESS); @@ -294,13 +268,7 @@ public void testCreateAndProvisionAgentFrameworkWorkflow() throws Exception { Template template = TestHelpers.createTemplateFromFile("agent-framework.json"); // Hit Create Workflow API to create agent-framework template, with template validation check and provision parameter - Response response; - if (!indexExistsWithAdminClient(".plugins-ml-config")) { - assertBusy(() -> assertTrue(indexExistsWithAdminClient(".plugins-ml-config")), 40, TimeUnit.SECONDS); - response = createWorkflowWithProvision(client(), template); - } else { - response = createWorkflowWithProvision(client(), template); - } + Response response = createWorkflowWithProvision(client(), template); assertEquals(RestStatus.CREATED, TestHelpers.restStatus(response)); Map responseMap = entityAsMap(response); String workflowId = (String) responseMap.get(WORKFLOW_ID); @@ -363,6 +331,233 @@ public void testCreateAndProvisionAgentFrameworkWorkflow() throws Exception { assertBusy(() -> { getAndAssertWorkflowStatusNotFound(client(), workflowId); }, 30, TimeUnit.SECONDS); } + public void testReprovisionWorkflow() throws Exception { + // Begin with a template to register a local pretrained model + Template template = TestHelpers.createTemplateFromFile("registerremotemodel.json"); + + Response response = createWorkflowWithProvision(client(), template); + assertEquals(RestStatus.CREATED, TestHelpers.restStatus(response)); + Map responseMap = entityAsMap(response); 
+ String workflowId = (String) responseMap.get(WORKFLOW_ID); + // wait and ensure state is completed/done + assertBusy( + () -> { getAndAssertWorkflowStatus(client(), workflowId, State.COMPLETED, ProvisioningProgress.DONE); }, + 120, + TimeUnit.SECONDS + ); + + // Wait until provisioning has completed successfully before attempting to retrieve created resources + List resourcesCreated = getResourcesCreated(client(), workflowId, 30); + assertEquals(3, resourcesCreated.size()); + Map resourceMap = resourcesCreated.stream() + .collect(Collectors.toMap(ResourceCreated::workflowStepName, r -> r)); + assertTrue(resourceMap.containsKey("create_connector")); + assertTrue(resourceMap.containsKey("register_remote_model")); + + // Reprovision template to add ingest pipeline which uses the model ID + template = TestHelpers.createTemplateFromFile("registerremotemodel-ingestpipeline.json"); + response = reprovisionWorkflow(client(), workflowId, template); + assertEquals(RestStatus.CREATED, TestHelpers.restStatus(response)); + + resourcesCreated = getResourcesCreated(client(), workflowId, 30); + assertEquals(4, resourcesCreated.size()); + resourceMap = resourcesCreated.stream().collect(Collectors.toMap(ResourceCreated::workflowStepName, r -> r)); + assertTrue(resourceMap.containsKey("create_connector")); + assertTrue(resourceMap.containsKey("register_remote_model")); + assertTrue(resourceMap.containsKey("create_ingest_pipeline")); + + // Retrieve pipeline by ID to ensure model ID is set correctly + String modelId = resourceMap.get("register_remote_model").resourceId(); + String pipelineId = resourceMap.get("create_ingest_pipeline").resourceId(); + GetPipelineResponse getPipelineResponse = getPipelines(pipelineId); + assertEquals(1, getPipelineResponse.pipelines().size()); + assertTrue(getPipelineResponse.pipelines().get(0).getConfigAsMap().toString().contains(modelId)); + + // Reprovision template to add index which uses default ingest pipeline + template = 
TestHelpers.createTemplateFromFile("registerremotemodel-ingestpipeline-createindex.json"); + response = reprovisionWorkflow(client(), workflowId, template); + assertEquals(RestStatus.CREATED, TestHelpers.restStatus(response)); + + resourcesCreated = getResourcesCreated(client(), workflowId, 30); + assertEquals(5, resourcesCreated.size()); + resourceMap = resourcesCreated.stream().collect(Collectors.toMap(ResourceCreated::workflowStepName, r -> r)); + assertTrue(resourceMap.containsKey("create_connector")); + assertTrue(resourceMap.containsKey("register_remote_model")); + assertTrue(resourceMap.containsKey("create_ingest_pipeline")); + assertTrue(resourceMap.containsKey("create_index")); + + // Retrieve index settings to ensure pipeline ID is set correctly + String indexName = resourceMap.get("create_index").resourceId(); + Map indexSettings = getIndexSettingsAsMap(indexName); + assertEquals(pipelineId, indexSettings.get("index.default_pipeline")); + + // Reprovision template to remove default ingest pipeline + template = TestHelpers.createTemplateFromFile("registerremotemodel-ingestpipeline-updateindex.json"); + response = reprovisionWorkflow(client(), workflowId, template); + assertEquals(RestStatus.CREATED, TestHelpers.restStatus(response)); + + resourcesCreated = getResourcesCreated(client(), workflowId, 30); + // resource count should remain unchanged when updating an existing node + assertEquals(5, resourcesCreated.size()); + + // Retrieve index settings to ensure default pipeline has been updated correctly + indexSettings = getIndexSettingsAsMap(indexName); + assertEquals("_none", indexSettings.get("index.default_pipeline")); + + // Deprovision and delete all resources + Response deprovisionResponse = deprovisionWorkflowWithAllowDelete(client(), workflowId, pipelineId + "," + indexName); + assertBusy( + () -> { getAndAssertWorkflowStatus(client(), workflowId, State.NOT_STARTED, ProvisioningProgress.NOT_STARTED); }, + 60, + TimeUnit.SECONDS + ); + 
assertEquals(RestStatus.OK, TestHelpers.restStatus(deprovisionResponse)); + + // Hit Delete API + Response deleteResponse = deleteWorkflow(client(), workflowId); + assertEquals(RestStatus.OK, TestHelpers.restStatus(deleteResponse)); + } + + public void testReprovisionWorkflowMidNodeAddition() throws Exception { + // Begin with a template to register a local pretrained model and create an index, no edges + Template template = TestHelpers.createTemplateFromFile("registerremotemodel-createindex.json"); + + Response response = createWorkflowWithProvision(client(), template); + assertEquals(RestStatus.CREATED, TestHelpers.restStatus(response)); + Map responseMap = entityAsMap(response); + String workflowId = (String) responseMap.get(WORKFLOW_ID); + // wait and ensure state is completed/done + assertBusy( + () -> { getAndAssertWorkflowStatus(client(), workflowId, State.COMPLETED, ProvisioningProgress.DONE); }, + 120, + TimeUnit.SECONDS + ); + + // Wait until provisioning has completed successfully before attempting to retrieve created resources + List resourcesCreated = getResourcesCreated(client(), workflowId, 30); + assertEquals(4, resourcesCreated.size()); + Map resourceMap = resourcesCreated.stream() + .collect(Collectors.toMap(ResourceCreated::workflowStepName, r -> r)); + assertTrue(resourceMap.containsKey("create_connector")); + assertTrue(resourceMap.containsKey("register_remote_model")); + assertTrue(resourceMap.containsKey("create_index")); + + // Reprovision template to add ingest pipeline which uses the model ID + template = TestHelpers.createTemplateFromFile("registerremotemodel-ingestpipeline-createindex.json"); + response = reprovisionWorkflow(client(), workflowId, template); + assertEquals(RestStatus.CREATED, TestHelpers.restStatus(response)); + + resourcesCreated = getResourcesCreated(client(), workflowId, 30); + assertEquals(5, resourcesCreated.size()); + resourceMap = resourcesCreated.stream().collect(Collectors.toMap(ResourceCreated::workflowStepName, 
r -> r)); + assertTrue(resourceMap.containsKey("create_connector")); + assertTrue(resourceMap.containsKey("register_remote_model")); + assertTrue(resourceMap.containsKey("create_ingest_pipeline")); + assertTrue(resourceMap.containsKey("create_index")); + + // Ensure ingest pipeline configuration contains the model id and index settings have the ingest pipeline as default + String modelId = resourceMap.get("register_remote_model").resourceId(); + String pipelineId = resourceMap.get("create_ingest_pipeline").resourceId(); + GetPipelineResponse getPipelineResponse = getPipelines(pipelineId); + assertEquals(1, getPipelineResponse.pipelines().size()); + assertTrue(getPipelineResponse.pipelines().get(0).getConfigAsMap().toString().contains(modelId)); + + String indexName = resourceMap.get("create_index").resourceId(); + Map indexSettings = getIndexSettingsAsMap(indexName); + assertEquals(pipelineId, indexSettings.get("index.default_pipeline")); + + // Deprovision and delete all resources + Response deprovisionResponse = deprovisionWorkflowWithAllowDelete(client(), workflowId, pipelineId + "," + indexName); + assertBusy( + () -> { getAndAssertWorkflowStatus(client(), workflowId, State.NOT_STARTED, ProvisioningProgress.NOT_STARTED); }, + 60, + TimeUnit.SECONDS + ); + assertEquals(RestStatus.OK, TestHelpers.restStatus(deprovisionResponse)); + + // Hit Delete API + Response deleteResponse = deleteWorkflow(client(), workflowId); + assertEquals(RestStatus.OK, TestHelpers.restStatus(deleteResponse)); + } + + public void testReprovisionWithNoChange() throws Exception { + Template template = TestHelpers.createTemplateFromFile("registerremotemodel-ingestpipeline-createindex.json"); + + Response response = createWorkflowWithProvision(client(), template); + assertEquals(RestStatus.CREATED, TestHelpers.restStatus(response)); + Map responseMap = entityAsMap(response); + String workflowId = (String) responseMap.get(WORKFLOW_ID); + // wait and ensure state is completed/done + 
assertBusy( + () -> { getAndAssertWorkflowStatus(client(), workflowId, State.COMPLETED, ProvisioningProgress.DONE); }, + 120, + TimeUnit.SECONDS + ); + + // Attempt to reprovision the same template with no changes + ResponseException exception = expectThrows(ResponseException.class, () -> reprovisionWorkflow(client(), workflowId, template)); + assertEquals(RestStatus.BAD_REQUEST.getStatus(), exception.getResponse().getStatusLine().getStatusCode()); + assertTrue(exception.getMessage().contains("Template does not contain any modifications")); + + // Deprovision and delete all resources + Response deprovisionResponse = deprovisionWorkflowWithAllowDelete( + client(), + workflowId, + "nlp-ingest-pipeline" + "," + "my-nlp-index" + ); + assertBusy( + () -> { getAndAssertWorkflowStatus(client(), workflowId, State.NOT_STARTED, ProvisioningProgress.NOT_STARTED); }, + 60, + TimeUnit.SECONDS + ); + assertEquals(RestStatus.OK, TestHelpers.restStatus(deprovisionResponse)); + + // Hit Delete API + Response deleteResponse = deleteWorkflow(client(), workflowId); + assertEquals(RestStatus.OK, TestHelpers.restStatus(deleteResponse)); + } + + public void testReprovisionWithDeletion() throws Exception { + Template template = TestHelpers.createTemplateFromFile("registerremotemodel-ingestpipeline-createindex.json"); + + Response response = createWorkflowWithProvision(client(), template); + assertEquals(RestStatus.CREATED, TestHelpers.restStatus(response)); + Map responseMap = entityAsMap(response); + String workflowId = (String) responseMap.get(WORKFLOW_ID); + // wait and ensure state is completed/done + assertBusy( + () -> { getAndAssertWorkflowStatus(client(), workflowId, State.COMPLETED, ProvisioningProgress.DONE); }, + 120, + TimeUnit.SECONDS + ); + + // Attempt to reprovision template without ingest pipeline node + Template templateWithoutIngestPipeline = TestHelpers.createTemplateFromFile("registerremotemodel-createindex.json"); + ResponseException exception = expectThrows( + 
ResponseException.class, + () -> reprovisionWorkflow(client(), workflowId, templateWithoutIngestPipeline) + ); + assertEquals(RestStatus.BAD_REQUEST.getStatus(), exception.getResponse().getStatusLine().getStatusCode()); + assertTrue(exception.getMessage().contains("Workflow Step deletion is not supported when reprovisioning a template.")); + + // Deprovision and delete all resources + Response deprovisionResponse = deprovisionWorkflowWithAllowDelete( + client(), + workflowId, + "nlp-ingest-pipeline" + "," + "my-nlp-index" + ); + assertBusy( + () -> { getAndAssertWorkflowStatus(client(), workflowId, State.NOT_STARTED, ProvisioningProgress.NOT_STARTED); }, + 60, + TimeUnit.SECONDS + ); + assertEquals(RestStatus.OK, TestHelpers.restStatus(deprovisionResponse)); + + // Hit Delete API + Response deleteResponse = deleteWorkflow(client(), workflowId); + assertEquals(RestStatus.OK, TestHelpers.restStatus(deleteResponse)); + } + public void testTimestamps() throws Exception { Template noopTemplate = TestHelpers.createTemplateFromFile("noop.json"); // Create the template, should have created and updated matching @@ -421,14 +616,7 @@ public void testCreateAndProvisionIngestAndSearchPipeline() throws Exception { String workflowId = (String) responseMap.get(WORKFLOW_ID); getAndAssertWorkflowStatus(client(), workflowId, State.NOT_STARTED, ProvisioningProgress.NOT_STARTED); - // Ensure Ml config index is initialized as creating a connector requires this, then hit Provision API and assert status - if (!indexExistsWithAdminClient(".plugins-ml-config")) { - assertBusy(() -> assertTrue(indexExistsWithAdminClient(".plugins-ml-config")), 40, TimeUnit.SECONDS); - response = provisionWorkflow(client(), workflowId); - } else { - response = provisionWorkflow(client(), workflowId); - } - + response = provisionWorkflow(client(), workflowId); assertEquals(RestStatus.OK, TestHelpers.restStatus(response)); getAndAssertWorkflowStatus(client(), workflowId, State.PROVISIONING, 
ProvisioningProgress.IN_PROGRESS); @@ -475,14 +663,7 @@ public void testDefaultCohereUseCase() throws Exception { String workflowId = (String) responseMap.get(WORKFLOW_ID); getAndAssertWorkflowStatus(client(), workflowId, State.NOT_STARTED, ProvisioningProgress.NOT_STARTED); - // Ensure Ml config index is initialized as creating a connector requires this, then hit Provision API and assert status - if (!indexExistsWithAdminClient(".plugins-ml-config")) { - assertBusy(() -> assertTrue(indexExistsWithAdminClient(".plugins-ml-config")), 40, TimeUnit.SECONDS); - response = provisionWorkflow(client(), workflowId); - } else { - response = provisionWorkflow(client(), workflowId); - } - + response = provisionWorkflow(client(), workflowId); assertEquals(RestStatus.OK, TestHelpers.restStatus(response)); getAndAssertWorkflowStatus(client(), workflowId, State.PROVISIONING, ProvisioningProgress.IN_PROGRESS); @@ -526,14 +707,7 @@ public void testDefaultSemanticSearchUseCaseWithFailureExpected() throws Excepti String workflowId = (String) responseMap.get(WORKFLOW_ID); getAndAssertWorkflowStatus(client(), workflowId, State.NOT_STARTED, ProvisioningProgress.NOT_STARTED); - // Ensure Ml config index is initialized as creating a connector requires this, then hit Provision API and assert status - if (!indexExistsWithAdminClient(".plugins-ml-config")) { - assertBusy(() -> assertTrue(indexExistsWithAdminClient(".plugins-ml-config")), 40, TimeUnit.SECONDS); - response = provisionWorkflow(client(), workflowId); - } else { - response = provisionWorkflow(client(), workflowId); - } - + response = provisionWorkflow(client(), workflowId); assertEquals(RestStatus.OK, TestHelpers.restStatus(response)); getAndAssertWorkflowStatus(client(), workflowId, State.FAILED, ProvisioningProgress.FAILED); } diff --git a/src/test/java/org/opensearch/flowframework/rest/RestCreateWorkflowActionTests.java b/src/test/java/org/opensearch/flowframework/rest/RestCreateWorkflowActionTests.java index 
e4f22e947..f6b1a5fc7 100644 --- a/src/test/java/org/opensearch/flowframework/rest/RestCreateWorkflowActionTests.java +++ b/src/test/java/org/opensearch/flowframework/rest/RestCreateWorkflowActionTests.java @@ -178,6 +178,20 @@ public void testCreateWorkflowRequestWithCreateAndReprovision() throws Exception ); } + public void testCreateWorkflowRequestWithReprovisionAndSubstitutionParams() throws Exception { + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) + .withPath(this.createWorkflowPath) + .withParams(Map.ofEntries(Map.entry(REPROVISION_WORKFLOW, "true"), Map.entry("open_ai_key", "1234"))) + .withContent(new BytesArray(validTemplate), MediaTypeRegistry.JSON) + .build(); + FakeRestChannel channel = new FakeRestChannel(request, false, 1); + createWorkflowRestAction.handleRequest(request, channel, nodeClient); + assertEquals(RestStatus.BAD_REQUEST, channel.capturedResponse().status()); + assertTrue( + channel.capturedResponse().content().utf8ToString().contains("are permitted unless the provision parameter is set to true.") + ); + } + public void testCreateWorkflowRequestWithUpdateAndParams() throws Exception { RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) .withPath(this.createWorkflowPath) diff --git a/src/test/java/org/opensearch/flowframework/util/ParseUtilsTests.java b/src/test/java/org/opensearch/flowframework/util/ParseUtilsTests.java index 1cdb0c50e..8237a7a93 100644 --- a/src/test/java/org/opensearch/flowframework/util/ParseUtilsTests.java +++ b/src/test/java/org/opensearch/flowframework/util/ParseUtilsTests.java @@ -330,4 +330,21 @@ public void testFlattenSettings() throws Exception { assertTrue(flattenedSettings.entrySet().stream().allMatch(x -> x.getKey().startsWith("index."))); } + + public void testPrependIndexToSettings() throws Exception { + + Map indexSettingsMap = Map.ofEntries( + Map.entry("knn", "true"), + 
Map.entry("number_of_shards", "2"), + Map.entry("number_of_replicas", "1"), + Map.entry("index.default_pipeline", "_none"), + Map.entry("search", Map.of("default_pipeline", "_none")) + ); + Map prependedSettings = ParseUtils.prependIndexToSettings(indexSettingsMap); + assertEquals(5, prependedSettings.size()); + + // every setting should start with index + assertTrue(prependedSettings.entrySet().stream().allMatch(x -> x.getKey().startsWith("index."))); + + } } diff --git a/src/test/java/org/opensearch/flowframework/workflow/UpdateIndexStepTests.java b/src/test/java/org/opensearch/flowframework/workflow/UpdateIndexStepTests.java index 7dade5607..e4ea939ea 100644 --- a/src/test/java/org/opensearch/flowframework/workflow/UpdateIndexStepTests.java +++ b/src/test/java/org/opensearch/flowframework/workflow/UpdateIndexStepTests.java @@ -12,6 +12,7 @@ import org.opensearch.action.admin.indices.settings.get.GetSettingsResponse; import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.opensearch.action.support.PlainActionFuture; +import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.client.AdminClient; import org.opensearch.client.Client; import org.opensearch.client.IndicesAdminClient; @@ -78,6 +79,12 @@ public void testUpdateIndexStepWithUpdatedSettings() throws ExecutionException, return null; }).when(indicesAdminClient).getSettings(any(), any()); + doAnswer(invocation -> { + ActionListener ackResponseListener = invocation.getArgument(1); + ackResponseListener.onResponse(new AcknowledgedResponse(true)); + return null; + }).when(indicesAdminClient).updateSettings(any(), any()); + // validate update settings request content @SuppressWarnings({ "unchecked" }) ArgumentCaptor updateSettingsRequestCaptor = ArgumentCaptor.forClass(UpdateSettingsRequest.class); @@ -105,6 +112,63 @@ public void testUpdateIndexStepWithUpdatedSettings() throws ExecutionException, assertEquals(2, settingsToUpdate.size()); 
assertEquals("_none", settingsToUpdate.get("index.default_pipeline")); assertEquals("_none", settingsToUpdate.get("index.search.default_pipeline")); + + assertTrue(future.isDone()); + WorkflowData returnedData = (WorkflowData) future.get(); + assertEquals(Map.ofEntries(Map.entry(INDEX_NAME, indexName)), returnedData.getContent()); + assertEquals(data.getWorkflowId(), returnedData.getWorkflowId()); + assertEquals(data.getNodeId(), returnedData.getNodeId()); + } + + public void testFailedToUpdateIndexSettings() throws ExecutionException, InterruptedException, IOException { + + UpdateIndexStep updateIndexStep = new UpdateIndexStep(client); + + String indexName = "test-index"; + + // Create existing settings for default pipelines + Settings.Builder builder = Settings.builder(); + builder.put("index.number_of_shards", 2); + builder.put("index.number_of_replicas", 1); + builder.put("index.knn", true); + builder.put("index.default_pipeline", "ingest_pipeline_id"); + builder.put("index.search.default_pipeline", "search_pipeline_id"); + Map indexToSettings = new HashMap<>(); + indexToSettings.put(indexName, builder.build()); + + // Stub get index settings request/response + doAnswer(invocation -> { + ActionListener getSettingsResponseListener = invocation.getArgument(1); + getSettingsResponseListener.onResponse(new GetSettingsResponse(indexToSettings, indexToSettings)); + return null; + }).when(indicesAdminClient).getSettings(any(), any()); + + doAnswer(invocation -> { + ActionListener ackResponseListener = invocation.getArgument(1); + ackResponseListener.onFailure(new Exception("")); + return null; + }).when(indicesAdminClient).updateSettings(any(), any()); + + // Configurations has updated search/ingest pipeline default values of _none + String configurations = + 
"{\"settings\":{\"index\":{\"knn\":true,\"number_of_shards\":2,\"number_of_replicas\":1,\"default_pipeline\":\"_none\",\"search\":{\"default_pipeline\":\"_none\"}}},\"mappings\":{\"properties\":{\"age\":{\"type\":\"integer\"}}},\"aliases\":{\"sample-alias1\":{}}}"; + WorkflowData data = new WorkflowData( + Map.ofEntries(Map.entry(INDEX_NAME, indexName), Map.entry(CONFIGURATIONS, configurations)), + "test-id", + "test-node-id" + ); + PlainActionFuture future = updateIndexStep.execute( + data.getNodeId(), + data, + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap() + ); + + assertTrue(future.isDone()); + ExecutionException exception = assertThrows(ExecutionException.class, () -> future.get()); + assertTrue(exception.getCause() instanceof Exception); + assertEquals("Failed to update the index settings for index test-index", exception.getCause().getMessage()); } public void testMissingSettings() throws InterruptedException { @@ -136,6 +200,55 @@ public void testMissingSettings() throws InterruptedException { ); } + public void testUpdateMixedSettings() throws InterruptedException { + UpdateIndexStep updateIndexStep = new UpdateIndexStep(client); + + String indexName = "test-index"; + + // Create existing settings for default pipelines + Settings.Builder builder = Settings.builder(); + builder.put("index.number_of_shards", 2); + builder.put("index.number_of_replicas", 1); + builder.put("index.knn", true); + builder.put("index.default_pipeline", "ingest_pipeline_id"); + Map indexToSettings = new HashMap<>(); + indexToSettings.put(indexName, builder.build()); + + // Stub get index settings request/response + doAnswer(invocation -> { + ActionListener getSettingsResponseListener = invocation.getArgument(1); + getSettingsResponseListener.onResponse(new GetSettingsResponse(indexToSettings, indexToSettings)); + return null; + }).when(indicesAdminClient).getSettings(any(), any()); + + // validate update settings request content + @SuppressWarnings({ 
"unchecked" }) + ArgumentCaptor updateSettingsRequestCaptor = ArgumentCaptor.forClass(UpdateSettingsRequest.class); + + // Configurations has updated ingest pipeline default values of _none. Settings have regular and full names + String configurations = + "{\"settings\":{\"index.knn\":true,\"default_pipeline\":\"_none\",\"index.number_of_shards\":2,\"index.number_of_replicas\":1},\"mappings\":{\"properties\":{\"age\":{\"type\":\"integer\"}}},\"aliases\":{\"sample-alias1\":{}}}"; + WorkflowData data = new WorkflowData( + Map.ofEntries(Map.entry(INDEX_NAME, indexName), Map.entry(CONFIGURATIONS, configurations)), + "test-id", + "test-node-id" + ); + PlainActionFuture future = updateIndexStep.execute( + data.getNodeId(), + data, + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap() + ); + + verify(indicesAdminClient, times(1)).getSettings(any(GetSettingsRequest.class), any()); + verify(indicesAdminClient, times(1)).updateSettings(updateSettingsRequestCaptor.capture(), any()); + + Settings settingsToUpdate = updateSettingsRequestCaptor.getValue().settings(); + assertEquals(1, settingsToUpdate.size()); + assertEquals("_none", settingsToUpdate.get("index.default_pipeline")); + } + public void testEmptyConfiguration() throws InterruptedException { UpdateIndexStep updateIndexStep = new UpdateIndexStep(client); diff --git a/src/test/resources/template/registerremotemodel-createindex.json b/src/test/resources/template/registerremotemodel-createindex.json new file mode 100644 index 000000000..3005eeed1 --- /dev/null +++ b/src/test/resources/template/registerremotemodel-createindex.json @@ -0,0 +1,84 @@ +{ + "name": "semantic search with local pretrained model", + "description": "Setting up semantic search, with a local pretrained embedding model", + "use_case": "SEMANTIC_SEARCH", + "version": { + "template": "1.0.0", + "compatibility": [ + "2.12.0", + "3.0.0" + ] + }, + "workflows": { + "provision": { + "nodes": [ + { + "id": "create_openai_connector", + 
"type": "create_connector", + "user_inputs": { + "name": "OpenAI Chat Connector", + "description": "The connector to public OpenAI model service for text embedding model", + "version": "1", + "protocol": "http", + "parameters": { + "endpoint": "api.openai.com", + "model": "gpt-3.5-turbo", + "response_filter": "$.choices[0].message.content" + }, + "credential": { + "openAI_key": "12345" + }, + "actions": [ + { + "action_type": "predict", + "method": "POST", + "url": "https://${parameters.endpoint}/v1/chat/completions" + } + ] + } + }, + { + "id": "register_openai_model", + "type": "register_remote_model", + "previous_node_inputs": { + "create_openai_connector": "connector_id" + }, + "user_inputs": { + "name": "openAI-gpt-3.5-turbo", + "deploy": true + } + }, + { + "id": "create_index", + "type": "create_index", + "user_inputs": { + "index_name": "my-nlp-index", + "configurations": { + "settings": { + "index.knn": true, + "index.number_of_shards": "2" + }, + "mappings": { + "properties": { + "passage_embedding": { + "type": "knn_vector", + "dimension": "768", + "method": { + "engine": "lucene", + "space_type": "l2", + "name": "hnsw", + "parameters": {} + } + }, + "passage_text": { + "type": "text" + } + } + } + } + } + } + ] + } + } + } diff --git a/src/test/resources/template/registerremotemodel-ingestpipeline-createindex.json b/src/test/resources/template/registerremotemodel-ingestpipeline-createindex.json new file mode 100644 index 000000000..767da07b6 --- /dev/null +++ b/src/test/resources/template/registerremotemodel-ingestpipeline-createindex.json @@ -0,0 +1,111 @@ +{ + "name": "semantic search with local pretrained model", + "description": "Setting up semantic search, with a local pretrained embedding model", + "use_case": "SEMANTIC_SEARCH", + "version": { + "template": "1.0.0", + "compatibility": [ + "2.12.0", + "3.0.0" + ] + }, + "workflows": { + "provision": { + "nodes": [ + { + "id": "create_openai_connector", + "type": "create_connector", + "user_inputs": 
{ + "name": "OpenAI Chat Connector", + "description": "The connector to public OpenAI model service for text embedding model", + "version": "1", + "protocol": "http", + "parameters": { + "endpoint": "api.openai.com", + "model": "gpt-3.5-turbo", + "response_filter": "$.choices[0].message.content" + }, + "credential": { + "openAI_key": "12345" + }, + "actions": [ + { + "action_type": "predict", + "method": "POST", + "url": "https://${parameters.endpoint}/v1/chat/completions" + } + ] + } + }, + { + "id": "register_openai_model", + "type": "register_remote_model", + "previous_node_inputs": { + "create_openai_connector": "connector_id" + }, + "user_inputs": { + "name": "openAI-gpt-3.5-turbo", + "deploy": true + } + }, + { + "id": "create_ingest_pipeline", + "type": "create_ingest_pipeline", + "previous_node_inputs": { + "register_openai_model": "model_id" + }, + "user_inputs": { + "pipeline_id": "nlp-ingest-pipeline", + "configurations": { + "description": "A text embedding pipeline", + "processors": [ + { + "text_embedding": { + "model_id": "${{register_openai_model.model_id}}", + "field_map": { + "passage_text": "passage_embedding" + } + } + } + ] + } + } + }, + { + "id": "create_index", + "type": "create_index", + "previous_node_inputs": { + "create_ingest_pipeline": "pipeline_id" + }, + "user_inputs": { + "index_name": "my-nlp-index", + "configurations": { + "settings": { + "index.knn": true, + "default_pipeline": "${{create_ingest_pipeline.pipeline_id}}", + "index.number_of_shards": "2" + }, + "mappings": { + "properties": { + "passage_embedding": { + "type": "knn_vector", + "dimension": "768", + "method": { + "engine": "lucene", + "space_type": "l2", + "name": "hnsw", + "parameters": {} + } + }, + "passage_text": { + "type": "text" + } + } + } + } + } + } + ] + } + } +} diff --git a/src/test/resources/template/registerremotemodel-ingestpipeline-updateindex.json b/src/test/resources/template/registerremotemodel-ingestpipeline-updateindex.json new file mode 100644 
index 000000000..fc873ae66 --- /dev/null +++ b/src/test/resources/template/registerremotemodel-ingestpipeline-updateindex.json @@ -0,0 +1,111 @@ +{ + "name": "semantic search with local pretrained model", + "description": "Setting up semantic search, with a local pretrained embedding model", + "use_case": "SEMANTIC_SEARCH", + "version": { + "template": "1.0.0", + "compatibility": [ + "2.12.0", + "3.0.0" + ] + }, + "workflows": { + "provision": { + "nodes": [ + { + "id": "create_openai_connector", + "type": "create_connector", + "user_inputs": { + "name": "OpenAI Chat Connector", + "description": "The connector to public OpenAI model service for text embedding model", + "version": "1", + "protocol": "http", + "parameters": { + "endpoint": "api.openai.com", + "model": "gpt-3.5-turbo", + "response_filter": "$.choices[0].message.content" + }, + "credential": { + "openAI_key": "12345" + }, + "actions": [ + { + "action_type": "predict", + "method": "POST", + "url": "https://${parameters.endpoint}/v1/chat/completions" + } + ] + } + }, + { + "id": "register_openai_model", + "type": "register_remote_model", + "previous_node_inputs": { + "create_openai_connector": "connector_id" + }, + "user_inputs": { + "name": "openAI-gpt-3.5-turbo", + "deploy": true + } + }, + { + "id": "create_ingest_pipeline", + "type": "create_ingest_pipeline", + "previous_node_inputs": { + "register_openai_model": "model_id" + }, + "user_inputs": { + "pipeline_id": "nlp-ingest-pipeline", + "configurations": { + "description": "A text embedding pipeline", + "processors": [ + { + "text_embedding": { + "model_id": "${{register_openai_model.model_id}}", + "field_map": { + "passage_text": "passage_embedding" + } + } + } + ] + } + } + }, + { + "id": "create_index", + "type": "create_index", + "previous_node_inputs": { + "create_ingest_pipeline": "pipeline_id" + }, + "user_inputs": { + "index_name": "my-nlp-index", + "configurations": { + "settings": { + "index.knn": true, + "default_pipeline": "_none", + 
"index.number_of_shards": "2" + }, + "mappings": { + "properties": { + "passage_embedding": { + "type": "knn_vector", + "dimension": "768", + "method": { + "engine": "lucene", + "space_type": "l2", + "name": "hnsw", + "parameters": {} + } + }, + "passage_text": { + "type": "text" + } + } + } + } + } + } + ] + } + } +} diff --git a/src/test/resources/template/registerremotemodel-ingestpipeline.json b/src/test/resources/template/registerremotemodel-ingestpipeline.json new file mode 100644 index 000000000..dede163c1 --- /dev/null +++ b/src/test/resources/template/registerremotemodel-ingestpipeline.json @@ -0,0 +1,77 @@ +{ + "name": "semantic search with local pretrained model", + "description": "Setting up semantic search, with a local pretrained embedding model", + "use_case": "SEMANTIC_SEARCH", + "version": { + "template": "1.0.0", + "compatibility": [ + "2.12.0", + "3.0.0" + ] + }, + "workflows": { + "provision": { + "nodes": [ + { + "id": "create_openai_connector", + "type": "create_connector", + "user_inputs": { + "name": "OpenAI Chat Connector", + "description": "The connector to public OpenAI model service for text embedding model", + "version": "1", + "protocol": "http", + "parameters": { + "endpoint": "api.openai.com", + "model": "gpt-3.5-turbo", + "response_filter": "$.choices[0].message.content" + }, + "credential": { + "openAI_key": "12345" + }, + "actions": [ + { + "action_type": "predict", + "method": "POST", + "url": "https://${parameters.endpoint}/v1/chat/completions" + } + ] + } + }, + { + "id": "register_openai_model", + "type": "register_remote_model", + "previous_node_inputs": { + "create_openai_connector": "connector_id" + }, + "user_inputs": { + "name": "openAI-gpt-3.5-turbo", + "deploy": true + } + }, + { + "id": "create_ingest_pipeline", + "type": "create_ingest_pipeline", + "previous_node_inputs": { + "register_openai_model": "model_id" + }, + "user_inputs": { + "pipeline_id": "nlp-ingest-pipeline", + "configurations": { + "description": "A 
text embedding pipeline", + "processors": [ + { + "text_embedding": { + "model_id": "${{register_openai_model.model_id}}", + "field_map": { + "passage_text": "passage_embedding" + } + } + } + ] + } + } + } + ] + } + } +} diff --git a/src/test/resources/template/registerremotemodel.json b/src/test/resources/template/registerremotemodel.json new file mode 100644 index 000000000..58c520af4 --- /dev/null +++ b/src/test/resources/template/registerremotemodel.json @@ -0,0 +1,54 @@ +{ + "name": "semantic search with local pretrained model", + "description": "Setting up semantic search, with a local pretrained embedding model", + "use_case": "SEMANTIC_SEARCH", + "version": { + "template": "1.0.0", + "compatibility": [ + "2.12.0", + "3.0.0" + ] + }, + "workflows": { + "provision": { + "nodes": [ + { + "id": "create_openai_connector", + "type": "create_connector", + "user_inputs": { + "name": "OpenAI Chat Connector", + "description": "The connector to public OpenAI model service for text embedding model", + "version": "1", + "protocol": "http", + "parameters": { + "endpoint": "api.openai.com", + "model": "gpt-3.5-turbo", + "response_filter": "$.choices[0].message.content" + }, + "credential": { + "openAI_key": "12345" + }, + "actions": [ + { + "action_type": "predict", + "method": "POST", + "url": "https://${parameters.endpoint}/v1/chat/completions" + } + ] + } + }, + { + "id": "register_openai_model", + "type": "register_remote_model", + "previous_node_inputs": { + "create_openai_connector": "connector_id" + }, + "user_inputs": { + "name": "openAI-gpt-3.5-turbo", + "deploy": true + } + } + ] + } + } + }