diff --git a/hack/images/jinja2/README.md b/hack/images/jinja2/README.md index c08c0a4f3..4b2ece47e 100644 --- a/hack/images/jinja2/README.md +++ b/hack/images/jinja2/README.md @@ -59,6 +59,14 @@ postgres Prefix for db was removed. This is Jinja limitation. It shouldn't be a big problem as long as there is no need to render the template twice with the same prefix. +### Configuration + +The input data can be pre-processed by setting options in the configuration file. +List of supported options: +| Name | Default | Description | +| ------------------------- | ------------ | ---------------------------------------------------------------------------------------------------| +| prefix | "" | Adds a prefix to the input data. The data is then accessible under the configured prefix. | +| unpackValue | False | If the input data is nested under the `value` key, the data is unpacked from that key. | ## Prerequisites diff --git a/hack/images/jinja2/jinja2-cli/jinja2cli/cli.py b/hack/images/jinja2/jinja2-cli/jinja2cli/cli.py index 3f134f36d..56bf36bc3 100644 --- a/hack/images/jinja2/jinja2-cli/jinja2cli/cli.py +++ b/hack/images/jinja2/jinja2-cli/jinja2cli/cli.py @@ -312,17 +312,13 @@ def cli(opts, args, config): # noqa: C901 out = codecs.getwriter("utf8")(out) - if config.get("prefix") is not None and len(parsed_data) != 0: - parsed_data = {config["prefix"]: parsed_data} - if config.get("strip-value") is True and len(parsed_data) != 0 and "value" in parsed_data: - parsed_data = parsed_data.get("value", {}) + parsed_data = preprocessing_data(config, parsed_data) template_path = os.path.abspath(template_path) out.write(render(template_path, parsed_data, extensions, opts.filters, opts.strict)) out.flush() return 0 - def parse_kv_string(pairs): dict_ = {} for pair in pairs: @@ -331,6 +327,17 @@ def parse_kv_string(pairs): return dict_ +def preprocessing_data(config, data): + '''Return preprocessed data based on the applied configuration.''' + + if config.get("unpackValue") is True and len(data) != 0 and "value" in data: + data = data.get("value", {}) + + if config.get("prefix") is not None and len(data) != 0: + data = {config["prefix"]: data} + + return data + class LazyHelpOption(Option): "An Option class that resolves help from a callable" diff --git a/hack/images/jinja2/jinja2-cli/tests/test_jinja2cli.py b/hack/images/jinja2/jinja2-cli/tests/test_jinja2cli.py index f2534176d..043f3bb9c 100644 --- a/hack/images/jinja2/jinja2-cli/tests/test_jinja2cli.py +++ b/hack/images/jinja2/jinja2-cli/tests/test_jinja2cli.py @@ -12,76 +12,82 @@ @dataclass -class TestCase: +class RenderTestCase: name: str template: str data: typing.Dict[str, typing.Any] result: str +@dataclass +class PreprocessingDataTestCase: + name: str + config: typing.Dict[str, typing.Any] + data: typing.Dict[str, typing.Any] + result: typing.Dict[str, typing.Any] render_testcases = [ - TestCase(name="empty", template="", data={}, result=""), - TestCase( + RenderTestCase(name="empty", template="", data={}, result=""), + RenderTestCase( name="simple", template="<@ title @>", data={"title": b"\xc3\xb8".decode("utf8")}, result=b"\xc3\xb8".decode("utf8"), ), - TestCase( + RenderTestCase( name="prefix", template="<@ input.key @>", data={"input": {"key": "value"}}, result="value", ), - TestCase( + RenderTestCase( name="two prefixes but one provided", template="<@ input.key @>/<@ additionalinput.key @>", data={"input": {"key": "value"}}, result="value/<@ additionalinput.key @>", ), - TestCase( + RenderTestCase( name="missing prefix", 
template="<@ input.key @>", data={}, result="<@ input.key @>", ), - TestCase( + RenderTestCase( name="items before attrs", template="<@ input.values.key @>", data={"input": {"values": {"key": "value"}}}, result="value", ), - TestCase( + RenderTestCase( name="attrs still working", template="<@ input.values() @>", data={"input": {}}, result="dict_values([])", ), - TestCase( + RenderTestCase( name="key with dot", template="<@ input['foo.bar'] @>", data={"input": {"foo.bar": "value"}}, result="value", ), - TestCase( + RenderTestCase( name="missing key with dot", template='<@ input["foo.bar"] @>', data={}, result='<@ input["foo.bar"] @>', ), - TestCase( + RenderTestCase( name="use default value", template='<@ input["foo.bar"] | default("hello") @>', data={}, result="hello", ), - TestCase( + RenderTestCase( name="multiple dotted values", template='<@ input.key.key["foo.bar/baz"] | default("hello") @>', data={}, result="hello", ), - TestCase( + RenderTestCase( name="multiline strings", template="""<@ input.key.key["foo.bar/baz"] | default('hello hello') @>""", @@ -100,6 +106,33 @@ def test_render(tmp_path, case): assert output == case.result +preprocessing_data_testcases = [ + PreprocessingDataTestCase( + name="set prefix in the config should prefix the data", + config={"prefix": "testprefix"}, + data = {"test": "test"}, + result={"testprefix": {"test": "test"}} + ), + PreprocessingDataTestCase( + name="set unpackValue in the config should remove the value prefix", + config={"unpackValue": True}, + data = {"value": {"test": "test"}}, + result={"test": "test"} + ), + PreprocessingDataTestCase( + name="set unpackValue and prefix should output correct results", + config={"prefix": "testprefix", "unpackValue": True}, + data = {"value": {"test": "test"}}, + result={"testprefix": {"test": "test"}} + ) +] + +@pytest.mark.parametrize("case", preprocessing_data_testcases) +def test_preprocessing_data(case): + output = cli.preprocessing_data(case.config,case.data) + assert output == case.result + + def test_random_password(tmp_path): random_pass_path = tmp_path / "random.template" random_pass_path.write_text("<@ random_password(length=4) @>") diff --git a/hack/images/merger/README.md b/hack/images/merger/README.md index b00dcb834..de71eee8e 100755 --- a/hack/images/merger/README.md +++ b/hack/images/merger/README.md @@ -4,8 +4,7 @@ This folder contains the Docker image which merges multiple input YAML files into a single one. -The Docker image contains the `merger.sh` helper script. The script is an entrypoint of the image, and it is used to prefix and merge all YAML files found in `$SRC` directory. -Each file is prefixed with a file name without extension. +The Docker image contains the `merger.sh` helper script. The script is an entrypoint of the image, and it is used to prefix and merge all YAML files found in `$SRC` directory. Additionally, if the YAML file contains the `value` key, then it is unpacked from that key. Each file is prefixed with a file name without extension. 
## Installation diff --git a/internal/installation/capact_register.go b/internal/installation/capact_register.go index 132ee63b8..2bd95b06a 100644 --- a/internal/installation/capact_register.go +++ b/internal/installation/capact_register.go @@ -177,14 +177,11 @@ func (i *CapactRegister) produceHelmReleaseTypeInstance(helmRelease *release.Rel return nil, errors.Wrap(err, "while producing Helm release definition") } - var unmarshalled map[string]interface{} + var unmarshalled interface{} err = yaml.Unmarshal(releaseOut, &unmarshalled) if err != nil { return nil, errors.Wrap(err, "while unmarshaling bytes") } - if _, ok := unmarshalled["value"]; !ok { - return nil, errors.Wrap(err, "while getting value from unmarshalled additional output") - } return &gqllocalapi.CreateTypeInstanceInput{ Alias: ptr.String(helmRelease.Name), @@ -192,7 +189,7 @@ func (i *CapactRegister) produceHelmReleaseTypeInstance(helmRelease *release.Rel Path: helmReleaseTypeRefPath, Revision: "0.1.0", }, - Value: unmarshalled["value"], + Value: unmarshalled, }, nil } @@ -210,22 +207,18 @@ func (i *CapactRegister) produceConfigTypeInstance(ownerName string, helmRelease return nil, errors.Wrap(err, "while producing additional info") } - var unmarshalled map[string]interface{} + var unmarshalled interface{} err = yaml.Unmarshal(data, &unmarshalled) if err != nil { return nil, errors.Wrap(err, "while unmarshaling bytes") } - if _, ok := unmarshalled["value"]; !ok { - return nil, errors.Wrap(err, "while getting value from unmarshalled additional output") - } - return &gqllocalapi.CreateTypeInstanceInput{ Alias: ptr.String(ownerName), TypeRef: &gqllocalapi.TypeInstanceTypeReferenceInput{ Path: capactTypeRefPath, Revision: "0.1.0", }, - Value: unmarshalled["value"], + Value: unmarshalled, }, nil } diff --git a/pkg/argo-actions/download_type_instances.go b/pkg/argo-actions/download_type_instances.go index 7ef5c0c7b..3848757d3 100644 --- a/pkg/argo-actions/download_type_instances.go +++ b/pkg/argo-actions/download_type_instances.go @@ -49,7 +49,7 @@ func (d *Download) Do(ctx context.Context) error { return fmt.Errorf("failed to find TypeInstance with ID %q", config.ID) } - data, err := yaml.Marshal(typeInstance.LatestResourceVersion.Spec.Value) + data, err := yaml.Marshal(typeInstance.LatestResourceVersion.Spec) if err != nil { return errors.Wrap(err, "while marshaling TypeInstance to YAML") } diff --git a/pkg/argo-actions/errors.go b/pkg/argo-actions/errors.go index fa4a03f07..c878848af 100644 --- a/pkg/argo-actions/errors.go +++ b/pkg/argo-actions/errors.go @@ -11,8 +11,3 @@ func ErrMissingTypeInstanceValue(typeInstanceName string) error { func ErrMissingResourceVersion() error { return errors.Errorf("resourceVersion is missing") } - -// ErrTypeConversion returns an error indicating incorrect type conversion. 
-func ErrTypeConversion(field string, targetType string, typeInstanceName string) error { - return errors.Errorf("%s cannot be converted to %s for TypeInstances %s", field, targetType, typeInstanceName) -} diff --git a/pkg/argo-actions/update_type_instances.go b/pkg/argo-actions/update_type_instances.go index 522796593..92c81b3fe 100644 --- a/pkg/argo-actions/update_type_instances.go +++ b/pkg/argo-actions/update_type_instances.go @@ -2,6 +2,7 @@ package argoactions import ( "context" + "encoding/json" "io/ioutil" "path" "path/filepath" @@ -116,7 +117,28 @@ func (u *Update) render(payload []graphqllocal.UpdateTypeInstancesInput, values return ErrMissingTypeInstanceValue(typeInstance.ID) } - typeInstance.TypeInstance.Value = value + if _, ok = value["value"]; !ok { + // for backward compatibility, if there is an artifact without value/backend syntax, + // treat it as a value for TypeInstance + typeInstance.TypeInstance.Value = value + continue + } + + data, err := json.Marshal(value) + if err != nil { + return errors.Wrap(err, "while marshaling TypeInstance") + } + + unmarshalledTI := graphqllocal.UpdateTypeInstanceInput{} + err = json.Unmarshal(data, &unmarshalledTI) + if err != nil { + return errors.Wrap(err, "while unmarshaling TypeInstance") + } + + typeInstance.TypeInstance.Value = unmarshalledTI.Value + if unmarshalledTI.Backend != nil { + typeInstance.TypeInstance.Backend = unmarshalledTI.Backend + } } return nil } diff --git a/pkg/argo-actions/upload_type_instances.go b/pkg/argo-actions/upload_type_instances.go index be53963bd..d99b6bda3 100644 --- a/pkg/argo-actions/upload_type_instances.go +++ b/pkg/argo-actions/upload_type_instances.go @@ -2,6 +2,7 @@ package argoactions import ( "context" + "encoding/json" "fmt" "io/ioutil" "path/filepath" @@ -119,31 +120,27 @@ func (u *Upload) render(payload *graphqllocal.CreateTypeInstancesInput, values m return ErrMissingTypeInstanceValue(*typeInstance.Alias) } - // render value - if _, ok = value["value"]; ok { - typeInstanceValue, ok := value["value"].(map[string]interface{}) - if !ok { - return ErrTypeConversion("value", "map[string]interface{}", *typeInstance.Alias) - } - typeInstance.Value = typeInstanceValue - } else { + if _, ok = value["value"]; !ok { // for backward compatibility, if there is an artifact without value/backend syntax, // treat it as a value for TypeInstance typeInstance.Value = value continue } - // render backend - if _, ok := value["backend"]; ok { - typeInstanceBackend, ok := value["backend"].(map[string]interface{}) - if !ok { - return ErrTypeConversion("backend", "map[string]interface{}", *typeInstance.Alias) - } - backendContext, ok := typeInstanceBackend["context"].(map[string]interface{}) - if !ok { - return ErrTypeConversion("backend.context", "map[string]interface{}", *typeInstance.Alias) - } - typeInstance.Backend.Context = backendContext + data, err := json.Marshal(value) + if err != nil { + return errors.Wrap(err, "while marshaling TypeInstance") + } + + unmarshalledTI := graphqllocal.CreateTypeInstanceInput{} + err = json.Unmarshal(data, &unmarshalledTI) + if err != nil { + return errors.Wrap(err, "while unmarshaling TypeInstance") + } + + typeInstance.Value = unmarshalledTI.Value + if unmarshalledTI.Backend != nil { + typeInstance.Backend.Context = unmarshalledTI.Backend.Context } } return nil diff --git a/pkg/runner/helm/helm.go b/pkg/runner/helm/helm.go index df21a7ec6..94c7aae5b 100644 --- a/pkg/runner/helm/helm.go +++ b/pkg/runner/helm/helm.go @@ -122,7 +122,11 @@ func (r *helmRunner) 
readCommandData(in runner.StartInput) (Input, error) { func (r *helmRunner) saveOutput(out Output) error { r.log.Debug("Saving Helm release output", zap.String("path", r.cfg.Output.HelmReleaseFilePath)) - err := runner.SaveToFile(r.cfg.Output.HelmReleaseFilePath, out.Release) + bytesRelease, err := runner.NestingOutputUnderValue(out.Release) + if err != nil { + return errors.Wrap(err, "while nesting Helm release under value") + } + err = runner.SaveToFile(r.cfg.Output.HelmReleaseFilePath, bytesRelease) if err != nil { return errors.Wrap(err, "while saving Helm release output") } @@ -132,7 +136,11 @@ func (r *helmRunner) saveOutput(out Output) error { } r.log.Debug("Saving additional output", zap.String("path", r.cfg.Output.AdditionalFilePath)) - err = runner.SaveToFile(r.cfg.Output.AdditionalFilePath, out.Additional) + bytesAdditional, err := runner.NestingOutputUnderValue(out.Additional) + if err != nil { + return errors.Wrap(err, "while nesting Helm additional output under value") + } + err = runner.SaveToFile(r.cfg.Output.AdditionalFilePath, bytesAdditional) if err != nil { return errors.Wrap(err, "while saving default output") } diff --git a/pkg/runner/helm/output.go b/pkg/runner/helm/output.go index 9626ce96d..7dd170397 100644 --- a/pkg/runner/helm/output.go +++ b/pkg/runner/helm/output.go @@ -3,7 +3,6 @@ package helm import ( "strings" - "capact.io/capact/pkg/runner" "github.com/pkg/errors" "go.uber.org/zap" "helm.sh/helm/v3/pkg/chart" @@ -40,11 +39,7 @@ func (o *Outputter) ProduceHelmRelease(repository string, helmRelease *release.R }, } - nestingValue := map[string]interface{}{ - "value": releaseData, - } - - bytes, err := yaml.Marshal(&nestingValue) + bytes, err := yaml.Marshal(&releaseData) if err != nil { return nil, errors.Wrap(err, "while marshaling yaml") } @@ -65,10 +60,5 @@ func (o *Outputter) ProduceAdditional(args OutputArgs, chrt *chart.Chart, rel *r return nil, errors.Wrap(err, "while rendering additional output") } - bytes, err = runner.NestingOutputUnderValue(bytes) - if err != nil { - return nil, errors.Wrap(err, "while nesting output under a value key") - } - return bytes, nil } diff --git a/test/e2e/action_test.go b/test/e2e/action_test.go index 672d413ea..e541ab9ca 100644 --- a/test/e2e/action_test.go +++ b/test/e2e/action_test.go @@ -73,12 +73,12 @@ var _ = Describe("Action", func() { By("1. Preparing input Type Instances") By("1.1 Creating TypeInstance which will be downloaded") - download := getTypeInstanceInputForDownload(map[string]interface{}{"key": implIndicatorValue}) + download := getTypeInstanceInputForDownload(map[string]interface{}{"key": implIndicatorValue}, nil) downloadTI, downloadTICleanup := createTypeInstance(ctx, hubClient, download) defer downloadTICleanup() By("1.2 Creating TypeInstance which will be downloaded and updated") - update := getTypeInstanceInputForUpdate() + update := getTypeInstanceInputForUpdate(nil) updateTI, updateTICleanup := createTypeInstance(ctx, hubClient, update) defer updateTICleanup() @@ -154,12 +154,12 @@ var _ = Describe("Action", func() { // see: https://github.com/onsi/ginkgo/issues/70#issuecomment-924250145 By("1. 
Preparing input Type Instances") By("1.1 Creating TypeInstance which will be downloaded") - download := getTypeInstanceInputForDownload(map[string]interface{}{"key": implIndicatorValue}) + download := getTypeInstanceInputForDownload(map[string]interface{}{"key": implIndicatorValue}, nil) downloadTI, downloadTICleanup := createTypeInstance(ctx, hubClient, download) defer downloadTICleanup() By("1.2 Creating TypeInstance which will be downloaded and updated") - update := getTypeInstanceInputForUpdate() + update := getTypeInstanceInputForUpdate(nil) updateTI, updateTICleanup := createTypeInstance(ctx, hubClient, update) defer updateTICleanup() @@ -221,12 +221,12 @@ var _ = Describe("Action", func() { // see: https://github.com/onsi/ginkgo/issues/70#issuecomment-924250145 By("1. Preparing input Type Instances") By("1.1 Creating TypeInstance which will be downloaded") - download := getTypeInstanceInputForDownload(map[string]interface{}{"key": implIndicatorValue}) + download := getTypeInstanceInputForDownload(map[string]interface{}{"key": implIndicatorValue}, nil) downloadTI, downloadTICleanup := createTypeInstance(ctx, hubClient, download) defer downloadTICleanup() By("1.2 Creating TypeInstance which will be downloaded and updated") - update := getTypeInstanceInputForUpdate() + update := getTypeInstanceInputForUpdate(nil) updateTI, updateTICleanup := createTypeInstance(ctx, hubClient, update) defer updateTICleanup() @@ -289,21 +289,22 @@ var _ = Describe("Action", func() { // see: https://github.com/onsi/ginkgo/issues/70#issuecomment-924250145 By("1. Preparing input Type Instances") By("1.1 Creating TypeInstance which will be downloaded") - download := getTypeInstanceInputForDownload(map[string]interface{}{ - "value": map[string]interface{}{ - "key": implIndicatorValue, + backendInput := hublocalgraphql.TypeInstanceBackendInput{ + ID: testStorageBackendTI.ID, + Context: map[string]interface{}{ + "provider": "dotenv", }, - "backend": map[string]interface{}{ - "context": map[string]interface{}{ - "provider": "dotenv", - }, + } + download := getTypeInstanceInputForDownload( + map[string]interface{}{ + "key": implIndicatorValue, }, - }) + &backendInput) downloadTI, downloadTICleanup := createTypeInstance(ctx, hubClient, download) defer downloadTICleanup() By("1.2 Creating TypeInstance which will be downloaded and updated") - update := getTypeInstanceInputForUpdate() + update := getTypeInstanceInputForUpdate(&backendInput) updateTI, updateTICleanup := createTypeInstance(ctx, hubClient, update) defer updateTICleanup() @@ -339,70 +340,14 @@ var _ = Describe("Action", func() { By("4.1 Check uploaded TypeInstances") expUploadTIBackend := &hublocalgraphql.TypeInstanceBackendReference{ID: testStorageBackendTI.ID, Abstract: false} - fmt.Println("expUploadTIBackend", expUploadTIBackend.ID) uploadedTI, cleanupUploaded := getUploadedTypeInstanceByValue(ctx, hubClient, implIndicatorValue) defer cleanupUploaded() Expect(uploadedTI.Backend).Should(Equal(expUploadTIBackend)) By("4.2 Check Action output TypeInstances") + updateTIOutput := mapToOutputTypeInstanceDetails(updateTI, expUploadTIBackend) uploadedTIOutput := mapToOutputTypeInstanceDetails(uploadedTI, expUploadTIBackend) - assertOutputTypeInstancesInActionStatus(ctx, engineClient, action.Name, And(ContainElements(uploadedTIOutput), HaveLen(1))) - }) - - It("should fail due to incorrect storage provider", func() { - implIndicatorValue := "Implementation C" - testStorageBackendTI := getDefaultTestStorageTypeInstance(ctx, hubClient) - - // TODO: This can be 
extracted after switching to ginkgo v2 - // see: https://github.com/onsi/ginkgo/issues/70#issuecomment-924250145 - By("1. Preparing input Type Instances") - By("1.1 Creating TypeInstance which will be downloaded") - download := getTypeInstanceInputForDownload(map[string]interface{}{ - "value": map[string]interface{}{ - "key": implIndicatorValue, - }, - "backend": map[string]interface{}{ - "context": map[string]interface{}{ - "provider": "incorrect", - }, - }, - }) - downloadTI, downloadTICleanup := createTypeInstance(ctx, hubClient, download) - defer downloadTICleanup() - - By("1.2 Creating TypeInstance which will be downloaded and updated") - update := getTypeInstanceInputForUpdate() - updateTI, updateTICleanup := createTypeInstance(ctx, hubClient, update) - defer updateTICleanup() - - By("1.3 Create TypeInstance which is required for Implementation C to be picked based on Policy") - typeInstanceValue := getTypeInstanceInputForPolicy() - injectTypeInstance, tiCleanupFn := createTypeInstance(ctx, hubClient, typeInstanceValue) - defer tiCleanupFn() - - inputData := &enginegraphql.ActionInputData{ - TypeInstances: []*enginegraphql.InputTypeInstanceData{ - {Name: "testInput", ID: downloadTI.ID}, - {Name: "testUpdate", ID: updateTI.ID}, - }, - } - - By("2. Modifying default Policy to pick Implementation C...") - globalPolicyRequiredTypeInstances := []*enginegraphql.RequiredTypeInstanceReferenceInput{ - { - ID: injectTypeInstance.ID, - Description: ptr.String("Test TypeInstance"), - }, - { - ID: testStorageBackendTI.ID, - Description: ptr.String("Validation storage backend TypeInstance"), - }, - } - setGlobalTestPolicy(ctx, engineClient, addInterfacePolicyDefaultInjectionForPassingActionInterface(globalPolicyRequiredTypeInstances)) - - By("3. Create action and wait for a status phase failed") - createActionAndWaitForReadyToRunPhase(ctx, engineClient, actionName, actionPassingInterfacePath, inputData) - runActionAndWaitForStatus(ctx, engineClient, actionName, enginegraphql.ActionStatusPhaseFailed) + assertOutputTypeInstancesInActionStatus(ctx, engineClient, action.Name, And(ContainElements(updateTIOutput, uploadedTIOutput), HaveLen(2))) }) It("should have failed status after a failed workflow", func() { @@ -436,7 +381,7 @@ var _ = Describe("Action", func() { By("Prepare TypeInstance to update") - update := getTypeInstanceInputForUpdate() + update := getTypeInstanceInputForUpdate(nil) updateTI, updateTICleanup := createTypeInstance(ctx, hubClient, update) defer updateTICleanup() @@ -564,13 +509,14 @@ func getTypeInstanceInputForPolicy() *hublocalgraphql.CreateTypeInstanceInput { } } -func getTypeInstanceInputForDownload(testValues map[string]interface{}) *hublocalgraphql.CreateTypeInstanceInput { +func getTypeInstanceInputForDownload(testValues map[string]interface{}, backendInput *hublocalgraphql.TypeInstanceBackendInput) *hublocalgraphql.CreateTypeInstanceInput { return &hublocalgraphql.CreateTypeInstanceInput{ TypeRef: &hublocalgraphql.TypeInstanceTypeReferenceInput{ Path: "cap.type.capactio.capact.validation.download", Revision: "0.1.0", }, - Value: testValues, + Value: testValues, + Backend: backendInput, Attributes: []*hublocalgraphql.AttributeReferenceInput{ { Path: "cap.attribute.capactio.capact.attribute1", @@ -580,13 +526,14 @@ func getTypeInstanceInputForDownload(testValues map[string]interface{}) *hubloca } } -func getTypeInstanceInputForUpdate() *hublocalgraphql.CreateTypeInstanceInput { +func getTypeInstanceInputForUpdate(backendInput *hublocalgraphql.TypeInstanceBackendInput) 
*hublocalgraphql.CreateTypeInstanceInput { return &hublocalgraphql.CreateTypeInstanceInput{ TypeRef: &hublocalgraphql.TypeInstanceTypeReferenceInput{ Path: "cap.type.capactio.capact.validation.update", Revision: "0.1.0", }, Value: map[string]interface{}{"key": "random text to update"}, + Backend: backendInput, Attributes: []*hublocalgraphql.AttributeReferenceInput{ { Path: "cap.attribute.capactio.capact.attribute1",