diff --git a/config/.env.example b/config/.env.example
index ac8629d..970e259 100644
--- a/config/.env.example
+++ b/config/.env.example
@@ -148,18 +148,8 @@ CORE_RATELIMIT_BURST="30"
CORE_RATELIMIT_EXPIRES="10m"
CORE_OBJECTSTORAGE_ENABLED="true"
CORE_OBJECTSTORAGE_PROVIDER=""
-CORE_OBJECTSTORAGE_STORAGE=""
-CORE_OBJECTSTORAGE_DATASET_TYPE=""
-CORE_OBJECTSTORAGE_KIND=""
-CORE_OBJECTSTORAGE_PATH=""
-CORE_OBJECTSTORAGE_CONTAINER=""
CORE_OBJECTSTORAGE_ACCESSKEY=""
CORE_OBJECTSTORAGE_REGION=""
CORE_OBJECTSTORAGE_SECRETKEY=""
-CORE_OBJECTSTORAGE_CREDENTIALS_JSON=""
+CORE_OBJECTSTORAGE_CREDENTIALSJSON=""
CORE_OBJECTSTORAGE_BUCKET=""
-CORE_OBJECTSTORAGE_ENDPOINT=""
-CORE_OBJECTSTORAGE_DISABLE_SSL=""
-CORE_OBJECTSTORAGE_FORCE_PATH_STYLE=""
-CORE_OBJECTSTORAGE_PATH_STYLE=""
-CORE_OBJECTSTORAGE_ENDPOINT_STYLE=""
diff --git a/config/config.example.yaml b/config/config.example.yaml
index 7d65391..c6a0385 100644
--- a/config/config.example.yaml
+++ b/config/config.example.yaml
@@ -107,26 +107,13 @@ jobQueue:
Workers: null
runMigrations: false
objectStorage:
- ConfigurationPath: ""
- ExecutionTime: "0001-01-01T00:00:00Z"
- SourcePath: ""
- accesskey: ""
+ accessKey: ""
bucket: ""
- container: ""
- credentials_json: ""
- dataset_type: ""
- disable_ssl: false
+ credentialsJSON: ""
enabled: true
- endpoint: ""
- endpoint_style: false
- force_path_style: false
- kind: ""
- path: ""
- path_style: false
provider: ""
region: ""
- secretkey: ""
- storage: null
+ secretKey: ""
ratelimit:
burst: 30
enabled: false
diff --git a/config/config.go b/config/config.go
index 12718cd..803c7f6 100644
--- a/config/config.go
+++ b/config/config.go
@@ -65,8 +65,9 @@ type Config struct {
// TOTP contains the configuration for the TOTP provider
TOTP totp.Config `json:"totp" koanf:"totp"`
// Ratelimit contains the configuration for the rate limiter
- Ratelimit ratelimit.Config `json:"ratelimit" koanf:"ratelimit"`
- ObjectStorage objects.Config `json:"objectStorage" koanf:"objectStorage"`
+ Ratelimit ratelimit.Config `json:"ratelimit" koanf:"ratelimit"`
+ // ObjectStorage contains the configuration for the object storage backend
+ ObjectStorage objects.Config `json:"objectStorage" koanf:"objectStorage"`
}
// Server settings for the echo server
diff --git a/config/configmap.yaml b/config/configmap.yaml
index cce361a..6a776cc 100644
--- a/config/configmap.yaml
+++ b/config/configmap.yaml
@@ -160,18 +160,8 @@ data:
CORE_RATELIMIT_EXPIRES: {{ .Values.core.ratelimit.expires | default "10m" }}
CORE_OBJECTSTORAGE_ENABLED: {{ .Values.core.objectStorage.enabled | default true }}
CORE_OBJECTSTORAGE_PROVIDER: {{ .Values.core.objectStorage.provider }}
- CORE_OBJECTSTORAGE_STORAGE: {{ .Values.core.objectStorage.storage }}
- CORE_OBJECTSTORAGE_DATASET_TYPE: {{ .Values.core.objectStorage.dataset_type }}
- CORE_OBJECTSTORAGE_KIND: {{ .Values.core.objectStorage.kind }}
- CORE_OBJECTSTORAGE_PATH: {{ .Values.core.objectStorage.path }}
- CORE_OBJECTSTORAGE_CONTAINER: {{ .Values.core.objectStorage.container }}
- CORE_OBJECTSTORAGE_ACCESSKEY: {{ .Values.core.objectStorage.accesskey }}
+ CORE_OBJECTSTORAGE_ACCESSKEY: {{ .Values.core.objectStorage.accessKey }}
CORE_OBJECTSTORAGE_REGION: {{ .Values.core.objectStorage.region }}
- CORE_OBJECTSTORAGE_SECRETKEY: {{ .Values.core.objectStorage.secretkey }}
- CORE_OBJECTSTORAGE_CREDENTIALS_JSON: {{ .Values.core.objectStorage.credentials_json }}
+ CORE_OBJECTSTORAGE_SECRETKEY: {{ .Values.core.objectStorage.secretKey }}
+ CORE_OBJECTSTORAGE_CREDENTIALSJSON: {{ .Values.core.objectStorage.credentialsJSON }}
CORE_OBJECTSTORAGE_BUCKET: {{ .Values.core.objectStorage.bucket }}
- CORE_OBJECTSTORAGE_ENDPOINT: {{ .Values.core.objectStorage.endpoint }}
- CORE_OBJECTSTORAGE_DISABLE_SSL: {{ .Values.core.objectStorage.disable_ssl }}
- CORE_OBJECTSTORAGE_FORCE_PATH_STYLE: {{ .Values.core.objectStorage.force_path_style }}
- CORE_OBJECTSTORAGE_PATH_STYLE: {{ .Values.core.objectStorage.path_style }}
- CORE_OBJECTSTORAGE_ENDPOINT_STYLE: {{ .Values.core.objectStorage.endpoint_style }}
diff --git a/go.sum b/go.sum
index 1e4596e..0d36ef9 100644
--- a/go.sum
+++ b/go.sum
@@ -208,6 +208,8 @@ github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1
github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY=
github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE=
github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ=
+github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
+github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
diff --git a/internal/ent/generated/auditing.go b/internal/ent/generated/auditing.go
index b715115..db98efb 100644
--- a/internal/ent/generated/auditing.go
+++ b/internal/ent/generated/auditing.go
@@ -708,9 +708,6 @@ func (fh *FileHistory) changes(new *FileHistory) []Change {
if !reflect.DeepEqual(fh.StoreKey, new.StoreKey) {
changes = append(changes, NewChange(filehistory.FieldStoreKey, fh.StoreKey, new.StoreKey))
}
- if !reflect.DeepEqual(fh.CorrelationID, new.CorrelationID) {
- changes = append(changes, NewChange(filehistory.FieldCorrelationID, fh.CorrelationID, new.CorrelationID))
- }
if !reflect.DeepEqual(fh.CategoryType, new.CategoryType) {
changes = append(changes, NewChange(filehistory.FieldCategoryType, fh.CategoryType, new.CategoryType))
}
diff --git a/internal/ent/generated/entql.go b/internal/ent/generated/entql.go
index 1720252..36fcf59 100644
--- a/internal/ent/generated/entql.go
+++ b/internal/ent/generated/entql.go
@@ -648,7 +648,6 @@ var schemaGraph = func() *sqlgraph.Schema {
file.FieldMd5Hash: {Type: field.TypeString, Column: file.FieldMd5Hash},
file.FieldDetectedContentType: {Type: field.TypeString, Column: file.FieldDetectedContentType},
file.FieldStoreKey: {Type: field.TypeString, Column: file.FieldStoreKey},
- file.FieldCorrelationID: {Type: field.TypeString, Column: file.FieldCorrelationID},
file.FieldCategoryType: {Type: field.TypeString, Column: file.FieldCategoryType},
file.FieldURI: {Type: field.TypeString, Column: file.FieldURI},
file.FieldStorageScheme: {Type: field.TypeString, Column: file.FieldStorageScheme},
@@ -687,7 +686,6 @@ var schemaGraph = func() *sqlgraph.Schema {
filehistory.FieldMd5Hash: {Type: field.TypeString, Column: filehistory.FieldMd5Hash},
filehistory.FieldDetectedContentType: {Type: field.TypeString, Column: filehistory.FieldDetectedContentType},
filehistory.FieldStoreKey: {Type: field.TypeString, Column: filehistory.FieldStoreKey},
- filehistory.FieldCorrelationID: {Type: field.TypeString, Column: filehistory.FieldCorrelationID},
filehistory.FieldCategoryType: {Type: field.TypeString, Column: filehistory.FieldCategoryType},
filehistory.FieldURI: {Type: field.TypeString, Column: filehistory.FieldURI},
filehistory.FieldStorageScheme: {Type: field.TypeString, Column: filehistory.FieldStorageScheme},
@@ -6568,11 +6566,6 @@ func (f *FileFilter) WhereStoreKey(p entql.StringP) {
f.Where(p.Field(file.FieldStoreKey))
}
-// WhereCorrelationID applies the entql string predicate on the correlation_id field.
-func (f *FileFilter) WhereCorrelationID(p entql.StringP) {
- f.Where(p.Field(file.FieldCorrelationID))
-}
-
// WhereCategoryType applies the entql string predicate on the category_type field.
func (f *FileFilter) WhereCategoryType(p entql.StringP) {
f.Where(p.Field(file.FieldCategoryType))
@@ -6878,11 +6871,6 @@ func (f *FileHistoryFilter) WhereStoreKey(p entql.StringP) {
f.Where(p.Field(filehistory.FieldStoreKey))
}
-// WhereCorrelationID applies the entql string predicate on the correlation_id field.
-func (f *FileHistoryFilter) WhereCorrelationID(p entql.StringP) {
- f.Where(p.Field(filehistory.FieldCorrelationID))
-}
-
// WhereCategoryType applies the entql string predicate on the category_type field.
func (f *FileHistoryFilter) WhereCategoryType(p entql.StringP) {
f.Where(p.Field(filehistory.FieldCategoryType))
diff --git a/internal/ent/generated/file.go b/internal/ent/generated/file.go
index 8a899e4..e8b4e57 100644
--- a/internal/ent/generated/file.go
+++ b/internal/ent/generated/file.go
@@ -48,11 +48,9 @@ type File struct {
Md5Hash string `json:"md5_hash,omitempty"`
// the content type of the HTTP request - may be different than MIME type as multipart-form can transmit multiple files and different types
DetectedContentType string `json:"detected_content_type,omitempty"`
- // the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ // the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
StoreKey string `json:"store_key,omitempty"`
- // the ULID provided in the http request indicating the ULID to correleate the file to
- CorrelationID string `json:"correlation_id,omitempty"`
- // the category type of the file, if any (e.g. contract, invoice, etc.)
+ // the category type of the file, if any (e.g. evidence, invoice, etc.)
CategoryType string `json:"category_type,omitempty"`
// the full URI of the file
URI string `json:"uri,omitempty"`
@@ -209,7 +207,7 @@ func (*File) scanValues(columns []string) ([]any, error) {
values[i] = new([]byte)
case file.FieldProvidedFileSize, file.FieldPersistedFileSize:
values[i] = new(sql.NullInt64)
- case file.FieldID, file.FieldCreatedBy, file.FieldUpdatedBy, file.FieldDeletedBy, file.FieldMappingID, file.FieldProvidedFileName, file.FieldProvidedFileExtension, file.FieldDetectedMimeType, file.FieldMd5Hash, file.FieldDetectedContentType, file.FieldStoreKey, file.FieldCorrelationID, file.FieldCategoryType, file.FieldURI, file.FieldStorageScheme, file.FieldStorageVolume, file.FieldStoragePath:
+ case file.FieldID, file.FieldCreatedBy, file.FieldUpdatedBy, file.FieldDeletedBy, file.FieldMappingID, file.FieldProvidedFileName, file.FieldProvidedFileExtension, file.FieldDetectedMimeType, file.FieldMd5Hash, file.FieldDetectedContentType, file.FieldStoreKey, file.FieldCategoryType, file.FieldURI, file.FieldStorageScheme, file.FieldStorageVolume, file.FieldStoragePath:
values[i] = new(sql.NullString)
case file.FieldCreatedAt, file.FieldUpdatedAt, file.FieldDeletedAt:
values[i] = new(sql.NullTime)
@@ -332,12 +330,6 @@ func (f *File) assignValues(columns []string, values []any) error {
} else if value.Valid {
f.StoreKey = value.String
}
- case file.FieldCorrelationID:
- if value, ok := values[i].(*sql.NullString); !ok {
- return fmt.Errorf("unexpected type %T for field correlation_id", values[i])
- } else if value.Valid {
- f.CorrelationID = value.String
- }
case file.FieldCategoryType:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field category_type", values[i])
@@ -508,9 +500,6 @@ func (f *File) String() string {
builder.WriteString("store_key=")
builder.WriteString(f.StoreKey)
builder.WriteString(", ")
- builder.WriteString("correlation_id=")
- builder.WriteString(f.CorrelationID)
- builder.WriteString(", ")
builder.WriteString("category_type=")
builder.WriteString(f.CategoryType)
builder.WriteString(", ")
diff --git a/internal/ent/generated/file/file.go b/internal/ent/generated/file/file.go
index 8035d65..12b9ff1 100644
--- a/internal/ent/generated/file/file.go
+++ b/internal/ent/generated/file/file.go
@@ -47,8 +47,6 @@ const (
FieldDetectedContentType = "detected_content_type"
// FieldStoreKey holds the string denoting the store_key field in the database.
FieldStoreKey = "store_key"
- // FieldCorrelationID holds the string denoting the correlation_id field in the database.
- FieldCorrelationID = "correlation_id"
// FieldCategoryType holds the string denoting the category_type field in the database.
FieldCategoryType = "category_type"
// FieldURI holds the string denoting the uri field in the database.
@@ -154,7 +152,6 @@ var Columns = []string{
FieldMd5Hash,
FieldDetectedContentType,
FieldStoreKey,
- FieldCorrelationID,
FieldCategoryType,
FieldURI,
FieldStorageScheme,
@@ -315,11 +312,6 @@ func ByStoreKey(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldStoreKey, opts...).ToFunc()
}
-// ByCorrelationID orders the results by the correlation_id field.
-func ByCorrelationID(opts ...sql.OrderTermOption) OrderOption {
- return sql.OrderByField(FieldCorrelationID, opts...).ToFunc()
-}
-
// ByCategoryType orders the results by the category_type field.
func ByCategoryType(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldCategoryType, opts...).ToFunc()
diff --git a/internal/ent/generated/file/where.go b/internal/ent/generated/file/where.go
index f80ad94..c197628 100644
--- a/internal/ent/generated/file/where.go
+++ b/internal/ent/generated/file/where.go
@@ -142,11 +142,6 @@ func StoreKey(v string) predicate.File {
return predicate.File(sql.FieldEQ(FieldStoreKey, v))
}
-// CorrelationID applies equality check predicate on the "correlation_id" field. It's identical to CorrelationIDEQ.
-func CorrelationID(v string) predicate.File {
- return predicate.File(sql.FieldEQ(FieldCorrelationID, v))
-}
-
// CategoryType applies equality check predicate on the "category_type" field. It's identical to CategoryTypeEQ.
func CategoryType(v string) predicate.File {
return predicate.File(sql.FieldEQ(FieldCategoryType, v))
@@ -1147,81 +1142,6 @@ func StoreKeyContainsFold(v string) predicate.File {
return predicate.File(sql.FieldContainsFold(FieldStoreKey, v))
}
-// CorrelationIDEQ applies the EQ predicate on the "correlation_id" field.
-func CorrelationIDEQ(v string) predicate.File {
- return predicate.File(sql.FieldEQ(FieldCorrelationID, v))
-}
-
-// CorrelationIDNEQ applies the NEQ predicate on the "correlation_id" field.
-func CorrelationIDNEQ(v string) predicate.File {
- return predicate.File(sql.FieldNEQ(FieldCorrelationID, v))
-}
-
-// CorrelationIDIn applies the In predicate on the "correlation_id" field.
-func CorrelationIDIn(vs ...string) predicate.File {
- return predicate.File(sql.FieldIn(FieldCorrelationID, vs...))
-}
-
-// CorrelationIDNotIn applies the NotIn predicate on the "correlation_id" field.
-func CorrelationIDNotIn(vs ...string) predicate.File {
- return predicate.File(sql.FieldNotIn(FieldCorrelationID, vs...))
-}
-
-// CorrelationIDGT applies the GT predicate on the "correlation_id" field.
-func CorrelationIDGT(v string) predicate.File {
- return predicate.File(sql.FieldGT(FieldCorrelationID, v))
-}
-
-// CorrelationIDGTE applies the GTE predicate on the "correlation_id" field.
-func CorrelationIDGTE(v string) predicate.File {
- return predicate.File(sql.FieldGTE(FieldCorrelationID, v))
-}
-
-// CorrelationIDLT applies the LT predicate on the "correlation_id" field.
-func CorrelationIDLT(v string) predicate.File {
- return predicate.File(sql.FieldLT(FieldCorrelationID, v))
-}
-
-// CorrelationIDLTE applies the LTE predicate on the "correlation_id" field.
-func CorrelationIDLTE(v string) predicate.File {
- return predicate.File(sql.FieldLTE(FieldCorrelationID, v))
-}
-
-// CorrelationIDContains applies the Contains predicate on the "correlation_id" field.
-func CorrelationIDContains(v string) predicate.File {
- return predicate.File(sql.FieldContains(FieldCorrelationID, v))
-}
-
-// CorrelationIDHasPrefix applies the HasPrefix predicate on the "correlation_id" field.
-func CorrelationIDHasPrefix(v string) predicate.File {
- return predicate.File(sql.FieldHasPrefix(FieldCorrelationID, v))
-}
-
-// CorrelationIDHasSuffix applies the HasSuffix predicate on the "correlation_id" field.
-func CorrelationIDHasSuffix(v string) predicate.File {
- return predicate.File(sql.FieldHasSuffix(FieldCorrelationID, v))
-}
-
-// CorrelationIDIsNil applies the IsNil predicate on the "correlation_id" field.
-func CorrelationIDIsNil() predicate.File {
- return predicate.File(sql.FieldIsNull(FieldCorrelationID))
-}
-
-// CorrelationIDNotNil applies the NotNil predicate on the "correlation_id" field.
-func CorrelationIDNotNil() predicate.File {
- return predicate.File(sql.FieldNotNull(FieldCorrelationID))
-}
-
-// CorrelationIDEqualFold applies the EqualFold predicate on the "correlation_id" field.
-func CorrelationIDEqualFold(v string) predicate.File {
- return predicate.File(sql.FieldEqualFold(FieldCorrelationID, v))
-}
-
-// CorrelationIDContainsFold applies the ContainsFold predicate on the "correlation_id" field.
-func CorrelationIDContainsFold(v string) predicate.File {
- return predicate.File(sql.FieldContainsFold(FieldCorrelationID, v))
-}
-
// CategoryTypeEQ applies the EQ predicate on the "category_type" field.
func CategoryTypeEQ(v string) predicate.File {
return predicate.File(sql.FieldEQ(FieldCategoryType, v))
diff --git a/internal/ent/generated/file_create.go b/internal/ent/generated/file_create.go
index 7dced43..fa0fe0a 100644
--- a/internal/ent/generated/file_create.go
+++ b/internal/ent/generated/file_create.go
@@ -222,20 +222,6 @@ func (fc *FileCreate) SetNillableStoreKey(s *string) *FileCreate {
return fc
}
-// SetCorrelationID sets the "correlation_id" field.
-func (fc *FileCreate) SetCorrelationID(s string) *FileCreate {
- fc.mutation.SetCorrelationID(s)
- return fc
-}
-
-// SetNillableCorrelationID sets the "correlation_id" field if the given value is not nil.
-func (fc *FileCreate) SetNillableCorrelationID(s *string) *FileCreate {
- if s != nil {
- fc.SetCorrelationID(*s)
- }
- return fc
-}
-
// SetCategoryType sets the "category_type" field.
func (fc *FileCreate) SetCategoryType(s string) *FileCreate {
fc.mutation.SetCategoryType(s)
@@ -672,10 +658,6 @@ func (fc *FileCreate) createSpec() (*File, *sqlgraph.CreateSpec) {
_spec.SetField(file.FieldStoreKey, field.TypeString, value)
_node.StoreKey = value
}
- if value, ok := fc.mutation.CorrelationID(); ok {
- _spec.SetField(file.FieldCorrelationID, field.TypeString, value)
- _node.CorrelationID = value
- }
if value, ok := fc.mutation.CategoryType(); ok {
_spec.SetField(file.FieldCategoryType, field.TypeString, value)
_node.CategoryType = value
diff --git a/internal/ent/generated/file_update.go b/internal/ent/generated/file_update.go
index b35827c..f0d50fc 100644
--- a/internal/ent/generated/file_update.go
+++ b/internal/ent/generated/file_update.go
@@ -288,26 +288,6 @@ func (fu *FileUpdate) ClearStoreKey() *FileUpdate {
return fu
}
-// SetCorrelationID sets the "correlation_id" field.
-func (fu *FileUpdate) SetCorrelationID(s string) *FileUpdate {
- fu.mutation.SetCorrelationID(s)
- return fu
-}
-
-// SetNillableCorrelationID sets the "correlation_id" field if the given value is not nil.
-func (fu *FileUpdate) SetNillableCorrelationID(s *string) *FileUpdate {
- if s != nil {
- fu.SetCorrelationID(*s)
- }
- return fu
-}
-
-// ClearCorrelationID clears the value of the "correlation_id" field.
-func (fu *FileUpdate) ClearCorrelationID() *FileUpdate {
- fu.mutation.ClearCorrelationID()
- return fu
-}
-
// SetCategoryType sets the "category_type" field.
func (fu *FileUpdate) SetCategoryType(s string) *FileUpdate {
fu.mutation.SetCategoryType(s)
@@ -946,12 +926,6 @@ func (fu *FileUpdate) sqlSave(ctx context.Context) (n int, err error) {
if fu.mutation.StoreKeyCleared() {
_spec.ClearField(file.FieldStoreKey, field.TypeString)
}
- if value, ok := fu.mutation.CorrelationID(); ok {
- _spec.SetField(file.FieldCorrelationID, field.TypeString, value)
- }
- if fu.mutation.CorrelationIDCleared() {
- _spec.ClearField(file.FieldCorrelationID, field.TypeString)
- }
if value, ok := fu.mutation.CategoryType(); ok {
_spec.SetField(file.FieldCategoryType, field.TypeString, value)
}
@@ -1738,26 +1712,6 @@ func (fuo *FileUpdateOne) ClearStoreKey() *FileUpdateOne {
return fuo
}
-// SetCorrelationID sets the "correlation_id" field.
-func (fuo *FileUpdateOne) SetCorrelationID(s string) *FileUpdateOne {
- fuo.mutation.SetCorrelationID(s)
- return fuo
-}
-
-// SetNillableCorrelationID sets the "correlation_id" field if the given value is not nil.
-func (fuo *FileUpdateOne) SetNillableCorrelationID(s *string) *FileUpdateOne {
- if s != nil {
- fuo.SetCorrelationID(*s)
- }
- return fuo
-}
-
-// ClearCorrelationID clears the value of the "correlation_id" field.
-func (fuo *FileUpdateOne) ClearCorrelationID() *FileUpdateOne {
- fuo.mutation.ClearCorrelationID()
- return fuo
-}
-
// SetCategoryType sets the "category_type" field.
func (fuo *FileUpdateOne) SetCategoryType(s string) *FileUpdateOne {
fuo.mutation.SetCategoryType(s)
@@ -2426,12 +2380,6 @@ func (fuo *FileUpdateOne) sqlSave(ctx context.Context) (_node *File, err error)
if fuo.mutation.StoreKeyCleared() {
_spec.ClearField(file.FieldStoreKey, field.TypeString)
}
- if value, ok := fuo.mutation.CorrelationID(); ok {
- _spec.SetField(file.FieldCorrelationID, field.TypeString, value)
- }
- if fuo.mutation.CorrelationIDCleared() {
- _spec.ClearField(file.FieldCorrelationID, field.TypeString)
- }
if value, ok := fuo.mutation.CategoryType(); ok {
_spec.SetField(file.FieldCategoryType, field.TypeString, value)
}
diff --git a/internal/ent/generated/filehistory.go b/internal/ent/generated/filehistory.go
index 53d9a94..171fbef 100644
--- a/internal/ent/generated/filehistory.go
+++ b/internal/ent/generated/filehistory.go
@@ -55,11 +55,9 @@ type FileHistory struct {
Md5Hash string `json:"md5_hash,omitempty"`
// the content type of the HTTP request - may be different than MIME type as multipart-form can transmit multiple files and different types
DetectedContentType string `json:"detected_content_type,omitempty"`
- // the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ // the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
StoreKey string `json:"store_key,omitempty"`
- // the ULID provided in the http request indicating the ULID to correleate the file to
- CorrelationID string `json:"correlation_id,omitempty"`
- // the category type of the file, if any (e.g. contract, invoice, etc.)
+ // the category type of the file, if any (e.g. evidence, invoice, etc.)
CategoryType string `json:"category_type,omitempty"`
// the full URI of the file
URI string `json:"uri,omitempty"`
@@ -85,7 +83,7 @@ func (*FileHistory) scanValues(columns []string) ([]any, error) {
values[i] = new(history.OpType)
case filehistory.FieldProvidedFileSize, filehistory.FieldPersistedFileSize:
values[i] = new(sql.NullInt64)
- case filehistory.FieldID, filehistory.FieldRef, filehistory.FieldCreatedBy, filehistory.FieldUpdatedBy, filehistory.FieldDeletedBy, filehistory.FieldMappingID, filehistory.FieldProvidedFileName, filehistory.FieldProvidedFileExtension, filehistory.FieldDetectedMimeType, filehistory.FieldMd5Hash, filehistory.FieldDetectedContentType, filehistory.FieldStoreKey, filehistory.FieldCorrelationID, filehistory.FieldCategoryType, filehistory.FieldURI, filehistory.FieldStorageScheme, filehistory.FieldStorageVolume, filehistory.FieldStoragePath:
+ case filehistory.FieldID, filehistory.FieldRef, filehistory.FieldCreatedBy, filehistory.FieldUpdatedBy, filehistory.FieldDeletedBy, filehistory.FieldMappingID, filehistory.FieldProvidedFileName, filehistory.FieldProvidedFileExtension, filehistory.FieldDetectedMimeType, filehistory.FieldMd5Hash, filehistory.FieldDetectedContentType, filehistory.FieldStoreKey, filehistory.FieldCategoryType, filehistory.FieldURI, filehistory.FieldStorageScheme, filehistory.FieldStorageVolume, filehistory.FieldStoragePath:
values[i] = new(sql.NullString)
case filehistory.FieldHistoryTime, filehistory.FieldCreatedAt, filehistory.FieldUpdatedAt, filehistory.FieldDeletedAt:
values[i] = new(sql.NullTime)
@@ -226,12 +224,6 @@ func (fh *FileHistory) assignValues(columns []string, values []any) error {
} else if value.Valid {
fh.StoreKey = value.String
}
- case filehistory.FieldCorrelationID:
- if value, ok := values[i].(*sql.NullString); !ok {
- return fmt.Errorf("unexpected type %T for field correlation_id", values[i])
- } else if value.Valid {
- fh.CorrelationID = value.String
- }
case filehistory.FieldCategoryType:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field category_type", values[i])
@@ -361,9 +353,6 @@ func (fh *FileHistory) String() string {
builder.WriteString("store_key=")
builder.WriteString(fh.StoreKey)
builder.WriteString(", ")
- builder.WriteString("correlation_id=")
- builder.WriteString(fh.CorrelationID)
- builder.WriteString(", ")
builder.WriteString("category_type=")
builder.WriteString(fh.CategoryType)
builder.WriteString(", ")
diff --git a/internal/ent/generated/filehistory/filehistory.go b/internal/ent/generated/filehistory/filehistory.go
index 63ea56c..045cab3 100644
--- a/internal/ent/generated/filehistory/filehistory.go
+++ b/internal/ent/generated/filehistory/filehistory.go
@@ -54,8 +54,6 @@ const (
FieldDetectedContentType = "detected_content_type"
// FieldStoreKey holds the string denoting the store_key field in the database.
FieldStoreKey = "store_key"
- // FieldCorrelationID holds the string denoting the correlation_id field in the database.
- FieldCorrelationID = "correlation_id"
// FieldCategoryType holds the string denoting the category_type field in the database.
FieldCategoryType = "category_type"
// FieldURI holds the string denoting the uri field in the database.
@@ -94,7 +92,6 @@ var Columns = []string{
FieldMd5Hash,
FieldDetectedContentType,
FieldStoreKey,
- FieldCorrelationID,
FieldCategoryType,
FieldURI,
FieldStorageScheme,
@@ -238,11 +235,6 @@ func ByStoreKey(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldStoreKey, opts...).ToFunc()
}
-// ByCorrelationID orders the results by the correlation_id field.
-func ByCorrelationID(opts ...sql.OrderTermOption) OrderOption {
- return sql.OrderByField(FieldCorrelationID, opts...).ToFunc()
-}
-
// ByCategoryType orders the results by the category_type field.
func ByCategoryType(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldCategoryType, opts...).ToFunc()
diff --git a/internal/ent/generated/filehistory/where.go b/internal/ent/generated/filehistory/where.go
index e8195a3..e8393ea 100644
--- a/internal/ent/generated/filehistory/where.go
+++ b/internal/ent/generated/filehistory/where.go
@@ -150,11 +150,6 @@ func StoreKey(v string) predicate.FileHistory {
return predicate.FileHistory(sql.FieldEQ(FieldStoreKey, v))
}
-// CorrelationID applies equality check predicate on the "correlation_id" field. It's identical to CorrelationIDEQ.
-func CorrelationID(v string) predicate.FileHistory {
- return predicate.FileHistory(sql.FieldEQ(FieldCorrelationID, v))
-}
-
// CategoryType applies equality check predicate on the "category_type" field. It's identical to CategoryTypeEQ.
func CategoryType(v string) predicate.FileHistory {
return predicate.FileHistory(sql.FieldEQ(FieldCategoryType, v))
@@ -1290,81 +1285,6 @@ func StoreKeyContainsFold(v string) predicate.FileHistory {
return predicate.FileHistory(sql.FieldContainsFold(FieldStoreKey, v))
}
-// CorrelationIDEQ applies the EQ predicate on the "correlation_id" field.
-func CorrelationIDEQ(v string) predicate.FileHistory {
- return predicate.FileHistory(sql.FieldEQ(FieldCorrelationID, v))
-}
-
-// CorrelationIDNEQ applies the NEQ predicate on the "correlation_id" field.
-func CorrelationIDNEQ(v string) predicate.FileHistory {
- return predicate.FileHistory(sql.FieldNEQ(FieldCorrelationID, v))
-}
-
-// CorrelationIDIn applies the In predicate on the "correlation_id" field.
-func CorrelationIDIn(vs ...string) predicate.FileHistory {
- return predicate.FileHistory(sql.FieldIn(FieldCorrelationID, vs...))
-}
-
-// CorrelationIDNotIn applies the NotIn predicate on the "correlation_id" field.
-func CorrelationIDNotIn(vs ...string) predicate.FileHistory {
- return predicate.FileHistory(sql.FieldNotIn(FieldCorrelationID, vs...))
-}
-
-// CorrelationIDGT applies the GT predicate on the "correlation_id" field.
-func CorrelationIDGT(v string) predicate.FileHistory {
- return predicate.FileHistory(sql.FieldGT(FieldCorrelationID, v))
-}
-
-// CorrelationIDGTE applies the GTE predicate on the "correlation_id" field.
-func CorrelationIDGTE(v string) predicate.FileHistory {
- return predicate.FileHistory(sql.FieldGTE(FieldCorrelationID, v))
-}
-
-// CorrelationIDLT applies the LT predicate on the "correlation_id" field.
-func CorrelationIDLT(v string) predicate.FileHistory {
- return predicate.FileHistory(sql.FieldLT(FieldCorrelationID, v))
-}
-
-// CorrelationIDLTE applies the LTE predicate on the "correlation_id" field.
-func CorrelationIDLTE(v string) predicate.FileHistory {
- return predicate.FileHistory(sql.FieldLTE(FieldCorrelationID, v))
-}
-
-// CorrelationIDContains applies the Contains predicate on the "correlation_id" field.
-func CorrelationIDContains(v string) predicate.FileHistory {
- return predicate.FileHistory(sql.FieldContains(FieldCorrelationID, v))
-}
-
-// CorrelationIDHasPrefix applies the HasPrefix predicate on the "correlation_id" field.
-func CorrelationIDHasPrefix(v string) predicate.FileHistory {
- return predicate.FileHistory(sql.FieldHasPrefix(FieldCorrelationID, v))
-}
-
-// CorrelationIDHasSuffix applies the HasSuffix predicate on the "correlation_id" field.
-func CorrelationIDHasSuffix(v string) predicate.FileHistory {
- return predicate.FileHistory(sql.FieldHasSuffix(FieldCorrelationID, v))
-}
-
-// CorrelationIDIsNil applies the IsNil predicate on the "correlation_id" field.
-func CorrelationIDIsNil() predicate.FileHistory {
- return predicate.FileHistory(sql.FieldIsNull(FieldCorrelationID))
-}
-
-// CorrelationIDNotNil applies the NotNil predicate on the "correlation_id" field.
-func CorrelationIDNotNil() predicate.FileHistory {
- return predicate.FileHistory(sql.FieldNotNull(FieldCorrelationID))
-}
-
-// CorrelationIDEqualFold applies the EqualFold predicate on the "correlation_id" field.
-func CorrelationIDEqualFold(v string) predicate.FileHistory {
- return predicate.FileHistory(sql.FieldEqualFold(FieldCorrelationID, v))
-}
-
-// CorrelationIDContainsFold applies the ContainsFold predicate on the "correlation_id" field.
-func CorrelationIDContainsFold(v string) predicate.FileHistory {
- return predicate.FileHistory(sql.FieldContainsFold(FieldCorrelationID, v))
-}
-
// CategoryTypeEQ applies the EQ predicate on the "category_type" field.
func CategoryTypeEQ(v string) predicate.FileHistory {
return predicate.FileHistory(sql.FieldEQ(FieldCategoryType, v))
diff --git a/internal/ent/generated/filehistory_create.go b/internal/ent/generated/filehistory_create.go
index a187060..5afc593 100644
--- a/internal/ent/generated/filehistory_create.go
+++ b/internal/ent/generated/filehistory_create.go
@@ -247,20 +247,6 @@ func (fhc *FileHistoryCreate) SetNillableStoreKey(s *string) *FileHistoryCreate
return fhc
}
-// SetCorrelationID sets the "correlation_id" field.
-func (fhc *FileHistoryCreate) SetCorrelationID(s string) *FileHistoryCreate {
- fhc.mutation.SetCorrelationID(s)
- return fhc
-}
-
-// SetNillableCorrelationID sets the "correlation_id" field if the given value is not nil.
-func (fhc *FileHistoryCreate) SetNillableCorrelationID(s *string) *FileHistoryCreate {
- if s != nil {
- fhc.SetCorrelationID(*s)
- }
- return fhc
-}
-
// SetCategoryType sets the "category_type" field.
func (fhc *FileHistoryCreate) SetCategoryType(s string) *FileHistoryCreate {
fhc.mutation.SetCategoryType(s)
@@ -549,10 +535,6 @@ func (fhc *FileHistoryCreate) createSpec() (*FileHistory, *sqlgraph.CreateSpec)
_spec.SetField(filehistory.FieldStoreKey, field.TypeString, value)
_node.StoreKey = value
}
- if value, ok := fhc.mutation.CorrelationID(); ok {
- _spec.SetField(filehistory.FieldCorrelationID, field.TypeString, value)
- _node.CorrelationID = value
- }
if value, ok := fhc.mutation.CategoryType(); ok {
_spec.SetField(filehistory.FieldCategoryType, field.TypeString, value)
_node.CategoryType = value
diff --git a/internal/ent/generated/filehistory_update.go b/internal/ent/generated/filehistory_update.go
index 3d2b4ef..38113d1 100644
--- a/internal/ent/generated/filehistory_update.go
+++ b/internal/ent/generated/filehistory_update.go
@@ -278,26 +278,6 @@ func (fhu *FileHistoryUpdate) ClearStoreKey() *FileHistoryUpdate {
return fhu
}
-// SetCorrelationID sets the "correlation_id" field.
-func (fhu *FileHistoryUpdate) SetCorrelationID(s string) *FileHistoryUpdate {
- fhu.mutation.SetCorrelationID(s)
- return fhu
-}
-
-// SetNillableCorrelationID sets the "correlation_id" field if the given value is not nil.
-func (fhu *FileHistoryUpdate) SetNillableCorrelationID(s *string) *FileHistoryUpdate {
- if s != nil {
- fhu.SetCorrelationID(*s)
- }
- return fhu
-}
-
-// ClearCorrelationID clears the value of the "correlation_id" field.
-func (fhu *FileHistoryUpdate) ClearCorrelationID() *FileHistoryUpdate {
- fhu.mutation.ClearCorrelationID()
- return fhu
-}
-
// SetCategoryType sets the "category_type" field.
func (fhu *FileHistoryUpdate) SetCategoryType(s string) *FileHistoryUpdate {
fhu.mutation.SetCategoryType(s)
@@ -555,12 +535,6 @@ func (fhu *FileHistoryUpdate) sqlSave(ctx context.Context) (n int, err error) {
if fhu.mutation.StoreKeyCleared() {
_spec.ClearField(filehistory.FieldStoreKey, field.TypeString)
}
- if value, ok := fhu.mutation.CorrelationID(); ok {
- _spec.SetField(filehistory.FieldCorrelationID, field.TypeString, value)
- }
- if fhu.mutation.CorrelationIDCleared() {
- _spec.ClearField(filehistory.FieldCorrelationID, field.TypeString)
- }
if value, ok := fhu.mutation.CategoryType(); ok {
_spec.SetField(filehistory.FieldCategoryType, field.TypeString, value)
}
@@ -867,26 +841,6 @@ func (fhuo *FileHistoryUpdateOne) ClearStoreKey() *FileHistoryUpdateOne {
return fhuo
}
-// SetCorrelationID sets the "correlation_id" field.
-func (fhuo *FileHistoryUpdateOne) SetCorrelationID(s string) *FileHistoryUpdateOne {
- fhuo.mutation.SetCorrelationID(s)
- return fhuo
-}
-
-// SetNillableCorrelationID sets the "correlation_id" field if the given value is not nil.
-func (fhuo *FileHistoryUpdateOne) SetNillableCorrelationID(s *string) *FileHistoryUpdateOne {
- if s != nil {
- fhuo.SetCorrelationID(*s)
- }
- return fhuo
-}
-
-// ClearCorrelationID clears the value of the "correlation_id" field.
-func (fhuo *FileHistoryUpdateOne) ClearCorrelationID() *FileHistoryUpdateOne {
- fhuo.mutation.ClearCorrelationID()
- return fhuo
-}
-
// SetCategoryType sets the "category_type" field.
func (fhuo *FileHistoryUpdateOne) SetCategoryType(s string) *FileHistoryUpdateOne {
fhuo.mutation.SetCategoryType(s)
@@ -1174,12 +1128,6 @@ func (fhuo *FileHistoryUpdateOne) sqlSave(ctx context.Context) (_node *FileHisto
if fhuo.mutation.StoreKeyCleared() {
_spec.ClearField(filehistory.FieldStoreKey, field.TypeString)
}
- if value, ok := fhuo.mutation.CorrelationID(); ok {
- _spec.SetField(filehistory.FieldCorrelationID, field.TypeString, value)
- }
- if fhuo.mutation.CorrelationIDCleared() {
- _spec.ClearField(filehistory.FieldCorrelationID, field.TypeString)
- }
if value, ok := fhuo.mutation.CategoryType(); ok {
_spec.SetField(filehistory.FieldCategoryType, field.TypeString, value)
}
diff --git a/internal/ent/generated/gql_collection.go b/internal/ent/generated/gql_collection.go
index 5cb59d9..f57e956 100644
--- a/internal/ent/generated/gql_collection.go
+++ b/internal/ent/generated/gql_collection.go
@@ -3533,11 +3533,6 @@ func (f *FileQuery) collectField(ctx context.Context, oneNode bool, opCtx *graph
selectedFields = append(selectedFields, file.FieldStoreKey)
fieldSeen[file.FieldStoreKey] = struct{}{}
}
- case "correlationID":
- if _, ok := fieldSeen[file.FieldCorrelationID]; !ok {
- selectedFields = append(selectedFields, file.FieldCorrelationID)
- fieldSeen[file.FieldCorrelationID] = struct{}{}
- }
case "categoryType":
if _, ok := fieldSeen[file.FieldCategoryType]; !ok {
selectedFields = append(selectedFields, file.FieldCategoryType)
@@ -3715,11 +3710,6 @@ func (fh *FileHistoryQuery) collectField(ctx context.Context, oneNode bool, opCt
selectedFields = append(selectedFields, filehistory.FieldStoreKey)
fieldSeen[filehistory.FieldStoreKey] = struct{}{}
}
- case "correlationID":
- if _, ok := fieldSeen[filehistory.FieldCorrelationID]; !ok {
- selectedFields = append(selectedFields, filehistory.FieldCorrelationID)
- fieldSeen[filehistory.FieldCorrelationID] = struct{}{}
- }
case "categoryType":
if _, ok := fieldSeen[filehistory.FieldCategoryType]; !ok {
selectedFields = append(selectedFields, filehistory.FieldCategoryType)
diff --git a/internal/ent/generated/gql_mutation_input.go b/internal/ent/generated/gql_mutation_input.go
index 74e081b..ff5995c 100644
--- a/internal/ent/generated/gql_mutation_input.go
+++ b/internal/ent/generated/gql_mutation_input.go
@@ -1520,7 +1520,6 @@ type CreateFileInput struct {
Md5Hash *string
DetectedContentType string
StoreKey *string
- CorrelationID *string
CategoryType *string
URI *string
StorageScheme *string
@@ -1561,9 +1560,6 @@ func (i *CreateFileInput) Mutate(m *FileMutation) {
if v := i.StoreKey; v != nil {
m.SetStoreKey(*v)
}
- if v := i.CorrelationID; v != nil {
- m.SetCorrelationID(*v)
- }
if v := i.CategoryType; v != nil {
m.SetCategoryType(*v)
}
@@ -1635,8 +1631,6 @@ type UpdateFileInput struct {
DetectedContentType *string
ClearStoreKey bool
StoreKey *string
- ClearCorrelationID bool
- CorrelationID *string
ClearCategoryType bool
CategoryType *string
ClearURI bool
@@ -1729,12 +1723,6 @@ func (i *UpdateFileInput) Mutate(m *FileMutation) {
if v := i.StoreKey; v != nil {
m.SetStoreKey(*v)
}
- if i.ClearCorrelationID {
- m.ClearCorrelationID()
- }
- if v := i.CorrelationID; v != nil {
- m.SetCorrelationID(*v)
- }
if i.ClearCategoryType {
m.ClearCategoryType()
}
diff --git a/internal/ent/generated/gql_where_input.go b/internal/ent/generated/gql_where_input.go
index 8d2d161..5a18a04 100644
--- a/internal/ent/generated/gql_where_input.go
+++ b/internal/ent/generated/gql_where_input.go
@@ -15315,23 +15315,6 @@ type FileWhereInput struct {
StoreKeyEqualFold *string `json:"storeKeyEqualFold,omitempty"`
StoreKeyContainsFold *string `json:"storeKeyContainsFold,omitempty"`
- // "correlation_id" field predicates.
- CorrelationID *string `json:"correlationID,omitempty"`
- CorrelationIDNEQ *string `json:"correlationIDNEQ,omitempty"`
- CorrelationIDIn []string `json:"correlationIDIn,omitempty"`
- CorrelationIDNotIn []string `json:"correlationIDNotIn,omitempty"`
- CorrelationIDGT *string `json:"correlationIDGT,omitempty"`
- CorrelationIDGTE *string `json:"correlationIDGTE,omitempty"`
- CorrelationIDLT *string `json:"correlationIDLT,omitempty"`
- CorrelationIDLTE *string `json:"correlationIDLTE,omitempty"`
- CorrelationIDContains *string `json:"correlationIDContains,omitempty"`
- CorrelationIDHasPrefix *string `json:"correlationIDHasPrefix,omitempty"`
- CorrelationIDHasSuffix *string `json:"correlationIDHasSuffix,omitempty"`
- CorrelationIDIsNil bool `json:"correlationIDIsNil,omitempty"`
- CorrelationIDNotNil bool `json:"correlationIDNotNil,omitempty"`
- CorrelationIDEqualFold *string `json:"correlationIDEqualFold,omitempty"`
- CorrelationIDContainsFold *string `json:"correlationIDContainsFold,omitempty"`
-
// "category_type" field predicates.
CategoryType *string `json:"categoryType,omitempty"`
CategoryTypeNEQ *string `json:"categoryTypeNEQ,omitempty"`
@@ -16096,51 +16079,6 @@ func (i *FileWhereInput) P() (predicate.File, error) {
if i.StoreKeyContainsFold != nil {
predicates = append(predicates, file.StoreKeyContainsFold(*i.StoreKeyContainsFold))
}
- if i.CorrelationID != nil {
- predicates = append(predicates, file.CorrelationIDEQ(*i.CorrelationID))
- }
- if i.CorrelationIDNEQ != nil {
- predicates = append(predicates, file.CorrelationIDNEQ(*i.CorrelationIDNEQ))
- }
- if len(i.CorrelationIDIn) > 0 {
- predicates = append(predicates, file.CorrelationIDIn(i.CorrelationIDIn...))
- }
- if len(i.CorrelationIDNotIn) > 0 {
- predicates = append(predicates, file.CorrelationIDNotIn(i.CorrelationIDNotIn...))
- }
- if i.CorrelationIDGT != nil {
- predicates = append(predicates, file.CorrelationIDGT(*i.CorrelationIDGT))
- }
- if i.CorrelationIDGTE != nil {
- predicates = append(predicates, file.CorrelationIDGTE(*i.CorrelationIDGTE))
- }
- if i.CorrelationIDLT != nil {
- predicates = append(predicates, file.CorrelationIDLT(*i.CorrelationIDLT))
- }
- if i.CorrelationIDLTE != nil {
- predicates = append(predicates, file.CorrelationIDLTE(*i.CorrelationIDLTE))
- }
- if i.CorrelationIDContains != nil {
- predicates = append(predicates, file.CorrelationIDContains(*i.CorrelationIDContains))
- }
- if i.CorrelationIDHasPrefix != nil {
- predicates = append(predicates, file.CorrelationIDHasPrefix(*i.CorrelationIDHasPrefix))
- }
- if i.CorrelationIDHasSuffix != nil {
- predicates = append(predicates, file.CorrelationIDHasSuffix(*i.CorrelationIDHasSuffix))
- }
- if i.CorrelationIDIsNil {
- predicates = append(predicates, file.CorrelationIDIsNil())
- }
- if i.CorrelationIDNotNil {
- predicates = append(predicates, file.CorrelationIDNotNil())
- }
- if i.CorrelationIDEqualFold != nil {
- predicates = append(predicates, file.CorrelationIDEqualFold(*i.CorrelationIDEqualFold))
- }
- if i.CorrelationIDContainsFold != nil {
- predicates = append(predicates, file.CorrelationIDContainsFold(*i.CorrelationIDContainsFold))
- }
if i.CategoryType != nil {
predicates = append(predicates, file.CategoryTypeEQ(*i.CategoryType))
}
@@ -16816,23 +16754,6 @@ type FileHistoryWhereInput struct {
StoreKeyEqualFold *string `json:"storeKeyEqualFold,omitempty"`
StoreKeyContainsFold *string `json:"storeKeyContainsFold,omitempty"`
- // "correlation_id" field predicates.
- CorrelationID *string `json:"correlationID,omitempty"`
- CorrelationIDNEQ *string `json:"correlationIDNEQ,omitempty"`
- CorrelationIDIn []string `json:"correlationIDIn,omitempty"`
- CorrelationIDNotIn []string `json:"correlationIDNotIn,omitempty"`
- CorrelationIDGT *string `json:"correlationIDGT,omitempty"`
- CorrelationIDGTE *string `json:"correlationIDGTE,omitempty"`
- CorrelationIDLT *string `json:"correlationIDLT,omitempty"`
- CorrelationIDLTE *string `json:"correlationIDLTE,omitempty"`
- CorrelationIDContains *string `json:"correlationIDContains,omitempty"`
- CorrelationIDHasPrefix *string `json:"correlationIDHasPrefix,omitempty"`
- CorrelationIDHasSuffix *string `json:"correlationIDHasSuffix,omitempty"`
- CorrelationIDIsNil bool `json:"correlationIDIsNil,omitempty"`
- CorrelationIDNotNil bool `json:"correlationIDNotNil,omitempty"`
- CorrelationIDEqualFold *string `json:"correlationIDEqualFold,omitempty"`
- CorrelationIDContainsFold *string `json:"correlationIDContainsFold,omitempty"`
-
// "category_type" field predicates.
CategoryType *string `json:"categoryType,omitempty"`
CategoryTypeNEQ *string `json:"categoryTypeNEQ,omitempty"`
@@ -17638,51 +17559,6 @@ func (i *FileHistoryWhereInput) P() (predicate.FileHistory, error) {
if i.StoreKeyContainsFold != nil {
predicates = append(predicates, filehistory.StoreKeyContainsFold(*i.StoreKeyContainsFold))
}
- if i.CorrelationID != nil {
- predicates = append(predicates, filehistory.CorrelationIDEQ(*i.CorrelationID))
- }
- if i.CorrelationIDNEQ != nil {
- predicates = append(predicates, filehistory.CorrelationIDNEQ(*i.CorrelationIDNEQ))
- }
- if len(i.CorrelationIDIn) > 0 {
- predicates = append(predicates, filehistory.CorrelationIDIn(i.CorrelationIDIn...))
- }
- if len(i.CorrelationIDNotIn) > 0 {
- predicates = append(predicates, filehistory.CorrelationIDNotIn(i.CorrelationIDNotIn...))
- }
- if i.CorrelationIDGT != nil {
- predicates = append(predicates, filehistory.CorrelationIDGT(*i.CorrelationIDGT))
- }
- if i.CorrelationIDGTE != nil {
- predicates = append(predicates, filehistory.CorrelationIDGTE(*i.CorrelationIDGTE))
- }
- if i.CorrelationIDLT != nil {
- predicates = append(predicates, filehistory.CorrelationIDLT(*i.CorrelationIDLT))
- }
- if i.CorrelationIDLTE != nil {
- predicates = append(predicates, filehistory.CorrelationIDLTE(*i.CorrelationIDLTE))
- }
- if i.CorrelationIDContains != nil {
- predicates = append(predicates, filehistory.CorrelationIDContains(*i.CorrelationIDContains))
- }
- if i.CorrelationIDHasPrefix != nil {
- predicates = append(predicates, filehistory.CorrelationIDHasPrefix(*i.CorrelationIDHasPrefix))
- }
- if i.CorrelationIDHasSuffix != nil {
- predicates = append(predicates, filehistory.CorrelationIDHasSuffix(*i.CorrelationIDHasSuffix))
- }
- if i.CorrelationIDIsNil {
- predicates = append(predicates, filehistory.CorrelationIDIsNil())
- }
- if i.CorrelationIDNotNil {
- predicates = append(predicates, filehistory.CorrelationIDNotNil())
- }
- if i.CorrelationIDEqualFold != nil {
- predicates = append(predicates, filehistory.CorrelationIDEqualFold(*i.CorrelationIDEqualFold))
- }
- if i.CorrelationIDContainsFold != nil {
- predicates = append(predicates, filehistory.CorrelationIDContainsFold(*i.CorrelationIDContainsFold))
- }
if i.CategoryType != nil {
predicates = append(predicates, filehistory.CategoryTypeEQ(*i.CategoryType))
}
diff --git a/internal/ent/generated/history_from_mutation.go b/internal/ent/generated/history_from_mutation.go
index aff0fb2..88cda73 100644
--- a/internal/ent/generated/history_from_mutation.go
+++ b/internal/ent/generated/history_from_mutation.go
@@ -2181,10 +2181,6 @@ func (m *FileMutation) CreateHistoryFromCreate(ctx context.Context) error {
create = create.SetStoreKey(storeKey)
}
- if correlationID, exists := m.CorrelationID(); exists {
- create = create.SetCorrelationID(correlationID)
- }
-
if categoryType, exists := m.CategoryType(); exists {
create = create.SetCategoryType(categoryType)
}
@@ -2335,12 +2331,6 @@ func (m *FileMutation) CreateHistoryFromUpdate(ctx context.Context) error {
create = create.SetStoreKey(file.StoreKey)
}
- if correlationID, exists := m.CorrelationID(); exists {
- create = create.SetCorrelationID(correlationID)
- } else {
- create = create.SetCorrelationID(file.CorrelationID)
- }
-
if categoryType, exists := m.CategoryType(); exists {
create = create.SetCategoryType(categoryType)
} else {
@@ -2425,7 +2415,6 @@ func (m *FileMutation) CreateHistoryFromDelete(ctx context.Context) error {
SetMd5Hash(file.Md5Hash).
SetDetectedContentType(file.DetectedContentType).
SetStoreKey(file.StoreKey).
- SetCorrelationID(file.CorrelationID).
SetCategoryType(file.CategoryType).
SetURI(file.URI).
SetStorageScheme(file.StorageScheme).
diff --git a/internal/ent/generated/migrate/schema.go b/internal/ent/generated/migrate/schema.go
index 91708f0..edfd16c 100644
--- a/internal/ent/generated/migrate/schema.go
+++ b/internal/ent/generated/migrate/schema.go
@@ -781,7 +781,6 @@ var (
{Name: "md5_hash", Type: field.TypeString, Nullable: true},
{Name: "detected_content_type", Type: field.TypeString},
{Name: "store_key", Type: field.TypeString, Nullable: true},
- {Name: "correlation_id", Type: field.TypeString, Nullable: true},
{Name: "category_type", Type: field.TypeString, Nullable: true},
{Name: "uri", Type: field.TypeString, Nullable: true},
{Name: "storage_scheme", Type: field.TypeString, Nullable: true},
@@ -817,7 +816,6 @@ var (
{Name: "md5_hash", Type: field.TypeString, Nullable: true},
{Name: "detected_content_type", Type: field.TypeString},
{Name: "store_key", Type: field.TypeString, Nullable: true},
- {Name: "correlation_id", Type: field.TypeString, Nullable: true},
{Name: "category_type", Type: field.TypeString, Nullable: true},
{Name: "uri", Type: field.TypeString, Nullable: true},
{Name: "storage_scheme", Type: field.TypeString, Nullable: true},
diff --git a/internal/ent/generated/mutation.go b/internal/ent/generated/mutation.go
index 007a19a..234ce0a 100644
--- a/internal/ent/generated/mutation.go
+++ b/internal/ent/generated/mutation.go
@@ -29615,7 +29615,6 @@ type FileMutation struct {
md5_hash *string
detected_content_type *string
store_key *string
- correlation_id *string
category_type *string
uri *string
storage_scheme *string
@@ -30552,55 +30551,6 @@ func (m *FileMutation) ResetStoreKey() {
delete(m.clearedFields, file.FieldStoreKey)
}
-// SetCorrelationID sets the "correlation_id" field.
-func (m *FileMutation) SetCorrelationID(s string) {
- m.correlation_id = &s
-}
-
-// CorrelationID returns the value of the "correlation_id" field in the mutation.
-func (m *FileMutation) CorrelationID() (r string, exists bool) {
- v := m.correlation_id
- if v == nil {
- return
- }
- return *v, true
-}
-
-// OldCorrelationID returns the old "correlation_id" field's value of the File entity.
-// If the File object wasn't provided to the builder, the object is fetched from the database.
-// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
-func (m *FileMutation) OldCorrelationID(ctx context.Context) (v string, err error) {
- if !m.op.Is(OpUpdateOne) {
- return v, errors.New("OldCorrelationID is only allowed on UpdateOne operations")
- }
- if m.id == nil || m.oldValue == nil {
- return v, errors.New("OldCorrelationID requires an ID field in the mutation")
- }
- oldValue, err := m.oldValue(ctx)
- if err != nil {
- return v, fmt.Errorf("querying old value for OldCorrelationID: %w", err)
- }
- return oldValue.CorrelationID, nil
-}
-
-// ClearCorrelationID clears the value of the "correlation_id" field.
-func (m *FileMutation) ClearCorrelationID() {
- m.correlation_id = nil
- m.clearedFields[file.FieldCorrelationID] = struct{}{}
-}
-
-// CorrelationIDCleared returns if the "correlation_id" field was cleared in this mutation.
-func (m *FileMutation) CorrelationIDCleared() bool {
- _, ok := m.clearedFields[file.FieldCorrelationID]
- return ok
-}
-
-// ResetCorrelationID resets all changes to the "correlation_id" field.
-func (m *FileMutation) ResetCorrelationID() {
- m.correlation_id = nil
- delete(m.clearedFields, file.FieldCorrelationID)
-}
-
// SetCategoryType sets the "category_type" field.
func (m *FileMutation) SetCategoryType(s string) {
m.category_type = &s
@@ -31469,7 +31419,7 @@ func (m *FileMutation) Type() string {
// order to get all numeric fields that were incremented/decremented, call
// AddedFields().
func (m *FileMutation) Fields() []string {
- fields := make([]string, 0, 23)
+ fields := make([]string, 0, 22)
if m.created_at != nil {
fields = append(fields, file.FieldCreatedAt)
}
@@ -31518,9 +31468,6 @@ func (m *FileMutation) Fields() []string {
if m.store_key != nil {
fields = append(fields, file.FieldStoreKey)
}
- if m.correlation_id != nil {
- fields = append(fields, file.FieldCorrelationID)
- }
if m.category_type != nil {
fields = append(fields, file.FieldCategoryType)
}
@@ -31579,8 +31526,6 @@ func (m *FileMutation) Field(name string) (ent.Value, bool) {
return m.DetectedContentType()
case file.FieldStoreKey:
return m.StoreKey()
- case file.FieldCorrelationID:
- return m.CorrelationID()
case file.FieldCategoryType:
return m.CategoryType()
case file.FieldURI:
@@ -31634,8 +31579,6 @@ func (m *FileMutation) OldField(ctx context.Context, name string) (ent.Value, er
return m.OldDetectedContentType(ctx)
case file.FieldStoreKey:
return m.OldStoreKey(ctx)
- case file.FieldCorrelationID:
- return m.OldCorrelationID(ctx)
case file.FieldCategoryType:
return m.OldCategoryType(ctx)
case file.FieldURI:
@@ -31769,13 +31712,6 @@ func (m *FileMutation) SetField(name string, value ent.Value) error {
}
m.SetStoreKey(v)
return nil
- case file.FieldCorrelationID:
- v, ok := value.(string)
- if !ok {
- return fmt.Errorf("unexpected type %T for field %s", value, name)
- }
- m.SetCorrelationID(v)
- return nil
case file.FieldCategoryType:
v, ok := value.(string)
if !ok {
@@ -31911,9 +31847,6 @@ func (m *FileMutation) ClearedFields() []string {
if m.FieldCleared(file.FieldStoreKey) {
fields = append(fields, file.FieldStoreKey)
}
- if m.FieldCleared(file.FieldCorrelationID) {
- fields = append(fields, file.FieldCorrelationID)
- }
if m.FieldCleared(file.FieldCategoryType) {
fields = append(fields, file.FieldCategoryType)
}
@@ -31982,9 +31915,6 @@ func (m *FileMutation) ClearField(name string) error {
case file.FieldStoreKey:
m.ClearStoreKey()
return nil
- case file.FieldCorrelationID:
- m.ClearCorrelationID()
- return nil
case file.FieldCategoryType:
m.ClearCategoryType()
return nil
@@ -32059,9 +31989,6 @@ func (m *FileMutation) ResetField(name string) error {
case file.FieldStoreKey:
m.ResetStoreKey()
return nil
- case file.FieldCorrelationID:
- m.ResetCorrelationID()
- return nil
case file.FieldCategoryType:
m.ResetCategoryType()
return nil
@@ -32430,7 +32357,6 @@ type FileHistoryMutation struct {
md5_hash *string
detected_content_type *string
store_key *string
- correlation_id *string
category_type *string
uri *string
storage_scheme *string
@@ -33458,55 +33384,6 @@ func (m *FileHistoryMutation) ResetStoreKey() {
delete(m.clearedFields, filehistory.FieldStoreKey)
}
-// SetCorrelationID sets the "correlation_id" field.
-func (m *FileHistoryMutation) SetCorrelationID(s string) {
- m.correlation_id = &s
-}
-
-// CorrelationID returns the value of the "correlation_id" field in the mutation.
-func (m *FileHistoryMutation) CorrelationID() (r string, exists bool) {
- v := m.correlation_id
- if v == nil {
- return
- }
- return *v, true
-}
-
-// OldCorrelationID returns the old "correlation_id" field's value of the FileHistory entity.
-// If the FileHistory object wasn't provided to the builder, the object is fetched from the database.
-// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
-func (m *FileHistoryMutation) OldCorrelationID(ctx context.Context) (v string, err error) {
- if !m.op.Is(OpUpdateOne) {
- return v, errors.New("OldCorrelationID is only allowed on UpdateOne operations")
- }
- if m.id == nil || m.oldValue == nil {
- return v, errors.New("OldCorrelationID requires an ID field in the mutation")
- }
- oldValue, err := m.oldValue(ctx)
- if err != nil {
- return v, fmt.Errorf("querying old value for OldCorrelationID: %w", err)
- }
- return oldValue.CorrelationID, nil
-}
-
-// ClearCorrelationID clears the value of the "correlation_id" field.
-func (m *FileHistoryMutation) ClearCorrelationID() {
- m.correlation_id = nil
- m.clearedFields[filehistory.FieldCorrelationID] = struct{}{}
-}
-
-// CorrelationIDCleared returns if the "correlation_id" field was cleared in this mutation.
-func (m *FileHistoryMutation) CorrelationIDCleared() bool {
- _, ok := m.clearedFields[filehistory.FieldCorrelationID]
- return ok
-}
-
-// ResetCorrelationID resets all changes to the "correlation_id" field.
-func (m *FileHistoryMutation) ResetCorrelationID() {
- m.correlation_id = nil
- delete(m.clearedFields, filehistory.FieldCorrelationID)
-}
-
// SetCategoryType sets the "category_type" field.
func (m *FileHistoryMutation) SetCategoryType(s string) {
m.category_type = &s
@@ -33835,7 +33712,7 @@ func (m *FileHistoryMutation) Type() string {
// order to get all numeric fields that were incremented/decremented, call
// AddedFields().
func (m *FileHistoryMutation) Fields() []string {
- fields := make([]string, 0, 26)
+ fields := make([]string, 0, 25)
if m.history_time != nil {
fields = append(fields, filehistory.FieldHistoryTime)
}
@@ -33893,9 +33770,6 @@ func (m *FileHistoryMutation) Fields() []string {
if m.store_key != nil {
fields = append(fields, filehistory.FieldStoreKey)
}
- if m.correlation_id != nil {
- fields = append(fields, filehistory.FieldCorrelationID)
- }
if m.category_type != nil {
fields = append(fields, filehistory.FieldCategoryType)
}
@@ -33960,8 +33834,6 @@ func (m *FileHistoryMutation) Field(name string) (ent.Value, bool) {
return m.DetectedContentType()
case filehistory.FieldStoreKey:
return m.StoreKey()
- case filehistory.FieldCorrelationID:
- return m.CorrelationID()
case filehistory.FieldCategoryType:
return m.CategoryType()
case filehistory.FieldURI:
@@ -34021,8 +33893,6 @@ func (m *FileHistoryMutation) OldField(ctx context.Context, name string) (ent.Va
return m.OldDetectedContentType(ctx)
case filehistory.FieldStoreKey:
return m.OldStoreKey(ctx)
- case filehistory.FieldCorrelationID:
- return m.OldCorrelationID(ctx)
case filehistory.FieldCategoryType:
return m.OldCategoryType(ctx)
case filehistory.FieldURI:
@@ -34177,13 +34047,6 @@ func (m *FileHistoryMutation) SetField(name string, value ent.Value) error {
}
m.SetStoreKey(v)
return nil
- case filehistory.FieldCorrelationID:
- v, ok := value.(string)
- if !ok {
- return fmt.Errorf("unexpected type %T for field %s", value, name)
- }
- m.SetCorrelationID(v)
- return nil
case filehistory.FieldCategoryType:
v, ok := value.(string)
if !ok {
@@ -34322,9 +34185,6 @@ func (m *FileHistoryMutation) ClearedFields() []string {
if m.FieldCleared(filehistory.FieldStoreKey) {
fields = append(fields, filehistory.FieldStoreKey)
}
- if m.FieldCleared(filehistory.FieldCorrelationID) {
- fields = append(fields, filehistory.FieldCorrelationID)
- }
if m.FieldCleared(filehistory.FieldCategoryType) {
fields = append(fields, filehistory.FieldCategoryType)
}
@@ -34396,9 +34256,6 @@ func (m *FileHistoryMutation) ClearField(name string) error {
case filehistory.FieldStoreKey:
m.ClearStoreKey()
return nil
- case filehistory.FieldCorrelationID:
- m.ClearCorrelationID()
- return nil
case filehistory.FieldCategoryType:
m.ClearCategoryType()
return nil
@@ -34482,9 +34339,6 @@ func (m *FileHistoryMutation) ResetField(name string) error {
case filehistory.FieldStoreKey:
m.ResetStoreKey()
return nil
- case filehistory.FieldCorrelationID:
- m.ResetCorrelationID()
- return nil
case filehistory.FieldCategoryType:
m.ResetCategoryType()
return nil
diff --git a/internal/ent/schema/file.go b/internal/ent/schema/file.go
index eac0132..14dbae0 100644
--- a/internal/ent/schema/file.go
+++ b/internal/ent/schema/file.go
@@ -39,13 +39,10 @@ func (File) Fields() []ent.Field {
field.String("detected_content_type").
Comment("the content type of the HTTP request - may be different than MIME type as multipart-form can transmit multiple files and different types"),
field.String("store_key").
- Comment("the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key").
- Optional(),
- field.String("correlation_id").
- Comment("the ULID provided in the http request indicating the ULID to correleate the file to").
+ Comment("the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key").
Optional(),
field.String("category_type").
- Comment("the category type of the file, if any (e.g. contract, invoice, etc.)").
+ Comment("the category type of the file, if any (e.g. evidence, invoice, etc.)").
Optional(),
field.String("uri").
Comment("the full URI of the file").
diff --git a/internal/graphapi/apitoken_test.go b/internal/graphapi/apitoken_test.go
index 548c798..3d6975b 100644
--- a/internal/graphapi/apitoken_test.go
+++ b/internal/graphapi/apitoken_test.go
@@ -8,6 +8,7 @@ import (
"github.com/brianvoe/gofakeit/v7"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
+ "github.com/theopenlane/core/internal/middleware/objects"
"github.com/theopenlane/core/pkg/openlaneclient"
"github.com/theopenlane/iam/auth"
mock_fga "github.com/theopenlane/iam/fgax/mockery"
@@ -398,7 +399,7 @@ func (suite *GraphTestSuite) TestLastUsedAPIToken() {
BearerToken: token.Token,
}
- graphClient, err := testutils.TestClientWithAuth(t, suite.client.db, openlaneclient.WithCredentials(authHeader))
+ graphClient, err := testutils.TestClientWithAuth(t, suite.client.db, &objects.Upload{}, openlaneclient.WithCredentials(authHeader))
require.NoError(t, err)
// get the token to make sure the last used is updated using the token
diff --git a/internal/graphapi/gen_server.go b/internal/graphapi/gen_server.go
index e4059a9..9492a0b 100644
--- a/internal/graphapi/gen_server.go
+++ b/internal/graphapi/gen_server.go
@@ -875,7 +875,6 @@ type ComplexityRoot struct {
File struct {
CategoryType func(childComplexity int) int
Contact func(childComplexity int) int
- CorrelationID func(childComplexity int) int
CreatedAt func(childComplexity int) int
CreatedBy func(childComplexity int) int
DeletedAt func(childComplexity int) int
@@ -932,7 +931,6 @@ type ComplexityRoot struct {
FileHistory struct {
CategoryType func(childComplexity int) int
- CorrelationID func(childComplexity int) int
CreatedAt func(childComplexity int) int
CreatedBy func(childComplexity int) int
DeletedAt func(childComplexity int) int
@@ -1490,7 +1488,7 @@ type ComplexityRoot struct {
CreateSubscriber func(childComplexity int, input generated.CreateSubscriberInput) int
CreateTFASetting func(childComplexity int, input generated.CreateTFASettingInput) int
CreateTemplate func(childComplexity int, input generated.CreateTemplateInput) int
- CreateUser func(childComplexity int, input generated.CreateUserInput) int
+ CreateUser func(childComplexity int, input generated.CreateUserInput, avatarURL *graphql.Upload) int
CreateUserSetting func(childComplexity int, input generated.CreateUserSettingInput) int
CreateWebhook func(childComplexity int, input generated.CreateWebhookInput) int
DeleteAPIToken func(childComplexity int, id string) int
@@ -1546,7 +1544,7 @@ type ComplexityRoot struct {
UpdateSubscriber func(childComplexity int, email string, input generated.UpdateSubscriberInput) int
UpdateTFASetting func(childComplexity int, input generated.UpdateTFASettingInput) int
UpdateTemplate func(childComplexity int, id string, input generated.UpdateTemplateInput) int
- UpdateUser func(childComplexity int, id string, input generated.UpdateUserInput) int
+ UpdateUser func(childComplexity int, id string, input generated.UpdateUserInput, avatarFile *graphql.Upload) int
UpdateUserSetting func(childComplexity int, id string, input generated.UpdateUserSettingInput) int
UpdateWebhook func(childComplexity int, id string, input generated.UpdateWebhookInput) int
}
@@ -2773,8 +2771,8 @@ type MutationResolver interface {
DeleteTemplate(ctx context.Context, id string) (*TemplateDeletePayload, error)
CreateTFASetting(ctx context.Context, input generated.CreateTFASettingInput) (*TFASettingCreatePayload, error)
UpdateTFASetting(ctx context.Context, input generated.UpdateTFASettingInput) (*TFASettingUpdatePayload, error)
- CreateUser(ctx context.Context, input generated.CreateUserInput) (*UserCreatePayload, error)
- UpdateUser(ctx context.Context, id string, input generated.UpdateUserInput) (*UserUpdatePayload, error)
+ CreateUser(ctx context.Context, input generated.CreateUserInput, avatarURL *graphql.Upload) (*UserCreatePayload, error)
+ UpdateUser(ctx context.Context, id string, input generated.UpdateUserInput, avatarFile *graphql.Upload) (*UserUpdatePayload, error)
DeleteUser(ctx context.Context, id string) (*UserDeletePayload, error)
CreateUserSetting(ctx context.Context, input generated.CreateUserSettingInput) (*UserSettingCreatePayload, error)
CreateBulkUserSetting(ctx context.Context, input []*generated.CreateUserSettingInput) (*UserSettingBulkCreatePayload, error)
@@ -6372,13 +6370,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.File.Contact(childComplexity), true
- case "File.correlationID":
- if e.complexity.File.CorrelationID == nil {
- break
- }
-
- return e.complexity.File.CorrelationID(childComplexity), true
-
case "File.createdAt":
if e.complexity.File.CreatedAt == nil {
break
@@ -6645,13 +6636,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return e.complexity.FileHistory.CategoryType(childComplexity), true
- case "FileHistory.correlationID":
- if e.complexity.FileHistory.CorrelationID == nil {
- break
- }
-
- return e.complexity.FileHistory.CorrelationID(childComplexity), true
-
case "FileHistory.createdAt":
if e.complexity.FileHistory.CreatedAt == nil {
break
@@ -9596,7 +9580,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return 0, false
}
- return e.complexity.Mutation.CreateUser(childComplexity, args["input"].(generated.CreateUserInput)), true
+ return e.complexity.Mutation.CreateUser(childComplexity, args["input"].(generated.CreateUserInput), args["avatarURL"].(*graphql.Upload)), true
case "Mutation.createUserSetting":
if e.complexity.Mutation.CreateUserSetting == nil {
@@ -10268,7 +10252,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
return 0, false
}
- return e.complexity.Mutation.UpdateUser(childComplexity, args["id"].(string), args["input"].(generated.UpdateUserInput)), true
+ return e.complexity.Mutation.UpdateUser(childComplexity, args["id"].(string), args["input"].(generated.UpdateUserInput), args["avatarFile"].(*graphql.Upload)), true
case "Mutation.updateUserSetting":
if e.complexity.Mutation.UpdateUserSetting == nil {
@@ -18153,15 +18137,11 @@ input CreateFileInput {
"""
detectedContentType: String!
"""
- the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
"""
storeKey: String
"""
- the ULID provided in the http request indicating the ULID to correleate the file to
- """
- correlationID: String
- """
- the category type of the file, if any (e.g. contract, invoice, etc.)
+ the category type of the file, if any (e.g. evidence, invoice, etc.)
"""
categoryType: String
"""
@@ -23286,15 +23266,11 @@ type File implements Node {
"""
detectedContentType: String!
"""
- the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
"""
storeKey: String
"""
- the ULID provided in the http request indicating the ULID to correleate the file to
- """
- correlationID: String
- """
- the category type of the file, if any (e.g. contract, invoice, etc.)
+ the category type of the file, if any (e.g. evidence, invoice, etc.)
"""
categoryType: String
"""
@@ -23395,15 +23371,11 @@ type FileHistory implements Node {
"""
detectedContentType: String!
"""
- the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
"""
storeKey: String
"""
- the ULID provided in the http request indicating the ULID to correleate the file to
- """
- correlationID: String
- """
- the category type of the file, if any (e.g. contract, invoice, etc.)
+ the category type of the file, if any (e.g. evidence, invoice, etc.)
"""
categoryType: String
"""
@@ -23740,24 +23712,6 @@ input FileHistoryWhereInput {
storeKeyEqualFold: String
storeKeyContainsFold: String
"""
- correlation_id field predicates
- """
- correlationID: String
- correlationIDNEQ: String
- correlationIDIn: [String!]
- correlationIDNotIn: [String!]
- correlationIDGT: String
- correlationIDGTE: String
- correlationIDLT: String
- correlationIDLTE: String
- correlationIDContains: String
- correlationIDHasPrefix: String
- correlationIDHasSuffix: String
- correlationIDIsNil: Boolean
- correlationIDNotNil: Boolean
- correlationIDEqualFold: String
- correlationIDContainsFold: String
- """
category_type field predicates
"""
categoryType: String
@@ -24091,24 +24045,6 @@ input FileWhereInput {
storeKeyEqualFold: String
storeKeyContainsFold: String
"""
- correlation_id field predicates
- """
- correlationID: String
- correlationIDNEQ: String
- correlationIDIn: [String!]
- correlationIDNotIn: [String!]
- correlationIDGT: String
- correlationIDGTE: String
- correlationIDLT: String
- correlationIDLTE: String
- correlationIDContains: String
- correlationIDHasPrefix: String
- correlationIDHasSuffix: String
- correlationIDIsNil: Boolean
- correlationIDNotNil: Boolean
- correlationIDEqualFold: String
- correlationIDContainsFold: String
- """
category_type field predicates
"""
categoryType: String
@@ -33830,17 +33766,12 @@ input UpdateFileInput {
"""
detectedContentType: String
"""
- the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
"""
storeKey: String
clearStoreKey: Boolean
"""
- the ULID provided in the http request indicating the ULID to correleate the file to
- """
- correlationID: String
- clearCorrelationID: Boolean
- """
- the category type of the file, if any (e.g. contract, invoice, etc.)
+ the category type of the file, if any (e.g. evidence, invoice, etc.)
"""
categoryType: String
clearCategoryType: Boolean
@@ -39622,6 +39553,7 @@ extend type Mutation{
values of the user
"""
input: CreateUserInput!
+ avatarURL: Upload
): UserCreatePayload!
"""
Update an existing user
@@ -39635,6 +39567,7 @@ extend type Mutation{
New values for the user
"""
input: UpdateUserInput!
+ avatarFile: Upload
): UserUpdatePayload!
"""
Delete an existing user
@@ -42480,6 +42413,11 @@ func (ec *executionContext) field_Mutation_createUser_args(ctx context.Context,
return nil, err
}
args["input"] = arg0
+ arg1, err := ec.field_Mutation_createUser_argsAvatarURL(ctx, rawArgs)
+ if err != nil {
+ return nil, err
+ }
+ args["avatarURL"] = arg1
return args, nil
}
func (ec *executionContext) field_Mutation_createUser_argsInput(
@@ -42504,6 +42442,28 @@ func (ec *executionContext) field_Mutation_createUser_argsInput(
return zeroVal, nil
}
+func (ec *executionContext) field_Mutation_createUser_argsAvatarURL(
+ ctx context.Context,
+ rawArgs map[string]interface{},
+) (*graphql.Upload, error) {
+ // We won't call the directive if the argument is null.
+ // Set call_argument_directives_with_null to true to call directives
+ // even if the argument is null.
+ _, ok := rawArgs["avatarURL"]
+ if !ok {
+ var zeroVal *graphql.Upload
+ return zeroVal, nil
+ }
+
+ ctx = graphql.WithPathContext(ctx, graphql.NewPathWithField("avatarURL"))
+ if tmp, ok := rawArgs["avatarURL"]; ok {
+ return ec.unmarshalOUpload2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚐUpload(ctx, tmp)
+ }
+
+ var zeroVal *graphql.Upload
+ return zeroVal, nil
+}
+
func (ec *executionContext) field_Mutation_createWebhook_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
@@ -45006,6 +44966,11 @@ func (ec *executionContext) field_Mutation_updateUser_args(ctx context.Context,
return nil, err
}
args["input"] = arg1
+ arg2, err := ec.field_Mutation_updateUser_argsAvatarFile(ctx, rawArgs)
+ if err != nil {
+ return nil, err
+ }
+ args["avatarFile"] = arg2
return args, nil
}
func (ec *executionContext) field_Mutation_updateUser_argsID(
@@ -45052,6 +45017,28 @@ func (ec *executionContext) field_Mutation_updateUser_argsInput(
return zeroVal, nil
}
+func (ec *executionContext) field_Mutation_updateUser_argsAvatarFile(
+ ctx context.Context,
+ rawArgs map[string]interface{},
+) (*graphql.Upload, error) {
+ // We won't call the directive if the argument is null.
+ // Set call_argument_directives_with_null to true to call directives
+ // even if the argument is null.
+ _, ok := rawArgs["avatarFile"]
+ if !ok {
+ var zeroVal *graphql.Upload
+ return zeroVal, nil
+ }
+
+ ctx = graphql.WithPathContext(ctx, graphql.NewPathWithField("avatarFile"))
+ if tmp, ok := rawArgs["avatarFile"]; ok {
+ return ec.unmarshalOUpload2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚐUpload(ctx, tmp)
+ }
+
+ var zeroVal *graphql.Upload
+ return zeroVal, nil
+}
+
func (ec *executionContext) field_Mutation_updateWebhook_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
@@ -59023,8 +59010,6 @@ func (ec *executionContext) fieldContext_Contact_files(_ context.Context, field
return ec.fieldContext_File_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_File_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_File_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_File_categoryType(ctx, field)
case "uri":
@@ -61588,8 +61573,6 @@ func (ec *executionContext) fieldContext_DocumentData_files(_ context.Context, f
return ec.fieldContext_File_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_File_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_File_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_File_categoryType(ctx, field)
case "uri":
@@ -72000,8 +71983,6 @@ func (ec *executionContext) fieldContext_Entity_files(_ context.Context, field g
return ec.fieldContext_File_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_File_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_File_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_File_categoryType(ctx, field)
case "uri":
@@ -77568,8 +77549,6 @@ func (ec *executionContext) fieldContext_Event_file(_ context.Context, field gra
return ec.fieldContext_File_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_File_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_File_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_File_categoryType(ctx, field)
case "uri":
@@ -82375,47 +82354,6 @@ func (ec *executionContext) fieldContext_File_storeKey(_ context.Context, field
return fc, nil
}
-func (ec *executionContext) _File_correlationID(ctx context.Context, field graphql.CollectedField, obj *generated.File) (ret graphql.Marshaler) {
- fc, err := ec.fieldContext_File_correlationID(ctx, field)
- if err != nil {
- return graphql.Null
- }
- ctx = graphql.WithFieldContext(ctx, fc)
- defer func() {
- if r := recover(); r != nil {
- ec.Error(ctx, ec.Recover(ctx, r))
- ret = graphql.Null
- }
- }()
- resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
- ctx = rctx // use context from middleware stack in children
- return obj.CorrelationID, nil
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
- }
- res := resTmp.(string)
- fc.Result = res
- return ec.marshalOString2string(ctx, field.Selections, res)
-}
-
-func (ec *executionContext) fieldContext_File_correlationID(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
- fc = &graphql.FieldContext{
- Object: "File",
- Field: field,
- IsMethod: false,
- IsResolver: false,
- Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
- return nil, errors.New("field of type String does not have child fields")
- },
- }
- return fc, nil
-}
-
func (ec *executionContext) _File_categoryType(ctx context.Context, field graphql.CollectedField, obj *generated.File) (ret graphql.Marshaler) {
fc, err := ec.fieldContext_File_categoryType(ctx, field)
if err != nil {
@@ -83571,8 +83509,6 @@ func (ec *executionContext) fieldContext_FileBulkCreatePayload_files(_ context.C
return ec.fieldContext_File_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_File_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_File_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_File_categoryType(ctx, field)
case "uri":
@@ -83826,8 +83762,6 @@ func (ec *executionContext) fieldContext_FileCreatePayload_file(_ context.Contex
return ec.fieldContext_File_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_File_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_File_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_File_categoryType(ctx, field)
case "uri":
@@ -83977,8 +83911,6 @@ func (ec *executionContext) fieldContext_FileEdge_node(_ context.Context, field
return ec.fieldContext_File_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_File_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_File_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_File_categoryType(ctx, field)
case "uri":
@@ -84857,47 +84789,6 @@ func (ec *executionContext) fieldContext_FileHistory_storeKey(_ context.Context,
return fc, nil
}
-func (ec *executionContext) _FileHistory_correlationID(ctx context.Context, field graphql.CollectedField, obj *generated.FileHistory) (ret graphql.Marshaler) {
- fc, err := ec.fieldContext_FileHistory_correlationID(ctx, field)
- if err != nil {
- return graphql.Null
- }
- ctx = graphql.WithFieldContext(ctx, fc)
- defer func() {
- if r := recover(); r != nil {
- ec.Error(ctx, ec.Recover(ctx, r))
- ret = graphql.Null
- }
- }()
- resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
- ctx = rctx // use context from middleware stack in children
- return obj.CorrelationID, nil
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
- }
- res := resTmp.(string)
- fc.Result = res
- return ec.marshalOString2string(ctx, field.Selections, res)
-}
-
-func (ec *executionContext) fieldContext_FileHistory_correlationID(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
- fc = &graphql.FieldContext{
- Object: "FileHistory",
- Field: field,
- IsMethod: false,
- IsResolver: false,
- Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
- return nil, errors.New("field of type String does not have child fields")
- },
- }
- return fc, nil
-}
-
func (ec *executionContext) _FileHistory_categoryType(ctx context.Context, field graphql.CollectedField, obj *generated.FileHistory) (ret graphql.Marshaler) {
fc, err := ec.fieldContext_FileHistory_categoryType(ctx, field)
if err != nil {
@@ -85322,8 +85213,6 @@ func (ec *executionContext) fieldContext_FileHistoryEdge_node(_ context.Context,
return ec.fieldContext_FileHistory_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_FileHistory_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_FileHistory_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_FileHistory_categoryType(ctx, field)
case "uri":
@@ -85453,8 +85342,6 @@ func (ec *executionContext) fieldContext_FileSearchResult_files(_ context.Contex
return ec.fieldContext_File_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_File_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_File_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_File_categoryType(ctx, field)
case "uri":
@@ -85563,8 +85450,6 @@ func (ec *executionContext) fieldContext_FileUpdatePayload_file(_ context.Contex
return ec.fieldContext_File_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_File_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_File_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_File_categoryType(ctx, field)
case "uri":
@@ -86725,8 +86610,6 @@ func (ec *executionContext) fieldContext_Group_files(_ context.Context, field gr
return ec.fieldContext_File_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_File_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_File_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_File_categoryType(ctx, field)
case "uri":
@@ -106106,7 +105989,7 @@ func (ec *executionContext) _Mutation_createUser(ctx context.Context, field grap
}()
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
- return ec.resolvers.Mutation().CreateUser(rctx, fc.Args["input"].(generated.CreateUserInput))
+ return ec.resolvers.Mutation().CreateUser(rctx, fc.Args["input"].(generated.CreateUserInput), fc.Args["avatarURL"].(*graphql.Upload))
})
if err != nil {
ec.Error(ctx, err)
@@ -106165,7 +106048,7 @@ func (ec *executionContext) _Mutation_updateUser(ctx context.Context, field grap
}()
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
- return ec.resolvers.Mutation().UpdateUser(rctx, fc.Args["id"].(string), fc.Args["input"].(generated.UpdateUserInput))
+ return ec.resolvers.Mutation().UpdateUser(rctx, fc.Args["id"].(string), fc.Args["input"].(generated.UpdateUserInput), fc.Args["avatarFile"].(*graphql.Upload))
})
if err != nil {
ec.Error(ctx, err)
@@ -116867,8 +116750,6 @@ func (ec *executionContext) fieldContext_Organization_files(_ context.Context, f
return ec.fieldContext_File_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_File_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_File_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_File_categoryType(ctx, field)
case "uri":
@@ -120000,8 +119881,6 @@ func (ec *executionContext) fieldContext_OrganizationSetting_files(_ context.Con
return ec.fieldContext_File_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_File_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_File_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_File_categoryType(ctx, field)
case "uri":
@@ -129539,8 +129418,6 @@ func (ec *executionContext) fieldContext_Query_file(ctx context.Context, field g
return ec.fieldContext_File_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_File_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_File_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_File_categoryType(ctx, field)
case "uri":
@@ -136365,8 +136242,6 @@ func (ec *executionContext) fieldContext_Template_files(_ context.Context, field
return ec.fieldContext_File_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_File_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_File_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_File_categoryType(ctx, field)
case "uri":
@@ -139308,8 +139183,6 @@ func (ec *executionContext) fieldContext_User_files(_ context.Context, field gra
return ec.fieldContext_File_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_File_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_File_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_File_categoryType(ctx, field)
case "uri":
@@ -142370,8 +142243,6 @@ func (ec *executionContext) fieldContext_UserSetting_files(_ context.Context, fi
return ec.fieldContext_File_detectedContentType(ctx, field)
case "storeKey":
return ec.fieldContext_File_storeKey(ctx, field)
- case "correlationID":
- return ec.fieldContext_File_correlationID(ctx, field)
case "categoryType":
return ec.fieldContext_File_categoryType(ctx, field)
case "uri":
@@ -153545,7 +153416,7 @@ func (ec *executionContext) unmarshalInputCreateFileInput(ctx context.Context, o
asMap[k] = v
}
- fieldsInOrder := [...]string{"tags", "providedFileName", "providedFileExtension", "providedFileSize", "persistedFileSize", "detectedMimeType", "md5Hash", "detectedContentType", "storeKey", "correlationID", "categoryType", "uri", "storageScheme", "storageVolume", "storagePath", "userIDs", "organizationIDs", "groupIDs", "contactIDs", "entityIDs", "usersettingIDs", "organizationsettingIDs", "templateIDs", "documentdatumIDs", "eventIDs"}
+ fieldsInOrder := [...]string{"tags", "providedFileName", "providedFileExtension", "providedFileSize", "persistedFileSize", "detectedMimeType", "md5Hash", "detectedContentType", "storeKey", "categoryType", "uri", "storageScheme", "storageVolume", "storagePath", "userIDs", "organizationIDs", "groupIDs", "contactIDs", "entityIDs", "usersettingIDs", "organizationsettingIDs", "templateIDs", "documentdatumIDs", "eventIDs"}
for _, k := range fieldsInOrder {
v, ok := asMap[k]
if !ok {
@@ -153615,13 +153486,6 @@ func (ec *executionContext) unmarshalInputCreateFileInput(ctx context.Context, o
return it, err
}
it.StoreKey = data
- case "correlationID":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationID"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationID = data
case "categoryType":
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("categoryType"))
data, err := ec.unmarshalOString2ᚖstring(ctx, v)
@@ -173314,7 +173178,7 @@ func (ec *executionContext) unmarshalInputFileHistoryWhereInput(ctx context.Cont
asMap[k] = v
}
- fieldsInOrder := [...]string{"not", "and", "or", "id", "idNEQ", "idIn", "idNotIn", "idGT", "idGTE", "idLT", "idLTE", "idEqualFold", "idContainsFold", "historyTime", "historyTimeNEQ", "historyTimeIn", "historyTimeNotIn", "historyTimeGT", "historyTimeGTE", "historyTimeLT", "historyTimeLTE", "ref", "refNEQ", "refIn", "refNotIn", "refGT", "refGTE", "refLT", "refLTE", "refContains", "refHasPrefix", "refHasSuffix", "refIsNil", "refNotNil", "refEqualFold", "refContainsFold", "operation", "operationNEQ", "operationIn", "operationNotIn", "createdAt", "createdAtNEQ", "createdAtIn", "createdAtNotIn", "createdAtGT", "createdAtGTE", "createdAtLT", "createdAtLTE", "createdAtIsNil", "createdAtNotNil", "updatedAt", "updatedAtNEQ", "updatedAtIn", "updatedAtNotIn", "updatedAtGT", "updatedAtGTE", "updatedAtLT", "updatedAtLTE", "updatedAtIsNil", "updatedAtNotNil", "createdBy", "createdByNEQ", "createdByIn", "createdByNotIn", "createdByGT", "createdByGTE", "createdByLT", "createdByLTE", "createdByContains", "createdByHasPrefix", "createdByHasSuffix", "createdByIsNil", "createdByNotNil", "createdByEqualFold", "createdByContainsFold", "updatedBy", "updatedByNEQ", "updatedByIn", "updatedByNotIn", "updatedByGT", "updatedByGTE", "updatedByLT", "updatedByLTE", "updatedByContains", "updatedByHasPrefix", "updatedByHasSuffix", "updatedByIsNil", "updatedByNotNil", "updatedByEqualFold", "updatedByContainsFold", "deletedAt", "deletedAtNEQ", "deletedAtIn", "deletedAtNotIn", "deletedAtGT", "deletedAtGTE", "deletedAtLT", "deletedAtLTE", "deletedAtIsNil", "deletedAtNotNil", "deletedBy", "deletedByNEQ", "deletedByIn", "deletedByNotIn", "deletedByGT", "deletedByGTE", "deletedByLT", "deletedByLTE", "deletedByContains", "deletedByHasPrefix", "deletedByHasSuffix", "deletedByIsNil", "deletedByNotNil", "deletedByEqualFold", "deletedByContainsFold", "providedFileName", "providedFileNameNEQ", "providedFileNameIn", "providedFileNameNotIn", "providedFileNameGT", "providedFileNameGTE", "providedFileNameLT", 
"providedFileNameLTE", "providedFileNameContains", "providedFileNameHasPrefix", "providedFileNameHasSuffix", "providedFileNameEqualFold", "providedFileNameContainsFold", "providedFileExtension", "providedFileExtensionNEQ", "providedFileExtensionIn", "providedFileExtensionNotIn", "providedFileExtensionGT", "providedFileExtensionGTE", "providedFileExtensionLT", "providedFileExtensionLTE", "providedFileExtensionContains", "providedFileExtensionHasPrefix", "providedFileExtensionHasSuffix", "providedFileExtensionEqualFold", "providedFileExtensionContainsFold", "providedFileSize", "providedFileSizeNEQ", "providedFileSizeIn", "providedFileSizeNotIn", "providedFileSizeGT", "providedFileSizeGTE", "providedFileSizeLT", "providedFileSizeLTE", "providedFileSizeIsNil", "providedFileSizeNotNil", "persistedFileSize", "persistedFileSizeNEQ", "persistedFileSizeIn", "persistedFileSizeNotIn", "persistedFileSizeGT", "persistedFileSizeGTE", "persistedFileSizeLT", "persistedFileSizeLTE", "persistedFileSizeIsNil", "persistedFileSizeNotNil", "detectedMimeType", "detectedMimeTypeNEQ", "detectedMimeTypeIn", "detectedMimeTypeNotIn", "detectedMimeTypeGT", "detectedMimeTypeGTE", "detectedMimeTypeLT", "detectedMimeTypeLTE", "detectedMimeTypeContains", "detectedMimeTypeHasPrefix", "detectedMimeTypeHasSuffix", "detectedMimeTypeIsNil", "detectedMimeTypeNotNil", "detectedMimeTypeEqualFold", "detectedMimeTypeContainsFold", "md5Hash", "md5HashNEQ", "md5HashIn", "md5HashNotIn", "md5HashGT", "md5HashGTE", "md5HashLT", "md5HashLTE", "md5HashContains", "md5HashHasPrefix", "md5HashHasSuffix", "md5HashIsNil", "md5HashNotNil", "md5HashEqualFold", "md5HashContainsFold", "detectedContentType", "detectedContentTypeNEQ", "detectedContentTypeIn", "detectedContentTypeNotIn", "detectedContentTypeGT", "detectedContentTypeGTE", "detectedContentTypeLT", "detectedContentTypeLTE", "detectedContentTypeContains", "detectedContentTypeHasPrefix", "detectedContentTypeHasSuffix", "detectedContentTypeEqualFold", 
"detectedContentTypeContainsFold", "storeKey", "storeKeyNEQ", "storeKeyIn", "storeKeyNotIn", "storeKeyGT", "storeKeyGTE", "storeKeyLT", "storeKeyLTE", "storeKeyContains", "storeKeyHasPrefix", "storeKeyHasSuffix", "storeKeyIsNil", "storeKeyNotNil", "storeKeyEqualFold", "storeKeyContainsFold", "correlationID", "correlationIDNEQ", "correlationIDIn", "correlationIDNotIn", "correlationIDGT", "correlationIDGTE", "correlationIDLT", "correlationIDLTE", "correlationIDContains", "correlationIDHasPrefix", "correlationIDHasSuffix", "correlationIDIsNil", "correlationIDNotNil", "correlationIDEqualFold", "correlationIDContainsFold", "categoryType", "categoryTypeNEQ", "categoryTypeIn", "categoryTypeNotIn", "categoryTypeGT", "categoryTypeGTE", "categoryTypeLT", "categoryTypeLTE", "categoryTypeContains", "categoryTypeHasPrefix", "categoryTypeHasSuffix", "categoryTypeIsNil", "categoryTypeNotNil", "categoryTypeEqualFold", "categoryTypeContainsFold", "uri", "uriNEQ", "uriIn", "uriNotIn", "uriGT", "uriGTE", "uriLT", "uriLTE", "uriContains", "uriHasPrefix", "uriHasSuffix", "uriIsNil", "uriNotNil", "uriEqualFold", "uriContainsFold", "storageScheme", "storageSchemeNEQ", "storageSchemeIn", "storageSchemeNotIn", "storageSchemeGT", "storageSchemeGTE", "storageSchemeLT", "storageSchemeLTE", "storageSchemeContains", "storageSchemeHasPrefix", "storageSchemeHasSuffix", "storageSchemeIsNil", "storageSchemeNotNil", "storageSchemeEqualFold", "storageSchemeContainsFold", "storageVolume", "storageVolumeNEQ", "storageVolumeIn", "storageVolumeNotIn", "storageVolumeGT", "storageVolumeGTE", "storageVolumeLT", "storageVolumeLTE", "storageVolumeContains", "storageVolumeHasPrefix", "storageVolumeHasSuffix", "storageVolumeIsNil", "storageVolumeNotNil", "storageVolumeEqualFold", "storageVolumeContainsFold", "storagePath", "storagePathNEQ", "storagePathIn", "storagePathNotIn", "storagePathGT", "storagePathGTE", "storagePathLT", "storagePathLTE", "storagePathContains", "storagePathHasPrefix", 
"storagePathHasSuffix", "storagePathIsNil", "storagePathNotNil", "storagePathEqualFold", "storagePathContainsFold"}
+ fieldsInOrder := [...]string{"not", "and", "or", "id", "idNEQ", "idIn", "idNotIn", "idGT", "idGTE", "idLT", "idLTE", "idEqualFold", "idContainsFold", "historyTime", "historyTimeNEQ", "historyTimeIn", "historyTimeNotIn", "historyTimeGT", "historyTimeGTE", "historyTimeLT", "historyTimeLTE", "ref", "refNEQ", "refIn", "refNotIn", "refGT", "refGTE", "refLT", "refLTE", "refContains", "refHasPrefix", "refHasSuffix", "refIsNil", "refNotNil", "refEqualFold", "refContainsFold", "operation", "operationNEQ", "operationIn", "operationNotIn", "createdAt", "createdAtNEQ", "createdAtIn", "createdAtNotIn", "createdAtGT", "createdAtGTE", "createdAtLT", "createdAtLTE", "createdAtIsNil", "createdAtNotNil", "updatedAt", "updatedAtNEQ", "updatedAtIn", "updatedAtNotIn", "updatedAtGT", "updatedAtGTE", "updatedAtLT", "updatedAtLTE", "updatedAtIsNil", "updatedAtNotNil", "createdBy", "createdByNEQ", "createdByIn", "createdByNotIn", "createdByGT", "createdByGTE", "createdByLT", "createdByLTE", "createdByContains", "createdByHasPrefix", "createdByHasSuffix", "createdByIsNil", "createdByNotNil", "createdByEqualFold", "createdByContainsFold", "updatedBy", "updatedByNEQ", "updatedByIn", "updatedByNotIn", "updatedByGT", "updatedByGTE", "updatedByLT", "updatedByLTE", "updatedByContains", "updatedByHasPrefix", "updatedByHasSuffix", "updatedByIsNil", "updatedByNotNil", "updatedByEqualFold", "updatedByContainsFold", "deletedAt", "deletedAtNEQ", "deletedAtIn", "deletedAtNotIn", "deletedAtGT", "deletedAtGTE", "deletedAtLT", "deletedAtLTE", "deletedAtIsNil", "deletedAtNotNil", "deletedBy", "deletedByNEQ", "deletedByIn", "deletedByNotIn", "deletedByGT", "deletedByGTE", "deletedByLT", "deletedByLTE", "deletedByContains", "deletedByHasPrefix", "deletedByHasSuffix", "deletedByIsNil", "deletedByNotNil", "deletedByEqualFold", "deletedByContainsFold", "providedFileName", "providedFileNameNEQ", "providedFileNameIn", "providedFileNameNotIn", "providedFileNameGT", "providedFileNameGTE", "providedFileNameLT", 
"providedFileNameLTE", "providedFileNameContains", "providedFileNameHasPrefix", "providedFileNameHasSuffix", "providedFileNameEqualFold", "providedFileNameContainsFold", "providedFileExtension", "providedFileExtensionNEQ", "providedFileExtensionIn", "providedFileExtensionNotIn", "providedFileExtensionGT", "providedFileExtensionGTE", "providedFileExtensionLT", "providedFileExtensionLTE", "providedFileExtensionContains", "providedFileExtensionHasPrefix", "providedFileExtensionHasSuffix", "providedFileExtensionEqualFold", "providedFileExtensionContainsFold", "providedFileSize", "providedFileSizeNEQ", "providedFileSizeIn", "providedFileSizeNotIn", "providedFileSizeGT", "providedFileSizeGTE", "providedFileSizeLT", "providedFileSizeLTE", "providedFileSizeIsNil", "providedFileSizeNotNil", "persistedFileSize", "persistedFileSizeNEQ", "persistedFileSizeIn", "persistedFileSizeNotIn", "persistedFileSizeGT", "persistedFileSizeGTE", "persistedFileSizeLT", "persistedFileSizeLTE", "persistedFileSizeIsNil", "persistedFileSizeNotNil", "detectedMimeType", "detectedMimeTypeNEQ", "detectedMimeTypeIn", "detectedMimeTypeNotIn", "detectedMimeTypeGT", "detectedMimeTypeGTE", "detectedMimeTypeLT", "detectedMimeTypeLTE", "detectedMimeTypeContains", "detectedMimeTypeHasPrefix", "detectedMimeTypeHasSuffix", "detectedMimeTypeIsNil", "detectedMimeTypeNotNil", "detectedMimeTypeEqualFold", "detectedMimeTypeContainsFold", "md5Hash", "md5HashNEQ", "md5HashIn", "md5HashNotIn", "md5HashGT", "md5HashGTE", "md5HashLT", "md5HashLTE", "md5HashContains", "md5HashHasPrefix", "md5HashHasSuffix", "md5HashIsNil", "md5HashNotNil", "md5HashEqualFold", "md5HashContainsFold", "detectedContentType", "detectedContentTypeNEQ", "detectedContentTypeIn", "detectedContentTypeNotIn", "detectedContentTypeGT", "detectedContentTypeGTE", "detectedContentTypeLT", "detectedContentTypeLTE", "detectedContentTypeContains", "detectedContentTypeHasPrefix", "detectedContentTypeHasSuffix", "detectedContentTypeEqualFold", 
"detectedContentTypeContainsFold", "storeKey", "storeKeyNEQ", "storeKeyIn", "storeKeyNotIn", "storeKeyGT", "storeKeyGTE", "storeKeyLT", "storeKeyLTE", "storeKeyContains", "storeKeyHasPrefix", "storeKeyHasSuffix", "storeKeyIsNil", "storeKeyNotNil", "storeKeyEqualFold", "storeKeyContainsFold", "categoryType", "categoryTypeNEQ", "categoryTypeIn", "categoryTypeNotIn", "categoryTypeGT", "categoryTypeGTE", "categoryTypeLT", "categoryTypeLTE", "categoryTypeContains", "categoryTypeHasPrefix", "categoryTypeHasSuffix", "categoryTypeIsNil", "categoryTypeNotNil", "categoryTypeEqualFold", "categoryTypeContainsFold", "uri", "uriNEQ", "uriIn", "uriNotIn", "uriGT", "uriGTE", "uriLT", "uriLTE", "uriContains", "uriHasPrefix", "uriHasSuffix", "uriIsNil", "uriNotNil", "uriEqualFold", "uriContainsFold", "storageScheme", "storageSchemeNEQ", "storageSchemeIn", "storageSchemeNotIn", "storageSchemeGT", "storageSchemeGTE", "storageSchemeLT", "storageSchemeLTE", "storageSchemeContains", "storageSchemeHasPrefix", "storageSchemeHasSuffix", "storageSchemeIsNil", "storageSchemeNotNil", "storageSchemeEqualFold", "storageSchemeContainsFold", "storageVolume", "storageVolumeNEQ", "storageVolumeIn", "storageVolumeNotIn", "storageVolumeGT", "storageVolumeGTE", "storageVolumeLT", "storageVolumeLTE", "storageVolumeContains", "storageVolumeHasPrefix", "storageVolumeHasSuffix", "storageVolumeIsNil", "storageVolumeNotNil", "storageVolumeEqualFold", "storageVolumeContainsFold", "storagePath", "storagePathNEQ", "storagePathIn", "storagePathNotIn", "storagePathGT", "storagePathGTE", "storagePathLT", "storagePathLTE", "storagePathContains", "storagePathHasPrefix", "storagePathHasSuffix", "storagePathIsNil", "storagePathNotNil", "storagePathEqualFold", "storagePathContainsFold"}
for _, k := range fieldsInOrder {
v, ok := asMap[k]
if !ok {
@@ -174854,111 +174718,6 @@ func (ec *executionContext) unmarshalInputFileHistoryWhereInput(ctx context.Cont
return it, err
}
it.StoreKeyContainsFold = data
- case "correlationID":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationID"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationID = data
- case "correlationIDNEQ":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDNEQ"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDNEQ = data
- case "correlationIDIn":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDIn"))
- data, err := ec.unmarshalOString2ᚕstringᚄ(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDIn = data
- case "correlationIDNotIn":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDNotIn"))
- data, err := ec.unmarshalOString2ᚕstringᚄ(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDNotIn = data
- case "correlationIDGT":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDGT"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDGT = data
- case "correlationIDGTE":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDGTE"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDGTE = data
- case "correlationIDLT":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDLT"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDLT = data
- case "correlationIDLTE":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDLTE"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDLTE = data
- case "correlationIDContains":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDContains"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDContains = data
- case "correlationIDHasPrefix":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDHasPrefix"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDHasPrefix = data
- case "correlationIDHasSuffix":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDHasSuffix"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDHasSuffix = data
- case "correlationIDIsNil":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDIsNil"))
- data, err := ec.unmarshalOBoolean2bool(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDIsNil = data
- case "correlationIDNotNil":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDNotNil"))
- data, err := ec.unmarshalOBoolean2bool(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDNotNil = data
- case "correlationIDEqualFold":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDEqualFold"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDEqualFold = data
- case "correlationIDContainsFold":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDContainsFold"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDContainsFold = data
case "categoryType":
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("categoryType"))
data, err := ec.unmarshalOString2ᚖstring(ctx, v)
@@ -175497,7 +175256,7 @@ func (ec *executionContext) unmarshalInputFileWhereInput(ctx context.Context, ob
asMap[k] = v
}
- fieldsInOrder := [...]string{"not", "and", "or", "id", "idNEQ", "idIn", "idNotIn", "idGT", "idGTE", "idLT", "idLTE", "idEqualFold", "idContainsFold", "createdAt", "createdAtNEQ", "createdAtIn", "createdAtNotIn", "createdAtGT", "createdAtGTE", "createdAtLT", "createdAtLTE", "createdAtIsNil", "createdAtNotNil", "updatedAt", "updatedAtNEQ", "updatedAtIn", "updatedAtNotIn", "updatedAtGT", "updatedAtGTE", "updatedAtLT", "updatedAtLTE", "updatedAtIsNil", "updatedAtNotNil", "createdBy", "createdByNEQ", "createdByIn", "createdByNotIn", "createdByGT", "createdByGTE", "createdByLT", "createdByLTE", "createdByContains", "createdByHasPrefix", "createdByHasSuffix", "createdByIsNil", "createdByNotNil", "createdByEqualFold", "createdByContainsFold", "updatedBy", "updatedByNEQ", "updatedByIn", "updatedByNotIn", "updatedByGT", "updatedByGTE", "updatedByLT", "updatedByLTE", "updatedByContains", "updatedByHasPrefix", "updatedByHasSuffix", "updatedByIsNil", "updatedByNotNil", "updatedByEqualFold", "updatedByContainsFold", "deletedAt", "deletedAtNEQ", "deletedAtIn", "deletedAtNotIn", "deletedAtGT", "deletedAtGTE", "deletedAtLT", "deletedAtLTE", "deletedAtIsNil", "deletedAtNotNil", "deletedBy", "deletedByNEQ", "deletedByIn", "deletedByNotIn", "deletedByGT", "deletedByGTE", "deletedByLT", "deletedByLTE", "deletedByContains", "deletedByHasPrefix", "deletedByHasSuffix", "deletedByIsNil", "deletedByNotNil", "deletedByEqualFold", "deletedByContainsFold", "providedFileName", "providedFileNameNEQ", "providedFileNameIn", "providedFileNameNotIn", "providedFileNameGT", "providedFileNameGTE", "providedFileNameLT", "providedFileNameLTE", "providedFileNameContains", "providedFileNameHasPrefix", "providedFileNameHasSuffix", "providedFileNameEqualFold", "providedFileNameContainsFold", "providedFileExtension", "providedFileExtensionNEQ", "providedFileExtensionIn", "providedFileExtensionNotIn", "providedFileExtensionGT", "providedFileExtensionGTE", "providedFileExtensionLT", 
"providedFileExtensionLTE", "providedFileExtensionContains", "providedFileExtensionHasPrefix", "providedFileExtensionHasSuffix", "providedFileExtensionEqualFold", "providedFileExtensionContainsFold", "providedFileSize", "providedFileSizeNEQ", "providedFileSizeIn", "providedFileSizeNotIn", "providedFileSizeGT", "providedFileSizeGTE", "providedFileSizeLT", "providedFileSizeLTE", "providedFileSizeIsNil", "providedFileSizeNotNil", "persistedFileSize", "persistedFileSizeNEQ", "persistedFileSizeIn", "persistedFileSizeNotIn", "persistedFileSizeGT", "persistedFileSizeGTE", "persistedFileSizeLT", "persistedFileSizeLTE", "persistedFileSizeIsNil", "persistedFileSizeNotNil", "detectedMimeType", "detectedMimeTypeNEQ", "detectedMimeTypeIn", "detectedMimeTypeNotIn", "detectedMimeTypeGT", "detectedMimeTypeGTE", "detectedMimeTypeLT", "detectedMimeTypeLTE", "detectedMimeTypeContains", "detectedMimeTypeHasPrefix", "detectedMimeTypeHasSuffix", "detectedMimeTypeIsNil", "detectedMimeTypeNotNil", "detectedMimeTypeEqualFold", "detectedMimeTypeContainsFold", "md5Hash", "md5HashNEQ", "md5HashIn", "md5HashNotIn", "md5HashGT", "md5HashGTE", "md5HashLT", "md5HashLTE", "md5HashContains", "md5HashHasPrefix", "md5HashHasSuffix", "md5HashIsNil", "md5HashNotNil", "md5HashEqualFold", "md5HashContainsFold", "detectedContentType", "detectedContentTypeNEQ", "detectedContentTypeIn", "detectedContentTypeNotIn", "detectedContentTypeGT", "detectedContentTypeGTE", "detectedContentTypeLT", "detectedContentTypeLTE", "detectedContentTypeContains", "detectedContentTypeHasPrefix", "detectedContentTypeHasSuffix", "detectedContentTypeEqualFold", "detectedContentTypeContainsFold", "storeKey", "storeKeyNEQ", "storeKeyIn", "storeKeyNotIn", "storeKeyGT", "storeKeyGTE", "storeKeyLT", "storeKeyLTE", "storeKeyContains", "storeKeyHasPrefix", "storeKeyHasSuffix", "storeKeyIsNil", "storeKeyNotNil", "storeKeyEqualFold", "storeKeyContainsFold", "correlationID", "correlationIDNEQ", "correlationIDIn", "correlationIDNotIn", 
"correlationIDGT", "correlationIDGTE", "correlationIDLT", "correlationIDLTE", "correlationIDContains", "correlationIDHasPrefix", "correlationIDHasSuffix", "correlationIDIsNil", "correlationIDNotNil", "correlationIDEqualFold", "correlationIDContainsFold", "categoryType", "categoryTypeNEQ", "categoryTypeIn", "categoryTypeNotIn", "categoryTypeGT", "categoryTypeGTE", "categoryTypeLT", "categoryTypeLTE", "categoryTypeContains", "categoryTypeHasPrefix", "categoryTypeHasSuffix", "categoryTypeIsNil", "categoryTypeNotNil", "categoryTypeEqualFold", "categoryTypeContainsFold", "uri", "uriNEQ", "uriIn", "uriNotIn", "uriGT", "uriGTE", "uriLT", "uriLTE", "uriContains", "uriHasPrefix", "uriHasSuffix", "uriIsNil", "uriNotNil", "uriEqualFold", "uriContainsFold", "storageScheme", "storageSchemeNEQ", "storageSchemeIn", "storageSchemeNotIn", "storageSchemeGT", "storageSchemeGTE", "storageSchemeLT", "storageSchemeLTE", "storageSchemeContains", "storageSchemeHasPrefix", "storageSchemeHasSuffix", "storageSchemeIsNil", "storageSchemeNotNil", "storageSchemeEqualFold", "storageSchemeContainsFold", "storageVolume", "storageVolumeNEQ", "storageVolumeIn", "storageVolumeNotIn", "storageVolumeGT", "storageVolumeGTE", "storageVolumeLT", "storageVolumeLTE", "storageVolumeContains", "storageVolumeHasPrefix", "storageVolumeHasSuffix", "storageVolumeIsNil", "storageVolumeNotNil", "storageVolumeEqualFold", "storageVolumeContainsFold", "storagePath", "storagePathNEQ", "storagePathIn", "storagePathNotIn", "storagePathGT", "storagePathGTE", "storagePathLT", "storagePathLTE", "storagePathContains", "storagePathHasPrefix", "storagePathHasSuffix", "storagePathIsNil", "storagePathNotNil", "storagePathEqualFold", "storagePathContainsFold", "hasUser", "hasUserWith", "hasOrganization", "hasOrganizationWith", "hasGroup", "hasGroupWith", "hasContact", "hasContactWith", "hasEntity", "hasEntityWith", "hasUsersetting", "hasUsersettingWith", "hasOrganizationsetting", "hasOrganizationsettingWith", "hasTemplate", 
"hasTemplateWith", "hasDocumentdata", "hasDocumentdataWith", "hasEvents", "hasEventsWith"}
+ fieldsInOrder := [...]string{"not", "and", "or", "id", "idNEQ", "idIn", "idNotIn", "idGT", "idGTE", "idLT", "idLTE", "idEqualFold", "idContainsFold", "createdAt", "createdAtNEQ", "createdAtIn", "createdAtNotIn", "createdAtGT", "createdAtGTE", "createdAtLT", "createdAtLTE", "createdAtIsNil", "createdAtNotNil", "updatedAt", "updatedAtNEQ", "updatedAtIn", "updatedAtNotIn", "updatedAtGT", "updatedAtGTE", "updatedAtLT", "updatedAtLTE", "updatedAtIsNil", "updatedAtNotNil", "createdBy", "createdByNEQ", "createdByIn", "createdByNotIn", "createdByGT", "createdByGTE", "createdByLT", "createdByLTE", "createdByContains", "createdByHasPrefix", "createdByHasSuffix", "createdByIsNil", "createdByNotNil", "createdByEqualFold", "createdByContainsFold", "updatedBy", "updatedByNEQ", "updatedByIn", "updatedByNotIn", "updatedByGT", "updatedByGTE", "updatedByLT", "updatedByLTE", "updatedByContains", "updatedByHasPrefix", "updatedByHasSuffix", "updatedByIsNil", "updatedByNotNil", "updatedByEqualFold", "updatedByContainsFold", "deletedAt", "deletedAtNEQ", "deletedAtIn", "deletedAtNotIn", "deletedAtGT", "deletedAtGTE", "deletedAtLT", "deletedAtLTE", "deletedAtIsNil", "deletedAtNotNil", "deletedBy", "deletedByNEQ", "deletedByIn", "deletedByNotIn", "deletedByGT", "deletedByGTE", "deletedByLT", "deletedByLTE", "deletedByContains", "deletedByHasPrefix", "deletedByHasSuffix", "deletedByIsNil", "deletedByNotNil", "deletedByEqualFold", "deletedByContainsFold", "providedFileName", "providedFileNameNEQ", "providedFileNameIn", "providedFileNameNotIn", "providedFileNameGT", "providedFileNameGTE", "providedFileNameLT", "providedFileNameLTE", "providedFileNameContains", "providedFileNameHasPrefix", "providedFileNameHasSuffix", "providedFileNameEqualFold", "providedFileNameContainsFold", "providedFileExtension", "providedFileExtensionNEQ", "providedFileExtensionIn", "providedFileExtensionNotIn", "providedFileExtensionGT", "providedFileExtensionGTE", "providedFileExtensionLT", 
"providedFileExtensionLTE", "providedFileExtensionContains", "providedFileExtensionHasPrefix", "providedFileExtensionHasSuffix", "providedFileExtensionEqualFold", "providedFileExtensionContainsFold", "providedFileSize", "providedFileSizeNEQ", "providedFileSizeIn", "providedFileSizeNotIn", "providedFileSizeGT", "providedFileSizeGTE", "providedFileSizeLT", "providedFileSizeLTE", "providedFileSizeIsNil", "providedFileSizeNotNil", "persistedFileSize", "persistedFileSizeNEQ", "persistedFileSizeIn", "persistedFileSizeNotIn", "persistedFileSizeGT", "persistedFileSizeGTE", "persistedFileSizeLT", "persistedFileSizeLTE", "persistedFileSizeIsNil", "persistedFileSizeNotNil", "detectedMimeType", "detectedMimeTypeNEQ", "detectedMimeTypeIn", "detectedMimeTypeNotIn", "detectedMimeTypeGT", "detectedMimeTypeGTE", "detectedMimeTypeLT", "detectedMimeTypeLTE", "detectedMimeTypeContains", "detectedMimeTypeHasPrefix", "detectedMimeTypeHasSuffix", "detectedMimeTypeIsNil", "detectedMimeTypeNotNil", "detectedMimeTypeEqualFold", "detectedMimeTypeContainsFold", "md5Hash", "md5HashNEQ", "md5HashIn", "md5HashNotIn", "md5HashGT", "md5HashGTE", "md5HashLT", "md5HashLTE", "md5HashContains", "md5HashHasPrefix", "md5HashHasSuffix", "md5HashIsNil", "md5HashNotNil", "md5HashEqualFold", "md5HashContainsFold", "detectedContentType", "detectedContentTypeNEQ", "detectedContentTypeIn", "detectedContentTypeNotIn", "detectedContentTypeGT", "detectedContentTypeGTE", "detectedContentTypeLT", "detectedContentTypeLTE", "detectedContentTypeContains", "detectedContentTypeHasPrefix", "detectedContentTypeHasSuffix", "detectedContentTypeEqualFold", "detectedContentTypeContainsFold", "storeKey", "storeKeyNEQ", "storeKeyIn", "storeKeyNotIn", "storeKeyGT", "storeKeyGTE", "storeKeyLT", "storeKeyLTE", "storeKeyContains", "storeKeyHasPrefix", "storeKeyHasSuffix", "storeKeyIsNil", "storeKeyNotNil", "storeKeyEqualFold", "storeKeyContainsFold", "categoryType", "categoryTypeNEQ", "categoryTypeIn", "categoryTypeNotIn", 
"categoryTypeGT", "categoryTypeGTE", "categoryTypeLT", "categoryTypeLTE", "categoryTypeContains", "categoryTypeHasPrefix", "categoryTypeHasSuffix", "categoryTypeIsNil", "categoryTypeNotNil", "categoryTypeEqualFold", "categoryTypeContainsFold", "uri", "uriNEQ", "uriIn", "uriNotIn", "uriGT", "uriGTE", "uriLT", "uriLTE", "uriContains", "uriHasPrefix", "uriHasSuffix", "uriIsNil", "uriNotNil", "uriEqualFold", "uriContainsFold", "storageScheme", "storageSchemeNEQ", "storageSchemeIn", "storageSchemeNotIn", "storageSchemeGT", "storageSchemeGTE", "storageSchemeLT", "storageSchemeLTE", "storageSchemeContains", "storageSchemeHasPrefix", "storageSchemeHasSuffix", "storageSchemeIsNil", "storageSchemeNotNil", "storageSchemeEqualFold", "storageSchemeContainsFold", "storageVolume", "storageVolumeNEQ", "storageVolumeIn", "storageVolumeNotIn", "storageVolumeGT", "storageVolumeGTE", "storageVolumeLT", "storageVolumeLTE", "storageVolumeContains", "storageVolumeHasPrefix", "storageVolumeHasSuffix", "storageVolumeIsNil", "storageVolumeNotNil", "storageVolumeEqualFold", "storageVolumeContainsFold", "storagePath", "storagePathNEQ", "storagePathIn", "storagePathNotIn", "storagePathGT", "storagePathGTE", "storagePathLT", "storagePathLTE", "storagePathContains", "storagePathHasPrefix", "storagePathHasSuffix", "storagePathIsNil", "storagePathNotNil", "storagePathEqualFold", "storagePathContainsFold", "hasUser", "hasUserWith", "hasOrganization", "hasOrganizationWith", "hasGroup", "hasGroupWith", "hasContact", "hasContactWith", "hasEntity", "hasEntityWith", "hasUsersetting", "hasUsersettingWith", "hasOrganizationsetting", "hasOrganizationsettingWith", "hasTemplate", "hasTemplateWith", "hasDocumentdata", "hasDocumentdataWith", "hasEvents", "hasEventsWith"}
for _, k := range fieldsInOrder {
v, ok := asMap[k]
if !ok {
@@ -176848,111 +176607,6 @@ func (ec *executionContext) unmarshalInputFileWhereInput(ctx context.Context, ob
return it, err
}
it.StoreKeyContainsFold = data
- case "correlationID":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationID"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationID = data
- case "correlationIDNEQ":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDNEQ"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDNEQ = data
- case "correlationIDIn":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDIn"))
- data, err := ec.unmarshalOString2ᚕstringᚄ(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDIn = data
- case "correlationIDNotIn":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDNotIn"))
- data, err := ec.unmarshalOString2ᚕstringᚄ(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDNotIn = data
- case "correlationIDGT":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDGT"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDGT = data
- case "correlationIDGTE":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDGTE"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDGTE = data
- case "correlationIDLT":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDLT"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDLT = data
- case "correlationIDLTE":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDLTE"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDLTE = data
- case "correlationIDContains":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDContains"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDContains = data
- case "correlationIDHasPrefix":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDHasPrefix"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDHasPrefix = data
- case "correlationIDHasSuffix":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDHasSuffix"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDHasSuffix = data
- case "correlationIDIsNil":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDIsNil"))
- data, err := ec.unmarshalOBoolean2bool(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDIsNil = data
- case "correlationIDNotNil":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDNotNil"))
- data, err := ec.unmarshalOBoolean2bool(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDNotNil = data
- case "correlationIDEqualFold":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDEqualFold"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDEqualFold = data
- case "correlationIDContainsFold":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationIDContainsFold"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationIDContainsFold = data
case "categoryType":
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("categoryType"))
data, err := ec.unmarshalOString2ᚖstring(ctx, v)
@@ -208678,7 +208332,7 @@ func (ec *executionContext) unmarshalInputUpdateFileInput(ctx context.Context, o
asMap[k] = v
}
- fieldsInOrder := [...]string{"tags", "appendTags", "clearTags", "providedFileName", "providedFileExtension", "providedFileSize", "clearProvidedFileSize", "persistedFileSize", "clearPersistedFileSize", "detectedMimeType", "clearDetectedMimeType", "md5Hash", "clearMd5Hash", "detectedContentType", "storeKey", "clearStoreKey", "correlationID", "clearCorrelationID", "categoryType", "clearCategoryType", "uri", "clearURI", "storageScheme", "clearStorageScheme", "storageVolume", "clearStorageVolume", "storagePath", "clearStoragePath", "addUserIDs", "removeUserIDs", "clearUser", "addOrganizationIDs", "removeOrganizationIDs", "clearOrganization", "addGroupIDs", "removeGroupIDs", "clearGroup", "addContactIDs", "removeContactIDs", "clearContact", "addEntityIDs", "removeEntityIDs", "clearEntity", "addUsersettingIDs", "removeUsersettingIDs", "clearUsersetting", "addOrganizationsettingIDs", "removeOrganizationsettingIDs", "clearOrganizationsetting", "addTemplateIDs", "removeTemplateIDs", "clearTemplate", "addDocumentdatumIDs", "removeDocumentdatumIDs", "clearDocumentdata", "addEventIDs", "removeEventIDs", "clearEvents"}
+ fieldsInOrder := [...]string{"tags", "appendTags", "clearTags", "providedFileName", "providedFileExtension", "providedFileSize", "clearProvidedFileSize", "persistedFileSize", "clearPersistedFileSize", "detectedMimeType", "clearDetectedMimeType", "md5Hash", "clearMd5Hash", "detectedContentType", "storeKey", "clearStoreKey", "categoryType", "clearCategoryType", "uri", "clearURI", "storageScheme", "clearStorageScheme", "storageVolume", "clearStorageVolume", "storagePath", "clearStoragePath", "addUserIDs", "removeUserIDs", "clearUser", "addOrganizationIDs", "removeOrganizationIDs", "clearOrganization", "addGroupIDs", "removeGroupIDs", "clearGroup", "addContactIDs", "removeContactIDs", "clearContact", "addEntityIDs", "removeEntityIDs", "clearEntity", "addUsersettingIDs", "removeUsersettingIDs", "clearUsersetting", "addOrganizationsettingIDs", "removeOrganizationsettingIDs", "clearOrganizationsetting", "addTemplateIDs", "removeTemplateIDs", "clearTemplate", "addDocumentdatumIDs", "removeDocumentdatumIDs", "clearDocumentdata", "addEventIDs", "removeEventIDs", "clearEvents"}
for _, k := range fieldsInOrder {
v, ok := asMap[k]
if !ok {
@@ -208797,20 +208451,6 @@ func (ec *executionContext) unmarshalInputUpdateFileInput(ctx context.Context, o
return it, err
}
it.ClearStoreKey = data
- case "correlationID":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("correlationID"))
- data, err := ec.unmarshalOString2ᚖstring(ctx, v)
- if err != nil {
- return it, err
- }
- it.CorrelationID = data
- case "clearCorrelationID":
- ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("clearCorrelationID"))
- data, err := ec.unmarshalOBoolean2bool(ctx, v)
- if err != nil {
- return it, err
- }
- it.ClearCorrelationID = data
case "categoryType":
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("categoryType"))
data, err := ec.unmarshalOString2ᚖstring(ctx, v)
@@ -227978,8 +227618,6 @@ func (ec *executionContext) _File(ctx context.Context, sel ast.SelectionSet, obj
}
case "storeKey":
out.Values[i] = ec._File_storeKey(ctx, field, obj)
- case "correlationID":
- out.Values[i] = ec._File_correlationID(ctx, field, obj)
case "categoryType":
out.Values[i] = ec._File_categoryType(ctx, field, obj)
case "uri":
@@ -228611,8 +228249,6 @@ func (ec *executionContext) _FileHistory(ctx context.Context, sel ast.SelectionS
}
case "storeKey":
out.Values[i] = ec._FileHistory_storeKey(ctx, field, obj)
- case "correlationID":
- out.Values[i] = ec._FileHistory_correlationID(ctx, field, obj)
case "categoryType":
out.Values[i] = ec._FileHistory_categoryType(ctx, field, obj)
case "uri":
@@ -258946,6 +258582,22 @@ func (ec *executionContext) unmarshalOUpdateOrganizationSettingInput2ᚖgithub
return &res, graphql.ErrorOnPath(ctx, err)
}
+func (ec *executionContext) unmarshalOUpload2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚐUpload(ctx context.Context, v interface{}) (*graphql.Upload, error) {
+ if v == nil {
+ return nil, nil
+ }
+ res, err := graphql.UnmarshalUpload(v)
+ return &res, graphql.ErrorOnPath(ctx, err)
+}
+
+func (ec *executionContext) marshalOUpload2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚐUpload(ctx context.Context, sel ast.SelectionSet, v *graphql.Upload) graphql.Marshaler {
+ if v == nil {
+ return graphql.Null
+ }
+ res := graphql.MarshalUpload(*v)
+ return res
+}
+
func (ec *executionContext) marshalOUser2ᚕᚖgithubᚗcomᚋtheopenlaneᚋcoreᚋinternalᚋentᚋgeneratedᚐUserᚄ(ctx context.Context, sel ast.SelectionSet, v []*generated.User) graphql.Marshaler {
if v == nil {
return graphql.Null
diff --git a/internal/graphapi/helpers.go b/internal/graphapi/helpers.go
index e0c9687..53cc2b2 100644
--- a/internal/graphapi/helpers.go
+++ b/internal/graphapi/helpers.go
@@ -9,6 +9,7 @@ import (
"github.com/gocarina/gocsv"
ent "github.com/theopenlane/core/internal/ent/generated"
+ "github.com/theopenlane/core/internal/middleware/objects"
"github.com/theopenlane/core/pkg/events/soiree"
"github.com/theopenlane/echox/middleware/echocontext"
"github.com/theopenlane/iam/auth"
@@ -37,6 +38,53 @@ func injectClient(db *ent.Client) graphql.OperationMiddleware {
}
}
+// injectFileUploader adds a file-uploader middleware to the graphql operation;
+// it uploads files to the storage backend, records each file in the file schema,
+// and attaches the uploaded files to the echo context
+func injectFileUploader(u *objects.Upload) graphql.OperationMiddleware {
+ return func(ctx context.Context, next graphql.OperationHandler) graphql.ResponseHandler {
+ op := graphql.GetOperationContext(ctx)
+
+ // collect the uploads from the operation variables; gqlgen has already parsed
+ // each graphql.Upload into a struct with the file data (NOTE(review): only top-level Upload variables are detected here — uploads nested in lists or input objects are skipped; confirm this is intended)
+ uploads := []objects.FileUpload{}
+ for _, v := range op.Variables {
+ up, ok := v.(graphql.Upload)
+ if ok {
+ fileUpload := objects.FileUpload{
+ File: up.File,
+ Filename: up.Filename,
+ Size: up.Size,
+ ContentType: up.ContentType,
+ }
+
+ uploads = append(uploads, fileUpload)
+ }
+ }
+
+ // nothing to upload — continue with the normal handler chain
+ if len(uploads) == 0 {
+ return next(ctx)
+ }
+
+ // handle the file uploads
+ ctx, err := u.FileUpload(ctx, uploads)
+ if err != nil {
+ return errorResponse(err)
+ }
+
+ // add the uploaded files to the echo context if there are any
+ // this is useful for using other middleware that depends on the echo context
+ // and the uploaded files (e.g. body dump middleware)
+ ec, err := echocontext.EchoContextFromContext(ctx)
+ if err == nil {
+ ec.SetRequest(ec.Request().WithContext(ctx))
+ }
+
+ return next(ctx)
+ }
+}
+
// withPool returns the existing pool or creates a new one if it does not exist
func (r *queryResolver) withPool() *soiree.PondPool {
if r.pool != nil {
diff --git a/internal/graphapi/personalaccesstoken_test.go b/internal/graphapi/personalaccesstoken_test.go
index 1438fc5..7ce9c20 100644
--- a/internal/graphapi/personalaccesstoken_test.go
+++ b/internal/graphapi/personalaccesstoken_test.go
@@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require"
mock_fga "github.com/theopenlane/iam/fgax/mockery"
+ "github.com/theopenlane/core/internal/middleware/objects"
"github.com/theopenlane/core/pkg/openlaneclient"
"github.com/theopenlane/core/pkg/testutils"
@@ -441,7 +442,7 @@ func (suite *GraphTestSuite) TestLastUsedPersonalAccessToken() {
BearerToken: token.Token,
}
- graphClient, err := testutils.TestClientWithAuth(t, suite.client.db, openlaneclient.WithCredentials(authHeader))
+ graphClient, err := testutils.TestClientWithAuth(t, suite.client.db, &objects.Upload{}, openlaneclient.WithCredentials(authHeader))
require.NoError(t, err)
// get the token to make sure the last used is updated using the token
diff --git a/internal/graphapi/resolver.go b/internal/graphapi/resolver.go
index 5893275..45c65ac 100644
--- a/internal/graphapi/resolver.go
+++ b/internal/graphapi/resolver.go
@@ -22,6 +22,7 @@ import (
"github.com/wundergraph/graphql-go-tools/pkg/playground"
ent "github.com/theopenlane/core/internal/ent/generated"
+ "github.com/theopenlane/core/internal/middleware/objects"
"github.com/theopenlane/core/pkg/events/soiree"
)
@@ -48,12 +49,14 @@ type Resolver struct {
db *ent.Client
pool *soiree.PondPool
extensionsEnabled bool
+ uploader *objects.Upload
}
// NewResolver returns a resolver configured with the given ent client
-func NewResolver(db *ent.Client) *Resolver {
+func NewResolver(db *ent.Client, u *objects.Upload) *Resolver {
return &Resolver{
- db: db,
+ db: db,
+ uploader: u,
}
}
@@ -92,7 +95,10 @@ func (r *Resolver) Handler(withPlayground bool) *Handler {
srv.AddTransport(transport.Options{})
srv.AddTransport(transport.GET{})
srv.AddTransport(transport.POST{})
- srv.AddTransport(transport.MultipartForm{})
+ srv.AddTransport(transport.MultipartForm{
+ MaxUploadSize: r.uploader.ObjectStorage.MaxSize,
+ MaxMemory: r.uploader.ObjectStorage.MaxMemory,
+ })
srv.SetQueryCache(lru.New[*ast.QueryDocument](1000)) //nolint:mnd
@@ -112,6 +118,11 @@ func (r *Resolver) Handler(withPlayground bool) *Handler {
AddAllExtensions(srv)
}
+ // add file uploader if it is configured (NOTE(review): r.uploader is already dereferenced above for the MultipartForm limits — confirm it can never be nil, or guard that access as well)
+ if r.uploader != nil {
+ WithFileUploader(srv, r.uploader)
+ }
+
srv.Use(otelgqlgen.Middleware())
h := &Handler{
@@ -134,9 +145,14 @@ func (r *Resolver) Handler(withPlayground bool) *Handler {
func WithTransactions(h *handler.Server, d *ent.Client) {
// setup transactional db client
h.AroundOperations(injectClient(d))
+
h.Use(entgql.Transactioner{TxOpener: d})
}
+func WithFileUploader(h *handler.Server, u *objects.Upload) {
+ h.AroundOperations(injectFileUploader(u))
+}
+
// WithContextLevelCache adds a context level cache to the handler
func WithContextLevelCache(h *handler.Server) {
h.AroundResponses(func(ctx context.Context, next graphql.ResponseHandler) *graphql.Response {
diff --git a/internal/graphapi/responses.go b/internal/graphapi/responses.go
new file mode 100644
index 0000000..6f37ab8
--- /dev/null
+++ b/internal/graphapi/responses.go
@@ -0,0 +1,21 @@
+package graphapi
+
+import (
+ "context"
+
+ "github.com/99designs/gqlgen/graphql"
+ "github.com/vektah/gqlparser/v2/gqlerror"
+)
+
+// errorResponse returns a response handler that produces a graphql response containing the given error as its single error entry
+func errorResponse(err error) func(ctx context.Context) *graphql.Response {
+ return func(ctx context.Context) *graphql.Response {
+ return &graphql.Response{
+ Errors: gqlerror.List{
+ {
+ Message: err.Error(),
+ },
+ },
+ }
+ }
+}
diff --git a/internal/graphapi/search.go b/internal/graphapi/search.go
index 0a9dc1e..4b8bdae 100644
--- a/internal/graphapi/search.go
+++ b/internal/graphapi/search.go
@@ -415,7 +415,6 @@ func adminSearchFiles(ctx context.Context, query string) ([]*generated.File, err
file.Md5HashContainsFold(query), // search by Md5Hash
file.DetectedContentTypeContainsFold(query), // search by DetectedContentType
file.StoreKeyContainsFold(query), // search by StoreKey
- file.CorrelationIDContainsFold(query), // search by CorrelationID
file.CategoryTypeContainsFold(query), // search by CategoryType
file.URIContainsFold(query), // search by URI
file.StorageSchemeContainsFold(query), // search by StorageScheme
diff --git a/internal/graphapi/tools_test.go b/internal/graphapi/tools_test.go
index 8fa0e50..823d1bb 100644
--- a/internal/graphapi/tools_test.go
+++ b/internal/graphapi/tools_test.go
@@ -18,6 +18,7 @@ import (
"github.com/theopenlane/core/internal/ent/entconfig"
ent "github.com/theopenlane/core/internal/ent/generated"
"github.com/theopenlane/core/internal/entdb"
+ "github.com/theopenlane/core/internal/middleware/objects"
"github.com/theopenlane/core/pkg/openlaneclient"
coreutils "github.com/theopenlane/core/pkg/testutils"
"github.com/theopenlane/echox/middleware/echocontext"
@@ -126,7 +127,7 @@ func (suite *GraphTestSuite) SetupTest() {
// assign values
c.db = db
- c.api, err = coreutils.TestClient(t, c.db)
+ c.api, err = coreutils.TestClient(t, c.db, &objects.Upload{})
require.NoError(t, err)
// create test user
@@ -153,7 +154,7 @@ func (suite *GraphTestSuite) SetupTest() {
BearerToken: pat.Token,
}
- c.apiWithPAT, err = coreutils.TestClientWithAuth(t, c.db, openlaneclient.WithCredentials(authHeaderPAT))
+ c.apiWithPAT, err = coreutils.TestClientWithAuth(t, c.db, &objects.Upload{}, openlaneclient.WithCredentials(authHeaderPAT))
require.NoError(t, err)
// setup client with an API token
@@ -162,7 +163,7 @@ func (suite *GraphTestSuite) SetupTest() {
authHeaderAPIToken := openlaneclient.Authorization{
BearerToken: apiToken.Token,
}
- c.apiWithToken, err = coreutils.TestClientWithAuth(t, c.db, openlaneclient.WithCredentials(authHeaderAPIToken))
+ c.apiWithToken, err = coreutils.TestClientWithAuth(t, c.db, &objects.Upload{}, openlaneclient.WithCredentials(authHeaderAPIToken))
require.NoError(t, err)
suite.client = c
diff --git a/internal/graphapi/user.resolvers.go b/internal/graphapi/user.resolvers.go
index 1808613..b5f955d 100644
--- a/internal/graphapi/user.resolvers.go
+++ b/internal/graphapi/user.resolvers.go
@@ -8,20 +8,21 @@ import (
"context"
"errors"
+ "github.com/99designs/gqlgen/graphql"
"github.com/theopenlane/core/internal/ent/generated"
_ "github.com/theopenlane/core/internal/ent/generated/runtime"
"github.com/theopenlane/iam/auth"
)
// CreateUser is the resolver for the createUser field.
-func (r *mutationResolver) CreateUser(ctx context.Context, input generated.CreateUserInput) (*UserCreatePayload, error) {
+func (r *mutationResolver) CreateUser(ctx context.Context, input generated.CreateUserInput, avatarURL *graphql.Upload) (*UserCreatePayload, error) {
// TODO: look at allowing this resolver to invite the user instead of creating them directly
// for now, return permission denied
return nil, ErrPermissionDenied
}
// UpdateUser is the resolver for the updateUser field.
-func (r *mutationResolver) UpdateUser(ctx context.Context, id string, input generated.UpdateUserInput) (*UserUpdatePayload, error) {
+func (r *mutationResolver) UpdateUser(ctx context.Context, id string, input generated.UpdateUserInput, avatarFile *graphql.Upload) (*UserUpdatePayload, error) {
user, err := withTransactionalMutation(ctx).User.Get(ctx, id)
if err != nil {
return nil, parseRequestError(err, action{action: ActionUpdate, object: "user"})
diff --git a/internal/httpserve/handlers/tools_test.go b/internal/httpserve/handlers/tools_test.go
index c36178e..08ddd8e 100644
--- a/internal/httpserve/handlers/tools_test.go
+++ b/internal/httpserve/handlers/tools_test.go
@@ -24,6 +24,7 @@ import (
"github.com/theopenlane/core/internal/entdb"
"github.com/theopenlane/core/internal/httpserve/authmanager"
"github.com/theopenlane/core/internal/httpserve/handlers"
+ "github.com/theopenlane/core/internal/middleware/objects"
"github.com/theopenlane/core/pkg/middleware/transaction"
"github.com/theopenlane/core/pkg/openlaneclient"
coreutils "github.com/theopenlane/core/pkg/testutils"
@@ -107,7 +108,7 @@ func (suite *HandlerTestSuite) SetupTest() {
suite.db = db
// add the client
- suite.api, err = coreutils.TestClient(t, suite.db)
+ suite.api, err = coreutils.TestClient(t, suite.db, &objects.Upload{})
require.NoError(t, err)
// setup handler
diff --git a/internal/httpserve/handlers/upload.go b/internal/httpserve/handlers/upload.go
index de229e6..742e252 100644
--- a/internal/httpserve/handlers/upload.go
+++ b/internal/httpserve/handlers/upload.go
@@ -1,303 +1,49 @@
package handlers
import (
- "bytes"
- "context"
- "encoding/json"
- "fmt"
- "io"
- "mime/multipart"
- "net/http"
- "path/filepath"
- "sync"
-
echo "github.com/theopenlane/echox"
- "golang.org/x/sync/errgroup"
"github.com/rs/zerolog/log"
- ent "github.com/theopenlane/core/internal/ent/generated"
- "github.com/theopenlane/core/pkg/middleware/transaction"
"github.com/theopenlane/core/pkg/models"
"github.com/theopenlane/core/pkg/objects"
)
-func bindRequest(c echo.Context, req any) error {
- if err := c.Bind(req); err != nil {
- return err
- }
- return c.Validate(req)
-}
-
-type uploadFilesRequest struct {
- Files []*multipart.FileHeader `form:"files"`
-}
-
-func readFile(file *multipart.FileHeader) ([]byte, error) {
- src, err := file.Open()
- if err != nil {
- return nil, err
- }
- defer src.Close()
-
- buf := new(bytes.Buffer)
- if _, err := io.Copy(buf, src); err != nil {
- return nil, err
- }
- return buf.Bytes(), nil
-}
-
-func readFiles(files []*multipart.FileHeader) (filesBytes [][]byte, err error) {
- var wg sync.WaitGroup
- ch := make(chan []byte, len(files))
-
- for _, file := range files {
- wg.Add(1)
- go func(file *multipart.FileHeader) {
- defer wg.Done()
- f, err := readFile(file)
- if err != nil {
- return
- }
- ch <- f
- }(file)
- }
- wg.Wait()
- close(ch)
-
- var fb [][]byte
- for file := range ch {
- fb = append(fb, file)
- }
- return fb, err
-}
-
-func toJson[T any](obj T) string {
- b, _ := json.Marshal(obj)
- return string(b)
-}
-
-func (h *Handler) UploadFiles(c echo.Context) error {
- var req uploadFilesRequest
-
- if err := bindRequest(c, &req); err != nil {
- log.Error().Err(err).Msg("failed to bind request")
- return h.InvalidInput(c, err)
- }
-
- files, err := readFiles(req.Files)
- if err != nil {
- log.Error().Err(err).Msg("failed to read files")
- return h.InternalServerError(c, err)
- }
-
- if err := h.ObjectStorage.Storage.ManagerUpload(c.Request().Context(), files); err != nil {
- log.Error().Err(err).Msg("failed to upload files")
- return h.InternalServerError(c, err)
- }
-
- out := models.UploadFilesReply{
- Message: "Files uploaded successfully",
- }
-
- return h.Success(c, out)
-}
-
// FileUploadHandler is responsible for uploading files
func (h *Handler) FileUploadHandler(ctx echo.Context, keys ...string) error {
- var in models.UploadFilesRequest
- if err := ctx.Bind(&in); err != nil {
- return h.InvalidInput(ctx, err)
- }
-
- reqCtx := ctx.Request().Context()
-
r := ctx.Request()
- w := ctx.Response()
-
- r.Body = http.MaxBytesReader(w, r.Body, h.ObjectStorage.MaxSize)
-
- err := r.ParseMultipartForm(h.ObjectStorage.MaxSize)
- if err != nil {
- h.ObjectStorage.ErrorResponseHandler(err).ServeHTTP(w, r)
- return err
- }
-
- var wg errgroup.Group
+ // create the output struct
out := models.UploadFilesReply{}
- uploadedFiles := make(objects.Files, len(keys))
-
- for _, key := range keys {
- key := key
-
- wg.Go(func() error {
- fileHeaders, ok := r.MultipartForm.File[key]
- if !ok {
- if h.ObjectStorage.IgnoreNonExistentKeys {
- return nil
- }
-
- log.Error().Str("key", key).Msg("files not found")
-
- return err
- }
-
- uploadedFiles[key] = make([]objects.File, 0, len(fileHeaders))
-
- for _, header := range fileHeaders {
- f, err := header.Open()
- if err != nil {
- log.Error().Err(err).Str("key", key).Msg("failed to open file")
- return err
- }
-
- defer f.Close()
-
- mimeType, err := objects.DetectContentType(f)
- if err != nil {
- log.Error().Err(err).Str("key", key).Msg("failed to fetch content type")
- return err
- }
-
- set := ent.CreateFileInput{
- ProvidedFileName: header.Filename,
- ProvidedFileExtension: mimeType,
- DetectedContentType: formatFileSize(header.Size),
- }
-
- entfile, err := transaction.FromContext(reqCtx).File.Create().SetInput(set).Save(reqCtx)
- if err != nil {
- log.Error().Err(err).Msg("failed to create file")
- return err
- }
-
- uploadedFileName := h.ObjectStorage.NameFuncGenerator(entfile.ID + "_" + header.Filename)
- fileData := objects.File{
- FieldName: key,
- OriginalName: header.Filename,
- UploadedFileName: uploadedFileName,
- MimeType: mimeType,
- }
-
- if err := h.ObjectStorage.ValidationFunc(fileData); err != nil {
- log.Error().Err(err).Str("key", key).Msg("failed to validate file")
- return err
- }
-
- metadata, err := h.Storage.Upload(r.Context(), f, &objects.UploadFileOptions{
- FileName: uploadedFileName,
- ContentType: mimeType,
- Metadata: map[string]string{
- "file_id": entfile.ID,
- },
- })
- if err != nil {
- log.Error().Err(err).Str("key", key).Msg("failed to upload file")
- return err
- }
-
- presignedurl := h.Storage.GetPresignedURL(context.TODO(), uploadedFileName)
-
- newfile := objects.File{
- ID: entfile.ID,
- Name: header.Filename,
- MimeType: mimeType,
- ProvidedExtension: filepath.Ext(header.Filename),
- Size: header.Size,
- PresignedURL: presignedurl,
- }
-
- out.FileIdentifiers = append(out.FileIdentifiers, newfile.ID)
- out.PresignedURL = presignedurl
- out.Message = "File uploaded successfully, god damn matt you're a beautiful bastard"
- out.FileCount++
-
- fileData.PresignedURL = presignedurl
- fileData.Size = metadata.Size
- fileData.FolderDestination = metadata.FolderDestination
- fileData.StorageKey = metadata.Key
+	// files are uploaded via the middleware and stored in the context
+ files, err := objects.FilesFromContext(r.Context())
+ if err != nil {
+ log.Error().Err(err).Msg("failed to get files from context")
- log.Info().Str("file", fileData.UploadedFileName).Msg("file uploaded")
- log.Info().Str("file", fileData.UploadedFileName).Str("id", fileData.FolderDestination).Msg("ent file ID")
- log.Info().Str("file", fileData.UploadedFileName).Str("mime_type", fileData.MimeType).Msg("detected mime type")
- log.Info().Str("file", fileData.UploadedFileName).Str("size", formatFileSize(fileData.Size)).Msg("calculated file size")
- log.Info().Str("file", fileData.UploadedFileName).Str("presigned_url", fileData.PresignedURL).Msg("presigned URL")
+ return h.BadRequest(ctx, err)
+ }
- uploadedFiles[key] = append(uploadedFiles[key], fileData)
+ // check if any files were uploaded
+ // loop through keys
+ for _, file := range files {
+ // per key, loop through files
+ for _, f := range file {
+ outFile := models.File{
+ ID: f.ID,
+ Name: f.UploadedFileName,
+ PresignedURL: f.PresignedURL,
}
- return nil
- })
+ out.Files = append(out.Files, outFile)
+ out.FileCount++
+ }
}
- if err := wg.Wait(); err != nil {
- h.ObjectStorage.ErrorResponseHandler(err).ServeHTTP(w, r)
- return err
- }
+ out.Message = "file(s) uploaded successfully"
- r = r.WithContext(objects.WriteFilesToContext(r.Context(), uploadedFiles))
+ out.Success = true
+ // return the response
return h.SuccessBlob(ctx, out)
}
-
-const MaxUploadSize = 32 * 1024 * 1024 // 32MB
-
-func (h *Handler) CreateFile(ctx context.Context, input ent.CreateFileInput) (*ent.File, error) {
- file, err := transaction.FromContext(ctx).File.Create().SetInput(input).Save(ctx)
- if err != nil {
- return nil, err
- }
-
- return file, err
-}
-
-// Progress is used to track the progress of a file upload
-// It implements the io.Writer interface so it can be passed to an io.TeeReader()
-type Progress struct {
- TotalSize int64
- BytesRead int64
-}
-
-// Write is used to satisfy the io.Writer interface Instead of writing somewhere, it simply aggregates the total bytes on each read
-func (pr *Progress) Write(p []byte) (n int, err error) {
- n, err = len(p), nil
-
- pr.BytesRead += int64(n)
-
- pr.Print()
-
- return
-}
-
-// Print displays the current progress of the file upload
-func (pr *Progress) Print() {
- if pr.BytesRead == pr.TotalSize {
- fmt.Println("DONE!")
-
- return
- }
-
- fmt.Printf("File upload in progress: %d\n", pr.BytesRead)
-}
-
-// formatFileSize converts a file size in bytes to a human-readable string in MB/GB notation.
-func formatFileSize(size int64) string {
- const (
- KB = 1024
- MB = KB * 1024
- GB = MB * 1024
- )
-
- switch {
- case size >= GB:
- return fmt.Sprintf("%.2f GB", float64(size)/GB)
- case size >= MB:
- return fmt.Sprintf("%.2f MB", float64(size)/MB)
- case size >= KB:
- return fmt.Sprintf("%.2f KB", float64(size)/KB)
- default:
- return fmt.Sprintf("%d bytes", size)
- }
-}
diff --git a/internal/httpserve/route/upload.go b/internal/httpserve/route/upload.go
index 62a7b7e..12b0835 100644
--- a/internal/httpserve/route/upload.go
+++ b/internal/httpserve/route/upload.go
@@ -6,24 +6,26 @@ import (
echo "github.com/theopenlane/echox"
)
+var (
+ // uploadKeys are the keys that can be used to upload files in a multipart form
+ uploadKeys = []string{"uploadFile"}
+)
+
// registerFileUploadRoute registers the file upload route
func registerFileUploadRoute(router *Router) (err error) {
path := "/upload"
method := http.MethodPost
name := "FileUpload"
- // mw = append(mw, router.Handler.ObjectStorage.UploadHandlerCopy(("uploadFile")))
-
route := echo.Route{
Name: name,
Method: method,
Path: path,
- Middlewares: mw,
+ Middlewares: authMW,
Handler: func(c echo.Context) error {
- return router.Handler.FileUploadHandler(c, "uploadFile")
+ return router.Handler.FileUploadHandler(c, uploadKeys...)
},
}
- // switchOperation := router.Handler.BindFileUploadHandler()
if err := router.AddEchoOnlyRoute(path, method, route); err != nil {
return err
diff --git a/internal/httpserve/serveropts/option.go b/internal/httpserve/serveropts/option.go
index d299099..d35927f 100644
--- a/internal/httpserve/serveropts/option.go
+++ b/internal/httpserve/serveropts/option.go
@@ -36,6 +36,7 @@ import (
"github.com/theopenlane/core/internal/graphapi"
"github.com/theopenlane/core/internal/httpserve/config"
"github.com/theopenlane/core/internal/httpserve/server"
+ objmw "github.com/theopenlane/core/internal/middleware/objects"
authmw "github.com/theopenlane/core/pkg/middleware/auth"
"github.com/theopenlane/core/pkg/middleware/cachecontrol"
"github.com/theopenlane/core/pkg/middleware/cors"
@@ -144,12 +145,12 @@ func WithTokenManager() ServerOption {
// Setup token manager
tm, err := tokens.New(s.Config.Settings.Auth.Token)
if err != nil {
- panic(err)
+ log.Panic().Err(err).Msg("Error creating token manager")
}
keys, err := tm.Keys()
if err != nil {
- panic(err)
+ log.Panic().Err(err).Msg("Error getting keys from token manager")
}
// pass to the REST handlers
@@ -212,7 +213,11 @@ func WithReadyChecks(c *entx.EntClientConfig, f *fgax.Client, r *redis.Client, j
func WithGraphRoute(srv *server.Server, c *ent.Client) ServerOption {
return newApplyFunc(func(s *ServerOptions) {
// Setup Graph API Handlers
- r := graphapi.NewResolver(c).
+ r := graphapi.NewResolver(c,
+ &objmw.Upload{
+ ObjectStorage: s.Config.Handler.ObjectStorage,
+ Storage: s.Config.Handler.Storage,
+ }).
WithExtensions(s.Config.Settings.Server.EnableGraphExtensions)
// add pool to the resolver to manage the number of goroutines
@@ -400,35 +405,49 @@ func WithObjectStorage() ServerOption {
if s.Config.Settings.ObjectStorage.Enabled {
s3Config, _ := awsConfig.LoadDefaultConfig(
context.Background(),
- awsConfig.WithRegion("us-east-2"),
- // awsConfig.WithHTTPClient(httpClient),
+ awsConfig.WithRegion(s.Config.Settings.ObjectStorage.Region),
awsConfig.WithCredentialsProvider(
awsCreds.NewStaticCredentialsProvider(
- "",
- "",
+ s.Config.Settings.ObjectStorage.AccessKey,
+ s.Config.Settings.ObjectStorage.SecretKey,
"")),
)
s3store, err := storage.NewS3FromConfig(s3Config, storage.S3Options{
- Bucket: "openlane",
+ Bucket: s.Config.Settings.ObjectStorage.Bucket,
})
-
if err != nil {
- panic(err.Error())
+ log.Panic().Err(err).Msg("Error creating S3 store")
}
handler, err := objects.New(
objects.WithMaxFileSize(10<<20), // nolint:mnd
+ objects.WithMaxMemory(32<<20), // nolint:mnd
objects.WithStorage(s3store),
objects.WithNameFuncGenerator(objects.OrganizationNameFunc),
)
if err != nil {
- panic(err.Error())
+ log.Panic().Err(err).Msg("Error creating object storage")
}
s.Config.Handler.ObjectStorage = handler
s.Config.Handler.Storage = s3store
+
+ u := objmw.Upload{
+ ObjectStorage: handler,
+ Storage: s3store,
+ }
+
+ cf := objmw.Config{
+ Keys: []string{"uploadFile"},
+ Upload: &u,
+ }
+
+ // add upload middleware to authMW, non-authenticated endpoints will not have this middleware
+ uploadMw := echo.WrapMiddleware(objmw.FileUploadMiddleware(cf))
+
+ s.Config.Handler.AuthMiddleware = append(s.Config.Handler.AuthMiddleware, uploadMw)
}
})
}
diff --git a/internal/middleware/objects/middleware.go b/internal/middleware/objects/middleware.go
new file mode 100644
index 0000000..12e917d
--- /dev/null
+++ b/internal/middleware/objects/middleware.go
@@ -0,0 +1,376 @@
+package objects
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "mime/multipart"
+ "net/http"
+ "path/filepath"
+ "strings"
+
+ "github.com/rs/zerolog/log"
+ "golang.org/x/sync/errgroup"
+
+ ent "github.com/theopenlane/core/internal/ent/generated"
+ "github.com/theopenlane/core/pkg/middleware/transaction"
+ "github.com/theopenlane/core/pkg/objects"
+ "github.com/theopenlane/core/pkg/objects/storage"
+)
+
+// Upload is the object that handles the file upload process
+type Upload struct {
+ // ObjectStorage is the object storage configuration
+ ObjectStorage *objects.Objects
+ // Storage is the storage type to use, in this case, S3
+ Storage *storage.S3Store
+}
+
+// FileUpload is the object that holds the file information
+type FileUpload struct {
+ // File is the file to be uploaded
+ File io.ReadSeeker
+ // Filename is the name of the file provided in the multipart form
+ Filename string
+ // Size is the size of the file in bytes
+ Size int64
+ // ContentType is the content type of the file from the header
+ ContentType string
+}
+
+// Config defines the config for Mime middleware
+type Config struct {
+ // Keys is a list of keys to look for in the multipart form
+ Keys []string `yaml:"keys"`
+ // Skipper defines a function to skip middleware.
+ Skipper func(r *http.Request) bool `json:"-" koanf:"-"`
+ // Upload is the upload object that handles the file upload process
+ Upload *Upload
+}
+
+// FileUpload uploads the files to the storage and returns the context with the uploaded files
+func (u *Upload) FileUpload(ctx context.Context, files []FileUpload) (context.Context, error) {
+ // set up a wait group to wait for all the uploads to finish
+ var wg errgroup.Group
+
+ uploadedFiles := []objects.File{}
+
+ wg.Go(func() (err error) {
+ uploadedFiles, err = u.upload(ctx, files)
+ if err != nil {
+ log.Error().Err(err).Msg("failed to upload files")
+
+ return err
+ }
+
+ return nil
+ })
+
+ // wait for all the uploads to finish
+ if err := wg.Wait(); err != nil {
+ return ctx, err
+ }
+
+ // check if any files were uploaded, if not return early
+ if len(uploadedFiles) == 0 {
+ return ctx, nil
+ }
+
+ // write the uploaded files to the context
+ ctx = objects.WriteFilesToContext(ctx, objects.Files{"upload": uploadedFiles})
+
+ // return the response
+ return ctx, nil
+}
+
+// FileUploadMiddleware is a middleware that handles the file upload process
+// this can be added to the middleware chain to handle file uploads prior to the main handler
+// Since gqlgen handles file uploads differently, this middleware is not used in the graphql handler
+func FileUploadMiddleware(config Config) func(http.Handler) http.Handler {
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ if config.Skipper != nil && config.Skipper(r) {
+ next.ServeHTTP(w, r)
+
+ return
+ }
+
+ ctx, err := config.Upload.multiformParseForm(w, r, config.Keys...)
+ if err != nil {
+ http.Error(w, err.Error(), http.StatusBadRequest)
+
+ return
+ }
+
+ r = r.WithContext(ctx)
+
+ next.ServeHTTP(w, r)
+ })
+ }
+}
+
+// multiformParseForm parses the multipart form and uploads the files to the storage and returns the context with the uploaded files
+func (u *Upload) multiformParseForm(w http.ResponseWriter, r *http.Request, keys ...string) (context.Context, error) {
+ ctx := r.Context()
+
+ r.Body = http.MaxBytesReader(w, r.Body, u.ObjectStorage.MaxSize)
+
+ // skip if the content type is not multipart
+ if !strings.Contains(r.Header.Get("Content-Type"), "multipart/form-data") {
+ return ctx, nil
+ }
+
+ if err := r.ParseMultipartForm(u.ObjectStorage.MaxSize); err != nil {
+ u.ObjectStorage.ErrorResponseHandler(err).ServeHTTP(w, r)
+
+ return nil, err
+ }
+
+ var wg errgroup.Group
+
+ for _, key := range keys {
+ wg.Go(func() error {
+ fileHeaders, err := u.getFileHeaders(r, key)
+ if err != nil {
+ // log the error and skip the key
+ // do not return an error if the key is not found
+ // this is to allow for optional keys
+ log.Info().Err(err).Str("key", key).Msg("key not found, skipping")
+
+ return nil
+ }
+
+ files, err := parse(fileHeaders)
+ if err != nil {
+ log.Error().Err(err).Str("key", key).Msg("failed to parse files from headers")
+ }
+
+ ctx, err = u.FileUpload(ctx, files)
+ if err != nil {
+ log.Error().Err(err).Str("key", key).Msg("failed to upload files")
+
+ return err
+ }
+
+ return nil
+ })
+ }
+
+ if err := wg.Wait(); err != nil {
+ return nil, err
+ }
+
+ return ctx, nil
+}
+
+// parse parses the multipart form file headers and returns the files to be uploaded
+func parse(fileHeaders []*multipart.FileHeader) ([]FileUpload, error) {
+ files := []FileUpload{}
+
+ for _, header := range fileHeaders {
+ f, err := header.Open()
+ if err != nil {
+ log.Error().Err(err).Str("file", header.Filename).Msg("failed to open file")
+ return nil, err
+ }
+
+ defer f.Close()
+
+ fileUpload := FileUpload{
+ File: f,
+ Filename: header.Filename,
+ Size: header.Size,
+ ContentType: header.Header.Get("Content-Type"),
+ }
+
+ files = append(files, fileUpload)
+ }
+
+ return files, nil
+}
+
+// upload handles the file upload process for the parsed files and returns the uploaded files
+func (u *Upload) upload(ctx context.Context, files []FileUpload) ([]objects.File, error) {
+ uploadedFiles := make([]objects.File, 0, len(files))
+
+ for _, f := range files {
+ // create the file in the database
+ entFile, err := u.createFile(ctx, f)
+ if err != nil {
+ log.Error().Err(err).Str("file", f.Filename).Msg("failed to create file")
+
+ return nil, err
+ }
+
+ // generate the uploaded file name
+ uploadedFileName := u.ObjectStorage.NameFuncGenerator(entFile.ID + "_" + f.Filename)
+ fileData := objects.File{
+ ID: entFile.ID,
+ // FieldName: key,
+ OriginalName: f.Filename,
+ UploadedFileName: uploadedFileName,
+ MimeType: entFile.DetectedMimeType,
+ }
+
+ // validate the file
+ if err := u.ObjectStorage.ValidationFunc(fileData); err != nil {
+ log.Error().Err(err).Str("file", f.Filename).Msg("failed to validate file")
+
+ return nil, err
+ }
+
+ // Upload the file to the storage and get the metadata
+ metadata, err := u.Storage.Upload(ctx, f.File, &objects.UploadFileOptions{
+ FileName: uploadedFileName,
+ ContentType: entFile.DetectedContentType,
+ Metadata: map[string]string{
+ "file_id": entFile.ID,
+ },
+ })
+ if err != nil {
+ log.Error().Err(err).Str("file", f.Filename).Msg("failed to upload file")
+
+ return nil, err
+ }
+
+ // add metadata to file information
+ fileData.Size = metadata.Size
+ fileData.FolderDestination = metadata.FolderDestination
+ fileData.StorageKey = metadata.Key
+
+ // generate a presigned URL that is valid for 15 minutes
+ fileData.PresignedURL, err = u.Storage.GetPresignedURL(ctx, uploadedFileName)
+ if err != nil {
+ log.Error().Err(err).Str("file", f.Filename).Msg("failed to get presigned URL")
+
+ return nil, err
+ }
+
+ // update the file with the size
+ if _, err := txClientFromContext(ctx).
+ UpdateOne(entFile).
+ SetPersistedFileSize(metadata.Size).
+ SetURI(createURI(entFile.StorageScheme, metadata.FolderDestination, metadata.Key)).
+ SetStorageVolume(metadata.FolderDestination).
+ SetStoragePath(metadata.Key).
+ Save(ctx); err != nil {
+ log.Error().Err(err).Msg("failed to update file with size")
+ return nil, err
+ }
+
+ log.Info().Str("file", fileData.UploadedFileName).
+ Str("id", fileData.FolderDestination).
+ Str("mime_type", fileData.MimeType).
+ Str("size", formatFileSize(fileData.Size)).
+ Str("presigned_url", fileData.PresignedURL).
+ Msg("file uploaded")
+
+ uploadedFiles = append(uploadedFiles, fileData)
+ }
+
+ return uploadedFiles, nil
+}
+
+// getFileHeaders returns the file headers for a given key in the multipart form
+func (u *Upload) getFileHeaders(r *http.Request, key string) ([]*multipart.FileHeader, error) {
+ fileHeaders, ok := r.MultipartForm.File[key]
+ if !ok {
+ if u.ObjectStorage.IgnoreNonExistentKeys {
+ return nil, nil
+ }
+
+ return nil, errors.New("file key not found") // nolint:goerr113
+ }
+
+ return fileHeaders, nil
+}
+
+// formatFileSize converts a file size in bytes to a human-readable string in MB/GB notation.
+func formatFileSize(size int64) string {
+ const (
+ KB = 1024
+ MB = KB * 1024
+ GB = MB * 1024
+ )
+
+ switch {
+ case size >= GB:
+ return fmt.Sprintf("%.2f GB", float64(size)/GB)
+ case size >= MB:
+ return fmt.Sprintf("%.2f MB", float64(size)/MB)
+ case size >= KB:
+ return fmt.Sprintf("%.2f KB", float64(size)/KB)
+ default:
+ return fmt.Sprintf("%d bytes", size)
+ }
+}
+
+// createURI creates a URI for the file
+func createURI(scheme, destination, key string) string {
+ return fmt.Sprintf("%s%s/%s", scheme, destination, key)
+}
+
+// createFile creates a file in the database and returns the file object
+func (u *Upload) createFile(ctx context.Context, f FileUpload) (*ent.File, error) {
+ contentType, err := objects.DetectContentType(f.File)
+ if err != nil {
+ log.Error().Err(err).Str("file", f.Filename).Msg("failed to fetch content type")
+
+ return nil, err
+ }
+
+ md5Hash, err := objects.ComputeChecksum(f.File)
+ if err != nil {
+ log.Error().Err(err).Str("file", f.Filename).Msg("failed to compute checksum")
+
+ return nil, err
+ }
+
+ set := ent.CreateFileInput{
+ ProvidedFileName: f.Filename,
+ ProvidedFileExtension: filepath.Ext(f.Filename),
+ ProvidedFileSize: &f.Size,
+ DetectedMimeType: &f.ContentType,
+ DetectedContentType: contentType,
+ Md5Hash: &md5Hash,
+ // StoreKey: &f.,
+ // StorageScheme: u.Storage.Scheme,
+ }
+
+ // get file contents
+ contents, err := objects.StreamToByte(f.File)
+ if err != nil {
+ log.Error().Err(err).Str("file", f.Filename).Msg("failed to read file contents")
+
+ return nil, err
+ }
+
+ entFile, err := txClientFromContext(ctx).Create().
+ SetFileContents(contents).
+ SetInput(set).
+ Save(ctx)
+ if err != nil {
+ log.Error().Err(err).Msg("failed to create file")
+
+ return nil, err
+ }
+
+ return entFile, nil
+}
+
+// txClientFromContext returns the file client from the context if it exists
+// used for transactional mutations, if the client does not exist, it will return nil
+func txClientFromContext(ctx context.Context) *ent.FileClient {
+ client := ent.FromContext(ctx)
+ if client != nil {
+ return client.File
+ }
+
+ tx := transaction.FromContext(ctx)
+ if tx != nil {
+ return tx.File
+ }
+
+ return nil
+}
diff --git a/internal/middleware/objects/middleware_test.go b/internal/middleware/objects/middleware_test.go
new file mode 100644
index 0000000..48695a7
--- /dev/null
+++ b/internal/middleware/objects/middleware_test.go
@@ -0,0 +1,27 @@
+package objects
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestFormatFileSize(t *testing.T) {
+ tests := []struct {
+ name string
+ size int64
+ expected string
+ }{
+ {"Bytes", 512, "512 bytes"},
+ {"Kilobytes", 2048, "2.00 KB"},
+ {"Megabytes", 10485760, "10.00 MB"},
+ {"Gigabytes", 10737418240, "10.00 GB"},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result := formatFileSize(tt.size)
+ assert.Equal(t, tt.expected, result)
+ })
+ }
+}
diff --git a/jsonschema/api-docs.md b/jsonschema/api-docs.md
index 648fb20..94ccf55 100644
--- a/jsonschema/api-docs.md
+++ b/jsonschema/api-docs.md
@@ -631,32 +631,11 @@ Config defines the configuration settings for the default rate limiter
|----|----|-----------|--------|
|**enabled**|`boolean`|||
|**provider**|`string`|||
-|**ConfigurationPath**|`string`|||
-|**SourcePath**|`string`|||
-|**ExecutionTime**|`string`|Format: `"date-time"`
||
-|[**storage**](#objectstoragestorage)|`object`|||
-|**dataset\_type**|`string`|||
-|**kind**|`string`|||
-|**path**|`string`|||
-|**container**|`string`|||
-|**accesskey**|`string`|||
+|**accessKey**|`string`|||
|**region**|`string`|||
-|**secretkey**|`string`|||
-|**credentials\_json**|`string`|||
+|**secretKey**|`string`|||
+|**credentialsJSON**|`string`|||
|**bucket**|`string`|||
-|**endpoint**|`string`|||
-|**disable\_ssl**|`boolean`|||
-|**force\_path\_style**|`boolean`|||
-|**path\_style**|`boolean`|||
-|**endpoint\_style**|`boolean`|||
**Additional Properties:** not allowed
-
-### objectStorage\.storage: object
-
-**Additional Properties**
-
-|Name|Type|Description|Required|
-|----|----|-----------|--------|
-
diff --git a/jsonschema/core.config.json b/jsonschema/core.config.json
index 46088ea..0eb79fa 100644
--- a/jsonschema/core.config.json
+++ b/jsonschema/core.config.json
@@ -564,60 +564,20 @@
"provider": {
"type": "string"
},
- "ConfigurationPath": {
- "type": "string"
- },
- "SourcePath": {
- "type": "string"
- },
- "ExecutionTime": {
- "type": "string",
- "format": "date-time"
- },
- "storage": {
- "$ref": "#/$defs/map[string]string"
- },
- "dataset_type": {
- "type": "string"
- },
- "kind": {
- "type": "string"
- },
- "path": {
- "type": "string"
- },
- "container": {
- "type": "string"
- },
- "accesskey": {
+ "accessKey": {
"type": "string"
},
"region": {
"type": "string"
},
- "secretkey": {
+ "secretKey": {
"type": "string"
},
- "credentials_json": {
+ "credentialsJSON": {
"type": "string"
},
"bucket": {
"type": "string"
- },
- "endpoint": {
- "type": "string"
- },
- "disable_ssl": {
- "type": "boolean"
- },
- "force_path_style": {
- "type": "boolean"
- },
- "path_style": {
- "type": "boolean"
- },
- "endpoint_style": {
- "type": "boolean"
}
},
"additionalProperties": false,
@@ -1034,7 +994,8 @@
"description": "Ratelimit contains the configuration for the rate limiter"
},
"objectStorage": {
- "$ref": "#/$defs/objects.Config"
+ "$ref": "#/$defs/objects.Config",
+ "description": "ObjectStorage contains the configuration for the object storage backend"
}
},
"additionalProperties": false,
diff --git a/pkg/middleware/debug/bodydump.go b/pkg/middleware/debug/bodydump.go
index 34abd27..924d576 100644
--- a/pkg/middleware/debug/bodydump.go
+++ b/pkg/middleware/debug/bodydump.go
@@ -1,60 +1,41 @@
package debug
import (
+ "context"
"encoding/json"
"net/http"
- "strings"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
echo "github.com/theopenlane/echox"
"github.com/theopenlane/echox/middleware"
+
+ "github.com/theopenlane/core/pkg/objects"
)
// BodyDump prints out the request body for debugging purpose but attempts to obfuscate sensitive fields within the requests
func BodyDump() echo.MiddlewareFunc {
return middleware.BodyDump(func(c echo.Context, reqBody, resBody []byte) {
+ r := c.Request()
+ w := c.Response()
+ ctx := r.Context()
+
// Create a child logger for concurrency safety
logger := log.Logger.With().Logger()
// Add context fields for the request
logger.UpdateContext(func(l zerolog.Context) zerolog.Context {
- return l.Str("user-agent", c.Request().Header.Get("User-Agent")).
- Str("request-id", c.Response().Header().Get(echo.HeaderXRequestID)).
- Str("request-uri", c.Request().RequestURI).
- Str("request-method", c.Request().Method).
- Str("request-protocol", c.Request().Proto).
+ return l.Str("user-agent", r.Header.Get("User-Agent")).
+ Str("request-id", w.Header().Get(echo.HeaderXRequestID)).
+ Str("request-uri", r.RequestURI).
+ Str("request-method", r.Method).
+ Str("request-protocol", r.Proto).
Str("client-ip", c.RealIP())
})
- if len(reqBody) > 0 {
- contentType := c.Request().Header.Get("Content-Type")
- if strings.HasPrefix(contentType, "multipart/form-data") {
- // Parse the multipart form
- if err := c.Request().ParseMultipartForm(32 << 20); err == nil { // nolint:mnd
- form := c.Request().MultipartForm
- if form != nil {
- hasFile := false
-
- for _, files := range form.File {
- if len(files) > 0 {
- hasFile = true
- break
- }
- }
-
- if hasFile {
- logger.Info().Msg("request contains a file, not logging request body")
- } else {
- logRequestBody(logger, reqBody)
- }
- }
- } else {
- logRequestBody(logger, reqBody)
- }
- } else {
- logRequestBody(logger, reqBody)
- }
+ // Log the request body if it is not empty and the content type is not multipart/form-data
+ if shouldLogBody(ctx, logger, reqBody) {
+ logRequestBody(logger, reqBody)
}
if (c.Request().Method == http.MethodPost || c.Request().Method == http.MethodPatch) && len(resBody) > 0 {
@@ -70,6 +51,25 @@ func BodyDump() echo.MiddlewareFunc {
})
}
+// shouldLogBody determines if the request body should be logged based on the content type and the presence of files in the request
+func shouldLogBody(ctx context.Context, logger zerolog.Logger, reqBody []byte) bool {
+ // If the request body is empty, there is nothing to log
+ if len(reqBody) == 0 {
+ return false
+ }
+
+	files, _ := objects.FilesFromContext(ctx) // error only indicates no files in context; safe to ignore here
+ if len(files) > 0 {
+ logger.Info().Msg("request contains a file, not logging request body")
+
+ return false
+ }
+
+ // default to logging the request body
+ return true
+}
+
+// logRequestBody logs the request body to the logger
func logRequestBody(logger zerolog.Logger, reqBody []byte) {
var bodymap map[string]interface{}
if err := json.Unmarshal(reqBody, &bodymap); err == nil {
diff --git a/pkg/models/models.go b/pkg/models/models.go
index c3a4b26..e245a57 100644
--- a/pkg/models/models.go
+++ b/pkg/models/models.go
@@ -1,7 +1,6 @@
package models
import (
- "mime/multipart"
"strings"
"github.com/go-webauthn/webauthn/protocol"
@@ -720,43 +719,19 @@ var ExampleAccountRolesOrganizationReply = AccountRolesOrganizationReply{
// FILES
// =========
-type UploadFilesRequest struct {
- File multipart.File
- FileHeader *multipart.FileHeader
- ID string `form:"id"`
- Param string `param:"param"`
- Payload string `json:"payload"`
- Content string `json:"content" form:"content"`
-}
-
type UploadFilesReply struct {
rout.Reply
- Message string `json:"message,omitempty"`
- FileName string `json:"file_name,omitempty"`
- FilePath string `json:"file_path,omitempty"`
- Size int64 `json:"size,omitempty"`
- MimeType string `json:"mime_type,omitempty"`
- Link string `json:"link,omitempty"`
- FileCount int64
- FileIdentifiers []string
- PresignedURL string `json:"presigned_url,omitempty"`
-}
-
-func (r *UploadFilesRequest) Validate() error {
- if r.File == nil {
- return rout.NewMissingRequiredFieldError("file")
- }
-
- if r.FileHeader == nil {
- return rout.NewMissingRequiredFieldError("fileheader")
- }
-
- return nil
+ Message string `json:"message,omitempty"`
+ FileCount int64 `json:"file_count,omitempty"`
+ Files []File `json:"files,omitempty"`
}
-// ExampleLoginSuccessRequest is an example of a successful login request for OpenAPI documentation
-var ExampleUploadFilesSuccessRequest = UploadFilesRequest{
- ID: "123456",
+type File struct {
+ ID string `json:"id,omitempty"`
+ Name string `json:"name,omitempty"`
+ Size int64 `json:"size,omitempty"`
+ MimeType string `json:"mime_type,omitempty"`
+ PresignedURL string `json:"presigned_url,omitempty"`
}
// ExampleLoginSuccessResponse is an example of a successful login response for OpenAPI documentation
diff --git a/pkg/objects/binder.go b/pkg/objects/binder.go
index 55266ab..5b07283 100644
--- a/pkg/objects/binder.go
+++ b/pkg/objects/binder.go
@@ -20,37 +20,44 @@ func (fn BindFunc) Bind(ctx echo.Context, i interface{}) error {
return fn(ctx, i)
}
+// NewFileBinder returns a new FileBinder that binds the request body to the struct pointer and binds the form files to the struct fields.
func NewFileBinder(b echo.Binder) echo.Binder {
return BindFunc(func(ctx echo.Context, i interface{}) error {
- err := b.Bind(ctx, i)
- if err == nil {
- ctype := ctx.Request().Header.Get(echo.HeaderContentType)
- // if bind form
- if strings.HasPrefix(ctype, echo.MIMEApplicationForm) || strings.HasPrefix(ctype, echo.MIMEMultipartForm) {
- // get form files
- var form *multipart.Form
-
- form, err = ctx.MultipartForm()
- if err == nil {
- err = BindFile(i, ctx, form.File)
- }
+ if err := b.Bind(ctx, i); err != nil {
+ return err
+ }
+
+ ctype := ctx.Request().Header.Get(echo.HeaderContentType)
+
+ // if bind form
+ if strings.HasPrefix(ctype, echo.MIMEApplicationForm) || strings.HasPrefix(ctype, echo.MIMEMultipartForm) {
+ // get form files
+			// note(review): the separate `var form *multipart.Form` declaration is
+			// redundant — := below declares both form and err in this scope
+			form, err := ctx.MultipartForm()
+ if err != nil {
+ return err
}
+
+ return BindFile(i, ctx, form.File)
}
- return err
+ return nil
})
}
+// BindFile binds the form files to the struct fields.
func BindFile(i interface{}, ctx echo.Context, files map[string][]*multipart.FileHeader) error {
iValue := reflect.Indirect(reflect.ValueOf(i))
// check bind type is struct pointer
if iValue.Kind() != reflect.Struct {
- return fmt.Errorf("BindFile input not is struct pointer, indirect type is %s", iValue.Type().String())
+		return fmt.Errorf("%w: BindFile input is not a struct pointer, indirect type is %s", ErrUnexpectedType, iValue.Type().String())
}
iType := iValue.Type()
- for i := 0; i < iType.NumField(); i++ {
+ for i := range iType.NumField() {
fType := iType.Field(i)
+
// check canset field
fValue := iValue.Field(i)
if !fValue.CanSet() {
@@ -74,6 +81,7 @@ func BindFile(i interface{}, ctx echo.Context, files map[string][]*multipart.Fil
return nil
}
+// getFiles returns the files by the field name or form name
func getFiles(files map[string][]*multipart.FileHeader, names ...string) []*multipart.FileHeader {
for _, name := range names {
file, ok := files[name]
diff --git a/pkg/objects/config.go b/pkg/objects/config.go
index bb8ab70..8554d3d 100644
--- a/pkg/objects/config.go
+++ b/pkg/objects/config.go
@@ -10,25 +10,17 @@ type Config struct {
// Enabled indicates if the store is enabled
Enabled bool `json:"enabled" koanf:"enabled" default:"true"`
// Provider is the name of the provider
- Provider string `json:"provider" koanf:"provider"`
- ConfigurationPath string
- SourcePath string
- ExecutionTime time.Time
- Storage map[string]string `json:"storage" koanf:"storage"`
- DatasetType string `json:"dataset_type" koanf:"dataset_type"`
- Kind string `json:"kind" koanf:"kind"`
- Path string `json:"path" koanf:"path"`
- Container string `json:"container" koanf:"container"`
- AccessKey string `json:"accesskey" koanf:"accesskey"`
- Region string `json:"region" koanf:"region"`
- SecretKey string `json:"secretkey" koanf:"secretkey"`
- CredentialsJSON string `json:"credentials_json" koanf:"credentials_json"`
- Bucket string `json:"bucket" koanf:"bucket"`
- Endpoint string `json:"endpoint" koanf:"endpoint"`
- DisableSSL bool `json:"disable_ssl" koanf:"disable_ssl"`
- ForcePathStyle bool `json:"force_path_style" koanf:"force_path_style"`
- PathStyle bool `json:"path_style" koanf:"path_style"`
- EndpointStyle bool `json:"endpoint_style" koanf:"endpoint_style"`
+ Provider string `json:"provider" koanf:"provider"`
+ // AccessKey is the access key for the storage provider
+ AccessKey string `json:"accessKey" koanf:"accessKey"`
+ // Region is the region for the storage provider
+ Region string `json:"region" koanf:"region"`
+ // SecretKey is the secret key for the storage provider
+ SecretKey string `json:"secretKey" koanf:"secretKey"`
+ // CredentialsJSON is the credentials JSON for the storage provider
+ CredentialsJSON string `json:"credentialsJSON" koanf:"credentialsJSON"`
+ // Bucket is the bucket name for the storage provider
+ Bucket string `json:"bucket" koanf:"bucket"`
}
var (
@@ -43,7 +35,8 @@ var (
return fmt.Sprintf("objects-%d-%s", time.Now().Unix(), s)
}
- defaultFileUploadMaxSize int64 = 1024 * 1024 * 5
+ defaultFileUploadMaxSize int64 = 10 << 20
+ defaultMaxMemorySize int64 = 32 << 20
defaultErrorResponseHandler ErrResponseHandler = func(err error) http.HandlerFunc {
return func(w http.ResponseWriter, _ *http.Request) {
@@ -55,5 +48,5 @@ var (
)
var OrganizationNameFunc NameGeneratorFunc = func(s string) string {
- return fmt.Sprintf("%s", s)
+ return s
}
diff --git a/pkg/objects/context.go b/pkg/objects/context.go
index b604fbe..fa1c348 100644
--- a/pkg/objects/context.go
+++ b/pkg/objects/context.go
@@ -2,19 +2,20 @@ package objects
import (
"context"
- "net/http"
)
-type contextKey string
+// FileContextKey is the context key for the files
+var FileContextKey = &ContextKey{"files"}
-const (
- fileKey contextKey = "files"
-)
+// ContextKey is the key name for the additional context
+type ContextKey struct {
+ name string
+}
// WriteFilesToContext retrieves any existing files from the context, appends the new files to the existing files map
// based on the form field name, then returns a new context with the updated files map stored in it
func WriteFilesToContext(ctx context.Context, f Files) context.Context {
- existingFiles, ok := ctx.Value(fileKey).(Files)
+ existingFiles, ok := ctx.Value(FileContextKey).(Files)
if !ok {
existingFiles = Files{}
}
@@ -24,12 +25,12 @@ func WriteFilesToContext(ctx context.Context, f Files) context.Context {
existingFiles[v[0].FieldName] = append(existingFiles[v[0].FieldName], v...)
}
- return context.WithValue(ctx, fileKey, existingFiles)
+ return context.WithValue(ctx, FileContextKey, existingFiles)
}
// FilesFromContext returns all files that have been uploaded during the request
-func FilesFromContext(r *http.Request) (Files, error) {
- files, ok := r.Context().Value(fileKey).(Files)
+func FilesFromContext(ctx context.Context) (Files, error) {
+ files, ok := ctx.Value(FileContextKey).(Files)
if !ok {
return nil, ErrNoFilesUploaded
}
@@ -37,10 +38,10 @@ func FilesFromContext(r *http.Request) (Files, error) {
return files, nil
}
-// FilesFromContextWithKey returns all files that have been uploaded during the request
+// FilesFromContextWithKey returns all files that have been uploaded during the request
// and sorts by the provided form field
-func FilesFromContextWithKey(r *http.Request, key string) ([]File, error) {
- files, ok := r.Context().Value(fileKey).(Files)
+func FilesFromContextWithKey(ctx context.Context, key string) ([]File, error) {
+ files, ok := ctx.Value(FileContextKey).(Files)
if !ok {
return nil, ErrNoFilesUploaded
}
diff --git a/pkg/objects/errors.go b/pkg/objects/errors.go
index fe862a4..e06ae4f 100644
--- a/pkg/objects/errors.go
+++ b/pkg/objects/errors.go
@@ -22,6 +22,8 @@ var (
ErrUnsupportedMimeType = errors.New("unsupported mime type uploaded")
// ErrMustProvideStorageBackend is returned when a storage backend is not provided
ErrMustProvideStorageBackend = errors.New("you must provide a storage backend")
+ // ErrUnexpectedType is returned when an invalid type is provided
+ ErrUnexpectedType = errors.New("unexpected type provided")
)
type errorMsg string
diff --git a/pkg/objects/handler.go b/pkg/objects/handler.go
index a3443e7..33314bb 100644
--- a/pkg/objects/handler.go
+++ b/pkg/objects/handler.go
@@ -8,15 +8,15 @@ import (
)
// Upload is a HTTP middleware that takes in a list of form fields and the next
-// HTTP handler to run after the upload prodcess is completed
+// HTTP handler to run after the upload process is completed
func (h *Objects) UploadHandler(keys ...string) func(next http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
r.Body = http.MaxBytesReader(w, r.Body, h.MaxSize)
- err := r.ParseMultipartForm(h.MaxSize)
- if err != nil {
+ if err := r.ParseMultipartForm(h.MaxSize); err != nil {
h.ErrorResponseHandler(err).ServeHTTP(w, r)
+
return
}
@@ -26,72 +26,76 @@ func (h *Objects) UploadHandler(keys ...string) func(next http.Handler) http.Han
uploadedFiles := make(Files, len(keys))
for _, key := range keys {
- key := key
-
wg.Go(func() error {
- fileHeaders, ok := r.MultipartForm.File[key]
- if !ok {
- if h.IgnoreNonExistentKeys {
- return nil
- }
-
- return fmt.Errorf("%w: %s", ErrFilesNotFound, key)
- }
-
- uploadedFiles[key] = make([]File, 0, len(fileHeaders))
-
- for _, header := range fileHeaders {
- f, err := header.Open()
- if err != nil {
- return fmt.Errorf("%w (%s): %v", ErrFileOpenFailed, key, err)
- }
-
- defer f.Close()
-
- uploadedFileName := h.NameFuncGenerator(header.Filename)
-
- mimeType, err := DetectContentType(f)
- if err != nil {
- return fmt.Errorf("%w (%s): %v", ErrInvalidMimeType, key, err)
- }
-
- fileData := File{
- FieldName: key,
- OriginalName: header.Filename,
- UploadedFileName: uploadedFileName,
- MimeType: mimeType,
- }
-
- if err := h.ValidationFunc(fileData); err != nil {
- return fmt.Errorf("%w (%s): %v", ErrValidationFailed, key, err)
- }
-
- metadata, err := h.Storage.Upload(r.Context(), f, &UploadFileOptions{
- FileName: uploadedFileName,
- })
- if err != nil {
- return fmt.Errorf("%w: upload failed for (%s)", err, key)
- }
-
- fileData.Size = metadata.Size
- fileData.FolderDestination = metadata.FolderDestination
- fileData.StorageKey = metadata.Key
-
- uploadedFiles[key] = append(uploadedFiles[key], fileData)
- }
-
- return nil
+ return upload(r, h, key, uploadedFiles)
})
}
if err := wg.Wait(); err != nil {
h.ErrorResponseHandler(err).ServeHTTP(w, r)
+
return
}
+ // write the uploaded files to the context
r = r.WithContext(WriteFilesToContext(r.Context(), uploadedFiles))
next.ServeHTTP(w, r)
})
}
}
+// note(review): upload writes to the shared uploadedFiles map from concurrent wg.Go goroutines; Go maps are not safe for concurrent writes — guard with a mutex or collect per-goroutine results before merging
+func upload(r *http.Request, h *Objects, key string, uploadedFiles Files) error {
+ fileHeaders, ok := r.MultipartForm.File[key]
+ if !ok {
+ if h.IgnoreNonExistentKeys {
+ return nil
+ }
+
+ return fmt.Errorf("%w: %s", ErrFilesNotFound, key)
+ }
+
+ uploadedFiles[key] = make([]File, 0, len(fileHeaders))
+
+ for _, header := range fileHeaders {
+ f, err := header.Open()
+ if err != nil {
+ return fmt.Errorf("%w (%s): %v", ErrFileOpenFailed, key, err)
+ }
+
+ defer f.Close()
+
+ uploadedFileName := h.NameFuncGenerator(header.Filename)
+
+ mimeType, err := DetectContentType(f)
+ if err != nil {
+ return fmt.Errorf("%w (%s): %v", ErrInvalidMimeType, key, err)
+ }
+
+ fileData := File{
+ FieldName: key,
+ OriginalName: header.Filename,
+ UploadedFileName: uploadedFileName,
+ MimeType: mimeType,
+ }
+
+ if err := h.ValidationFunc(fileData); err != nil {
+ return fmt.Errorf("%w (%s): %v", ErrValidationFailed, key, err)
+ }
+
+ metadata, err := h.Storage.Upload(r.Context(), f, &UploadFileOptions{
+ FileName: uploadedFileName,
+ })
+ if err != nil {
+ return fmt.Errorf("%w: upload failed for (%s)", err, key)
+ }
+
+ fileData.Size = metadata.Size
+ fileData.FolderDestination = metadata.FolderDestination
+ fileData.StorageKey = metadata.Key
+
+ uploadedFiles[key] = append(uploadedFiles[key], fileData)
+ }
+
+ return nil
+}
diff --git a/pkg/objects/handler_test.go b/pkg/objects/handler_test.go
index 6c6d83f..d2c67c8 100644
--- a/pkg/objects/handler_test.go
+++ b/pkg/objects/handler_test.go
@@ -47,7 +47,7 @@ func TestObjects(t *testing.T) {
expectedStatusCode int
validMimeTypes []string
// ignoreFormField instructs the test to not add the
- // multipar form data part to the request
+ // multipart form data part to the request
ignoreFormField bool
useIgnoreSkipOpt bool
@@ -75,8 +75,8 @@ func TestObjects(t *testing.T) {
Upload(gomock.Any(), gomock.Any(), gomock.Any()).
Return(&objects.UploadedFileMetadata{
Size: size,
- }, errors.New("could not upload file")).
- Times(0) // make sure this is never called
+ }, errors.New("could not upload file")). // nolint:err113
+ Times(0) // make sure this is never called
},
expectedStatusCode: http.StatusInternalServerError,
pathToFile: "objects.md",
@@ -92,8 +92,8 @@ func TestObjects(t *testing.T) {
Upload(gomock.Any(), gomock.Any(), gomock.Any()).
Return(&objects.UploadedFileMetadata{
Size: size,
- }, errors.New("could not upload file")).
- Times(0) // make sure this is never called
+ }, errors.New("could not upload file")). // nolint:err113
+ Times(0) // make sure this is never called
},
expectedStatusCode: http.StatusAccepted,
pathToFile: "objects.md",
@@ -109,8 +109,8 @@ func TestObjects(t *testing.T) {
Upload(gomock.Any(), gomock.Any(), gomock.Any()).
Return(&objects.UploadedFileMetadata{
Size: size,
- }, errors.New("could not upload file")).
- Times(0) // make sure this is never called
+ }, errors.New("could not upload file")). // nolint:err113
+ Times(0) // make sure this is never called
},
expectedStatusCode: http.StatusInternalServerError,
pathToFile: "objects.md",
@@ -124,7 +124,7 @@ func TestObjects(t *testing.T) {
Upload(gomock.Any(), gomock.Any(), gomock.Any()).
Return(&objects.UploadedFileMetadata{
Size: size,
- }, errors.New("could not upload file")).
+ }, errors.New("could not upload file")). // nolint:err113
Times(1)
},
expectedStatusCode: http.StatusInternalServerError,
@@ -139,8 +139,8 @@ func TestObjects(t *testing.T) {
Upload(gomock.Any(), gomock.Any(), gomock.Any()).
Return(&objects.UploadedFileMetadata{
Size: size,
- }, errors.New("could not upload file")).
- Times(0) // never call this
+ }, errors.New("could not upload file")). // nolint:err113
+ Times(0) // never call this
},
expectedStatusCode: http.StatusInternalServerError,
pathToFile: "image.jpg",
@@ -202,7 +202,7 @@ func TestObjects(t *testing.T) {
return
}
- file, err := objects.FilesFromContextWithKey(r, "form-field")
+ file, err := objects.FilesFromContextWithKey(r.Context(), "form-field")
require.NoError(t, err)
@@ -212,7 +212,13 @@ func TestObjects(t *testing.T) {
fmt.Fprintf(w, "successfully uploaded the file")
})).ServeHTTP(recorder, r)
- require.Equal(t, v.expectedStatusCode, recorder.Result().StatusCode)
+ result := recorder.Result()
+
+ respBody := result.Body
+ defer respBody.Close()
+
+ require.Equal(t, v.expectedStatusCode, result.StatusCode)
+
verifyMatch(t, recorder)
})
}
diff --git a/pkg/objects/io.go b/pkg/objects/io.go
index 082bc66..a717255 100644
--- a/pkg/objects/io.go
+++ b/pkg/objects/io.go
@@ -1,15 +1,27 @@
package objects
import (
- "crypto/md5"
+ "bytes"
+ "crypto/md5" //nolint:gosec // MD5 is used for checksums, not for hashing passwords
"encoding/base64"
+ "fmt"
"io"
"os"
"github.com/gabriel-vasile/mimetype"
- "github.com/pkg/errors"
)
+// StreamToByte function reads the content of the provided io.Reader and returns it as a byte slice
+func StreamToByte(stream io.Reader) ([]byte, error) {
+ buf := new(bytes.Buffer)
+
+ if _, err := buf.ReadFrom(stream); err != nil {
+ return nil, err
+ }
+
+ return buf.Bytes(), nil
+}
+
// ReaderToSeeker function takes an io.Reader as input and returns an io.ReadSeeker which can be used to upload files to the object storage
func ReaderToSeeker(r io.Reader) (io.ReadSeeker, error) {
tmpfile, err := os.CreateTemp("", "upload-")
@@ -17,21 +29,20 @@ func ReaderToSeeker(r io.Reader) (io.ReadSeeker, error) {
return nil, err
}
- _, err = io.Copy(tmpfile, r)
- if err != nil {
+ if _, err = io.Copy(tmpfile, r); err != nil {
_ = tmpfile.Close()
_ = os.Remove(tmpfile.Name())
return nil, err
}
- _, err = tmpfile.Seek(0, 0)
- if err != nil {
+ if _, err = tmpfile.Seek(0, 0); err != nil {
_ = tmpfile.Close()
_ = os.Remove(tmpfile.Name())
return nil, err
}
+
// Return the file, which implements io.ReadSeeker which you can now pass to the objects uploader
return tmpfile, nil
}
@@ -40,13 +51,13 @@ func ReaderToSeeker(r io.Reader) (io.ReadSeeker, error) {
// the passed io object will be seeked to its beginning and will seek back to the
// beginning after reading its content.
func ComputeChecksum(data io.ReadSeeker) (string, error) {
- hash := md5.New()
+ hash := md5.New() //nolint:gosec // MD5 is used for checksums, not for hashing passwords
if _, err := io.Copy(hash, data); err != nil {
- return "", errors.Wrap(err, "could not read file")
+ return "", fmt.Errorf("could not read file: %w", err)
}
if _, err := data.Seek(0, io.SeekStart); err != nil { // seek back to beginning of file
- return "", errors.Wrap(err, "could not seek to beginning of file")
+ return "", fmt.Errorf("could not seek to beginning of file: %w", err)
}
return base64.StdEncoding.EncodeToString(hash.Sum(nil)), nil
@@ -57,17 +68,17 @@ func ComputeChecksum(data io.ReadSeeker) (string, error) {
// beginning and will seek back to the beginning after reading its content.
func DetectContentType(data io.ReadSeeker) (string, error) {
if _, err := data.Seek(0, io.SeekStart); err != nil { // seek back to beginning of file
- return "", errors.Wrap(err, "could not seek to beginning of file")
+ return "", fmt.Errorf("could not seek to beginning of file: %w", err)
}
// the default return value will default to application/octet-stream if unable to detect the MIME type
contentType, readErr := mimetype.DetectReader(data)
if readErr != nil {
- return "", errors.Wrap(readErr, "encountered error reading file content type")
+ return "", fmt.Errorf("encountered error reading file content type: %w", readErr)
}
if _, err := data.Seek(0, io.SeekStart); err != nil { // seek back to beginning of file
- return "", errors.Wrap(err, "could not seek to beginning of file")
+ return "", fmt.Errorf("could not seek to beginning of file: %w", err)
}
return contentType.String(), nil
diff --git a/pkg/objects/mocks/objects.go b/pkg/objects/mocks/objects.go
index 07fbe62..4dc863c 100644
--- a/pkg/objects/mocks/objects.go
+++ b/pkg/objects/mocks/objects.go
@@ -72,11 +72,12 @@ func (mr *MockStorageMockRecorder) Download(arg0, arg1, arg2 any) *gomock.Call {
}
// GetPresignedURL mocks base method.
-func (m *MockStorage) GetPresignedURL(arg0 context.Context, arg1 string) string {
+func (m *MockStorage) GetPresignedURL(arg0 context.Context, arg1 string) (string, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetPresignedURL", arg0, arg1)
ret0, _ := ret[0].(string)
- return ret0
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
}
// GetPresignedURL indicates an expected call of GetPresignedURL.
diff --git a/pkg/objects/objects.go b/pkg/objects/objects.go
index 941d706..01ba6ee 100644
--- a/pkg/objects/objects.go
+++ b/pkg/objects/objects.go
@@ -8,10 +8,14 @@ import (
// Storage is the primary interface that must be implemented by any storage backend and for interacting with Objects
type Storage interface {
+ // Upload is used to upload a file to the storage backend
Upload(context.Context, io.Reader, *UploadFileOptions) (*UploadedFileMetadata, error)
+ // ManagerUpload is used to upload multiple files to the storage backend
ManagerUpload(context.Context, [][]byte) error
+ // Download is used to download a file from the storage backend
Download(context.Context, string, *DownloadFileOptions) (*DownloadFileMetadata, io.ReadCloser, error)
- GetPresignedURL(context.Context, string) string
+ // GetPresignedURL is used to get a presigned URL for a file in the storage backend
+ GetPresignedURL(context.Context, string) (string, error)
io.Closer
}
@@ -20,17 +24,19 @@ type Storage interface {
// Objects is the definition for handling objects and file uploads
type Objects struct {
// Storage is the storage backend that will be used to store the uploaded files
- Storage Storage
+ Storage Storage `json:"-" koanf:"-"`
// MaxSize is the maximum size of file uploads to accept
- MaxSize int64
- // ignoreNonExistentKeys is a flag that indicates the handler should skip multipart form key values which do not match the configured
- IgnoreNonExistentKeys bool
+ MaxSize int64 `json:"maxSize" koanf:"maxSize"`
+ // MaxMemory is the maximum memory to use when parsing a multipart form
+ MaxMemory int64 `json:"maxMemory" koanf:"maxMemory"`
+ // IgnoreNonExistentKeys is a flag that indicates the handler should skip multipart form key values which do not match the configured
+ IgnoreNonExistentKeys bool `json:"ignoreNonExistentKeys" koanf:"ignoreNonExistentKeys"`
// ValidationFunc is a custom validation function
- ValidationFunc ValidationFunc
+ ValidationFunc ValidationFunc `json:"-" koanf:"-"`
// NameFuncGenerator is a function that allows you to rename your uploaded files
- NameFuncGenerator NameGeneratorFunc
+ NameFuncGenerator NameGeneratorFunc `json:"-" koanf:"-"`
// ErrorResponseHandler is a custom error response handler
- ErrorResponseHandler ErrResponseHandler
+ ErrorResponseHandler ErrResponseHandler `json:"-" koanf:"-"`
}
// New creates a new instance of Objects
@@ -45,6 +51,10 @@ func New(opts ...Option) (*Objects, error) {
handler.MaxSize = defaultFileUploadMaxSize
}
+ if handler.MaxMemory <= 0 {
+ handler.MaxMemory = defaultMaxMemorySize
+ }
+
if handler.ValidationFunc == nil {
handler.ValidationFunc = defaultValidationFunc
}
@@ -69,19 +79,28 @@ type Files map[string][]File
// File is a struct that holds information about a file - there is no distinction between a File received in a multipart form request or used in a download
type File struct {
- ID string `json:"id"`
- Name string `json:"name"`
- Path string `json:"path"`
- Type string `json:"type"`
- Thumbnail *string `json:"thumbnail"`
- MD5 []byte `json:"md5"`
+ // ID is the unique identifier for the file
+ ID string `json:"id"`
+ // Name of the file
+ Name string `json:"name"`
+ // Path of the file
+ Path string `json:"path"`
+ // Type of file that was uploaded
+ Type string `json:"type"`
+ // Thumbnail is a URL to the thumbnail of the file
+ Thumbnail *string `json:"thumbnail"`
+ // MD5 hash of the file
+ MD5 []byte `json:"md5"`
+ // CreatedAt is the time the file was created
CreatedAt time.Time `json:"created_at"`
+ // UpdatedAt is the time the file was last updated
UpdatedAt time.Time `json:"updated_at"`
- OwnerID string `json:"owner_id"`
+ // OwnerID is the ID of the organization or user who created the file
+ OwnerID string `json:"owner_id"`
// FieldName denotes the field from the multipart form
FieldName string `json:"field_name,omitempty"`
- // The name of the file from the client side
+ // OriginalName of the file from the client side
OriginalName string `json:"original_name,omitempty"`
// UploadedFileName denotes the name of the file when it was ultimately
// uploaded to the storage layer. The distinction is important because of
@@ -104,20 +123,27 @@ type File struct {
ProvidedExtension string `json:"provided_extension"`
}
-// NameGeneratorFunc allows you alter the name of the file before it is ultimately uplaoded and stored
+// NameGeneratorFunc allows you to alter the name of the file before it is ultimately uploaded and stored
type NameGeneratorFunc func(s string) string
// UploadedFileMetadata is a struct that holds information about a file that was successfully uploaded
type UploadedFileMetadata struct {
+ // FolderDestination is the folder that holds the file
FolderDestination string `json:"folder_destination,omitempty"`
- Key string `json:"key,omitempty"`
- Size int64 `json:"size,omitempty"`
- PresignedURL string `json:"presigned_url,omitempty"`
+ // Key is the unique identifier for the file
+ Key string `json:"key,omitempty"`
+ // Size in bytes of the uploaded file
+ Size int64 `json:"size,omitempty"`
+ // PresignedURL is the URL that can be used to download the file
+ PresignedURL string `json:"presigned_url,omitempty"`
}
-// DonwloadFileMetadata is a struct that holds information about a file that was successfully downloaded
+// DownloadFileMetadata is a struct that holds information about a file that was successfully downloaded
type DownloadFileMetadata struct {
+ // FolderDestination is the folder that holds the file
FolderDestination string `json:"folder_destination,omitempty"`
- Key string `json:"key,omitempty"`
- Size int64 `json:"size,omitempty"`
+ // Key is the unique identifier for the file
+ Key string `json:"key,omitempty"`
+ // Size in bytes of the downloaded file
+ Size int64 `json:"size,omitempty"`
}
diff --git a/pkg/objects/options.go b/pkg/objects/options.go
index f3e851d..9ef4cf4 100644
--- a/pkg/objects/options.go
+++ b/pkg/objects/options.go
@@ -13,15 +13,22 @@ type Option func(*Objects)
// WithStorage allows you to provide a storage backend to the Objects
func WithStorage(store Storage) Option {
- return func(gh *Objects) {
- gh.Storage = store
+ return func(o *Objects) {
+ o.Storage = store
}
}
// WithMaxFileSize allows you limit the size of file uploads to accept
func WithMaxFileSize(i int64) Option {
- return func(gh *Objects) {
- gh.MaxSize = i
+ return func(o *Objects) {
+ o.MaxSize = i
+ }
+}
+
+// WithMaxMemory allows you to limit the amount of memory to use when parsing a multipart form
+func WithMaxMemory(i int64) Option {
+ return func(o *Objects) {
+ o.MaxMemory = i
}
}
diff --git a/pkg/objects/storage/disk.go b/pkg/objects/storage/disk.go
index 3ee772c..1bcade5 100644
--- a/pkg/objects/storage/disk.go
+++ b/pkg/objects/storage/disk.go
@@ -2,7 +2,7 @@ package storage
import (
"context"
- "errors"
+ "fmt"
"io"
"os"
"path/filepath"
@@ -13,15 +13,17 @@ import (
type Disk struct {
destinationFolder string
+ Scheme string
}
func NewDiskStorage(folder string) (*Disk, error) {
if len(strings.TrimSpace(folder)) == 0 {
- return nil, errors.New("please provide a valid folder path")
+ return nil, fmt.Errorf("%w: please provide a valid folder path", ErrInvalidFolderPath)
}
return &Disk{
destinationFolder: folder,
+ Scheme: "file://",
}, nil
}
diff --git a/pkg/objects/storage/errors.go b/pkg/objects/storage/errors.go
index 5d2ea82..bf546ce 100644
--- a/pkg/objects/storage/errors.go
+++ b/pkg/objects/storage/errors.go
@@ -3,5 +3,8 @@ package storage
import "errors"
var (
- ErrProvideValidS3Bucket = errors.New("please provide a valid s3 bucket")
+ // ErrInvalidS3Bucket is returned when an invalid s3 bucket is provided
+ ErrInvalidS3Bucket = errors.New("invalid s3 bucket provided")
+ // ErrInvalidFolderPath is returned when an invalid folder path is provided
+ ErrInvalidFolderPath = errors.New("invalid folder path provided")
)
diff --git a/pkg/objects/storage/s3.go b/pkg/objects/storage/s3.go
index 1562613..e385306 100644
--- a/pkg/objects/storage/s3.go
+++ b/pkg/objects/storage/s3.go
@@ -3,7 +3,6 @@ package storage
import (
"bytes"
"context"
- "crypto/rsa"
"errors"
"fmt"
"io"
@@ -18,20 +17,26 @@ import (
"github.com/theopenlane/core/pkg/objects"
)
+const (
+	presignedURLTimeout = 15 * time.Minute
+)
+
+// S3Options is used to configure the S3Store
type S3Options struct {
+ // Bucket to store objects in
Bucket string
- // If true, this will log request and responses
+ // DebugMode will log all requests and responses
DebugMode bool
-
+	// UsePathStyle allows you to enable the client to use path-style addressing, i.e., https://s3.amazonaws.com/BUCKET/KEY.
+	// By default, the S3 client will use virtual hosted bucket addressing when possible (https://BUCKET.s3.amazonaws.com/KEY).
UsePathStyle bool
-
- // Only use if the bucket supports ACL
- ACL types.ObjectCannedACL
- Keynamespace string
- requestTimeout time.Duration
- Privatekey *rsa.PrivateKey
+ // ACL should only be used if the bucket supports ACL
+ ACL types.ObjectCannedACL
+ // KeyNamespace is used to prefix all keys with a namespace
+ KeyNamespace string
}
+// S3Store is a store that uses S3 as the backend
type S3Store struct {
Client *s3.Client
Opts S3Options
@@ -42,11 +47,13 @@ type S3Store struct {
ObjNotExistsWaiter *s3.ObjectNotExistsWaiter
ACL types.ObjectCannedACL
CacheControl string
+ Scheme string
}
+// NewS3FromConfig creates a new S3Store from the provided configuration
func NewS3FromConfig(cfg aws.Config, opts S3Options) (*S3Store, error) {
- if IsStringEmpty(opts.Bucket) {
- return nil, ErrProvideValidS3Bucket
+ if isStringEmpty(opts.Bucket) {
+ return nil, ErrInvalidS3Bucket
}
client := s3.NewFromConfig(cfg, func(o *s3.Options) {
@@ -62,11 +69,11 @@ func NewS3FromConfig(cfg aws.Config, opts S3Options) (*S3Store, error) {
Opts: opts,
Downloader: manager.NewDownloader(client),
Uploader: manager.NewUploader(client),
+ Scheme: "s3://",
}, nil
}
-func (s *S3Store) Close() error { return nil }
-
+// Exists checks if an object exists in S3
func (s *S3Store) Exists(ctx context.Context, key string) (bool, error) {
_, err := s.Client.HeadObject(ctx, &s3.HeadObjectInput{
Bucket: aws.String(s.Opts.Bucket),
@@ -84,6 +91,10 @@ func (s *S3Store) Exists(ctx context.Context, key string) (bool, error) {
return true, nil
}
+// Close the S3Store satisfying the Storage interface
+func (s *S3Store) Close() error { return nil }
+
+// ManagerUpload uploads multiple files to S3
func (s *S3Store) ManagerUpload(ctx context.Context, files [][]byte) error {
for i, file := range files {
_, err := s.Client.PutObject(ctx, &s3.PutObjectInput{
@@ -99,6 +110,7 @@ func (s *S3Store) ManagerUpload(ctx context.Context, files [][]byte) error {
return nil
}
+// Upload an object to S3 and return the metadata
func (s *S3Store) Upload(ctx context.Context, r io.Reader, opts *objects.UploadFileOptions) (*objects.UploadedFileMetadata, error) {
b := new(bytes.Buffer)
@@ -114,15 +126,14 @@ func (s *S3Store) Upload(ctx context.Context, r io.Reader, opts *objects.UploadF
return nil, err
}
- _, err = s.Client.PutObject(ctx, &s3.PutObjectInput{
+ if _, err = s.Client.PutObject(ctx, &s3.PutObjectInput{
Bucket: aws.String(s.Opts.Bucket),
Metadata: opts.Metadata,
Key: aws.String(opts.FileName),
ACL: s.Opts.ACL,
Body: seeker,
ContentType: aws.String(opts.ContentType),
- })
- if err != nil {
+ }); err != nil {
return nil, err
}
@@ -133,6 +144,7 @@ func (s *S3Store) Upload(ctx context.Context, r io.Reader, opts *objects.UploadF
}, nil
}
+// Download an object from S3 and return the metadata and a reader
func (s *S3Store) Download(ctx context.Context, key string, opts *objects.DownloadFileOptions) (*objects.DownloadFileMetadata, io.ReadCloser, error) {
output, err := s.Client.GetObject(ctx, &s3.GetObjectInput{
Bucket: aws.String(s.Opts.Bucket),
@@ -150,21 +162,21 @@ func (s *S3Store) Download(ctx context.Context, key string, opts *objects.Downlo
}
// PresignedURL returns a URL that provides access to a file for 15 minutes
-func (s *S3Store) GetPresignedURL(cntext context.Context, key string) string {
- presignClient := s3.NewPresignClient(s.Client)
+func (s *S3Store) GetPresignedURL(ctx context.Context, key string) (string, error) {
+ client := s3.NewPresignClient(s.Client)
- presignurl, err := presignClient.PresignGetObject(context.Background(), &s3.GetObjectInput{
+	presignURL, err := client.PresignGetObject(ctx, &s3.GetObjectInput{
Bucket: aws.String(s.Opts.Bucket),
Key: aws.String(key),
- ResponseContentDisposition: StringPointer("attachment"),
+ ResponseContentDisposition: toPointer("attachment"),
}, func(opts *s3.PresignOptions) {
- opts.Expires = 15 * time.Minute // nolint:mnd
+ opts.Expires = presignedURLTimeout
})
if err != nil {
- return ""
+ return "", err
}
- log.Info().Str("presigned_url", presignurl.URL).Msg("HAY MATT presigned URL")
+ log.Debug().Str("presigned_url", presignURL.URL).Msg("presigned URL created")
- return presignurl.URL
+ return presignURL.URL, nil
}
diff --git a/pkg/objects/storage/utils.go b/pkg/objects/storage/utils.go
index acf20bb..868233f 100644
--- a/pkg/objects/storage/utils.go
+++ b/pkg/objects/storage/utils.go
@@ -2,9 +2,10 @@ package storage
import "strings"
-// StringPointer allows you to take the address of a string literal
-func StringPointer(s string) *string {
+// toPointer allows you to take the address of any literal
+func toPointer[T any](s T) *T {
return &s
}
-func IsStringEmpty(s string) bool { return len(strings.TrimSpace(s)) == 0 }
+// isStringEmpty checks if a string is empty
+func isStringEmpty(s string) bool { return len(strings.TrimSpace(s)) == 0 }
diff --git a/pkg/objects/storage/utils_test.go b/pkg/objects/storage/utils_test.go
index 4e03e03..979916a 100644
--- a/pkg/objects/storage/utils_test.go
+++ b/pkg/objects/storage/utils_test.go
@@ -18,6 +18,6 @@ func TestIsStringEmpty(t *testing.T) {
}
for _, v := range tt {
- require.Equal(t, v.empty, IsStringEmpty(v.s))
+ require.Equal(t, v.empty, isStringEmpty(v.s))
}
}
diff --git a/pkg/objects/testdata/golden/TestObjects/upload_fails_because_of_mimetype_validation_constraints.golden b/pkg/objects/testdata/golden/TestObjects/upload_fails_because_of_mimetype_validation_constraints.golden
index f3fc5bd..1b54e42 100644
--- a/pkg/objects/testdata/golden/TestObjects/upload_fails_because_of_mimetype_validation_constraints.golden
+++ b/pkg/objects/testdata/golden/TestObjects/upload_fails_because_of_mimetype_validation_constraints.golden
@@ -1 +1 @@
-{"message" : "could not upload file", "error" : validation failed (form-field): unsupported mime type uploaded: text/plain}
\ No newline at end of file
+{"message" : "could not upload file", "error" : validation failed (form-field): unsupported mime type uploaded: text/plain; charset=utf-8}
\ No newline at end of file
diff --git a/pkg/objects/upload.go b/pkg/objects/upload.go
new file mode 100644
index 0000000..47659cf
--- /dev/null
+++ b/pkg/objects/upload.go
@@ -0,0 +1,34 @@
+package objects
+
+import "github.com/rs/zerolog/log"
+
+// Progress is used to track the progress of a file upload
+// It implements the io.Writer interface so it can be passed to an io.TeeReader()
+type Progress struct {
+ // TotalSize is the total size of the file being uploaded
+ TotalSize int64
+ // BytesRead is the number of bytes that have been read so far
+ BytesRead int64
+}
+
+// Write is used to satisfy the io.Writer interface. Instead of writing somewhere, it simply aggregates the total bytes on each read
+func (pr *Progress) Write(p []byte) (n int, err error) {
+ n, err = len(p), nil
+
+ pr.BytesRead += int64(n)
+
+ pr.Print()
+
+ return
+}
+
+// Print displays the current progress of the file upload
+func (pr *Progress) Print() {
+ if pr.BytesRead == pr.TotalSize {
+ log.Debug().Msg("file upload complete")
+
+ return
+ }
+
+ log.Debug().Int64("bytes_read", pr.BytesRead).Msg("file upload in progress")
+}
diff --git a/pkg/objects/validator.go b/pkg/objects/validator.go
index 5610475..111cc91 100644
--- a/pkg/objects/validator.go
+++ b/pkg/objects/validator.go
@@ -2,14 +2,17 @@ package objects
import "github.com/go-playground/validator/v10"
+// AppValidator is a wrapper around the validator package
type AppValidator struct {
validator *validator.Validate
}
+// Validate runs the validation against the provided struct
func (cv *AppValidator) Validate(i interface{}) error {
return cv.validator.Struct(i)
}
+// NewValidator creates a new instance of the AppValidator
func NewValidator() *AppValidator {
return &AppValidator{validator: validator.New()}
}
diff --git a/pkg/objects/validators.go b/pkg/objects/validators.go
index 880fd5b..1127ec9 100644
--- a/pkg/objects/validators.go
+++ b/pkg/objects/validators.go
@@ -22,7 +22,7 @@ func MimeTypeValidator(validMimeTypes ...string) ValidationFunc {
}
}
-// ChainValidators returns a validator that accepts multiple validating criteriacriteria
+// ChainValidators returns a validator that accepts multiple validating criteria
func ChainValidators(validators ...ValidationFunc) ValidationFunc {
return func(f File) error {
for _, validator := range validators {
diff --git a/pkg/openlaneclient/graphclient.go b/pkg/openlaneclient/graphclient.go
index a128634..29a02b6 100644
--- a/pkg/openlaneclient/graphclient.go
+++ b/pkg/openlaneclient/graphclient.go
@@ -947,7 +947,6 @@ type AdminSearch_AdminSearch_Nodes_FileSearchResult_Files struct {
Md5Hash *string "json:\"md5Hash,omitempty\" graphql:\"md5Hash\""
DetectedContentType string "json:\"detectedContentType\" graphql:\"detectedContentType\""
StoreKey *string "json:\"storeKey,omitempty\" graphql:\"storeKey\""
- CorrelationID *string "json:\"correlationID,omitempty\" graphql:\"correlationID\""
CategoryType *string "json:\"categoryType,omitempty\" graphql:\"categoryType\""
URI *string "json:\"uri,omitempty\" graphql:\"uri\""
StorageScheme *string "json:\"storageScheme,omitempty\" graphql:\"storageScheme\""
@@ -1009,12 +1008,6 @@ func (t *AdminSearch_AdminSearch_Nodes_FileSearchResult_Files) GetStoreKey() *st
}
return t.StoreKey
}
-func (t *AdminSearch_AdminSearch_Nodes_FileSearchResult_Files) GetCorrelationID() *string {
- if t == nil {
- t = &AdminSearch_AdminSearch_Nodes_FileSearchResult_Files{}
- }
- return t.CorrelationID
-}
func (t *AdminSearch_AdminSearch_Nodes_FileSearchResult_Files) GetCategoryType() *string {
if t == nil {
t = &AdminSearch_AdminSearch_Nodes_FileSearchResult_Files{}
@@ -11919,7 +11912,6 @@ func (t *GetFeatureHistories_FeatureHistories) GetEdges() []*GetFeatureHistories
type CreateBulkCSVFile_CreateBulkCSVFile_Files struct {
CategoryType *string "json:\"categoryType,omitempty\" graphql:\"categoryType\""
- CorrelationID *string "json:\"correlationID,omitempty\" graphql:\"correlationID\""
CreatedAt *time.Time "json:\"createdAt,omitempty\" graphql:\"createdAt\""
CreatedBy *string "json:\"createdBy,omitempty\" graphql:\"createdBy\""
DetectedContentType string "json:\"detectedContentType\" graphql:\"detectedContentType\""
@@ -11946,12 +11938,6 @@ func (t *CreateBulkCSVFile_CreateBulkCSVFile_Files) GetCategoryType() *string {
}
return t.CategoryType
}
-func (t *CreateBulkCSVFile_CreateBulkCSVFile_Files) GetCorrelationID() *string {
- if t == nil {
- t = &CreateBulkCSVFile_CreateBulkCSVFile_Files{}
- }
- return t.CorrelationID
-}
func (t *CreateBulkCSVFile_CreateBulkCSVFile_Files) GetCreatedAt() *time.Time {
if t == nil {
t = &CreateBulkCSVFile_CreateBulkCSVFile_Files{}
@@ -12074,7 +12060,6 @@ func (t *CreateBulkCSVFile_CreateBulkCSVFile) GetFiles() []*CreateBulkCSVFile_Cr
type CreateBulkFile_CreateBulkFile_Files struct {
CategoryType *string "json:\"categoryType,omitempty\" graphql:\"categoryType\""
- CorrelationID *string "json:\"correlationID,omitempty\" graphql:\"correlationID\""
CreatedAt *time.Time "json:\"createdAt,omitempty\" graphql:\"createdAt\""
CreatedBy *string "json:\"createdBy,omitempty\" graphql:\"createdBy\""
DetectedContentType string "json:\"detectedContentType\" graphql:\"detectedContentType\""
@@ -12101,12 +12086,6 @@ func (t *CreateBulkFile_CreateBulkFile_Files) GetCategoryType() *string {
}
return t.CategoryType
}
-func (t *CreateBulkFile_CreateBulkFile_Files) GetCorrelationID() *string {
- if t == nil {
- t = &CreateBulkFile_CreateBulkFile_Files{}
- }
- return t.CorrelationID
-}
func (t *CreateBulkFile_CreateBulkFile_Files) GetCreatedAt() *time.Time {
if t == nil {
t = &CreateBulkFile_CreateBulkFile_Files{}
@@ -12229,7 +12208,6 @@ func (t *CreateBulkFile_CreateBulkFile) GetFiles() []*CreateBulkFile_CreateBulkF
type CreateFile_CreateFile_File struct {
CategoryType *string "json:\"categoryType,omitempty\" graphql:\"categoryType\""
- CorrelationID *string "json:\"correlationID,omitempty\" graphql:\"correlationID\""
CreatedAt *time.Time "json:\"createdAt,omitempty\" graphql:\"createdAt\""
CreatedBy *string "json:\"createdBy,omitempty\" graphql:\"createdBy\""
DetectedContentType string "json:\"detectedContentType\" graphql:\"detectedContentType\""
@@ -12256,12 +12234,6 @@ func (t *CreateFile_CreateFile_File) GetCategoryType() *string {
}
return t.CategoryType
}
-func (t *CreateFile_CreateFile_File) GetCorrelationID() *string {
- if t == nil {
- t = &CreateFile_CreateFile_File{}
- }
- return t.CorrelationID
-}
func (t *CreateFile_CreateFile_File) GetCreatedAt() *time.Time {
if t == nil {
t = &CreateFile_CreateFile_File{}
@@ -12395,7 +12367,6 @@ func (t *DeleteFile_DeleteFile) GetDeletedID() string {
type GetAllFiles_Files_Edges_Node struct {
CategoryType *string "json:\"categoryType,omitempty\" graphql:\"categoryType\""
- CorrelationID *string "json:\"correlationID,omitempty\" graphql:\"correlationID\""
CreatedAt *time.Time "json:\"createdAt,omitempty\" graphql:\"createdAt\""
CreatedBy *string "json:\"createdBy,omitempty\" graphql:\"createdBy\""
DetectedContentType string "json:\"detectedContentType\" graphql:\"detectedContentType\""
@@ -12422,12 +12393,6 @@ func (t *GetAllFiles_Files_Edges_Node) GetCategoryType() *string {
}
return t.CategoryType
}
-func (t *GetAllFiles_Files_Edges_Node) GetCorrelationID() *string {
- if t == nil {
- t = &GetAllFiles_Files_Edges_Node{}
- }
- return t.CorrelationID
-}
func (t *GetAllFiles_Files_Edges_Node) GetCreatedAt() *time.Time {
if t == nil {
t = &GetAllFiles_Files_Edges_Node{}
@@ -12561,7 +12526,6 @@ func (t *GetAllFiles_Files) GetEdges() []*GetAllFiles_Files_Edges {
type GetFileByID_File struct {
CategoryType *string "json:\"categoryType,omitempty\" graphql:\"categoryType\""
- CorrelationID *string "json:\"correlationID,omitempty\" graphql:\"correlationID\""
CreatedAt *time.Time "json:\"createdAt,omitempty\" graphql:\"createdAt\""
CreatedBy *string "json:\"createdBy,omitempty\" graphql:\"createdBy\""
DetectedContentType string "json:\"detectedContentType\" graphql:\"detectedContentType\""
@@ -12588,12 +12552,6 @@ func (t *GetFileByID_File) GetCategoryType() *string {
}
return t.CategoryType
}
-func (t *GetFileByID_File) GetCorrelationID() *string {
- if t == nil {
- t = &GetFileByID_File{}
- }
- return t.CorrelationID
-}
func (t *GetFileByID_File) GetCreatedAt() *time.Time {
if t == nil {
t = &GetFileByID_File{}
@@ -12705,7 +12663,6 @@ func (t *GetFileByID_File) GetURI() *string {
type GetFiles_Files_Edges_Node struct {
CategoryType *string "json:\"categoryType,omitempty\" graphql:\"categoryType\""
- CorrelationID *string "json:\"correlationID,omitempty\" graphql:\"correlationID\""
CreatedAt *time.Time "json:\"createdAt,omitempty\" graphql:\"createdAt\""
CreatedBy *string "json:\"createdBy,omitempty\" graphql:\"createdBy\""
DetectedContentType string "json:\"detectedContentType\" graphql:\"detectedContentType\""
@@ -12732,12 +12689,6 @@ func (t *GetFiles_Files_Edges_Node) GetCategoryType() *string {
}
return t.CategoryType
}
-func (t *GetFiles_Files_Edges_Node) GetCorrelationID() *string {
- if t == nil {
- t = &GetFiles_Files_Edges_Node{}
- }
- return t.CorrelationID
-}
func (t *GetFiles_Files_Edges_Node) GetCreatedAt() *time.Time {
if t == nil {
t = &GetFiles_Files_Edges_Node{}
@@ -12871,7 +12822,6 @@ func (t *GetFiles_Files) GetEdges() []*GetFiles_Files_Edges {
type UpdateFile_UpdateFile_File struct {
CategoryType *string "json:\"categoryType,omitempty\" graphql:\"categoryType\""
- CorrelationID *string "json:\"correlationID,omitempty\" graphql:\"correlationID\""
CreatedAt *time.Time "json:\"createdAt,omitempty\" graphql:\"createdAt\""
CreatedBy *string "json:\"createdBy,omitempty\" graphql:\"createdBy\""
DetectedContentType string "json:\"detectedContentType\" graphql:\"detectedContentType\""
@@ -12898,12 +12848,6 @@ func (t *UpdateFile_UpdateFile_File) GetCategoryType() *string {
}
return t.CategoryType
}
-func (t *UpdateFile_UpdateFile_File) GetCorrelationID() *string {
- if t == nil {
- t = &UpdateFile_UpdateFile_File{}
- }
- return t.CorrelationID
-}
func (t *UpdateFile_UpdateFile_File) GetCreatedAt() *time.Time {
if t == nil {
t = &UpdateFile_UpdateFile_File{}
@@ -13026,7 +12970,6 @@ func (t *UpdateFile_UpdateFile) GetFile() *UpdateFile_UpdateFile_File {
type GetAllFileHistories_FileHistories_Edges_Node struct {
CategoryType *string "json:\"categoryType,omitempty\" graphql:\"categoryType\""
- CorrelationID *string "json:\"correlationID,omitempty\" graphql:\"correlationID\""
CreatedAt *time.Time "json:\"createdAt,omitempty\" graphql:\"createdAt\""
CreatedBy *string "json:\"createdBy,omitempty\" graphql:\"createdBy\""
DetectedContentType string "json:\"detectedContentType\" graphql:\"detectedContentType\""
@@ -13056,12 +12999,6 @@ func (t *GetAllFileHistories_FileHistories_Edges_Node) GetCategoryType() *string
}
return t.CategoryType
}
-func (t *GetAllFileHistories_FileHistories_Edges_Node) GetCorrelationID() *string {
- if t == nil {
- t = &GetAllFileHistories_FileHistories_Edges_Node{}
- }
- return t.CorrelationID
-}
func (t *GetAllFileHistories_FileHistories_Edges_Node) GetCreatedAt() *time.Time {
if t == nil {
t = &GetAllFileHistories_FileHistories_Edges_Node{}
@@ -13213,7 +13150,6 @@ func (t *GetAllFileHistories_FileHistories) GetEdges() []*GetAllFileHistories_Fi
type GetFileHistories_FileHistories_Edges_Node struct {
CategoryType *string "json:\"categoryType,omitempty\" graphql:\"categoryType\""
- CorrelationID *string "json:\"correlationID,omitempty\" graphql:\"correlationID\""
CreatedAt *time.Time "json:\"createdAt,omitempty\" graphql:\"createdAt\""
CreatedBy *string "json:\"createdBy,omitempty\" graphql:\"createdBy\""
DetectedContentType string "json:\"detectedContentType\" graphql:\"detectedContentType\""
@@ -13243,12 +13179,6 @@ func (t *GetFileHistories_FileHistories_Edges_Node) GetCategoryType() *string {
}
return t.CategoryType
}
-func (t *GetFileHistories_FileHistories_Edges_Node) GetCorrelationID() *string {
- if t == nil {
- t = &GetFileHistories_FileHistories_Edges_Node{}
- }
- return t.CorrelationID
-}
func (t *GetFileHistories_FileHistories_Edges_Node) GetCreatedAt() *time.Time {
if t == nil {
t = &GetFileHistories_FileHistories_Edges_Node{}
@@ -33195,7 +33125,6 @@ const AdminSearchDocument = `query AdminSearch ($query: String!) {
md5Hash
detectedContentType
storeKey
- correlationID
categoryType
uri
storageScheme
@@ -36898,7 +36827,6 @@ const CreateBulkCSVFileDocument = `mutation CreateBulkCSVFile ($input: Upload!)
createBulkCSVFile(input: $input) {
files {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
@@ -36943,7 +36871,6 @@ const CreateBulkFileDocument = `mutation CreateBulkFile ($input: [CreateFileInpu
createBulkFile(input: $input) {
files {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
@@ -36988,7 +36915,6 @@ const CreateFileDocument = `mutation CreateFile ($input: CreateFileInput!) {
createFile(input: $input) {
file {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
@@ -37058,7 +36984,6 @@ const GetAllFilesDocument = `query GetAllFiles {
edges {
node {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
@@ -37101,7 +37026,6 @@ func (c *Client) GetAllFiles(ctx context.Context, interceptors ...clientv2.Reque
const GetFileByIDDocument = `query GetFileByID ($fileId: ID!) {
file(id: $fileId) {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
@@ -37146,7 +37070,6 @@ const GetFilesDocument = `query GetFiles ($where: FileWhereInput) {
edges {
node {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
@@ -37192,7 +37115,6 @@ const UpdateFileDocument = `mutation UpdateFile ($updateFileId: ID!, $input: Upd
updateFile(id: $updateFileId, input: $input) {
file {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
@@ -37239,7 +37161,6 @@ const GetAllFileHistoriesDocument = `query GetAllFileHistories {
edges {
node {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
@@ -37287,7 +37208,6 @@ const GetFileHistoriesDocument = `query GetFileHistories ($where: FileHistoryWhe
edges {
node {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
diff --git a/pkg/openlaneclient/models.go b/pkg/openlaneclient/models.go
index f3ca219..c90ec62 100644
--- a/pkg/openlaneclient/models.go
+++ b/pkg/openlaneclient/models.go
@@ -1089,11 +1089,9 @@ type CreateFileInput struct {
Md5Hash *string `json:"md5Hash,omitempty"`
// the content type of the HTTP request - may be different than MIME type as multipart-form can transmit multiple files and different types
DetectedContentType string `json:"detectedContentType"`
- // the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ // the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
StoreKey *string `json:"storeKey,omitempty"`
- // the ULID provided in the http request indicating the ULID to correleate the file to
- CorrelationID *string `json:"correlationID,omitempty"`
- // the category type of the file, if any (e.g. contract, invoice, etc.)
+ // the category type of the file, if any (e.g. evidence, invoice, etc.)
CategoryType *string `json:"categoryType,omitempty"`
// the full URI of the file
URI *string `json:"uri,omitempty"`
@@ -5315,11 +5313,9 @@ type File struct {
Md5Hash *string `json:"md5Hash,omitempty"`
// the content type of the HTTP request - may be different than MIME type as multipart-form can transmit multiple files and different types
DetectedContentType string `json:"detectedContentType"`
- // the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ // the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
StoreKey *string `json:"storeKey,omitempty"`
- // the ULID provided in the http request indicating the ULID to correleate the file to
- CorrelationID *string `json:"correlationID,omitempty"`
- // the category type of the file, if any (e.g. contract, invoice, etc.)
+ // the category type of the file, if any (e.g. evidence, invoice, etc.)
CategoryType *string `json:"categoryType,omitempty"`
// the full URI of the file
URI *string `json:"uri,omitempty"`
@@ -5405,11 +5401,9 @@ type FileHistory struct {
Md5Hash *string `json:"md5Hash,omitempty"`
// the content type of the HTTP request - may be different than MIME type as multipart-form can transmit multiple files and different types
DetectedContentType string `json:"detectedContentType"`
- // the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ // the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
StoreKey *string `json:"storeKey,omitempty"`
- // the ULID provided in the http request indicating the ULID to correleate the file to
- CorrelationID *string `json:"correlationID,omitempty"`
- // the category type of the file, if any (e.g. contract, invoice, etc.)
+ // the category type of the file, if any (e.g. evidence, invoice, etc.)
CategoryType *string `json:"categoryType,omitempty"`
// the full URI of the file
URI *string `json:"uri,omitempty"`
@@ -5681,22 +5675,6 @@ type FileHistoryWhereInput struct {
StoreKeyNotNil *bool `json:"storeKeyNotNil,omitempty"`
StoreKeyEqualFold *string `json:"storeKeyEqualFold,omitempty"`
StoreKeyContainsFold *string `json:"storeKeyContainsFold,omitempty"`
- // correlation_id field predicates
- CorrelationID *string `json:"correlationID,omitempty"`
- CorrelationIDNeq *string `json:"correlationIDNEQ,omitempty"`
- CorrelationIDIn []string `json:"correlationIDIn,omitempty"`
- CorrelationIDNotIn []string `json:"correlationIDNotIn,omitempty"`
- CorrelationIDGt *string `json:"correlationIDGT,omitempty"`
- CorrelationIDGte *string `json:"correlationIDGTE,omitempty"`
- CorrelationIDLt *string `json:"correlationIDLT,omitempty"`
- CorrelationIDLte *string `json:"correlationIDLTE,omitempty"`
- CorrelationIDContains *string `json:"correlationIDContains,omitempty"`
- CorrelationIDHasPrefix *string `json:"correlationIDHasPrefix,omitempty"`
- CorrelationIDHasSuffix *string `json:"correlationIDHasSuffix,omitempty"`
- CorrelationIDIsNil *bool `json:"correlationIDIsNil,omitempty"`
- CorrelationIDNotNil *bool `json:"correlationIDNotNil,omitempty"`
- CorrelationIDEqualFold *string `json:"correlationIDEqualFold,omitempty"`
- CorrelationIDContainsFold *string `json:"correlationIDContainsFold,omitempty"`
// category_type field predicates
CategoryType *string `json:"categoryType,omitempty"`
CategoryTypeNeq *string `json:"categoryTypeNEQ,omitempty"`
@@ -6001,22 +5979,6 @@ type FileWhereInput struct {
StoreKeyNotNil *bool `json:"storeKeyNotNil,omitempty"`
StoreKeyEqualFold *string `json:"storeKeyEqualFold,omitempty"`
StoreKeyContainsFold *string `json:"storeKeyContainsFold,omitempty"`
- // correlation_id field predicates
- CorrelationID *string `json:"correlationID,omitempty"`
- CorrelationIDNeq *string `json:"correlationIDNEQ,omitempty"`
- CorrelationIDIn []string `json:"correlationIDIn,omitempty"`
- CorrelationIDNotIn []string `json:"correlationIDNotIn,omitempty"`
- CorrelationIDGt *string `json:"correlationIDGT,omitempty"`
- CorrelationIDGte *string `json:"correlationIDGTE,omitempty"`
- CorrelationIDLt *string `json:"correlationIDLT,omitempty"`
- CorrelationIDLte *string `json:"correlationIDLTE,omitempty"`
- CorrelationIDContains *string `json:"correlationIDContains,omitempty"`
- CorrelationIDHasPrefix *string `json:"correlationIDHasPrefix,omitempty"`
- CorrelationIDHasSuffix *string `json:"correlationIDHasSuffix,omitempty"`
- CorrelationIDIsNil *bool `json:"correlationIDIsNil,omitempty"`
- CorrelationIDNotNil *bool `json:"correlationIDNotNil,omitempty"`
- CorrelationIDEqualFold *string `json:"correlationIDEqualFold,omitempty"`
- CorrelationIDContainsFold *string `json:"correlationIDContainsFold,omitempty"`
// category_type field predicates
CategoryType *string `json:"categoryType,omitempty"`
CategoryTypeNeq *string `json:"categoryTypeNEQ,omitempty"`
@@ -12780,13 +12742,10 @@ type UpdateFileInput struct {
ClearMd5Hash *bool `json:"clearMd5Hash,omitempty"`
// the content type of the HTTP request - may be different than MIME type as multipart-form can transmit multiple files and different types
DetectedContentType *string `json:"detectedContentType,omitempty"`
- // the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ // the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
StoreKey *string `json:"storeKey,omitempty"`
ClearStoreKey *bool `json:"clearStoreKey,omitempty"`
- // the ULID provided in the http request indicating the ULID to correleate the file to
- CorrelationID *string `json:"correlationID,omitempty"`
- ClearCorrelationID *bool `json:"clearCorrelationID,omitempty"`
- // the category type of the file, if any (e.g. contract, invoice, etc.)
+ // the category type of the file, if any (e.g. evidence, invoice, etc.)
CategoryType *string `json:"categoryType,omitempty"`
ClearCategoryType *bool `json:"clearCategoryType,omitempty"`
// the full URI of the file
diff --git a/pkg/testutils/client.go b/pkg/testutils/client.go
index 5f82ea3..358dd25 100644
--- a/pkg/testutils/client.go
+++ b/pkg/testutils/client.go
@@ -9,6 +9,7 @@ import (
"github.com/99designs/gqlgen/graphql/handler/extension"
"github.com/99designs/gqlgen/graphql/handler/lru"
"github.com/theopenlane/core/internal/graphapi"
+ "github.com/theopenlane/core/internal/middleware/objects"
"github.com/theopenlane/core/pkg/middleware/auth"
"github.com/theopenlane/core/pkg/openlaneclient"
echo "github.com/theopenlane/echox"
@@ -32,8 +33,8 @@ func (l localRoundTripper) RoundTrip(req *http.Request) (*http.Response, error)
}
// TestClient creates a new OpenlaneClient for testing
-func TestClient(t *testing.T, c *ent.Client, opts ...openlaneclient.ClientOption) (*openlaneclient.OpenlaneClient, error) {
- e := testEchoServer(t, c, false)
+func TestClient(t *testing.T, c *ent.Client, u *objects.Upload, opts ...openlaneclient.ClientOption) (*openlaneclient.OpenlaneClient, error) {
+ e := testEchoServer(t, c, u, false)
// setup interceptors
if opts == nil {
@@ -48,8 +49,8 @@ func TestClient(t *testing.T, c *ent.Client, opts ...openlaneclient.ClientOption
}
// TestClientWithAuth creates a new OpenlaneClient for testing that includes the auth middleware
-func TestClientWithAuth(t *testing.T, c *ent.Client, opts ...openlaneclient.ClientOption) (*openlaneclient.OpenlaneClient, error) {
- e := testEchoServer(t, c, true)
+func TestClientWithAuth(t *testing.T, c *ent.Client, u *objects.Upload, opts ...openlaneclient.ClientOption) (*openlaneclient.OpenlaneClient, error) {
+ e := testEchoServer(t, c, u, true)
// setup interceptors
if opts == nil {
@@ -65,8 +66,8 @@ func TestClientWithAuth(t *testing.T, c *ent.Client, opts ...openlaneclient.Clie
// testEchoServer creates a new echo server for testing the graph api
// and optionally includes the middleware for authentication testing
-func testEchoServer(t *testing.T, c *ent.Client, includeMiddleware bool) *echo.Echo {
- srv := testGraphServer(t, c)
+func testEchoServer(t *testing.T, c *ent.Client, u *objects.Upload, includeMiddleware bool) *echo.Echo {
+ srv := testGraphServer(t, c, u)
e := echo.New()
@@ -103,10 +104,10 @@ func createAuthConfig(c *ent.Client) *auth.AuthOptions {
}
// testGraphServer creates a new graphql server for testing the graph api
-func testGraphServer(t *testing.T, c *ent.Client) *handler.Server {
+func testGraphServer(t *testing.T, c *ent.Client, u *objects.Upload) *handler.Server {
srv := handler.NewDefaultServer(
graphapi.NewExecutableSchema(
- graphapi.Config{Resolvers: graphapi.NewResolver(c)},
+ graphapi.Config{Resolvers: graphapi.NewResolver(c, u)},
))
// lower the cache size for testing
diff --git a/pkg/transaction/doc.go b/pkg/transaction/doc.go
deleted file mode 100644
index cc06795..0000000
--- a/pkg/transaction/doc.go
+++ /dev/null
@@ -1,2 +0,0 @@
-// Package transaction implements a transaction middleware for REST endpoints using the ent db client
-package transaction
diff --git a/pkg/transaction/transaction.go b/pkg/transaction/transaction.go
deleted file mode 100644
index 3c356f6..0000000
--- a/pkg/transaction/transaction.go
+++ /dev/null
@@ -1,79 +0,0 @@
-package transaction
-
-import (
- "context"
- "errors"
- "net/http"
-
- "github.com/rs/zerolog/log"
- echo "github.com/theopenlane/echox"
-
- ent "github.com/theopenlane/core/internal/ent/generated"
-)
-
-const (
- rollbackErr = "error rolling back transaction"
- transactionStartErr = "error starting transaction"
- transactionCommitErr = "error committing transaction"
-)
-
-var (
- // ErrProcessingRequest is returned when the request cannot be processed
- ErrProcessingRequest = errors.New("error processing request, please try again")
-)
-
-type Client struct {
- EntDBClient *ent.Client
-}
-
-type entClientCtxKey struct{}
-
-// FromContext returns a TX Client stored inside a context, or nil if there isn't one
-func FromContext(ctx context.Context) *ent.Tx {
- c, _ := ctx.Value(entClientCtxKey{}).(*ent.Tx)
- return c
-}
-
-// NewContext returns a new context with the given TX Client attached
-func NewContext(parent context.Context, c *ent.Tx) context.Context {
- return context.WithValue(parent, entClientCtxKey{}, c)
-}
-
-// Middleware returns a middleware function for transactions on REST endpoints
-func (d *Client) Middleware(next echo.HandlerFunc) echo.HandlerFunc {
- return func(c echo.Context) error {
- client, err := d.EntDBClient.Tx(c.Request().Context())
- if err != nil {
- log.Error().Err(err).Msg(transactionStartErr)
-
- return c.JSON(http.StatusInternalServerError, ErrProcessingRequest)
- }
-
- // add to context
- ctx := NewContext(c.Request().Context(), client)
-
- c.SetRequest(c.Request().WithContext(ctx))
-
- if err := next(c); err != nil {
- log.Debug().Msg("rolling back transaction in middleware")
-
- if err := client.Rollback(); err != nil {
- log.Error().Err(err).Msg(rollbackErr)
-
- return c.JSON(http.StatusInternalServerError, ErrProcessingRequest)
- }
-
- return err
- }
-
- log.Debug().Msg("committing transaction in middleware")
-
- if err := client.Commit(); err != nil {
- log.Error().Err(err).Msg(transactionCommitErr)
-
- return c.JSON(http.StatusInternalServerError, ErrProcessingRequest)
- }
-
- return nil
- }
-}
diff --git a/query/adminsearch.graphql b/query/adminsearch.graphql
index 7e141f6..cd58852 100644
--- a/query/adminsearch.graphql
+++ b/query/adminsearch.graphql
@@ -127,7 +127,6 @@ query AdminSearch($query: String!) {
md5Hash
detectedContentType
storeKey
- correlationID
categoryType
uri
storageScheme
diff --git a/query/file.graphql b/query/file.graphql
index 9f3b4e4..1bead00 100644
--- a/query/file.graphql
+++ b/query/file.graphql
@@ -3,7 +3,6 @@ mutation CreateBulkCSVFile($input: Upload!) {
createBulkCSVFile(input: $input) {
files {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
@@ -30,7 +29,6 @@ mutation CreateBulkFile($input: [CreateFileInput!]) {
createBulkFile(input: $input) {
files {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
@@ -57,7 +55,6 @@ mutation CreateFile($input: CreateFileInput!) {
createFile(input: $input) {
file {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
@@ -91,7 +88,6 @@ query GetAllFiles {
edges {
node {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
@@ -117,7 +113,6 @@ query GetAllFiles {
query GetFileByID($fileId: ID!) {
file(id: $fileId) {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
@@ -144,7 +139,6 @@ query GetFiles($where: FileWhereInput) {
edges {
node {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
@@ -171,7 +165,6 @@ mutation UpdateFile($updateFileId: ID!, $input: UpdateFileInput!) {
updateFile(id: $updateFileId, input: $input) {
file {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
diff --git a/query/filehistory.graphql b/query/filehistory.graphql
index 6f48ed9..0e48983 100644
--- a/query/filehistory.graphql
+++ b/query/filehistory.graphql
@@ -5,7 +5,6 @@ query GetAllFileHistories {
edges {
node {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
@@ -37,7 +36,6 @@ query GetFileHistories($where: FileHistoryWhereInput) {
edges {
node {
categoryType
- correlationID
createdAt
createdBy
detectedContentType
diff --git a/schema.graphql b/schema.graphql
index d13075f..daf882b 100644
--- a/schema.graphql
+++ b/schema.graphql
@@ -1409,15 +1409,11 @@ input CreateFileInput {
"""
detectedContentType: String!
"""
- the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
"""
storeKey: String
"""
- the ULID provided in the http request indicating the ULID to correleate the file to
- """
- correlationID: String
- """
- the category type of the file, if any (e.g. contract, invoice, etc.)
+ the category type of the file, if any (e.g. evidence, invoice, etc.)
"""
categoryType: String
"""
@@ -6856,15 +6852,11 @@ type File implements Node {
"""
detectedContentType: String!
"""
- the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
"""
storeKey: String
"""
- the ULID provided in the http request indicating the ULID to correleate the file to
- """
- correlationID: String
- """
- the category type of the file, if any (e.g. contract, invoice, etc.)
+ the category type of the file, if any (e.g. evidence, invoice, etc.)
"""
categoryType: String
"""
@@ -6992,15 +6984,11 @@ type FileHistory implements Node {
"""
detectedContentType: String!
"""
- the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
"""
storeKey: String
"""
- the ULID provided in the http request indicating the ULID to correleate the file to
- """
- correlationID: String
- """
- the category type of the file, if any (e.g. contract, invoice, etc.)
+ the category type of the file, if any (e.g. evidence, invoice, etc.)
"""
categoryType: String
"""
@@ -7337,24 +7325,6 @@ input FileHistoryWhereInput {
storeKeyEqualFold: String
storeKeyContainsFold: String
"""
- correlation_id field predicates
- """
- correlationID: String
- correlationIDNEQ: String
- correlationIDIn: [String!]
- correlationIDNotIn: [String!]
- correlationIDGT: String
- correlationIDGTE: String
- correlationIDLT: String
- correlationIDLTE: String
- correlationIDContains: String
- correlationIDHasPrefix: String
- correlationIDHasSuffix: String
- correlationIDIsNil: Boolean
- correlationIDNotNil: Boolean
- correlationIDEqualFold: String
- correlationIDContainsFold: String
- """
category_type field predicates
"""
categoryType: String
@@ -7700,24 +7670,6 @@ input FileWhereInput {
storeKeyEqualFold: String
storeKeyContainsFold: String
"""
- correlation_id field predicates
- """
- correlationID: String
- correlationIDNEQ: String
- correlationIDIn: [String!]
- correlationIDNotIn: [String!]
- correlationIDGT: String
- correlationIDGTE: String
- correlationIDLT: String
- correlationIDLTE: String
- correlationIDContains: String
- correlationIDHasPrefix: String
- correlationIDHasSuffix: String
- correlationIDIsNil: Boolean
- correlationIDNotNil: Boolean
- correlationIDEqualFold: String
- correlationIDContainsFold: String
- """
category_type field predicates
"""
categoryType: String
@@ -12346,7 +12298,7 @@ type Mutation {
values of the user
"""
input: CreateUserInput!
- ): UserCreatePayload!
+ avatarURL: Upload): UserCreatePayload!
"""
Update an existing user
"""
@@ -12360,7 +12312,7 @@ type Mutation {
New values for the user
"""
input: UpdateUserInput!
- ): UserUpdatePayload!
+ avatarFile: Upload): UserUpdatePayload!
"""
Delete an existing user
"""
@@ -20157,17 +20109,12 @@ input UpdateFileInput {
"""
detectedContentType: String
"""
- the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
"""
storeKey: String
clearStoreKey: Boolean
"""
- the ULID provided in the http request indicating the ULID to correleate the file to
- """
- correlationID: String
- clearCorrelationID: Boolean
- """
- the category type of the file, if any (e.g. contract, invoice, etc.)
+ the category type of the file, if any (e.g. evidence, invoice, etc.)
"""
categoryType: String
clearCategoryType: Boolean
diff --git a/schema/ent.graphql b/schema/ent.graphql
index ef36cfb..dcb5395 100644
--- a/schema/ent.graphql
+++ b/schema/ent.graphql
@@ -1284,15 +1284,11 @@ input CreateFileInput {
"""
detectedContentType: String!
"""
- the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
"""
storeKey: String
"""
- the ULID provided in the http request indicating the ULID to correleate the file to
- """
- correlationID: String
- """
- the category type of the file, if any (e.g. contract, invoice, etc.)
+ the category type of the file, if any (e.g. evidence, invoice, etc.)
"""
categoryType: String
"""
@@ -6417,15 +6413,11 @@ type File implements Node {
"""
detectedContentType: String!
"""
- the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
"""
storeKey: String
"""
- the ULID provided in the http request indicating the ULID to correleate the file to
- """
- correlationID: String
- """
- the category type of the file, if any (e.g. contract, invoice, etc.)
+ the category type of the file, if any (e.g. evidence, invoice, etc.)
"""
categoryType: String
"""
@@ -6526,15 +6518,11 @@ type FileHistory implements Node {
"""
detectedContentType: String!
"""
- the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
"""
storeKey: String
"""
- the ULID provided in the http request indicating the ULID to correleate the file to
- """
- correlationID: String
- """
- the category type of the file, if any (e.g. contract, invoice, etc.)
+ the category type of the file, if any (e.g. evidence, invoice, etc.)
"""
categoryType: String
"""
@@ -6871,24 +6859,6 @@ input FileHistoryWhereInput {
storeKeyEqualFold: String
storeKeyContainsFold: String
"""
- correlation_id field predicates
- """
- correlationID: String
- correlationIDNEQ: String
- correlationIDIn: [String!]
- correlationIDNotIn: [String!]
- correlationIDGT: String
- correlationIDGTE: String
- correlationIDLT: String
- correlationIDLTE: String
- correlationIDContains: String
- correlationIDHasPrefix: String
- correlationIDHasSuffix: String
- correlationIDIsNil: Boolean
- correlationIDNotNil: Boolean
- correlationIDEqualFold: String
- correlationIDContainsFold: String
- """
category_type field predicates
"""
categoryType: String
@@ -7222,24 +7192,6 @@ input FileWhereInput {
storeKeyEqualFold: String
storeKeyContainsFold: String
"""
- correlation_id field predicates
- """
- correlationID: String
- correlationIDNEQ: String
- correlationIDIn: [String!]
- correlationIDNotIn: [String!]
- correlationIDGT: String
- correlationIDGTE: String
- correlationIDLT: String
- correlationIDLTE: String
- correlationIDContains: String
- correlationIDHasPrefix: String
- correlationIDHasSuffix: String
- correlationIDIsNil: Boolean
- correlationIDNotNil: Boolean
- correlationIDEqualFold: String
- correlationIDContainsFold: String
- """
category_type field predicates
"""
categoryType: String
@@ -16961,17 +16913,12 @@ input UpdateFileInput {
"""
detectedContentType: String
"""
- the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perfdorm easier conditional evaluation on the key and what to do with the file based on key
+ the key parsed out of a multipart-form request; if we allow multiple files to be uploaded we may want our API specifications to require the use of different keys allowing us to perform easier conditional evaluation on the key and what to do with the file based on key
"""
storeKey: String
clearStoreKey: Boolean
"""
- the ULID provided in the http request indicating the ULID to correleate the file to
- """
- correlationID: String
- clearCorrelationID: Boolean
- """
- the category type of the file, if any (e.g. contract, invoice, etc.)
+ the category type of the file, if any (e.g. evidence, invoice, etc.)
"""
categoryType: String
clearCategoryType: Boolean
diff --git a/schema/user.graphql b/schema/user.graphql
index 742fbbc..38fe427 100644
--- a/schema/user.graphql
+++ b/schema/user.graphql
@@ -19,6 +19,7 @@ extend type Mutation{
values of the user
"""
input: CreateUserInput!
+ avatarURL: Upload
): UserCreatePayload!
"""
Update an existing user
@@ -32,6 +33,7 @@ extend type Mutation{
New values for the user
"""
input: UpdateUserInput!
+ avatarFile: Upload
): UserUpdatePayload!
"""
Delete an existing user