Skip to content

Commit

Permalink
Merge pull request #226 from Digital-Engineering/development
Browse files Browse the repository at this point in the history
Development
  • Loading branch information
DnOberon authored and GitHub Enterprise committed Feb 13, 2023
2 parents f93adb3 + 49990be commit 8e3951f
Show file tree
Hide file tree
Showing 12 changed files with 370 additions and 489 deletions.
6 changes: 6 additions & 0 deletions API Documentation/Core.swagger_collection.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -3772,6 +3772,12 @@ paths:
file:
type: string
format: binary
parameters:
- schema:
type: boolean
default: false
in: query
name: isEnabled
'/containers/{container_id}/import/datasources/{data_source_id}/mappings/{mapping_id}/transformations':
get:
tags:
Expand Down
3 changes: 2 additions & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ FROM cimg/rust:1.65.0-node as build

USER root
RUN sudo apt-get update
RUN sudo apt-get upgrade libtasn1-6

# these settings are needed for the admin web gui build, these variables are all baked into the Vue application and thus
# are available to any end user that wants to dig deep enough in the webpage - as such we don't feel it a security risk
Expand Down Expand Up @@ -43,7 +44,7 @@ RUN npm run build:web-gl
# catch any env file a user might have accidentally built into the container
RUN rm -rf .env

FROM node:18.13-buster-slim as production
FROM node:18.14.0-buster-slim as production
RUN apt-get update

WORKDIR /srv/core_api
Expand Down
771 changes: 301 additions & 470 deletions package-lock.json

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,7 @@
"ioredis": "^4.24.2",
"isomorphic-dompurify": "^0.15.0",
"JSONStream": "^1.3.5",
"jsonwebtoken": "^8.5.1",
"jsonwebtoken": "^9.0.0",
"kind-of": "^6.0.3",
"method-override": "^3.0.0",
"minimist": "^1.2.5",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,7 @@ export default class EdgeMapper extends Mapper {
ON CONFLICT(container_id,relationship_pair_id,data_source_id,created_at, origin_id, destination_id) DO UPDATE SET
properties = EXCLUDED.properties,
metadata = EXCLUDED.metadata
WHERE EXCLUDED.id = edges.id
WHERE EXCLUDED.id = edges.id AND excluded.properties IS DISTINCT FROM edges.properties
RETURNING *`;

const values = edges.map((e) => [
Expand Down Expand Up @@ -236,7 +236,7 @@ export default class EdgeMapper extends Mapper {
LEFT JOIN metatype_relationships ON metatype_relationship_pairs.relationship_id = metatype_relationships.id
WHERE edges.id = $1 ORDER BY edges.created_at ASC`,
values: [edgeID],
}
};
}

private retrieveRawDataHistoryStatement(edgeID: string): QueryConfig {
Expand All @@ -247,7 +247,7 @@ export default class EdgeMapper extends Mapper {
LEFT JOIN data_staging ON edges.data_staging_id = data_staging.id
WHERE edges.id = $1 ORDER BY edges.created_at ASC`,
values: [edgeID],
}
};
}

private retrieveByRelationshipStatement(origin: string, relationship: string, destination: string): QueryConfig {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,7 @@ export default class NodeMapper extends Mapper {
properties = EXCLUDED.properties,
metadata = EXCLUDED.metadata,
deleted_at = EXCLUDED.deleted_at
WHERE EXCLUDED.id = nodes.id
WHERE EXCLUDED.id = nodes.id AND EXCLUDED.properties IS DISTINCT FROM nodes.properties
RETURNING *`;

const values = nodes.map((n) => [
Expand Down Expand Up @@ -237,7 +237,7 @@ export default class NodeMapper extends Mapper {
FROM nodes LEFT JOIN metatypes ON nodes.metatype_id = metatypes.id
WHERE nodes.id = $1 ORDER BY nodes.created_at ASC`,
values: [nodeID],
}
};
}

// retrieves node history with raw data records attached
Expand All @@ -248,7 +248,7 @@ export default class NodeMapper extends Mapper {
LEFT JOIN data_staging ON nodes.data_staging_id = data_staging.id
WHERE nodes.id = $1 ORDER BY nodes.created_at ASC`,
values: [nodeID],
}
};
}

private domainRetrieveStatement(nodeID: string, containerID: string): QueryConfig {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
/*
 node_insert_trigger: BEFORE INSERT trigger function for the nodes table.

 Purpose: de-duplicate inserts that represent a new version of an existing
 node. If a node with the same (original_data_id, metatype_id, data_source_id)
 already exists, the incoming row reuses its id; if the properties are also
 unchanged, the insert is silently discarded (RETURN NULL suppresses the row).
*/
CREATE OR REPLACE FUNCTION node_insert_trigger() RETURNS TRIGGER AS $$
DECLARE
    old_id bigint;
    old_properties jsonb;
BEGIN
    -- Only attempt de-duplication when the incoming row carries an
    -- original_data_id; rows without one are always inserted as-is.
    IF NEW.original_data_id IS NOT NULL THEN
        -- NOTE: a non-STRICT SELECT INTO simply leaves old_id/old_properties
        -- NULL when no row matches, so no exception handler is required
        -- (NO_DATA_FOUND is only raised by SELECT INTO STRICT). The original
        -- BEGIN/EXCEPTION wrapper here was dead code and has been removed.
        SELECT nodes.id, nodes.properties
          INTO old_id, old_properties
          FROM nodes
         WHERE original_data_id = NEW.original_data_id
           AND metatype_id = NEW.metatype_id
           AND data_source_id = NEW.data_source_id
         LIMIT 1;

        IF old_id IS NOT NULL THEN
            -- Reuse the existing surrogate id so this insert becomes a new
            -- version of the same logical node rather than a distinct node.
            NEW.id := old_id;

            -- If the old properties are exactly the same as the new
            -- properties, silently discard the insert. IS NOT DISTINCT FROM
            -- treats two NULLs as equal, unlike plain equality.
            IF old_properties IS NOT DISTINCT FROM NEW.properties THEN
                RETURN NULL;
            END IF;
        END IF;
    END IF;

    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@ export default class OAuthRepository extends Repository implements RepositoryInt
if (!valid) return new Promise((resolve) => resolve(Result.Failure('invalid client')));
}

const token = jwt.sign(classToPlain(UserToReturnUser(user.value)), Config.encryption_key_secret, {expiresIn: '720m', algorithm: 'RS256'});
const token = jwt.sign(classToPlain(UserToReturnUser(user.value)), Config.encryption_key_secret, {expiresIn: '720m', algorithm: 'RS256', allowInsecureKeySizes: true});

return new Promise((resolve) => resolve(Result.Success(token)));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -326,7 +326,7 @@ export default class TypeMappingRepository extends Repository implements Reposit
// by name instead of uuid - so there is potential for issues, use with caution. We return the newly modified/created
// type mappings as well as failed mappings so that the end user can perform a review of the export - check the value
// of isError on the return to determine if import was successful
async importToDataSource(targetSourceID: string, user: User, ...originalMappings: TypeMapping[]): Promise<Result<TypeMapping>[]> {
async importToDataSource(targetSourceID: string, user: User, active: boolean, ...originalMappings: TypeMapping[]): Promise<Result<TypeMapping>[]> {
// pull in the target data source, immediately error out if the source isn't valid, we also need it for the container
// in this case we're using the data source mapper because we have no need of actually performing any operations
const targetDataSource = await this.#dataSourceMapper.Retrieve(targetSourceID);
Expand Down Expand Up @@ -355,7 +355,7 @@ export default class TypeMappingRepository extends Repository implements Reposit
// call the repo's save method on the modified mapping.
mapping.data_source_id = targetDataSource.value.id;
mapping.container_id = targetDataSource.value.container_id;
mapping.active = false; // always inactivate the mapping after modification
mapping.active = active;

// now we must iterate through the transformations and potentially back-fill the metatype/relationship
// ids and key ids if all that are present are the names - note that this will not modify the mapping if
Expand Down
2 changes: 1 addition & 1 deletion src/http_server/routes/access_management/oauth_routes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -630,7 +630,7 @@ export default class OAuthRoutes {
}

try {
const token = jwt.sign(classToPlain(user.value), Config.encryption_key_secret, {expiresIn: expiry, algorithm: 'RS256'});
const token = jwt.sign(classToPlain(user.value), Config.encryption_key_secret, {expiresIn: expiry, algorithm: 'RS256', allowInsecureKeySizes: true});
res.status(200).json(token);
return;
} catch (e: any) {
Expand Down
19 changes: 14 additions & 5 deletions src/http_server/routes/data_warehouse/etl/type_mapping_routes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@ import TypeTransformation from '../../../../domain_objects/data_warehouse/etl/ty
import TypeTransformationRepository from '../../../../data_access_layer/repositories/data_warehouse/etl/type_transformation_repository';
import TypeMapping, {TypeMappingExportPayload, TypeMappingUpgradePayload} from '../../../../domain_objects/data_warehouse/etl/type_mapping';
import {FileInfo} from 'busboy';
import DataSourceRecord from '../../../../domain_objects/data_warehouse/import/data_source';
import { none } from 'fp-ts/lib/Option';

const JSONStream = require('JSONStream');
const Busboy = require('busboy');
Expand Down Expand Up @@ -199,7 +201,7 @@ export default class TypeMappingRoutes {
});
} else {
mappingRepo
.importToDataSource(payload.target_data_source, user, ...results.value)
.importToDataSource(payload.target_data_source, user, false, ...results.value)
.then((result) => {
res.status(200).json(result);
next();
Expand Down Expand Up @@ -426,6 +428,13 @@ export default class TypeMappingRoutes {
private static importTypeMappings(req: Request, res: Response, next: NextFunction) {
const user = req.currentUser!;
const importResults: Promise<Result<TypeMapping>[]>[] = [];
let active : boolean;

if(req.query['isEnabled' as keyof object] === 'true') {
active = true;
} else {
active = false;
}

if (!req.dataSource) {
Result.Failure(`unable to find data source`, 404).asResponse(res);
Expand All @@ -437,8 +446,8 @@ export default class TypeMappingRoutes {
if (req.headers['content-type']?.includes('application/json')) {
const repo = new TypeMappingRepository();
const payload = plainToClass(TypeMapping, req.body);

repo.importToDataSource(req.dataSource.DataSourceRecord?.id!, user, ...payload)
repo.importToDataSource(req.dataSource.DataSourceRecord?.id!, user, active, ...payload)
.then((results) => {
res.status(201).json(results);
next();
Expand Down Expand Up @@ -468,11 +477,11 @@ export default class TypeMappingRoutes {
Result.Failure(e).asResponse(res);
return;
});

// once the file has been read, convert to mappings and then attempt the import
stream.on('end', () => {
const mappings = plainToClass(TypeMapping, objects);
importResults.push(repo.importToDataSource(req.dataSource?.DataSourceRecord?.id!, user, ...mappings));
importResults.push(repo.importToDataSource(req.dataSource?.DataSourceRecord?.id!, user, active, ...mappings));
});

try {
Expand Down
6 changes: 4 additions & 2 deletions src/tests/data_warehouse/etl/type_mapping_repository.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ describe('A Type Mapping Repository', async () => {
let containerID: string = process.env.TEST_CONTAINER_ID || '';
let dataSourceID: string;
let targetDataSourceID: string;
let active: boolean;
let user: User;
let metatype: Metatype;
let key: MetatypeKey;
Expand Down Expand Up @@ -172,6 +173,7 @@ describe('A Type Mapping Repository', async () => {
expect(exp2.isError).false;
expect(exp2.value).not.empty;
targetDataSourceID = exp2.value.id!;
active = exp2.value.active;

// create the data source in the new container for mapping/transformation import/export tests
const exp3 = await DataSourceMapper.Instance.Create(
Expand Down Expand Up @@ -305,7 +307,7 @@ describe('A Type Mapping Repository', async () => {
expect(mapping.transformations![0]!.id).not.undefined;

// first we attempt to export them into the same container but separate data source
let exported = await repo.importToDataSource(targetDataSourceID, user, mapping);
let exported = await repo.importToDataSource(targetDataSourceID, user, active, mapping);
for (const result of exported) {
expect(result.isError).false;
expect(result.value.id).not.eq(mapping.id); // should be a new mapping
Expand All @@ -322,7 +324,7 @@ describe('A Type Mapping Repository', async () => {
expect(mappings.value[0].transformations?.length).eq(1);

// next export the mappings into a separate container
exported = await repo.importToDataSource(dataSource2ID, user, mapping);
exported = await repo.importToDataSource(dataSource2ID, user, active, mapping);
for (const result of exported) {
expect(result.isError).false;
expect(result.value.id).not.eq(mapping.id); // should be a new mapping
Expand Down

0 comments on commit 8e3951f

Please sign in to comment.