Merge branch 'bugfix/duplicate-nodes' into 'master'
fixed duplicate nodes

See merge request b650/Deep-Lynx!280
DnOberon committed Jun 9, 2022
2 parents 9069d23 + a557807 · commit 713ba2d
Showing 1 changed file with 6 additions and 1 deletion.
src/data_processing/process.ts (7 changes: 6 additions & 1 deletion)
@@ -87,7 +87,7 @@ export async function ProcessData(staging: DataStaging): Promise<Result<boolean>>
         Logger.error(`unable to fetch files for data staging records ${stagingFiles.error?.error}`);
     }
 
-    const nodesToInsert: Node[] = [];
+    let nodesToInsert: Node[] = [];
     const edgesToInsert: Edge[] = [];
     const timeseriesToInsert: TimeseriesEntry[] = [];
 
@@ -115,6 +115,11 @@ export async function ProcessData(staging: DataStaging): Promise<Result<boolean>>
         if (IsTimeseries(results.value)) timeseriesToInsert.push(...results.value);
     }
 
+    // we must deduplicate nodes based on original ID in order to avoid a database transaction error. We toss out the
+    // duplicates because even if we inserted them they'd be overwritten, or overwrite, the original. Users should be made
+    // aware that if their import is generating records with the same original ID only one instance is going to be inserted
+    nodesToInsert = nodesToInsert.filter((value, index, self) => index === self.findIndex((t) => t.original_data_id === value.original_data_id));
+
     // insert all nodes and files
     if (nodesToInsert.length > 0) {
         const inserted = await nodeRepository.bulkSave(staging.data_source_id!, nodesToInsert, transaction.value);
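For context, here is a minimal, self-contained TypeScript sketch of the keep-first deduplication pattern the added filter uses. The OriginalNode interface and dedupeByOriginalId helper are hypothetical illustrations for this sketch, not Deep-Lynx APIs; only the original_data_id field the filter actually inspects is modeled. Note also why the first hunk changes const to let: Array.prototype.filter returns a new array that is assigned back to nodesToInsert, so the binding must be reassignable.

// Hypothetical stand-in for the Node type; only the field compared by the
// dedup filter is modeled here.
interface OriginalNode {
    original_data_id: string;
}

// filter/findIndex keeps only the first occurrence of each original_data_id:
// findIndex always returns the index of the FIRST matching element, so any
// later duplicate fails the index === findIndex check and is dropped.
function dedupeByOriginalId<T extends OriginalNode>(nodes: T[]): T[] {
    return nodes.filter(
        (value, index, self) =>
            index === self.findIndex((t) => t.original_data_id === value.original_data_id),
    );
}

// Example: two records share original_data_id 'a'; only the first survives.
const deduped = dedupeByOriginalId([
    {original_data_id: 'a'},
    {original_data_id: 'b'},
    {original_data_id: 'a'}, // duplicate, discarded
]);
console.log(deduped.map((n) => n.original_data_id)); // ['a', 'b']

One design observation: running findIndex inside filter is quadratic in the number of staged nodes, whereas a single pass that records already-seen IDs in a Set would be linear. For typical import batch sizes the difference is unlikely to matter, and the filter form keeps the intent on one line.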
