Usage metrics for release v2024.1 (#1128)
* Usage metrics for release v2024.1

* Update lib/model/query/analytics.js
ktuite authored Apr 24, 2024
1 parent 60ac8ca commit cfe46c9
Showing 4 changed files with 149 additions and 9 deletions.
2 changes: 1 addition & 1 deletion config/default.json
@@ -30,7 +30,7 @@
"analytics": {
"url": "https://data.getodk.cloud/v1/key/eOZ7S4bzyUW!g1PF6dIXsnSqktRuewzLTpmc6ipBtRq$LDfIMTUKswCexvE0UwJ9/projects/1/forms/odk-analytics/submissions",
"formId": "odk-analytics",
"version": "v2023.5.0_1"
"version": "v2024.1.0_1"
}
}
},
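
The only functional change in this file is the version bump, which points Central's usage reports at the v2024.1 revision of the central odk-analytics form. A minimal sketch of reading this block at runtime, assuming the standard `config` package and a hypothetical `external.analytics` key path (the full path is not visible in this diff):

// Sketch only; `external.analytics` is an assumed key path.
const config = require('config');

const analytics = config.get('external.analytics');
console.log(analytics.formId);  // 'odk-analytics'
console.log(analytics.version); // 'v2024.1.0_1'
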
7 changes: 6 additions & 1 deletion lib/data/analytics.js
@@ -179,7 +179,12 @@ const metricsTemplate = {
recent: 0
},
"num_entity_conflicts": 0,
"num_entity_conflicts_resolved": 0
"num_entity_conflicts_resolved": 0,
"num_bulk_create_events": {
total: 0,
recent: 0
},
"biggest_bulk_upload": 0
}]
}
]
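
The template above only declares the new per-dataset fields; the queries added to lib/model/query/analytics.js below fill them in. For reference, a populated dataset entry would look roughly like this (values are illustrative, not from the commit):

// Illustrative values only.
const exampleDatasetMetrics = {
  num_entity_conflicts: 0,
  num_entity_conflicts_resolved: 0,
  num_bulk_create_events: { total: 2, recent: 1 }, // bulk entity-creation events: all-time and recent
  biggest_bulk_upload: 3 // most entities created by any single bulk upload
};
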
29 changes: 26 additions & 3 deletions lib/model/query/analytics.js
@@ -464,6 +464,19 @@ GROUP BY ds.id, ds."projectId"
`);
/* eslint-enable no-tabs */

const getDatasetEvents = () => ({ all }) => all(sql`
SELECT
ds.id, ds."projectId",
COUNT (*) num_bulk_create_events_total,
SUM (CASE WHEN audits."loggedAt" >= current_date - cast(${DAY_RANGE} as int) THEN 1 ELSE 0 END) num_bulk_create_events_recent,
MAX (CAST(sources."details"->'count' AS integer)) AS biggest_bulk_upload
FROM datasets ds
JOIN audits ON ds."acteeId" = audits."acteeId"
JOIN entity_def_sources sources ON CAST(audits."details"->'sourceId' AS integer) = sources.id
WHERE audits.action = 'entity.bulk.create'
GROUP BY ds.id, ds."projectId"
`);
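// For reference (illustrative, not part of the commit): the query above counts
// entity.bulk.create audit events per dataset, splitting them into an all-time
// total and a recent window gated by the shared DAY_RANGE constant, and takes
// the largest entity_def_sources "details"->'count' as the biggest single upload.
// A returned row looks roughly like:
const exampleEventsRow = {
  id: 7, projectId: 1,
  num_bulk_create_events_total: 2,
  num_bulk_create_events_recent: 1,
  biggest_bulk_upload: 3
};
// Datasets with no bulk-create events yield no row at all, which is why
// projectMetrics below falls back to zeros when no match is found.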

// Other
const getProjectsWithDescriptions = () => ({ all }) => all(sql`
select id as "projectId", length(trim(description)) as description_length from projects where coalesce(trim(description),'')!=''`);
@@ -484,11 +497,12 @@ const projectMetrics = () => (({ Analytics }) => Promise.all([
Analytics.countSubmissionsComments(),
Analytics.countSubmissionsByUserType(),
Analytics.getProjectsWithDescriptions(),
Analytics.getDatasets()
Analytics.getDatasets(),
Analytics.getDatasetEvents()
]).then(([ userRoles, appUsers, deviceIds, pubLinks,
forms, formGeoRepeats, formsEncrypt, formStates, reusedIds,
subs, subStates, subEdited, subComments, subUsers,
projWithDesc, datasets ]) => {
projWithDesc, datasets, datasetEvents ]) => {
const projects = {};

// users
@@ -597,6 +611,10 @@ const projectMetrics = () => (({ Analytics }) => Promise.all([
for (const row of datasets) {
const project = _getProject(projects, row.projectId);

// Additional dataset metrics are returned in a separate query. Look up the correct dataset/project row.
const eventsRow = datasetEvents.find(d => (d.projectId === row.projectId && d.id === row.id)) ||
{ num_bulk_create_events_total: 0, num_bulk_create_events_recent: 0, biggest_bulk_upload: 0 };

project.datasets.push({
id: row.id,
num_properties: row.num_properties,
@@ -612,6 +630,10 @@ const projectMetrics = () => (({ Analytics }) => Promise.all([
num_entities_updated: { total: row.num_entities_updated_total, recent: row.num_entities_updated_recent },
num_entity_conflicts: row.num_entity_conflicts,
num_entity_conflicts_resolved: row.num_entity_conflicts_resolved,

// 2024.1 metrics
num_bulk_create_events: { total: eventsRow.num_bulk_create_events_total, recent: eventsRow.num_bulk_create_events_recent },
biggest_bulk_upload: eventsRow.biggest_bulk_upload
});
}
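// For reference (illustrative alternative, not what the commit does): the events
// query only returns rows for datasets that have at least one entity.bulk.create
// event, hence the all-zero fallback above. An equivalent keyed lookup, shown as
// a standalone sketch:
//   const byDataset = new Map(datasetEvents.map((d) => [`${d.projectId}:${d.id}`, d]));
//   const eventsFor = (projectId, id) => byDataset.get(`${projectId}:${id}`) ||
//     { num_bulk_create_events_total: 0, num_bulk_create_events_recent: 0, biggest_bulk_upload: 0 };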

@@ -727,5 +749,6 @@ module.exports = {
previewMetrics,
projectMetrics,
getLatestAudit,
getDatasets
getDatasets,
getDatasetEvents
};
120 changes: 116 additions & 4 deletions test/integration/other/analytics-queries.js
@@ -1217,6 +1217,100 @@ describe('analytics task queries', function () {
datasetOfSecondProject.num_entities_total.should.be.equal(0);

}));

it('should return recent and total entity.bulk.create events per dataset', testService(async (service, container) => {
const asAlice = await service.login('alice');

// First dataset in first project: "people"
await asAlice.post('/v1/projects/1/datasets')
.send({ name: 'people' })
.expect(200);

// Past bulk upload with 3 rows in "people"
await asAlice.post('/v1/projects/1/datasets/people/entities')
.send({
source: { name: 'people.csv', size: 100, },
entities: [ { label: 'a label' }, { label: 'a label' }, { label: 'a label' } ]
})
.expect(200);

const secondProjectId = await createTestProject(service, container, 'second');

// First dataset in second project: "shovels"
await asAlice.post(`/v1/projects/${secondProjectId}/datasets`)
.send({ name: 'shovels' })
.expect(200);

// Past bulk upload with 1 row in "shovels"
await asAlice.post(`/v1/projects/${secondProjectId}/datasets/shovels/entities`)
.send({
source: { name: 'shovels.csv', size: 100, },
entities: [ { label: 'a label' } ]
})
.expect(200);

// Move the existing entity.bulk.create audits into the distant past so they fall outside the recent window
await container.run(sql`UPDATE audits SET "loggedAt" = '1999-1-1' WHERE action = 'entity.bulk.create'`);

// Recent bulk upload with 1 row in "people"
await asAlice.post('/v1/projects/1/datasets/people/entities')
.send({
source: { name: 'people.csv', size: 100, },
entities: [ { label: 'a label' } ]
})
.expect(200);

const dsInDatabase = (await container.all(sql`SELECT * FROM datasets`)).reduce((map, obj) => ({ [obj.id]: obj, ...map }), {});
const datasets = await container.Analytics.getDatasetEvents();

const datasetOfFirstProject = datasets.find(d => d.projectId === 1);
datasetOfFirstProject.id.should.be.equal(dsInDatabase[datasetOfFirstProject.id].id);
datasetOfFirstProject.num_bulk_create_events_total.should.be.equal(2);
datasetOfFirstProject.num_bulk_create_events_recent.should.be.equal(1);
datasetOfFirstProject.biggest_bulk_upload.should.be.equal(3);

const datasetOfSecondProject = datasets.find(d => d.projectId === secondProjectId);
datasetOfSecondProject.id.should.be.equal(dsInDatabase[datasetOfSecondProject.id].id);
datasetOfSecondProject.num_bulk_create_events_total.should.be.equal(1);
datasetOfSecondProject.num_bulk_create_events_recent.should.be.equal(0);
datasetOfSecondProject.biggest_bulk_upload.should.be.equal(1);
}));

it('should show dataset event metrics within project metrics', testService(async (service, container) => {
const asAlice = await service.login('alice');

// First dataset in first project: "people"
await asAlice.post('/v1/projects/1/datasets')
.send({ name: 'people' })
.expect(200);

// Recent bulk upload with 1 row in "people"
await asAlice.post('/v1/projects/1/datasets/people/entities')
.send({
source: { name: 'people.csv', size: 100, },
entities: [ { label: 'a label' } ]
})
.expect(200);

const projects = await container.Analytics.projectMetrics();
const ds = projects[0].datasets[0];
ds.num_bulk_create_events.should.eql({ total: 1, recent: 1 });
ds.biggest_bulk_upload.should.equal(1);
}));

it('should combine dataset event metrics with other project metrics even if no bulk create events', testService(async (service, container) => {
const asAlice = await service.login('alice');

// First dataset in first project: "people"
await asAlice.post('/v1/projects/1/datasets')
.send({ name: 'people' })
.expect(200);

const projects = await container.Analytics.projectMetrics();
const ds = projects[0].datasets[0];
ds.num_bulk_create_events.should.eql({ total: 0, recent: 0 });
ds.biggest_bulk_upload.should.equal(0);
}));
});

describe('other project metrics', () => {
@@ -1546,6 +1640,14 @@ describe('analytics task queries', function () {
.replace(/goodone/g, 'people')
.replace(/files\/badsubpath/g, 'file/employees'), 1);

// Create entities in bulk
await asAlice.post('/v1/projects/1/datasets/people/entities')
.send({
source: { name: 'people.csv', size: 100, },
entities: [ { label: 'a label' }, { label: 'a label' }, { label: 'a label' } ]
})
.expect(200);

// Create an empty project
const secondProject = await createTestProject(service, container, 'second');
await createTestForm(service, container, testData.forms.simple, secondProject);
@@ -1560,8 +1662,8 @@ describe('analytics task queries', function () {
num_creation_forms: 2,
num_followup_forms: 1,
num_entities: {
total: 2, // made one Entity ancient
recent: 1
total: 5, // 2 from submissions, 3 from bulk uploads
recent: 4 // one of the submission Entities was made ancient
},
num_failed_entities: { // two Submissions failed due to invalid UUID
total: 2, // made one Error ancient
@@ -1584,7 +1686,12 @@
recent: 1
},
num_entity_conflicts: 1,
num_entity_conflicts_resolved: 1
num_entity_conflicts_resolved: 1,
num_bulk_create_events: {
total: 1,
recent: 1
},
biggest_bulk_upload: 3
});

secondDataset.should.be.eql({
@@ -1616,7 +1723,12 @@
recent: 0
},
num_entity_conflicts: 0,
num_entity_conflicts_resolved: 0
num_entity_conflicts_resolved: 0,
num_bulk_create_events: {
total: 0,
recent: 0
},
biggest_bulk_upload: 0
});

// Assert that a Project without a Dataset returns an empty array
