diff --git a/.gitignore b/.gitignore
index 9f0816c..7f11ee6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,4 +9,5 @@ testdata/minikube-logs2.html
testdata/minikube-logs2.out
testdata/docker-test/testout.json
testdata/docker-test/testout.html
-./gopogh
\ No newline at end of file
+./gopogh
+cmd/gopogh-server/gopogh-server
diff --git a/cmd/gopogh-server/flake_chart.html b/cmd/gopogh-server/flake_chart.html
new file mode 100644
index 0000000..e963023
--- /dev/null
+++ b/cmd/gopogh-server/flake_chart.html
@@ -0,0 +1,35 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/cmd/gopogh-server/flake_chart.js b/cmd/gopogh-server/flake_chart.js
new file mode 100644
index 0000000..9e1f72a
--- /dev/null
+++ b/cmd/gopogh-server/flake_chart.js
@@ -0,0 +1,816 @@
+// Displays an error message to the UI. Any previous message will be erased.
+function displayError(message) {
+ // Clear the body of all children.
+ while (document.body.firstChild) {
+ document.body.removeChild(document.body.firstChild);
+ }
+ const element = document.createElement("p");
+ element.innerText = "Error: " + message;
+ element.style.color = "red";
+ element.style.fontFamily = "Arial";
+ element.style.fontWeight = "bold";
+ element.style.margin = "5rem";
+ document.body.appendChild(element);
+}
+
+const testGopoghLink = (jobId, environment, testName, status) => {
+ return `https://storage.googleapis.com/minikube-builds/logs/master/${jobId}/${environment}.html${testName ? `#${status}_${testName}` : ``}`;
+}
+
+// Parse URL search `query` into [{key, value}].
+function parseUrlQuery(query) {
+ if (query[0] === '?') {
+ query = query.substring(1);
+ }
+ return Object.fromEntries((query === "" ? [] : query.split("&")).map(element => {
+ const keyValue = element.split("=");
+ return [unescape(keyValue[0]), unescape(keyValue[1])];
+ }));
+}
+
+function createRecentNumberOfFailTable(summaryTable) {
+ const createCell = (elementType, text) => {
+ const element = document.createElement(elementType);
+ element.innerHTML = text;
+ return element;
+ }
+ const table = document.createElement("table");
+ const tableHeaderRow = document.createElement("tr");
+ tableHeaderRow.appendChild(createCell("th", "Rank"));
+ tableHeaderRow.appendChild(createCell("th", "Env Name")).style.textAlign = "left";
+ tableHeaderRow.appendChild(createCell("th", "Recent Number of Fails"));
+ tableHeaderRow.appendChild(createCell("th", "Growth (since last 15 days)"));
+ table.appendChild(tableHeaderRow);
+ const tableBody = document.createElement("tbody");
+ for (let i = 0; i < summaryTable.length; i++) {
+ const {
+ envName,
+ recentNumberOfFail,
+ growth
+ } = summaryTable[i];
+ const row = document.createElement("tr");
+ row.appendChild(createCell("td", "" + (i + 1))).style.textAlign = "center";
+ row.appendChild(createCell("td", `${envName}`));
+ row.appendChild(createCell("td", recentNumberOfFail)).style.textAlign = "right";
+ row.appendChild(createCell("td", ` 0 ? "red" : "green")}">${growth > 0 ? '+' + growth : growth}`));
+ tableBody.appendChild(row);
+ }
+ table.appendChild(tableBody);
+ new Tablesort(table);
+ return table;
+}
+
+
+function createRecentFlakePercentageTable(recentFlakePercentTable, query) {
+ const createCell = (elementType, text) => {
+ const element = document.createElement(elementType);
+ element.innerHTML = text;
+ return element;
+ }
+
+ const table = document.createElement("table");
+ const tableHeaderRow = document.createElement("tr");
+ tableHeaderRow.appendChild(createCell("th", "Rank"));
+ tableHeaderRow.appendChild(createCell("th", "Test Name")).style.textAlign = "left";
+ tableHeaderRow.appendChild(createCell("th", "Recent Flake Percentage"));
+ tableHeaderRow.appendChild(createCell("th", "Growth (since last 15 days)"));
+ table.appendChild(tableHeaderRow);
+ const tableBody = document.createElement("tbody");
+ for (let i = 0; i < recentFlakePercentTable.length; i++) {
+ const {
+ testName,
+ recentFlakePercentage,
+ growthRate
+ } = recentFlakePercentTable[i];
+ const row = document.createElement("tr");
+ row.appendChild(createCell("td", "" + (i + 1))).style.textAlign = "center";
+ row.appendChild(createCell("td", `${testName}`));
+ row.appendChild(createCell("td", recentFlakePercentage + "%")).style.textAlign = "right";
+ row.appendChild(createCell("td", ` 0 ? "red" : "green")}">${growthRate > 0 ? '+' + growthRate : growthRate}%`));
+ tableBody.appendChild(row);
+ }
+ table.appendChild(tableBody);
+ new Tablesort(table);
+ return table;
+}
+
+function displayTestAndEnvironmentChart(data, query) {
+ const chartsContainer = document.getElementById('chart_div');
+
+ const dayData = data.flakeByDay
+ const dayChart = new google.visualization.DataTable();
+ dayChart.addColumn('date', 'Date');
+ dayChart.addColumn('number', 'Flake Percentage');
+ dayChart.addColumn({
+ type: 'string',
+ role: 'tooltip',
+ 'p': {
+ 'html': true
+ }
+ });
+ dayChart.addColumn('number', 'Duration');
+ dayChart.addColumn({
+ type: 'string',
+ role: 'tooltip',
+ 'p': {
+ 'html': true
+ }
+ });
+
+ dayChart.addRows(
+ dayData
+ .map(groupData => {
+ let dataArr = groupData.commitResultsAndDurations.split(',')
+ dataArr = dataArr.map((commit) => commit.split(":"))
+ const resultArr = dataArr.map((commit) => ({
+ id: commit[commit.length - 3],
+ status: (commit[commit.length - 2]).trim()
+ }))
+ const durationArr = dataArr.map((commit) => ({
+ id: commit[commit.length - 3],
+ status: (commit[commit.length - 2]).trim(),
+ duration: (commit[commit.length - 1]).trim()
+ }))
+
+ return [
+ new Date(groupData.startOfDate),
+ groupData.flakePercentage,
+ `
+
Date: ${groupData.startOfDate.toLocaleString([], {dateStyle: 'medium'})}
+
Flake Percentage: ${groupData.flakePercentage.toFixed(2)}%
+
Jobs:
+ ${resultArr.map(({ id, status }) => ` -
${id} (${status})`).join("
")}
+
`,
+ groupData.avgDuration,
+ `
+
Date: ${groupData.startOfDate.toLocaleString([], {dateStyle: 'medium'})}
+
Average Duration: ${groupData.avgDuration.toFixed(2)}s
+
Jobs:
+ ${durationArr.map(({ id, duration, status }) => ` -
${id} (${duration}s)`).join("
")}
+
`,
+ ]
+ })
+ );
+ const dayOptions = {
+ title: `Flake rate and duration by day of ${query.test} on ${query.env}`,
+ width: window.innerWidth,
+ height: window.innerHeight,
+ pointSize: 10,
+ pointShape: "circle",
+ series: {
+ 0: {
+ targetAxisIndex: 0
+ },
+ 1: {
+ targetAxisIndex: 1
+ },
+ },
+ vAxes: {
+ 0: {
+ title: "Flake rate",
+ minValue: 0,
+ maxValue: 100
+ },
+ 1: {
+ title: "Duration (seconds)"
+ },
+ },
+ colors: ['#dc3912', '#3366cc'],
+ tooltip: {
+ trigger: "selection",
+ isHtml: true
+ }
+ };
+ const flakeRateDayContainer = document.createElement("div");
+ flakeRateDayContainer.style.width = "100vw";
+ flakeRateDayContainer.style.height = "100vh";
+ chartsContainer.appendChild(flakeRateDayContainer);
+ const dChart = new google.visualization.LineChart(flakeRateDayContainer);
+ dChart.draw(dayChart, dayOptions);
+
+ const weekData = data.flakeByWeek
+ const weekChart = new google.visualization.DataTable();
+ weekChart.addColumn('date', 'Date');
+ weekChart.addColumn('number', 'Flake Percentage');
+ weekChart.addColumn({
+ type: 'string',
+ role: 'tooltip',
+ 'p': {
+ 'html': true
+ }
+ });
+ weekChart.addColumn('number', 'Duration');
+ weekChart.addColumn({
+ type: 'string',
+ role: 'tooltip',
+ 'p': {
+ 'html': true
+ }
+ });
+
+ console.log(weekChart)
+ weekChart.addRows(
+ weekData
+ .map(groupData => {
+ let dataArr = groupData.commitResultsAndDurations.split(',')
+ dataArr = dataArr.map((commit) => commit.split(":"))
+ const resultArr = dataArr.map((commit) => ({
+ id: commit[commit.length - 3],
+ status: (commit[commit.length - 2]).trim()
+ }))
+ const durationArr = dataArr.map((commit) => ({
+ id: commit[commit.length - 3],
+ status: (commit[commit.length - 2]).trim(),
+ duration: (commit[commit.length - 1]).trim()
+ }))
+
+ return [
+ new Date(groupData.startOfDate),
+ groupData.flakePercentage,
+ `
+
Date: ${groupData.startOfDate.toLocaleString([], {dateStyle: 'medium'})}
+
Flake Percentage: ${groupData.flakePercentage.toFixed(2)}%
+
Jobs:
+ ${resultArr.map(({ id, status }) => ` -
${id} (${status})`).join("
")}
+
`,
+ groupData.avgDuration,
+ `
+
Date: ${groupData.startOfDate.toLocaleString([], {dateStyle: 'medium'})}
+
Average Duration: ${groupData.avgDuration.toFixed(2)}s
+
Jobs:
+ ${durationArr.map(({ id, duration, status }) => ` -
${id} (${duration}s)`).join("
")}
+
`,
+ ]
+ })
+ );
+ const weekOptions = {
+ title: `Flake rate and duration by week of ${query.test} on ${query.env}`,
+ width: window.innerWidth,
+ height: window.innerHeight,
+ pointSize: 10,
+ pointShape: "circle",
+ series: {
+ 0: {
+ targetAxisIndex: 0
+ },
+ 1: {
+ targetAxisIndex: 1
+ },
+ },
+ vAxes: {
+ 0: {
+ title: "Flake rate",
+ minValue: 0,
+ maxValue: 100
+ },
+ 1: {
+ title: "Duration (seconds)"
+ },
+ },
+ colors: ['#dc3912', '#3366cc'],
+ tooltip: {
+ trigger: "selection",
+ isHtml: true
+ }
+ };
+ const flakeRateWeekContainer = document.createElement("div");
+ flakeRateWeekContainer.style.width = "100vw";
+ flakeRateWeekContainer.style.height = "100vh";
+ chartsContainer.appendChild(flakeRateWeekContainer);
+ const wChart = new google.visualization.LineChart(flakeRateWeekContainer);
+ wChart.draw(weekChart, weekOptions);
+}
+
+function displaySummaryChart(data) {
+ const chartsContainer = document.getElementById('chart_div');
+ const summaryData = data.summaryAvgFail
+
+ const uniqueDayDates = new Set();
+ const summaryEnvDateMap = {};
+ for (const envDay of summaryData) {
+ const {
+ startOfDate,
+ envName,
+ avgFailedTests,
+ avgDuration
+ } = envDay
+ uniqueDayDates.add(startOfDate)
+ if (!summaryEnvDateMap[envName]) {
+ summaryEnvDateMap[envName] = {};
+ }
+ summaryEnvDateMap[envName][startOfDate] = {
+ avgFailedTests,
+ avgDuration
+ }
+ }
+ const uniqueEnvs = Object.keys(summaryEnvDateMap);
+ const orderedDayDates = Array.from(uniqueDayDates).sort()
+
+
+ const dayChart = new google.visualization.DataTable();
+ dayChart.addColumn('date', 'Date');
+ for (const env of uniqueEnvs) {
+ dayChart.addColumn('number', `${env}`);
+ dayChart.addColumn({
+ type: 'string',
+ role: 'tooltip',
+ 'p': {
+ 'html': true
+ }
+ });
+ }
+ dayChart.addRows(orderedDayDates.map(dateTime => [new Date(dateTime)].concat(uniqueEnvs.map(name => {
+ const avgVal = summaryEnvDateMap[name][dateTime];
+ if (avgVal !== undefined) {
+ const {
+ avgFailedTests,
+ } = avgVal
+ return [
+ avgFailedTests,
+ `
+ ${name}
+ Date: ${dateTime.toLocaleString([], {dateStyle: 'medium'})}
+ Number of Failed Tests (avg): ${+avgFailedTests.toFixed(2)}
+
`
+ ]
+ }
+ return [null, null];
+ })).flat()))
+
+ const dayOptions = {
+ title: `Average Daily Failed Tests`,
+ width: window.innerWidth,
+ height: window.innerHeight,
+ pointSize: 10,
+ pointShape: "circle",
+ vAxes: {
+ 0: {
+ title: "# of Failed Tests",
+ minValue: 0
+ },
+ },
+ tooltip: {
+ trigger: "selection",
+ isHtml: true
+ }
+ };
+ // Create the chart and draw it
+ const summaryDayContainer = document.createElement("div");
+ summaryDayContainer.style.width = "100vw";
+ summaryDayContainer.style.height = "100vh";
+ chartsContainer.appendChild(summaryDayContainer);
+ const dChart = new google.visualization.LineChart(summaryDayContainer);
+ dChart.draw(dayChart, dayOptions);
+
+
+ const durChart = new google.visualization.DataTable();
+ durChart.addColumn('date', 'Date');
+ for (const env of uniqueEnvs) {
+ durChart.addColumn('number', `${env}`);
+ durChart.addColumn({
+ type: 'string',
+ role: 'tooltip',
+ 'p': {
+ 'html': true
+ }
+ });
+ }
+ durChart.addRows(orderedDayDates.map(dateTime => [new Date(dateTime)].concat(uniqueEnvs.map(name => {
+ const avgVal = summaryEnvDateMap[name][dateTime];
+ if (avgVal !== undefined) {
+ const {
+ avgDuration
+ } = avgVal
+ return [
+ avgDuration,
+ `
+ ${name}
+ Date: ${dateTime.toLocaleString([], {dateStyle: 'medium'})}
+ Duration (avg): ${+avgDuration.toFixed(2)}
+
`
+ ]
+ }
+ return [null, null];
+ })).flat()))
+
+ const durOptions = {
+ title: `Average Total Duration per day`,
+ width: window.innerWidth,
+ height: window.innerHeight,
+ pointSize: 10,
+ pointShape: "circle",
+ vAxes: {
+ 0: {
+ title: "Total Duration",
+ minValue: 0
+ },
+ },
+ tooltip: {
+ trigger: "selection",
+ isHtml: true
+ }
+ };
+ // Create the chart and draw it
+ const summaryDurContainer = document.createElement("div");
+ summaryDurContainer.style.width = "100vw";
+ summaryDurContainer.style.height = "100vh";
+ chartsContainer.appendChild(summaryDurContainer);
+ const durationChart = new google.visualization.LineChart(summaryDurContainer);
+ durationChart.draw(durChart, durOptions);
+
+
+ chartsContainer.appendChild(createRecentNumberOfFailTable(data.summaryTable))
+}
+
+function displayEnvironmentChart(data, query) {
+ const chartsContainer = document.getElementById('chart_div');
+
+ //By Day Chart
+
+ const dayData = data.flakeRateByDay
+ const uniqueDayTestNames = new Set();
+ const uniqueDayDates = new Set();
+ for (const flakeDay of dayData) {
+ uniqueDayTestNames.add(flakeDay.testName);
+ uniqueDayDates.add(flakeDay.startOfDate)
+ }
+ const uniqueDayTestNamesArray = Array.from(uniqueDayTestNames);
+ const orderedDayDates = Array.from(uniqueDayDates).sort();
+ const flakeDayDataMap = {};
+ dayData.forEach((day) => {
+ const {
+ testName,
+ startOfDate,
+ flakePercentage,
+ commitResults
+ } = day;
+ // If the test name doesn't exist in the map, create a new entry
+ if (!flakeDayDataMap[testName]) {
+ flakeDayDataMap[testName] = {};
+ }
+ // Set the flakePercentage for the corresponding startOfDate
+ flakeDayDataMap[testName][startOfDate] = {
+ fp: flakePercentage,
+ cr: commitResults
+ };
+ });
+ const dayChart = new google.visualization.DataTable();
+ dayChart.addColumn('date', 'Date');
+ for (const testName of uniqueDayTestNamesArray) {
+ dayChart.addColumn('number', `${testName}`);
+ dayChart.addColumn({
+ type: 'string',
+ role: 'tooltip',
+ 'p': {
+ 'html': true
+ }
+ });
+ }
+ dayChart.addRows(orderedDayDates.map(dateTime => [new Date(dateTime)].concat(uniqueDayTestNamesArray.map(name => {
+ const fpAndCr = flakeDayDataMap[name][dateTime];
+ if (fpAndCr !== undefined) {
+ const {
+ fp,
+ cr
+ } = fpAndCr
+ let commitArr = cr.split(",")
+ commitArr = commitArr.map((commit) => commit.split(":"))
+ commitArr = commitArr.map((commit) => ({
+ id: commit[commit.length - 2],
+ status: (commit[commit.length - 1]).trim()
+ }))
+ return [
+ fp,
+ `
+
${name}
+
Date: ${dateTime.toLocaleString([], {dateStyle: 'medium'})}
+
Flake Percentage: ${+fp.toFixed(2)}%
+
Jobs:
+ ${commitArr.map(({ id, status }) => ` -
${id} (${status})`).join("
")}
+
`
+ ]
+ }
+ return [null, null];
+ })).flat()))
+
+ const dayOptions = {
+ title: `Flake rate by day of top ${uniqueDayTestNamesArray.length} recent test flakiness (past 15 days) on ${query.env}`,
+ width: window.innerWidth,
+ height: window.innerHeight,
+ pointSize: 10,
+ pointShape: "circle",
+ vAxes: {
+ 0: {
+ title: "Flake rate",
+ minValue: 0,
+ maxValue: 100
+ },
+ },
+ tooltip: {
+ trigger: "selection",
+ isHtml: true
+ }
+ };
+ // Create the chart and draw it
+ const flakeRateDayContainer = document.createElement("div");
+ flakeRateDayContainer.style.width = "100vw";
+ flakeRateDayContainer.style.height = "100vh";
+ chartsContainer.appendChild(flakeRateDayContainer);
+ const dChart = new google.visualization.LineChart(flakeRateDayContainer);
+ dChart.draw(dayChart, dayOptions);
+
+ // Weekly Chart
+
+ const weekData = data.flakeRateByWeek
+ const uniqueWeekTestNames = new Set();
+ const uniqueWeekDates = new Set();
+ for (const flakeWeek of weekData) {
+ uniqueWeekTestNames.add(flakeWeek.testName);
+ uniqueWeekDates.add(flakeWeek.startOfDate)
+ }
+ const uniqueWeekTestNamesArray = Array.from(uniqueWeekTestNames);
+ const orderedWeekDates = Array.from(uniqueWeekDates).sort();
+ const flakeWeekDataMap = {};
+ weekData.forEach((week) => {
+ const {
+ testName,
+ startOfDate,
+ flakePercentage,
+ commitResults
+ } = week;
+ // If the test name doesn't exist in the map, create a new entry
+ if (!flakeWeekDataMap[testName]) {
+ flakeWeekDataMap[testName] = {};
+ }
+ // Set the flakePercentage for the corresponding startOfDate
+ flakeWeekDataMap[testName][startOfDate] = {
+ fp: flakePercentage,
+ cr: commitResults
+ };
+ });
+ {
+ // Create the DataTable
+ const weekChart = new google.visualization.DataTable();
+ // Add the columns to the DataTable
+ weekChart.addColumn('date', 'Date');
+ for (const testName of uniqueWeekTestNamesArray) {
+ weekChart.addColumn('number', `${testName}`);
+ weekChart.addColumn({
+ type: 'string',
+ role: 'tooltip',
+ 'p': {
+ 'html': true
+ }
+ });
+ }
+ weekChart.addRows(orderedWeekDates.map(dateTime => [new Date(dateTime)].concat(uniqueWeekTestNamesArray.map(name => {
+ const fpAndcr = flakeWeekDataMap[name][dateTime];
+ if (fpAndcr != undefined) {
+ const {
+ fp,
+ cr
+ } = fpAndcr
+ let commitArr = cr.split(",")
+ commitArr = commitArr.map((commit) => commit.split(":"))
+ commitArr = commitArr.map((commit) => ({
+ id: commit[commit.length - 2],
+ status: (commit[commit.length - 1]).trim()
+ }))
+ return [
+ fp,
+ `
+
${name}
+
Date: ${dateTime.toLocaleString([], {dateStyle: 'medium'})}
+
Flake Percentage: ${+fp.toFixed(2)}%
+
Jobs:
+ ${commitArr.map(({ id, status }) => ` -
${id} (${status})`).join("
")}
+
`
+ ];
+ }
+ return [null, null];
+ })).flat()))
+
+ const weekOptions = {
+ title: `Flake rate by week of top ${uniqueWeekTestNamesArray.length} of recent test flakiness (past week) on ${query.env}`,
+ width: window.innerWidth,
+ height: window.innerHeight,
+ pointSize: 10,
+ pointShape: "circle",
+ vAxes: {
+ 0: {
+ title: "Flake rate",
+ minValue: 0,
+ maxValue: 100
+ },
+ },
+ tooltip: {
+ trigger: "selection",
+ isHtml: true
+ }
+ };
+ // Create the chart and draw it
+ const flakeRateWeekContainer = document.createElement("div");
+ flakeRateWeekContainer.style.width = "100vw";
+ flakeRateWeekContainer.style.height = "100vh";
+ chartsContainer.appendChild(flakeRateWeekContainer);
+ const wChart = new google.visualization.LineChart(flakeRateWeekContainer);
+ wChart.draw(weekChart, weekOptions);
+ }
+
+ // Duration Chart
+
+ {
+ const durationChart = new google.visualization.DataTable();
+ const durationData = data.countsAndDurations
+ durationChart.addColumn('date', 'Date');
+ durationChart.addColumn('number', 'Test Count');
+ durationChart.addColumn({
+ type: 'string',
+ role: 'tooltip',
+ 'p': {
+ 'html': true
+ }
+ });
+ durationChart.addColumn('number', 'Duration');
+ durationChart.addColumn({
+ type: 'string',
+ role: 'tooltip',
+ 'p': {
+ 'html': true
+ }
+ });
+ durationChart.addRows(
+ durationData.map(dateInfo => {
+ let countArr = dateInfo.commitCounts.split(",")
+ countArr = countArr.map((commit) => commit.split(":"))
+ countArr = countArr.map((commit) => ({
+ rootJob: commit[commit.length - 2],
+ testCount: +(commit[commit.length - 1]).trim()
+ }))
+ let durationArr = dateInfo.commitDurations.split(",")
+ durationArr = durationArr.map((commit) => commit.split(":"))
+ durationArr = durationArr.map((commit) => ({
+ rootJob: commit[commit.length - 2],
+ totalDuration: +(commit[commit.length - 1]).trim()
+ }))
+ return [
+ new Date(dateInfo.startOfDate),
+ dateInfo.testCount,
+ `
+
Date: ${dateInfo.startOfDate.toLocaleString([], {dateStyle: 'medium'})}
+
Test Count (averaged): ${+dateInfo.testCount.toFixed(2)}
+
Jobs:
+ ${countArr.map(job => ` -
${job.rootJob} Test count: ${job.testCount}`).join("
")}
+
`,
+ dateInfo.duration,
+ `
+
Date: ${dateInfo.startOfDate.toLocaleString([], {dateStyle: 'medium'})}
+
Total Duration (averaged): ${+dateInfo.duration.toFixed(2)}
+
Jobs:
+ ${durationArr.map(job => ` -
${job.rootJob} Total Duration: ${+job.totalDuration.toFixed(2)}s`).join("
")}
+
`,
+ ]
+ }));
+ const durOptions = {
+ title: `Test count and total duration by day on ${query.env}`,
+ width: window.innerWidth,
+ height: window.innerHeight,
+ pointSize: 10,
+ pointShape: "circle",
+ series: {
+ 0: {
+ targetAxisIndex: 0
+ },
+ 1: {
+ targetAxisIndex: 1
+ },
+ },
+ vAxes: {
+ 0: {
+ title: "Test Count",
+ minValue: 0
+ },
+ 1: {
+ title: "Duration (seconds)",
+ minValue: 0
+ },
+ },
+ tooltip: {
+ trigger: "selection",
+ isHtml: true
+ }
+ };
+ const envDurationContainer = document.createElement("div");
+ envDurationContainer.style.width = "100vw";
+ envDurationContainer.style.height = "100vh";
+ chartsContainer.appendChild(envDurationContainer);
+ const durChart = new google.visualization.LineChart(envDurationContainer);
+ durChart.draw(durationChart, durOptions);
+ }
+
+ chartsContainer.appendChild(createRecentFlakePercentageTable(data.recentFlakePercentTable, query))
+}
+
+function createTopnDropdown(currentTopn) {
+ const dropdownContainer = document.createElement("div");
+ dropdownContainer.style.margin = "1rem";
+
+ const dropdownLabel = document.createElement("label");
+ dropdownLabel.innerText = "Select topn value: ";
+
+ const dropdown = document.createElement("select");
+ dropdown.id = "topnDropdown";
+
+ const values = [3, 5, 10, 15];
+ values.forEach(value => {
+ const option = document.createElement("option");
+ option.value = value;
+ option.text = value;
+ if (value.toString() === currentTopn) {
+ option.selected = true;
+ }
+ dropdown.appendChild(option);
+ });
+
+ dropdown.addEventListener("change", () => {
+ const selectedValue = dropdown.value;
+ const currentURL = new URL(window.location.href);
+ currentURL.searchParams.set("tests_in_top", selectedValue);
+ window.location.href = currentURL.href;
+ });
+
+ dropdownContainer.appendChild(dropdownLabel);
+ dropdownContainer.appendChild(dropdown);
+
+ document.getElementById('dropdown_container').appendChild(dropdownContainer)
+}
+
+function displayGopoghVersion(verData) {
+ const footerElement = document.getElementById('version_div');
+ const version = verData.version
+
+ footerElement.className = "mdl-mega-footer";
+ footerElement.innerHTML = "generated by Gopogh " + version + "";
+}
+
+
+async function init() {
+ const query = parseUrlQuery(window.location.search);
+ const desiredTest = query.test,
+ desiredEnvironment = query.env,
+ desiredPeriod = query.period || "",
+ desiredTestNumber = query.tests_in_top || "";
+ const currentTopn = query.tests_in_top || "10"; // Default to 10 (for top 10 tests)
+
+ google.charts.load('current', {
+ 'packages': ['corechart']
+ });
+ try {
+ // Wait for Google Charts to load
+ await new Promise(resolve => google.charts.setOnLoadCallback(resolve));
+
+ let url;
+ const basePath = 'http://localhost:8080' // Base Server Path. Modify to actual server path if deploying
+ if (desiredEnvironment === undefined) {
+ // URL for displaySummaryChart
+ url = basePath + '/summary'
+ } else if (desiredTest === undefined) {
+ // URL for displayEnvironmentChart
+ url = basePath + '/env' + '?env=' + desiredEnvironment + '&tests_in_top=' + desiredTestNumber;
+ } else {
+ // URL for displayTestAndEnvironmentChart
+ url = basePath + '/test' + '?env=' + desiredEnvironment + '&test=' + desiredTest;
+ }
+
+ // Fetch data from the determined URL
+ const response = await fetch(url);
+ if (!response.ok) {
+ throw new Error('Network response was not ok');
+ }
+ const data = await response.json();
+ console.log(data)
+
+ // Call the appropriate chart display function based on the desired condition
+ if (desiredTest == undefined && desiredEnvironment === undefined) {
+ displaySummaryChart(data)
+ } else if (desiredTest === undefined) {
+ createTopnDropdown(currentTopn);
+ displayEnvironmentChart(data, query);
+ } else {
+ displayTestAndEnvironmentChart(data, query);
+ }
+ url = basePath + '/version'
+
+ const verResponse = await fetch(url);
+ if (!verResponse.ok) {
+ throw new Error('Network response was not ok');
+ }
+ const verData = await verResponse.json();
+ console.log(verData)
+ displayGopoghVersion(verData)
+ } catch (err) {
+ displayError(err);
+ }
+}
+
+init();
\ No newline at end of file
diff --git a/cmd/dbbrowser/main.go b/cmd/gopogh-server/main.go
similarity index 73%
rename from cmd/dbbrowser/main.go
rename to cmd/gopogh-server/main.go
index 21e49a9..e572ea2 100644
--- a/cmd/dbbrowser/main.go
+++ b/cmd/gopogh-server/main.go
@@ -6,6 +6,7 @@ import (
"net/http"
"github.com/medyagh/gopogh/pkg/db"
+ "github.com/medyagh/gopogh/pkg/handler"
)
var dbPath = flag.String("db_path", "", "path to postgres db in the form of 'user=DB_USER dbname=DB_NAME password=DB_PASS'")
@@ -28,12 +29,23 @@ func main() {
UseCloudSQL: *useCloudSQL,
UseIAMAuth: *useIAMAuth,
}
- db, err := db.FromEnv(flagValues)
+ datab, err := db.FromEnv(flagValues)
if err != nil {
log.Fatal(err)
}
+ db := handler.DB{
+ Database: datab,
+ }
// Create an HTTP server and register the handlers
- http.HandleFunc("/db", db.PrintEnvironmentTestsAndTestCases)
+ http.HandleFunc("/db", db.ServeEnvironmentTestsAndTestCases)
+
+ http.HandleFunc("/env", db.ServeEnvCharts)
+
+ http.HandleFunc("/test", db.ServeTestCharts)
+
+ http.HandleFunc("/summary", db.ServeOverview)
+
+ http.HandleFunc("/version", handler.ServeGopoghVersion)
// Start the HTTP server
err = http.ListenAndServe(":8080", nil)
diff --git a/pkg/db/cloudsql.go b/pkg/db/cloudsql.go
index d8b5d77..49853d7 100644
--- a/pkg/db/cloudsql.go
+++ b/pkg/db/cloudsql.go
@@ -14,7 +14,7 @@ import (
)
// NewCloudSQL returns a new Google Cloud SQL database
-func NewCloudSQL(cfg config) (datab, error) {
+func NewCloudSQL(cfg config) (Datab, error) {
switch cfg.dbType {
case "postgres":
return newCloudPostgres(cfg)
diff --git a/pkg/db/db.go b/pkg/db/db.go
index 6692b46..3ab11db 100644
--- a/pkg/db/db.go
+++ b/pkg/db/db.go
@@ -2,7 +2,6 @@ package db
import (
"fmt"
- "net/http"
"os"
"github.com/medyagh/gopogh/pkg/models"
@@ -25,17 +24,23 @@ type config struct {
useIAMAuth bool
}
-// datab is the database interface we support
-type datab interface {
+// Datab is the database interface we support
+type Datab interface {
Set(models.DBEnvironmentTest, []models.DBTestCase) error
Initialize() error
- PrintEnvironmentTestsAndTestCases(http.ResponseWriter, *http.Request)
+ GetEnvironmentTestsAndTestCases() (map[string]interface{}, error)
+
+ GetEnvCharts(string, int) (map[string]interface{}, error)
+
+ GetOverview() (map[string]interface{}, error)
+
+ GetTestCharts(string, string) (map[string]interface{}, error)
}
// newDB handles which database driver to use and initializes the db
-func newDB(cfg config) (datab, error) {
+func newDB(cfg config) (Datab, error) {
switch cfg.dbType {
case "sqlite":
return newSQLite(cfg)
@@ -48,7 +53,7 @@ func newDB(cfg config) (datab, error) {
// FromEnv configures and returns a database instance.
// backend and path parameters are default config, otherwise gets config from the environment variables DB_BACKEND and DB_PATH
-func FromEnv(fv FlagValues) (c datab, err error) {
+func FromEnv(fv FlagValues) (c Datab, err error) {
backend, err := getFlagOrEnv(fv.Backend, "DB_BACKEND")
if err != nil {
return nil, err
diff --git a/pkg/db/postgres.go b/pkg/db/postgres.go
index c2911a5..2a77313 100644
--- a/pkg/db/postgres.go
+++ b/pkg/db/postgres.go
@@ -2,7 +2,9 @@ package db
import (
"fmt"
- "net/http"
+ "log"
+ "strings"
+ "time"
"github.com/jmoiron/sqlx"
_ "github.com/lib/pq" // Blank import used for registering postgres driver as a database driver
@@ -116,47 +118,361 @@ func (m *Postgres) Initialize() error {
return nil
}
-// PrintEnvironmentTestsAndTestCases writes the environment tests and test cases tables to an HTTP response in a combined page
-func (m *Postgres) PrintEnvironmentTestsAndTestCases(w http.ResponseWriter, _ *http.Request) {
+// GetEnvironmentTestsAndTestCases returns the 100 most recent environment test and test case rows, keyed by "environmentTests" and "testCases"
+func (m *Postgres) GetEnvironmentTestsAndTestCases() (map[string]interface{}, error) {
+	start := time.Now()
+
	var environmentTests []models.DBEnvironmentTest
	var testCases []models.DBTestCase
-	err := m.db.Select(&environmentTests, "SELECT * FROM db_environment_tests")
+	err := m.db.Select(&environmentTests, "SELECT * FROM db_environment_tests ORDER BY TestTime DESC LIMIT 100")
	if err != nil {
-		http.Error(w, fmt.Sprintf("failed to execute SQL query for environment tests: %v", err), http.StatusInternalServerError)
-		return
+		return nil, fmt.Errorf("failed to execute SQL query for environment tests: %v", err)
	}
-	err = m.db.Select(&testCases, "SELECT * FROM db_test_cases")
+	err = m.db.Select(&testCases, "SELECT * FROM db_test_cases ORDER BY TestTime DESC LIMIT 100")
	if err != nil {
-		http.Error(w, fmt.Sprintf("failed to execute SQL query for test cases: %v", err), http.StatusInternalServerError)
-		return
+		return nil, fmt.Errorf("failed to execute SQL query for test cases: %v", err)
+
+	}
+	data := map[string]interface{}{
+		"environmentTests": environmentTests,
+		"testCases":        testCases,
	}
+	log.Printf("\nduration metric: took %f seconds to gather all table data since start of handler\n\n", time.Since(start).Seconds())
+	return data, nil
+}
- // Write the response header to be html
- w.Header().Set("Content-Type", "text/html")
+func (m *Postgres) createMaterializedView(env string, viewName string) error { // creates (if absent) a 90-day, non-skip view of db_test_cases for env
+	createView := fmt.Sprintf(`
+	CREATE MATERIALIZED VIEW IF NOT EXISTS %s AS
+	SELECT * FROM db_test_cases
+	WHERE Result != 'skip' AND EnvName = '%s' AND TestTime >= NOW() - INTERVAL '90 days'
+	`, viewName, env) // NOTE(review): env/viewName are interpolated, not parameterized; callers validate env against the DB first
-	// Write the HTML page structure
-	fmt.Fprintf(w, "Environment Tests and Test Cases")
+	_, err := m.db.Exec(createView) // NOTE(review): IF NOT EXISTS means an existing view is never refreshed — TODO confirm staleness is acceptable
+	return err
+}
- // Environment tests table
- fmt.Fprintf(w, "Environment Tests
")
- fmt.Fprintf(w, "CommitID | EnvName | GopoghTime | TestTime | NumberOfFail | NumberOfPass | NumberOfSkip | TotalDuration |
")
- for _, row := range environmentTests {
- fmt.Fprintf(w, "%s | %s | %s | %s | %d | %d | %d | %f |
",
- row.CommitID, row.EnvName, row.GopoghTime, row.TestTime, row.NumberOfFail, row.NumberOfPass, row.NumberOfSkip, row.TotalDuration)
+// GetTestCharts returns the per-test chart data, keyed by "flakeByDay" and "flakeByWeek"
+func (m *Postgres) GetTestCharts(env string, test string) (map[string]interface{}, error) {
+	start := time.Now()
+
+	var validEnvs []string
+	err := m.db.Select(&validEnvs, "SELECT DISTINCT EnvName FROM db_environment_tests")
+	if err != nil {
+		return nil, fmt.Errorf("failed to execute SQL query for list of valid environments: %v", err)
+	}
+	isValidEnv := false
+	for _, e := range validEnvs {
+		if env == e {
+			isValidEnv = true
+		}
+	}
+	if !isValidEnv {
+		return nil, fmt.Errorf("invalid environment %q: not found in database", env)
+	}
+
+	viewName := fmt.Sprintf("\"lastn_data_%s\"", env)
+	err = m.createMaterializedView(env, viewName)
+	if err != nil {
+		return nil, fmt.Errorf("failed to execute SQL query for view creation: %v", err)
	}
-	fmt.Fprintf(w, "
")
-	// Test cases table
-	fmt.Fprintf(w, "Test Cases
")
-	fmt.Fprintf(w, "PR | CommitID | EnvName | TestName | Result | TestTime | Duration |
")
-	for _, row := range testCases {
-		fmt.Fprintf(w, "%s | %s | %s | %s | %s | %s | %f |
",
-			row.PR, row.CommitID, row.EnvName, row.TestName, row.Result, row.TestTime, row.Duration)
+	log.Printf("\nduration metric: took %f seconds to execute SQL query for creating materialized view since start of handler", time.Since(start).Seconds())
+
+	// Groups the datetimes together by date, calculating flake percentage and aggregating the individual results/durations for each date
+	sqlQuery := fmt.Sprintf(`
+	SELECT
+		DATE_TRUNC('day', TestTime) AS StartOfDate,
+		AVG(Duration) AS AvgDuration,
+		ROUND(COALESCE(AVG(CASE WHEN Result = 'fail' THEN 1 ELSE 0 END) * 100, 0), 2) AS FlakePercentage,
+		STRING_AGG(CommitID || ': ' || Result || ': ' || Duration, ', ') AS CommitResultsAndDurations
+	FROM %s
+	WHERE TestName = $1
+	GROUP BY StartOfDate
+	ORDER BY StartOfDate DESC
+	`, viewName)
+
+	var flakeByDay []models.DBTestRateAndDuration
+	err = m.db.Select(&flakeByDay, sqlQuery, test)
+	if err != nil {
+		return nil, fmt.Errorf("failed to execute SQL query for flake rate and duration by day chart: %v", err)
+	}
+
+	log.Printf("\nduration metric: took %f seconds to execute SQL query for flake rate and duration by day chart since start of handler", time.Since(start).Seconds())
+
+	// Groups the datetimes together by week, calculating flake percentage and aggregating the individual results/durations for each date
+	sqlQuery = fmt.Sprintf(`
+	SELECT
+		DATE_TRUNC('week', TestTime) AS StartOfDate,
+		AVG(Duration) AS AvgDuration,
+		ROUND(COALESCE(AVG(CASE WHEN Result = 'fail' THEN 1 ELSE 0 END) * 100, 0), 2) AS FlakePercentage,
+		STRING_AGG(CommitID || ': ' || Result || ': ' || Duration, ', ') AS CommitResultsAndDurations
+	FROM %s
+	WHERE TestName = $1
+	GROUP BY StartOfDate
+	ORDER BY StartOfDate DESC
+	`, viewName)
+	var flakeByWeek []models.DBTestRateAndDuration
+	err = m.db.Select(&flakeByWeek, sqlQuery, test)
+	if err != nil {
+		return nil, fmt.Errorf("failed to execute SQL query for flake rate and duration by week chart: %v", err)
+	}
+	log.Printf("\nduration metric: took %f seconds to execute SQL query for flake rate and duration by week chart since start of handler", time.Since(start).Seconds())
+
+	data := map[string]interface{}{
+		"flakeByDay":  flakeByDay,
+		"flakeByWeek": flakeByWeek,
+	}
+	log.Printf("\nduration metric: took %f seconds to gather individual test chart data since start of handler\n\n", time.Since(start).Seconds())
+	return data, nil
+}
+
+// GetEnvCharts returns the overall environment chart data, keyed by "recentFlakePercentTable", "flakeRateByWeek", "flakeRateByDay", and "countsAndDurations"
+func (m *Postgres) GetEnvCharts(env string, testsInTop int) (map[string]interface{}, error) {
+	start := time.Now()
+
+	var validEnvs []string
+	err := m.db.Select(&validEnvs, "SELECT DISTINCT EnvName FROM db_environment_tests")
+	if err != nil {
+		return nil, fmt.Errorf("failed to execute SQL query for list of valid environments: %v", err)
+	}
+	isValidEnv := false
+	for _, e := range validEnvs {
+		if env == e {
+			isValidEnv = true
+		}
+	}
+	if !isValidEnv {
+		return nil, fmt.Errorf("invalid environment %q: not found in database", env)
+	}
+
+	viewName := fmt.Sprintf("\"lastn_data_%s\"", env)
+	err = m.createMaterializedView(env, viewName)
+	if err != nil {
+		return nil, fmt.Errorf("failed to execute SQL query for view creation: %v", err)
	}
-	fmt.Fprintf(w, "
")
-	// Close the HTML page structure
-	fmt.Fprintf(w, "")
+	log.Printf("\nduration metric: took %f seconds to execute SQL query for creating materialized view since start of handler", time.Since(start).Seconds())
+
+	// Number of days to use to look for "flaky-est" tests.
+	const dateRange = 15
+
+	// This query first makes a temp table containing the $1 (30) most recent dates
+	// Then it computes the recentCutoff and prevCutoff (15th most recent and 30th most recent dates)
+	// Then we calculate the flake rate and the flake rate growth
+	// for the 15 most recent days and the 15 days following that
+	sqlQuery := fmt.Sprintf(`
+	WITH dates AS (
+		SELECT DISTINCT DATE_TRUNC('day', TestTime) AS Date
+		FROM %s
+		ORDER BY Date DESC
+		LIMIT $1
+	), recentCutoff AS (
+		SELECT Date
+		FROM dates
+		ORDER BY Date DESC
+		OFFSET $2
+		LIMIT 1
+	), prevCutoff AS (
+		SELECT Date
+		FROM dates
+		ORDER BY Date DESC
+		OFFSET $3
+		LIMIT 1
+	), temp AS (
+		SELECT TestName,
+		ROUND(COALESCE(AVG(CASE WHEN TestTime > (SELECT Date FROM recentCutoff) THEN CASE WHEN Result = 'fail' THEN 1 ELSE 0 END END) * 100, 0), 2) AS RecentFlakePercentage,
+		ROUND(COALESCE(AVG(CASE WHEN TestTime <= (SELECT Date FROM recentCutoff) AND TestTime > (SELECT Date FROM prevCutoff) THEN CASE WHEN Result = 'fail' THEN 1 ELSE 0 END END) * 100, 0), 2) AS PrevFlakePercentage
+		FROM %s
+		GROUP BY TestName
+		ORDER BY RecentFlakePercentage DESC
+	)
+	SELECT TestName, RecentFlakePercentage, RecentFlakePercentage - PrevFlakePercentage AS GrowthRate
+	FROM temp
+	ORDER BY RecentFlakePercentage DESC;
+	`, viewName, viewName)
+	var flakeRates []models.DBFlakeRow
+	err = m.db.Select(&flakeRates, sqlQuery, 2*dateRange, dateRange-1, 2*dateRange-1)
+	if err != nil {
+		return nil, fmt.Errorf("failed to execute SQL query for flake table: %v", err)
+	}
+	log.Printf("\nduration metric: took %f seconds to execute SQL query for flake table since start of handler", time.Since(start).Seconds())
+
+	var topTestNames []string
+	for _, row := range flakeRates {
+		topTestNames = append(topTestNames, row.TestName)
+		if len(topTestNames) >= testsInTop {
+			break
+		}
+	}
+
+	// Gets the data on just the top testsInTop tests previously calculated and aggregates flake rates and results per date
+	sqlQuery = fmt.Sprintf(`
+	WITH lastn_data_top AS (
+		SELECT *
+		FROM %s
+		WHERE TestName IN ('%s')
+	)
+	SELECT TestName,
+		DATE_TRUNC('day', TestTime) AS StartOfDate,
+		COALESCE(AVG(CASE WHEN Result = 'fail' THEN 1 ELSE 0 END) * 100, 0) AS FlakePercentage,
+		STRING_AGG(CommitID || ': ' || Result, ', ') AS CommitResults
+	FROM lastn_data_top
+	GROUP BY TestName, StartOfDate
+	ORDER BY StartOfDate DESC
+	`, viewName,
+		strings.Join(topTestNames, "', '")) // NOTE(review): test names are interpolated unescaped; a single quote in a TestName breaks this query — TODO parameterize
+	var flakeRateByDay []models.DBFlakeBy
+	err = m.db.Select(&flakeRateByDay, sqlQuery)
+	if err != nil {
+		return nil, fmt.Errorf("failed to execute SQL query for by day flake chart: %v", err)
+	}
+	log.Printf("\nduration metric: took %f seconds to execute SQL query for day flake chart since start of handler", time.Since(start).Seconds())
+
+	// Filters to get the top flakiest in the past week, calculating flake rate per week for those tests
+	sqlQuery = fmt.Sprintf(`
+	WITH recent_week AS (
+		SELECT MAX (DATE_TRUNC('week', TestTime)) AS weekCutoff
+		FROM %s
+	),
+	recent_week_data AS (
+		SELECT *
+		FROM %s
+		WHERE TestTime >= (SELECT weekCutoff FROM recent_week)
+	),
+	top_flakiest AS (
+		SELECT TestName, COALESCE(AVG(CASE WHEN Result = 'fail' THEN 1 ELSE 0 END) * 100, 0) AS RecentFlakePercentage
+		FROM recent_week_data
+		GROUP BY TestName
+		ORDER BY RecentFlakePercentage DESC
+		LIMIT $1
+	),
+	top_flakiest_data AS (
+		SELECT * FROM %s
+		WHERE TestName IN (SELECT TestName FROM top_flakiest)
+	)
+	SELECT TestName,
+		DATE_TRUNC('week', TestTime) AS StartOfDate,
+		ROUND(COALESCE(AVG(CASE WHEN Result = 'fail' THEN 1 ELSE 0 END) * 100, 0), 2) AS FlakePercentage,
+		STRING_AGG(CommitID || ': ' || Result, ', ') AS CommitResults
+	FROM top_flakiest_data
+	GROUP BY TestName, StartOfDate
+	ORDER BY StartOfDate DESC;
+	`, viewName, viewName, viewName)
+	var flakeRateByWeek []models.DBFlakeBy
+	err = m.db.Select(&flakeRateByWeek, sqlQuery, testsInTop)
+	if err != nil {
+		return nil, fmt.Errorf("failed to execute SQL query for by week flake chart: %v", err)
+	}
+	log.Printf("\nduration metric: took %f seconds to execute SQL query for flake by week chart since start of handler", time.Since(start).Seconds())
+
+	// Filters out data prior to 90 days and with the incorrect environment
+	// Then calculates for each date aggregates the duration and number of tests, calculating the average for both
+	sqlQuery = `
+	WITH lastn_env_data AS (
+		SELECT *
+		FROM db_environment_tests
+		WHERE EnvName = $1 AND TestTime >= NOW() - INTERVAL '90 days'
+	)
+	SELECT
+		DATE_TRUNC('day', TestTime) AS StartOfDate,
+		AVG(NumberOfPass + NumberOfFail) AS TestCount,
+		AVG(TotalDuration) AS Duration,
+		STRING_AGG(CommitID || ': ' || (NumberOfPass + NumberOfFail), ', ') AS CommitCounts,
+		STRING_AGG(CommitID || ': ' || TotalDuration, ', ') AS CommitDurations
+	FROM lastn_env_data
+	GROUP BY StartOfDate
+	ORDER BY StartOfDate DESC
+	`
+	var countsAndDurations []models.DBEnvDuration
+	err = m.db.Select(&countsAndDurations, sqlQuery, env)
+	if err != nil {
+		return nil, fmt.Errorf("failed to execute SQL query for environment test count and duration chart: %v", err)
+	}
+	log.Printf("\nduration metric: took %f seconds to execute SQL query for env duration chart since start of handler", time.Since(start).Seconds())
+
+	data := map[string]interface{}{
+		"recentFlakePercentTable": flakeRates,
+		"flakeRateByWeek":         flakeRateByWeek,
+		"flakeRateByDay":          flakeRateByDay,
+		"countsAndDurations":      countsAndDurations,
+	}
+	log.Printf("\nduration metric: took %f seconds to gather env chart data since start of handler\n\n", time.Since(start).Seconds())
+	return data, nil
+}
+
+// GetOverview returns the overview chart data, keyed by "summaryAvgFail" and "summaryTable"
+func (m *Postgres) GetOverview() (map[string]interface{}, error) {
+	start := time.Now()
+	// Filters out old data and calculates the average number of failures and average duration per day per environment
+	sqlQuery := `
+	SELECT DATE_TRUNC('day', TestTime) AS StartOfDate, EnvName, AVG(NumberOfFail) AS AvgFailedTests, AVG(TotalDuration) AS AvgDuration
+	FROM db_environment_tests
+	WHERE TestTime >= NOW() - INTERVAL '90 days'
+	GROUP BY StartOfDate, EnvName
+	ORDER BY StartOfDate, EnvName;
+	`
+
+	var summaryAvgFail []models.DBSummaryAvgFail
+	err := m.db.Select(&summaryAvgFail, sqlQuery)
+	if err != nil {
+		return nil, fmt.Errorf("failed to execute SQL query for summary chart: %v", err)
+	}
+	log.Printf("\nduration metric: took %f seconds to execute SQL query for summary duration and failure charts since start of handler", time.Since(start).Seconds())
+
+	// Number of days to use to look for "flaky-est" envs.
+	const dateRange = 15
+
+	// Filters out data from prior to 90 days
+	// Then computes average number of fails for each environment for each time frame
+	// Then calculates the change in the average number of fails between the time frames
+	sqlQuery = `
+	WITH data AS (
+		SELECT *
+		FROM db_environment_tests
+		WHERE TestTime >= NOW() - INTERVAL '90 days'
+	), dates AS (
+		SELECT DISTINCT DATE_TRUNC('day', TestTime) AS Date
+		FROM data
+		ORDER BY Date DESC
+		LIMIT $1
+	), recentCutoff AS (
+		SELECT Date
+		FROM dates
+		ORDER BY Date DESC
+		OFFSET $2
+		LIMIT 1
+	), prevCutoff AS (
+		SELECT Date
+		FROM dates
+		ORDER BY Date DESC
+		OFFSET $3
+		LIMIT 1
+	), temp AS (
+		SELECT EnvName,
+		ROUND(COALESCE(AVG(CASE WHEN TestTime > (SELECT Date FROM recentCutoff) THEN NumberOfFail END), 0), 2) AS RecentNumberOfFail,
+		ROUND(COALESCE(AVG(CASE WHEN TestTime <= (SELECT Date FROM recentCutoff) AND TestTime > (SELECT Date FROM prevCutoff) THEN NumberOfFail END), 0), 2) AS PrevNumberOfFail
+		FROM data
+		GROUP BY EnvName
+		ORDER BY RecentNumberOfFail DESC
+	)
+	SELECT EnvName, RecentNumberOfFail, RecentNumberOfFail - PrevNumberOfFail AS Growth
+	FROM temp
+	ORDER BY RecentNumberOfFail DESC;
+	`
+	var summaryTable []models.DBSummaryTable
+	err = m.db.Select(&summaryTable, sqlQuery, 2*dateRange, dateRange-1, 2*dateRange-1)
+	if err != nil {
+		return nil, fmt.Errorf("failed to execute SQL query for summary table: %v", err)
+	}
+	log.Printf("\nduration metric: took %f seconds to execute SQL query for summary failure change table since start of handler", time.Since(start).Seconds())
+
+	data := map[string]interface{}{
+		"summaryAvgFail": summaryAvgFail,
+		"summaryTable":   summaryTable,
+	}
+	log.Printf("\nduration metric: took %f seconds to gather summary data since start of handler\n\n", time.Since(start).Seconds())
+	return data, nil
}
diff --git a/pkg/db/sqlite.go b/pkg/db/sqlite.go
index 7af8508..6e30d50 100644
--- a/pkg/db/sqlite.go
+++ b/pkg/db/sqlite.go
@@ -2,7 +2,6 @@ package db
import (
"fmt"
- "net/http"
"os"
"path/filepath"
@@ -114,7 +113,26 @@ func (m *sqlite) Initialize() error {
return nil
}
-// PrintEnvironmentTestsAndTestCases writes the environment tests and test cases tables to an HTTP response in a combined page
+// GetEnvironmentTestsAndTestCases returns the database tables as a map keyed by environmentTests and testCases
// This is not yet supported for sqlite
-func (m *sqlite) PrintEnvironmentTestsAndTestCases(_ http.ResponseWriter, _ *http.Request) {
+func (m *sqlite) GetEnvironmentTestsAndTestCases() (map[string]interface{}, error) {
+	return nil, nil // nil data signals "not implemented" to callers
+}
+
+// GetEnvCharts returns the overall environment chart data (recentFlakePercentTable, flakeRateByWeek, flakeRateByDay, countsAndDurations)
+// This is not yet supported for sqlite
+func (m *sqlite) GetEnvCharts(_ string, _ int) (map[string]interface{}, error) {
+	return nil, nil // nil data signals "not implemented" to callers
+}
+
+// GetTestCharts returns the per-test chart data (flakeByDay, flakeByWeek)
+// This is not yet supported for sqlite
+func (m *sqlite) GetTestCharts(_ string, _ string) (map[string]interface{}, error) {
+	return nil, nil // nil data signals "not implemented" to callers
+}
+
+// GetOverview returns the overview chart data (summaryAvgFail, summaryTable)
+// This is not yet supported for sqlite
+func (m *sqlite) GetOverview() (map[string]interface{}, error) {
+	return nil, nil // nil data signals "not implemented" to callers
+}
diff --git a/pkg/handler/handler.go b/pkg/handler/handler.go
new file mode 100644
index 0000000..b47a9a9
--- /dev/null
+++ b/pkg/handler/handler.go
@@ -0,0 +1,160 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "strconv"
+
+ "github.com/medyagh/gopogh/pkg/db"
+ "github.com/medyagh/gopogh/pkg/report"
+)
+
+type DB struct { // DB exposes HTTP handlers backed by a db.Datab implementation
+	Database db.Datab // the database backend used by all Serve* handlers
+}
+
+func (m *DB) ServeEnvironmentTestsAndTestCases(w http.ResponseWriter, _ *http.Request) { // ServeEnvironmentTestsAndTestCases writes the db tables as a JSON response
+	data, err := m.Database.GetEnvironmentTestsAndTestCases()
+	if err != nil {
+		http.Error(w, err.Error(), http.StatusInternalServerError)
+		return
+	}
+	if data == nil {
+		http.Error(w, "not implemented for this database backend", http.StatusNotImplemented)
+		return
+	}
+	jsonData, err := json.Marshal(data)
+	if err != nil {
+		http.Error(w, "Failed to marshal JSON", http.StatusInternalServerError)
+		return
+	}
+	w.Header().Set("Content-Type", "application/json")
+	w.Header().Set("Access-Control-Allow-Origin", "*")
+	_, err = w.Write(jsonData)
+	if err != nil {
+		http.Error(w, "Failed to write JSON data", http.StatusInternalServerError)
+		return
+	}
+}
+
+// ServeTestCharts writes the individual test charts to a JSON HTTP response
+func (m *DB) ServeTestCharts(w http.ResponseWriter, r *http.Request) {
+	queryValues := r.URL.Query()
+	env := queryValues.Get("env")
+	if env == "" {
+		http.Error(w, "missing environment name", http.StatusUnprocessableEntity)
+		return
+	}
+	test := queryValues.Get("test")
+	if test == "" {
+		http.Error(w, "missing test name", http.StatusUnprocessableEntity)
+		return
+	}
+
+	data, err := m.Database.GetTestCharts(env, test)
+	if err != nil {
+		http.Error(w, err.Error(), http.StatusInternalServerError)
+		return
+	}
+	if data == nil {
+		http.Error(w, "not implemented for this database backend", http.StatusNotImplemented)
+		return
+	}
+	jsonData, err := json.Marshal(data)
+	if err != nil {
+		http.Error(w, "Failed to marshal JSON", http.StatusInternalServerError)
+		return
+	}
+	w.Header().Set("Content-Type", "application/json")
+	w.Header().Set("Access-Control-Allow-Origin", "*")
+	_, err = w.Write(jsonData)
+	if err != nil {
+		http.Error(w, "Failed to write JSON data", http.StatusInternalServerError)
+		return
+	}
+}
+
+// ServeEnvCharts writes the overall environment charts to a JSON HTTP response
+func (m *DB) ServeEnvCharts(w http.ResponseWriter, r *http.Request) {
+	queryValues := r.URL.Query()
+	env := queryValues.Get("env")
+	if env == "" {
+		http.Error(w, "missing environment name", http.StatusUnprocessableEntity)
+		return
+	}
+	testsInTopStr := queryValues.Get("tests_in_top")
+	if testsInTopStr == "" {
+		testsInTopStr = "10"
+	}
+	testsInTop, err := strconv.Atoi(testsInTopStr) // NOTE(review): values < 1 are passed through to the DB query — TODO reject non-positive values here
+	if err != nil {
+		http.Error(w, fmt.Sprintf("invalid number of top tests to use: %v", err), http.StatusUnprocessableEntity)
+		return
+	}
+	data, err := m.Database.GetEnvCharts(env, testsInTop)
+	if err != nil {
+		http.Error(w, err.Error(), http.StatusInternalServerError)
+		return
+	}
+	if data == nil {
+		http.Error(w, "not implemented for this database backend", http.StatusNotImplemented)
+		return
+	}
+	jsonData, err := json.Marshal(data)
+	if err != nil {
+		http.Error(w, "Failed to marshal JSON", http.StatusInternalServerError)
+		return
+	}
+	w.Header().Set("Content-Type", "application/json")
+	w.Header().Set("Access-Control-Allow-Origin", "*")
+	_, err = w.Write(jsonData)
+	if err != nil {
+		http.Error(w, "Failed to write JSON data", http.StatusInternalServerError)
+		return
+	}
+}
+
+// ServeOverview writes the overview chart for all of the environments to a JSON HTTP response
+func (m *DB) ServeOverview(w http.ResponseWriter, _ *http.Request) {
+	data, err := m.Database.GetOverview()
+	if err != nil {
+		http.Error(w, err.Error(), http.StatusInternalServerError)
+		return
+	}
+	if data == nil {
+		http.Error(w, "not implemented for this database backend", http.StatusNotImplemented)
+		return
+	}
+	jsonData, err := json.Marshal(data)
+	if err != nil {
+		http.Error(w, "Failed to marshal JSON", http.StatusInternalServerError)
+		return
+	}
+	w.Header().Set("Content-Type", "application/json")
+	w.Header().Set("Access-Control-Allow-Origin", "*")
+	_, err = w.Write(jsonData)
+	if err != nil {
+		http.Error(w, "Failed to write JSON data", http.StatusInternalServerError)
+		return
+	}
+}
+
+// ServeGopoghVersion writes the gopogh version to a JSON response
+func ServeGopoghVersion(w http.ResponseWriter, _ *http.Request) {
+	data := map[string]interface{}{
+		"version": report.Version,
+	}
+	jsonData, err := json.Marshal(data)
+	if err != nil {
+		http.Error(w, "Failed to marshal JSON", http.StatusInternalServerError)
+		return
+	}
+	w.Header().Set("Content-Type", "application/json")
+	w.Header().Set("Access-Control-Allow-Origin", "*")
+	_, err = w.Write(jsonData)
+	if err != nil {
+		http.Error(w, "Failed to write JSON data", http.StatusInternalServerError) // NOTE(review): headers/body may already be partially written here, so this is best-effort
+		return
+	}
+}
diff --git a/pkg/models/models.go b/pkg/models/models.go
index 1b423a8..a41ffc4 100644
--- a/pkg/models/models.go
+++ b/pkg/models/models.go
@@ -55,3 +55,50 @@ type DBEnvironmentTest struct {
TotalDuration float64
GopoghVersion string
}
+
+// DBFlakeRow represents a row in the basic flake rate table
+type DBFlakeRow struct {
+	TestName              string  `json:"testName"`
+	RecentFlakePercentage float32 `json:"recentFlakePercentage"`
+	GrowthRate            float32 `json:"growthRate"`
+}
+
+// DBFlakeBy represents a "row" in the flake-rate-by-day/week charts for the flakiest recent tests
+type DBFlakeBy struct {
+	TestName        string    `json:"testName"`
+	StartOfDate     time.Time `json:"startOfDate"`
+	FlakePercentage float32   `json:"flakePercentage"`
+	CommitResults   string    `json:"commitResults"`
+}
+
+// DBEnvDuration represents a "row" in the test count and total duration by day chart
+type DBEnvDuration struct {
+	StartOfDate     time.Time `json:"startOfDate"`
+	TestCount       float32   `json:"testCount"`
+	Duration        float32   `json:"duration"`
+	CommitCounts    string    `json:"commitCounts"`
+	CommitDurations string    `json:"commitDurations"`
+}
+
+// DBTestRateAndDuration represents a "row" in the flake rate and duration chart for a given test
+type DBTestRateAndDuration struct {
+	StartOfDate               time.Time `json:"startOfDate"`
+	AvgDuration               float32   `json:"avgDuration"`
+	FlakePercentage           float32   `json:"flakePercentage"`
+	CommitResultsAndDurations string    `json:"commitResultsAndDurations"`
+}
+
+// DBSummaryAvgFail represents a "row" in the flakiest-environments summary chart
+type DBSummaryAvgFail struct {
+	StartOfDate    time.Time `json:"startOfDate"`
+	EnvName        string    `json:"envName"`
+	AvgFailedTests float32   `json:"avgFailedTests"`
+	AvgDuration    float32   `json:"avgDuration"`
+}
+
+// DBSummaryTable represents a row in the summary number of fail table
+type DBSummaryTable struct {
+	EnvName            string  `json:"envName"`
+	RecentNumberOfFail float32 `json:"recentNumberOfFail"`
+	Growth             float32 `json:"growth"`
+}