devops(flakiness): azure function to store dashboard in a new format (#4540)

This refactors the Azure function and adds a new dashboard format implementation that is 15x smaller while still tracking specs with non-empty annotations.

The old dashboard is still updated.
Andrey Lushnikov 2020-11-30 10:16:03 -08:00 committed by GitHub
parent d96330bbec
commit d104591aa1
4 changed files with 348 additions and 151 deletions
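
For orientation, the v2 payload produced by serialize() in dashboard_v2.js below is shaped roughly as follows. This is a hand-written sketch with made-up spec names and values, not actual output; the size win over v1 comes from keying on specs and storing only problematic tests and commit coordinates, instead of whole buildbot runs:

{
  "version": 1,
  "timestamp": 1606759963000,
  "commits": [
    {"sha": "d96330bbec", "timestamp": 1606750000000, "message": "...", "author": "...", "email": "..."}
  ],
  "specs": [
    {
      "specId": "page.spec.ts --- should click",
      "file": "page.spec.ts",
      "title": "should click",
      "commitCoordinates": [{"sha": "d96330bbec", "line": 42, "column": 1}],
      "problematicTests": [{"sha": "d96330bbec", "test": {"annotations": [{"type": "flaky"}], "runs": [{"status": "passed"}]}}]
    }
  ]
}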

View File: dashboard_v1.js

@@ -0,0 +1,102 @@
/**
* Copyright (c) Microsoft Corporation.
*
 * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const {SimpleBlob, flattenSpecs} = require('./utils.js');
const DASHBOARD_VERSION = 1;
class Dashboard {
  constructor() {
    this._runs = [];
  }
  initialize(jsonData) {
    if (jsonData.version !== DASHBOARD_VERSION) {
      // Run migrations here!
    }
    this._runs = jsonData.buildbotRuns;
  }
  addReport(report) {
    // We cannot use line numbers to identify specs since line numbers
    // might be different across commits.
    const getSpecId = spec => spec.file + ' @@@ ' + spec.title;
    const faultySpecIds = new Set();
    for (const run of this._runs) {
      for (const spec of run.specs)
        faultySpecIds.add(getSpecId(spec));
    }
    const specs = [];
    for (const spec of flattenSpecs(report)) {
      // Filter out specs that didn't have a single test actually run in the
      // given shard.
      if (spec.tests.every(test => test.runs.length === 1 && !test.runs[0].status))
        continue;
      const hasFlakyAnnotation = spec.tests.some(test => test.annotations.some(a => a.type === 'flaky'));
      if (!spec.ok || hasFlakyAnnotation || faultySpecIds.has(getSpecId(spec)))
        specs.push(spec);
    }
    if (specs.length) {
      this._runs.push({
        metadata: report.metadata,
        specs,
      });
    }
    return specs.length;
  }
  serialize(maxCommits = 100) {
    const shaToTimestamp = new Map();
    for (const run of this._runs)
      shaToTimestamp.set(run.metadata.commitSHA, run.metadata.commitTimestamp);
    const commits = [...shaToTimestamp].sort(([sha1, ts1], [sha2, ts2]) => ts2 - ts1).slice(0, maxCommits);
    const commitsSet = new Set(commits.map(([sha, ts]) => sha));
    return {
      version: DASHBOARD_VERSION,
      timestamp: Date.now(),
      buildbotRuns: this._runs.filter(run => commitsSet.has(run.metadata.commitSHA)),
    };
  }
}
async function processDashboardV1(context, report) {
  const timestamp = Date.now();
  const dashboardBlob = await SimpleBlob.create('dashboards', 'main.json');
  const dashboardData = await dashboardBlob.download();
  const dashboard = new Dashboard();
  if (dashboardData)
    dashboard.initialize(dashboardData);
  try {
    const addedSpecs = dashboard.addReport(report);
    await dashboardBlob.uploadGzipped(dashboard.serialize());
    context.log(`
===== started dashboard v1 =====
SHA: ${report.metadata.commitSHA}
URL: ${report.metadata.runURL}
timestamp: ${report.metadata.commitTimestamp}
added specs: ${addedSpecs}
===== complete in ${Date.now() - timestamp}ms =====
`);
  } catch (e) {
    context.log(e);
    return;
  }
}
module.exports = {processDashboardV1};
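
As a quick illustration of v1's filtering rule, here is a standalone sketch with a made-up minimal report; flattenSpecs is inlined from utils.js below, and the faultySpecIds carry-over check (specs already seen in earlier runs) is omitted for brevity:

// Standalone sketch: which specs would v1's addReport() keep?
function flattenSpecs(suite, result = []) {  // inlined from utils.js
  if (suite.suites) {
    for (const child of suite.suites)
      flattenSpecs(child, result);
  }
  for (const spec of suite.specs || [])
    result.push(spec);
  return result;
}

const report = {
  suites: [{
    specs: [
      // Sharded away: every test has a single status-less run -> skipped.
      {ok: true, file: 'a.spec.ts', title: 'skipped in shard', tests: [{annotations: [], runs: [{}]}]},
      // Failing spec (ok === false) -> kept.
      {ok: false, file: 'a.spec.ts', title: 'fails', tests: [{annotations: [], runs: [{status: 'failed'}]}]},
      // Passing but annotated flaky -> kept.
      {ok: true, file: 'b.spec.ts', title: 'flaky', tests: [{annotations: [{type: 'flaky'}], runs: [{status: 'passed'}]}]},
      // Healthy pass -> dropped.
      {ok: true, file: 'b.spec.ts', title: 'healthy', tests: [{annotations: [], runs: [{status: 'passed'}]}]},
    ],
  }],
};

for (const spec of flattenSpecs(report)) {
  const shardedAway = spec.tests.every(test => test.runs.length === 1 && !test.runs[0].status);
  const hasFlakyAnnotation = spec.tests.some(test => test.annotations.some(a => a.type === 'flaky'));
  if (!shardedAway && (!spec.ok || hasFlakyAnnotation))
    console.log('kept:', spec.title);  // prints "fails" and "flaky"
}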

View File: dashboard_v2.js

@@ -0,0 +1,155 @@
/**
* Copyright (c) Microsoft Corporation.
*
 * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const {SimpleBlob, flattenSpecs} = require('./utils.js');
const DASHBOARD_VERSION = 1;
class Dashboard {
  constructor() {
    this._specs = new Map();
    this._commits = new Map();
  }
  initialize(jsonData) {
    if (jsonData.version !== DASHBOARD_VERSION) {
      // Run migrations here!
    }
    for (const spec of jsonData.specs) {
      const commitCoordinates = new Map();
      for (const coord of spec.commitCoordinates)
        commitCoordinates.set(coord.sha, coord);
      this._specs.set(spec.specId, {
        specId: spec.specId,
        file: spec.file,
        title: spec.title,
        problematicTests: spec.problematicTests,
        commitCoordinates,
      });
    }
    for (const commit of jsonData.commits)
      this._commits.set(commit.sha, commit);
  }
  addReport(report) {
    const sha = report.metadata.commitSHA;
    this._commits.set(sha, {
      sha,
      timestamp: report.metadata.commitTimestamp,
      message: report.metadata.commitTitle,
      author: report.metadata.commitAuthorName,
      email: report.metadata.commitAuthorEmail,
    });
    let addedSpecs = 0;
    for (const spec of flattenSpecs(report)) {
      // We cannot use line numbers to identify specs since line numbers
      // might be different across commits.
      const specId = spec.file + ' --- ' + spec.title;
      const tests = spec.tests.filter(test => !isHealthyTest(test));
      // If there are no problematic test runs now or previously - ignore the spec.
      if (!tests.length && !this._specs.has(specId))
        continue;
      ++addedSpecs;
      let specInfo = this._specs.get(specId);
      if (!specInfo) {
        specInfo = {
          specId,
          title: spec.title,
          file: spec.file,
          commitCoordinates: new Map(),
          problematicTests: [],
        };
        this._specs.set(specId, specInfo);
      }
      specInfo.problematicTests.push(...tests.map(test => ({sha, test})));
      specInfo.commitCoordinates.set(sha, ({sha, line: spec.line, column: spec.column}));
    }
    return addedSpecs;
  }
  serialize(maxCommits = 100) {
    const commits = [...this._commits.values()].sort((a, b) => a.timestamp - b.timestamp).slice(-maxCommits);
    const whitelistedCommits = new Set();
    for (const c of commits)
      whitelistedCommits.add(c.sha);
    const specs = [...this._specs.values()].map(spec => ({
      specId: spec.specId,
      title: spec.title,
      file: spec.file,
      commitCoordinates: [...spec.commitCoordinates.values()].filter(coord => whitelistedCommits.has(coord.sha)),
      problematicTests: [...spec.problematicTests.values()].filter(test => whitelistedCommits.has(test.sha)),
    })).filter(spec => spec.commitCoordinates.length && spec.problematicTests.length);
    return {
      version: DASHBOARD_VERSION,
      timestamp: Date.now(),
      commits,
      specs,
    };
  }
}
async function processDashboardV2(context, report) {
  const timestamp = Date.now();
  const dashboardBlob = await SimpleBlob.create('dashboards', 'main_v2.json');
  const dashboardData = await dashboardBlob.download();
  const dashboard = new Dashboard();
  if (dashboardData)
    dashboard.initialize(dashboardData);
  try {
    const addedSpecs = dashboard.addReport(report);
    await dashboardBlob.uploadGzipped(dashboard.serialize());
    context.log(`
===== started dashboard v2 =====
SHA: ${report.metadata.commitSHA}
URL: ${report.metadata.runURL}
timestamp: ${report.metadata.commitTimestamp}
added specs: ${addedSpecs}
===== complete in ${Date.now() - timestamp}ms =====
`);
  } catch (e) {
    context.log(e);
    return;
  }
}
module.exports = {processDashboardV2};
function isHealthyTest(test) {
  // If the test has any annotations - it's not healthy and requires attention.
  if (test.annotations.length)
    return false;
  // If the test does not have annotations and doesn't have runs - it's healthy.
  if (!test.runs.length)
    return true;
  // If the test was run more than once - it's been retried and is thus unhealthy.
  if (test.runs.length > 1)
    return false;
  const run = test.runs[0];
  // The test might not have a status if it was sharded away - consider it healthy.
  if (!run.status)
    return true;
  // If the status is not "passed", then it's a bad test.
  if (run.status !== 'passed')
    return false;
  // If the run passed, but that's not what we expected - it's a bad test.
  if (run.status !== test.expectedStatus)
    return false;
  // Otherwise, the test is healthy.
  return true;
}
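
A few worked cases for isHealthyTest, run after the definition above; the test objects are made up to exercise each branch in order:

console.log(isHealthyTest({annotations: [], runs: []}));                            // true: never ran, no annotations
console.log(isHealthyTest({annotations: [{type: 'fixme'}], runs: []}));             // false: has annotations
console.log(isHealthyTest({annotations: [], runs: [{}, {}]}));                      // false: retried
console.log(isHealthyTest({annotations: [], runs: [{}]}));                          // true: sharded away (no status)
console.log(isHealthyTest({annotations: [], runs: [{status: 'failed'}]}));          // false: did not pass
console.log(isHealthyTest({annotations: [], runs: [{status: 'passed'}], expectedStatus: 'failed'})); // false: passed unexpectedly
console.log(isHealthyTest({annotations: [], runs: [{status: 'passed'}], expectedStatus: 'passed'})); // true: expected pass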

View File: index.js

@@ -14,160 +14,21 @@
* limitations under the License.
*/
(Deleted lines: the previous inline SimpleBlob class, the zlib/gzip helpers, deleteUploadBlob, flattenSpecs, and the v1 Dashboard class, all relocated to utils.js and dashboard_v1.js.)
const {blobServiceClient, gunzipAsync, deleteBlob} = require('./utils.js');
const {processDashboardV1} = require('./dashboard_v1.js');
const {processDashboardV2} = require('./dashboard_v2.js');

module.exports = async function(context) {
  // First thing we do - delete the blob.
  await deleteBlob('uploads', context.bindingData.name);
  // Get report data.
  const data = await gunzipAsync(context.bindings.newBlob);
  const report = JSON.parse(data.toString('utf8'));
  // Upload report to both dashboards.
  await Promise.all([
    processDashboardV1(context, report),
    processDashboardV2(context, report),
  ]);
}
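
index.js relies on an Azure Functions blob trigger. The binding definition is not part of this diff, but a function.json matching the names the code uses (newBlob, bindingData.name, the uploads container, the AzureWebJobsStorage connection) would look roughly like this sketch:

{
  "bindings": [
    {
      "name": "newBlob",
      "type": "blobTrigger",
      "direction": "in",
      "path": "uploads/{name}",
      "connection": "AzureWebJobsStorage"
    }
  ]
}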

View File: utils.js

@@ -0,0 +1,79 @@
/**
* Copyright (c) Microsoft Corporation.
*
 * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const { BlobServiceClient } = require("@azure/storage-blob");
const zlib = require('zlib');
const util = require('util');
const gzipAsync = util.promisify(zlib.gzip);
const gunzipAsync = util.promisify(zlib.gunzip);
const blobServiceClient = BlobServiceClient.fromConnectionString(process.env.AzureWebJobsStorage);
function flattenSpecs(suite, result = []) {
  if (suite.suites) {
    for (const child of suite.suites)
      flattenSpecs(child, result);
  }
  for (const spec of suite.specs || [])
    result.push(spec);
  return result;
}
class SimpleBlob {
  static async create(container, blobName) {
    const dashboardContainerClient = await blobServiceClient.getContainerClient(container);
    return new SimpleBlob(dashboardContainerClient, blobName);
  }
  constructor(containerClient, blobName) {
    this._blobClient = containerClient.getBlobClient(blobName);
    this._blockBlobClient = this._blobClient.getBlockBlobClient();
  }
  async download() {
    if (!await this._blobClient.exists())
      return undefined;
    const response = await this._blobClient.download();
    const responseStream = response.readableStreamBody;
    const buffer = await new Promise((resolve, reject) => {
      const chunks = [];
      responseStream.on('data', data => chunks.push(data instanceof Buffer ? data : Buffer.from(data)));
      responseStream.on('end', () => resolve(Buffer.concat(chunks)));
      responseStream.on('error', reject);
    });
    const properties = await this._blobClient.getProperties();
    const content = properties.contentEncoding.toLowerCase().trim() === 'gzip' ? await gunzipAsync(buffer) : buffer.toString('utf8');
    return JSON.parse(content);
  }
  async uploadGzipped(data) {
    const content = JSON.stringify(data);
    const zipped = await gzipAsync(content);
    await this._blockBlobClient.upload(zipped, Buffer.byteLength(zipped), {
      blobHTTPHeaders: {
        blobContentEncoding: 'gzip',
        blobContentType: 'application/json; charset=UTF-8',
      }
    });
  }
}
async function deleteBlob(container, blobName) {
  const containerClient = await blobServiceClient.getContainerClient(container);
  await containerClient.deleteBlob(blobName, {});
}
module.exports = {gzipAsync, gunzipAsync, flattenSpecs, SimpleBlob, blobServiceClient, deleteBlob};
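
A minimal usage sketch for SimpleBlob, assuming AzureWebJobsStorage is set and a dashboards container exists; the blob name here is made up:

const {SimpleBlob} = require('./utils.js');

(async () => {
  const blob = await SimpleBlob.create('dashboards', 'example.json');  // hypothetical blob name
  const existing = await blob.download();  // undefined if the blob does not exist yet
  const next = {hello: 'world', previous: existing || null};
  await blob.uploadGzipped(next);  // stored gzipped, with Content-Encoding: gzip
})();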