mirror of https://github.com/microsoft/playwright.git (synced 2025-06-26 21:40:17 +00:00)

chore(merge): read test events from one file at a time (#26333)

Reference: https://github.com/microsoft/playwright/issues/24451

parent 7f1f62795d
commit 0e20d352cf
@@ -30,10 +30,16 @@ import { relativeFilePath } from '../util';
 
 type StatusCallback = (message: string) => void;
 
+type ReportData = {
+  idsPatcher: IdsPatcher;
+  reportFile: string;
+};
+
 export async function createMergedReport(config: FullConfigInternal, dir: string, reporterDescriptions: ReporterDescription[]) {
   const reporters = await createReporters(config, 'merge', reporterDescriptions);
   const multiplexer = new Multiplexer(reporters);
   const receiver = new TeleReporterReceiver(path.sep, multiplexer, false, config.config);
+  const stringPool = new StringInternPool();
 
   let printStatus: StatusCallback = () => {};
   if (!multiplexer.printsToStdio()) {
@@ -44,10 +50,10 @@ export async function createMergedReport(config: FullConfigInternal, dir: string
   const shardFiles = await sortedShardFiles(dir);
   if (shardFiles.length === 0)
     throw new Error(`No report files found in ${dir}`);
-  const events = await mergeEvents(dir, shardFiles, printStatus);
-  patchAttachmentPaths(events, dir);
-
-  printStatus(`processing ${events.length} test events`);
-  for (const event of events) {
-    if (event.method === 'onEnd')
-      printStatus(`building final report`);
+  const eventData = await mergeEvents(dir, shardFiles, stringPool, printStatus);
+  printStatus(`processing test events`);
+
+  const dispatchEvents = async (events: JsonEvent[]) => {
+    for (const event of events) {
+      if (event.method === 'onEnd')
+        printStatus(`building final report`);
@@ -55,6 +61,18 @@ export async function createMergedReport(config: FullConfigInternal, dir: string
-    if (event.method === 'onEnd')
-      printStatus(`finished building report`);
-  }
+      if (event.method === 'onEnd')
+        printStatus(`finished building report`);
+    }
+  };
+
+  await dispatchEvents(eventData.prologue);
+  for (const { reportFile, idsPatcher } of eventData.reports) {
+    const reportJsonl = await fs.promises.readFile(reportFile);
+    const events = parseTestEvents(reportJsonl);
+    new JsonStringInternalizer(stringPool).traverse(events);
+    idsPatcher.patchEvents(events);
+    patchAttachmentPaths(events, dir);
+    await dispatchEvents(events);
+  }
+  await dispatchEvents(eventData.epilogue);
 }
 
 function patchAttachmentPaths(events: JsonEvent[], resourceDir: string) {
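The core of the change is visible in the two hunks above: instead of buffering every shard's test events in one big array, createMergedReport now dispatches the prologue events, then reads each report's test events back from a file on disk, dispatches them, and releases them before touching the next file, and finally dispatches the epilogue. A minimal standalone sketch of that pattern (hypothetical names, not the Playwright API):

    import fs from 'fs';

    type JsonEvent = { method: string, params?: any };

    async function streamReports(
      prologue: JsonEvent[],
      reportFiles: string[],
      epilogue: JsonEvent[],
      dispatch: (event: JsonEvent) => Promise<void>,
    ) {
      for (const event of prologue)
        await dispatch(event);
      for (const file of reportFiles) {
        // Only one report's events are resident in memory at a time.
        const lines = (await fs.promises.readFile(file, 'utf8')).split('\n');
        for (const line of lines.filter(l => l.length))
          await dispatch(JSON.parse(line) as JsonEvent);
      }
      for (const event of epilogue)
        await dispatch(event);
    }

In the real code, `eventData.prologue`, `eventData.reports` and `eventData.epilogue` are produced by the reworked mergeEvents shown further down in this diff.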
@@ -70,15 +88,27 @@ function patchAttachmentPaths(events: JsonEvent[], resourceDir: string) {
   }
 }
 
-function parseEvents(reportJsonl: Buffer): JsonEvent[] {
-  return reportJsonl.toString().split('\n').filter(line => line.length).map(line => JSON.parse(line)) as JsonEvent[];
+const commonEventNames = ['onBlobReportMetadata', 'onConfigure', 'onProject', 'onBegin', 'onEnd'];
+const commonEvents = new Set(commonEventNames);
+const commonEventRegex = new RegExp(`${commonEventNames.join('|')}`);
+
+function parseCommonEvents(reportJsonl: Buffer): JsonEvent[] {
+  return reportJsonl.toString().split('\n')
+      .filter(line => commonEventRegex.test(line)) // quick filter
+      .map(line => JSON.parse(line) as JsonEvent)
+      .filter(event => commonEvents.has(event.method));
+}
+
+function parseTestEvents(reportJsonl: Buffer): JsonEvent[] {
+  return reportJsonl.toString().split('\n')
+      .filter(line => line.length)
+      .map(line => JSON.parse(line) as JsonEvent)
+      .filter(event => !commonEvents.has(event.method));
 }
 
-async function extractAndParseReports(dir: string, shardFiles: string[], stringPool: StringInternPool, printStatus: StatusCallback) {
-  const shardEvents: { file: string, metadata: BlobReportMetadata, parsedEvents: JsonEvent[] }[] = [];
+async function extractAndParseReports(dir: string, shardFiles: string[], internalizer: JsonStringInternalizer, printStatus: StatusCallback) {
+  const shardEvents: { file: string, localPath: string, metadata: BlobReportMetadata, parsedEvents: JsonEvent[] }[] = [];
   await fs.promises.mkdir(path.join(dir, 'resources'), { recursive: true });
 
-  const internalizer = new JsonStringInternalizer(stringPool);
-
   for (const file of shardFiles) {
     const absolutePath = path.join(dir, file);
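The old single parseEvents is split into two complementary parsers driven by the same five method names: parseCommonEvents keeps only the lightweight lifecycle events, parseTestEvents keeps everything else. The `// quick filter` regex is a cheap prefilter with a caveat: it matches the method names anywhere in a line, including inside payload strings, which is why the exact Set check on event.method follows the JSON.parse. A runnable illustration (sample data invented for this sketch):

    const names = ['onBlobReportMetadata', 'onConfigure', 'onProject', 'onBegin', 'onEnd'];
    const nameSet = new Set(names);
    const quick = new RegExp(names.join('|'));

    const jsonl = [
      '{"method":"onConfigure","params":{}}',
      '{"method":"onTestBegin","params":{"note":"this line mentions onEnd"}}',
      '{"method":"onEnd","params":{}}',
    ].join('\n');

    const common = jsonl.split('\n')
        .filter(line => quick.test(line))             // cheap prefilter: skips JSON.parse for most test-event lines
        .map(line => JSON.parse(line) as { method: string })
        .filter(event => nameSet.has(event.method));  // exact check drops the false positive
    console.log(common.map(e => e.method));           // ['onConfigure', 'onEnd']

The regex lets the merger avoid JSON.parse on the bulk of the JSONL lines, which are test events it no longer wants to hold in memory at this stage.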
@@ -86,22 +116,22 @@ async function extractAndParseReports(dir: string, shardFiles: string[], stringP
     const zipFile = new ZipFile(absolutePath);
     const entryNames = await zipFile.entries();
     for (const entryName of entryNames.sort()) {
+      const fileName = path.join(dir, entryName);
       const content = await zipFile.read(entryName);
       if (entryName.endsWith('.jsonl')) {
-        const parsedEvents = parseEvents(content);
+        const parsedEvents = parseCommonEvents(content);
         // Passing reviver to JSON.parse doesn't work, as the original strings
         // keep being used. To work around that we traverse the parsed events
         // as a post-processing step.
         internalizer.traverse(parsedEvents);
         shardEvents.push({
           file,
+          localPath: fileName,
           metadata: findMetadata(parsedEvents, file),
           parsedEvents
         });
-      } else {
-        const fileName = path.join(dir, entryName);
-        await fs.promises.writeFile(fileName, content);
       }
+      await fs.promises.writeFile(fileName, content);
     }
     zipFile.close();
   }
@@ -117,14 +147,14 @@ function findMetadata(events: JsonEvent[], file: string): BlobReportMetadata {
   return metadata;
 }
 
-async function mergeEvents(dir: string, shardReportFiles: string[], printStatus: StatusCallback) {
-  const stringPool = new StringInternPool();
-  const events: JsonEvent[] = [];
+async function mergeEvents(dir: string, shardReportFiles: string[], stringPool: StringInternPool, printStatus: StatusCallback) {
+  const internalizer = new JsonStringInternalizer(stringPool);
+
   const configureEvents: JsonEvent[] = [];
   const projectEvents: JsonEvent[] = [];
   const endEvents: JsonEvent[] = [];
 
-  const blobs = await extractAndParseReports(dir, shardReportFiles, stringPool, printStatus);
+  const blobs = await extractAndParseReports(dir, shardReportFiles, internalizer, printStatus);
   // Sort by (report name; shard; file name), so that salt generation below is deterministic when:
   // - report names are unique;
   // - report names are missing;
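The string pool is hoisted out of mergeEvents into createMergedReport because interning now happens in two places: here, for the eagerly parsed common events, and later, when each report's test events are parsed one file at a time. Sharing one pool keeps identical strings from different shards collapsed to a single instance across both passes. A minimal sketch of the interning idea (hypothetical implementation; Playwright's actual classes are StringInternPool and JsonStringInternalizer):

    class StringInternPoolSketch {
      private pool = new Map<string, string>();

      intern(s: string): string {
        const existing = this.pool.get(s);
        if (existing !== undefined)
          return existing;
        this.pool.set(s, s);
        return s;
      }
    }

    // A traversal in the spirit of JsonStringInternalizer: replace every
    // string value in a parsed JSON tree with its interned instance.
    function internStrings(value: any, pool: StringInternPoolSketch): any {
      if (typeof value === 'string')
        return pool.intern(value);
      if (Array.isArray(value)) {
        for (let i = 0; i < value.length; i++)
          value[i] = internStrings(value[i], pool);
      } else if (value && typeof value === 'object') {
        for (const key of Object.keys(value))
          value[key] = internStrings(value[key], pool);
      }
      return value;
    }

The in-diff comment explains why a post-processing traversal is needed at all: a JSON.parse reviver cannot do this, since the original strings keep being used.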
@@ -145,7 +175,9 @@ async function mergeEvents(dir: string, shardReportFiles: string[], printStatus:
 
   printStatus(`merging events`);
 
-  for (const { file, parsedEvents, metadata } of blobs) {
+  const reports: ReportData[] = [];
+
+  for (const { file, parsedEvents, metadata, localPath } of blobs) {
     // Generate unique salt for each blob.
     const sha1 = calculateSha1(metadata.name || path.basename(file)).substring(0, 16);
     let salt = sha1;
@@ -153,7 +185,8 @@ async function mergeEvents(dir: string, shardReportFiles: string[], printStatus:
       salt = sha1 + '-' + i;
     saltSet.add(salt);
 
-    new IdsPatcher(stringPool, metadata.name, salt).patchEvents(parsedEvents);
+    const idsPatcher = new IdsPatcher(stringPool, metadata.name, salt);
+    idsPatcher.patchEvents(parsedEvents);
 
     for (const event of parsedEvents) {
       if (event.method === 'onConfigure')
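The IdsPatcher instance is now kept in a local so it can be stored in ReportData and reapplied later to the test events streamed from disk. The surrounding salt logic is unchanged, but the collision loop falls between these two hunks and is not fully visible; a plausible reconstruction of the scheme (node's crypto standing in for Playwright's calculateSha1 util):

    import { createHash } from 'crypto';

    // Deterministic per-blob salt: a 16-char sha1 prefix of the report
    // name (or file name), with a numeric suffix to break ties between
    // identically named blobs.
    function uniqueSalt(name: string, saltSet: Set<string>): string {
      const sha1 = createHash('sha1').update(name).digest('hex').substring(0, 16);
      let salt = sha1;
      for (let i = 0; saltSet.has(salt); i++)
        salt = sha1 + '-' + i;
      saltSet.add(salt);
      return salt;
    }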
@@ -162,18 +195,27 @@ async function mergeEvents(dir: string, shardReportFiles: string[], printStatus:
         projectEvents.push(event);
       else if (event.method === 'onEnd')
         endEvents.push(event);
-      else if (event.method !== 'onBlobReportMetadata' && event.method !== 'onBegin')
-        events.push(event);
     }
+
+    // Save information about the reports to stream their test events later.
+    reports.push({
+      idsPatcher,
+      reportFile: localPath,
+    });
   }
-  return [
+
+  return {
+    prologue: [
       mergeConfigureEvents(configureEvents),
       ...projectEvents,
       { method: 'onBegin', params: undefined },
-    ...events,
+    ],
+    reports,
+    epilogue: [
       mergeEndEvents(endEvents),
       { method: 'onExit', params: undefined },
-  ];
+    ]
+  };
 }
 
 function mergeConfigureEvents(configureEvents: JsonEvent[]): JsonEvent {
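With this last hunk, mergeEvents no longer returns one flat event array; it returns a three-part structure whose shape can be reconstructed from the diff (JsonEvent and ReportData as defined earlier in the file; the comments are a reading of the code, not authoritative documentation):

    type MergedEventData = {
      prologue: JsonEvent[];  // merged onConfigure, onProject events, synthetic onBegin
      reports: ReportData[];  // one { idsPatcher, reportFile } per shard blob
      epilogue: JsonEvent[];  // merged onEnd, synthetic onExit
    };

createMergedReport consumes it exactly as shown in the first hunks: dispatch the prologue, stream and dispatch each report's test events from its reportFile, then dispatch the epilogue.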