mirror of
https://github.com/microsoft/playwright.git
feat: preserve attachments in blob reports (#22605)
Attachments with `path` are stored as `resources/sha1.ext` zip entries and extracted under `report-dir/tmp` when merging. This way normal fs.readFile keeps working as before, even though the file path is different. Clients should rely on `attachment.name` rather than `attachment.path` when deriving user-visible titles in the UI. If this turns out not to be the case, we can reconsider later. #10437
parent 78acb5ef58
commit 24478be565
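
The entry-naming scheme described in the message boils down to roughly the following sketch (a hypothetical standalone helper, not part of the change, reusing the calculateSha1 and mime utilities that the diff below imports from playwright-core):

import { calculateSha1 } from 'playwright-core/lib/utils';
import { mime } from 'playwright-core/lib/utilsBundle';

// Hypothetical helper mirroring the entry-name scheme used in this change:
// sha1 of the attachment path string, plus an extension derived from the
// attachment's content type (falling back to '.dat').
function attachmentEntryName(attachmentPath: string, contentType: string): string {
  const sha1 = calculateSha1(attachmentPath);
  const extension = mime.getExtension(contentType) || 'dat';
  return `resources/${sha1}.${extension}`;
}

Note that the sha1 is computed over the path string, not the file contents, so each distinct attachment path maps to a distinct `resources/` entry.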
@@ -17,13 +17,15 @@
 import type { EventEmitter } from 'events';
 import fs from 'fs';
 import path from 'path';
-import { ManualPromise, ZipFile } from 'playwright-core/lib/utils';
+import os from 'os';
+import { ManualPromise, ZipFile, calculateSha1, removeFolders } from 'playwright-core/lib/utils';
+import { mime } from 'playwright-core/lib/utilsBundle';
 import { yazl } from 'playwright-core/lib/zipBundle';
 import { Readable } from 'stream';
-import type { FullConfig, FullResult, Reporter } from '../../types/testReporter';
+import type { FullConfig, FullResult, Reporter, TestResult } from '../../types/testReporter';
 import type { BuiltInReporter, FullConfigInternal } from '../common/config';
 import type { Suite } from '../common/test';
-import { TeleReporterReceiver, type JsonEvent, type JsonProject, type JsonSuite } from '../isomorphic/teleReceiver';
+import { TeleReporterReceiver, type JsonEvent, type JsonProject, type JsonSuite, type JsonTestResultEnd } from '../isomorphic/teleReceiver';
 import DotReporter from '../reporters/dot';
 import EmptyReporter from '../reporters/empty';
 import GitHubReporter from '../reporters/github';
@@ -44,7 +46,9 @@ type BlobReporterOptions = {
 export class BlobReporter extends TeleReporterEmitter {
   private _messages: any[] = [];
   private _options: BlobReporterOptions;
-  private _outputFile!: string;
+  private readonly _zipFile = new yazl.ZipFile();
+  private readonly _zipFinishPromise = new ManualPromise<undefined>();
 
   constructor(options: BlobReporterOptions) {
     super(message => this._messages.push(message));
@@ -53,14 +57,40 @@ export class BlobReporter extends TeleReporterEmitter {
 
   override onBegin(config: FullConfig<{}, {}>, suite: Suite): void {
     super.onBegin(config, suite);
-    this._computeOutputFileName(config);
+    this._initializeZipFile(config);
   }
 
   override async onEnd(result: FullResult): Promise<void> {
     await super.onEnd(result);
-    fs.mkdirSync(path.dirname(this._outputFile), { recursive: true });
     const lines = this._messages.map(m => JSON.stringify(m) + '\n');
-    await zipReport(this._outputFile, lines);
+    const content = Readable.from(lines);
+    this._zipFile.addReadStream(content, 'report.jsonl');
+    this._zipFile.end();
+    await this._zipFinishPromise;
+  }
+
+  override _serializeAttachments(attachments: TestResult['attachments']): TestResult['attachments'] {
+    return attachments.map(attachment => {
+      if (!attachment.path || !fs.statSync(attachment.path).isFile())
+        return attachment;
+      const sha1 = calculateSha1(attachment.path);
+      const extension = mime.getExtension(attachment.contentType) || 'dat';
+      const newPath = `resources/${sha1}.${extension}`;
+      this._zipFile.addFile(attachment.path, newPath);
+      return {
+        ...attachment,
+        path: newPath,
+      };
+    });
+  }
+
+  private _initializeZipFile(config: FullConfig) {
+    (this._zipFile as any as EventEmitter).on('error', error => this._zipFinishPromise.reject(error));
+    const zipFileName = this._computeOutputFileName(config);
+    fs.mkdirSync(path.dirname(zipFileName), { recursive: true });
+    this._zipFile.outputStream.pipe(fs.createWriteStream(zipFileName)).on('close', () => {
+      this._zipFinishPromise.resolve(undefined);
+    });
   }
 
   private _computeOutputFileName(config: FullConfig) {
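
Condensed sketch of the streaming write pattern used by BlobReporter above (illustrative helper names, not part of the change; yazl is the bundle re-exported from playwright-core/lib/zipBundle): entries are added while the run is in flight, end() is called from onEnd(), and completion is awaited once the output stream has been flushed to disk.

import fs from 'fs';
import { Readable } from 'stream';
import { yazl } from 'playwright-core/lib/zipBundle';

// Illustrative helper: write a blob-style zip containing report.jsonl plus
// any number of on-disk resource files under the given entry names.
async function writeBlobZip(zipPath: string, jsonlLines: string[], resources: { diskPath: string, entryName: string }[]): Promise<void> {
  const zipFile = new yazl.ZipFile();
  for (const { diskPath, entryName } of resources)
    zipFile.addFile(diskPath, entryName);  // e.g. 'resources/<sha1>.<ext>'
  zipFile.addReadStream(Readable.from(jsonlLines), 'report.jsonl');
  zipFile.end();
  await new Promise<void>((resolve, reject) => {
    zipFile.outputStream.pipe(fs.createWriteStream(zipPath))
        .on('close', () => resolve())
        .on('error', reject);
  });
}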
@@ -70,7 +100,7 @@ export class BlobReporter extends TeleReporterEmitter {
       const paddedNumber = `${config.shard.current}`.padStart(`${config.shard.total}`.length, '0');
       shardSuffix = `-${paddedNumber}-of-${config.shard.total}`;
     }
-    this._outputFile = path.join(outputDir, `report${shardSuffix}.zip`);
+    return path.join(outputDir, `report${shardSuffix}.zip`);
   }
 
   private _resolveOutputDir(): string {
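
For reference, the shard number is zero-padded to the width of the shard total so the per-shard report files sort lexicographically; a minimal standalone version of that logic (hypothetical helper, not part of the diff) looks like this:

// Hypothetical helper mirroring _computeOutputFileName's shard suffix logic.
function shardReportName(current: number, total: number): string {
  const paddedNumber = `${current}`.padStart(`${total}`.length, '0');
  return `report-${paddedNumber}-of-${total}.zip`;
}

// shardReportName(3, 12) === 'report-03-of-12.zip'
// shardReportName(11, 12) === 'report-11-of-12.zip'  // sorts after '03', as intended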
@@ -83,7 +113,12 @@
 
 export async function createMergedReport(config: FullConfigInternal, dir: string, reporterName?: string) {
   const shardFiles = await sortedShardFiles(dir);
-  const events = await mergeEvents(dir, shardFiles);
+  const resourceDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'playwright-report-'));
+  await fs.promises.mkdir(resourceDir, { recursive: true });
+  try {
+    const shardReports = await extractReports(dir, shardFiles, resourceDir);
+    const events = mergeEvents(shardReports);
+    patchAttachmentPaths(events, resourceDir);
 
   const defaultReporters: {[key in BuiltInReporter]: new(arg: any) => Reporter} = {
     dot: DotReporter,
@@ -102,7 +137,6 @@ export async function createMergedReport(config: FullConfigInternal, dir: string
   const options = {
     ...arg,
     configDir: process.cwd(),
-    outputFolder: dir
   };
 
   let reporter: Reporter | undefined;
@@ -116,25 +150,58 @@ export async function createMergedReport(config: FullConfigInternal, dir: string
   const receiver = new TeleReporterReceiver(path.sep, reporter);
   for (const event of events)
     await receiver.dispatch(event);
+  } finally {
+    await removeFolders([resourceDir]);
+  }
   console.log(`Done.`);
 }
 
-async function mergeEvents(dir: string, shardFiles: string[]) {
-  const events: JsonEvent[] = [];
-  const beginEvents: JsonEvent[] = [];
-  const endEvents: JsonEvent[] = [];
+async function extractReports(dir: string, shardFiles: string[], resourceDir: string): Promise<string[]> {
+  const reports = [];
   for (const file of shardFiles) {
     const zipFile = new ZipFile(path.join(dir, file));
     const entryNames = await zipFile.entries();
-    const reportEntryName = entryNames.find(e => e.endsWith('.jsonl'));
-    if (!reportEntryName)
-      throw new Error(`Zip file ${file} does not contain a .jsonl file`);
-    const reportJson = await zipFile.read(reportEntryName);
-    const parsedEvents = reportJson.toString().split('\n').filter(line => line.length).map(line => JSON.parse(line)) as JsonEvent[];
+    for (const entryName of entryNames) {
+      const content = await zipFile.read(entryName);
+      if (entryName.endsWith('report.jsonl')) {
+        reports.push(content.toString());
+      } else {
+        const fileName = path.join(resourceDir, entryName);
+        await fs.promises.mkdir(path.dirname(fileName), { recursive: true });
+        await fs.promises.writeFile(fileName, content);
+      }
+    }
+  }
+  return reports;
+}
+
+function patchAttachmentPaths(events: JsonEvent[], resourceDir: string) {
+  for (const event of events) {
+    if (event.method !== 'onTestEnd')
+      continue;
+    for (const attachment of (event.params.result as JsonTestResultEnd).attachments) {
+      if (!attachment.path)
+        continue;
+
+      attachment.path = path.join(resourceDir, attachment.path);
+    }
+  }
+}
+
+function parseEvents(reportJsonl: string): JsonEvent[] {
+  return reportJsonl.toString().split('\n').filter(line => line.length).map(line => JSON.parse(line)) as JsonEvent[];
+}
+
+function mergeEvents(shardReports: string[]) {
+  const events: JsonEvent[] = [];
+  const beginEvents: JsonEvent[] = [];
+  const endEvents: JsonEvent[] = [];
+  for (const reportJsonl of shardReports) {
+    const parsedEvents = parseEvents(reportJsonl);
     for (const event of parsedEvents) {
       // TODO: show remaining events?
       if (event.method === 'onError')
-        throw new Error('Error in shard: ' + file);
+        throw new Error('Error in shard');
       if (event.method === 'onBegin')
         beginEvents.push(event);
       else if (event.method === 'onEnd')
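
As the commit message promises, once patchAttachmentPaths() has rewritten attachment.path to point at the copy extracted under the temporary resource directory, downstream reporters can keep reading path attachments with plain fs calls. A minimal consumer-side sketch (hypothetical helper, assuming only Node's fs):

import fs from 'fs';

// Hypothetical consumer helper: body attachments are used as-is, path
// attachments are read from disk, which works after merging because the
// zip entry was extracted before events are dispatched to the reporter.
async function readAttachmentBody(attachment: { path?: string, body?: Buffer }): Promise<Buffer | undefined> {
  if (attachment.body)
    return attachment.body;
  if (attachment.path)
    return await fs.promises.readFile(attachment.path);
  return undefined;
}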
@@ -142,7 +209,6 @@ async function mergeEvents(dir: string, shardFiles: string[]) {
       else
         events.push(event);
     }
-
   }
   return [mergeBeginEvents(beginEvents), ...events, mergeEndEvents(endEvents)];
 }
@@ -212,17 +278,3 @@ async function sortedShardFiles(dir: string) {
   const files = await fs.promises.readdir(dir);
   return files.filter(file => file.endsWith('.zip')).sort();
 }
-
-async function zipReport(zipFileName: string, lines: string[]) {
-  const zipFile = new yazl.ZipFile();
-  const result = new ManualPromise<undefined>();
-  (zipFile as any as EventEmitter).on('error', error => result.reject(error));
-  // TODO: feed events on the fly.
-  const content = Readable.from(lines);
-  zipFile.addReadStream(content, 'report.jsonl');
-  zipFile.end();
-  zipFile.outputStream.pipe(fs.createWriteStream(zipFileName)).on('close', () => {
-    result.resolve(undefined);
-  });
-  await result;
-}
@@ -193,10 +193,14 @@ export class TeleReporterEmitter implements Reporter {
       duration: result.duration,
       status: result.status,
       errors: result.errors,
-      attachments: result.attachments,
+      attachments: this._serializeAttachments(result.attachments),
     };
   }
 
+  _serializeAttachments(attachments: TestResult['attachments']): TestResult['attachments'] {
+    return attachments;
+  }
+
   private _serializeStepStart(step: TestStep): JsonTestStepStart {
     return {
       id: (step as any)[idSymbol],
@@ -28,17 +28,17 @@ const test = baseTest.extend<{
   showReport: (reportFolder?: string) => Promise<void>,
   mergeReports: (reportFolder: string, env?: NodeJS.ProcessEnv, options?: RunOptions) => Promise<CliRunResult>
 }>({
-  showReport: async ({ page }, use, testInfo) => {
+  showReport: async ({ page }, use) => {
     let server: HttpServer | undefined;
     await use(async (reportFolder?: string) => {
-      reportFolder ??= testInfo.outputPath('playwright-report');
+      reportFolder ??= test.info().outputPath('playwright-report');
       server = startHtmlReportServer(reportFolder) as HttpServer;
       const location = await server.start();
       await page.goto(location);
     });
     await server?.stop();
   },
-  mergeReports: async ({ childProcess, page }, use, testInfo) => {
+  mergeReports: async ({ childProcess }, use) => {
     await use(async (reportFolder: string, env: NodeJS.ProcessEnv = {}, options: RunOptions = {}) => {
       const command = ['node', cliEntrypoint, 'merge-reports', reportFolder];
       if (options.additionalArgs)
@@ -47,7 +47,7 @@ const test = baseTest.extend<{
       const testProcess = childProcess({
         command,
         env: cleanEnv(env),
-        // cwd,
+        cwd: test.info().outputDir,
       });
       const { exitCode } = await testProcess.exited;
       return { exitCode, output: testProcess.output.toString() };
@@ -110,7 +110,7 @@ test('should merge into html', async ({ runInlineTest, mergeReports, showReport,
   const { exitCode } = await mergeReports(reportDir, {}, { additionalArgs: ['--reporter', 'html'] });
   expect(exitCode).toBe(0);
 
-  await showReport(reportDir);
+  await showReport();
 
   await expect(page.locator('.subnav-item:has-text("All") .counter')).toHaveText('10');
   await expect(page.locator('.subnav-item:has-text("Passed") .counter')).toHaveText('3');
@@ -169,7 +169,7 @@ test('be able to merge incomplete shards', async ({ runInlineTest, mergeReports,
   const { exitCode } = await mergeReports(reportDir, {}, { additionalArgs: ['--reporter', 'html'] });
   expect(exitCode).toBe(0);
 
-  await showReport(reportDir);
+  await showReport();
 
   await expect(page.locator('.subnav-item:has-text("All") .counter')).toHaveText('6');
   await expect(page.locator('.subnav-item:has-text("Passed") .counter')).toHaveText('2');
@@ -257,3 +257,57 @@ test('merge into list report by default', async ({ runInlineTest, mergeReports }
     `9 : - 10 b.test.js:9:12 › skipped 2`
   ]);
 });
+
+test('preserve attachments', async ({ runInlineTest, mergeReports, showReport, page }) => {
+  test.slow();
+  const reportDir = test.info().outputPath('blob-report');
+  const files = {
+    'playwright.config.ts': `
+      module.exports = {
+        retries: 1,
+        reporter: [['blob', { outputDir: '${reportDir.replace(/\\/g, '/')}' }]]
+      };
+    `,
+    'a.test.js': `
+      import { test, expect } from '@playwright/test';
+      import fs from 'fs';
+
+      test('first', async ({}) => {
+        const attachmentPath = test.info().outputPath('foo.txt');
+        fs.writeFileSync(attachmentPath, 'hello!');
+        await test.info().attach('file-attachment', {path: attachmentPath});
+
+        console.log('console info');
+        console.error('console error');
+      });
+      test('failing 1', async ({}) => {
+        await test.info().attach('text-attachment', { body: 'hi!' });
+        expect(1).toBe(2);
+      });
+      test.skip('skipped 1', async ({}) => {});
+    `,
+    'b.test.js': `
+      import { test, expect } from '@playwright/test';
+      test('math 2', async ({}) => { });
+      test('failing 2', async ({}) => {
+        expect(1).toBe(2);
+      });
+      test.skip('skipped 2', async ({}) => {});
+    `
+  };
+  await runInlineTest(files, { shard: `1/2` });
+
+  const reportFiles = await fs.promises.readdir(reportDir);
+  reportFiles.sort();
+  expect(reportFiles).toEqual(['report-1-of-2.zip']);
+  const { exitCode } = await mergeReports(reportDir, {}, { additionalArgs: ['--reporter', 'html'] });
+  expect(exitCode).toBe(0);
+
+  await showReport();
+  await page.getByText('first').click();
+  await expect(page.getByText('file-attachment')).toBeVisible();
+  await page.goBack();
+
+  await page.getByText('failing 1').click();
+  await expect(page.getByText('\'text-attachment\', { body: \'hi!\'')).toBeVisible();
+});