chore: refactor screencast tests (#4007)

Pavel Feldman 2020-09-29 18:52:30 -07:00 committed by GitHub
parent 20b83ee0b4
commit d658b687ca
2 changed files with 139 additions and 189 deletions


@@ -14,84 +14,105 @@
  * limitations under the License.
  */
-import { fixtures as playwrightFixtures, config } from './fixtures';
-import type { Page, Browser } from '..';
+import { fixtures } from './fixtures';
 import fs from 'fs';
 import path from 'path';
-import { TestServer } from '../utils/testserver';
+import { spawnSync } from 'child_process';
+import { PNG } from 'pngjs';

-type WorkerState = {
-  videoPlayerBrowser: Browser,
-};
-
-type TestState = {
-  videoPlayer: VideoPlayer;
-  relativeArtifactsPath: string;
-  videoDir: string;
-};
-
-const fixtures = playwrightFixtures.declareWorkerFixtures<WorkerState>().declareTestFixtures<TestState>();
-const { it, expect, describe, defineTestFixture, defineWorkerFixture, overrideWorkerFixture } = fixtures;
-
-overrideWorkerFixture('browser', async ({ browserType, defaultBrowserOptions }, test) => {
-  const browser = await browserType.launch({
-    ...defaultBrowserOptions,
-    // Make sure videos are stored on the same volume as the test output dir.
-    artifactsPath: path.join(config.outputDir, '.screencast'),
-  });
-  await test(browser);
-  await browser.close();
-});
-
-defineWorkerFixture('videoPlayerBrowser', async ({playwright}, runTest) => {
-  // WebKit on Mac & Windows cannot replay webm/vp8 video, is unrelyable
-  // on Linux (times out) and in Firefox, so we always launch chromium for
-  // playback.
-  const browser = await playwright.chromium.launch();
-  await runTest(browser);
-  await browser.close();
-});
-
-defineTestFixture('videoPlayer', async ({videoPlayerBrowser, server}, test) => {
-  const page = await videoPlayerBrowser.newPage();
-  await test(new VideoPlayer(page, server));
-  await page.close();
-});
-
-defineTestFixture('relativeArtifactsPath', async ({ browserType, testInfo }, runTest) => {
-  const sanitizedTitle = testInfo.title.replace(/[^\w\d]+/g, '_');
-  const relativeArtifactsPath = `${browserType.name()}-${sanitizedTitle}`;
-  await runTest(relativeArtifactsPath);
-});
-
-defineTestFixture('videoDir', async ({ relativeArtifactsPath }, runTest) => {
-  await runTest(path.join(config.outputDir, '.screencast', relativeArtifactsPath));
-});
+const { it, expect, describe } = fixtures;
+
+let ffmpegName = '';
+if (process.platform === 'win32')
+  ffmpegName = process.arch === 'ia32' ? 'ffmpeg-win32' : 'ffmpeg-win64';
+else if (process.platform === 'darwin')
+  ffmpegName = 'ffmpeg-mac';
+else if (process.platform === 'linux')
+  ffmpegName = 'ffmpeg-linux';
+const ffmpeg = path.join(__dirname, '..', 'third_party', 'ffmpeg', ffmpegName);
+
+export class VideoPlayer {
+  fileName: string;
+  output: string;
+  duration: number;
+  frames: number;
+  videoWidth: number;
+  videoHeight: number;
+  cache = new Map<number, PNG>();
+
+  constructor(fileName: string) {
+    this.fileName = fileName;
+    this.output = spawnSync(ffmpeg, ['-i', this.fileName, `${this.fileName}-%03d.png`]).stderr.toString();
+
+    const lines = this.output.split('\n');
+    let framesLine = lines.find(l => l.startsWith('frame='))!;
+    framesLine = framesLine.substring(framesLine.lastIndexOf('frame='));
+    const framesMatch = framesLine.match(/frame=\s+(\d+)/);
+    const streamLine = lines.find(l => l.trim().startsWith('Stream #0:0'));
+    const resolutionMatch = streamLine.match(/, (\d+)x(\d+),/);
+    const durationMatch = lines.find(l => l.trim().startsWith('Duration'))!.match(/Duration: (\d+):(\d\d):(\d\d.\d\d)/);
+    this.duration = (((parseInt(durationMatch![1], 10) * 60) + parseInt(durationMatch![2], 10)) * 60 + parseFloat(durationMatch![3])) * 1000;
+    this.frames = parseInt(framesMatch![1], 10);
+    this.videoWidth = parseInt(resolutionMatch![1], 10);
+    this.videoHeight = parseInt(resolutionMatch![2], 10);
+  }
+
+  seekFirstNonEmptyFrame(offset?: { x: number, y: number } | undefined): PNG | undefined {
+    for (let f = 1; f <= this.frames; ++f) {
+      const frame = this.frame(f, { x: 0, y: 0 });
+      let hasColor = false;
+      for (let i = 0; i < frame.data.length; i += 4) {
+        if (frame.data[i + 0] < 230 || frame.data[i + 1] < 230 || frame.data[i + 2] < 230) {
+          hasColor = true;
+          break;
+        }
+      }
+      if (hasColor)
+        return this.frame(f, offset);
+    }
+  }
+
+  seekLastFrame(offset?: { x: number, y: number }): PNG {
+    return this.frame(this.frames, offset);
+  }
+
+  frame(frame: number, offset = { x: 10, y: 10 }): PNG {
+    if (!this.cache.has(frame)) {
+      const gap = '0'.repeat(3 - String(frame).length);
+      const buffer = fs.readFileSync(`${this.fileName}-${gap}${frame}.png`);
+      this.cache.set(frame, PNG.sync.read(buffer));
+    }
+    const decoded = this.cache.get(frame);
+    const dst = new PNG({ width: 10, height: 10 });
+    PNG.bitblt(decoded, dst, offset.x, offset.y, 10, 10, 0, 0);
+    return dst;
+  }
+}

 function almostRed(r, g, b, alpha) {
   expect(r).toBeGreaterThan(240);
   expect(g).toBeLessThan(50);
   expect(b).toBeLessThan(50);
   expect(alpha).toBe(255);
 }

 function almostBlack(r, g, b, alpha) {
-  expect(r).toBeLessThan(10);
-  expect(g).toBeLessThan(10);
-  expect(b).toBeLessThan(10);
+  expect(r).toBeLessThan(30);
+  expect(g).toBeLessThan(30);
+  expect(b).toBeLessThan(30);
   expect(alpha).toBe(255);
 }

 function almostGrey(r, g, b, alpha) {
-  expect(r).toBeGreaterThanOrEqual(90);
-  expect(g).toBeGreaterThanOrEqual(90);
-  expect(b).toBeGreaterThanOrEqual(90);
-  expect(r).toBeLessThan(110);
-  expect(g).toBeLessThan(110);
-  expect(b).toBeLessThan(110);
+  expect(r).toBeGreaterThan(70);
+  expect(g).toBeGreaterThan(70);
+  expect(b).toBeGreaterThan(70);
+  expect(r).toBeLessThan(130);
+  expect(g).toBeLessThan(130);
+  expect(b).toBeLessThan(130);
   expect(alpha).toBe(255);
 }

-function expectAll(pixels, rgbaPredicate) {
+function expectAll(pixels: Buffer, rgbaPredicate) {
   const checkPixel = i => {
     const r = pixels[i];
     const g = pixels[i + 1];
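The new VideoPlayer above shells out to the bundled ffmpeg binary, dumps every frame of the recording to `<fileName>-NNN.png`, and parses the duration, frame count and resolution out of ffmpeg's stderr; frame() then reads one dumped PNG and cuts a 10x10 patch at the given offset. A minimal usage sketch, assuming the exported class is in scope and that /tmp/recording.webm stands in for a real recording:

// Sketch only: drive the ffmpeg-backed VideoPlayer against an existing recording.
// '/tmp/recording.webm' is a made-up path, not something this diff produces.
const player = new VideoPlayer('/tmp/recording.webm');

// All of these are parsed from ffmpeg's stderr in the constructor.
console.log(`duration: ${player.duration}ms, frames: ${player.frames}`);
console.log(`size: ${player.videoWidth}x${player.videoHeight}`);

// seekLastFrame()/frame() return a 10x10 PNG patch cut out of the decoded frame.
const patch = player.seekLastFrame({ x: 0, y: 0 });
console.log(patch.data.length); // 10 * 10 * 4 RGBA bytes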
@@ -109,94 +130,18 @@ function expectAll(pixels, rgbaPredicate) {
   }
 }

-async function findVideo(videoDir: string) {
-  const files = await fs.promises.readdir(videoDir);
+function findVideo(videoDir: string) {
+  const files = fs.readdirSync(videoDir);
   return path.join(videoDir, files.find(file => file.endsWith('webm')));
 }

-async function findVideos(videoDir: string) {
-  const files = await fs.promises.readdir(videoDir);
+function findVideos(videoDir: string) {
+  const files = fs.readdirSync(videoDir);
   return files.filter(file => file.endsWith('webm')).map(file => path.join(videoDir, file));
 }

-class VideoPlayer {
-  private readonly _page: Page;
-  private readonly _server: TestServer;
-
-  constructor(page: Page, server: TestServer) {
-    this._page = page;
-    this._server = server;
-  }
-
-  async load(videoFile: string) {
-    const servertPath = '/v.webm';
-    this._server.setRoute(servertPath, (req, response) => {
-      this._server.serveFile(req, response, videoFile);
-    });
-    await this._page.goto(this._server.PREFIX + '/player.html');
-  }
-
-  async duration() {
-    return await this._page.$eval('video', (v: HTMLVideoElement) => v.duration);
-  }
-
-  async videoWidth() {
-    return await this._page.$eval('video', (v: HTMLVideoElement) => v.videoWidth);
-  }
-
-  async videoHeight() {
-    return await this._page.$eval('video', (v: HTMLVideoElement) => v.videoHeight);
-  }
-
-  async seekFirstNonEmptyFrame() {
-    await this._page.evaluate(async () => await (window as any).playToTheEnd());
-    while (true) {
-      await this._page.evaluate(async () => await (window as any).playOneFrame());
-      const ended = await this._page.$eval('video', (video: HTMLVideoElement) => video.ended);
-      if (ended)
-        throw new Error('All frames are empty');
-      const pixels = await this.pixels();
-      // Quick check if all pixels are almost white. In Firefox blank page is not
-      // truly white so whe check approximately.
-      if (!pixels.every(p => p > 245))
-        return;
-    }
-  }
-
-  async countFrames() {
-    return await this._page.evaluate(async () => await (window as any).countFrames());
-  }
-
-  async currentTime() {
-    return await this._page.$eval('video', (v: HTMLVideoElement) => v.currentTime);
-  }
-
-  async playOneFrame() {
-    return await this._page.evaluate(async () => await (window as any).playOneFrame());
-  }
-
-  async seekLastFrame() {
-    return await this._page.evaluate(async x => await (window as any).seekLastFrame());
-  }
-
-  async pixels(point = {x: 0, y: 0}) {
-    const pixels = await this._page.$eval('video', (video: HTMLVideoElement, point) => {
-      const canvas = document.createElement('canvas');
-      if (!video.videoWidth || !video.videoHeight)
-        throw new Error('Video element is empty');
-      canvas.width = video.videoWidth;
-      canvas.height = video.videoHeight;
-      const context = canvas.getContext('2d');
-      context.drawImage(video, 0, 0);
-      const imgd = context.getImageData(point.x, point.y, 10, 10);
-      return Array.from(imgd.data);
-    }, point);
-    return pixels;
-  }
-}
-
 describe('screencast', suite => {
   suite.slow();
-  suite.flaky('We should migrate these to ffmpeg');
 }, () => {
   it('should require artifactsPath', async ({browserType, defaultBrowserOptions}) => {
     const browser = await browserType.launch({
@@ -208,11 +153,15 @@ describe('screencast', suite => {
     await browser.close();
   });

-  it('should capture static page', async ({browser, videoPlayer, relativeArtifactsPath, videoDir}) => {
+  it('should capture static page', (test, { browserName }) => {
+    test.fixme(browserName === 'firefox', 'Always clips to square');
+  }, async ({browser, testRelativeArtifactsPath, testOutputPath}) => {
+    const size = { width: 320, height: 240 };
     const context = await browser.newContext({
-      relativeArtifactsPath,
+      relativeArtifactsPath: testRelativeArtifactsPath,
       recordVideos: true,
-      videoSize: { width: 320, height: 240 }
+      viewport: size,
+      videoSize: size
     });
     const page = await context.newPage();
@@ -220,22 +169,27 @@ describe('screencast', suite => {
     await new Promise(r => setTimeout(r, 1000));
     await context.close();

-    const videoFile = await findVideo(videoDir);
-    await videoPlayer.load(videoFile);
-    const duration = await videoPlayer.duration();
+    const videoFile = findVideo(testOutputPath(''));
+    const videoPlayer = new VideoPlayer(videoFile);
+    const duration = videoPlayer.duration;
     expect(duration).toBeGreaterThan(0);

-    expect(await videoPlayer.videoWidth()).toBe(320);
-    expect(await videoPlayer.videoHeight()).toBe(240);
+    expect(videoPlayer.videoWidth).toBe(320);
+    expect(videoPlayer.videoHeight).toBe(240);

-    await videoPlayer.seekLastFrame();
-    const pixels = await videoPlayer.pixels();
-    expectAll(pixels, almostRed);
+    {
+      const pixels = videoPlayer.seekLastFrame().data;
+      expectAll(pixels, almostRed);
+    }
+    {
+      const pixels = videoPlayer.seekLastFrame({ x: 300, y: 0}).data;
+      expectAll(pixels, almostRed);
+    }
   });

-  it('should capture navigation', async ({browser, server, videoPlayer, relativeArtifactsPath, videoDir}) => {
+  it('should capture navigation', async ({browser, server, testRelativeArtifactsPath, testOutputPath}) => {
     const context = await browser.newContext({
-      relativeArtifactsPath,
+      relativeArtifactsPath: testRelativeArtifactsPath,
       recordVideos: true,
       videoSize: { width: 1280, height: 720 }
     });
@@ -247,20 +201,18 @@ describe('screencast', suite => {
     await new Promise(r => setTimeout(r, 1000));
     await context.close();

-    const videoFile = await findVideo(videoDir);
-    await videoPlayer.load(videoFile);
-    const duration = await videoPlayer.duration();
+    const videoFile = findVideo(testOutputPath(''));
+    const videoPlayer = new VideoPlayer(videoFile);
+    const duration = videoPlayer.duration;
     expect(duration).toBeGreaterThan(0);
     {
-      await videoPlayer.seekFirstNonEmptyFrame();
-      const pixels = await videoPlayer.pixels();
+      const pixels = videoPlayer.seekFirstNonEmptyFrame().data;
       expectAll(pixels, almostBlack);
     }
     {
-      await videoPlayer.seekLastFrame();
-      const pixels = await videoPlayer.pixels();
+      const pixels = videoPlayer.seekLastFrame().data;
       expectAll(pixels, almostGrey);
     }
   });
@@ -268,11 +220,11 @@ describe('screencast', suite => {
   it('should capture css transformation', (test, { browserName, platform, headful }) => {
     test.fail(browserName === 'webkit' && platform === 'win32', 'Does not work on WebKit Windows');
     test.fixme(headful, 'Fails on headful');
-  }, async ({browser, server, videoPlayer, relativeArtifactsPath, videoDir}) => {
+  }, async ({browser, server, testRelativeArtifactsPath, testOutputPath}) => {
     const size = { width: 320, height: 240 };
     // Set viewport equal to screencast frame size to avoid scaling.
     const context = await browser.newContext({
-      relativeArtifactsPath,
+      relativeArtifactsPath: testRelativeArtifactsPath,
       recordVideos: true,
       videoSize: size,
       viewport: size,
@@ -283,21 +235,20 @@ describe('screencast', suite => {
     await new Promise(r => setTimeout(r, 1000));
     await context.close();

-    const videoFile = await findVideo(videoDir);
-    await videoPlayer.load(videoFile);
-    const duration = await videoPlayer.duration();
+    const videoFile = findVideo(testOutputPath(''));
+    const videoPlayer = new VideoPlayer(videoFile);
+    const duration = videoPlayer.duration;
     expect(duration).toBeGreaterThan(0);
     {
-      await videoPlayer.seekLastFrame();
-      const pixels = await videoPlayer.pixels({x: 95, y: 45});
+      const pixels = videoPlayer.seekLastFrame({ x: 95, y: 45 }).data;
       expectAll(pixels, almostRed);
     }
   });

-  it('should work for popups', async ({browser, relativeArtifactsPath, videoDir, server}) => {
+  it('should work for popups', async ({browser, testRelativeArtifactsPath, testOutputPath, server}) => {
     const context = await browser.newContext({
-      relativeArtifactsPath,
+      relativeArtifactsPath: testRelativeArtifactsPath,
       recordVideos: true,
       videoSize: { width: 320, height: 240 }
     });
@@ -311,15 +262,15 @@ describe('screencast', suite => {
     await new Promise(r => setTimeout(r, 1000));
     await context.close();

-    const videoFiles = await findVideos(videoDir);
+    const videoFiles = findVideos(testOutputPath(''));
     expect(videoFiles.length).toBe(2);
   });

   it('should scale frames down to the requested size ', (test, parameters) => {
     test.fixme(parameters.headful, 'Fails on headful');
-  }, async ({browser, videoPlayer, relativeArtifactsPath, videoDir, server}) => {
+  }, async ({browser, testRelativeArtifactsPath, testOutputPath, server}) => {
     const context = await browser.newContext({
-      relativeArtifactsPath,
+      relativeArtifactsPath: testRelativeArtifactsPath,
       recordVideos: true,
       viewport: {width: 640, height: 480},
       // Set size to 1/2 of the viewport.
@@ -339,34 +290,33 @@ describe('screencast', suite => {
     await new Promise(r => setTimeout(r, 1000));
     await context.close();

-    const videoFile = await findVideo(videoDir);
-    await videoPlayer.load(videoFile);
-    const duration = await videoPlayer.duration();
+    const videoFile = findVideo(testOutputPath(''));
+    const videoPlayer = new VideoPlayer(videoFile);
+    const duration = videoPlayer.duration;
     expect(duration).toBeGreaterThan(0);

-    await videoPlayer.seekLastFrame();
     {
-      const pixels = await videoPlayer.pixels({x: 0, y: 0});
+      const pixels = videoPlayer.seekLastFrame({x: 0, y: 0}).data;
       expectAll(pixels, almostRed);
     }
     {
-      const pixels = await videoPlayer.pixels({x: 300, y: 0});
+      const pixels = videoPlayer.seekLastFrame({x: 300, y: 0}).data;
       expectAll(pixels, almostGrey);
     }
     {
-      const pixels = await videoPlayer.pixels({x: 0, y: 200});
+      const pixels = videoPlayer.seekLastFrame({x: 0, y: 200}).data;
       expectAll(pixels, almostGrey);
     }
     {
-      const pixels = await videoPlayer.pixels({x: 300, y: 200});
+      const pixels = videoPlayer.seekLastFrame({x: 300, y: 200}).data;
       expectAll(pixels, almostRed);
     }
   });

-  it('should use viewport as default size', async ({browser, videoPlayer, relativeArtifactsPath, videoDir}) => {
+  it('should use viewport as default size', async ({browser, testRelativeArtifactsPath, testOutputPath}) => {
     const size = {width: 800, height: 600};
     const context = await browser.newContext({
-      relativeArtifactsPath,
+      relativeArtifactsPath: testRelativeArtifactsPath,
       recordVideos: true,
       viewport: size,
     });
@@ -375,15 +325,15 @@ describe('screencast', suite => {
     await new Promise(r => setTimeout(r, 1000));
     await context.close();

-    const videoFile = await findVideo(videoDir);
-    await videoPlayer.load(videoFile);
-    expect(await videoPlayer.videoWidth()).toBe(size.width);
-    expect(await videoPlayer.videoHeight()).toBe(size.height);
+    const videoFile = findVideo(testOutputPath(''));
+    const videoPlayer = new VideoPlayer(videoFile);
+    expect(await videoPlayer.videoWidth).toBe(size.width);
+    expect(await videoPlayer.videoHeight).toBe(size.height);
   });

-  it('should be 1280x720 by default', async ({browser, videoPlayer, relativeArtifactsPath, videoDir}) => {
+  it('should be 1280x720 by default', async ({browser, testRelativeArtifactsPath, testOutputPath}) => {
     const context = await browser.newContext({
-      relativeArtifactsPath,
+      relativeArtifactsPath: testRelativeArtifactsPath,
       recordVideos: true,
     });
@@ -391,9 +341,9 @@ describe('screencast', suite => {
     await new Promise(r => setTimeout(r, 1000));
     await context.close();

-    const videoFile = await findVideo(videoDir);
-    await videoPlayer.load(videoFile);
-    expect(await videoPlayer.videoWidth()).toBe(1280);
-    expect(await videoPlayer.videoHeight()).toBe(720);
+    const videoFile = findVideo(testOutputPath(''));
+    const videoPlayer = new VideoPlayer(videoFile);
+    expect(await videoPlayer.videoWidth).toBe(1280);
+    expect(await videoPlayer.videoHeight).toBe(720);
   });
 });
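The tests above assert colors over the 10x10 pixel patches returned by the new VideoPlayer, using the almostRed/almostBlack/almostGrey predicates. The body of expectAll sits mostly outside the hunks shown here; for reference, a pixel check in the same spirit (a sketch only, with a made-up name, not the file's exact implementation):

// Sketch: run an rgba predicate such as almostRed or almostGrey over every
// pixel of a 10x10 patch returned by VideoPlayer.seekLastFrame().data.
function checkAllPixels(pixels: Buffer, rgbaPredicate: (r: number, g: number, b: number, a: number) => void) {
  for (let i = 0; i < pixels.length; i += 4)
    rgbaPredicate(pixels[i], pixels[i + 1], pixels[i + 2], pixels[i + 3]);
}

// e.g. checkAllPixels(videoPlayer.seekLastFrame().data, almostRed);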


@@ -114,7 +114,7 @@ DEPS['src/server/injected/'] = ['src/server/common/'];
 DEPS['src/server/electron/'] = [...DEPS['src/server/'], 'src/server/chromium/'];
 DEPS['src/server/playwright.ts'] = [...DEPS['src/server/'], 'src/server/chromium/', 'src/server/webkit/', 'src/server/firefox/'];
-DEPS['src/server.ts'] = DEPS['src/inprocess.ts'] = DEPS['src/browserServerImpl.ts'] = ['src/**'];
+DEPS['src/driver.ts'] = DEPS['src/inprocess.ts'] = DEPS['src/browserServerImpl.ts'] = ['src/**'];

 // Tracing is a client/server plugin, nothing should depend on it.
 DEPS['src/trace/'] = ['src/utils/', 'src/client/**', 'src/server/**'];
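The only change in this second file is the rename of the src/server.ts entry to src/driver.ts in the DEPS map that drives the repository's import checks. A simplified sketch of the kind of rule such a map encodes (an assumption about how the checker behaves, not its actual code; isAllowed is a made-up helper):

// Sketch (assumption, not the repo's actual checker): DEPS maps a source
// prefix to the prefixes it is allowed to import from.
const DEPS: { [prefix: string]: string[] } = {};
DEPS['src/driver.ts'] = ['src/**'];  // may import anything under src/
DEPS['src/trace/'] = ['src/utils/', 'src/client/**', 'src/server/**'];

function isAllowed(from: string, imported: string): boolean {
  const key = Object.keys(DEPS).find(prefix => from.startsWith(prefix));
  if (!key)
    return false;
  return DEPS[key].some(dep => dep.endsWith('**')
    ? imported.startsWith(dep.slice(0, -2))
    : imported.startsWith(dep));
}

console.log(isAllowed('src/trace/tracer.ts', 'src/server/page.ts'));  // true
console.log(isAllowed('src/trace/tracer.ts', 'src/cli/cli.ts'));      // false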