chore: extract tasks into separate methods (#20413)

parent 249d969d4b
commit 0c84d88127
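The diff below removes inline orchestration from Runner.runAllTests and moves it into small task modules under src/runner/. As a rough, self-contained sketch of the pattern being extracted (simplified names, not Playwright's actual TaskRunner API), each task registers a setup step that may return a teardown callback, and teardowns run in reverse order once the run finishes or fails:

// Minimal sketch of the "task with teardown" pattern; MiniTaskRunner is an
// illustrative name, not part of the commit.
type Teardown = () => Promise<void> | void;
type Task = { name: string; setup: () => Promise<Teardown | void> };

class MiniTaskRunner {
  private _tasks: Task[] = [];

  addTask(name: string, setup: () => Promise<Teardown | void>) {
    this._tasks.push({ name, setup });
  }

  async run(): Promise<'passed' | 'failed'> {
    const teardowns: Teardown[] = [];
    try {
      // Run every setup step in registration order, collecting teardowns.
      for (const task of this._tasks) {
        const teardown = await task.setup();
        if (teardown)
          teardowns.push(teardown);
      }
      return 'passed';
    } catch {
      return 'failed';
    } finally {
      // Tear down in reverse order, mirroring how setup/teardown pairs nest.
      for (const teardown of teardowns.reverse())
        await teardown();
    }
  }
}

// Usage mirroring the shape of the diff below: a "plugin setup" task whose
// return value tears the plugin down at the end of the run.
const runner = new MiniTaskRunner();
runner.addTask('plugin setup', async () => {
  console.log('setting up plugin');
  return () => console.log('tearing down plugin');
});
runner.run().then(status => console.log(status));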
@@ -1,6 +1,7 @@
[*]
../types.ts
./utilsBundle.ts
runner/
matchers/
reporters/
third_party/

@@ -20,15 +20,16 @@ import type { Command } from 'playwright-core/lib/utilsBundle';
import fs from 'fs';
import url from 'url';
import path from 'path';
import { Runner, builtInReporters, kDefaultConfigFiles } from './runner';
import { Runner } from './runner';
import type { ConfigCLIOverrides } from './runner';
import { stopProfiling, startProfiling } from './profiler';
import { fileIsModule } from './util';
import type { TestFileFilter } from './util';
import { createTitleMatcher } from './util';
import { showHTMLReport } from './reporters/html';
import { baseFullConfig, defaultTimeout } from './configLoader';
import { baseFullConfig, defaultTimeout, kDefaultConfigFiles, resolveConfigFile } from './configLoader';
import type { TraceMode } from './types';
import { builtInReporters } from './runner/reporters';

export function addTestCommands(program: Command) {
  addTestCommand(program);
@@ -147,7 +148,7 @@ async function runTests(args: string[], opts: { [key: string]: any }) {

  // When no --config option is passed, let's look for the config file in the current directory.
  const configFileOrDirectory = opts.config ? path.resolve(process.cwd(), opts.config) : process.cwd();
  const resolvedConfigFile = Runner.resolveConfigFile(configFileOrDirectory);
  const resolvedConfigFile = resolveConfigFile(configFileOrDirectory);
  if (restartWithExperimentalTsEsm(resolvedConfigFile))
    return;

@@ -184,13 +185,12 @@ async function runTests(args: string[], opts: { [key: string]: any }) {
  process.exit(status === 'passed' ? 0 : 1);
}


async function listTestFiles(opts: { [key: string]: any }) {
  // Redefine process.stdout.write in case config decides to pollute stdio.
  const write = process.stdout.write.bind(process.stdout);
  process.stdout.write = (() => {}) as any;
  const configFileOrDirectory = opts.config ? path.resolve(process.cwd(), opts.config) : process.cwd();
  const resolvedConfigFile = Runner.resolveConfigFile(configFileOrDirectory)!;
  const resolvedConfigFile = resolveConfigFile(configFileOrDirectory)!;
  if (restartWithExperimentalTsEsm(resolvedConfigFile))
    return;

@@ -20,8 +20,8 @@ import * as path from 'path';
import { isRegExp } from 'playwright-core/lib/utils';
import type { Reporter } from '../types/testReporter';
import type { SerializedConfig } from './ipc';
import type { BuiltInReporter, ConfigCLIOverrides } from './runner';
import { builtInReporters } from './runner';
import type { ConfigCLIOverrides } from './runner';
import { builtInReporters, toReporters } from './runner/reporters';
import { requireOrImport } from './transform';
import type { Config, FullConfigInternal, FullProjectInternal, Project, ReporterDescription } from './types';
import { errorWithFile, getPackageJsonPath, mergeObjects } from './util';
@@ -266,14 +266,6 @@ function takeFirst<T>(...args: (T | undefined)[]): T {
  return undefined as any as T;
}

function toReporters(reporters: BuiltInReporter | ReporterDescription[] | undefined): ReporterDescription[] | undefined {
  if (!reporters)
    return;
  if (typeof reporters === 'string')
    return [[reporters]];
  return reporters;
}

function validateConfig(file: string, config: Config) {
  if (typeof config !== 'object' || !config)
    throw errorWithFile(file, `Configuration file must export a single object`);
@@ -490,3 +482,35 @@ function resolveScript(id: string, rootDir: string) {
    return localPath;
  return require.resolve(id, { paths: [rootDir] });
}

export const kDefaultConfigFiles = ['playwright.config.ts', 'playwright.config.js', 'playwright.config.mjs'];

export function resolveConfigFile(configFileOrDirectory: string): string | null {
  const resolveConfig = (configFile: string) => {
    if (fs.existsSync(configFile))
      return configFile;
  };

  const resolveConfigFileFromDirectory = (directory: string) => {
    for (const configName of kDefaultConfigFiles) {
      const configFile = resolveConfig(path.resolve(directory, configName));
      if (configFile)
        return configFile;
    }
  };

  if (!fs.existsSync(configFileOrDirectory))
    throw new Error(`${configFileOrDirectory} does not exist`);
  if (fs.statSync(configFileOrDirectory).isDirectory()) {
    // When passed a directory, look for a config file inside.
    const configFile = resolveConfigFileFromDirectory(configFileOrDirectory);
    if (configFile)
      return configFile;
    // If there is no config, assume this as a root testing directory.
    return null;
  } else {
    // When passed a file, it must be a config file.
    const configFile = resolveConfig(configFileOrDirectory);
    return configFile!;
  }
}

@@ -22,14 +22,7 @@ import type { ProcessExitData } from './processHost';
import type { TestCase } from './test';
import { ManualPromise } from 'playwright-core/lib/utils';
import { WorkerHost } from './workerHost';

export type TestGroup = {
  workerHash: string;
  requireFile: string;
  repeatEachIndex: number;
  projectId: string;
  tests: TestCase[];
};
import type { TestGroup } from './runner/testGroups';

type TestResultData = {
  result: TestResult;

@@ -15,52 +15,19 @@
 * limitations under the License.
 */

import * as fs from 'fs';
import * as path from 'path';
import { monotonicTime } from 'playwright-core/lib/utils';
import { colors, minimatch, rimraf } from 'playwright-core/lib/utilsBundle';
import { promisify } from 'util';
import type { FullResult, Reporter, TestError } from '../types/testReporter';
import type { TestGroup } from './dispatcher';
import { Dispatcher } from './dispatcher';
import type { FullResult } from '../types/testReporter';
import { ConfigLoader } from './configLoader';
import type { TestRunnerPlugin } from './plugins';
import { setRunnerToAddPluginsTo } from './plugins';
import { dockerPlugin } from './plugins/dockerPlugin';
import { webServerPluginsForConfig } from './plugins/webServerPlugin';
import { formatError } from './reporters/base';
import DotReporter from './reporters/dot';
import EmptyReporter from './reporters/empty';
import GitHubReporter from './reporters/github';
import HtmlReporter from './reporters/html';
import JSONReporter from './reporters/json';
import JUnitReporter from './reporters/junit';
import LineReporter from './reporters/line';
import ListReporter from './reporters/list';
import { Multiplexer } from './reporters/multiplexer';
import type { TestCase } from './test';
import { Suite } from './test';
import type { Config, FullConfigInternal, FullProjectInternal } from './types';
import { createFileMatcher, createFileMatcherFromFilters, createTitleMatcher } from './util';
import { collectFilesForProjects, collectProjects } from './runner/projectUtils';
import { createReporter } from './runner/reporters';
import { createTaskRunner } from './runner/tasks';
import type { TaskRunnerState } from './runner/tasks';
import type { Config, FullConfigInternal } from './types';
import type { Matcher, TestFileFilter } from './util';
import { buildFileSuiteForProject, filterOnly, filterSuite, filterSuiteWithOnlySemantics, filterTestsRemoveEmptySuites } from './suiteUtils';
import { LoaderHost } from './loaderHost';
import { loadTestFilesInProcess } from './testLoader';
import { TaskRunner } from './taskRunner';
import type { LoadError } from './fixtures';

const removeFolderAsync = promisify(rimraf);
const readDirAsync = promisify(fs.readdir);
const readFileAsync = promisify(fs.readFile);
export const kDefaultConfigFiles = ['playwright.config.ts', 'playwright.config.js', 'playwright.config.mjs'];

type RunOptions = {
  listOnly: boolean;
  testFileFilters: TestFileFilter[];
  testTitleMatcher: Matcher;
  projectFilter?: string[];
  passWithNoTests?: boolean;
};

export type ConfigCLIOverrides = {
  forbidOnly?: boolean;
@@ -81,9 +48,16 @@ export type ConfigCLIOverrides = {
  use?: any;
};

export type RunOptions = {
  listOnly: boolean;
  testFileFilters: TestFileFilter[];
  testTitleMatcher: Matcher;
  projectFilter?: string[];
  passWithNoTests?: boolean;
};

export class Runner {
  private _configLoader: ConfigLoader;
  private _reporter!: Multiplexer;
  private _plugins: TestRunnerPlugin[] = [];

  constructor(configCLIOverrides?: ConfigCLIOverrides) {
@@ -103,80 +77,9 @@ export class Runner {
    return this._configLoader.loadEmptyConfig(configFileOrDirectory);
  }

  static resolveConfigFile(configFileOrDirectory: string): string | null {
    const resolveConfig = (configFile: string) => {
      if (fs.existsSync(configFile))
        return configFile;
    };

    const resolveConfigFileFromDirectory = (directory: string) => {
      for (const configName of kDefaultConfigFiles) {
        const configFile = resolveConfig(path.resolve(directory, configName));
        if (configFile)
          return configFile;
      }
    };

    if (!fs.existsSync(configFileOrDirectory))
      throw new Error(`${configFileOrDirectory} does not exist`);
    if (fs.statSync(configFileOrDirectory).isDirectory()) {
      // When passed a directory, look for a config file inside.
      const configFile = resolveConfigFileFromDirectory(configFileOrDirectory);
      if (configFile)
        return configFile;
      // If there is no config, assume this as a root testing directory.
      return null;
    } else {
      // When passed a file, it must be a config file.
      const configFile = resolveConfig(configFileOrDirectory);
      return configFile!;
    }
  }

  private async _createReporter(list: boolean) {
    const defaultReporters: {[key in BuiltInReporter]: new(arg: any) => Reporter} = {
      dot: list ? ListModeReporter : DotReporter,
      line: list ? ListModeReporter : LineReporter,
      list: list ? ListModeReporter : ListReporter,
      github: GitHubReporter,
      json: JSONReporter,
      junit: JUnitReporter,
      null: EmptyReporter,
      html: HtmlReporter,
    };
    const reporters: Reporter[] = [];
    for (const r of this._configLoader.fullConfig().reporter) {
      const [name, arg] = r;
      if (name in defaultReporters) {
        reporters.push(new defaultReporters[name as keyof typeof defaultReporters](arg));
      } else {
        const reporterConstructor = await this._configLoader.loadReporter(name);
        reporters.push(new reporterConstructor(arg));
      }
    }
    if (process.env.PW_TEST_REPORTER) {
      const reporterConstructor = await this._configLoader.loadReporter(process.env.PW_TEST_REPORTER);
      reporters.push(new reporterConstructor());
    }

    const someReporterPrintsToStdio = reporters.some(r => {
      const prints = r.printsToStdio ? r.printsToStdio() : true;
      return prints;
    });
    if (reporters.length && !someReporterPrintsToStdio) {
      // Add a line/dot/list-mode reporter for convenience.
      // Important to put it first, just in case some other reporter stalls onEnd.
      if (list)
        reporters.unshift(new ListModeReporter());
      else
        reporters.unshift(!process.env.CI ? new LineReporter({ omitFailures: true }) : new DotReporter());
    }
    return new Multiplexer(reporters);
  }

  async listTestFiles(projectNames: string[] | undefined): Promise<any> {
    const projects = this._collectProjects(projectNames);
    const filesByProject = await this._collectFiles(projects, []);
    const projects = collectProjects(this._configLoader.fullConfig(), projectNames);
    const filesByProject = await collectFilesForProjects(projects, []);
    const report: any = {
      projects: []
    };
@@ -189,306 +92,34 @@ export class Runner {
    return report;
  }

  private _collectProjects(projectNames?: string[]): FullProjectInternal[] {
    const fullConfig = this._configLoader.fullConfig();
    if (!projectNames)
      return [...fullConfig.projects];
    const projectsToFind = new Set<string>();
    const unknownProjects = new Map<string, string>();
    projectNames.forEach(n => {
      const name = n.toLocaleLowerCase();
      projectsToFind.add(name);
      unknownProjects.set(name, n);
    });
    const projects = fullConfig.projects.filter(project => {
      const name = project.name.toLocaleLowerCase();
      unknownProjects.delete(name);
      return projectsToFind.has(name);
    });
    if (unknownProjects.size) {
      const names = fullConfig.projects.map(p => p.name).filter(name => !!name);
      if (!names.length)
        throw new Error(`No named projects are specified in the configuration file`);
      const unknownProjectNames = Array.from(unknownProjects.values()).map(n => `"${n}"`).join(', ');
      throw new Error(`Project(s) ${unknownProjectNames} not found. Available named projects: ${names.map(name => `"${name}"`).join(', ')}`);
    }
    return projects;
  }

  private async _collectFiles(projects: FullProjectInternal[], commandLineFileFilters: TestFileFilter[]): Promise<Map<FullProjectInternal, string[]>> {
    const extensions = ['.js', '.ts', '.mjs', '.tsx', '.jsx'];
    const testFileExtension = (file: string) => extensions.includes(path.extname(file));
    const filesByProject = new Map<FullProjectInternal, string[]>();
    const fileToProjectName = new Map<string, string>();
    const commandLineFileMatcher = commandLineFileFilters.length ? createFileMatcherFromFilters(commandLineFileFilters) : () => true;
    for (const project of projects) {
      const allFiles = await collectFiles(project.testDir, project._respectGitIgnore);
      const testMatch = createFileMatcher(project.testMatch);
      const testIgnore = createFileMatcher(project.testIgnore);
      const testFiles = allFiles.filter(file => {
        if (!testFileExtension(file))
          return false;
        const isTest = !testIgnore(file) && testMatch(file) && commandLineFileMatcher(file);
        if (!isTest)
          return false;
        fileToProjectName.set(file, project.name);
        return true;
      });
      filesByProject.set(project, testFiles);
    }

    return filesByProject;
  }

  private async _loadAllTests(options: RunOptions, errors: TestError[]): Promise<{ rootSuite: Suite, testGroups: TestGroup[] }> {
    const config = this._configLoader.fullConfig();
    const projects = this._collectProjects(options.projectFilter);
    const filesByProject = await this._collectFiles(projects, options.testFileFilters);
    const allTestFiles = new Set<string>();
    for (const files of filesByProject.values())
      files.forEach(file => allTestFiles.add(file));

    // Load all tests.
    const preprocessRoot = await this._loadTests(allTestFiles, errors);

    // Complain about duplicate titles.
    errors.push(...createDuplicateTitlesErrors(config, preprocessRoot));

    // Filter tests to respect line/column filter.
    filterByFocusedLine(preprocessRoot, options.testFileFilters);

    // Complain about only.
    if (config.forbidOnly) {
      const onlyTestsAndSuites = preprocessRoot._getOnlyItems();
      if (onlyTestsAndSuites.length > 0)
        errors.push(...createForbidOnlyErrors(config, onlyTestsAndSuites));
    }

    // Filter only.
    if (!options.listOnly)
      filterOnly(preprocessRoot);

    const rootSuite = await this._createRootSuite(preprocessRoot, options, filesByProject);

    // Do not create test groups when listing.
    if (options.listOnly)
      return { rootSuite, testGroups: [] };

    const testGroups = createTestGroups(rootSuite.suites, config.workers);
    return { rootSuite, testGroups };
  }

  private async _createRootSuite(preprocessRoot: Suite, options: RunOptions, filesByProject: Map<FullProjectInternal, string[]>): Promise<Suite> {
    // Generate projects.
    const fileSuites = new Map<string, Suite>();
    for (const fileSuite of preprocessRoot.suites)
      fileSuites.set(fileSuite._requireFile, fileSuite);

    const rootSuite = new Suite('', 'root');
    for (const [project, files] of filesByProject) {
      const grepMatcher = createTitleMatcher(project.grep);
      const grepInvertMatcher = project.grepInvert ? createTitleMatcher(project.grepInvert) : null;

      const titleMatcher = (test: TestCase) => {
        const grepTitle = test.titlePath().join(' ');
        if (grepInvertMatcher?.(grepTitle))
          return false;
        return grepMatcher(grepTitle) && options.testTitleMatcher(grepTitle);
      };

      const projectSuite = new Suite(project.name, 'project');
      projectSuite._projectConfig = project;
      if (project._fullyParallel)
        projectSuite._parallelMode = 'parallel';
      rootSuite._addSuite(projectSuite);
      for (const file of files) {
        const fileSuite = fileSuites.get(file);
        if (!fileSuite)
          continue;
        for (let repeatEachIndex = 0; repeatEachIndex < project.repeatEach; repeatEachIndex++) {
          const builtSuite = buildFileSuiteForProject(project, fileSuite, repeatEachIndex);
          if (!filterTestsRemoveEmptySuites(builtSuite, titleMatcher))
            continue;
          projectSuite._addSuite(builtSuite);
        }
      }
    }
    return rootSuite;
  }

  private async _loadTests(testFiles: Set<string>, errors: TestError[]): Promise<Suite> {
    if (process.env.PW_TEST_OOP_LOADER) {
      const loaderHost = new LoaderHost();
      await loaderHost.start(this._configLoader.serializedConfig());
      try {
        return await loaderHost.loadTestFiles([...testFiles], this._reporter);
      } finally {
        await loaderHost.stop();
      }
    }
    const loadErrors: LoadError[] = [];
    try {
      return await loadTestFilesInProcess(this._configLoader.fullConfig(), [...testFiles], loadErrors);
    } finally {
      errors.push(...loadErrors);
    }
  }

  private _filterForCurrentShard(rootSuite: Suite, testGroups: TestGroup[]) {
    const shard = this._configLoader.fullConfig().shard;
    if (!shard)
      return;

    // Each shard includes:
    // - its portion of the regular tests
    // - project setup tests for the projects that have regular tests in this shard
    let shardableTotal = 0;
    for (const group of testGroups)
      shardableTotal += group.tests.length;

    const shardTests = new Set<TestCase>();

    // Each shard gets some tests.
    const shardSize = Math.floor(shardableTotal / shard.total);
    // First few shards get one more test each.
    const extraOne = shardableTotal - shardSize * shard.total;

    const currentShard = shard.current - 1; // Make it zero-based for calculations.
    const from = shardSize * currentShard + Math.min(extraOne, currentShard);
    const to = from + shardSize + (currentShard < extraOne ? 1 : 0);
    let current = 0;
    const shardProjects = new Set<string>();
    const shardTestGroups = [];
    for (const group of testGroups) {
      // Any test group goes to the shard that contains the first test of this group.
      // So, this shard gets any group that starts at [from; to)
      if (current >= from && current < to) {
        shardProjects.add(group.projectId);
        shardTestGroups.push(group);
        for (const test of group.tests)
          shardTests.add(test);
      }
      current += group.tests.length;
    }
    testGroups.length = 0;
    testGroups.push(...shardTestGroups);

    if (!shardTests.size) {
      // Filtering with "only semantics" does not work when we have zero tests - it leaves all the tests.
      // We need an empty suite in this case.
      rootSuite._entries = [];
    } else {
      filterSuiteWithOnlySemantics(rootSuite, () => false, test => shardTests.has(test));
    }
  }

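Aside (not part of the diff): the shard arithmetic above can be restated as a standalone helper. With `total` shards, every shard gets floor(n / total) tests and the first n % total shards get one extra, so 10 tests across 3 shards produce the ranges [0, 4), [4, 7) and [7, 10). shardRange is an illustrative name, not a Playwright export:

// Standalone sketch of the shard range computation used by _filterForCurrentShard.
function shardRange(testCount: number, shard: { current: number, total: number }): [number, number] {
  const shardSize = Math.floor(testCount / shard.total);
  const extraOne = testCount % shard.total;      // same as testCount - shardSize * shard.total
  const currentShard = shard.current - 1;        // zero-based, as in the code above
  const from = shardSize * currentShard + Math.min(extraOne, currentShard);
  const to = from + shardSize + (currentShard < extraOne ? 1 : 0);
  return [from, to];
}

// Prints [0, 4], [4, 7], [7, 10]: a group goes to the shard whose range contains its first test.
console.log(shardRange(10, { current: 1, total: 3 }));
console.log(shardRange(10, { current: 2, total: 3 }));
console.log(shardRange(10, { current: 3, total: 3 }));
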
  async runAllTests(options: RunOptions): Promise<FullResult['status']> {
    const config = this._configLoader.fullConfig();
    const deadline = config.globalTimeout ? monotonicTime() + config.globalTimeout : 0;

    // Legacy webServer support.
    this._plugins.push(...webServerPluginsForConfig(config));
    // Docker support.
    this._plugins.push(dockerPlugin);

    this._reporter = await this._createReporter(options.listOnly);
    const taskRunner = new TaskRunner(this._reporter, config.globalTimeout);
    const reporter = await createReporter(this._configLoader, options.listOnly);
    const taskRunner = createTaskRunner(config, reporter, this._plugins, options);

    // Setup the plugins.
    for (const plugin of this._plugins) {
      taskRunner.addTask('plugin setup', async () => {
        await plugin.setup?.(config, config._configDir, this._reporter);
        return () => plugin.teardown?.();
      });
    }

    // Run global setup & teardown.
    if (config.globalSetup || config.globalTeardown) {
      taskRunner.addTask('global setup', async () => {
        const setupHook = config.globalSetup ? await this._configLoader.loadGlobalHook(config.globalSetup) : undefined;
        const teardownHook = config.globalTeardown ? await this._configLoader.loadGlobalHook(config.globalTeardown) : undefined;
        const globalSetupResult = setupHook ? await setupHook(this._configLoader.fullConfig()) : undefined;
        return async () => {
          if (typeof globalSetupResult === 'function')
            await globalSetupResult();
          await teardownHook?.(config);
        };
      });
    }
    const context: TaskRunnerState = {
      config,
      configLoader: this._configLoader,
      options,
      reporter,
    };

    reporter.onConfigure(config);
    const taskStatus = await taskRunner.run(context, deadline);
    let status: FullResult['status'] = 'passed';

    // Load tests.
    let loadedTests!: { rootSuite: Suite, testGroups: TestGroup[] };
    taskRunner.addTask('load tests', async ({ errors }) => {
      loadedTests = await this._loadAllTests(options, errors);
      if (errors.length)
        return;

      // Fail when no tests.
      if (!loadedTests.rootSuite.allTests().length && !options.passWithNoTests)
        throw new Error(`No tests found`);

      if (!options.listOnly) {
        this._filterForCurrentShard(loadedTests.rootSuite, loadedTests.testGroups);
        config._maxConcurrentTestGroups = loadedTests.testGroups.length;
      }
    });

    if (!options.listOnly) {
      taskRunner.addTask('prepare to run', async () => {
        // Remove output directories.
        await this._removeOutputDirs(options);
      });

      taskRunner.addTask('plugin begin', async () => {
        for (const plugin of this._plugins)
          await plugin.begin?.(loadedTests.rootSuite);
      });
    }

    taskRunner.addTask('report begin', async () => {
      this._reporter.onBegin?.(config, loadedTests.rootSuite);
      return async () => {
        await this._reporter.onEnd();
      };
    });

    if (!options.listOnly) {
      let dispatcher: Dispatcher;

      taskRunner.addTask('setup workers', async () => {
        const { rootSuite, testGroups } = loadedTests;

        if (config._ignoreSnapshots) {
          this._reporter.onStdOut(colors.dim([
            'NOTE: running with "ignoreSnapshots" option. All of the following asserts are silently ignored:',
            '- expect().toMatchSnapshot()',
            '- expect().toHaveScreenshot()',
            '',
          ].join('\n')));
        }

        dispatcher = new Dispatcher(this._configLoader, testGroups, this._reporter);

        return async () => {
          // Stop will stop workers and mark some tests as interrupted.
          await dispatcher.stop();
          if (dispatcher.hasWorkerErrors() || rootSuite.allTests().some(test => !test.ok()))
            status = 'failed';
        };
      });

      taskRunner.addTask('test suite', async () => {
        await dispatcher.run();
      });
    }

    const deadline = config.globalTimeout ? monotonicTime() + config.globalTimeout : 0;

    this._reporter.onConfigure(config);
    const taskStatus = await taskRunner.run(deadline);
    if (context.dispatcher?.hasWorkerErrors() || context.rootSuite?.allTests().some(test => !test.ok()))
      status = 'failed';
    if (status === 'passed' && taskStatus !== 'passed')
      status = taskStatus;
    await this._reporter.onExit({ status });
    await reporter.onExit({ status });

    // Calling process.exit() might truncate large stdout/stderr output.
    // See https://github.com/nodejs/node/issues/6456.
    // See https://github.com/nodejs/node/issues/12921
@@ -496,297 +127,6 @@ export class Runner {
    await new Promise<void>(resolve => process.stderr.write('', () => resolve()));
    return status;
  }

  private async _removeOutputDirs(options: RunOptions) {
    const config = this._configLoader.fullConfig();
    const outputDirs = new Set<string>();
    for (const p of config.projects) {
      if (!options.projectFilter || options.projectFilter.includes(p.name))
        outputDirs.add(p.outputDir);
    }

    await Promise.all(Array.from(outputDirs).map(outputDir => removeFolderAsync(outputDir).catch(async (error: any) => {
      if ((error as any).code === 'EBUSY') {
        // We failed to remove folder, might be due to the whole folder being mounted inside a container:
        // https://github.com/microsoft/playwright/issues/12106
        // Do a best-effort to remove all files inside of it instead.
        const entries = await readDirAsync(outputDir).catch(e => []);
        await Promise.all(entries.map(entry => removeFolderAsync(path.join(outputDir, entry))));
      } else {
        throw error;
      }
    })));
  }
}

function createFileMatcherFromFilter(filter: TestFileFilter) {
  const fileMatcher = createFileMatcher(filter.re || filter.exact || '');
  return (testFileName: string, testLine: number, testColumn: number) =>
    fileMatcher(testFileName) && (filter.line === testLine || filter.line === null) && (filter.column === testColumn || filter.column === null);
}

function filterByFocusedLine(suite: Suite, focusedTestFileLines: TestFileFilter[]) {
  if (!focusedTestFileLines.length)
    return;
  const matchers = focusedTestFileLines.map(createFileMatcherFromFilter);
  const testFileLineMatches = (testFileName: string, testLine: number, testColumn: number) => matchers.some(m => m(testFileName, testLine, testColumn));
  const suiteFilter = (suite: Suite) => !!suite.location && testFileLineMatches(suite.location.file, suite.location.line, suite.location.column);
  const testFilter = (test: TestCase) => testFileLineMatches(test.location.file, test.location.line, test.location.column);
  return filterSuite(suite, suiteFilter, testFilter);
}

async function collectFiles(testDir: string, respectGitIgnore: boolean): Promise<string[]> {
  if (!fs.existsSync(testDir))
    return [];
  if (!fs.statSync(testDir).isDirectory())
    return [];

  type Rule = {
    dir: string;
    negate: boolean;
    match: (s: string, partial?: boolean) => boolean
  };
  type IgnoreStatus = 'ignored' | 'included' | 'ignored-but-recurse';

  const checkIgnores = (entryPath: string, rules: Rule[], isDirectory: boolean, parentStatus: IgnoreStatus) => {
    let status = parentStatus;
    for (const rule of rules) {
      const ruleIncludes = rule.negate;
      if ((status === 'included') === ruleIncludes)
        continue;
      const relative = path.relative(rule.dir, entryPath);
      if (rule.match('/' + relative) || rule.match(relative)) {
        // Matches "/dir/file" or "dir/file"
        status = ruleIncludes ? 'included' : 'ignored';
      } else if (isDirectory && (rule.match('/' + relative + '/') || rule.match(relative + '/'))) {
        // Matches "/dir/subdir/" or "dir/subdir/" for directories.
        status = ruleIncludes ? 'included' : 'ignored';
      } else if (isDirectory && ruleIncludes && (rule.match('/' + relative, true) || rule.match(relative, true))) {
        // Matches "/dir/donotskip/" when "/dir" is excluded, but "!/dir/donotskip/file" is included.
        status = 'ignored-but-recurse';
      }
    }
    return status;
  };

  const files: string[] = [];

  const visit = async (dir: string, rules: Rule[], status: IgnoreStatus) => {
    const entries = await readDirAsync(dir, { withFileTypes: true });
    entries.sort((a, b) => a.name.localeCompare(b.name));

    if (respectGitIgnore) {
      const gitignore = entries.find(e => e.isFile() && e.name === '.gitignore');
      if (gitignore) {
        const content = await readFileAsync(path.join(dir, gitignore.name), 'utf8');
        const newRules: Rule[] = content.split(/\r?\n/).map(s => {
          s = s.trim();
          if (!s)
            return;
          // Use flipNegate, because we handle negation ourselves.
          const rule = new minimatch.Minimatch(s, { matchBase: true, dot: true, flipNegate: true }) as any;
          if (rule.comment)
            return;
          rule.dir = dir;
          return rule;
        }).filter(rule => !!rule);
        rules = [...rules, ...newRules];
      }
    }

    for (const entry of entries) {
      if (entry.name === '.' || entry.name === '..')
        continue;
      if (entry.isFile() && entry.name === '.gitignore')
        continue;
      if (entry.isDirectory() && entry.name === 'node_modules')
        continue;
      const entryPath = path.join(dir, entry.name);
      const entryStatus = checkIgnores(entryPath, rules, entry.isDirectory(), status);
      if (entry.isDirectory() && entryStatus !== 'ignored')
        await visit(entryPath, rules, entryStatus);
      else if (entry.isFile() && entryStatus === 'included')
        files.push(entryPath);
    }
  };
  await visit(testDir, [], 'included');
  return files;
}

function buildItemLocation(rootDir: string, testOrSuite: Suite | TestCase) {
  if (!testOrSuite.location)
    return '';
  return `${path.relative(rootDir, testOrSuite.location.file)}:${testOrSuite.location.line}`;
}

function createTestGroups(projectSuites: Suite[], workers: number): TestGroup[] {
  // This function groups tests that can be run together.
  // Tests cannot be run together when:
  // - They belong to different projects - requires different workers.
  // - They have a different repeatEachIndex - requires different workers.
  // - They have a different set of worker fixtures in the pool - requires different workers.
  // - They have a different requireFile - reuses the worker, but runs each requireFile separately.
  // - They belong to a parallel suite.

  // Using the map "workerHash -> requireFile -> group" makes us preserve the natural order
  // of worker hashes and require files for the simple cases.
  const groups = new Map<string, Map<string, {
    // Tests that must be run in order are in the same group.
    general: TestGroup,

    // There are 3 kinds of parallel tests:
    // - Tests belonging to parallel suites, without beforeAll/afterAll hooks.
    //   These can be run independently, they are put into their own group, key === test.
    // - Tests belonging to parallel suites, with beforeAll/afterAll hooks.
    //   These should share the worker as much as possible, put into single parallelWithHooks group.
    //   We'll divide them into equally-sized groups later.
    // - Tests belonging to serial suites inside parallel suites.
    //   These should run as a serial group, each group is independent, key === serial suite.
    parallel: Map<Suite | TestCase, TestGroup>,
    parallelWithHooks: TestGroup,
  }>>();

  const createGroup = (test: TestCase): TestGroup => {
    return {
      workerHash: test._workerHash,
      requireFile: test._requireFile,
      repeatEachIndex: test.repeatEachIndex,
      projectId: test._projectId,
      tests: [],
    };
  };

  for (const projectSuite of projectSuites) {
    for (const test of projectSuite.allTests()) {
      let withWorkerHash = groups.get(test._workerHash);
      if (!withWorkerHash) {
        withWorkerHash = new Map();
        groups.set(test._workerHash, withWorkerHash);
      }
      let withRequireFile = withWorkerHash.get(test._requireFile);
      if (!withRequireFile) {
        withRequireFile = {
          general: createGroup(test),
          parallel: new Map(),
          parallelWithHooks: createGroup(test),
        };
        withWorkerHash.set(test._requireFile, withRequireFile);
      }

      // Note that a parallel suite cannot be inside a serial suite. This is enforced in TestType.
      let insideParallel = false;
      let outerMostSerialSuite: Suite | undefined;
      let hasAllHooks = false;
      for (let parent: Suite | undefined = test.parent; parent; parent = parent.parent) {
        if (parent._parallelMode === 'serial')
          outerMostSerialSuite = parent;
        insideParallel = insideParallel || parent._parallelMode === 'parallel';
        hasAllHooks = hasAllHooks || parent._hooks.some(hook => hook.type === 'beforeAll' || hook.type === 'afterAll');
      }

      if (insideParallel) {
        if (hasAllHooks && !outerMostSerialSuite) {
          withRequireFile.parallelWithHooks.tests.push(test);
        } else {
          const key = outerMostSerialSuite || test;
          let group = withRequireFile.parallel.get(key);
          if (!group) {
            group = createGroup(test);
            withRequireFile.parallel.set(key, group);
          }
          group.tests.push(test);
        }
      } else {
        withRequireFile.general.tests.push(test);
      }
    }
  }

  const result: TestGroup[] = [];
  for (const withWorkerHash of groups.values()) {
    for (const withRequireFile of withWorkerHash.values()) {
      // Tests without parallel mode should run serially as a single group.
      if (withRequireFile.general.tests.length)
        result.push(withRequireFile.general);

      // Parallel test groups without beforeAll/afterAll can be run independently.
      result.push(...withRequireFile.parallel.values());

      // Tests with beforeAll/afterAll should try to share workers as much as possible.
      const parallelWithHooksGroupSize = Math.ceil(withRequireFile.parallelWithHooks.tests.length / workers);
      let lastGroup: TestGroup | undefined;
      for (const test of withRequireFile.parallelWithHooks.tests) {
        if (!lastGroup || lastGroup.tests.length >= parallelWithHooksGroupSize) {
          lastGroup = createGroup(test);
          result.push(lastGroup);
        }
        lastGroup.tests.push(test);
      }
    }
  }
  return result;
}

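Aside (not part of the diff): the parallelWithHooks bucket above is split into chunks of ceil(count / workers) tests, so beforeAll/afterAll hooks run once per chunk instead of once per test. A minimal restatement, with chunkForWorkers as an illustrative helper name:

// Splits tests into groups of ceil(n / workers), matching the loop above.
function chunkForWorkers<T>(tests: T[], workers: number): T[][] {
  const groupSize = Math.ceil(tests.length / workers);
  const groups: T[][] = [];
  for (let i = 0; i < tests.length; i += groupSize)
    groups.push(tests.slice(i, i + groupSize));
  return groups;
}

// 7 tests on 3 workers -> groups of size 3, 3 and 1.
console.log(chunkForWorkers(['t1', 't2', 't3', 't4', 't5', 't6', 't7'], 3).map(g => g.length));
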
class ListModeReporter implements Reporter {
  private config!: FullConfigInternal;

  onBegin(config: FullConfigInternal, suite: Suite): void {
    this.config = config;
    // eslint-disable-next-line no-console
    console.log(`Listing tests:`);
    const tests = suite.allTests();
    const files = new Set<string>();
    for (const test of tests) {
      // root, project, file, ...describes, test
      const [, projectName, , ...titles] = test.titlePath();
      const location = `${path.relative(config.rootDir, test.location.file)}:${test.location.line}:${test.location.column}`;
      const projectTitle = projectName ? `[${projectName}] › ` : '';
      // eslint-disable-next-line no-console
      console.log(`  ${projectTitle}${location} › ${titles.join(' ')}`);
      files.add(test.location.file);
    }
    // eslint-disable-next-line no-console
    console.log(`Total: ${tests.length} ${tests.length === 1 ? 'test' : 'tests'} in ${files.size} ${files.size === 1 ? 'file' : 'files'}`);
  }

  onError(error: TestError) {
    // eslint-disable-next-line no-console
    console.error('\n' + formatError(this.config, error, false).message);
  }
}

function createForbidOnlyErrors(config: FullConfigInternal, onlyTestsAndSuites: (TestCase | Suite)[]): TestError[] {
  const errors: TestError[] = [];
  for (const testOrSuite of onlyTestsAndSuites) {
    // Skip root and file.
    const title = testOrSuite.titlePath().slice(2).join(' ');
    const error: TestError = {
      message: `Error: focused item found in the --forbid-only mode: "${title}"`,
      location: testOrSuite.location!,
    };
    errors.push(error);
  }
  return errors;
}

function createDuplicateTitlesErrors(config: FullConfigInternal, rootSuite: Suite): TestError[] {
  const errors: TestError[] = [];
  for (const fileSuite of rootSuite.suites) {
    const testsByFullTitle = new Map<string, TestCase>();
    for (const test of fileSuite.allTests()) {
      const fullTitle = test.titlePath().slice(2).join(' › ');
      const existingTest = testsByFullTitle.get(fullTitle);
      if (existingTest) {
        const error: TestError = {
          message: `Error: duplicate test title "${fullTitle}", first declared in ${buildItemLocation(config.rootDir, existingTest)}`,
          location: test.location,
        };
        errors.push(error);
      }
      testsByFullTitle.set(fullTitle, test);
    }
  }
  return errors;
}

function sanitizeConfigForJSON(object: any, visited: Set<any>): any {
@@ -817,6 +157,3 @@ function sanitizeConfigForJSON(object: any, visited: Set<any>): any {
  }
  return result;
}

export const builtInReporters = ['list', 'line', 'dot', 'json', 'junit', 'null', 'github', 'html'] as const;
export type BuiltInReporter = typeof builtInReporters[number];

packages/playwright-test/src/runner/DEPS.list (new file, 7 lines)
@@ -0,0 +1,7 @@
[*]
../
../../types.ts
../matchers/
../reporters/
../third_party/
../plugins/

packages/playwright-test/src/runner/loadUtils.ts (new file, 142 lines)
@@ -0,0 +1,142 @@
/**
 * Copyright Microsoft Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import path from 'path';
import type { TestError } from '../../types/testReporter';
import type { ConfigLoader } from '../configLoader';
import type { LoadError } from '../fixtures';
import { LoaderHost } from '../loaderHost';
import type { Multiplexer } from '../reporters/multiplexer';
import { createRootSuite, filterOnly, filterSuite } from '../suiteUtils';
import type { Suite, TestCase } from '../test';
import { loadTestFilesInProcess } from '../testLoader';
import type { FullConfigInternal } from '../types';
import type { Matcher, TestFileFilter } from '../util';
import { createFileMatcher } from '../util';
import { collectFilesForProjects, collectProjects } from './projectUtils';

type LoadOptions = {
  listOnly: boolean;
  testFileFilters: TestFileFilter[];
  testTitleMatcher: Matcher;
  projectFilter?: string[];
  passWithNoTests?: boolean;
};

export async function loadAllTests(configLoader: ConfigLoader, reporter: Multiplexer, options: LoadOptions, errors: TestError[]): Promise<Suite> {
  const config = configLoader.fullConfig();
  const projects = collectProjects(config, options.projectFilter);
  const filesByProject = await collectFilesForProjects(projects, options.testFileFilters);
  const allTestFiles = new Set<string>();
  for (const files of filesByProject.values())
    files.forEach(file => allTestFiles.add(file));

  // Load all tests.
  const preprocessRoot = await loadTests(configLoader, reporter, allTestFiles, errors);

  // Complain about duplicate titles.
  errors.push(...createDuplicateTitlesErrors(config, preprocessRoot));

  // Filter tests to respect line/column filter.
  filterByFocusedLine(preprocessRoot, options.testFileFilters);

  // Complain about only.
  if (config.forbidOnly) {
    const onlyTestsAndSuites = preprocessRoot._getOnlyItems();
    if (onlyTestsAndSuites.length > 0)
      errors.push(...createForbidOnlyErrors(onlyTestsAndSuites));
  }

  // Filter only.
  if (!options.listOnly)
    filterOnly(preprocessRoot);

  return await createRootSuite(preprocessRoot, options.testTitleMatcher, filesByProject);
}

async function loadTests(configLoader: ConfigLoader, reporter: Multiplexer, testFiles: Set<string>, errors: TestError[]): Promise<Suite> {
  if (process.env.PW_TEST_OOP_LOADER) {
    const loaderHost = new LoaderHost();
    await loaderHost.start(configLoader.serializedConfig());
    try {
      return await loaderHost.loadTestFiles([...testFiles], reporter);
    } finally {
      await loaderHost.stop();
    }
  }
  const loadErrors: LoadError[] = [];
  try {
    return await loadTestFilesInProcess(configLoader.fullConfig(), [...testFiles], loadErrors);
  } finally {
    errors.push(...loadErrors);
  }
}

function createFileMatcherFromFilter(filter: TestFileFilter) {
  const fileMatcher = createFileMatcher(filter.re || filter.exact || '');
  return (testFileName: string, testLine: number, testColumn: number) =>
    fileMatcher(testFileName) && (filter.line === testLine || filter.line === null) && (filter.column === testColumn || filter.column === null);
}

function filterByFocusedLine(suite: Suite, focusedTestFileLines: TestFileFilter[]) {
  if (!focusedTestFileLines.length)
    return;
  const matchers = focusedTestFileLines.map(createFileMatcherFromFilter);
  const testFileLineMatches = (testFileName: string, testLine: number, testColumn: number) => matchers.some(m => m(testFileName, testLine, testColumn));
  const suiteFilter = (suite: Suite) => !!suite.location && testFileLineMatches(suite.location.file, suite.location.line, suite.location.column);
  const testFilter = (test: TestCase) => testFileLineMatches(test.location.file, test.location.line, test.location.column);
  return filterSuite(suite, suiteFilter, testFilter);
}

function createForbidOnlyErrors(onlyTestsAndSuites: (TestCase | Suite)[]): TestError[] {
  const errors: TestError[] = [];
  for (const testOrSuite of onlyTestsAndSuites) {
    // Skip root and file.
    const title = testOrSuite.titlePath().slice(2).join(' ');
    const error: TestError = {
      message: `Error: focused item found in the --forbid-only mode: "${title}"`,
      location: testOrSuite.location!,
    };
    errors.push(error);
  }
  return errors;
}

function createDuplicateTitlesErrors(config: FullConfigInternal, rootSuite: Suite): TestError[] {
  const errors: TestError[] = [];
  for (const fileSuite of rootSuite.suites) {
    const testsByFullTitle = new Map<string, TestCase>();
    for (const test of fileSuite.allTests()) {
      const fullTitle = test.titlePath().slice(2).join(' › ');
      const existingTest = testsByFullTitle.get(fullTitle);
      if (existingTest) {
        const error: TestError = {
          message: `Error: duplicate test title "${fullTitle}", first declared in ${buildItemLocation(config.rootDir, existingTest)}`,
          location: test.location,
        };
        errors.push(error);
      }
      testsByFullTitle.set(fullTitle, test);
    }
  }
  return errors;
}

function buildItemLocation(rootDir: string, testOrSuite: Suite | TestCase) {
  if (!testOrSuite.location)
    return '';
  return `${path.relative(rootDir, testOrSuite.location.file)}:${testOrSuite.location.line}`;
}

packages/playwright-test/src/runner/projectUtils.ts (new file, 154 lines)
@@ -0,0 +1,154 @@
/**
 * Copyright Microsoft Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import fs from 'fs';
import path from 'path';
import { minimatch } from 'playwright-core/lib/utilsBundle';
import { promisify } from 'util';
import type { FullConfigInternal, FullProjectInternal } from '../types';
import type { TestFileFilter } from '../util';
import { createFileMatcher, createFileMatcherFromFilters } from '../util';

const readFileAsync = promisify(fs.readFile);
const readDirAsync = promisify(fs.readdir);

export function collectProjects(config: FullConfigInternal, projectNames?: string[]): FullProjectInternal[] {
  if (!projectNames)
    return [...config.projects];
  const projectsToFind = new Set<string>();
  const unknownProjects = new Map<string, string>();
  projectNames.forEach(n => {
    const name = n.toLocaleLowerCase();
    projectsToFind.add(name);
    unknownProjects.set(name, n);
  });
  const projects = config.projects.filter(project => {
    const name = project.name.toLocaleLowerCase();
    unknownProjects.delete(name);
    return projectsToFind.has(name);
  });
  if (unknownProjects.size) {
    const names = config.projects.map(p => p.name).filter(name => !!name);
    if (!names.length)
      throw new Error(`No named projects are specified in the configuration file`);
    const unknownProjectNames = Array.from(unknownProjects.values()).map(n => `"${n}"`).join(', ');
    throw new Error(`Project(s) ${unknownProjectNames} not found. Available named projects: ${names.map(name => `"${name}"`).join(', ')}`);
  }
  return projects;
}

export async function collectFilesForProjects(projects: FullProjectInternal[], commandLineFileFilters: TestFileFilter[]): Promise<Map<FullProjectInternal, string[]>> {
  const extensions = ['.js', '.ts', '.mjs', '.tsx', '.jsx'];
  const testFileExtension = (file: string) => extensions.includes(path.extname(file));
  const filesByProject = new Map<FullProjectInternal, string[]>();
  const fileToProjectName = new Map<string, string>();
  const commandLineFileMatcher = commandLineFileFilters.length ? createFileMatcherFromFilters(commandLineFileFilters) : () => true;
  for (const project of projects) {
    const allFiles = await collectFiles(project.testDir, project._respectGitIgnore);
    const testMatch = createFileMatcher(project.testMatch);
    const testIgnore = createFileMatcher(project.testIgnore);
    const testFiles = allFiles.filter(file => {
      if (!testFileExtension(file))
        return false;
      const isTest = !testIgnore(file) && testMatch(file) && commandLineFileMatcher(file);
      if (!isTest)
        return false;
      fileToProjectName.set(file, project.name);
      return true;
    });
    filesByProject.set(project, testFiles);
  }

  return filesByProject;
}

async function collectFiles(testDir: string, respectGitIgnore: boolean): Promise<string[]> {
  if (!fs.existsSync(testDir))
    return [];
  if (!fs.statSync(testDir).isDirectory())
    return [];

  type Rule = {
    dir: string;
    negate: boolean;
    match: (s: string, partial?: boolean) => boolean
  };
  type IgnoreStatus = 'ignored' | 'included' | 'ignored-but-recurse';

  const checkIgnores = (entryPath: string, rules: Rule[], isDirectory: boolean, parentStatus: IgnoreStatus) => {
    let status = parentStatus;
    for (const rule of rules) {
      const ruleIncludes = rule.negate;
      if ((status === 'included') === ruleIncludes)
        continue;
      const relative = path.relative(rule.dir, entryPath);
      if (rule.match('/' + relative) || rule.match(relative)) {
        // Matches "/dir/file" or "dir/file"
        status = ruleIncludes ? 'included' : 'ignored';
      } else if (isDirectory && (rule.match('/' + relative + '/') || rule.match(relative + '/'))) {
        // Matches "/dir/subdir/" or "dir/subdir/" for directories.
        status = ruleIncludes ? 'included' : 'ignored';
      } else if (isDirectory && ruleIncludes && (rule.match('/' + relative, true) || rule.match(relative, true))) {
        // Matches "/dir/donotskip/" when "/dir" is excluded, but "!/dir/donotskip/file" is included.
        status = 'ignored-but-recurse';
      }
    }
    return status;
  };

  const files: string[] = [];

  const visit = async (dir: string, rules: Rule[], status: IgnoreStatus) => {
    const entries = await readDirAsync(dir, { withFileTypes: true });
    entries.sort((a, b) => a.name.localeCompare(b.name));

    if (respectGitIgnore) {
      const gitignore = entries.find(e => e.isFile() && e.name === '.gitignore');
      if (gitignore) {
        const content = await readFileAsync(path.join(dir, gitignore.name), 'utf8');
        const newRules: Rule[] = content.split(/\r?\n/).map(s => {
          s = s.trim();
          if (!s)
            return;
          // Use flipNegate, because we handle negation ourselves.
          const rule = new minimatch.Minimatch(s, { matchBase: true, dot: true, flipNegate: true }) as any;
          if (rule.comment)
            return;
          rule.dir = dir;
          return rule;
        }).filter(rule => !!rule);
        rules = [...rules, ...newRules];
      }
    }

    for (const entry of entries) {
      if (entry.name === '.' || entry.name === '..')
        continue;
      if (entry.isFile() && entry.name === '.gitignore')
        continue;
      if (entry.isDirectory() && entry.name === 'node_modules')
        continue;
      const entryPath = path.join(dir, entry.name);
      const entryStatus = checkIgnores(entryPath, rules, entry.isDirectory(), status);
      if (entry.isDirectory() && entryStatus !== 'ignored')
        await visit(entryPath, rules, entryStatus);
      else if (entry.isFile() && entryStatus === 'included')
        files.push(entryPath);
    }
  };
  await visit(testDir, [], 'included');
  return files;
}

packages/playwright-test/src/runner/reporters.ts (new file, 111 lines)
@@ -0,0 +1,111 @@
/**
 * Copyright Microsoft Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import path from 'path';
import type { Reporter, TestError } from '../../types/testReporter';
import type { ConfigLoader } from '../configLoader';
import { formatError } from '../reporters/base';
import DotReporter from '../reporters/dot';
import EmptyReporter from '../reporters/empty';
import GitHubReporter from '../reporters/github';
import HtmlReporter from '../reporters/html';
import JSONReporter from '../reporters/json';
import JUnitReporter from '../reporters/junit';
import LineReporter from '../reporters/line';
import ListReporter from '../reporters/list';
import { Multiplexer } from '../reporters/multiplexer';
import type { Suite } from '../test';
import type { FullConfigInternal, ReporterDescription } from '../types';

export async function createReporter(configLoader: ConfigLoader, list: boolean) {
  const defaultReporters: {[key in BuiltInReporter]: new(arg: any) => Reporter} = {
    dot: list ? ListModeReporter : DotReporter,
    line: list ? ListModeReporter : LineReporter,
    list: list ? ListModeReporter : ListReporter,
    github: GitHubReporter,
    json: JSONReporter,
    junit: JUnitReporter,
    null: EmptyReporter,
    html: HtmlReporter,
  };
  const reporters: Reporter[] = [];
  for (const r of configLoader.fullConfig().reporter) {
    const [name, arg] = r;
    if (name in defaultReporters) {
      reporters.push(new defaultReporters[name as keyof typeof defaultReporters](arg));
    } else {
      const reporterConstructor = await configLoader.loadReporter(name);
      reporters.push(new reporterConstructor(arg));
    }
  }
  if (process.env.PW_TEST_REPORTER) {
    const reporterConstructor = await configLoader.loadReporter(process.env.PW_TEST_REPORTER);
    reporters.push(new reporterConstructor());
  }

  const someReporterPrintsToStdio = reporters.some(r => {
    const prints = r.printsToStdio ? r.printsToStdio() : true;
    return prints;
  });
  if (reporters.length && !someReporterPrintsToStdio) {
    // Add a line/dot/list-mode reporter for convenience.
// Important to put it first, jsut in case some other reporter stalls onEnd.
|
||||
if (list)
|
||||
reporters.unshift(new ListModeReporter());
|
||||
else
|
||||
reporters.unshift(!process.env.CI ? new LineReporter({ omitFailures: true }) : new DotReporter());
|
||||
}
|
||||
return new Multiplexer(reporters);
|
||||
}
|
||||
|
||||
export class ListModeReporter implements Reporter {
|
||||
private config!: FullConfigInternal;
|
||||
|
||||
onBegin(config: FullConfigInternal, suite: Suite): void {
|
||||
this.config = config;
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`Listing tests:`);
|
||||
const tests = suite.allTests();
|
||||
const files = new Set<string>();
|
||||
for (const test of tests) {
|
||||
// root, project, file, ...describes, test
|
||||
const [, projectName, , ...titles] = test.titlePath();
|
||||
const location = `${path.relative(config.rootDir, test.location.file)}:${test.location.line}:${test.location.column}`;
|
||||
const projectTitle = projectName ? `[${projectName}] › ` : '';
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(` ${projectTitle}${location} › ${titles.join(' ')}`);
|
||||
files.add(test.location.file);
|
||||
}
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`Total: ${tests.length} ${tests.length === 1 ? 'test' : 'tests'} in ${files.size} ${files.size === 1 ? 'file' : 'files'}`);
|
||||
}
|
||||
|
||||
onError(error: TestError) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.error('\n' + formatError(this.config, error, false).message);
|
||||
}
|
||||
}
|
||||
|
||||
export function toReporters(reporters: BuiltInReporter | ReporterDescription[] | undefined): ReporterDescription[] | undefined {
|
||||
if (!reporters)
|
||||
return;
|
||||
if (typeof reporters === 'string')
|
||||
return [[reporters]];
|
||||
return reporters;
|
||||
}
|
||||
|
||||
export const builtInReporters = ['list', 'line', 'dot', 'json', 'junit', 'null', 'github', 'html'] as const;
|
||||
export type BuiltInReporter = typeof builtInReporters[number];
|
||||
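For reference, a standalone sketch (not part of this commit; the names below are hypothetical stand-ins) of the normalization toReporters performs on the reporter config value — a bare built-in name such as 'dot' is wrapped into [['dot']], while arrays pass through unchanged:

// Illustrative only: mirrors the toReporters() logic above.
type ReporterDescriptionSketch = [string] | [string, unknown];

function toReportersSketch(reporters: string | ReporterDescriptionSketch[] | undefined): ReporterDescriptionSketch[] | undefined {
  if (!reporters)
    return;
  if (typeof reporters === 'string')
    return [[reporters]];
  return reporters;
}

console.log(toReportersSketch('dot'));  // [ [ 'dot' ] ]
const htmlConfig: ReporterDescriptionSketch[] = [['html', { open: 'never' }]];
console.log(toReportersSketch(htmlConfig));  // passed through as-is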
169 packages/playwright-test/src/runner/tasks.ts Normal file
@ -0,0 +1,169 @@
/**
 * Copyright Microsoft Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import fs from 'fs';
import path from 'path';
import { promisify } from 'util';
import { colors, rimraf } from 'playwright-core/lib/utilsBundle';
import type { ConfigLoader } from '../configLoader';
import { Dispatcher } from '../dispatcher';
import type { TestRunnerPlugin } from '../plugins';
import type { Multiplexer } from '../reporters/multiplexer';
import type { TestGroup } from '../runner/testGroups';
import { createTestGroups, filterForShard } from '../runner/testGroups';
import type { Task } from '../taskRunner';
import { TaskRunner } from '../taskRunner';
import type { Suite } from '../test';
import type { FullConfigInternal } from '../types';
import { loadAllTests } from './loadUtils';
import type { Matcher, TestFileFilter } from '../util';

const removeFolderAsync = promisify(rimraf);
const readDirAsync = promisify(fs.readdir);

type TaskRunnerOptions = {
  listOnly: boolean;
  testFileFilters: TestFileFilter[];
  testTitleMatcher: Matcher;
  projectFilter?: string[];
  passWithNoTests?: boolean;
};

export type TaskRunnerState = {
  options: TaskRunnerOptions;
  reporter: Multiplexer;
  config: FullConfigInternal;
  configLoader: ConfigLoader;
  rootSuite?: Suite;
  testGroups?: TestGroup[];
  dispatcher?: Dispatcher;
};

export function createTaskRunner(config: FullConfigInternal, reporter: Multiplexer, plugins: TestRunnerPlugin[], options: TaskRunnerOptions): TaskRunner<TaskRunnerState> {
  const taskRunner = new TaskRunner<TaskRunnerState>(reporter, config.globalTimeout);

  for (const plugin of plugins)
    taskRunner.addTask('plugin setup', createPluginSetupTask(plugin));
  if (config.globalSetup || config.globalTeardown)
    taskRunner.addTask('global setup', createGlobalSetupTask());
  taskRunner.addTask('load tests', createLoadTask());

  if (!options.listOnly) {
    taskRunner.addTask('prepare to run', createRemoveOutputDirsTask());
    taskRunner.addTask('plugin begin', async ({ rootSuite }) => {
      for (const plugin of plugins)
        await plugin.begin?.(rootSuite!);
    });
  }

  taskRunner.addTask('report begin', async ({ reporter, rootSuite }) => {
    reporter.onBegin?.(config, rootSuite!);
    return () => reporter.onEnd();
  });

  if (!options.listOnly) {
    taskRunner.addTask('setup workers', createSetupWorkersTask());
    taskRunner.addTask('test suite', async ({ dispatcher }) => dispatcher!.run());
  }

  return taskRunner;
}

export function createPluginSetupTask(plugin: TestRunnerPlugin): Task<TaskRunnerState> {
  return async ({ config, reporter }) => {
    await plugin.setup?.(config, config._configDir, reporter);
    return () => plugin.teardown?.();
  };
}

export function createGlobalSetupTask(): Task<TaskRunnerState> {
  return async ({ config, configLoader }) => {
    const setupHook = config.globalSetup ? await configLoader.loadGlobalHook(config.globalSetup) : undefined;
    const teardownHook = config.globalTeardown ? await configLoader.loadGlobalHook(config.globalTeardown) : undefined;
    const globalSetupResult = setupHook ? await setupHook(configLoader.fullConfig()) : undefined;
    return async () => {
      if (typeof globalSetupResult === 'function')
        await globalSetupResult();
      await teardownHook?.(config);
    };
  };
}

export function createSetupWorkersTask(): Task<TaskRunnerState> {
  return async params => {
    const { config, configLoader, testGroups, reporter } = params;
    if (config._ignoreSnapshots) {
      reporter.onStdOut(colors.dim([
        'NOTE: running with "ignoreSnapshots" option. All of the following asserts are silently ignored:',
        '- expect().toMatchSnapshot()',
        '- expect().toHaveScreenshot()',
        '',
      ].join('\n')));
    }

    const dispatcher = new Dispatcher(configLoader, testGroups!, reporter);
    params.dispatcher = dispatcher;
    return async () => {
      await dispatcher.stop();
    };
  };
}

export function createRemoveOutputDirsTask(): Task<TaskRunnerState> {
  return async ({ options, configLoader }) => {
    const config = configLoader.fullConfig();
    const outputDirs = new Set<string>();
    for (const p of config.projects) {
      if (!options.projectFilter || options.projectFilter.includes(p.name))
        outputDirs.add(p.outputDir);
    }

    await Promise.all(Array.from(outputDirs).map(outputDir => removeFolderAsync(outputDir).catch(async (error: any) => {
      if ((error as any).code === 'EBUSY') {
        // We failed to remove folder, might be due to the whole folder being mounted inside a container:
        // https://github.com/microsoft/playwright/issues/12106
        // Do a best-effort to remove all files inside of it instead.
        const entries = await readDirAsync(outputDir).catch(e => []);
        await Promise.all(entries.map(entry => removeFolderAsync(path.join(outputDir, entry))));
      } else {
        throw error;
      }
    })));
  };
}

function createLoadTask(): Task<TaskRunnerState> {
  return async (context, errors) => {
    const { config, reporter, options, configLoader } = context;
    const rootSuite = await loadAllTests(configLoader, reporter, options, errors);
    const testGroups = options.listOnly ? [] : createTestGroups(rootSuite.suites, config.workers);

    context.rootSuite = rootSuite;
    context.testGroups = testGroups;
    if (errors.length)
      return;

    // Fail when no tests.
    if (!rootSuite.allTests().length && !context.options.passWithNoTests)
      throw new Error(`No tests found`);

    if (!context.options.listOnly) {
      if (context.config.shard)
        filterForShard(context.config.shard, rootSuite, testGroups);
      context.config._maxConcurrentTestGroups = testGroups.length;
    }
  };
}
178 packages/playwright-test/src/runner/testGroups.ts Normal file
@ -0,0 +1,178 @@
/**
 * Copyright Microsoft Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { filterSuiteWithOnlySemantics } from '../suiteUtils';
import type { Suite, TestCase } from '../test';

export type TestGroup = {
  workerHash: string;
  requireFile: string;
  repeatEachIndex: number;
  projectId: string;
  tests: TestCase[];
};

export function createTestGroups(projectSuites: Suite[], workers: number): TestGroup[] {
  // This function groups tests that can be run together.
  // Tests cannot be run together when:
  // - They belong to different projects - requires different workers.
  // - They have a different repeatEachIndex - requires different workers.
  // - They have a different set of worker fixtures in the pool - requires different workers.
  // - They have a different requireFile - reuses the worker, but runs each requireFile separately.
  // - They belong to a parallel suite.

  // Using the map "workerHash -> requireFile -> group" makes us preserve the natural order
  // of worker hashes and require files for the simple cases.
  const groups = new Map<string, Map<string, {
    // Tests that must be run in order are in the same group.
    general: TestGroup,

    // There are 3 kinds of parallel tests:
    // - Tests belonging to parallel suites, without beforeAll/afterAll hooks.
    //   These can be run independently, they are put into their own group, key === test.
    // - Tests belonging to parallel suites, with beforeAll/afterAll hooks.
    //   These should share the worker as much as possible, put into single parallelWithHooks group.
    //   We'll divide them into equally-sized groups later.
    // - Tests belonging to serial suites inside parallel suites.
    //   These should run as a serial group, each group is independent, key === serial suite.
    parallel: Map<Suite | TestCase, TestGroup>,
    parallelWithHooks: TestGroup,
  }>>();

  const createGroup = (test: TestCase): TestGroup => {
    return {
      workerHash: test._workerHash,
      requireFile: test._requireFile,
      repeatEachIndex: test.repeatEachIndex,
      projectId: test._projectId,
      tests: [],
    };
  };

  for (const projectSuite of projectSuites) {
    for (const test of projectSuite.allTests()) {
      let withWorkerHash = groups.get(test._workerHash);
      if (!withWorkerHash) {
        withWorkerHash = new Map();
        groups.set(test._workerHash, withWorkerHash);
      }
      let withRequireFile = withWorkerHash.get(test._requireFile);
      if (!withRequireFile) {
        withRequireFile = {
          general: createGroup(test),
          parallel: new Map(),
          parallelWithHooks: createGroup(test),
        };
        withWorkerHash.set(test._requireFile, withRequireFile);
      }

      // Note that a parallel suite cannot be inside a serial suite. This is enforced in TestType.
      let insideParallel = false;
      let outerMostSerialSuite: Suite | undefined;
      let hasAllHooks = false;
      for (let parent: Suite | undefined = test.parent; parent; parent = parent.parent) {
        if (parent._parallelMode === 'serial')
          outerMostSerialSuite = parent;
        insideParallel = insideParallel || parent._parallelMode === 'parallel';
        hasAllHooks = hasAllHooks || parent._hooks.some(hook => hook.type === 'beforeAll' || hook.type === 'afterAll');
      }

      if (insideParallel) {
        if (hasAllHooks && !outerMostSerialSuite) {
          withRequireFile.parallelWithHooks.tests.push(test);
        } else {
          const key = outerMostSerialSuite || test;
          let group = withRequireFile.parallel.get(key);
          if (!group) {
            group = createGroup(test);
            withRequireFile.parallel.set(key, group);
          }
          group.tests.push(test);
        }
      } else {
        withRequireFile.general.tests.push(test);
      }
    }
  }

  const result: TestGroup[] = [];
  for (const withWorkerHash of groups.values()) {
    for (const withRequireFile of withWorkerHash.values()) {
      // Tests without parallel mode should run serially as a single group.
      if (withRequireFile.general.tests.length)
        result.push(withRequireFile.general);

      // Parallel test groups without beforeAll/afterAll can be run independently.
      result.push(...withRequireFile.parallel.values());

      // Tests with beforeAll/afterAll should try to share workers as much as possible.
      const parallelWithHooksGroupSize = Math.ceil(withRequireFile.parallelWithHooks.tests.length / workers);
      let lastGroup: TestGroup | undefined;
      for (const test of withRequireFile.parallelWithHooks.tests) {
        if (!lastGroup || lastGroup.tests.length >= parallelWithHooksGroupSize) {
          lastGroup = createGroup(test);
          result.push(lastGroup);
        }
        lastGroup.tests.push(test);
      }
    }
  }
  return result;
}

export async function filterForShard(shard: { total: number, current: number }, rootSuite: Suite, testGroups: TestGroup[]) {
  // Each shard includes:
  // - its portion of the regular tests
  // - project setup tests for the projects that have regular tests in this shard
  let shardableTotal = 0;
  for (const group of testGroups)
    shardableTotal += group.tests.length;

  const shardTests = new Set<TestCase>();

  // Each shard gets some tests.
  const shardSize = Math.floor(shardableTotal / shard.total);
  // First few shards get one more test each.
  const extraOne = shardableTotal - shardSize * shard.total;

  const currentShard = shard.current - 1; // Make it zero-based for calculations.
  const from = shardSize * currentShard + Math.min(extraOne, currentShard);
  const to = from + shardSize + (currentShard < extraOne ? 1 : 0);
  let current = 0;
  const shardProjects = new Set<string>();
  const shardTestGroups = [];
  for (const group of testGroups) {
    // Any test group goes to the shard that contains the first test of this group.
    // So, this shard gets any group that starts at [from; to)
    if (current >= from && current < to) {
      shardProjects.add(group.projectId);
      shardTestGroups.push(group);
      for (const test of group.tests)
        shardTests.add(test);
    }
    current += group.tests.length;
  }
  testGroups.length = 0;
  testGroups.push(...shardTestGroups);

  if (!shardTests.size) {
    // Filtering with "only semantics" does not work when we have zero tests - it leaves all the tests.
    // We need an empty suite in this case.
    rootSuite._entries = [];
  } else {
    filterSuiteWithOnlySemantics(rootSuite, () => false, test => shardTests.has(test));
  }
}
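Two worked examples (illustrative only, not part of this commit) of the arithmetic above. First, the parallelWithHooks split: 7 such tests on 3 workers give a group size of Math.ceil(7 / 3) = 3, i.e. groups of 3, 3 and 1. Second, the shard ranges: 10 shardable tests on 3 shards give shardSize = 3 and extraOne = 1, so the shards cover the half-open index ranges [0, 4), [4, 7) and [7, 10).

// Illustrative only: equally-sized chunks for parallelWithHooks tests.
const tests = ['t1', 't2', 't3', 't4', 't5', 't6', 't7'];
const workers = 3;
const groupSize = Math.ceil(tests.length / workers);  // 3
const chunks: string[][] = [];
let lastChunk: string[] | undefined;
for (const test of tests) {
  if (!lastChunk || lastChunk.length >= groupSize) {
    lastChunk = [];
    chunks.push(lastChunk);
  }
  lastChunk.push(test);
}
console.log(chunks.map(c => c.length));  // [ 3, 3, 1 ]

// Illustrative only: the [from, to) test-index range assigned to each shard.
const shardableTotal = 10;
const totalShards = 3;
for (let current = 1; current <= totalShards; current++) {
  const shardSize = Math.floor(shardableTotal / totalShards);  // 3
  const extraOne = shardableTotal - shardSize * totalShards;   // 1
  const zeroBased = current - 1;
  const from = shardSize * zeroBased + Math.min(extraOne, zeroBased);
  const to = from + shardSize + (zeroBased < extraOne ? 1 : 0);
  console.log(`shard ${current}/${totalShards}: [${from}, ${to})`);  // [0, 4), [4, 7), [7, 10)
}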
@ -16,8 +16,49 @@

import path from 'path';
import { calculateSha1 } from 'playwright-core/lib/utils';
import type { Suite, TestCase } from './test';
import type { TestCase } from './test';
import { Suite } from './test';
import type { FullProjectInternal } from './types';
import type { Matcher } from './util';
import { createTitleMatcher } from './util';

export async function createRootSuite(preprocessRoot: Suite, testTitleMatcher: Matcher, filesByProject: Map<FullProjectInternal, string[]>): Promise<Suite> {
  // Generate projects.
  const fileSuites = new Map<string, Suite>();
  for (const fileSuite of preprocessRoot.suites)
    fileSuites.set(fileSuite._requireFile, fileSuite);

  const rootSuite = new Suite('', 'root');
  for (const [project, files] of filesByProject) {
    const grepMatcher = createTitleMatcher(project.grep);
    const grepInvertMatcher = project.grepInvert ? createTitleMatcher(project.grepInvert) : null;

    const titleMatcher = (test: TestCase) => {
      const grepTitle = test.titlePath().join(' ');
      if (grepInvertMatcher?.(grepTitle))
        return false;
      return grepMatcher(grepTitle) && testTitleMatcher(grepTitle);
    };

    const projectSuite = new Suite(project.name, 'project');
    projectSuite._projectConfig = project;
    if (project._fullyParallel)
      projectSuite._parallelMode = 'parallel';
    rootSuite._addSuite(projectSuite);
    for (const file of files) {
      const fileSuite = fileSuites.get(file);
      if (!fileSuite)
        continue;
      for (let repeatEachIndex = 0; repeatEachIndex < project.repeatEach; repeatEachIndex++) {
        const builtSuite = buildFileSuiteForProject(project, fileSuite, repeatEachIndex);
        if (!filterTestsRemoveEmptySuites(builtSuite, titleMatcher))
          continue;
        projectSuite._addSuite(builtSuite);
      }
    }
  }
  return rootSuite;
}

export function filterSuite(suite: Suite, suiteFilter: (suites: Suite) => boolean, testFilter: (test: TestCase) => boolean) {
  for (const child of suite.suites) {
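A small self-contained sketch (illustrative; plain RegExp values stand in for createTitleMatcher, and the example titles are made up) of the grep/grepInvert filtering in createRootSuite above — a title is rejected when grepInvert matches, and otherwise kept only when grep matches:

// Illustrative only: grep keeps matching titles, grepInvert vetoes them.
const grep = /@smoke/;
const grepInvert: RegExp | null = /@flaky/;

function titleMatches(grepTitle: string): boolean {
  if (grepInvert?.test(grepTitle))
    return false;
  return grep.test(grepTitle);
}

console.log(titleMatches('chromium › login.spec.ts › signs in @smoke'));         // true
console.log(titleMatches('chromium › login.spec.ts › signs in @smoke @flaky'));  // false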
@ -21,10 +21,10 @@ import { SigIntWatcher } from './sigIntWatcher';
import { serializeError } from './util';

type TaskTeardown = () => Promise<any> | undefined;
type Task = (params: { errors: TestError[] }) => Promise<TaskTeardown | void> | undefined;
export type Task<Context> = (context: Context, errors: TestError[]) => Promise<TaskTeardown | void> | undefined;

export class TaskRunner {
  private _tasks: { name: string, task: Task }[] = [];
export class TaskRunner<Context> {
  private _tasks: { name: string, task: Task<Context> }[] = [];
  private _reporter: Reporter;
  private _hasErrors = false;
  private _interrupted = false;
@ -36,7 +36,7 @@ export class TaskRunner {
    this._globalTimeoutForError = globalTimeoutForError;
  }

  addTask(name: string, task: Task) {
  addTask(name: string, task: Task<Context>) {
    this._tasks.push({ name, task });
  }

@ -44,7 +44,7 @@ export class TaskRunner {
    this._interrupted = true;
  }

  async run(deadline: number): Promise<FullResult['status']> {
  async run(context: Context, deadline: number): Promise<FullResult['status']> {
    const sigintWatcher = new SigIntWatcher();
    const timeoutWatcher = new TimeoutWatcher(deadline);
    const teardownRunner = new TaskRunner(this._reporter, this._globalTimeoutForError);
@ -60,7 +60,7 @@ export class TaskRunner {
      debug('pw:test:task')(`"${name}" started`);
      const errors: TestError[] = [];
      try {
        const teardown = await task({ errors });
        const teardown = await task(context, errors);
        if (teardown)
          teardownRunner._tasks.unshift({ name: `teardown for ${name}`, task: teardown });
      } catch (e) {
@ -103,7 +103,7 @@ export class TaskRunner {
      sigintWatcher.disarm();
      timeoutWatcher.disarm();
      if (!this._isTearDown)
        await teardownRunner.run(deadline);
        await teardownRunner.run(context, deadline);
    }
  }
}
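A minimal, self-contained sketch (illustrative only; simplified types, not the real TaskRunner) of the context-passing pattern introduced above — each task receives the shared context plus an errors array and may return a teardown that runs afterwards, in reverse order:

// Illustrative only: tasks mutate a shared context and may return teardowns.
type DemoTeardown = () => Promise<void>;
type DemoTask<Context> = (context: Context, errors: Error[]) => Promise<DemoTeardown | void>;

type DemoContext = { log: string[] };

const loadTask: DemoTask<DemoContext> = async context => {
  context.log.push('load');
  return async () => { context.log.push('unload'); };
};

async function runDemo() {
  const context: DemoContext = { log: [] };
  const errors: Error[] = [];
  const teardowns: DemoTeardown[] = [];
  for (const task of [loadTask]) {
    const teardown = await task(context, errors);
    if (teardown)
      teardowns.unshift(teardown);  // later teardowns run first
  }
  for (const teardown of teardowns)
    await teardown();
  console.log(context.log);  // [ 'load', 'unload' ]
}

void runDemo();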
@ -14,7 +14,7 @@
 * limitations under the License.
 */

import type { TestGroup } from './dispatcher';
import type { TestGroup } from './runner/testGroups';
import type { RunPayload, SerializedConfig, WorkerInitParams } from './ipc';
import { ProcessHost } from './processHost';