Mirror of https://github.com/microsoft/playwright.git

chore: load tests file by file (#20557)

Parent: 635b47025e
Commit: a1cdd939f8
@@ -56,6 +56,8 @@ export type LoadError = {
location: Location;
};

export type LoadErrorSink = (error: LoadError) => void;

class Fixture {
runner: FixtureRunner;
registration: FixtureRegistration;
@@ -190,9 +192,9 @@ export function isFixtureOption(value: any): value is FixtureTuple {
export class FixturePool {
readonly digest: string;
readonly registrations: Map<string, FixtureRegistration>;
private _onLoadError: (error: LoadError) => void;
private _onLoadError: LoadErrorSink;

constructor(fixturesList: FixturesWithLocation[], onLoadError: (error: LoadError) => void, parentPool?: FixturePool, disallowWorkerFixtures?: boolean) {
constructor(fixturesList: FixturesWithLocation[], onLoadError: LoadErrorSink, parentPool?: FixturePool, disallowWorkerFixtures?: boolean) {
this.registrations = new Map(parentPool ? parentPool.registrations : []);
this._onLoadError = onLoadError;

@@ -429,7 +431,7 @@ function serializeAndThrowError(e: LoadError) {

const signatureSymbol = Symbol('signature');

function fixtureParameterNames(fn: Function | any, location: Location, onError: (error: LoadError) => void): string[] {
function fixtureParameterNames(fn: Function | any, location: Location, onError: LoadErrorSink): string[] {
if (typeof fn !== 'function')
return [];
if (!fn[signatureSymbol])
@@ -437,7 +439,7 @@ function fixtureParameterNames(fn: Function | any, location: Location, onError:
return fn[signatureSymbol];
}

function innerFixtureParameterNames(fn: Function, location: Location, onError: (error: LoadError) => void): string[] {
function innerFixtureParameterNames(fn: Function, location: Location, onError: LoadErrorSink): string[] {
const text = fn.toString();
const match = text.match(/(?:async)?(?:\s+function)?[^(]*\(([^)]*)/);
if (!match)
@@ -446,8 +448,10 @@ function innerFixtureParameterNames(fn: Function, location: Location, onError: (
if (!trimmedParams)
return [];
const [firstParam] = splitByComma(trimmedParams);
if (firstParam[0] !== '{' || firstParam[firstParam.length - 1] !== '}')
if (firstParam[0] !== '{' || firstParam[firstParam.length - 1] !== '}') {
onError({ message: 'First argument must use the object destructuring pattern: ' + firstParam, location });
return [];
}
const props = splitByComma(firstParam.substring(1, firstParam.length - 1)).map(prop => {
const colon = prop.indexOf(':');
return colon === -1 ? prop : prop.substring(0, colon).trim();
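Illustration (not part of the diff): the new LoadErrorSink alias lets callers decide what happens to fixture load errors. A minimal sketch of a sink that simply accumulates them, assuming both types are imported from this module:

    import type { LoadError, LoadErrorSink } from './fixtures';

    // Collect load errors instead of failing on the first one.
    const collected: LoadError[] = [];
    const sink: LoadErrorSink = error => collected.push(error);
    // Passing `sink` as the onLoadError argument of a FixturePool routes
    // every reported problem into `collected`.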
@@ -20,30 +20,29 @@ import type { Suite, TestCase } from './test';
import type { TestTypeImpl } from './testType';
import type { Fixtures, FixturesWithLocation, FullProjectInternal } from './types';
import { formatLocation } from '../util';
import type { TestError } from '../../reporter';

export class PoolBuilder {
private _project: FullProjectInternal | undefined;
private _testTypePools = new Map<TestTypeImpl, FixturePool>();
private _type: 'loader' | 'worker';
private _loadErrors: LoadError[] | undefined;

static buildForLoader(suite: Suite, loadErrors: LoadError[]) {
new PoolBuilder('loader', loadErrors).buildPools(suite);
static createForLoader() {
return new PoolBuilder('loader');
}

static createForWorker(project: FullProjectInternal) {
return new PoolBuilder('worker', undefined, project);
return new PoolBuilder('worker', project);
}

private constructor(type: 'loader' | 'worker', loadErrors?: LoadError[], project?: FullProjectInternal) {
private constructor(type: 'loader' | 'worker', project?: FullProjectInternal) {
this._type = type;
this._loadErrors = loadErrors;
this._project = project;
}

buildPools(suite: Suite) {
buildPools(suite: Suite, testErrors?: TestError[]) {
suite.forEachTest(test => {
const pool = this._buildPoolForTest(test);
const pool = this._buildPoolForTest(test, testErrors);
if (this._type === 'loader')
test._poolDigest = pool.digest;
if (this._type === 'worker')
@@ -51,8 +50,8 @@ export class PoolBuilder {
});
}

private _buildPoolForTest(test: TestCase): FixturePool {
let pool = this._buildTestTypePool(test._testType);
private _buildPoolForTest(test: TestCase, testErrors?: TestError[]): FixturePool {
let pool = this._buildTestTypePool(test._testType, testErrors);

const parents: Suite[] = [];
for (let parent: Suite | undefined = test.parent; parent; parent = parent.parent)
@@ -61,7 +60,7 @@ export class PoolBuilder {

for (const parent of parents) {
if (parent._use.length)
pool = new FixturePool(parent._use, e => this._onLoadError(e), pool, parent._type === 'describe');
pool = new FixturePool(parent._use, e => this._handleLoadError(e, testErrors), pool, parent._type === 'describe');
for (const hook of parent._hooks)
pool.validateFunction(hook.fn, hook.type + ' hook', hook.location);
for (const modifier of parent._modifiers)
@@ -72,18 +71,18 @@ export class PoolBuilder {
return pool;
}

private _buildTestTypePool(testType: TestTypeImpl): FixturePool {
private _buildTestTypePool(testType: TestTypeImpl, testErrors?: TestError[]): FixturePool {
if (!this._testTypePools.has(testType)) {
const fixtures = this._project ? this._applyConfigUseOptions(this._project, testType) : testType.fixtures;
const pool = new FixturePool(fixtures, e => this._onLoadError(e));
const pool = new FixturePool(fixtures, e => this._handleLoadError(e, testErrors));
this._testTypePools.set(testType, pool);
}
return this._testTypePools.get(testType)!;
}

private _onLoadError(e: LoadError): void {
if (this._loadErrors)
this._loadErrors.push(e);
private _handleLoadError(e: LoadError, testErrors?: TestError[]): void {
if (testErrors)
testErrors.push(e);
else
throw new Error(`${formatLocation(e.location)}: ${e.message}`);
}
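For illustration only (not in the patch): a sketch of how the reworked PoolBuilder API is driven, based on the signatures above; the suite value stands in for whatever root or file suite the caller has built.

    import { PoolBuilder } from './poolBuilder';
    import type { TestError } from '../../reporter';
    import type { Suite } from './test';

    // Loader side: pass a testErrors array so load problems are collected
    // rather than thrown, and pool digests get stamped onto each test.
    function buildLoaderPools(suite: Suite): TestError[] {
      const testErrors: TestError[] = [];
      PoolBuilder.createForLoader().buildPools(suite, testErrors);
      return testErrors;
    }

    // Worker side: no error array is passed, so _handleLoadError throws:
    //   PoolBuilder.createForWorker(project).buildPools(fileSuite);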
@@ -16,9 +16,7 @@

import path from 'path';
import type { TestError } from '../../reporter';
import type { LoadError } from './fixtures';
import { setCurrentlyLoadingFileSuite } from './globals';
import { PoolBuilder } from './poolBuilder';
import { Suite } from './test';
import { requireOrImport } from './transform';
import { serializeError } from '../util';
@@ -29,64 +27,44 @@ export const defaultTimeout = 30000;
// we make these maps global.
const cachedFileSuites = new Map<string, Suite>();

export class TestLoader {
private _rootDir: string;
export async function loadTestFile(file: string, rootDir: string, testErrors?: TestError[]): Promise<Suite> {
if (cachedFileSuites.has(file))
return cachedFileSuites.get(file)!;
const suite = new Suite(path.relative(rootDir, file) || path.basename(file), 'file');
suite._requireFile = file;
suite.location = { file, line: 0, column: 0 };

constructor(rootDir: string) {
this._rootDir = rootDir;
setCurrentlyLoadingFileSuite(suite);
try {
await requireOrImport(file);
cachedFileSuites.set(file, suite);
} catch (e) {
if (!testErrors)
throw e;
testErrors.push(serializeError(e));
} finally {
setCurrentlyLoadingFileSuite(undefined);
}

async loadTestFile(file: string, environment: 'loader' | 'worker', loadErrors: TestError[]): Promise<Suite> {
if (cachedFileSuites.has(file))
return cachedFileSuites.get(file)!;
const suite = new Suite(path.relative(this._rootDir, file) || path.basename(file), 'file');
suite._requireFile = file;
suite.location = { file, line: 0, column: 0 };
{
// Test locations that we discover potentially have different file name.
// This could be due to either
// a) use of source maps or due to
// b) require of one file from another.
// Try fixing (a) w/o regressing (b).

setCurrentlyLoadingFileSuite(suite);
try {
await requireOrImport(file);
cachedFileSuites.set(file, suite);
} catch (e) {
if (environment === 'worker')
throw e;
loadErrors.push(serializeError(e));
} finally {
setCurrentlyLoadingFileSuite(undefined);
}

{
// Test locations that we discover potentially have different file name.
// This could be due to either
// a) use of source maps or due to
// b) require of one file from another.
// Try fixing (a) w/o regressing (b).

const files = new Set<string>();
suite.allTests().map(t => files.add(t.location.file));
if (files.size === 1) {
// All tests point to one file.
const mappedFile = files.values().next().value;
if (suite.location.file !== mappedFile) {
// The file is different, check for a likely source map case.
if (path.extname(mappedFile) !== path.extname(suite.location.file))
suite.location.file = mappedFile;
}
const files = new Set<string>();
suite.allTests().map(t => files.add(t.location.file));
if (files.size === 1) {
// All tests point to one file.
const mappedFile = files.values().next().value;
if (suite.location.file !== mappedFile) {
// The file is different, check for a likely source map case.
if (path.extname(mappedFile) !== path.extname(suite.location.file))
suite.location.file = mappedFile;
}
}

return suite;
}
}

export async function loadTestFilesInProcess(rootDir: string, testFiles: string[], loadErrors: LoadError[]): Promise<Suite> {
const testLoader = new TestLoader(rootDir);
const rootSuite = new Suite('', 'root');
for (const file of testFiles) {
const fileSuite = await testLoader.loadTestFile(file, 'loader', loadErrors);
rootSuite._addSuite(fileSuite);
}
// Generate hashes.
PoolBuilder.buildForLoader(rootSuite, loadErrors);
return rootSuite;
return suite;
}
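A sketch, not part of the patch: calling the new standalone loadTestFile helper. With a testErrors array, a failure while requiring the file is serialized and collected; omitting the array, as the worker does, rethrows instead.

    import { loadTestFile } from './testLoader';
    import type { TestError } from '../../reporter';

    async function tryLoadOneFile(file: string, rootDir: string) {
      const testErrors: TestError[] = [];
      const fileSuite = await loadTestFile(file, rootDir, testErrors);  // collects, does not throw
      return { fileSuite, testErrors };
    }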
@@ -17,9 +17,9 @@
import type { SerializedConfig } from '../common/ipc';
import { ConfigLoader } from '../common/configLoader';
import { ProcessRunner } from '../common/process';
import { loadTestFilesInProcess } from '../common/testLoader';
import type { LoadError } from '../common/fixtures';
import type { FullConfigInternal } from '../common/types';
import { loadTestFile } from '../common/testLoader';
import type { TestError } from '../../reporter';

export class LoaderMain extends ProcessRunner {
private _serializedConfig: SerializedConfig;
@@ -36,11 +36,11 @@ export class LoaderMain extends ProcessRunner {
return this._configPromise;
}

async loadTestFiles(params: { files: string[] }) {
const loadErrors: LoadError[] = [];
async loadTestFile(params: { file: string }) {
const testErrors: TestError[] = [];
const config = await this._config();
const rootSuite = await loadTestFilesInProcess(config.rootDir, params.files, loadErrors);
return { rootSuite: rootSuite._deepSerialize(), loadErrors };
const rootSuite = await loadTestFile(params.file, config.rootDir, testErrors);
return { rootSuite: rootSuite._deepSerialize(), testErrors };
}
}
@@ -16,18 +16,15 @@

import path from 'path';
import type { Reporter, TestError } from '../../types/testReporter';
import type { LoadError } from '../common/fixtures';
import { LoaderHost } from './loaderHost';
import type { Multiplexer } from '../reporters/multiplexer';
import { InProcessLoaderHost, OutOfProcessLoaderHost } from './loaderHost';
import type { LoaderHost } from './loaderHost';
import { Suite } from '../common/test';
import type { TestCase } from '../common/test';
import { loadTestFilesInProcess } from '../common/testLoader';
import type { FullConfigInternal, FullProjectInternal } from '../common/types';
import { createFileMatcherFromFilters, createTitleMatcher, errorWithFile } from '../util';
import type { Matcher, TestFileFilter } from '../util';
import { collectFilesForProject, filterProjects, projectsThatAreDependencies } from './projectUtils';
import { requireOrImport } from '../common/transform';
import { serializeConfig } from '../common/ipc';
import { buildFileSuiteForProject, filterByFocusedLine, filterOnly, filterTestsRemoveEmptySuites } from '../common/suiteUtils';
import { filterForShard } from './testGroups';

@@ -39,17 +36,20 @@ type LoadOptions = {
passWithNoTests?: boolean;
};

export async function loadAllTests(config: FullConfigInternal, reporter: Multiplexer, options: LoadOptions, errors: TestError[]): Promise<Suite> {
export async function loadAllTests(config: FullConfigInternal, options: LoadOptions, errors: TestError[]): Promise<Suite> {
const projects = filterProjects(config.projects, options.projectFilter);

let filesToRunByProject = new Map<FullProjectInternal, string[]>();
let topLevelProjects: FullProjectInternal[];
let dependencyProjects: FullProjectInternal[];
// Collect files, categorize top level and dependency projects.
{
const fsCache = new Map();

// First collect all files for the projects in the command line, don't apply any file filters.
const allFilesForProject = new Map<FullProjectInternal, string[]>();
for (const project of projects) {
const files = await collectFilesForProject(project);
const files = await collectFilesForProject(project, fsCache);
allFilesForProject.set(project, files);
}

@@ -74,26 +74,34 @@ export async function loadAllTests(config: FullConfigInternal, reporter: Multipl

// (Re-)add all files for dependent projects, disregard filters.
for (const project of dependencyProjects) {
const files = allFilesForProject.get(project) || await collectFilesForProject(project);
const files = allFilesForProject.get(project) || await collectFilesForProject(project, fsCache);
filesToRunByProject.set(project, files);
}
}

// Load all test files and create a preprocessed root. Child suites are files there.
const allTestFiles = new Set<string>();
for (const files of filesToRunByProject.values())
files.forEach(file => allTestFiles.add(file));
const preprocessRoot = await loadTests(config, reporter, allTestFiles, errors);
const fileSuits: Suite[] = [];
{
const loaderHost: LoaderHost = process.env.PW_TEST_OOP_LOADER ? new OutOfProcessLoaderHost(config) : new InProcessLoaderHost(config);
const allTestFiles = new Set<string>();
for (const files of filesToRunByProject.values())
files.forEach(file => allTestFiles.add(file));
for (const file of allTestFiles) {
const fileSuite = await loaderHost.loadTestFile(file, errors);
fileSuits.push(fileSuite);
}
await loaderHost.stop();
}

// Complain about duplicate titles.
errors.push(...createDuplicateTitlesErrors(config, preprocessRoot));
errors.push(...createDuplicateTitlesErrors(config, fileSuits));

// Create root suites with clones for the projects.
const rootSuite = new Suite('', 'root');

// First iterate leaf projects to focus only, then add all other projects.
for (const project of topLevelProjects) {
const projectSuite = await createProjectSuite(preprocessRoot, project, options, filesToRunByProject.get(project)!);
const projectSuite = await createProjectSuite(fileSuits, project, options, filesToRunByProject.get(project)!);
if (projectSuite)
rootSuite._addSuite(projectSuite);
}
@@ -110,7 +118,7 @@ export async function loadAllTests(config: FullConfigInternal, reporter: Multipl

// Prepend the projects that are dependencies.
for (const project of dependencyProjects) {
const projectSuite = await createProjectSuite(preprocessRoot, project, { ...options, testFileFilters: [], testTitleMatcher: undefined }, filesToRunByProject.get(project)!);
const projectSuite = await createProjectSuite(fileSuits, project, { ...options, testFileFilters: [], testTitleMatcher: undefined }, filesToRunByProject.get(project)!);
if (projectSuite)
rootSuite._prependSuite(projectSuite);
}
@@ -118,17 +126,17 @@ export async function loadAllTests(config: FullConfigInternal, reporter: Multipl
return rootSuite;
}

async function createProjectSuite(preprocessRoot: Suite, project: FullProjectInternal, options: LoadOptions, files: string[]): Promise<Suite | null> {
const fileSuites = new Map<string, Suite>();
for (const fileSuite of preprocessRoot.suites)
fileSuites.set(fileSuite._requireFile, fileSuite);
async function createProjectSuite(fileSuits: Suite[], project: FullProjectInternal, options: LoadOptions, files: string[]): Promise<Suite | null> {
const fileSuitesMap = new Map<string, Suite>();
for (const fileSuite of fileSuits)
fileSuitesMap.set(fileSuite._requireFile, fileSuite);

const projectSuite = new Suite(project.name, 'project');
projectSuite._projectConfig = project;
if (project._fullyParallel)
projectSuite._parallelMode = 'parallel';
for (const file of files) {
const fileSuite = fileSuites.get(file);
const fileSuite = fileSuitesMap.get(file);
if (!fileSuite)
continue;
for (let repeatEachIndex = 0; repeatEachIndex < project.repeatEach; repeatEachIndex++) {
@@ -154,24 +162,6 @@ async function createProjectSuite(preprocessRoot: Suite, project: FullProjectInt
return null;
}

async function loadTests(config: FullConfigInternal, reporter: Multiplexer, testFiles: Set<string>, errors: TestError[]): Promise<Suite> {
if (process.env.PW_TEST_OOP_LOADER) {
const loaderHost = new LoaderHost();
await loaderHost.start(serializeConfig(config));
try {
return await loaderHost.loadTestFiles([...testFiles], reporter);
} finally {
await loaderHost.stop();
}
}
const loadErrors: LoadError[] = [];
try {
return await loadTestFilesInProcess(config.rootDir, [...testFiles], loadErrors);
} finally {
errors.push(...loadErrors);
}
}

function createForbidOnlyErrors(onlyTestsAndSuites: (TestCase | Suite)[]): TestError[] {
const errors: TestError[] = [];
for (const testOrSuite of onlyTestsAndSuites) {
@@ -186,12 +176,12 @@ function createForbidOnlyErrors(onlyTestsAndSuites: (TestCase | Suite)[]): TestE
return errors;
}

function createDuplicateTitlesErrors(config: FullConfigInternal, rootSuite: Suite): TestError[] {
function createDuplicateTitlesErrors(config: FullConfigInternal, fileSuites: Suite[]): TestError[] {
const errors: TestError[] = [];
for (const fileSuite of rootSuite.suites) {
for (const fileSuite of fileSuites) {
const testsByFullTitle = new Map<string, TestCase>();
for (const test of fileSuite.allTests()) {
const fullTitle = test.titlePath().slice(2).join(' › ');
const fullTitle = test.titlePath().slice(1).join(' › ');
const existingTest = testsByFullTitle.get(fullTitle);
if (existingTest) {
const error: TestError = {
@@ -14,23 +14,60 @@
* limitations under the License.
*/

import type { Reporter, TestError } from '../../reporter';
import type { SerializedConfig } from '../common/ipc';
import type { TestError } from '../../reporter';
import { serializeConfig } from '../common/ipc';
import { ProcessHost } from './processHost';
import { Suite } from '../common/test';
import { loadTestFile } from '../common/testLoader';
import type { LoadError } from '../common/fixtures';
import type { FullConfigInternal } from '../common/types';
import { PoolBuilder } from '../common/poolBuilder';

export class LoaderHost extends ProcessHost {
constructor() {
super(require.resolve('../loaderMain.js'), 'loader');
export abstract class LoaderHost {
protected _config: FullConfigInternal;
private _poolBuilder: PoolBuilder;

constructor(config: FullConfigInternal) {
this._config = config;
this._poolBuilder = PoolBuilder.createForLoader();
}

async start(config: SerializedConfig) {
await this.startRunner(config, true, {});
async loadTestFile(file: string, testErrors: TestError[]): Promise<Suite> {
const result = await this.doLoadTestFile(file, testErrors);
this._poolBuilder.buildPools(result, testErrors);
return result;
}

async loadTestFiles(files: string[], reporter: Reporter): Promise<Suite> {
const result = await this.sendMessage({ method: 'loadTestFiles', params: { files } }) as any;
result.loadErrors.forEach((e: TestError) => reporter.onError?.(e));
return Suite._deepParse(result.rootSuite);
protected abstract doLoadTestFile(file: string, testErrors: TestError[]): Promise<Suite>;

async stop() {}
}

export class InProcessLoaderHost extends LoaderHost {

doLoadTestFile(file: string, testErrors: TestError[]): Promise<Suite> {
return loadTestFile(file, this._config.rootDir, testErrors);
}
}

export class OutOfProcessLoaderHost extends LoaderHost {
private _startPromise: Promise<void>;
private _processHost: ProcessHost;

constructor(config: FullConfigInternal) {
super(config);
this._processHost = new ProcessHost(require.resolve('../loaderMain.js'), 'loader');
this._startPromise = this._processHost.startRunner(serializeConfig(config), true, {});
}

async doLoadTestFile(file: string, loadErrors: LoadError[]): Promise<Suite> {
await this._startPromise;
const result = await this._processHost.sendMessage({ method: 'loadTestFile', params: { file } }) as any;
loadErrors.push(...result.loadErrors);
return Suite._deepParse(result.fileSuite);
}

override async stop() {
await this._processHost.stop();
}
}
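As an aside (not in the patch): a sketch of driving either loader host, mirroring the loadAllTests loop shown earlier; PW_TEST_OOP_LOADER selects the out-of-process variant.

    import type { LoaderHost } from './loaderHost';
    import { InProcessLoaderHost, OutOfProcessLoaderHost } from './loaderHost';
    import type { TestError } from '../../reporter';
    import type { Suite } from '../common/test';
    import type { FullConfigInternal } from '../common/types';

    async function loadFileByFile(config: FullConfigInternal, files: string[]) {
      const host: LoaderHost = process.env.PW_TEST_OOP_LOADER
          ? new OutOfProcessLoaderHost(config)
          : new InProcessLoaderHost(config);
      const errors: TestError[] = [];
      const fileSuites: Suite[] = [];
      for (const file of files)
        fileSuites.push(await host.loadTestFile(file, errors));  // one file per round trip
      await host.stop();
      return { fileSuites, errors };
    }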
@@ -42,7 +42,7 @@ export class ProcessHost extends EventEmitter {
this._processName = processName;
}

protected async startRunner(runnerParams: any, inheritStdio: boolean, env: NodeJS.ProcessEnv) {
async startRunner(runnerParams: any, inheritStdio: boolean, env: NodeJS.ProcessEnv) {
this.process = child_process.fork(require.resolve('../common/process'), {
detached: false,
env: { ...process.env, ...env },
@@ -104,7 +104,7 @@ export class ProcessHost extends EventEmitter {
});
}

protected sendMessage(message: { method: string, params?: any }) {
sendMessage(message: { method: string, params?: any }) {
const id = ++this._lastMessageId;
this.send({
method: '__dispatch__',
@@ -66,10 +66,10 @@ export function projectsThatAreDependencies(projects: FullProjectInternal[]): Fu
return [...result];
}

export async function collectFilesForProject(project: FullProjectInternal): Promise<string[]> {
export async function collectFilesForProject(project: FullProjectInternal, fsCache = new Map<string, string[]>()): Promise<string[]> {
const extensions = ['.js', '.ts', '.mjs', '.tsx', '.jsx'];
const testFileExtension = (file: string) => extensions.includes(path.extname(file));
const allFiles = await collectFiles(project.testDir, project._respectGitIgnore);
const allFiles = await cachedCollectFiles(project.testDir, project._respectGitIgnore, fsCache);
const testMatch = createFileMatcher(project.testMatch);
const testIgnore = createFileMatcher(project.testIgnore);
const testFiles = allFiles.filter(file => {
@@ -83,6 +83,16 @@ export async function collectFilesForProject(project: FullProjectInternal): Prom
return testFiles;
}

async function cachedCollectFiles(testDir: string, respectGitIgnore: boolean, fsCache: Map<string, string[]>) {
const key = testDir + ':' + respectGitIgnore;
let result = fsCache.get(key);
if (!result) {
result = await collectFiles(testDir, respectGitIgnore);
fsCache.set(key, result);
}
return result;
}

async function collectFiles(testDir: string, respectGitIgnore: boolean): Promise<string[]> {
if (!fs.existsSync(testDir))
return [];
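Sketch (not from the patch) of the intent behind the new fsCache parameter: sharing one Map across projects means each (testDir, respectGitIgnore) pair hits the filesystem only once, which is how loadAllTests now calls it.

    import { collectFilesForProject } from './projectUtils';
    import type { FullProjectInternal } from '../common/types';

    async function collectAllFiles(projects: FullProjectInternal[]) {
      const fsCache = new Map<string, string[]>();  // shared directory-walk cache
      const filesByProject = new Map<FullProjectInternal, string[]>();
      for (const project of projects)
        filesByProject.set(project, await collectFilesForProject(project, fsCache));
      return filesByProject;
    }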
@@ -144,8 +144,8 @@ function createRemoveOutputDirsTask(): Task<TaskRunnerState> {

function createLoadTask(): Task<TaskRunnerState> {
return async (context, errors) => {
const { config, reporter, options } = context;
context.rootSuite = await loadAllTests(config, reporter, options, errors);
const { config, options } = context;
context.rootSuite = await loadAllTests(config, options, errors);
// Fail when no tests.
if (!context.rootSuite.allTests().length && !context.options.passWithNoTests && !config.shard)
throw new Error(`No tests found`);
@@ -28,7 +28,7 @@ import { TestInfoImpl } from '../common/testInfo';
import type { TimeSlot } from '../common/timeoutManager';
import { TimeoutManager } from '../common/timeoutManager';
import { ProcessRunner } from '../common/process';
import { TestLoader } from '../common/testLoader';
import { loadTestFile } from '../common/testLoader';
import { buildFileSuiteForProject, filterTestsRemoveEmptySuites } from '../common/suiteUtils';
import { PoolBuilder } from '../common/poolBuilder';

@@ -37,7 +37,6 @@ const removeFolderAsync = util.promisify(rimraf);
export class WorkerMain extends ProcessRunner {
private _params: WorkerInitParams;
private _config!: FullConfigInternal;
private _testLoader!: TestLoader;
private _project!: FullProjectInternal;
private _poolBuilder!: PoolBuilder;
private _fixtureRunner: FixtureRunner;
@@ -195,7 +194,6 @@ export class WorkerMain extends ProcessRunner {

const configLoader = await ConfigLoader.deserialize(this._params.config);
this._config = configLoader.fullConfig();
this._testLoader = new TestLoader(this._config.rootDir);
this._project = this._config.projects.find(p => p._id === this._params.projectId)!;
this._poolBuilder = PoolBuilder.createForWorker(this._project);
}
@@ -206,7 +204,7 @@ export class WorkerMain extends ProcessRunner {
let fatalUnknownTestIds;
try {
await this._loadIfNeeded();
const fileSuite = await this._testLoader.loadTestFile(runPayload.file, 'worker', []);
const fileSuite = await loadTestFile(runPayload.file, this._config.rootDir);
const suite = buildFileSuiteForProject(this._project, fileSuite, this._params.repeatEachIndex);
const hasEntries = filterTestsRemoveEmptySuites(suite, test => entries.has(test.id));
if (hasEntries) {