Mirror of https://github.com/microsoft/playwright.git, synced 2025-06-26 21:40:17 +00:00
test: unflake some tests (#27354)
parent d136b0aeb6
commit 81694b7401
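This change touches two areas. In the client, ChannelOwner and Connection are simplified so that the channel type is derived from the channel owner instead of being passed with every call, and the call stack is handed to tracing before the command message goes out. In the tests, the proxy test 'should exclude patterns' and the tracing test 'should not include trace resources from the previous chunks' are restructured to remove the sources of flakiness, as described next to the hunks below.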
@@ -147,7 +147,7 @@ export abstract class ChannelOwner<T extends channels.Channel = channels.Channel
        apiZone.reported = true;
        if (csi && stackTrace && stackTrace.apiName)
          csi.onApiCallBegin(stackTrace.apiName, params, stackTrace, wallTime, callCookie);
        return this._connection.sendMessageToServer(this, this._type, prop, validator(params, '', { tChannelImpl: tChannelImplToWire, binary: this._connection.isRemote() ? 'toBase64' : 'buffer' }), stackTrace, wallTime);
        return this._connection.sendMessageToServer(this, prop, validator(params, '', { tChannelImpl: tChannelImplToWire, binary: this._connection.isRemote() ? 'toBase64' : 'buffer' }), stackTrace, wallTime);
      });
    };
  }
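In ChannelOwner, the generated channel method no longer passes this._type to Connection.sendMessageToServer: of the two sendMessageToServer lines above, the first is the old call and the second is its replacement. The connection already receives the owner itself, so it can look the type up on its own, as the next hunks show.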
@@ -113,7 +113,7 @@ export class Connection extends EventEmitter {
    this._tracingCount--;
  }

  async sendMessageToServer(object: ChannelOwner, type: string, method: string, params: any, stackTrace: ParsedStackTrace | null, wallTime: number | undefined): Promise<any> {
  async sendMessageToServer(object: ChannelOwner, method: string, params: any, stackTrace: ParsedStackTrace | null, wallTime: number | undefined): Promise<any> {
    if (this._closedErrorMessage)
      throw new Error(this._closedErrorMessage);
    if (object._wasCollected)

@@ -121,15 +121,16 @@ export class Connection extends EventEmitter {

    const { apiName, frames } = stackTrace || { apiName: '', frames: [] };
    const guid = object._guid;
    const type = object._type;
    const id = ++this._lastId;
    const converted = { id, guid, method, params };
    // Do not include metadata in debug logs to avoid noise.
    debugLogger.log('channel:command', converted);
    const location = frames[0] ? { file: frames[0].file, line: frames[0].line, column: frames[0].column } : undefined;
    const metadata: channels.Metadata = { wallTime, apiName, location, internal: !apiName };
    this.onmessage({ ...converted, metadata });
    if (this._tracingCount && frames && type !== 'LocalUtils')
      this._localUtils?._channel.addStackToTracingNoReply({ callData: { stack: frames, id } }).catch(() => {});
    this.onmessage({ ...converted, metadata });
    return await new Promise((resolve, reject) => this._callbacks.set(id, { resolve, reject, stackTrace, type, method }));
  }

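sendMessageToServer drops its type parameter; the two signatures above are the before and after. Inside the method, const type = object._type; recovers the same value from the channel owner, and the this.onmessage(...) call moves below the addStackToTracingNoReply call, so the stack for the API call is handed to the tracing side before the command itself is emitted. A minimal sketch of that ordering follows; Frame, FakeOwner and FakeConnection are made-up stand-ins for illustration, not playwright-core types.

// Hypothetical stand-ins to illustrate the pattern; not the real playwright-core classes.
type Frame = { file: string; line: number; column: number };
type FakeOwner = { _guid: string; _type: string };

class FakeConnection {
  private _lastId = 0;
  private _tracingCount = 1;  // pretend tracing is active
  readonly sentMessages: { id: number; guid: string; method: string; params: any }[] = [];
  readonly tracedStacks: { id: number; stack: Frame[] }[] = [];

  sendMessageToServer(object: FakeOwner, method: string, params: any, frames: Frame[]): number {
    const id = ++this._lastId;
    const type = object._type;  // derived from the owner instead of a separate parameter
    if (this._tracingCount && frames.length && type !== 'LocalUtils')
      this.tracedStacks.push({ id, stack: frames });               // the stack reaches tracing first...
    this.sentMessages.push({ id, guid: object._guid, method, params });  // ...then the command goes out
    return id;
  }
}

const connection = new FakeConnection();
connection.sendMessageToServer(
    { _guid: 'page@1', _type: 'Page' }, 'click', { selector: 'button' },
    [{ file: 'a.spec.ts', line: 10, column: 5 }]);
console.log(connection.tracedStacks.length, connection.sentMessages.length); // 1 1

A fresh id is still allocated per call, and the LocalUtils check keeps the tracing channel's own traffic out of the recorded stacks.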
@@ -319,7 +319,7 @@ it('should isolate proxy credentials between contexts', async ({ contextFactory,
  }
});

it('should exclude patterns', async ({ contextFactory, server, browserName, headless, proxyServer }) => {
it('should exclude patterns', async ({ contextFactory, server, proxyServer }) => {
  proxyServer.forwardTo(server.PORT);
  // FYI: using long and weird domain names to avoid ATT DNS hijacking
  // that resolves everything to some weird search results page.
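The first it(...) line is the old signature of the test, the second is the new one: the browserName and headless fixtures are gone, which goes together with dropping the browser-specific 'wait for the error page to commit' branches shown in the next hunk.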
@@ -329,56 +329,53 @@ it('should exclude patterns', async ({ contextFactory, server, browserName, head
    proxy: { server: `localhost:${proxyServer.PORT}`, bypass: '1.non.existent.domain.for.the.test, 2.non.existent.domain.for.the.test, .another.test' }
  });

  const page = await context.newPage();
  await page.goto('http://0.non.existent.domain.for.the.test/target.html');
  expect(proxyServer.requestUrls).toContain('http://0.non.existent.domain.for.the.test/target.html');
  expect(await page.title()).toBe('Served by the proxy');
  proxyServer.requestUrls = [];

  const nonFaviconUrls = () => {
    return proxyServer.requestUrls.filter(u => !u.includes('favicon'));
  };

  {
    proxyServer.requestUrls = [];
    const page = await context.newPage();
    await page.goto('http://0.non.existent.domain.for.the.test/target.html');
    expect(proxyServer.requestUrls).toContain('http://0.non.existent.domain.for.the.test/target.html');
    expect(await page.title()).toBe('Served by the proxy');
    await page.close();
  }

  {
    proxyServer.requestUrls = [];
    const page = await context.newPage();
    const error = await page.goto('http://1.non.existent.domain.for.the.test/target.html').catch(e => e);
    expect(nonFaviconUrls()).toEqual([]);
    expect(error.message).toBeTruthy();

    // Make sure error page commits.
    if (browserName === 'chromium')
      await page.waitForURL('chrome-error://chromewebdata/');
    else if (browserName === 'firefox')
      await page.waitForURL('http://1.non.existent.domain.for.the.test/target.html', { waitUntil: 'commit' });
    await page.close();
  }

  {
    proxyServer.requestUrls = [];
    const page = await context.newPage();
    const error = await page.goto('http://2.non.existent.domain.for.the.test/target.html').catch(e => e);
    expect(nonFaviconUrls()).toEqual([]);
    expect(error.message).toBeTruthy();

    // Make sure error page commits.
    if (browserName === 'chromium')
      await page.waitForURL('chrome-error://chromewebdata/');
    else if (browserName === 'firefox')
      await page.waitForURL('http://2.non.existent.domain.for.the.test/target.html', { waitUntil: 'commit' });
    await page.close();
  }

  {
    proxyServer.requestUrls = [];
    const page = await context.newPage();
    const error = await page.goto('http://foo.is.the.another.test/target.html').catch(e => e);
    expect(nonFaviconUrls()).toEqual([]);
    expect(error.message).toBeTruthy();

    // Make sure error page commits.
    if (browserName === 'chromium')
      await page.waitForURL('chrome-error://chromewebdata/');
    else if (browserName === 'firefox')
      await page.waitForURL('http://foo.is.the.another.test/target.html', { waitUntil: 'commit' });
    await page.close();
  }

  {
    proxyServer.requestUrls = [];
    const page = await context.newPage();
    await page.goto('http://3.non.existent.domain.for.the.test/target.html');
    expect(nonFaviconUrls()).toContain('http://3.non.existent.domain.for.the.test/target.html');
    expect(await page.title()).toBe('Served by the proxy');
    await page.close();
  }

  await context.close();
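Instead of driving one shared page through every URL, the test now runs each scenario in its own block that resets proxyServer.requestUrls, opens a fresh page and closes it at the end; the browserName-specific waitForURL branches belong to the removed version. Assertions about bypassed hosts go through the new nonFaviconUrls() helper, so a stray favicon request can no longer fail the "nothing was proxied" check. For reference, here is a sketch of the bypass semantics the expectations rely on; bypassesProxy is a hypothetical helper written for illustration from my reading of the test, not playwright's implementation.

// Plain entries match the exact host; entries starting with '.' match any host with that suffix.
function bypassesProxy(host: string, bypass: string): boolean {
  return bypass.split(',')
      .map(entry => entry.trim().toLowerCase())
      .filter(entry => entry.length > 0)
      .some(entry => entry.startsWith('.')
        ? host.toLowerCase().endsWith(entry)
        : host.toLowerCase() === entry);
}

const bypass = '1.non.existent.domain.for.the.test, 2.non.existent.domain.for.the.test, .another.test';
console.log(bypassesProxy('0.non.existent.domain.for.the.test', bypass)); // false, goes through the proxy
console.log(bypassesProxy('1.non.existent.domain.for.the.test', bypass)); // true, bypassed
console.log(bypassesProxy('foo.is.the.another.test', bypass));            // true, bypassed via the '.another.test' suffix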
@@ -237,14 +237,24 @@ test('should respect tracesDir and name', async ({ browserType, server, mode },
});

test('should not include trace resources from the previous chunks', async ({ context, page, server, browserName, mode }, testInfo) => {
  test.skip(browserName !== 'chromium', 'The number of screenshots is flaky in non-Chromium');
  test.skip(mode.startsWith('service'), 'The number of screenshots is flaky');
  await context.tracing.start({ screenshots: true, snapshots: true, sources: true });

  await context.tracing.startChunk();
  await page.goto(server.EMPTY_PAGE);
  await page.setContent('<button>Click</button>');
  await page.click('"Click"');
  await page.setContent(`
    <style>
      @keyframes move {
        from { margin-left: 0; }
        to { margin-left: 1000px; }
      }
      button {
        animation: 20s linear move;
        animation-iteration-count: infinite;
      }
    </style>
    <button>Click</button>
  `);
  await page.click('"Click"', { force: true });
  // Give it enough time for both screenshots to get into the trace.
  await new Promise(f => setTimeout(f, 3000));
  await context.tracing.stopChunk({ path: testInfo.outputPath('trace1.zip') });
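The screenshot-counting test gains a skip for service modes (test.skip(mode.startsWith('service'), ...)), and most of the other added lines are the second setContent with a button that runs an infinite 20-second CSS animation. The constant repainting presumably keeps the screencast producing frames, and the explicit 3-second pause gives both screenshots time to land in the first chunk before stopChunk is called.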
@@ -252,11 +262,13 @@ test('should not include trace resources from the previous chunks', async ({ con
  await context.tracing.startChunk();
  await context.tracing.stopChunk({ path: testInfo.outputPath('trace2.zip') });

  let jpegs: string[] = [];
  {
    const { resources } = await parseTraceRaw(testInfo.outputPath('trace1.zip'));
    const names = Array.from(resources.keys());
    expect(names.filter(n => n.endsWith('.html')).length).toBe(1);
    expect(names.filter(n => n.endsWith('.jpeg')).length).toBeGreaterThan(0);
    jpegs = names.filter(n => n.endsWith('.jpeg'));
    expect(jpegs.length).toBeGreaterThan(0);
    // 1 source file for the test.
    expect(names.filter(n => n.endsWith('.txt')).length).toBe(1);
  }
@@ -266,8 +278,9 @@ test('should not include trace resources from the previous chunks', async ({ con
    const names = Array.from(resources.keys());
    // 1 network resource should be preserved.
    expect(names.filter(n => n.endsWith('.html')).length).toBe(1);
    expect(names.filter(n => n.endsWith('.jpeg')).length).toBe(0);
    // 0 source file for the second test.
    // screenshots from the previous chunk should not be preserved.
    expect(names.filter(n => jpegs.includes(n)).length).toBe(0);
    // 0 source files for the second test.
    expect(names.filter(n => n.endsWith('.txt')).length).toBe(0);
  }
});
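The assertions change in two steps. The first chunk's block now records which .jpeg screenshot names it saw (the added let jpegs and jpegs = names.filter(...) lines), and the second chunk's block, judging by that bookkeeping, no longer requires zero jpegs overall but only that none of the first chunk's screenshots reappear in it, which tolerates screenshots the second chunk legitimately records for itself. A minimal sketch of that leak check follows; chunk1Resources and chunk2Resources are made-up stand-ins for Array.from(resources.keys()) of each trace.

// Made-up resource listings standing in for the names read from each trace zip.
const chunk1Resources = ['page@1.jpeg', 'page@2.jpeg', 'index.html', 'source.txt'];
const chunk2Resources = ['index.html', 'page@3.jpeg'];  // a screenshot of its own is fine

const jpegs = chunk1Resources.filter(n => n.endsWith('.jpeg'));

// What must never happen: a screenshot recorded for chunk 1 shows up again in chunk 2.
const leaked = chunk2Resources.filter(n => jpegs.includes(n));
console.log(leaked); // []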