fix(tests): derive timeouts from shared scrapeTimeout constant

commit 2aa4995016
parent f9678b00ae
Author: Gergő Móricz
Date: 2025-06-23 21:36:07 +02:00

4 changed files with 17 additions and 17 deletions

View File

@@ -1,4 +1,4 @@
-import { batchScrape } from "./lib";
+import { batchScrape, scrapeTimeout } from "./lib";
 
 describe("Batch scrape tests", () => {
   it.concurrent("works", async () => {
@@ -8,7 +8,7 @@ describe("Batch scrape tests", () => {
     expect(response.data[0]).toHaveProperty("markdown");
     expect(response.data[0].markdown).toContain("Firecrawl");
-  }, 180000);
+  }, scrapeTimeout);
 
   if (!process.env.TEST_SUITE_SELF_HOSTED) {
     describe("JSON format", () => {
@@ -55,5 +55,5 @@ describe("Batch scrape tests", () => {
       });
       expect(response.data[0].metadata.sourceURL).toBe("https://firecrawl.dev/?pagewanted=all&et_blog");
-    }, 35000);
+    }, scrapeTimeout);
   });
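Note on the pattern: Jest accepts a per-test timeout in milliseconds as the third argument to it/it.concurrent, and that argument is what these hunks replace. A minimal sketch of the resulting shape, assuming a batchScrape call signature that this commit does not show:

import { batchScrape, scrapeTimeout } from "./lib";

it.concurrent("works", async () => {
  const response = await batchScrape({ urls: ["https://firecrawl.dev"] }); // assumed call shape
  expect(response.data[0]).toHaveProperty("markdown");
}, scrapeTimeout); // shared constant instead of a hardcoded 180000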

View File

@@ -1,4 +1,4 @@
-import { asyncCrawl, asyncCrawlWaitForFinish, crawl, crawlOngoing } from "./lib";
+import { asyncCrawl, asyncCrawlWaitForFinish, crawl, crawlOngoing, scrapeTimeout } from "./lib";
 import { describe, it, expect } from "@jest/globals";
 
 describe("Crawl tests", () => {
@@ -7,7 +7,7 @@ describe("Crawl tests", () => {
       url: "https://firecrawl.dev",
       limit: 10,
     });
-  }, 120000);
+  }, 10 * scrapeTimeout);
 
   it.concurrent("filters URLs properly", async () => {
     const res = await crawl({
@@ -24,7 +24,7 @@ describe("Crawl tests", () => {
         expect(url.pathname).toMatch(/^\/pricing$/);
       }
     }
-  }, 120000);
+  }, 10 * scrapeTimeout);
 
   it.concurrent("filters URLs properly when using regexOnFullURL", async () => {
     const res = await crawl({
@@ -39,7 +39,7 @@ describe("Crawl tests", () => {
       expect(res.completed).toBe(1);
       expect(res.data[0].metadata.sourceURL).toBe("https://firecrawl.dev/pricing");
     }
-  }, 120000);
+  }, 10 * scrapeTimeout);
 
   it.concurrent("delay parameter works", async () => {
     await crawl({
@@ -47,7 +47,7 @@ describe("Crawl tests", () => {
       limit: 3,
       delay: 5,
     });
-  }, 300000);
+  }, 3 * scrapeTimeout + 3 * 5000);
 
   it.concurrent("ongoing crawls endpoint works", async () => {
     const res = await asyncCrawl({
@@ -64,7 +64,7 @@ describe("Crawl tests", () => {
     const ongoing2 = await crawlOngoing();
     expect(ongoing2.crawls.find(x => x.id === res.id)).toBeUndefined();
-  }, 120000);
+  }, 3 * scrapeTimeout);
 
   // TEMP: Flaky
   // it.concurrent("discovers URLs properly when origin is not included", async () => {
@@ -112,7 +112,7 @@ describe("Crawl tests", () => {
     if (res.success) {
       expect(res.completed).toBeGreaterThan(0);
     }
-  }, 120000);
+  }, 5 * scrapeTimeout);
 
   it.concurrent("crawlEntireDomain takes precedence over allowBackwardLinks", async () => {
     const res = await crawl({
@@ -126,7 +126,7 @@ describe("Crawl tests", () => {
     if (res.success) {
       expect(res.completed).toBeGreaterThan(0);
     }
-  }, 120000);
+  }, 5 * scrapeTimeout);
 
   it.concurrent("backward compatibility - allowBackwardLinks still works", async () => {
     const res = await crawl({
@@ -139,5 +139,5 @@ describe("Crawl tests", () => {
     if (res.success) {
       expect(res.completed).toBeGreaterThan(0);
     }
-  }, 120000);
+  }, 5 * scrapeTimeout);
 });
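The multipliers budget each crawl test by its expected page count: the limit: 10 crawls get 10 * scrapeTimeout, and the delay test gets 3 * scrapeTimeout + 3 * 5000 because delay: 5 adds five seconds per page across its 3-page crawl. A hypothetical helper, not part of the commit, that captures the same arithmetic:

// Hypothetical helper illustrating the budgeting rule above; not in the commit.
const crawlTimeout = (pages: number, delaySeconds = 0): number =>
  pages * scrapeTimeout + pages * delaySeconds * 1000;

crawlTimeout(10);   // 10 * 75000           = 750000 ms
crawlTimeout(3, 5); // 3 * 75000 + 3 * 5000 = 240000 ms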

View File

@@ -18,6 +18,10 @@ export const defaultIdentity: Identity = {
   apiKey: process.env.TEST_API_KEY!,
 };
 
+// Due to the limited resources of the CI runner, we need to set a longer timeout for the many many scrape tests
+export const scrapeTimeout = 75000;
+export const indexCooldown = 30000;
+
 // =========================================
 // Scrape API
 // =========================================
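Centralizing the two constants in the shared test lib gives every suite a single knob: raising scrapeTimeout once rescales every derived budget, such as the 10 * scrapeTimeout crawl tests above. A sketch of the exports with their intent spelled out; the stated purpose of indexCooldown is an assumption, since the commit only shows its definition:

// Single source of truth for test timing across suites.
export const scrapeTimeout = 75000;  // base budget for one scrape; raise once if CI slows down
export const indexCooldown = 30000;  // assumed: settle time before index-backed assertions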

View File

@@ -1,10 +1,6 @@
-import { scrape, scrapeStatus, scrapeWithFailure } from "./lib";
+import { scrape, scrapeStatus, scrapeWithFailure, scrapeTimeout, indexCooldown } from "./lib";
 import crypto from "crypto";
 
-// Due to the limited resources of the CI runner, we need to set a longer timeout for the many many scrape tests
-const scrapeTimeout = 75000;
-const indexCooldown = 30000;
-
 describe("Scrape tests", () => {
   it.concurrent("mocking works properly", async () => {
     // depends on falsified mock mocking-works-properly
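With the local copies deleted, the scrape suite's top matter reduces to the shared imports, so the values can no longer drift between suites:

import { scrape, scrapeStatus, scrapeWithFailure, scrapeTimeout, indexCooldown } from "./lib";
import crypto from "crypto";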