mirror of
https://github.com/mendableai/firecrawl.git
synced 2025-06-27 00:41:33 +00:00
Add created_at field to /crawl/active endpoint response
- Updated OngoingCrawlsResponse type to include created_at field
- Modified ongoingCrawlsController to return ISO timestamp from createdAt
- Added comprehensive tests for created_at field validation
- Updated requests.http with test endpoint for /crawl/active
- Tested endpoint manually and confirmed working correctly

Co-Authored-By: rafael@sideguide.dev <rafael@sideguide.dev>
This commit is contained in:
parent
9a5d40c3cf
commit
1b5bebde0f
@ -126,3 +126,8 @@ content-type: application/json
|
||||
"query": "firecrawl",
|
||||
"limit": 50
|
||||
}
|
||||
|
||||
### Get Active Crawls
|
||||
# @name activeCrawls
|
||||
GET {{baseUrl}}/v1/crawl/active HTTP/1.1
|
||||
Authorization: Bearer {{$dotenv TEST_API_KEY}}
|
||||
|
@ -75,6 +75,54 @@ describe("Crawl tests", () => {
|
||||
|
||||
expect(ongoing2.crawls.find(x => x.id === res.id)).toBeUndefined();
|
||||
}, 3 * scrapeTimeout);
|
||||
|
||||
it.concurrent("ongoing crawls endpoint includes created_at field", async () => {
|
||||
const res = await asyncCrawl({
|
||||
url: "https://firecrawl.dev",
|
||||
limit: 3,
|
||||
}, identity);
|
||||
|
||||
const ongoing = await crawlOngoing(identity);
|
||||
const crawlItem = ongoing.crawls.find(x => x.id === res.id);
|
||||
|
||||
expect(crawlItem).toBeDefined();
|
||||
if (crawlItem) {
|
||||
expect(crawlItem.created_at).toBeDefined();
|
||||
expect(typeof crawlItem.created_at).toBe("string");
|
||||
|
||||
const createdAtDate = new Date(crawlItem.created_at);
|
||||
expect(createdAtDate).toBeInstanceOf(Date);
|
||||
expect(createdAtDate.getTime()).not.toBeNaN();
|
||||
|
||||
expect(crawlItem.created_at).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/);
|
||||
}
|
||||
|
||||
await asyncCrawlWaitForFinish(res.id, identity);
|
||||
}, 3 * scrapeTimeout);
|
||||
|
||||
it.concurrent("created_at timestamp is recent for new crawls", async () => {
|
||||
const beforeCrawl = new Date();
|
||||
|
||||
const res = await asyncCrawl({
|
||||
url: "https://firecrawl.dev",
|
||||
limit: 3,
|
||||
}, identity);
|
||||
|
||||
const ongoing = await crawlOngoing(identity);
|
||||
const afterCrawl = new Date();
|
||||
|
||||
const crawlItem = ongoing.crawls.find(x => x.id === res.id);
|
||||
|
||||
expect(crawlItem).toBeDefined();
|
||||
if (crawlItem) {
|
||||
const createdAt = new Date(crawlItem.created_at);
|
||||
|
||||
expect(createdAt.getTime()).toBeGreaterThanOrEqual(beforeCrawl.getTime() - 1000);
|
||||
expect(createdAt.getTime()).toBeLessThanOrEqual(afterCrawl.getTime() + 1000);
|
||||
}
|
||||
|
||||
await asyncCrawlWaitForFinish(res.id, identity);
|
||||
}, 3 * scrapeTimeout);
|
||||
|
||||
// TEMP: Flaky
|
||||
// it.concurrent("discovers URLs properly when origin is not included", async () => {
|
||||
|
@ -25,6 +25,7 @@ export async function ongoingCrawlsController(
|
||||
id: x.id,
|
||||
teamId: x.team_id!,
|
||||
url: x.originUrl!,
|
||||
created_at: new Date(x.createdAt || Date.now()).toISOString(),
|
||||
options: {
|
||||
...toNewCrawlerOptions(x.crawlerOptions),
|
||||
scrapeOptions: x.scrapeOptions,
|
||||
|
@ -937,6 +937,7 @@ export type OngoingCrawlsResponse =
|
||||
id: string;
|
||||
teamId: string;
|
||||
url: string;
|
||||
created_at: string;
|
||||
options: CrawlerOptions;
|
||||
}[];
|
||||
};
|
||||
|
Loading…
x
Reference in New Issue
Block a user