feat(api): refactor crawl request handling to streamline single and multiple URL processing

UncleCode 2025-03-13 22:30:38 +08:00
parent b750542e6d
commit 6e3c048328

@@ -2,6 +2,7 @@ import os
 import json
 import asyncio
 from typing import List, Tuple
+from functools import partial
 import logging
 from typing import Optional, AsyncGenerator
@@ -389,19 +390,9 @@ async def handle_crawl_request(
     async with AsyncWebCrawler(config=browser_config) as crawler:
         results = []
-        if len(urls) == 1:
-            results = await crawler.arun(
-                url=urls[0],
-                config=crawler_config,
-                dispatcher=dispatcher
-            )
-        else:
-            results = await crawler.arun_many(
-                urls=urls,
-                config=crawler_config,
-                dispatcher=dispatcher
-            )
+        func = getattr(crawler, "arun" if len(urls) == 1 else "arun_many")
+        partial_func = partial(func, urls[0] if len(urls) == 1 else urls, config=crawler_config, dispatcher=dispatcher)
+        results = await partial_func()
         return {
             "success": True,
             "results": [result.model_dump() for result in results]