Mirror of https://github.com/unclecode/crawl4ai.git, synced 2026-01-05 23:10:49 +00:00
feat(api): refactor crawl request handling to streamline single and multiple URL processing
parent b750542e6d
commit 6e3c048328
@@ -2,6 +2,7 @@ import os
 import json
 import asyncio
 from typing import List, Tuple
+from functools import partial
 
 import logging
 from typing import Optional, AsyncGenerator
@@ -389,19 +390,9 @@ async def handle_crawl_request(
 
     async with AsyncWebCrawler(config=browser_config) as crawler:
-        results = []
-        if len(urls) == 1:
-            results = await crawler.arun(
-                url=urls[0],
-                config=crawler_config,
-                dispatcher=dispatcher
-            )
-        else:
-            results = await crawler.arun_many(
-                urls=urls,
-                config=crawler_config,
-                dispatcher=dispatcher
-            )
+        func = getattr(crawler, "arun" if len(urls) == 1 else "arun_many")
+        partial_func = partial(func, urls[0] if len(urls) == 1 else urls, config=crawler_config, dispatcher=dispatcher)
+        results = await partial_func()
 
         return {
             "success": True,
             "results": [result.model_dump() for result in results]
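
The refactor collapses the duplicated if/else branches into a single call site: getattr() selects arun or arun_many by name, and functools.partial binds the URL argument (a single URL or the whole list) together with the shared config and dispatcher, leaving one await. A minimal, self-contained sketch of that dispatch pattern; DummyCrawler and its return values are illustrative stand-ins, not crawl4ai's actual AsyncWebCrawler API, and only the dispatch logic mirrors the diff above:

# Sketch of the getattr()/partial() dispatch pattern from this commit.
# DummyCrawler is a hypothetical stand-in for illustration only.
import asyncio
from functools import partial
from typing import List


class DummyCrawler:
    async def arun(self, url: str, config=None, dispatcher=None) -> List[str]:
        # Single-URL crawl; returns a list so callers can iterate uniformly.
        return [f"crawled {url}"]

    async def arun_many(self, urls: List[str], config=None, dispatcher=None) -> List[str]:
        # Multi-URL crawl.
        return [f"crawled {u}" for u in urls]


async def handle(urls: List[str]) -> List[str]:
    crawler = DummyCrawler()
    # Pick the method by name: one URL -> arun, several -> arun_many.
    func = getattr(crawler, "arun" if len(urls) == 1 else "arun_many")
    # Bind the positional argument plus any shared keyword arguments;
    # the await below is then identical for both cases.
    partial_func = partial(func, urls[0] if len(urls) == 1 else urls)
    return await partial_func()


print(asyncio.run(handle(["https://example.com"])))
print(asyncio.run(handle(["https://a.example", "https://b.example"])))

Note that the pattern relies on both methods returning an iterable of results: the shared model_dump() loop in the return statement iterates over results regardless of which method ran, just as the pre-refactor code already did in both branches.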