feat(api): refactor crawl request handling to streamline single and multiple URL processing

Author: UncleCode
Date:   2025-03-13 22:30:38 +08:00
parent b750542e6d
commit 6e3c048328


@@ -2,6 +2,7 @@ import os
 import json
 import asyncio
 from typing import List, Tuple
+from functools import partial
 import logging
 from typing import Optional, AsyncGenerator
@@ -389,19 +390,9 @@ async def handle_crawl_request(
     async with AsyncWebCrawler(config=browser_config) as crawler:
         results = []
-        if len(urls) == 1:
-            results = await crawler.arun(
-                url=urls[0],
-                config=crawler_config,
-                dispatcher=dispatcher
-            )
-        else:
-            results = await crawler.arun_many(
-                urls=urls,
-                config=crawler_config,
-                dispatcher=dispatcher
-            )
+        func = getattr(crawler, "arun" if len(urls) == 1 else "arun_many")
+        partial_func = partial(func, urls[0] if len(urls) == 1 else urls, config=crawler_config, dispatcher=dispatcher)
+        results = await partial_func()
         return {
             "success": True,
             "results": [result.model_dump() for result in results]