feat(api): implement crawler pool manager for improved resource handling

Adds a new CrawlerManager class to handle browser instance pooling and failover:
- Implements auto-scaling based on system resources
- Adds primary/backup crawler management
- Integrates memory monitoring and throttling
- Adds streaming support with memory tracking
- Updates API endpoints to use pooled crawlers

BREAKING CHANGE: API endpoints now require CrawlerManager initialization
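
For illustration, a minimal migration sketch of what endpoint wiring might look like after this change. CrawlerManager's actual interface lives in another file of this commit, so the import path, constructor and get_crawler() context manager below are assumptions; handle_crawl_request and its new crawler argument come from the diff below.

    # Migration sketch only: CrawlerManager's real API is defined elsewhere in this
    # commit; only handle_crawl_request's new signature is taken from this diff.
    from fastapi import FastAPI

    from api import handle_crawl_request       # updated in this commit
    from crawler_pool import CrawlerManager    # assumed module / class name

    config = {"crawler": {"memory_threshold_percent": 90.0}}  # abbreviated service config
    app = FastAPI()
    manager = CrawlerManager()                 # assumed constructor; pool sized from system resources

    @app.post("/crawl")
    async def crawl(payload: dict):
        # Endpoints no longer build or tear down an AsyncWebCrawler per request;
        # the pool hands out a started instance and reclaims it afterwards.
        async with manager.get_crawler() as crawler:   # assumed pool API
            return await handle_crawl_request(
                crawler=crawler,
                urls=payload["urls"],
                browser_config=payload.get("browser_config", {}),
                crawler_config=payload.get("crawler_config", {}),
                config=config,
            )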
Author: UncleCode
Date:   2025-04-18 22:26:24 +08:00
Parent: 907cba194f
Commit: 16b2318242
9 changed files with 2082 additions and 59 deletions


@@ -40,8 +40,19 @@ from utils import (
     decode_redis_hash
 )
 
+import psutil, time
 
 logger = logging.getLogger(__name__)
 
+# --- Helper to get memory ---
+def _get_memory_mb():
+    try:
+        return psutil.Process().memory_info().rss / (1024 * 1024)
+    except Exception as e:
+        logger.warning(f"Could not get memory info: {e}")
+        return None
+
 async def handle_llm_qa(
     url: str,
     query: str,
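
psutil reports the resident set size of the whole API process, not of a single request, which is why the handlers below log a before/after delta around each crawl rather than an absolute figure. A standalone illustration of that delta pattern (values will vary by machine):

    import psutil

    def get_memory_mb():
        # process-wide resident set size, in MB
        return psutil.Process().memory_info().rss / (1024 * 1024)

    before = get_memory_mb()
    blob = bytearray(50 * 1024 * 1024)        # allocate ~50 MB so the delta is visible
    after = get_memory_mb()
    print(f"delta: {after - before:.1f} MB")  # roughly +50 MB on most platforms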
@@ -351,7 +362,9 @@ async def stream_results(crawler: AsyncWebCrawler, results_gen: AsyncGenerator)
     try:
         async for result in results_gen:
             try:
+                server_memory_mb = _get_memory_mb()
                 result_dict = result.model_dump()
+                result_dict['server_memory_mb'] = server_memory_mb
                 logger.info(f"Streaming result for {result_dict.get('url', 'unknown')}")
                 data = json.dumps(result_dict, default=datetime_handler) + "\n"
                 yield data.encode('utf-8')
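
Each streamed line is a standalone JSON object (NDJSON) that now carries server_memory_mb next to the crawl result. A client-side sketch; the endpoint path, port and request body shape are assumptions, while the line format matches the generator above:

    import asyncio, json
    import httpx

    async def consume(urls):
        payload = {"urls": urls}    # request shape assumed
        async with httpx.AsyncClient(timeout=None) as client:
            async with client.stream("POST", "http://localhost:11235/crawl/stream",
                                     json=payload) as response:
                async for line in response.aiter_lines():
                    if not line:
                        continue
                    result = json.loads(line)
                    # server_memory_mb is attached to every result by stream_results()
                    print(result.get("url"), result.get("server_memory_mb"))

    asyncio.run(consume(["https://example.com"]))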
@@ -364,19 +377,25 @@ async def stream_results(crawler: AsyncWebCrawler, results_gen: AsyncGenerator)
     except asyncio.CancelledError:
         logger.warning("Client disconnected during streaming")
-    finally:
-        try:
-            await crawler.close()
-        except Exception as e:
-            logger.error(f"Crawler cleanup error: {e}")
+    # finally:
+    #     try:
+    #         await crawler.close()
+    #     except Exception as e:
+    #         logger.error(f"Crawler cleanup error: {e}")
 
 async def handle_crawl_request(
+    crawler: AsyncWebCrawler,
     urls: List[str],
     browser_config: dict,
     crawler_config: dict,
     config: dict
 ) -> dict:
     """Handle non-streaming crawl requests."""
+    start_mem_mb = _get_memory_mb() # <--- Get memory before
+    start_time = time.time()
+    mem_delta_mb = None
+    peak_mem_mb = start_mem_mb
     try:
         browser_config = BrowserConfig.load(browser_config)
         crawler_config = CrawlerRunConfig.load(crawler_config)
@@ -388,31 +407,63 @@ async def handle_crawl_request(
             )
         )
-        crawler: AsyncWebCrawler = AsyncWebCrawler(config=browser_config)
-        await crawler.start()
+        # crawler: AsyncWebCrawler = AsyncWebCrawler(config=browser_config)
+        # await crawler.start()
 
         results = []
         func = getattr(crawler, "arun" if len(urls) == 1 else "arun_many")
         partial_func = partial(func,
                                urls[0] if len(urls) == 1 else urls,
                                config=crawler_config,
                                dispatcher=dispatcher)
+
+        # Simulate work being done by the crawler
+        # logger.debug(f"Request (URLs: {len(urls)}) starting simulated work...") # Add log
+        # await asyncio.sleep(2) # <--- ADD ARTIFICIAL DELAY (e.g., 0.5 seconds)
+        # logger.debug(f"Request (URLs: {len(urls)}) finished simulated work.")
+
         results = await partial_func()
-        await crawler.close()
+        # await crawler.close()
 
+        end_mem_mb = _get_memory_mb() # <--- Get memory after
+        end_time = time.time()
+        if start_mem_mb is not None and end_mem_mb is not None:
+            mem_delta_mb = end_mem_mb - start_mem_mb # <--- Calculate delta
+            peak_mem_mb = max(peak_mem_mb if peak_mem_mb else 0, end_mem_mb) # <--- Get peak memory
+        logger.info(f"Memory usage: Start: {start_mem_mb} MB, End: {end_mem_mb} MB, Delta: {mem_delta_mb} MB, Peak: {peak_mem_mb} MB")
+
         return {
             "success": True,
-            "results": [result.model_dump() for result in results]
+            "results": [result.model_dump() for result in results],
+            "server_processing_time_s": end_time - start_time,
+            "server_memory_delta_mb": mem_delta_mb,
+            "server_peak_memory_mb": peak_mem_mb
         }
 
     except Exception as e:
         logger.error(f"Crawl error: {str(e)}", exc_info=True)
-        if 'crawler' in locals():
-            await crawler.close()
+        # if 'crawler' in locals() and crawler.ready: # Check if crawler was initialized and started
+        #     try:
+        #         await crawler.close()
+        #     except Exception as close_e:
+        #         logger.error(f"Error closing crawler during exception handling: {close_e}")
+
+        # Measure memory even on error if possible
+        end_mem_mb_error = _get_memory_mb()
+        if start_mem_mb is not None and end_mem_mb_error is not None:
+            mem_delta_mb = end_mem_mb_error - start_mem_mb
+
         raise HTTPException(
             status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=str(e)
+            detail=json.dumps({ # Send structured error
+                "error": str(e),
+                "server_memory_delta_mb": mem_delta_mb,
+                "server_peak_memory_mb": max(peak_mem_mb if peak_mem_mb else 0, end_mem_mb_error or 0)
+            })
         )
 
 async def handle_stream_crawl_request(
+    crawler: AsyncWebCrawler,
     urls: List[str],
     browser_config: dict,
     crawler_config: dict,
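
With the block above, a successful non-streaming response carries timing and memory telemetry next to the results. Note that server_peak_memory_mb is only the larger of the start and end readings, since memory is sampled at the request boundaries rather than continuously. Keys below come from the return statement; values are illustrative:

    # Illustrative success body (values invented)
    {
        "success": True,
        "results": [{"url": "https://example.com", "success": True}],  # abridged model_dump() output
        "server_processing_time_s": 3.42,
        "server_memory_delta_mb": 118.5,
        "server_peak_memory_mb": 742.0
    }

On failure, the HTTP 500 detail is itself a json.dumps() string holding error, server_memory_delta_mb and server_peak_memory_mb, so clients should expect to parse JSON nested inside FastAPI's standard {"detail": ...} error body.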
@@ -421,9 +472,11 @@ async def handle_stream_crawl_request(
"""Handle streaming crawl requests."""
try:
browser_config = BrowserConfig.load(browser_config)
browser_config.verbose = True
# browser_config.verbose = True # Set to False or remove for production stress testing
browser_config.verbose = False
crawler_config = CrawlerRunConfig.load(crawler_config)
crawler_config.scraping_strategy = LXMLWebScrapingStrategy()
crawler_config.stream = True
dispatcher = MemoryAdaptiveDispatcher(
memory_threshold_percent=config["crawler"]["memory_threshold_percent"],
@@ -432,8 +485,8 @@ async def handle_stream_crawl_request(
             )
         )
-        crawler = AsyncWebCrawler(config=browser_config)
-        await crawler.start()
+        # crawler = AsyncWebCrawler(config=browser_config)
+        # await crawler.start()
 
         results_gen = await crawler.arun_many(
             urls=urls,
@@ -441,12 +494,19 @@ async def handle_stream_crawl_request(
             dispatcher=dispatcher
         )
 
+        # Return the *same* crawler instance and the generator
+        # The caller (server.py) manages the crawler lifecycle via the pool context
         return crawler, results_gen
 
     except Exception as e:
-        if 'crawler' in locals():
-            await crawler.close()
+        # Make sure to close crawler if started during an error here
+        # if 'crawler' in locals() and crawler.ready:
+        #     try:
+        #         await crawler.close()
+        #     except Exception as close_e:
+        #         logger.error(f"Error closing crawler during stream setup exception: {close_e}")
         logger.error(f"Stream crawl error: {str(e)}", exc_info=True)
+        # Raising HTTPException here will prevent streaming response
         raise HTTPException(
             status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
             detail=str(e)
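
The comments above state that server.py now owns the crawler's lifecycle through the pool, so the stream handler returns the pooled instance instead of closing it. A sketch of what that caller side might look like; the manager name and its acquire()/release() methods are assumptions (the real pool code is elsewhere in this commit), and explicit release is used because the crawler must stay alive until the streaming generator finishes, which rules out a simple with-block around the return:

    from fastapi import FastAPI
    from fastapi.responses import StreamingResponse

    from api import handle_stream_crawl_request, stream_results   # functions from this file
    from crawler_pool import CrawlerManager                        # assumed module / class name

    app = FastAPI()
    manager = CrawlerManager()                      # assumed; built once at startup
    config = {"crawler": {"memory_threshold_percent": 90.0}}  # abbreviated service config

    @app.post("/crawl/stream")
    async def crawl_stream(payload: dict):
        crawler = await manager.acquire()           # assumed pool API
        try:
            crawler, results_gen = await handle_stream_crawl_request(
                crawler=crawler,
                urls=payload["urls"],
                browser_config=payload.get("browser_config", {}),
                crawler_config=payload.get("crawler_config", {}),
                config=config,
            )
        except Exception:
            await manager.release(crawler)          # assumed; hand back on setup failure
            raise

        async def body():
            try:
                async for chunk in stream_results(crawler, results_gen):
                    yield chunk
            finally:
                await manager.release(crawler)      # returned to the pool, not closed here

        return StreamingResponse(body(), media_type="application/x-ndjson")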