diff --git a/crawl4ai/async_crawler_strategy.py b/crawl4ai/async_crawler_strategy.py
index df23c43e..3f332eb0 100644
--- a/crawl4ai/async_crawler_strategy.py
+++ b/crawl4ai/async_crawler_strategy.py
@@ -68,8 +68,7 @@ class ManagedBrowser:
                 stderr=subprocess.PIPE
             )
             # Monitor browser process output for errors
-            loop = asyncio.get_event_loop()
-            loop.create_task(self._monitor_browser_process())
+            asyncio.create_task(self._monitor_browser_process())
             await asyncio.sleep(2)  # Give browser time to start
             return f"http://localhost:{self.debugging_port}"
         except Exception as e:
diff --git a/crawl4ai/scraper/bfs_scraper_strategy.py b/crawl4ai/scraper/bfs_scraper_strategy.py
index 73a4f8ae..3a6d09a5 100644
--- a/crawl4ai/scraper/bfs_scraper_strategy.py
+++ b/crawl4ai/scraper/bfs_scraper_strategy.py
@@ -264,8 +264,7 @@ class BFSScraperStrategy(ScraperStrategy):
             self.stats.current_depth = depth
 
             if parallel_processing:
-                loop = asyncio.get_event_loop()
-                task = loop.create_task(
+                task = asyncio.create_task(
                     self.process_url(url, depth, crawler, queue, visited, depths)
                 )
                 pending_tasks.add(task)
diff --git a/main.py b/main.py
index bc5dfe7e..6d217410 100644
--- a/main.py
+++ b/main.py
@@ -125,8 +125,7 @@ class TaskManager:
         self.cleanup_task = None
 
     async def start(self):
-        loop = asyncio.get_event_loop()
-        self.cleanup_task = loop.create_task(self._cleanup_loop())
+        self.cleanup_task = asyncio.create_task(self._cleanup_loop())
 
     async def stop(self):
         if self.cleanup_task:
@@ -232,8 +231,7 @@ class CrawlerService:
 
     async def start(self):
         await self.task_manager.start()
-        loop = asyncio.get_event_loop()
-        self._processing_task = loop.create_task(self._process_queue())
+        self._processing_task = asyncio.create_task(self._process_queue())
 
     async def stop(self):
         if self._processing_task: