diff --git a/README.md b/README.md
index 28563762..1f36aca6 100644
--- a/README.md
+++ b/README.md
@@ -22,7 +22,7 @@ Use the [Crawl4AI GPT Assistant](https://tinyurl.com/crawl4ai-gpt) as your AI-po
 
 ## New in 0.3.73 ✨
 - 🐳 Docker Ready: Full API server with seamless deployment & scaling
-- 🎯 Smart Browser: Managed browser integration with CDP support
+- 🎯 Browser Takeover: Use your own browser with cookies & history intact (CDP support)
 - 📝 Mockdown+: Enhanced tag preservation & content extraction
 - ⚡️ Parallel Power: Supercharged multi-URL crawling performance
 - 🌟 And many more exciting updates...
diff --git a/main.py b/main.py
index 3e32fe9c..853cd0b7 100644
--- a/main.py
+++ b/main.py
@@ -62,6 +62,7 @@ class CrawlRequest(BaseModel):
     css_selector: Optional[str] = None
     screenshot: bool = False
     magic: bool = False
+    extra: Optional[Dict[str, Any]] = {}
 
 @dataclass
 class TaskInfo:
diff --git a/tests/test_docker.py b/tests/test_docker.py
index 913450ca..c22acd55 100644
--- a/tests/test_docker.py
+++ b/tests/test_docker.py
@@ -7,7 +7,7 @@ import os
 from typing import Dict, Any
 
 class Crawl4AiTester:
-    def __init__(self, base_url: str = "http://localhost:8000"):
+    def __init__(self, base_url: str = "http://localhost:11235"):
         self.base_url = base_url
 
     def submit_and_wait(self, request_data: Dict[str, Any], timeout: int = 300) -> Dict[str, Any]:
@@ -54,8 +54,9 @@ def test_docker_deployment(version="basic"):
 
     # Test cases based on version
     test_basic_crawl(tester)
-    if version in ["full", "transformer"]:
-        test_cosine_extraction(tester)
+
+    # if version in ["full", "transformer"]:
+    #     test_cosine_extraction(tester)
 
     # test_js_execution(tester)
     # test_css_selector(tester)