Compare commits
11 Commits
v0.7.6...docker/fix
| Author | SHA1 | Date |
|---|---|---|
| | 05ec0535cd | |
| | 83aeb565ee | |
| | c0f1865287 | |
| | 46ef1116c4 | |
| | 4df83893ac | |
| | 13e116610d | |
| | b74524fdfb | |
| | bcac486921 | |
| | 6aef5a120f | |
| | 7cac008c10 | |
| | 97c92c4f62 | |
@@ -785,6 +785,54 @@ curl http://localhost:11235/crawl/job/crawl_xyz

The response includes `status` field: `"processing"`, `"completed"`, or `"failed"`.

#### LLM Extraction Jobs with Webhooks

The same webhook system works for LLM extraction jobs via `/llm/job`:

```bash
# Submit LLM extraction job with webhook
curl -X POST http://localhost:11235/llm/job \
  -H "Content-Type: application/json" \
  -d '{
    "url": "https://example.com/article",
    "q": "Extract the article title, author, and main points",
    "provider": "openai/gpt-4o-mini",
    "webhook_config": {
      "webhook_url": "https://myapp.com/webhooks/llm-complete",
      "webhook_data_in_payload": true,
      "webhook_headers": {
        "X-Webhook-Secret": "your-secret-token"
      }
    }
  }'

# Response: {"task_id": "llm_1234567890"}
```

**Your webhook receives:**

```json
{
  "task_id": "llm_1234567890",
  "task_type": "llm_extraction",
  "status": "completed",
  "timestamp": "2025-10-22T12:30:00.000000+00:00",
  "urls": ["https://example.com/article"],
  "data": {
    "extracted_content": {
      "title": "Understanding Web Scraping",
      "author": "John Doe",
      "main_points": ["Point 1", "Point 2", "Point 3"]
    }
  }
}
```

**Key Differences for LLM Jobs:**

- Task type is `"llm_extraction"` instead of `"crawl"`
- Extracted data is in `data.extracted_content`
- Single URL only (not an array)
- Supports schema-based extraction with `schema` parameter

> 💡 **Pro tip**: See [WEBHOOK_EXAMPLES.md](./WEBHOOK_EXAMPLES.md) for detailed examples including TypeScript client code, Flask webhook handlers, and failure handling.

---
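For a quick way to consume these payloads before wiring up the full examples in WEBHOOK_EXAMPLES.md, a minimal Flask receiver might look like the sketch below. The route path and port are placeholders, and the handler simply branches on the `task_type` field shown in the payloads above.

```python
from flask import Flask, request, jsonify

app = Flask(__name__)

# Minimal receiver sketch for the webhook payloads shown above.
# Route path, port, and printing logic are illustrative placeholders.
@app.route("/webhooks/llm-complete", methods=["POST"])
def llm_complete():
    payload = request.json

    if payload["status"] != "completed":
        print(f"Job {payload['task_id']} failed: {payload.get('error')}")
        return jsonify({"status": "received"}), 200

    # Crawl jobs put markdown/links under "data"; LLM jobs put the structured
    # answer under "data.extracted_content".
    if payload["task_type"] == "llm_extraction":
        print(payload["data"]["extracted_content"])
    else:
        print(payload["data"].get("markdown", "")[:200])

    return jsonify({"status": "received"}), 200

if __name__ == "__main__":
    app.run(port=5000)
```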
@@ -2,8 +2,8 @@
 import asyncio, json, hashlib, time, psutil
 from contextlib import suppress
 from typing import Dict
-from crawl4ai import AsyncWebCrawler, BrowserConfig
-from typing import Dict
+from crawl4ai import AsyncWebCrawler, BrowserConfig, BrowserAdapter
+from typing import Dict, Optional
 from utils import load_config

 CONFIG = load_config()
@@ -15,11 +15,22 @@ LOCK = asyncio.Lock()
 MEM_LIMIT = CONFIG.get("crawler", {}).get("memory_threshold_percent", 95.0)  # % RAM – refuse new browsers above this
 IDLE_TTL = CONFIG.get("crawler", {}).get("pool", {}).get("idle_ttl_sec", 1800)  # close if unused for 30 min

-def _sig(cfg: BrowserConfig) -> str:
-    payload = json.dumps(cfg.to_dict(), sort_keys=True, separators=(",",":"))
+def _sig(cfg: BrowserConfig, adapter: Optional[BrowserAdapter] = None) -> str:
+    try:
+        config_payload = json.dumps(cfg.to_dict(), sort_keys=True, separators=(",", ":"))
+    except (TypeError, ValueError):
+        # Fallback to string representation if JSON serialization fails
+        config_payload = str(cfg.to_dict())
+    adapter_name = adapter.__class__.__name__ if adapter else "PlaywrightAdapter"
+    payload = f"{config_payload}:{adapter_name}"
     return hashlib.sha1(payload.encode()).hexdigest()

-async def get_crawler(cfg: BrowserConfig) -> AsyncWebCrawler:
+async def get_crawler(
+    cfg: BrowserConfig, adapter: Optional[BrowserAdapter] = None
+) -> AsyncWebCrawler:
+    sig = None
     try:
         sig = _sig(cfg)
         async with LOCK:
@@ -37,12 +48,13 @@ async def get_crawler(cfg: BrowserConfig) -> AsyncWebCrawler:
     except Exception as e:
         raise RuntimeError(f"Failed to start browser: {e}")
     finally:
-        if sig in POOL:
-            LAST_USED[sig] = time.time()
-        else:
-            # If we failed to start the browser, we should remove it from the pool
-            POOL.pop(sig, None)
-            LAST_USED.pop(sig, None)
+        if sig:
+            if sig in POOL:
+                LAST_USED[sig] = time.time()
+            else:
+                # If we failed to start the browser, we should remove it from the pool
+                POOL.pop(sig, None)
+                LAST_USED.pop(sig, None)

 # If we failed to start the browser, we should remove it from the pool
 async def close_all():
     async with LOCK:
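The effect of the new `_sig` is that two requests only share a pooled browser when both the serialized config and the adapter class name match. A self-contained sketch of the same keying idea, using a plain dict in place of `BrowserConfig` and made-up adapter classes that only stand in for real ones:

```python
import hashlib
import json

class PlaywrightAdapter:   # stand-in for the default adapter, not the real class
    pass

class UndetectedAdapter:   # stand-in for an alternative adapter
    pass

def pool_key(cfg: dict, adapter=None) -> str:
    # Serialize the config deterministically, falling back to str() for values
    # json can't handle, then append the adapter class name.
    try:
        config_payload = json.dumps(cfg, sort_keys=True, separators=(",", ":"))
    except (TypeError, ValueError):
        config_payload = str(cfg)
    adapter_name = adapter.__class__.__name__ if adapter else "PlaywrightAdapter"
    return hashlib.sha1(f"{config_payload}:{adapter_name}".encode()).hexdigest()

cfg = {"headless": True, "viewport": {"width": 1280, "height": 720}}
print(pool_key(cfg) == pool_key(cfg, PlaywrightAdapter()))  # True: same pool entry
print(pool_key(cfg) == pool_key(cfg, UndetectedAdapter()))  # False: separate entry
```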
@@ -1,229 +1,445 @@
#!/usr/bin/env python3
"""
🚀 Crawl4AI Docker Hooks System - Complete Examples
====================================================

This file demonstrates the Docker Hooks System with three different approaches:

1. String-based hooks for REST API
2. hooks_to_string() utility to convert functions
3. Docker Client with automatic conversion (most convenient)

Requirements:
- Docker container running: docker run -p 11235:11235 unclecode/crawl4ai:latest
- crawl4ai installed: pip install crawl4ai
"""

import asyncio
import requests
import json
import time
from typing import Dict, Any

# Import Crawl4AI components
from crawl4ai import hooks_to_string
from crawl4ai.docker_client import Crawl4aiDockerClient

# Configuration
DOCKER_URL = "http://localhost:11235"
TEST_URLS = [
    "https://www.kidocode.com",
    "https://quotes.toscrape.com",
    "https://httpbin.org/html",
]


def print_section(title: str, description: str = ""):
    """Print a formatted section header"""
    print("\n" + "=" * 70)
    print(f"  {title}")
    if description:
        print(f"  {description}")
    print("=" * 70 + "\n")


def check_docker_service() -> bool:
    """Check if Docker service is running"""
    try:
        response = requests.get(f"{DOCKER_URL}/health", timeout=3)
        return response.status_code == 200
    except:
        return False


# ============================================================================
# REUSABLE HOOK LIBRARY
# ============================================================================


async def performance_optimization_hook(page, context, **kwargs):
    """
    Performance Hook: Block unnecessary resources to speed up crawling
    """
    print("   [Hook] 🚀 Optimizing performance - blocking images and ads...")

    # Block images
    await context.route(
        "**/*.{png,jpg,jpeg,gif,webp,svg,ico}",
        lambda route: route.abort()
    )

    # Block ads and analytics
    await context.route("**/analytics/*", lambda route: route.abort())
    await context.route("**/ads/*", lambda route: route.abort())
    await context.route("**/google-analytics.com/*", lambda route: route.abort())

    print("   [Hook] ✓ Performance optimization applied")
    return page


async def viewport_setup_hook(page, context, **kwargs):
    """
    Viewport Hook: Set consistent viewport size for rendering
    """
    print("   [Hook] 🖥️ Setting viewport to 1920x1080...")
    await page.set_viewport_size({"width": 1920, "height": 1080})
    print("   [Hook] ✓ Viewport configured")
    return page


async def authentication_headers_hook(page, context, url, **kwargs):
    """
    Headers Hook: Add custom authentication and tracking headers
    """
    print(f"   [Hook] 🔐 Adding custom headers for {url[:50]}...")

    await page.set_extra_http_headers({
        'X-Crawl4AI': 'docker-hooks',
        'X-Custom-Hook': 'function-based',
        'Accept-Language': 'en-US,en;q=0.9',
    })

    print("   [Hook] ✓ Custom headers added")
    return page


async def lazy_loading_handler_hook(page, context, **kwargs):
    """
    Content Hook: Handle lazy-loaded content by scrolling
    """
    print("   [Hook] 📜 Scrolling to load lazy content...")

    # Scroll to bottom
    await page.evaluate("window.scrollTo(0, document.body.scrollHeight)")
    await page.wait_for_timeout(1000)

    # Scroll to middle
    await page.evaluate("window.scrollTo(0, document.body.scrollHeight / 2)")
    await page.wait_for_timeout(500)

    # Scroll back to top
    await page.evaluate("window.scrollTo(0, 0)")
    await page.wait_for_timeout(500)

    print("   [Hook] ✓ Lazy content loaded")
    return page


async def page_analytics_hook(page, context, **kwargs):
    """
    Analytics Hook: Log page metrics before extraction
    """
    print("   [Hook] 📊 Collecting page analytics...")

    metrics = await page.evaluate('''
        () => ({
            title: document.title,
            images: document.images.length,
            links: document.links.length,
            scripts: document.scripts.length,
            headings: document.querySelectorAll('h1, h2, h3').length,
            paragraphs: document.querySelectorAll('p').length
        })
    ''')

    print(f"   [Hook] 📈 Page: {metrics['title'][:50]}...")
    print(f"          Links: {metrics['links']}, Images: {metrics['images']}, "
          f"Headings: {metrics['headings']}, Paragraphs: {metrics['paragraphs']}")

    return page


# ============================================================================
# APPROACH 1: String-Based Hooks (REST API)
# ============================================================================

def example_1_string_based_hooks():
    """
    Demonstrate string-based hooks with REST API
    Use this when working with REST API directly or non-Python clients
    """
    print_section(
        "APPROACH 1: String-Based Hooks (REST API)",
        "Define hooks as strings for REST API requests"
    )

    # Define hooks as strings
    hooks_config = {
        "on_page_context_created": """
async def hook(page, context, **kwargs):
    print("   [String Hook] Setting up page context...")
    # Block images for performance
    await context.route("**/*.{png,jpg,jpeg,gif,webp}", lambda route: route.abort())
    await page.set_viewport_size({"width": 1920, "height": 1080})
    return page
""",

        "before_goto": """
async def hook(page, context, url, **kwargs):
    print(f"   [String Hook] Navigating to {url[:50]}...")
    await page.set_extra_http_headers({
        'X-Crawl4AI': 'string-based-hooks',
    })
    return page
""",

        "before_retrieve_html": """
async def hook(page, context, **kwargs):
    print("   [String Hook] Scrolling page...")
    await page.evaluate("window.scrollTo(0, document.body.scrollHeight)")
    await page.wait_for_timeout(1000)
    return page
"""
    }

    # Prepare request payload
    payload = {
        "urls": [TEST_URLS[2]],  # httpbin.org
        "hooks": {
            "code": hooks_config,
            "timeout": 30
        },
        "crawler_config": {
            "cache_mode": "bypass"
        }
    }

    print(f"🎯 Target URL: {TEST_URLS[2]}")
    print(f"🔧 Configured {len(hooks_config)} string-based hooks")
    print(f"📡 Sending request to Docker API...\n")

    try:
        start_time = time.time()
        response = requests.post(f"{DOCKER_URL}/crawl", json=payload, timeout=60)
        execution_time = time.time() - start_time

        if response.status_code == 200:
            result = response.json()
            print(f"\n✅ Request successful! (took {execution_time:.2f}s)")

            # Display results
            if result.get('results') and result['results'][0].get('success'):
                crawl_result = result['results'][0]
                html_length = len(crawl_result.get('html', ''))
                markdown_length = len(crawl_result.get('markdown', ''))

                print(f"\n📊 Results:")
                print(f"   • HTML length: {html_length:,} characters")
                print(f"   • Markdown length: {markdown_length:,} characters")
                print(f"   • URL: {crawl_result.get('url')}")

                # Check hooks execution
                if 'hooks' in result:
                    hooks_info = result['hooks']
                    print(f"\n🎣 Hooks Execution:")
                    print(f"   • Status: {hooks_info['status']['status']}")
                    print(f"   • Attached hooks: {len(hooks_info['status']['attached_hooks'])}")

                    if 'summary' in hooks_info:
                        summary = hooks_info['summary']
                        print(f"   • Total executions: {summary['total_executions']}")
                        print(f"   • Successful: {summary['successful']}")
                        print(f"   • Success rate: {summary['success_rate']:.1f}%")
            else:
                print(f"⚠️ Crawl completed but no results")

        else:
            print(f"❌ Request failed with status {response.status_code}")
            print(f"   Error: {response.text[:200]}")

    except requests.exceptions.Timeout:
        print("⏰ Request timed out after 60 seconds")
    except Exception as e:
        print(f"❌ Error: {str(e)}")

    print("\n" + "─" * 70)
    print("✓ String-based hooks example complete\n")


# ============================================================================
# APPROACH 2: Function-Based Hooks with hooks_to_string() Utility
# ============================================================================

def example_2_hooks_to_string_utility():
    """
    Demonstrate the hooks_to_string() utility for converting functions
    Use this when you want to write hooks as functions but use REST API
    """
    print_section(
        "APPROACH 2: hooks_to_string() Utility",
        "Convert Python functions to strings for REST API"
    )

    print("📦 Creating hook functions...")
    print("   • performance_optimization_hook")
    print("   • authentication_headers_hook")
    print("   • lazy_loading_handler_hook")

    # Convert function objects to strings using the utility
    print("\n🔄 Converting functions to strings with hooks_to_string()...")

    hooks_dict = {
        "on_page_context_created": performance_optimization_hook,
        "before_goto": authentication_headers_hook,
        "before_retrieve_html": lazy_loading_handler_hook,
    }

    hooks_as_strings = hooks_to_string(hooks_dict)

    print(f"✅ Successfully converted {len(hooks_as_strings)} functions to strings")

    # Show a preview
    print("\n📝 Sample converted hook (first 200 characters):")
    print("─" * 70)
    sample_hook = list(hooks_as_strings.values())[0]
    print(sample_hook[:200] + "...")
    print("─" * 70)

    # Use the converted hooks with REST API
    print("\n📡 Using converted hooks with REST API...")

    payload = {
        "urls": [TEST_URLS[2]],
        "hooks": {
            "code": hooks_as_strings,
            "timeout": 30
        }
    }

    try:
        start_time = time.time()
        response = requests.post(f"{DOCKER_URL}/crawl", json=payload, timeout=60)
        execution_time = time.time() - start_time

        if response.status_code == 200:
            result = response.json()
            print(f"\n✅ Request successful! (took {execution_time:.2f}s)")

            if result.get('results') and result['results'][0].get('success'):
                crawl_result = result['results'][0]
                print(f"   • HTML length: {len(crawl_result.get('html', '')):,} characters")
                print(f"   • Hooks executed successfully!")
        else:
            print(f"❌ Request failed: {response.status_code}")

    except Exception as e:
        print(f"❌ Error: {str(e)}")

    print("\n💡 Benefits of hooks_to_string():")
    print("   ✓ Write hooks as regular Python functions")
    print("   ✓ Full IDE support (autocomplete, syntax highlighting)")
    print("   ✓ Type checking and linting")
    print("   ✓ Easy to test and debug")
    print("   ✓ Reusable across projects")
    print("   ✓ Works with any REST API client")

    print("\n" + "─" * 70)
    print("✓ hooks_to_string() utility example complete\n")


# ============================================================================
# APPROACH 3: Docker Client with Automatic Conversion (RECOMMENDED)
# ============================================================================

async def example_3_docker_client_auto_conversion():
    """
    Demonstrate Docker Client with automatic hook conversion (RECOMMENDED)
    Use this for the best developer experience with Python
    """
    print_section(
        "APPROACH 3: Docker Client with Auto-Conversion (RECOMMENDED)",
        "Pass function objects directly - conversion happens automatically!"
    )

    print("🐳 Initializing Crawl4AI Docker Client...")
    client = Crawl4aiDockerClient(base_url=DOCKER_URL)

    print("✅ Client ready!\n")

    # Use our reusable hook library - just pass the function objects!
    print("📚 Using reusable hook library:")
    print("   • performance_optimization_hook")
    print("   • authentication_headers_hook")
    print("   • lazy_loading_handler_hook")
    print("   • page_analytics_hook")

    print("\n🎯 Target URL: " + TEST_URLS[0])
    print("🚀 Starting crawl with automatic hook conversion...\n")

    try:
        start_time = time.time()

        # Pass function objects directly - NO manual conversion needed! ✨
        results = await client.crawl(
            urls=[TEST_URLS[0]],
            hooks={
                "on_page_context_created": performance_optimization_hook,
                "before_goto": authentication_headers_hook,
                "before_retrieve_html": lazy_loading_handler_hook,
                "before_return_html": page_analytics_hook,
            },
            hooks_timeout=30
        )

        execution_time = time.time() - start_time

        print(f"\n✅ Crawl completed! (took {execution_time:.2f}s)\n")

        # Display results
        if results and results.success:
            result = results
            print(f"📊 Results:")
            print(f"   • URL: {result.url}")
            print(f"   • Success: {result.success}")
            print(f"   • HTML length: {len(result.html):,} characters")
            print(f"   • Markdown length: {len(result.markdown):,} characters")

            # Show metadata
            if result.metadata:
                print(f"\n📋 Metadata:")
                print(f"   • Title: {result.metadata.get('title', 'N/A')[:50]}...")

            # Show links
            if result.links:
                internal_count = len(result.links.get('internal', []))
                external_count = len(result.links.get('external', []))
                print(f"\n🔗 Links Found:")
                print(f"   • Internal: {internal_count}")
                print(f"   • External: {external_count}")
        else:
            print(f"⚠️ Crawl completed but no successful results")
            if results:
                print(f"   Error: {results.error_message}")

    except Exception as e:
        print(f"❌ Error: {str(e)}")
        import traceback
        traceback.print_exc()

    print("\n🌟 Why Docker Client is RECOMMENDED:")
    print("   ✓ Automatic function-to-string conversion")
    print("   ✓ No manual hooks_to_string() calls needed")
    print("   ✓ Cleaner, more Pythonic code")
    print("   ✓ Full type hints and IDE support")
    print("   ✓ Built-in error handling")
    print("   ✓ Async/await support")

    print("\n" + "─" * 70)
    print("✓ Docker Client auto-conversion example complete\n")


# ============================================================================
# APPROACH 4: Authentication Example
# ============================================================================

def example_4_authentication_flow():
    """
    Demonstrate authentication flow with multiple hooks
    """
    print_section(
        "EXAMPLE 4: Authentication Flow",
        "Using hooks for authentication with cookies and headers"
    )

    hooks_code = {
        "on_page_context_created": """
@@ -242,12 +458,6 @@ async def hook(page, context, **kwargs):
         }
     ])
-
-    # Set localStorage items (for SPA authentication)
-    await page.evaluate('''
-        localStorage.setItem('user_id', '12345');
-        localStorage.setItem('auth_time', new Date().toISOString());
-    ''')
-
     return page
 """,
@@ -269,9 +479,7 @@ async def hook(page, context, url, **kwargs):
     }

     payload = {
-        "urls": [
-            "https://httpbin.org/basic-auth/user/passwd"
-        ],
+        "urls": ["https://httpbin.org/basic-auth/user/passwd"],
         "hooks": {
             "code": hooks_code,
             "timeout": 15
@@ -279,7 +487,7 @@ async def hook(page, context, url, **kwargs):
     }

     print("\nTesting authentication with httpbin endpoints...")
-    response = requests.post(f"{API_BASE_URL}/crawl", json=payload)
+    response = requests.post(f"{DOCKER_URL}/crawl", json=payload)

     if response.status_code == 200:
         data = response.json()
@@ -300,214 +508,120 @@ async def hook(page, context, url, **kwargs):
    else:
        print(f"❌ Error: {response.status_code}")

    print("\n" + "─" * 70)
    print("✓ Authentication example complete\n")


# ============================================================================
# MAIN EXECUTION
# ============================================================================

async def main():
    """
    Run all example demonstrations
    """
    print("\n" + "=" * 70)
    print("  🚀 Crawl4AI - Docker Hooks System Examples")
    print("=" * 70)

    # Check Docker service
    print("\n🔍 Checking Docker service status...")
    if not check_docker_service():
        print("❌ Docker service is not running!")
        print("\n📋 To start the Docker service:")
        print("   docker run -p 11235:11235 unclecode/crawl4ai:latest")
        print("\nPlease start the service and run this example again.")
        return

    print("✅ Docker service is running!\n")

    # Run all examples
    examples = [
        ("String-Based Hooks (REST API)", example_1_string_based_hooks, False),
        ("hooks_to_string() Utility", example_2_hooks_to_string_utility, False),
        ("Docker Client Auto-Conversion (Recommended)", example_3_docker_client_auto_conversion, True),
        ("Authentication Flow", example_4_authentication_flow, False),
    ]

    for i, (name, example_func, is_async) in enumerate(examples, 1):
        print(f"\n{'🔷' * 35}")
        print(f"Example {i}/{len(examples)}: {name}")
        print(f"{'🔷' * 35}\n")

        try:
            if is_async:
                await example_func()
            else:
                example_func()

            print(f"✅ Example {i} completed successfully!")

            # Pause between examples (except the last one)
            if i < len(examples):
                print("\n⏸️ Press Enter to continue to next example...")
                input()

        except KeyboardInterrupt:
            print(f"\n⏹️ Examples interrupted by user")
            break
        except Exception as e:
            print(f"\n❌ Example {i} failed: {str(e)}")
            import traceback
            traceback.print_exc()
            print("\nContinuing to next example...\n")
            continue

    # Final summary
    print("\n" + "=" * 70)
    print("  🎉 All Examples Complete!")
    print("=" * 70)

    print("\n📊 Summary - Three Approaches to Docker Hooks:")

    print("\n✨ 1. String-Based Hooks:")
    print("   • Write hooks as strings directly in JSON")
    print("   • Best for: REST API, non-Python clients, simple use cases")
    print("   • Cons: No IDE support, harder to debug")

    print("\n✨ 2. hooks_to_string() Utility:")
    print("   • Write hooks as Python functions, convert to strings")
    print("   • Best for: Python with REST API, reusable hook libraries")
    print("   • Pros: IDE support, type checking, easy debugging")

    print("\n✨ 3. Docker Client (RECOMMENDED):")
    print("   • Pass function objects directly, automatic conversion")
    print("   • Best for: Python applications, best developer experience")
    print("   • Pros: All benefits of #2 + cleaner code, no manual conversion")

    print("\n💡 Recommendation:")
    print("   Use Docker Client (#3) for Python applications")
    print("   Use hooks_to_string() (#2) when you need REST API flexibility")
    print("   Use string-based (#1) for non-Python clients or simple scripts")

    print("\n🎯 8 Hook Points Available:")
    print("   • on_browser_created, on_page_context_created")
    print("   • on_user_agent_updated, before_goto, after_goto")
    print("   • on_execution_started, before_retrieve_html, before_return_html")

    print("\n📚 Resources:")
    print("   • Docs: https://docs.crawl4ai.com/core/docker-deployment")
    print("   • GitHub: https://github.com/unclecode/crawl4ai")
    print("   • Discord: https://discord.gg/jP8KfhDhyN")

    print("\n" + "=" * 70)
    print("  Happy Crawling! 🕷️")
    print("=" * 70 + "\n")


if __name__ == "__main__":
    print("\n🎬 Starting Crawl4AI Docker Hooks Examples...")
    print("Press Ctrl+C anytime to exit\n")

    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        print("\n\n👋 Examples stopped by user. Thanks for exploring Crawl4AI!")
    except Exception as e:
        print(f"\n\n❌ Error: {str(e)}")
        import traceback
        traceback.print_exc()
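For reference while reading the example diff above, the eight hook points it cycles through can all be attached in a single `/crawl` request. A skeleton payload, with the hook bodies reduced to a hedged no-op stub that you would replace with real logic, might look like:

```python
# Skeleton payload wiring a no-op string hook to each of the eight hook points
# named in the example above. The stub body is a placeholder, not a real hook.
NOOP_HOOK = """
async def hook(page=None, *args, **kwargs):
    # no-op stub: replace with real logic for each hook point
    return page
"""

payload = {
    "urls": ["https://httpbin.org/html"],
    "hooks": {
        "code": {
            "on_browser_created": NOOP_HOOK,
            "on_page_context_created": NOOP_HOOK,
            "on_user_agent_updated": NOOP_HOOK,
            "before_goto": NOOP_HOOK,
            "after_goto": NOOP_HOOK,
            "on_execution_started": NOOP_HOOK,
            "before_retrieve_html": NOOP_HOOK,
            "before_return_html": NOOP_HOOK,
        },
        "timeout": 30,
    },
    "crawler_config": {"cache_mode": "bypass"},
}
```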
@@ -27,6 +27,14 @@
- [Hook Response Information](#hook-response-information)
- [Error Handling](#error-handling)
- [Hooks Utility: Function-Based Approach (Python)](#hooks-utility-function-based-approach-python)
- [Job Queue & Webhook API](#job-queue-webhook-api)
  - [Why Use the Job Queue API?](#why-use-the-job-queue-api)
  - [Available Endpoints](#available-endpoints)
  - [Webhook Configuration](#webhook-configuration)
  - [Usage Examples](#usage-examples)
  - [Webhook Best Practices](#webhook-best-practices)
  - [Use Cases](#use-cases)
  - [Troubleshooting](#troubleshooting)
- [Dockerfile Parameters](#dockerfile-parameters)
- [Using the API](#using-the-api)
- [Playground Interface](#playground-interface)
@@ -1110,6 +1118,464 @@ if __name__ == "__main__":

---

## Job Queue & Webhook API

The Docker deployment includes a powerful asynchronous job queue system with webhook support for both crawling and LLM extraction tasks. Instead of waiting for long-running operations to complete, submit jobs and receive real-time notifications via webhooks when they finish.

### Why Use the Job Queue API?

**Traditional Synchronous API (`/crawl`):**

- Client waits for entire crawl to complete
- Timeout issues with long-running crawls
- Resource blocking during execution
- Constant polling required for status updates

**Asynchronous Job Queue API (`/crawl/job`, `/llm/job`):**

- ✅ Submit job and continue immediately
- ✅ No timeout concerns for long operations
- ✅ Real-time webhook notifications on completion
- ✅ Better resource utilization
- ✅ Perfect for batch processing
- ✅ Ideal for microservice architectures

### Available Endpoints

#### 1. Crawl Job Endpoint

```
POST /crawl/job
```

Submit an asynchronous crawl job with optional webhook notification.

**Request Body:**

```json
{
  "urls": ["https://example.com"],
  "cache_mode": "bypass",
  "extraction_strategy": {
    "type": "JsonCssExtractionStrategy",
    "schema": {
      "title": "h1",
      "content": ".article-body"
    }
  },
  "webhook_config": {
    "webhook_url": "https://your-app.com/webhook/crawl-complete",
    "webhook_data_in_payload": true,
    "webhook_headers": {
      "X-Webhook-Secret": "your-secret-token",
      "X-Custom-Header": "value"
    }
  }
}
```

**Response:**

```json
{
  "task_id": "crawl_1698765432",
  "message": "Crawl job submitted"
}
```

#### 2. LLM Extraction Job Endpoint

```
POST /llm/job
```

Submit an asynchronous LLM extraction job with optional webhook notification.

**Request Body:**

```json
{
  "url": "https://example.com/article",
  "q": "Extract the article title, author, publication date, and main points",
  "provider": "openai/gpt-4o-mini",
  "schema": "{\"title\": \"string\", \"author\": \"string\", \"date\": \"string\", \"points\": [\"string\"]}",
  "cache": false,
  "webhook_config": {
    "webhook_url": "https://your-app.com/webhook/llm-complete",
    "webhook_data_in_payload": true,
    "webhook_headers": {
      "X-Webhook-Secret": "your-secret-token"
    }
  }
}
```

**Response:**

```json
{
  "task_id": "llm_1698765432",
  "message": "LLM job submitted"
}
```

#### 3. Job Status Endpoint

```
GET /job/{task_id}
```

Check the status and retrieve results of a submitted job.

**Response (In Progress):**

```json
{
  "task_id": "crawl_1698765432",
  "status": "processing",
  "message": "Job is being processed"
}
```

**Response (Completed):**

```json
{
  "task_id": "crawl_1698765432",
  "status": "completed",
  "result": {
    "markdown": "# Page Title\n\nContent...",
    "extracted_content": {...},
    "links": {...}
  }
}
```
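If you build the LLM job's `schema` field programmatically, note that it is a JSON-encoded string rather than a nested object. A minimal sketch, assuming the endpoint and field names documented above:

```python
import json
import requests

# Build the schema as a plain dict, then serialize it, since /llm/job expects
# "schema" to be a JSON string. URL, query, and provider values are examples.
schema = {"title": "string", "author": "string", "date": "string", "points": ["string"]}

response = requests.post(
    "http://localhost:11235/llm/job",
    json={
        "url": "https://example.com/article",
        "q": "Extract the article title, author, publication date, and main points",
        "provider": "openai/gpt-4o-mini",
        "schema": json.dumps(schema),  # serialized string, not a nested object
    },
    timeout=30,
)
print(response.json()["task_id"])
```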
### Webhook Configuration

Webhooks provide real-time notifications when your jobs complete, eliminating the need for constant polling.

#### Webhook Config Parameters

| Parameter | Type | Required | Description |
|-----------|------|----------|-------------|
| `webhook_url` | string | Yes | Your HTTP(S) endpoint to receive notifications |
| `webhook_data_in_payload` | boolean | No | Include full result data in webhook payload (default: false) |
| `webhook_headers` | object | No | Custom headers for authentication/identification |

#### Webhook Payload Format

**Success Notification (Crawl Job):**

```json
{
  "task_id": "crawl_1698765432",
  "task_type": "crawl",
  "status": "completed",
  "timestamp": "2025-10-22T12:30:00.000000+00:00",
  "urls": ["https://example.com"],
  "data": {
    "markdown": "# Page content...",
    "extracted_content": {...},
    "links": {...}
  }
}
```

**Success Notification (LLM Job):**

```json
{
  "task_id": "llm_1698765432",
  "task_type": "llm_extraction",
  "status": "completed",
  "timestamp": "2025-10-22T12:30:00.000000+00:00",
  "urls": ["https://example.com/article"],
  "data": {
    "extracted_content": {
      "title": "Understanding Web Scraping",
      "author": "John Doe",
      "date": "2025-10-22",
      "points": ["Point 1", "Point 2"]
    }
  }
}
```

**Failure Notification:**

```json
{
  "task_id": "crawl_1698765432",
  "task_type": "crawl",
  "status": "failed",
  "timestamp": "2025-10-22T12:30:00.000000+00:00",
  "urls": ["https://example.com"],
  "error": "Connection timeout after 30 seconds"
}
```

#### Webhook Delivery & Retry

- **Delivery Method:** HTTP POST to your `webhook_url`
- **Content-Type:** `application/json`
- **Retry Policy:** Exponential backoff with 5 attempts
  - Attempt 1: Immediate
  - Attempt 2: 1 second delay
  - Attempt 3: 2 seconds delay
  - Attempt 4: 4 seconds delay
  - Attempt 5: 8 seconds delay
- **Success Status Codes:** 200-299
- **Custom Headers:** Your `webhook_headers` are included in every request
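Because failed deliveries are retried up to five times, the same notification can reach your endpoint more than once. A minimal sketch of de-duplicating on `task_id`; the route path and the in-memory set are illustrative choices, not part of Crawl4AI:

```python
from flask import Flask, request, jsonify

app = Flask(__name__)
seen_tasks = set()  # swap for a database or cache in production

@app.route("/webhook/crawl-complete", methods=["POST"])
def crawl_webhook():
    payload = request.json
    task_id = payload["task_id"]

    # Retried deliveries reuse the same task_id, so treat repeats as no-ops.
    if task_id in seen_tasks:
        return jsonify({"status": "duplicate ignored"}), 200
    seen_tasks.add(task_id)

    if payload["status"] == "completed":
        print(f"{task_id} completed with {len(payload.get('data', {}))} data keys")
    else:
        print(f"{task_id} failed: {payload.get('error')}")

    return jsonify({"status": "received"}), 200
```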
### Usage Examples
|
||||||
|
|
||||||
|
#### Example 1: Python with Webhook Handler (Flask)
|
||||||
|
|
||||||
|
```python
|
||||||
|
from flask import Flask, request, jsonify
|
||||||
|
import requests
|
||||||
|
|
||||||
|
app = Flask(__name__)
|
||||||
|
|
||||||
|
# Webhook handler
|
||||||
|
@app.route('/webhook/crawl-complete', methods=['POST'])
|
||||||
|
def handle_crawl_webhook():
|
||||||
|
payload = request.json
|
||||||
|
|
||||||
|
if payload['status'] == 'completed':
|
||||||
|
print(f"✅ Job {payload['task_id']} completed!")
|
||||||
|
print(f"Task type: {payload['task_type']}")
|
||||||
|
|
||||||
|
# Access the crawl results
|
||||||
|
if 'data' in payload:
|
||||||
|
markdown = payload['data'].get('markdown', '')
|
||||||
|
extracted = payload['data'].get('extracted_content', {})
|
||||||
|
print(f"Extracted {len(markdown)} characters")
|
||||||
|
print(f"Structured data: {extracted}")
|
||||||
|
else:
|
||||||
|
print(f"❌ Job {payload['task_id']} failed: {payload.get('error')}")
|
||||||
|
|
||||||
|
return jsonify({"status": "received"}), 200
|
||||||
|
|
||||||
|
# Submit a crawl job with webhook
|
||||||
|
def submit_crawl_job():
|
||||||
|
response = requests.post(
|
||||||
|
"http://localhost:11235/crawl/job",
|
||||||
|
json={
|
||||||
|
"urls": ["https://example.com"],
|
||||||
|
"extraction_strategy": {
|
||||||
|
"type": "JsonCssExtractionStrategy",
|
||||||
|
"schema": {
|
||||||
|
"name": "Example Schema",
|
||||||
|
"baseSelector": "body",
|
||||||
|
"fields": [
|
||||||
|
{"name": "title", "selector": "h1", "type": "text"},
|
||||||
|
{"name": "description", "selector": "meta[name='description']", "type": "attribute", "attribute": "content"}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"webhook_config": {
|
||||||
|
"webhook_url": "https://your-app.com/webhook/crawl-complete",
|
||||||
|
"webhook_data_in_payload": True,
|
||||||
|
"webhook_headers": {
|
||||||
|
"X-Webhook-Secret": "your-secret-token"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
task_id = response.json()['task_id']
|
||||||
|
print(f"Job submitted: {task_id}")
|
||||||
|
return task_id
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
app.run(port=5000)
|
||||||
|
```

#### Example 2: LLM Extraction with Webhooks

```python
import requests

# Note: the webhook handler below reuses the Flask `app`, `request`, and
# `jsonify` imports from Example 1.


def submit_llm_job_with_webhook():
    response = requests.post(
        "http://localhost:11235/llm/job",
        json={
            "url": "https://example.com/article",
            "q": "Extract the article title, author, and main points",
            "provider": "openai/gpt-4o-mini",
            "webhook_config": {
                "webhook_url": "https://your-app.com/webhook/llm-complete",
                "webhook_data_in_payload": True,
                "webhook_headers": {
                    "X-Webhook-Secret": "your-secret-token"
                }
            }
        }
    )

    task_id = response.json()['task_id']
    print(f"LLM job submitted: {task_id}")
    return task_id


# Webhook handler for LLM jobs
@app.route('/webhook/llm-complete', methods=['POST'])
def handle_llm_webhook():
    payload = request.json

    if payload['status'] == 'completed':
        extracted = payload['data']['extracted_content']
        print("✅ LLM extraction completed!")
        print(f"Results: {extracted}")
    else:
        print(f"❌ LLM extraction failed: {payload.get('error')}")

    return jsonify({"status": "received"}), 200
```

#### Example 3: Without Webhooks (Polling)

If you don't use webhooks, you can poll for results:

```python
import requests
import time

# Submit job
response = requests.post(
    "http://localhost:11235/crawl/job",
    json={"urls": ["https://example.com"]}
)
task_id = response.json()['task_id']

# Poll for results
while True:
    result = requests.get(f"http://localhost:11235/crawl/job/{task_id}")
    data = result.json()

    if data['status'] == 'completed':
        print("Job completed!")
        print(data['result'])
        break
    elif data['status'] == 'failed':
        print(f"Job failed: {data.get('error')}")
        break

    print("Still processing...")
    time.sleep(2)
```
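
The loop above polls indefinitely. In practice you usually want a deadline and a gentle backoff between requests; here is a minimal sketch of that, reusing the same `/crawl/job` endpoint shown above (the `wait_for_job` helper name and the 5-minute default are illustrative, not part of the API):

```python
import time
import requests

def wait_for_job(task_id: str, timeout: float = 300.0, base_delay: float = 2.0) -> dict:
    """Poll a crawl job until it finishes, with a deadline and gentle backoff."""
    deadline = time.monotonic() + timeout
    delay = base_delay
    while time.monotonic() < deadline:
        data = requests.get(f"http://localhost:11235/crawl/job/{task_id}").json()
        if data['status'] in ('completed', 'failed'):
            return data
        time.sleep(delay)
        delay = min(delay * 1.5, 15.0)  # back off, but never wait more than 15s
    raise TimeoutError(f"Job {task_id} did not finish within {timeout}s")
```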

#### Example 4: Global Webhook Configuration

Set a default webhook URL in your `config.yml` to avoid repeating it in every request:

```yaml
# config.yml
api:
  crawler:
    # ... other settings ...
    webhook:
      default_url: "https://your-app.com/webhook/default"
      default_headers:
        X-Webhook-Secret: "your-secret-token"
```

Then submit jobs without webhook config:

```python
# Uses the global webhook configuration
response = requests.post(
    "http://localhost:11235/crawl/job",
    json={"urls": ["https://example.com"]}
)
```

### Webhook Best Practices

1. **Authentication:** Always use custom headers for webhook authentication
   ```json
   "webhook_headers": {
       "X-Webhook-Secret": "your-secret-token"
   }
   ```

2. **Idempotency:** Design your webhook handler to be idempotent (safe to receive duplicate notifications); a minimal sketch follows this list

3. **Fast Response:** Return HTTP 200 quickly; process data asynchronously if needed
   ```python
   @app.route('/webhook', methods=['POST'])
   def webhook():
       payload = request.json
       # Queue for background processing
       queue.enqueue(process_webhook, payload)
       return jsonify({"status": "received"}), 200
   ```

4. **Error Handling:** Handle both success and failure notifications
   ```python
   if payload['status'] == 'completed':
       ...  # process the results
   elif payload['status'] == 'failed':
       ...  # log the error, retry, or alert
   ```

5. **Validation:** Verify webhook authenticity using custom headers
   ```python
   import os

   secret = request.headers.get('X-Webhook-Secret')
   if secret != os.environ['EXPECTED_SECRET']:
       return jsonify({"error": "Unauthorized"}), 401
   ```

6. **Logging:** Log webhook deliveries for debugging
   ```python
   logger.info(f"Webhook received: {payload['task_id']} - {payload['status']}")
   ```
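
For the idempotency point above, a minimal sketch of a handler that remembers which `task_id`s it has already processed (the in-memory set is purely illustrative; a real deployment would track processed IDs in Redis or a database):

```python
from flask import Flask, request, jsonify

app = Flask(__name__)
processed_tasks = set()  # illustrative only; use persistent storage in production

@app.route('/webhook/crawl-complete', methods=['POST'])
def idempotent_webhook():
    payload = request.json
    task_id = payload['task_id']

    # A duplicate delivery is acknowledged but not processed a second time
    if task_id in processed_tasks:
        return jsonify({"status": "duplicate ignored"}), 200

    processed_tasks.add(task_id)
    # ... handle the result exactly once ...
    return jsonify({"status": "received"}), 200
```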

### Use Cases

**1. Batch Processing**
Submit hundreds of URLs and get notified as each completes:
```python
urls = ["https://site1.com", "https://site2.com", ...]
for url in urls:
    submit_crawl_job(url, webhook_url="https://app.com/webhook")
```

**2. Microservice Integration**
Integrate with event-driven architectures:
```python
# Service A submits job
task_id = submit_crawl_job(url)

# Service B receives webhook and triggers next step
@app.route('/webhook', methods=['POST'])
def webhook():
    process_result(request.json)
    trigger_next_service()
    return "OK", 200
```

**3. Long-Running Extractions**
Handle complex LLM extractions without timeouts:
```python
submit_llm_job(
    url="https://long-article.com",
    q="Comprehensive summary with key points and analysis",
    webhook_url="https://app.com/webhook/llm"
)
```

### Troubleshooting

**Webhook not receiving notifications?**
- Check that your webhook URL is publicly accessible
- Verify firewall/security group settings
- Use webhook testing tools like webhook.site for debugging
- Check server logs for delivery attempts
- Ensure your handler returns a 200-299 status code

**Job stuck in processing?**
- Check the Redis connection: `docker logs <container_name> | grep redis`
- Verify worker processes: `docker exec <container_name> ps aux | grep worker`
- Check server logs: `docker logs <container_name>`

**Need to cancel a job?**
Jobs are processed asynchronously. If you need to cancel:
- Delete the task from Redis (requires Redis CLI access; see the sketch below)
- Or implement a cancellation endpoint in your webhook handler
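
If you go the Redis route, a minimal sketch with `redis-py` is shown below. The connection details and, in particular, the key pattern are assumptions; inspect your instance (for example with `SCAN`) to confirm how task entries are actually keyed before deleting anything:

```python
import redis

# Assumed connection details; adjust to your deployment
r = redis.Redis(host="localhost", port=6379, db=0)

def cancel_task(task_id: str) -> bool:
    """Best-effort cancellation: remove whatever key holds this task's state."""
    deleted = False
    for key in r.scan_iter(match=f"*{task_id}*"):  # key pattern is an assumption
        r.delete(key)
        deleted = True
    return deleted
```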

---

## Dockerfile Parameters

You can customize the image build process using build arguments (`--build-arg`). These are typically used via `docker buildx build` or within the `docker-compose.yml` file.
@@ -529,8 +529,19 @@ class AdminDashboard {
                     </label>
                 </div>
                 <div class="form-group full-width">
-                    <label>Integration Guide</label>
-                    <textarea id="form-integration" rows="10">${app?.integration_guide || ''}</textarea>
+                    <label>Long Description (Markdown - Overview tab)</label>
+                    <textarea id="form-long-description" rows="10" placeholder="Enter detailed description with markdown formatting...">${app?.long_description || ''}</textarea>
+                    <small>Markdown support: **bold**, *italic*, [links](url), # headers, code blocks, lists</small>
+                </div>
+                <div class="form-group full-width">
+                    <label>Integration Guide (Markdown - Integration tab)</label>
+                    <textarea id="form-integration" rows="20" placeholder="Enter integration guide with installation, examples, and code snippets using markdown...">${app?.integration_guide || ''}</textarea>
+                    <small>Single markdown field with installation, examples, and complete guide. Code blocks get auto copy buttons.</small>
+                </div>
+                <div class="form-group full-width">
+                    <label>Documentation (Markdown - Documentation tab)</label>
+                    <textarea id="form-documentation" rows="20" placeholder="Enter documentation with API reference, examples, and best practices using markdown...">${app?.documentation || ''}</textarea>
+                    <small>Full documentation with API reference, examples, best practices, etc.</small>
                 </div>
             </div>
         `;
@@ -712,7 +723,9 @@ class AdminDashboard {
             data.contact_email = document.getElementById('form-email').value;
             data.featured = document.getElementById('form-featured').checked ? 1 : 0;
             data.sponsored = document.getElementById('form-sponsored').checked ? 1 : 0;
+            data.long_description = document.getElementById('form-long-description').value;
             data.integration_guide = document.getElementById('form-integration').value;
+            data.documentation = document.getElementById('form-documentation').value;
         } else if (type === 'articles') {
             data.title = document.getElementById('form-title').value;
             data.slug = this.generateSlug(data.title);
@@ -278,12 +278,12 @@
 }
 
 .tab-content {
-    display: none;
+    display: none !important;
     padding: 2rem;
 }
 
 .tab-content.active {
-    display: block;
+    display: block !important;
 }
 
 /* Overview Layout */
@@ -510,6 +510,31 @@
     line-height: 1.5;
 }
 
+/* Markdown rendered code blocks */
+.integration-content pre,
+.docs-content pre {
+    background: var(--bg-dark);
+    border: 1px solid var(--border-color);
+    margin: 1rem 0;
+    padding: 1rem;
+    padding-top: 2.5rem; /* Space for copy button */
+    overflow-x: auto;
+    position: relative;
+    max-height: none; /* Remove any height restrictions */
+    height: auto; /* Allow content to expand */
+}
+
+.integration-content pre code,
+.docs-content pre code {
+    background: transparent;
+    padding: 0;
+    color: var(--text-secondary);
+    font-size: 0.875rem;
+    line-height: 1.5;
+    white-space: pre; /* Preserve whitespace and line breaks */
+    display: block;
+}
+
 /* Feature Grid */
 .feature-grid {
     display: grid;
@@ -73,27 +73,14 @@
             <div class="tabs">
                 <button class="tab-btn active" data-tab="overview">Overview</button>
                 <button class="tab-btn" data-tab="integration">Integration</button>
-                <button class="tab-btn" data-tab="docs">Documentation</button>
-                <button class="tab-btn" data-tab="support">Support</button>
+                <!-- <button class="tab-btn" data-tab="docs">Documentation</button>
+                <button class="tab-btn" data-tab="support">Support</button> -->
             </div>
 
             <section id="overview-tab" class="tab-content active">
                 <div class="overview-columns">
                     <div class="overview-main">
-                        <h2>Overview</h2>
                         <div id="app-overview">Overview content goes here.</div>
-
-                        <h3>Key Features</h3>
-                        <ul id="app-features" class="features-list">
-                            <li>Feature 1</li>
-                            <li>Feature 2</li>
-                            <li>Feature 3</li>
-                        </ul>
-
-                        <h3>Use Cases</h3>
-                        <div id="app-use-cases" class="use-cases">
-                            <p>Describe how this app can help your workflow.</p>
-                        </div>
                     </div>
 
                     <aside class="sidebar">
@@ -142,37 +129,16 @@
             </section>
 
             <section id="integration-tab" class="tab-content">
-                <div class="integration-content">
-                    <h2>Integration Guide</h2>
-
-                    <h3>Installation</h3>
-                    <div class="code-block">
-                        <pre><code id="install-code"># Installation instructions will appear here</code></pre>
-                    </div>
-
-                    <h3>Basic Usage</h3>
-                    <div class="code-block">
-                        <pre><code id="usage-code"># Usage example will appear here</code></pre>
-                    </div>
-
-                    <h3>Complete Integration Example</h3>
-                    <div class="code-block">
-                        <button class="copy-btn" id="copy-integration">Copy</button>
-                        <pre><code id="integration-code"># Complete integration guide will appear here</code></pre>
-                    </div>
+                <div class="integration-content" id="app-integration">
                 </div>
             </section>
 
-            <section id="docs-tab" class="tab-content">
-                <div class="docs-content">
-                    <h2>Documentation</h2>
-                    <div id="app-docs" class="doc-sections">
-                        <p>Documentation coming soon.</p>
-                    </div>
+            <!-- <section id="docs-tab" class="tab-content">
+                <div class="docs-content" id="app-docs">
                 </div>
-            </section>
+            </section> -->
 
-            <section id="support-tab" class="tab-content">
+            <!-- <section id="support-tab" class="tab-content">
                 <div class="docs-content">
                     <h2>Support</h2>
                     <div class="support-grid">
@@ -190,7 +156,7 @@
                     </div>
                 </div>
             </div>
-        </section>
+        </section> -->
     </div>
 
 </main>
@@ -112,7 +112,7 @@ class AppDetailPage {
         }
 
         // Contact
-        document.getElementById('app-contact').textContent = this.appData.contact_email || 'Not available';
+        document.getElementById('app-contact') && (document.getElementById('app-contact').textContent = this.appData.contact_email || 'Not available');
 
         // Sidebar info
         document.getElementById('sidebar-downloads').textContent = this.formatNumber(this.appData.downloads || 0);
@@ -123,144 +123,132 @@ class AppDetailPage {
         document.getElementById('sidebar-pricing').textContent = this.appData.pricing || 'Free';
         document.getElementById('sidebar-contact').textContent = this.appData.contact_email || 'contact@example.com';
 
-        // Integration guide
-        this.renderIntegrationGuide();
+        // Render tab contents from database fields
+        this.renderTabContents();
     }
 
-    renderIntegrationGuide() {
-        // Installation code
-        const installCode = document.getElementById('install-code');
-        if (installCode) {
-            if (this.appData.type === 'Open Source' && this.appData.github_url) {
-                installCode.textContent = `# Clone from GitHub
-git clone ${this.appData.github_url}
-
-# Install dependencies
-pip install -r requirements.txt`;
-            } else if (this.appData.name.toLowerCase().includes('api')) {
-                installCode.textContent = `# Install via pip
-pip install ${this.appData.slug}
-
-# Or install from source
-pip install git+${this.appData.github_url || 'https://github.com/example/repo'}`;
+    renderTabContents() {
+        // Overview tab - use long_description from database
+        const overviewDiv = document.getElementById('app-overview');
+        if (overviewDiv) {
+            if (this.appData.long_description) {
+                overviewDiv.innerHTML = this.renderMarkdown(this.appData.long_description);
+            } else {
+                overviewDiv.innerHTML = `<p>${this.appData.description || 'No overview available.'}</p>`;
             }
         }
 
-        // Usage code - customize based on category
-        const usageCode = document.getElementById('usage-code');
-        if (usageCode) {
-            if (this.appData.category === 'Browser Automation') {
-                usageCode.textContent = `from crawl4ai import AsyncWebCrawler
-from ${this.appData.slug.replace(/-/g, '_')} import ${this.appData.name.replace(/\s+/g, '')}
-
-async def main():
-    # Initialize ${this.appData.name}
-    automation = ${this.appData.name.replace(/\s+/g, '')}()
-
-    async with AsyncWebCrawler() as crawler:
-        result = await crawler.arun(
-            url="https://example.com",
-            browser_config=automation.config,
-            wait_for="css:body"
-        )
-        print(result.markdown)`;
-            } else if (this.appData.category === 'Proxy Services') {
-                usageCode.textContent = `from crawl4ai import AsyncWebCrawler
-import ${this.appData.slug.replace(/-/g, '_')}
-
-# Configure proxy
-proxy_config = {
-    "server": "${this.appData.website_url || 'https://proxy.example.com'}",
-    "username": "your_username",
-    "password": "your_password"
-}
-
-async with AsyncWebCrawler(proxy=proxy_config) as crawler:
-    result = await crawler.arun(
-        url="https://example.com",
-        bypass_cache=True
-    )
-    print(result.status_code)`;
-            } else if (this.appData.category === 'LLM Integration') {
-                usageCode.textContent = `from crawl4ai import AsyncWebCrawler
-from crawl4ai.extraction_strategy import LLMExtractionStrategy
-
-# Configure LLM extraction
-strategy = LLMExtractionStrategy(
-    provider="${this.appData.name.toLowerCase().includes('gpt') ? 'openai' : 'anthropic'}",
-    api_key="your-api-key",
-    model="${this.appData.name.toLowerCase().includes('gpt') ? 'gpt-4' : 'claude-3'}",
-    instruction="Extract structured data"
-)
-
-async with AsyncWebCrawler() as crawler:
-    result = await crawler.arun(
-        url="https://example.com",
-        extraction_strategy=strategy
-    )
-    print(result.extracted_content)`;
+        // Integration tab - use integration_guide field from database
+        const integrationDiv = document.getElementById('app-integration');
+        if (integrationDiv) {
+            if (this.appData.integration_guide) {
+                integrationDiv.innerHTML = this.renderMarkdown(this.appData.integration_guide);
+                // Add copy buttons to all code blocks
+                this.addCopyButtonsToCodeBlocks(integrationDiv);
+            } else {
+                integrationDiv.innerHTML = '<p>Integration guide not yet available. Please check the official website for details.</p>';
            }
         }
 
-        // Integration example
-        const integrationCode = document.getElementById('integration-code');
-        if (integrationCode) {
-            integrationCode.textContent = this.appData.integration_guide ||
-`# Complete ${this.appData.name} Integration Example
-
-from crawl4ai import AsyncWebCrawler
-from crawl4ai.extraction_strategy import JsonCssExtractionStrategy
-import json
-
-async def crawl_with_${this.appData.slug.replace(/-/g, '_')}():
-    """
-    Complete example showing how to use ${this.appData.name}
-    with Crawl4AI for production web scraping
-    """
-
-    # Define extraction schema
-    schema = {
-        "name": "ProductList",
-        "baseSelector": "div.product",
-        "fields": [
-            {"name": "title", "selector": "h2", "type": "text"},
-            {"name": "price", "selector": ".price", "type": "text"},
-            {"name": "image", "selector": "img", "type": "attribute", "attribute": "src"},
-            {"name": "link", "selector": "a", "type": "attribute", "attribute": "href"}
-        ]
+        // Documentation tab - use documentation field from database
+        const docsDiv = document.getElementById('app-docs');
+        if (docsDiv) {
+            if (this.appData.documentation) {
+                docsDiv.innerHTML = this.renderMarkdown(this.appData.documentation);
+                // Add copy buttons to all code blocks
+                this.addCopyButtonsToCodeBlocks(docsDiv);
+            } else {
+                docsDiv.innerHTML = '<p>Documentation coming soon.</p>';
+            }
+        }
     }
 
-    # Initialize crawler with ${this.appData.name}
-    async with AsyncWebCrawler(
-        browser_type="chromium",
-        headless=True,
-        verbose=True
-    ) as crawler:
-
-        # Crawl with extraction
-        result = await crawler.arun(
-            url="https://example.com/products",
-            extraction_strategy=JsonCssExtractionStrategy(schema),
-            cache_mode="bypass",
-            wait_for="css:.product",
-            screenshot=True
-        )
-
-        # Process results
-        if result.success:
-            products = json.loads(result.extracted_content)
-            print(f"Found {len(products)} products")
-
-            for product in products[:5]:
-                print(f"- {product['title']}: {product['price']}")
-
-            return products
-
-# Run the crawler
-if __name__ == "__main__":
-    import asyncio
-    asyncio.run(crawl_with_${this.appData.slug.replace(/-/g, '_')}())`;
-        }
+    addCopyButtonsToCodeBlocks(container) {
+        // Find all code blocks and add copy buttons
+        const codeBlocks = container.querySelectorAll('pre code');
+        codeBlocks.forEach(codeBlock => {
+            const pre = codeBlock.parentElement;
+
+            // Skip if already has a copy button
+            if (pre.querySelector('.copy-btn')) return;
+
+            // Create copy button
+            const copyBtn = document.createElement('button');
+            copyBtn.className = 'copy-btn';
+            copyBtn.textContent = 'Copy';
+            copyBtn.onclick = () => {
+                navigator.clipboard.writeText(codeBlock.textContent).then(() => {
+                    copyBtn.textContent = '✓ Copied!';
+                    setTimeout(() => {
+                        copyBtn.textContent = 'Copy';
+                    }, 2000);
+                });
+            };
+
+            // Add button to pre element
+            pre.style.position = 'relative';
+            pre.insertBefore(copyBtn, codeBlock);
+        });
+    }
+
+    renderMarkdown(text) {
+        if (!text) return '';
+
+        // Store code blocks temporarily to protect them from processing
+        const codeBlocks = [];
+        let processed = text.replace(/```(\w+)?\n([\s\S]*?)```/g, (match, lang, code) => {
+            const placeholder = `___CODE_BLOCK_${codeBlocks.length}___`;
+            codeBlocks.push(`<pre><code class="language-${lang || ''}">${this.escapeHtml(code)}</code></pre>`);
+            return placeholder;
+        });
+
+        // Store inline code temporarily
+        const inlineCodes = [];
+        processed = processed.replace(/`([^`]+)`/g, (match, code) => {
+            const placeholder = `___INLINE_CODE_${inlineCodes.length}___`;
+            inlineCodes.push(`<code>${this.escapeHtml(code)}</code>`);
+            return placeholder;
+        });
+
+        // Now process the rest of the markdown
+        processed = processed
+            // Headers
+            .replace(/^### (.*$)/gim, '<h3>$1</h3>')
+            .replace(/^## (.*$)/gim, '<h2>$1</h2>')
+            .replace(/^# (.*$)/gim, '<h1>$1</h1>')
+            // Bold
+            .replace(/\*\*(.*?)\*\*/g, '<strong>$1</strong>')
+            // Italic
+            .replace(/\*(.*?)\*/g, '<em>$1</em>')
+            // Links
+            .replace(/\[([^\]]+)\]\(([^)]+)\)/g, '<a href="$2" target="_blank">$1</a>')
+            // Line breaks
+            .replace(/\n\n/g, '</p><p>')
+            .replace(/\n/g, '<br>')
+            // Lists
+            .replace(/^\* (.*)$/gim, '<li>$1</li>')
+            .replace(/^- (.*)$/gim, '<li>$1</li>')
+            // Wrap in paragraphs
+            .replace(/^(?!<[h|p|pre|ul|ol|li])/gim, '<p>')
+            .replace(/(?<![>])$/gim, '</p>');
+
+        // Restore inline code
+        inlineCodes.forEach((code, i) => {
+            processed = processed.replace(`___INLINE_CODE_${i}___`, code);
+        });
+
+        // Restore code blocks
+        codeBlocks.forEach((block, i) => {
+            processed = processed.replace(`___CODE_BLOCK_${i}___`, block);
+        });
+
+        return processed;
+    }
+
+    escapeHtml(text) {
+        const div = document.createElement('div');
+        div.textContent = text;
+        return div.innerHTML;
     }
 
     formatNumber(num) {
@@ -275,45 +263,27 @@
     setupEventListeners() {
         // Tab switching
        const tabs = document.querySelectorAll('.tab-btn');
 
         tabs.forEach(tab => {
             tab.addEventListener('click', () => {
-                // Update active tab
+                // Update active tab button
                 tabs.forEach(t => t.classList.remove('active'));
                 tab.classList.add('active');
 
                 // Show corresponding content
                 const tabName = tab.dataset.tab;
-                document.querySelectorAll('.tab-content').forEach(content => {
+
+                // Hide all tab contents
+                const allTabContents = document.querySelectorAll('.tab-content');
+                allTabContents.forEach(content => {
                     content.classList.remove('active');
                 });
-                document.getElementById(`${tabName}-tab`).classList.add('active');
-            });
-        });
-
-        // Copy integration code
-        document.getElementById('copy-integration').addEventListener('click', () => {
-            const code = document.getElementById('integration-code').textContent;
-            navigator.clipboard.writeText(code).then(() => {
-                const btn = document.getElementById('copy-integration');
-                const originalText = btn.innerHTML;
-                btn.innerHTML = '<span>✓</span> Copied!';
-                setTimeout(() => {
-                    btn.innerHTML = originalText;
-                }, 2000);
-            });
-        });
-
-        // Copy code buttons
-        document.querySelectorAll('.copy-btn').forEach(btn => {
-            btn.addEventListener('click', (e) => {
-                const codeBlock = e.target.closest('.code-block');
-                const code = codeBlock.querySelector('code').textContent;
-                navigator.clipboard.writeText(code).then(() => {
-                    btn.textContent = 'Copied!';
-                    setTimeout(() => {
-                        btn.textContent = 'Copy';
-                    }, 2000);
-                });
+
+                // Show the selected tab content
+                const targetTab = document.getElementById(`${tabName}-tab`);
+                if (targetTab) {
+                    targetTab.classList.add('active');
+                }
             });
         });
     }
@@ -471,13 +471,17 @@ async def delete_sponsor(sponsor_id: int):
 
 app.include_router(router)
 
+# Version info
+VERSION = "1.1.0"
+BUILD_DATE = "2025-10-26"
+
 @app.get("/")
 async def root():
     """API info"""
     return {
         "name": "Crawl4AI Marketplace API",
-        "version": "1.0.0",
+        "version": VERSION,
+        "build_date": BUILD_DATE,
         "endpoints": [
             "/marketplace/api/apps",
             "/marketplace/api/articles",