# config.yml — Crawl4AI API server configuration.
#
# Includes webhook support for the crawl job API (eliminates polling):
#   POST /crawl/job with optional webhook_config:
#     - webhook_url: URL to receive completion notifications
#     - webhook_data_in_payload: include full results (default: false)
#     - webhook_headers: custom headers for authentication
# Webhook delivery retries with exponential backoff (5 attempts: 1s, 2s, 4s,
# 8s, 16s) and a 30-second timeout per call. See WEBHOOK_EXAMPLES.md for usage.
# Application Configuration
|
||
app:
|
||
title: "Crawl4AI API"
|
||
version: "1.0.0"
|
||
host: "0.0.0.0"
|
||
port: 11234
|
||
reload: False
|
||
workers: 1
|
||
timeout_keep_alive: 300
|
||
|
||
# Default LLM Configuration
|
||
llm:
|
||
provider: "openai/gpt-4o-mini"
|
||
api_key_env: "OPENAI_API_KEY"
|
||
# api_key: sk-... # If you pass the API key directly then api_key_env will be ignored
|
||
|
||
# Redis Configuration
|
||
redis:
|
||
host: "localhost"
|
||
port: 6379
|
||
db: 0
|
||
password: ""
|
||
ssl: False
|
||
ssl_cert_reqs: None
|
||
ssl_ca_certs: None
|
||
ssl_certfile: None
|
||
ssl_keyfile: None
|
||
ssl_cert_reqs: None
|
||
ssl_ca_certs: None
|
||
ssl_certfile: None
|
||
ssl_keyfile: None
|
||
|
||
# Rate Limiting Configuration
|
||
rate_limiting:
|
||
enabled: True
|
||
default_limit: "1000/minute"
|
||
trusted_proxies: []
|
||
storage_uri: "memory://" # Use "redis://localhost:6379" for production
|
||
|
||
# Security Configuration
|
||
security:
|
||
enabled: false
|
||
jwt_enabled: false
|
||
https_redirect: false
|
||
trusted_hosts: ["*"]
|
||
headers:
|
||
x_content_type_options: "nosniff"
|
||
x_frame_options: "DENY"
|
||
content_security_policy: "default-src 'self'"
|
||
strict_transport_security: "max-age=63072000; includeSubDomains"
|
||
|
||
# Crawler Configuration
|
||
crawler:
|
||
base_config:
|
||
simulate_user: true
|
||
memory_threshold_percent: 95.0
|
||
rate_limiter:
|
||
enabled: true
|
||
base_delay: [1.0, 2.0]
|
||
timeouts:
|
||
stream_init: 30.0 # Timeout for stream initialization
|
||
batch_process: 300.0 # Timeout for batch processing
|
||
pool:
|
||
max_pages: 40 # ← GLOBAL_SEM permits
|
||
idle_ttl_sec: 1800 # ← 30 min janitor cutoff
|
||
browser:
|
||
kwargs:
|
||
headless: true
|
||
text_mode: true
|
||
extra_args:
|
||
# - "--single-process"
|
||
- "--no-sandbox"
|
||
- "--disable-dev-shm-usage"
|
||
- "--disable-gpu"
|
||
- "--disable-software-rasterizer"
|
||
- "--disable-web-security"
|
||
- "--allow-insecure-localhost"
|
||
- "--ignore-certificate-errors"
|
||
|
||
# Logging Configuration
|
||
logging:
|
||
level: "INFO"
|
||
format: "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
|
||
|
||
# Observability Configuration
|
||
observability:
|
||
prometheus:
|
||
enabled: True
|
||
endpoint: "/metrics"
|
||
health_check:
|
||
endpoint: "/health"
|
||
|
||
# Webhook Configuration
|
||
webhooks:
|
||
enabled: true
|
||
default_url: null # Optional: default webhook URL for all jobs
|
||
data_in_payload: false # Optional: default behavior for including data
|
||
retry:
|
||
max_attempts: 5
|
||
initial_delay_ms: 1000 # 1s, 2s, 4s, 8s, 16s exponential backoff
|
||
max_delay_ms: 32000
|
||
timeout_ms: 30000 # 30s timeout per webhook call
|
||
headers: # Optional: default headers to include
|
||
User-Agent: "Crawl4AI-Webhook/1.0" |