refactor(link_extractor): remove link_extractor and rename to link_preview

This change renames the link_extractor module to link_preview, streamlining the codebase. The accompanying removal of 395 lines of code reduces complexity and improves maintainability. Other files have been updated to reflect this change, ensuring consistency across the project.

BREAKING CHANGE: The link_extractor module has been deleted and replaced with link_preview. Update imports accordingly.
This commit is contained in:
UncleCode
2025-06-27 21:54:22 +08:00
parent 5c9c305dbf
commit 539a324cf6
7 changed files with 71 additions and 71 deletions

View File

@@ -18,7 +18,7 @@ Usage:
import asyncio
from crawl4ai import AsyncWebCrawler, CrawlerRunConfig
from crawl4ai.async_configs import LinkExtractionConfig
from crawl4ai.async_configs import LinkPreviewConfig
async def basic_link_head_extraction():
@@ -30,7 +30,7 @@ async def basic_link_head_extraction():
config = CrawlerRunConfig(
# Enable link head extraction
link_extraction_config=LinkExtractionConfig(
link_preview_config=LinkPreviewConfig(
include_internal=True, # Process internal links
include_external=False, # Skip external links for this demo
max_links=5, # Limit to 5 links
@@ -94,7 +94,7 @@ async def research_assistant_example():
print("=" * 50)
config = CrawlerRunConfig(
link_extraction_config=LinkExtractionConfig(
link_preview_config=LinkPreviewConfig(
include_internal=True,
include_external=True,
include_patterns=["*/docs/*", "*/tutorial/*", "*/guide/*"],
@@ -149,7 +149,7 @@ async def api_discovery_example():
print("=" * 50)
config = CrawlerRunConfig(
link_extraction_config=LinkExtractionConfig(
link_preview_config=LinkPreviewConfig(
include_internal=True,
include_patterns=["*/api/*", "*/reference/*", "*/endpoint/*"],
exclude_patterns=["*/deprecated/*", "*/v1/*"], # Skip old versions
@@ -214,7 +214,7 @@ async def link_quality_analysis():
print("=" * 50)
config = CrawlerRunConfig(
link_extraction_config=LinkExtractionConfig(
link_preview_config=LinkPreviewConfig(
include_internal=True,
max_links=30, # Analyze more links for better statistics
concurrency=15,
@@ -281,7 +281,7 @@ async def pattern_filtering_example():
filters = [
{
"name": "Documentation Only",
"config": LinkExtractionConfig(
"config": LinkPreviewConfig(
include_internal=True,
max_links=10,
concurrency=5,
@@ -292,7 +292,7 @@ async def pattern_filtering_example():
},
{
"name": "API References Only",
"config": LinkExtractionConfig(
"config": LinkPreviewConfig(
include_internal=True,
max_links=10,
concurrency=5,
@@ -303,7 +303,7 @@ async def pattern_filtering_example():
},
{
"name": "Exclude Admin Areas",
"config": LinkExtractionConfig(
"config": LinkPreviewConfig(
include_internal=True,
max_links=10,
concurrency=5,
@@ -318,7 +318,7 @@ async def pattern_filtering_example():
print(f"\n🔍 Testing: {filter_example['name']}")
config = CrawlerRunConfig(
link_extraction_config=filter_example['config'],
link_preview_config=filter_example['config'],
score_links=True
)

View File

@@ -125,7 +125,7 @@ Here's a full example you can copy, paste, and run immediately:
```python
import asyncio
from crawl4ai import AsyncWebCrawler, CrawlerRunConfig
from crawl4ai.async_configs import LinkExtractionConfig
from crawl4ai.async_configs import LinkPreviewConfig
async def extract_link_heads_example():
"""
@@ -136,7 +136,7 @@ async def extract_link_heads_example():
# Configure link head extraction
config = CrawlerRunConfig(
# Enable link head extraction with detailed configuration
link_extraction_config=LinkExtractionConfig(
link_preview_config=LinkPreviewConfig(
include_internal=True, # Extract from internal links
include_external=False, # Skip external links for this example
max_links=10, # Limit to 10 links for demo
@@ -234,12 +234,12 @@ if __name__ == "__main__":
### 2.3 Configuration Deep Dive
The `LinkExtractionConfig` class supports these options:
The `LinkPreviewConfig` class supports these options:
```python
from crawl4ai.async_configs import LinkExtractionConfig
from crawl4ai.async_configs import LinkPreviewConfig
link_extraction_config = LinkExtractionConfig(
link_preview_config = LinkPreviewConfig(
# BASIC SETTINGS
verbose=True, # Show detailed logs (recommended for learning)
@@ -316,7 +316,7 @@ Find the most relevant documentation pages:
```python
async def research_assistant():
config = CrawlerRunConfig(
link_extraction_config=LinkExtractionConfig(
link_preview_config=LinkPreviewConfig(
include_internal=True,
include_external=True,
include_patterns=["*/docs/*", "*/tutorial/*", "*/guide/*"],
@@ -348,7 +348,7 @@ Find all API endpoints and references:
```python
async def api_discovery():
config = CrawlerRunConfig(
link_extraction_config=LinkExtractionConfig(
link_preview_config=LinkPreviewConfig(
include_internal=True,
include_patterns=["*/api/*", "*/reference/*"],
exclude_patterns=["*/deprecated/*"],
@@ -387,7 +387,7 @@ Analyze website structure and content quality:
```python
async def quality_analysis():
config = CrawlerRunConfig(
link_extraction_config=LinkExtractionConfig(
link_preview_config=LinkPreviewConfig(
include_internal=True,
max_links=200,
concurrency=20,
@@ -434,7 +434,7 @@ async def quality_analysis():
```python
# Check your configuration:
config = CrawlerRunConfig(
link_extraction_config=LinkExtractionConfig(
link_preview_config=LinkPreviewConfig(
verbose=True # ← Enable to see what's happening
)
)
@@ -445,7 +445,7 @@ config = CrawlerRunConfig(
# Make sure scoring is enabled:
config = CrawlerRunConfig(
score_links=True, # ← Enable intrinsic scoring
link_extraction_config=LinkExtractionConfig(
link_preview_config=LinkPreviewConfig(
query="your search terms" # ← For contextual scoring
)
)
@@ -454,7 +454,7 @@ config = CrawlerRunConfig(
**Process taking too long?**
```python
# Optimize performance:
link_extraction_config = LinkExtractionConfig(
link_preview_config = LinkPreviewConfig(
max_links=20, # ← Reduce number
concurrency=10, # ← Increase parallelism
timeout=3, # ← Shorter timeout