Parallel processing with retry on failure using exponential backoff; simplified URL validation and normalisation; respects robots.txt
This commit is contained in:
@@ -1,7 +1,8 @@
|
||||
from pydantic import BaseModel
|
||||
from typing import List, Dict
|
||||
from ..models import CrawlResult
|
||||
|
||||
class ScraperResult(BaseModel):
    """Aggregate result of a scraping run for one seed URL.

    Attributes:
        url: The seed URL the scrape started from.
        crawled_urls: Every URL visited during the crawl.
        extracted_data: Per-page crawl results; presumably keyed by the
            page URL — TODO(review): confirm key semantics against the
            crawler that populates this.
    """

    url: str
    crawled_urls: List[str]
    # Single typed annotation: the earlier duplicate untyped `Dict`
    # annotation was dead (the later one wins in a pydantic model) and
    # dropped key/value validation, so it has been removed.
    extracted_data: Dict[str, CrawlResult]
|
||||
Reference in New Issue
Block a user