Compare commits

...

6 Commits

Author SHA1 Message Date
UncleCode
5eeb682719 Delete test.txt 2024-11-19 18:55:11 +08:00
ntohidikplay
593c7ad307 test: trying to push to main 2024-11-19 11:45:26 +01:00
UncleCode
38044d4afe Merge pull request #255 from maheshpec/feature/configure-cache-directory
feat(config): Adding a configurable way of setting the cache directory for constrained environments
2024-11-13 09:43:29 +01:00
Mahesh
00026b5f8b feat(config): Adding a configurable way of setting the cache directory for constrained environments 2024-11-12 14:52:51 -07:00
UncleCode
8c22396d8b Merge pull request #234 from devatnull/patch-1
Fix typo: scrapper → scraper
2024-11-12 08:37:14 +01:00
devatnull
2879344d9c Update README.md 2024-11-06 17:36:46 +03:00
11 changed files with 16 additions and 16 deletions

View File

@@ -1,4 +1,4 @@
# 🔥🕷️ Crawl4AI: LLM Friendly Web Crawler & Scrapper # 🔥🕷️ Crawl4AI: LLM Friendly Web Crawler & Scraper
<a href="https://trendshift.io/repositories/11716" target="_blank"><img src="https://trendshift.io/api/badge/repositories/11716" alt="unclecode%2Fcrawl4ai | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a> <a href="https://trendshift.io/repositories/11716" target="_blank"><img src="https://trendshift.io/api/badge/repositories/11716" alt="unclecode%2Fcrawl4ai | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>

View File

@@ -525,7 +525,7 @@ class AsyncPlaywrightCrawlerStrategy(AsyncCrawlerStrategy):
if self.use_cached_html: if self.use_cached_html:
cache_file_path = os.path.join( cache_file_path = os.path.join(
Path.home(), ".crawl4ai", "cache", hashlib.md5(url.encode()).hexdigest() os.getenv("CRAWL4_AI_BASE_DIRECTORY", Path.home()), ".crawl4ai", "cache", hashlib.md5(url.encode()).hexdigest()
) )
if os.path.exists(cache_file_path): if os.path.exists(cache_file_path):
html = "" html = ""
@@ -725,7 +725,7 @@ class AsyncPlaywrightCrawlerStrategy(AsyncCrawlerStrategy):
if self.use_cached_html: if self.use_cached_html:
cache_file_path = os.path.join( cache_file_path = os.path.join(
Path.home(), ".crawl4ai", "cache", hashlib.md5(url.encode()).hexdigest() os.getenv("CRAWL4_AI_BASE_DIRECTORY", Path.home()), ".crawl4ai", "cache", hashlib.md5(url.encode()).hexdigest()
) )
with open(cache_file_path, "w", encoding="utf-8") as f: with open(cache_file_path, "w", encoding="utf-8") as f:
f.write(html) f.write(html)

View File

@@ -10,7 +10,7 @@ import logging
logging.basicConfig(level=logging.INFO) logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
DB_PATH = os.path.join(Path.home(), ".crawl4ai") DB_PATH = os.path.join(os.getenv("CRAWL4_AI_BASE_DIRECTORY", Path.home()), ".crawl4ai")
os.makedirs(DB_PATH, exist_ok=True) os.makedirs(DB_PATH, exist_ok=True)
DB_PATH = os.path.join(DB_PATH, "crawl4ai.db") DB_PATH = os.path.join(DB_PATH, "crawl4ai.db")

View File

@@ -23,14 +23,14 @@ class AsyncWebCrawler:
self, self,
crawler_strategy: Optional[AsyncCrawlerStrategy] = None, crawler_strategy: Optional[AsyncCrawlerStrategy] = None,
always_by_pass_cache: bool = False, always_by_pass_cache: bool = False,
base_directory: str = str(Path.home()), base_directory: str = str(os.getenv("CRAWL4_AI_BASE_DIRECTORY", Path.home())),
**kwargs, **kwargs,
): ):
self.crawler_strategy = crawler_strategy or AsyncPlaywrightCrawlerStrategy( self.crawler_strategy = crawler_strategy or AsyncPlaywrightCrawlerStrategy(
**kwargs **kwargs
) )
self.always_by_pass_cache = always_by_pass_cache self.always_by_pass_cache = always_by_pass_cache
# self.crawl4ai_folder = os.path.join(Path.home(), ".crawl4ai") # self.crawl4ai_folder = os.path.join(os.getenv("CRAWL4_AI_BASE_DIRECTORY", Path.home()), ".crawl4ai")
self.crawl4ai_folder = os.path.join(base_directory, ".crawl4ai") self.crawl4ai_folder = os.path.join(base_directory, ".crawl4ai")
os.makedirs(self.crawl4ai_folder, exist_ok=True) os.makedirs(self.crawl4ai_folder, exist_ok=True)
os.makedirs(f"{self.crawl4ai_folder}/cache", exist_ok=True) os.makedirs(f"{self.crawl4ai_folder}/cache", exist_ok=True)

View File

@@ -132,7 +132,7 @@ class LocalSeleniumCrawlerStrategy(CrawlerStrategy):
# chromedriver_autoinstaller.install() # chromedriver_autoinstaller.install()
# import chromedriver_autoinstaller # import chromedriver_autoinstaller
# crawl4ai_folder = os.path.join(Path.home(), ".crawl4ai") # crawl4ai_folder = os.path.join(os.getenv("CRAWL4_AI_BASE_DIRECTORY", Path.home()), ".crawl4ai")
# driver = webdriver.Chrome(service=ChromeService(ChromeDriverManager().install()), options=self.options) # driver = webdriver.Chrome(service=ChromeService(ChromeDriverManager().install()), options=self.options)
# chromedriver_path = chromedriver_autoinstaller.install() # chromedriver_path = chromedriver_autoinstaller.install()
# chromedriver_path = chromedriver_autoinstaller.utils.download_chromedriver() # chromedriver_path = chromedriver_autoinstaller.utils.download_chromedriver()
@@ -205,7 +205,7 @@ class LocalSeleniumCrawlerStrategy(CrawlerStrategy):
url_hash = hashlib.md5(url.encode()).hexdigest() url_hash = hashlib.md5(url.encode()).hexdigest()
if self.use_cached_html: if self.use_cached_html:
cache_file_path = os.path.join(Path.home(), ".crawl4ai", "cache", url_hash) cache_file_path = os.path.join(os.getenv("CRAWL4_AI_BASE_DIRECTORY", Path.home()), ".crawl4ai", "cache", url_hash)
if os.path.exists(cache_file_path): if os.path.exists(cache_file_path):
with open(cache_file_path, "r") as f: with open(cache_file_path, "r") as f:
return sanitize_input_encode(f.read()) return sanitize_input_encode(f.read())
@@ -275,7 +275,7 @@ class LocalSeleniumCrawlerStrategy(CrawlerStrategy):
self.driver = self.execute_hook('before_return_html', self.driver, html) self.driver = self.execute_hook('before_return_html', self.driver, html)
# Store in cache # Store in cache
cache_file_path = os.path.join(Path.home(), ".crawl4ai", "cache", url_hash) cache_file_path = os.path.join(os.getenv("CRAWL4_AI_BASE_DIRECTORY", Path.home()), ".crawl4ai", "cache", url_hash)
with open(cache_file_path, "w", encoding="utf-8") as f: with open(cache_file_path, "w", encoding="utf-8") as f:
f.write(html) f.write(html)

View File

@@ -3,7 +3,7 @@ from pathlib import Path
import sqlite3 import sqlite3
from typing import Optional, Tuple from typing import Optional, Tuple
DB_PATH = os.path.join(Path.home(), ".crawl4ai") DB_PATH = os.path.join(os.getenv("CRAWL4_AI_BASE_DIRECTORY", Path.home()), ".crawl4ai")
os.makedirs(DB_PATH, exist_ok=True) os.makedirs(DB_PATH, exist_ok=True)
DB_PATH = os.path.join(DB_PATH, "crawl4ai.db") DB_PATH = os.path.join(DB_PATH, "crawl4ai.db")

View File

@@ -56,7 +56,7 @@ def set_model_device(model):
@lru_cache() @lru_cache()
def get_home_folder(): def get_home_folder():
home_folder = os.path.join(Path.home(), ".crawl4ai") home_folder = os.path.join(os.getenv("CRAWL4_AI_BASE_DIRECTORY", Path.home()), ".crawl4ai")
os.makedirs(home_folder, exist_ok=True) os.makedirs(home_folder, exist_ok=True)
os.makedirs(f"{home_folder}/cache", exist_ok=True) os.makedirs(f"{home_folder}/cache", exist_ok=True)
os.makedirs(f"{home_folder}/models", exist_ok=True) os.makedirs(f"{home_folder}/models", exist_ok=True)

View File

@@ -60,7 +60,7 @@ def get_system_memory():
raise OSError("Unsupported operating system") raise OSError("Unsupported operating system")
def get_home_folder(): def get_home_folder():
home_folder = os.path.join(Path.home(), ".crawl4ai") home_folder = os.path.join(os.getenv("CRAWL4_AI_BASE_DIRECTORY", Path.home()), ".crawl4ai")
os.makedirs(home_folder, exist_ok=True) os.makedirs(home_folder, exist_ok=True)
os.makedirs(f"{home_folder}/cache", exist_ok=True) os.makedirs(f"{home_folder}/cache", exist_ok=True)
os.makedirs(f"{home_folder}/models", exist_ok=True) os.makedirs(f"{home_folder}/models", exist_ok=True)

View File

@@ -20,7 +20,7 @@ class WebCrawler:
def __init__(self, crawler_strategy: CrawlerStrategy = None, always_by_pass_cache: bool = False, verbose: bool = False): def __init__(self, crawler_strategy: CrawlerStrategy = None, always_by_pass_cache: bool = False, verbose: bool = False):
self.crawler_strategy = crawler_strategy or LocalSeleniumCrawlerStrategy(verbose=verbose) self.crawler_strategy = crawler_strategy or LocalSeleniumCrawlerStrategy(verbose=verbose)
self.always_by_pass_cache = always_by_pass_cache self.always_by_pass_cache = always_by_pass_cache
self.crawl4ai_folder = os.path.join(Path.home(), ".crawl4ai") self.crawl4ai_folder = os.path.join(os.getenv("CRAWL4_AI_BASE_DIRECTORY", Path.home()), ".crawl4ai")
os.makedirs(self.crawl4ai_folder, exist_ok=True) os.makedirs(self.crawl4ai_folder, exist_ok=True)
os.makedirs(f"{self.crawl4ai_folder}/cache", exist_ok=True) os.makedirs(f"{self.crawl4ai_folder}/cache", exist_ok=True)
init_db() init_db()

View File

@@ -13,7 +13,7 @@ AsyncWebCrawler(
# Cache Settings # Cache Settings
always_by_pass_cache: bool = False, # Always bypass cache always_by_pass_cache: bool = False, # Always bypass cache
base_directory: str = str(Path.home()), # Base directory for cache base_directory: str = str(os.getenv("CRAWL4_AI_BASE_DIRECTORY", Path.home())), # Base directory for cache
# Network Settings # Network Settings
proxy: str = None, # Simple proxy URL proxy: str = None, # Simple proxy URL

View File

@@ -8,7 +8,7 @@ import sys
# Create the .crawl4ai folder in the user's home directory if it doesn't exist # Create the .crawl4ai folder in the user's home directory if it doesn't exist
# If the folder already exists, remove the cache folder # If the folder already exists, remove the cache folder
crawl4ai_folder = Path.home() / ".crawl4ai" crawl4ai_folder = Path(os.getenv("CRAWL4_AI_BASE_DIRECTORY", Path.home())) / ".crawl4ai"
cache_folder = crawl4ai_folder / "cache" cache_folder = crawl4ai_folder / "cache"
if cache_folder.exists(): if cache_folder.exists():