diff --git a/requirements.txt b/requirements.txt index b388d193..1ec0b72e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,6 +2,7 @@ aiohttp==3.9.5 aiosqlite==0.20.0 bs4==0.0.2 fastapi==0.111.0 +typer==0.9.0 html2text==2024.2.26 httpx==0.27.0 lazy_import==0.2.2 diff --git a/setup.py b/setup.py index 9c81e402..a7b25d29 100644 --- a/setup.py +++ b/setup.py @@ -1,9 +1,20 @@ from setuptools import setup, find_packages +from setuptools.command.install import install as _install +import subprocess, sys + +class InstallCommand(_install): + def run(self): + # Run the standard install first + _install.run(self) + # Now handle the dependencies manually + self.manual_dependencies_install() + + def manual_dependencies_install(self): + with open('requirements.txt') as f: + dependencies = [d for d in f.read().splitlines() if d.strip()] + for dependency in dependencies: + subprocess.check_call([sys.executable, '-m', 'pip', 'install', dependency]) -# Read the requirements from requirements.txt -with open("requirements.txt") as f: - requirements = f.read().splitlines() - setup( name="Crawl4AI", version="0.1.0", @@ -15,7 +26,10 @@ setup( author_email="unclecode@kidocode.com", license="MIT", packages=find_packages(), - install_requires=requirements, + install_requires=[], # Leave this empty to avoid default dependency resolution + cmdclass={ + 'install': InstallCommand, + }, entry_points={ 'console_scripts': [ 'crawl4ai-download-models=crawl4ai.model_loader:main', @@ -32,4 +46,4 @@ setup( "Programming Language :: Python :: 3.10", ], python_requires=">=3.7", -) \ No newline at end of file +)