chore: Update pip installation command and requirements, add new dependencies

This commit is contained in:
unclecode
2024-05-17 15:21:45 +08:00
parent a317dc5e1d
commit bf3b040f10

100
setup.py
View File

@@ -1,3 +1,39 @@
from setuptools import setup, find_packages

# Read the runtime dependencies from requirements.txt so the distribution
# metadata stays in sync with the pinned requirements file.
# Use a context manager so the handle is closed, and filter out blank lines
# so they are not handed to setuptools as (invalid) requirement specifiers.
with open("requirements.txt", encoding="utf-8") as req_file:
    requirements = [line for line in req_file.read().splitlines() if line.strip()]

# Load the long description from the README. An explicit UTF-8 encoding is
# required for portability: the text contains emoji, which breaks on
# platforms whose default locale encoding is not UTF-8.
with open("README.md", encoding="utf-8") as readme_file:
    long_description = readme_file.read()

setup(
    name="Crawl4AI",
    version="0.1.2",
    description="🔥🕷️ Crawl4AI: Open-source LLM Friendly Web Crawler & Scrapper",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/unclecode/crawl4ai",
    author="Unclecode",
    author_email="unclecode@kidocode.com",
    license="MIT",
    packages=find_packages(),
    install_requires=requirements,
    entry_points={
        'console_scripts': [
            'crawl4ai-download-models=crawl4ai.model_loader:main',
        ],
    },
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        # Fixed: the classifier previously declared Apache, contradicting
        # license="MIT" above; the two must agree.
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
    ],
    python_requires=">=3.7",
)
from setuptools import setup, find_packages
from setuptools.command.install import install as _install
import subprocess
@@ -16,35 +52,35 @@ class InstallCommand(_install):
for dependency in dependencies:
subprocess.check_call([sys.executable, '-m', 'pip', 'install', dependency])
# Legacy (pre-requirements.txt) packaging path: dependencies are installed
# imperatively by the InstallCommand hook defined above, so install_requires
# is deliberately left empty to bypass setuptools' dependency resolution.
#
# Read the README with a context manager (the bare open(...).read() leaked
# the file handle) and an explicit UTF-8 encoding, since the description
# contains emoji that fail to decode under non-UTF-8 default locales.
with open("README.md", encoding="utf-8") as readme_file:
    long_description = readme_file.read()

setup(
    name="Crawl4AI",
    version="0.1.0",
    description="🔥🕷️ Crawl4AI: Open-source LLM Friendly Web Crawler & Scrapper",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/unclecode/crawl4ai",
    author="Unclecode",
    author_email="unclecode@kidocode.com",
    license="MIT",
    packages=find_packages(),
    install_requires=[],  # intentionally empty: InstallCommand pip-installs deps itself
    cmdclass={
        'install': InstallCommand,
    },
    entry_points={
        'console_scripts': [
            'crawl4ai-download-models=crawl4ai.model_loader:main',
        ],
    },
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        # Fixed: classifier said Apache while license="MIT"; made consistent.
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
    ],
    python_requires=">=3.7",
)
# setup(
# name="Crawl4AI",
# version="0.1.0",
# description="🔥🕷️ Crawl4AI: Open-source LLM Friendly Web Crawler & Scrapper",
# long_description=open("README.md").read(),
# long_description_content_type="text/markdown",
# url="https://github.com/unclecode/crawl4ai",
# author="Unclecode",
# author_email="unclecode@kidocode.com",
# license="MIT",
# packages=find_packages(),
# install_requires=[], # Leave this empty to avoid default dependency resolution
# cmdclass={
# 'install': InstallCommand,
# },
# entry_points={
# 'console_scripts': [
# 'crawl4ai-download-models=crawl4ai.model_loader:main',
# ],
# },
# classifiers=[
# "Development Status :: 3 - Alpha",
# "Intended Audience :: Developers",
# "License :: OSI Approved :: Apache Software License",
# "Programming Language :: Python :: 3",
# "Programming Language :: Python :: 3.7",
# "Programming Language :: Python :: 3.8",
# "Programming Language :: Python :: 3.9",
# "Programming Language :: Python :: 3.10",
# ],
# python_requires=">=3.7",
# )