from os.path import dirname, join
from pkg_resources import parse_version
from setuptools import setup, find_packages, __version__ as setuptools_version


with open(join(dirname(__file__), 'scrapy/VERSION'), 'rb') as f:
    version = f.read().decode('ascii').strip()


def has_environment_marker_platform_impl_support():
    """Code extracted from 'pytest/setup.py'
    https://github.com/pytest-dev/pytest/blob/7538680c/setup.py#L31

    The first known release to support environment markers with range
    operators is 18.5, see:
    https://setuptools.readthedocs.io/en/latest/history.html#id235
    """
    return parse_version(setuptools_version) >= parse_version('18.5')


install_requires = [
    'cryptography>=2.0',
    'cssselect>=0.9.1',
    'itemloaders>=1.0.1',
    'parsel>=1.5.0',
    'pyOpenSSL>=16.2.0',
    'queuelib>=1.4.2',
    'service_identity>=16.0.0',
    'w3lib>=1.17.0',
    'zope.interface>=4.1.3',
    'protego>=0.1.15',
    'itemadapter>=0.1.0',
]
extras_require = {}
cpython_dependencies = [
    'lxml>=3.5.0',
    'PyDispatcher>=2.0.5',
]
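# extras_require keys of the form ':<environment marker>' (an empty extra name
# followed by a marker) are treated by setuptools as conditional dependencies:
# the listed packages are installed alongside the base requirements whenever
# the marker matches the running interpreter, without the user having to
# request an extra explicitly.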
if has_environment_marker_platform_impl_support():
    extras_require[':platform_python_implementation == "CPython"'] = cpython_dependencies
    extras_require[':platform_python_implementation == "PyPy"'] = [
        # Earlier lxml versions are affected by
        # https://foss.heptapod.net/pypy/pypy/-/issues/2498,
        # which was fixed in Cython 0.26, released on 2017-06-19, and used to
        # generate the C headers of lxml release tarballs published since then, the
        # first of which was:
        'lxml>=4.0.0',
        'PyPyDispatcher>=2.1.0',
    ]
else:
    install_requires.extend(cpython_dependencies)


setup(
    name='Scrapy',
    version=version,
    url='https://scrapy.org',
    project_urls={
        'Documentation': 'https://docs.scrapy.org/',
        'Source': 'https://github.com/scrapy/scrapy',
        'Tracker': 'https://github.com/scrapy/scrapy/issues',
    },
    description='A high-level Web Crawling and Web Scraping framework',
    long_description=open('README.rst').read(),
    author='Scrapy developers',
    maintainer='Pablo Hoffman',
    maintainer_email='pablo@pablohoffman.com',
    license='BSD',
    packages=find_packages(exclude=('tests', 'tests.*')),
    include_package_data=True,
    zip_safe=False,
    entry_points={
        'console_scripts': ['scrapy = scrapy.cmdline:execute']
    },
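    # The console_scripts entry point installs a `scrapy` executable on the
    # user's PATH that dispatches to scrapy.cmdline:execute().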
    classifiers=[
        'Framework :: Scrapy',
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development :: Libraries :: Application Frameworks',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    python_requires='>=3.6',
    install_requires=install_requires,
    extras_require=extras_require,
)
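# A typical local installation from a checkout of this repository:
#
#   pip install .       # regular install
#   pip install -e .    # editable install for development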