forked from scrapinghub/scrapyrt
-
Notifications
You must be signed in to change notification settings - Fork 0
/
setup.py
48 lines (46 loc) · 1.4 KB
/
setup.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Packaging script for scrapyrt (Scrapy realtime HTTP API).
# NOTE: the shebang must be the FIRST line of the file to have any
# effect when the script is executed directly; the original had it
# below the coding declaration.
from setuptools import setup, find_packages
from os.path import join, dirname

# Single source of truth for the version: the scrapyrt/VERSION file
# shipped inside the package (see package_data below).
with open(join(dirname(__file__), 'scrapyrt/VERSION'), 'rb') as f:
    version = f.read().decode('ascii').strip()

# Read the long description with a context manager so the file handle
# is closed promptly (the original leaked an open README handle).
# No encoding= kwarg: the classifiers declare Python 2.7 support and
# py2's builtin open() does not accept it.
with open('README.rst') as f:
    long_description = f.read()

setup(
    name="scrapyrt",
    version=version,
    author='Scrapinghub',
    author_email='info@scrapinghub.com',
    url="https://github.com/scrapinghub/scrapyrt",
    maintainer='Scrapinghub',
    maintainer_email='info@scrapinghub.com',
    description='Put Scrapy spiders behind an HTTP API',
    long_description=long_description,
    license='BSD',
    packages=find_packages(),
    entry_points={
        'console_scripts': ['scrapyrt = scrapyrt.cmdline:execute']
    },
    zip_safe=False,
    classifiers=[
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Operating System :: OS Independent',
        'Environment :: Console',
        'Environment :: No Input/Output (Daemon)',
        'Topic :: Internet :: WWW/HTTP',
        'License :: OSI Approved :: BSD License',
    ],
    install_requires=[
        'Twisted>=14.0.0',
        'Scrapy>=1.0.0',
        'demjson',
        'six>=1.5.2'
    ],
    package_data={
        'scrapyrt': [
            'VERSION',
        ]
    },
)