-
Notifications
You must be signed in to change notification settings - Fork 1
/
process_items.py
executable file
·123 lines (101 loc) · 3.45 KB
/
process_items.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""A script to process items from a redis queue."""
from __future__ import print_function, unicode_literals
import argparse
import json
import logging
import pprint
import sys
import time
import MySQLdb
from scrapy_redis import get_redis
# Module-level logger for this script.
logger = logging.getLogger('process_items')
# NOTE(review): DB credentials are hard-coded in source — move them to a
# config file or environment variables before deploying.
# charset="utf8" so non-ASCII item fields survive the round trip to MySQL.
conn = MySQLdb.connect(host='127.0.0.1', user='root', passwd='gzr123123', db = 'dangdang', port=3306,charset="utf8")
# Shared cursor used by process_items() and closed in main().
cur = conn.cursor()
def process_items(r, keys, timeout, limit=0, log_every=1000, wait=.1):
    """Pop JSON items from a redis queue and insert them into MySQL.

    Parameters
    ----------
    r : Redis
        Redis connection instance.
    keys : list
        List of keys to read the items from.
    timeout : int
        Read timeout (seconds) for each ``blpop`` call.
    limit : int
        Maximum number of items to process; 0 means no limit.
    log_every : int
        Emit a progress log line every ``log_every`` processed items.
    wait : float
        Seconds to sleep after an empty poll before retrying.
    """
    limit = limit or float('inf')
    processed = 0
    while processed < limit:
        # Change ``blpop`` to ``brpop`` to process as LIFO.
        ret = r.blpop(keys, timeout)
        # ``blpop`` returns None on timeout; back off briefly and poll again.
        if ret is None:
            time.sleep(wait)
            continue
        source, data = ret
        try:
            item = json.loads(data)
        except Exception:
            logger.exception("Failed to load item:\n%r", pprint.pformat(data))
            continue
        try:
            # Parameterized query: the driver handles quoting/escaping, which
            # prevents SQL injection and breakage on quotes in scraped data
            # (the old string-interpolated SQL had both problems).
            sqlstr = ("insert into infos(name,picurl,url,comment,"
                      "publish_author,publish_time,publish_company,price,"
                      "crawled,spider) "
                      "values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)")
            params = (
                item['name'],
                item['picurl'],
                item['url'],
                # int() accepts str on both Python 2 and 3; the old
                # int(....encode('UTF-8')) raised TypeError on Python 3.
                int(item['comment']),
                item['publish_author'],
                item['publish_time'],
                item['publish_company'],
                item['price'],
                item['crawled'],
                item['spider'],
            )
            cur.execute(sqlstr, params)
            conn.commit()
        except (KeyError, ValueError):
            # KeyError: item missing a field; ValueError: non-numeric comment
            # count. Either way skip the item instead of killing the loop.
            logger.exception("[%s] Failed to process item:\n%r",
                             source, pprint.pformat(item))
            continue
        processed += 1
        if processed % log_every == 0:
            logger.info("Processed %s items", processed)
def main():
    """Parse CLI arguments and pump items from redis into MySQL.

    Returns the process exit code: 0 on success or interrupt, 2 on an
    unhandled exception.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('key', help="Redis key where items are stored")
    parser.add_argument('--host')
    # type=int: redis clients expect a numeric port, not the raw string.
    parser.add_argument('--port', type=int)
    parser.add_argument('--timeout', type=int, default=5)
    parser.add_argument('--limit', type=int, default=0)
    parser.add_argument('--progress-every', type=int, default=100)
    parser.add_argument('-v', '--verbose', action='store_true')
    args = parser.parse_args()
    # Only forward host/port when explicitly given so get_redis() can fall
    # back to its own defaults.
    params = {}
    if args.host:
        params['host'] = args.host
    if args.port:
        params['port'] = args.port
    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
    r = get_redis(**params)
    host = r.connection_pool.get_connection('info').host
    logger.info("Waiting for items in '%s' (server: %s)", args.key, host)
    kwargs = {
        'keys': [args.key],
        'timeout': args.timeout,
        'limit': args.limit,
        'log_every': args.progress_every,
    }
    try:
        process_items(r, **kwargs)
        retcode = 0  # ok
    except KeyboardInterrupt:
        retcode = 0  # ok
    except Exception:
        logger.exception("Unhandled exception")
        retcode = 2
    finally:
        # Release DB resources on every exit path; the original never
        # closed the connection (only the cursor), leaking it.
        cur.close()
        conn.close()
    return retcode
# Script entry point: exit with main()'s return code (0 ok, 2 on error).
if __name__ == '__main__':
    sys.exit(main())