#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<[email protected]>
# http://binux.me
# Created on 2014-02-16 23:12:48
import os
import sys
import time
import inspect
import functools
import traceback
import fractions
from libs.log import LogFormatter
from libs.url import quote_chinese, _build_url, _encode_params, _encode_multipart_formdata
from libs.utils import md5string, hide_me, unicode_obj
from libs.ListIO import ListO
from libs.response import rebuild_response
from collections import namedtuple
from libs.pprint import pprint
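
# ProcessorResult bundles everything a single callback run produced: the
# callback's return value, the follow-up tasks queued via self.crawl(), the
# messages queued via self.send_message(), the captured output/log records,
# the exception raised (if any) and the extra flags collected in _extinfo.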
class ProcessorResult(object):
def __init__(self, result, follows, messages, logs, exception, extinfo):
self.result = unicode_obj(result)
self.follows = unicode_obj(follows)
self.messages = unicode_obj(messages)
self.logs = logs
self.exception = unicode_obj(exception)
self.extinfo = unicode_obj(extinfo)
def rethrow(self):
if self.exception:
raise self.exception
def logstr(self):
result = []
        formatter = LogFormatter(color=False)
for record in self.logs:
if isinstance(record, basestring):
result.append(record)
continue
else:
if record.exc_info:
a, b, tb = record.exc_info
tb = hide_me(tb, globals())
record.exc_info = a, b, tb
                result.append(formatter.format(record))
result.append('\n')
ret = ''.join(result)
if isinstance(ret, unicode):
return ret
else:
try:
return ret.decode('utf8')
except UnicodeDecodeError as e:
return repr(ret)
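
# By default _run() calls response.raise_for_status(), so a callback only sees
# 2xx responses. Decorating a callback with @catch_status_code_error skips
# that check and delivers non-2xx responses as well. A minimal sketch of its
# use inside a handler subclass (on_error is an illustrative name only):
#
#     @catch_status_code_error
#     def on_error(self, response):
#         self.crawl(response.url, callback=self.on_error, retries=3)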
def catch_status_code_error(func):
func._catch_status_code_error = True
return func
def not_send_status(func):
@functools.wraps(func)
def wrapper(self, response, task):
self._extinfo['not_send_status'] = True
function = func.__get__(self, self.__class__)
return self._run_func(function, response, task)
return wrapper
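
# @config attaches default crawl() keyword arguments to a callback; _crawl()
# applies them with kwargs.setdefault() for every task routed to that
# callback. A hedged sketch (index_page is an illustrative name, age is in
# seconds):
#
#     @config(age=10 * 24 * 60 * 60, priority=2)
#     def index_page(self, response):
#         ...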
def config(_config=None, **kwargs):
if _config is None:
_config = {}
_config.update(kwargs)
def wrapper(func):
func._config = _config
return func
return wrapper
class NOTSET(object): pass
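
# @every marks a method as a cron job: the wrapper records its interval as
# .tick (minutes * 60 + seconds, defaulting to one minute) and only runs when
# the scheduler's response.save['tick'] is a multiple of that interval.
# Illustrative use (on_start / index_page are example names only):
#
#     @every(minutes=24 * 60)
#     def on_start(self):
#         self.crawl('http://example.com/', callback=self.index_page)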
def every(minutes=NOTSET, seconds=NOTSET):
def wrapper(func):
@functools.wraps(func)
def on_cronjob(self, response, task):
if response.save and 'tick' in response.save and response.save['tick'] % (minutes * 60 + seconds) != 0:
return None
function = func.__get__(self, self.__class__)
return self._run_func(function, response, task)
on_cronjob.is_cronjob = True
on_cronjob.tick = minutes * 60 + seconds
return on_cronjob
if inspect.isfunction(minutes):
func = minutes
minutes = 1
seconds = 0
return wrapper(func)
if minutes is NOTSET:
if seconds is NOTSET:
minutes = 1
seconds = 0
else:
minutes = 0
if seconds is NOTSET:
seconds = 0
return wrapper
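
# The metaclass collects every method flagged by @every into cron_jobs and
# computes min_tick as the gcd of their intervals, i.e. the smallest tick
# this handler cares about.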
class BaseHandlerMeta(type):
def __new__(cls, name, bases, attrs):
cron_jobs = []
min_tick = 0
for each in attrs.values():
if inspect.isfunction(each) and getattr(each, 'is_cronjob', False):
cron_jobs.append(each)
min_tick = fractions.gcd(min_tick, each.tick)
newcls = type.__new__(cls, name, bases, attrs)
newcls.cron_jobs = cron_jobs
newcls.min_tick = min_tick
return newcls
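
# BaseHandler is the class project scripts subclass. crawl() queues new tasks,
# send_message() talks to other projects, and run() is the entry point that
# executes one callback against a fetched response and packages the outcome
# into a ProcessorResult.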
class BaseHandler(object):
__metaclass__ = BaseHandlerMeta
cron_jobs = []
min_tick = 0
def _reset(self):
self._extinfo = {}
self._messages = []
self._follows = []
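
    # Pass only as many positional arguments as the (bound) callback declares,
    # so callbacks may accept (self, response) or (self, response, task)
    # interchangeably.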
def _run_func(self, function, *arguments):
args, varargs, keywords, defaults = inspect.getargspec(function)
return function(*arguments[:len(args)-1])
def _run(self, task, response):
self._reset()
if isinstance(response, dict):
response = rebuild_response(response)
process = task.get('process', {})
callback = process.get('callback', '__call__')
if not hasattr(self, callback):
raise NotImplementedError("self.%s() not implemented!" % callback)
function = getattr(self, callback)
if not getattr(function, '_catch_status_code_error', False):
response.raise_for_status()
return self._run_func(function, response, task)
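
    # Outermost entry point used by the processor: stdout is redirected into
    # module.log_buffer for the duration of the callback, each produced result
    # is handed to on_result(), and any exception is stored on the returned
    # ProcessorResult instead of being propagated.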
def run(self, module, task, response):
logger = module.logger
result = None
exception = None
stdout = sys.stdout
self.task = task
self.response = response
try:
sys.stdout = ListO(module.log_buffer)
if inspect.isgeneratorfunction(self._run):
for result in self._run(task, response):
self._run_func(self.on_result, result, response, task)
else:
result = self._run(task, response)
self._run_func(self.on_result, result, response, task)
        except Exception as e:
logger.exception(e)
exception = e
finally:
self.task = None
self.response = None
sys.stdout = stdout
follows = self._follows
messages = self._messages
logs = list(module.log_buffer)
extinfo = self._extinfo
module.log_buffer[:] = []
return ProcessorResult(result, follows, messages, logs, exception, extinfo)
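
    # Normalize a single crawl request into a task dict: resolve the callback
    # to a method name, apply @config and self.crawl_config defaults, build
    # the final URL from params, encode data/files, then split the remaining
    # options into the 'schedule', 'fetch' and 'process' sections before
    # appending the task to self._follows.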
def _crawl(self, url, **kwargs):
task = {}
if kwargs.get('callback'):
callback = kwargs['callback']
if isinstance(callback, basestring) and hasattr(self, callback):
func = getattr(self, callback)
elif hasattr(callback, 'im_self') and callback.im_self is self:
func = callback
kwargs['callback'] = func.__name__
else:
raise NotImplementedError("self.%s() not implemented!" % callback)
if hasattr(func, '_config'):
for k, v in func._config.iteritems():
kwargs.setdefault(k, v)
if hasattr(self, 'crawl_config'):
for k, v in self.crawl_config.iteritems():
kwargs.setdefault(k, v)
url = quote_chinese(_build_url(url.strip(), kwargs.get('params')))
if kwargs.get('files'):
assert isinstance(kwargs.get('data', {}), dict), "data must be a dict when using with files!"
content_type, data = _encode_multipart_formdata(kwargs.get('data', {}),
kwargs.get('files', {}))
kwargs.setdefault('headers', {})
kwargs['headers']['Content-Type'] = content_type
kwargs['data'] = data
if kwargs.get('data'):
kwargs['data'] = _encode_params(kwargs['data'])
if kwargs.get('data'):
kwargs.setdefault('method', 'POST')
schedule = {}
for key in ('priority', 'retries', 'exetime', 'age', 'itag', 'force_update'):
if key in kwargs and kwargs[key] is not None:
schedule[key] = kwargs[key]
if schedule:
task['schedule'] = schedule
fetch = {}
for key in ('method', 'headers', 'data', 'timeout', 'allow_redirects', 'cookies', 'proxy', 'etag', 'last_modifed', 'save', 'js_run_at', 'js_script', 'load_images', 'fetch_type'):
if key in kwargs and kwargs[key] is not None:
fetch[key] = kwargs[key]
if fetch:
task['fetch'] = fetch
process = {}
for key in ('callback', ):
if key in kwargs and kwargs[key] is not None:
process[key] = kwargs[key]
if process:
task['process'] = process
task['project'] = self.project_name
task['url'] = url
task['taskid'] = task.get('taskid') or md5string(url)
self._follows.append(task)
return task
# apis
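
    # A minimal usage sketch from inside a handler subclass (the URL and the
    # on_start / index_page names are illustrative only):
    #
    #     def on_start(self):
    #         self.crawl('http://example.com/', callback=self.index_page,
    #                    age=10 * 24 * 60 * 60)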
def crawl(self, url, **kwargs):
'''
params:
url
callback
method
params
data
files
headers
timeout
allow_redirects
cookies
proxy
etag
last_modifed
fetch_type
js_run_at
js_script
load_images
priority
retries
exetime
age
itag
save
taskid
'''
if isinstance(url, basestring):
return self._crawl(url, **kwargs)
elif hasattr(url, "__iter__"):
result = []
for each in url:
result.append(self._crawl(each, **kwargs))
return result
def is_debugger(self):
return self.__env__.get('debugger')
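
    # Queue a (project, msg, url) tuple; the processor is expected to deliver
    # it as a task for the target project's data:,on_message URL, where
    # _on_message() below unpacks response.save into on_message(project, msg).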
def send_message(self, project, msg, url='data:,on_message'):
self._messages.append((project, msg, url))
def on_message(self, project, msg):
pass
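
    # Called for every non-empty value a callback returns or yields: results
    # are pretty-printed under the debugger, otherwise (task, result) is put
    # on the result_queue supplied through self.__env__.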
def on_result(self, result):
if not result:
return
assert self.task, "on_result can't outside a callback."
if self.is_debugger():
pprint(result)
if self.__env__.get('result_queue'):
self.__env__['result_queue'].put((self.task, result))
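
    # The _on_* callbacks below are internal entry points reached through
    # special data: URLs; @not_send_status flags their runs so the processor
    # can skip the normal status report for them.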
@not_send_status
def _on_message(self, response):
project, msg = response.save
return self.on_message(project, msg)
@not_send_status
def _on_cronjob(self, response, task):
for cronjob in self.cron_jobs:
function = cronjob.__get__(self, self.__class__)
self._run_func(function, response, task)
@not_send_status
def _on_get_info(self, response, task):
result = {}
assert response.save
for each in response.save:
if each == 'min_tick':
result[each] = self.min_tick
self.crawl('data:,on_get_info', save=result)