
Commit 72b16ac

Apparently (x,) is legal now, so no need for (x, )
ask committed Jul 11, 2015
1 parent b286238 commit 72b16ac
Showing 83 changed files with 191 additions and 190 deletions.
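The change is purely cosmetic: both spellings have always built the same one-element tuple in Python, so "legal now" presumably refers to the project's style checker no longer flagging a comma placed directly before a closing parenthesis. A minimal illustration, not part of the commit (the variable name is invented):

```python
x = 42

# Both spellings build the same one-element tuple; only the comma matters.
assert (x,) == (x, ) == tuple([x])

# Without the comma there is no tuple at all, just a parenthesized value.
assert (x) == x
```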
2 changes: 1 addition & 1 deletion celery/app/amqp.py
@@ -238,7 +238,7 @@ def Queues(self, queues, create_missing=None, ha_policy=None,
if not queues and conf.CELERY_DEFAULT_QUEUE:
queues = (Queue(conf.CELERY_DEFAULT_QUEUE,
exchange=self.default_exchange,
- routing_key=conf.CELERY_DEFAULT_ROUTING_KEY), )
+ routing_key=conf.CELERY_DEFAULT_ROUTING_KEY),)
autoexchange = (self.autoexchange if autoexchange is None
else autoexchange)
return self.queues_cls(
2 changes: 1 addition & 1 deletion celery/app/annotations.py
@@ -50,7 +50,7 @@ def expand_annotation(annotation):
if annotations is None:
return ()
elif not isinstance(annotations, (list, tuple)):
- annotations = (annotations, )
+ annotations = (annotations,)
return [expand_annotation(anno) for anno in annotations]


6 changes: 3 additions & 3 deletions celery/app/base.py
@@ -253,7 +253,7 @@ def _create_task_cls(fun):
ret = self._task_from_fun(fun, **opts)
else:
# return a proxy object that evaluates on first use
- ret = PromiseProxy(self._task_from_fun, (fun, ), opts,
+ ret = PromiseProxy(self._task_from_fun, (fun,), opts,
__doc__=fun.__doc__)
self._pending.append(ret)
if _filt:
@@ -280,7 +280,7 @@ def _task_from_fun(self, fun, name=None, base=None, bind=False, **options):

if name not in self._tasks:
run = fun if bind else staticmethod(fun)
- task = type(fun.__name__, (base, ), dict({
+ task = type(fun.__name__, (base,), dict({
'app': self,
'name': name,
'run': run,
@@ -583,7 +583,7 @@ def __reduce__(self):
if not keep_reduce:
attrs['__reduce__'] = __reduce__

- return type(name or Class.__name__, (Class, ), attrs)
+ return type(name or Class.__name__, (Class,), attrs)

def _rgetattr(self, path):
return attrgetter(path)(self)
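Several of the base.py hunks above (and the local.py and five.py hunks further down) touch the same pattern: a one-element tuple of base classes passed to the three-argument type() call. A small sketch of that pattern with invented names:

```python
class Base(object):
    greeting = 'hello'

# type(name, bases, namespace) expects a tuple of bases;
# (Base,) and (Base, ) are the same one-element tuple.
Derived = type('Derived', (Base,), {'source': 'dynamic'})

assert Derived().greeting == 'hello'
assert Derived.source == 'dynamic'
```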
2 changes: 1 addition & 1 deletion celery/app/routes.py
@@ -92,5 +92,5 @@ def expand_route(route):
if routes is None:
return ()
if not isinstance(routes, (list, tuple)):
- routes = (routes, )
+ routes = (routes,)
return [expand_route(route) for route in routes]
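The annotations.py and routes.py hunks share the same normalization idiom: a single object is wrapped in a one-element tuple so the rest of the code can always iterate. A stripped-down sketch of that idiom (names are illustrative, not Celery's API):

```python
def expand_routes(routes):
    """Accept a single route or a list/tuple of routes; always return a list."""
    if routes is None:
        return []
    if not isinstance(routes, (list, tuple)):
        routes = (routes,)  # the comma, not the space, makes this a tuple
    return [route for route in routes]

assert expand_routes('default') == ['default']
assert expand_routes(['a', 'b']) == ['a', 'b']
assert expand_routes(None) == []
```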
7 changes: 3 additions & 4 deletions celery/app/task.py
@@ -470,15 +470,14 @@ def apply_async(self, args=None, kwargs=None, task_id=None, producer=None,
# add 'self' if this is a "task_method".
if self.__self__ is not None:
args = args if isinstance(args, tuple) else tuple(args or ())
- args = (self.__self__, ) + args
- shadow = shadow or self.shadow_name(args, kwargs, final_options)
+ args = (self.__self__,) + args

preopts = self._get_exec_options()
options = dict(preopts, **options) if options else preopts
return app.send_task(
self.name, args, kwargs, task_id=task_id, producer=producer,
link=link, link_error=link_error, result_cls=self.AsyncResult,
- shadow=shadow,
+ shadow=shadow or self.shadow_name(args, kwargs, options),
**options
)

@@ -658,7 +657,7 @@ def apply(self, args=None, kwargs=None,
args = args or ()
# add 'self' if this is a bound method.
if self.__self__ is not None:
- args = (self.__self__, ) + tuple(args)
+ args = (self.__self__,) + tuple(args)
kwargs = kwargs or {}
task_id = options.get('task_id') or uuid()
retries = options.get('retries', 0)
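The task.py hunk above (and the multi.py and canvas.py hunks below) prepend a single element by concatenating a one-element tuple. A tiny example of that idiom with invented values:

```python
first = 'celery'
rest = ('worker', '--loglevel=info')

# (first,) + rest prepends one element; (first) + rest would try to
# concatenate a str with a tuple and raise TypeError.
cmdline = (first,) + rest
assert cmdline == ('celery', 'worker', '--loglevel=info')
```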
6 changes: 3 additions & 3 deletions celery/app/trace.py
@@ -310,7 +310,7 @@ def on_error(request, exc, uuid, state=FAILURE, call_errbacks=True):
group(
[signature(errback, app=app)
for errback in request.errbacks or []], app=app,
- ).apply_async((uuid, ))
+ ).apply_async((uuid,))
return I, R, I.state, I.retval

def trace_task(uuid, args, kwargs, request=None):
@@ -392,9 +392,9 @@ def trace_task(uuid, args, kwargs, request=None):
else:
sigs.append(sig)
for group_ in groups:
- group.apply_async((retval, ))
+ group.apply_async((retval,))
if sigs:
- group(sigs).apply_async((retval, ))
+ group(sigs).apply_async((retval,))
else:
signature(callbacks[0], app=app).delay(retval)
if publish_result:
2 changes: 1 addition & 1 deletion celery/apps/worker.py
@@ -175,7 +175,7 @@ def on_start(self):

def on_consumer_ready(self, consumer):
signals.worker_ready.send(sender=consumer)
- print('{0} ready.'.format(safe_str(self.hostname), ))
+ print('{0} ready.'.format(safe_str(self.hostname),))

def setup_logging(self, colorize=None):
if colorize is None and self.no_color is not None:
4 changes: 2 additions & 2 deletions celery/backends/base.py
@@ -131,7 +131,7 @@ def chord_error_from_stack(self, callback, exc=None):
[app.signature(errback)
for errback in callback.options.get('link_error') or []],
app=app,
- ).apply_async((callback.id, ))
+ ).apply_async((callback.id,))
except Exception as eb_exc:
return backend.fail_from_current_stack(callback.id, exc=eb_exc)
else:
@@ -352,7 +352,7 @@ def fallback_chord_unlock(self, group_id, body, result=None,
countdown=1, **kwargs):
kwargs['result'] = [r.as_tuple() for r in result]
self.app.tasks['celery.chord_unlock'].apply_async(
- (group_id, body, ), kwargs, countdown=countdown,
+ (group_id, body,), kwargs, countdown=countdown,
)

def apply_chord(self, header, partial_args, group_id, body,
2 changes: 1 addition & 1 deletion celery/backends/mongodb.py
@@ -126,7 +126,7 @@ def __init__(self, app=None, url=None, **kwargs):
self.options.update(config)

def _prepare_client_options(self):
- if pymongo.version_tuple >= (3, ):
+ if pymongo.version_tuple >= (3,):
return {'maxPoolSize': self.max_pool_size}
else: # pragma: no cover
return {'max_pool_size': self.max_pool_size,
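In the MongoDB backend the one-element tuple is compared against pymongo's version tuple, relying on Python's element-wise tuple comparison. A quick check with made-up version numbers:

```python
# Tuples compare element-wise; when a common prefix is equal,
# the longer tuple is the greater one.
assert (3, 0, 3) >= (3,)
assert not (2, 9, 1) >= (3,)
```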
2 changes: 1 addition & 1 deletion celery/backends/redis.py
@@ -264,7 +264,7 @@ def client(self):

def __reduce__(self, args=(), kwargs={}):
return super(RedisBackend, self).__reduce__(
- (self.url, ), {'expires': self.expires},
+ (self.url,), {'expires': self.expires},
)

@deprecated_property(3.2, 3.3)
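The Redis backend hunk (like the __reduce__ hunks in canvas.py and five.py) hands a one-element argument tuple back to the pickle protocol. A minimal, self-contained sketch of that contract, with an invented class:

```python
import pickle


class Holder(object):
    def __init__(self, url):
        self.url = url

    def __reduce__(self):
        # pickle calls Holder(self.url) when unpickling; the second item
        # must be a tuple of positional arguments, here a one-element one.
        return Holder, (self.url,)


restored = pickle.loads(pickle.dumps(Holder('redis://localhost:6379/0')))
assert restored.url == 'redis://localhost:6379/0'
```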
2 changes: 1 addition & 1 deletion celery/bin/logtool.py
@@ -162,7 +162,7 @@ def incomplete(self, files):
audit = Audit()
audit.run(files)
for task_id in audit.incomplete_tasks():
- self.error('Did not complete: %r' % (task_id, ))
+ self.error('Did not complete: %r' % (task_id,))

def debug(self, files):
Audit(on_debug=self.out).run(files)
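In logtool.py (and in multi.py just below) the one-element tuple sits on the right-hand side of %-formatting, which protects against the formatted value itself being a tuple. A small illustration with a hypothetical task id:

```python
task_id = ('group-id', 'task-id')  # the value might itself be a tuple

# Wrapping in a one-element tuple keeps % from unpacking the value:
print('Did not complete: %r' % (task_id,))
# -> Did not complete: ('group-id', 'task-id')

# 'Did not complete: %r' % task_id would instead raise:
# TypeError: not all arguments converted during string formatting
```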
6 changes: 3 additions & 3 deletions celery/bin/multi.py
@@ -160,7 +160,7 @@ def main():


def celery_exe(*args):
- return ' '.join((CELERY_EXE, ) + args)
+ return ' '.join((CELERY_EXE,) + args)


class MultiTool(object):
@@ -494,11 +494,11 @@ def multi_args(p, cmd='celery worker', append='', prefix='', suffix=''):
if ns_name.isdigit():
ns_index = int(ns_name) - 1
if ns_index < 0:
- raise KeyError('Indexes start at 1 got: %r' % (ns_name, ))
+ raise KeyError('Indexes start at 1 got: %r' % (ns_name,))
try:
p.namespaces[names[ns_index]].update(ns_opts)
except IndexError:
- raise KeyError('No node at index %r' % (ns_name, ))
+ raise KeyError('No node at index %r' % (ns_name,))

for name in names:
hostname = suffix
4 changes: 2 additions & 2 deletions celery/bootsteps.py
@@ -21,7 +21,7 @@

try:
from greenlet import GreenletExit
- IGNORE_ERRORS = (GreenletExit, )
+ IGNORE_ERRORS = (GreenletExit,)
except ImportError: # pragma: no cover
IGNORE_ERRORS = ()

@@ -393,7 +393,7 @@ def include(self, parent):


class ConsumerStep(StartStopStep):
- requires = ('celery.worker.consumer:Connection', )
+ requires = ('celery.worker.consumer:Connection',)
consumers = None

def get_consumers(self, channel):
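The bootsteps.py and platforms.py hunks build one-element tuples of exception types, the form that both except clauses and isinstance checks accept. A short sketch:

```python
IGNORE_ERRORS = (KeyError,)  # one-element tuple of exception types

try:
    {}['missing']
except IGNORE_ERRORS:
    # except accepts a single exception type or a tuple of them;
    # an empty tuple, as in the ImportError fallback, matches nothing.
    pass
```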
10 changes: 5 additions & 5 deletions celery/canvas.py
@@ -280,12 +280,12 @@ def __or__(self, other):
if isinstance(other, group):
other = maybe_unroll_group(other)
if not isinstance(self, chain) and isinstance(other, chain):
- return chain((self, ) + other.tasks, app=self._app)
+ return chain((self,) + other.tasks, app=self._app)
elif isinstance(other, chain):
return chain(*self.tasks + other.tasks, app=self._app)
elif isinstance(other, Signature):
if isinstance(self, chain):
- return chain(*self.tasks + (other, ), app=self._app)
+ return chain(*self.tasks + (other,), app=self._app)
return chain(self, other, app=self._app)
return NotImplemented

@@ -299,7 +299,7 @@ def __invert__(self):
def __reduce__(self):
# for serialization, the task type is lazily loaded,
# and not stored in the dict itself.
- return signature, (dict(self), )
+ return signature, (dict(self),)

def __json__(self):
return dict(self)
@@ -484,7 +484,7 @@ def apply(self, args=(), kwargs={}, **options):
last, fargs = None, args
for task in self.tasks:
res = task.clone(fargs).apply(
- last and (last.get(), ), **dict(self.options, **options))
+ last and (last.get(),), **dict(self.options, **options))
res.parent, last, fargs = last, res, None
return last

@@ -835,7 +835,7 @@ def apply(self, args=(), kwargs={}, propagate=True, body=None, **options):
tasks = (self.tasks.clone() if isinstance(self.tasks, group)
else group(self.tasks))
return body.apply(
- args=(tasks.apply().get(propagate=propagate), ),
+ args=(tasks.apply().get(propagate=propagate),),
)

def _traverse_tasks(self, tasks, value=None):
10 changes: 5 additions & 5 deletions celery/concurrency/asynpool.py
@@ -196,7 +196,7 @@ def on_loop_start(self, pid):
# our version sends a WORKER_UP message when the process is ready
# to accept work, this will tell the parent that the inqueue fd
# is writable.
- self.outq.put((WORKER_UP, (pid, )))
+ self.outq.put((WORKER_UP, (pid,)))


class ResultHandler(_pool.ResultHandler):
@@ -644,8 +644,8 @@ def _create_write_handlers(self, hub,
revoked_tasks = worker_state.revoked
getpid = os.getpid

- precalc = {ACK: self._create_payload(ACK, (0, )),
- NACK: self._create_payload(NACK, (0, ))}
+ precalc = {ACK: self._create_payload(ACK, (0,)),
+ NACK: self._create_payload(NACK, (0,))}

def _put_back(job, _time=time.time):
# puts back at the end of the queue
@@ -854,7 +854,7 @@ def send_ack(response, pid, job, fd, WRITE=WRITE, ERR=ERR):
cor = _write_ack(fd, msg, callback=callback)
mark_write_gen_as_active(cor)
mark_write_fd_as_active(fd)
- callback.args = (cor, )
+ callback.args = (cor,)
add_writer(fd, cor)
self.send_ack = send_ack

@@ -1225,7 +1225,7 @@ def _set_result_sentinel(cls, _outqueue, _pool):
def _help_stuff_finish_args(self):
# Pool._help_stuff_finished is a classmethod so we have to use this
# trick to modify the arguments passed to it.
- return (self._pool, )
+ return (self._pool,)

@classmethod
def _help_stuff_finish(cls, pool):
2 changes: 1 addition & 1 deletion celery/concurrency/gevent.py
@@ -30,7 +30,7 @@ def apply_timeout(target, args=(), kwargs={}, callback=None,
with Timeout(timeout):
return apply_target(target, args, kwargs, callback,
accept_callback, pid,
- propagate=(Timeout, ), **rest)
+ propagate=(Timeout,), **rest)
except Timeout:
return timeout_callback(False, timeout)

2 changes: 1 addition & 1 deletion celery/contrib/batches.py
@@ -230,7 +230,7 @@ def task_message_handler(message, body, ack, reject, callbacks, **kw):

def flush(self, requests):
return self.apply_buffer(requests, ([SimpleRequest.from_request(r)
- for r in requests], ))
+ for r in requests],))

def _do_flush(self):
logger.debug('Batches: Wake-up to flush buffer...')
2 changes: 1 addition & 1 deletion celery/events/cursesmon.py
@@ -236,7 +236,7 @@ def readline(self, x, y):
if ch != -1:
if ch in (10, curses.KEY_ENTER): # enter
break
- if ch in (27, ):
+ if ch in (27,):
buffer = str()
break
buffer += chr(ch)
2 changes: 1 addition & 1 deletion celery/events/snapshot.py
@@ -29,7 +29,7 @@

class Polaroid(object):
timer = None
- shutter_signal = Signal(providing_args=('state', ))
+ shutter_signal = Signal(providing_args=('state',))
cleanup_signal = Signal()
clear_after = False

6 changes: 3 additions & 3 deletions celery/five.py
@@ -160,7 +160,7 @@ def __dir__(self):
return list(set(self.__all__) | DEFAULT_ATTRS)

def __reduce__(self):
- return import_module, (self.__name__, )
+ return import_module, (self.__name__,)


def create_module(name, attrs, cls_attrs=None, pkg=None,
@@ -174,7 +174,7 @@ def create_module(name, attrs, cls_attrs=None, pkg=None,
attr_name: (prepare_attr(attr) if prepare_attr else attr)
for attr_name, attr in items(attrs)
}
- module = sys.modules[fqdn] = type(modname, (base, ), cls_attrs)(name)
+ module = sys.modules[fqdn] = type(modname, (base,), cls_attrs)(name)
module.__dict__.update(attrs)
return module

@@ -206,7 +206,7 @@ def get_compat_module(pkg, name):

def prepare(attr):
if isinstance(attr, string_t):
- return Proxy(getappattr, (attr, ))
+ return Proxy(getappattr, (attr,))
return attr

attrs = COMPAT_MODULES[pkg.__name__][name]
2 changes: 1 addition & 1 deletion celery/fixups/django.py
@@ -143,7 +143,7 @@ def __init__(self, app):
except (ImportError, AttributeError):
self._close_old_connections = None
self.database_errors = (
- (DatabaseError, ) +
+ (DatabaseError,) +
_my_database_errors +
_pg_database_errors +
_lite_database_errors +
4 changes: 2 additions & 2 deletions celery/local.py
@@ -39,7 +39,7 @@ def __new__(cls, getter):
def __get__(self, obj, cls=None):
return self.__getter(obj) if obj is not None else self

- return type(name, (type_, ), {
+ return type(name, (type_,), {
'__new__': __new__, '__get__': __get__,
})

@@ -212,7 +212,7 @@ class PromiseProxy(Proxy):
"""

- __slots__ = ('__pending__', )
+ __slots__ = ('__pending__',)

def _get_current_object(self):
try:
2 changes: 1 addition & 1 deletion celery/platforms.py
@@ -693,7 +693,7 @@ def ignore_errno(*errnos, **kwargs):
:keyword types: A tuple of exceptions to ignore (when the errno matches),
defaults to :exc:`Exception`.
"""
- types = kwargs.get('types') or (Exception, )
+ types = kwargs.get('types') or (Exception,)
errnos = [get_errno_name(errno) for errno in errnos]
try:
yield
2 changes: 1 addition & 1 deletion celery/schedules.py
@@ -251,7 +251,7 @@ def _parse_part(self, part):
m = regex.match(part)
if m:
return handler(m.groups())
- return self._expand_range((part, ))
+ return self._expand_range((part,))

def _expand_range(self, toks):
fr = self._expand_number(toks[0])
(The remaining changed files are not shown here.)
