Commit 7c296b9c authored by sim's avatar sim

wip2

parent ce469a9d
......@@ -19,7 +19,7 @@ class UserAuthMixin(object):
user, token = result
logger.debug("%s: authenticate as %s",
self.__class__.__name__, user.username or '<anon>')
self.__class__.__name__, user.username or '<anon>', token)
# Authenticate user in database session
request.db.login(user)
......
import time
import logging
from django.conf import settings
......@@ -18,7 +19,8 @@ logger = get_task_logger(__name__)
def task_view(task, *args, **kwargs):
@api_view(['GET'])
@renderer_classes((JSONRenderer,))
def view(*_, **__):
def view(request, **params):
logging.getLogger('gargantext').info('From view: %r', request.user)
r = schedule(task, args=args, kwargs=kwargs)
return Response({
"task": task.__name__,
......@@ -48,6 +50,7 @@ def dummy(self, duration=30):
logger.info('Start %r task (DEBUG=%r): wait %s seconds...' % (
self.name, settings.DEBUG, duration))
logger.info('Dummy: %r', self.request)
time.sleep(duration)
me = 1234 #request.db.query(UserNode).filter_by(user_id=request.user.id).one_or_none()
......@@ -63,17 +66,20 @@ def fail(self):
raise Exception("Ay Caramba! Failed again!")
@shared_task(bind=True, default_retry_delay=30, max_retries=10)
@shared_task(bind=True, autoretry_for=(Exception,), default_retry_delay=10,
retry_kwargs=dict(max_retries=3))
def fail_random(self):
# Failing randomly
from random import random
p = random()
logger.info("Run randomly failing task (p=%s)" % p)
try:
if p < 2/3:
raise Exception("Fail fail fail")
except Exception as e:
self.retry()
if p < 2/3:
raise Exception("Fail fail fail")
#try:
# if p < 2/3:
# raise Exception("Fail fail fail")
#except Exception as e:
# self.retry()
@shared_task(bind=True)
......@@ -91,6 +97,7 @@ def simulate_work(self):
@shared_task
def produce():
    """Pretend to do a short unit of work, then return the canned payload."""
    delay = 0.1  # simulated work duration, in seconds
    time.sleep(delay)
    return "Bidule"
@shared_task
......@@ -98,8 +105,8 @@ def process1(data):
return "{%s}" % data
@shared_task
def process2(data):
    """Wrap *data* (stringified) in square brackets."""
    # !s mirrors the original "%s" formatting: always str(), never format().
    return f"[{data!s}]"
def reduce(many):
    """Collapse an iterable of strings into one colon-separated string."""
    separator = ':'
    return separator.join(many)
@shared_task
def workflow():
......@@ -108,7 +115,9 @@ def workflow():
#chain = produce.s("Bidule", 10) | process.map.s()
#r = chain.apply_async()
r = chord([ produce.s() for _ in range(10) ])(process1.s() | process2.s())
#r = chord([ produce.s() for _ in range(10) ])(process1.s() | process2.s())
maps = (produce.s() | process1.s() for _ in range(20))
r = chord(maps)(reduce.s())
logger.info("Worflow: %r", r)
......
......@@ -213,8 +213,8 @@ CELERYBEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler'
CELERY_IMPORTS = ()
# See: https://wiredcraft.com/blog/3-gotchas-for-celery/
# And: http://docs.celeryproject.org/en/3.1/userguide/optimizing.html#optimizing-prefetch-limit
#CELERY_ACKS_LATE = True
#CELERYD_PREFETCH_MULTIPLIER = 1
CELERY_ACKS_LATE = True
CELERYD_PREFETCH_MULTIPLIER = 1
CELERY_TRACK_STARTED = True
CELERY_RESULT_BACKEND = 'rpc://'
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment