 

AttributeError: 'DisabledBackend' object has no attribute '_get_task_meta_for'

I am trying to read meta info from a Celery task when it times out (i.e. the task does not finish within the given time). I have 3 Celery workers. When I execute tasks on the 3 workers serially, my timeout logic (reading meta info from the Redis backend) works fine. But when I execute the tasks in parallel using threads, I get the error "AttributeError: 'DisabledBackend' object has no attribute '_get_task_meta_for'".

Main script (run.py):

from threading import Thread
from util.tasks import app
from celery.exceptions import TimeoutError
# from celery.task.control import revoke
from celery.result import AsyncResult

def run(cmd, workerName, async=False, timeout=9999999):
        print "Executing Celery cmd: ", cmd
        ret = app.send_task(workerName+'.run_cmd', args=[cmd], kwargs={}, queue=workerName)
        if async:
            return ret
        else:
            try:
                return ret.get(timeout=timeout)
            except TimeoutError:
                task = AsyncResult(ret.task_id)
                # print task.info
                out = task.info['PROGRESS']
                # stop_task(ret.task_id)
                print 'TIMEOUT', out
                return 'TIMEOUT', out


cmd = r'ping 10.10.10.10'
threads = []

# this block works
print "This block works"
run(cmd, 'MH_VTF203', timeout=10)
run(cmd, 'MH_VTF1661', timeout=10)
run(cmd, 'MH_VTF106', timeout=10)


# this block errors
print "This block erros"
for vtf in ['MH_VTF203', 'MH_VTF1661', 'MH_VTF106']:
    t = Thread(target=run, args=[cmd, vtf], kwargs={'timeout': 10})
    t.start()
    threads.append(t)
for t in threads:
    t.join()

util/tasks.py:

from celery import Celery
import subprocess


app = Celery('tasks', backend='redis://', broker='redis://localhost:6379/0')
app.conf.CELERY_IGNORE_RESULT = False
app.conf.CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'


@app.task()
def run_cmd(*args, **kwargs):

    cmd = " ".join(args)
    print "executing command :",cmd
    try:
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out = ""
        while p.poll() is None:
            l = p.stdout.readline()
            print l
            out += l
            run_cmd.update_state(
                state='PROGRESS',
                meta={'PROGRESS': out}
            )
        l = p.stdout.read()
        print l
        out += l
        return out
    except subprocess.CalledProcessError, e:
        print 'Error executing command: ', cmd
        return str(e)

Output:

C:\Python27\python.exe C:/Users/mkr/Documents/work/New_RoD/testing/run.py
    This block works
    Executing Celery cmd:  ping 10.10.10.10
    TIMEOUT 
    Pinging 10.10.10.10 with 32 bytes of data:
    Request timed out.
    Request timed out.

    Executing Celery cmd:  ping 10.10.10.10
    TIMEOUT 
    Pinging 10.10.10.10 with 32 bytes of data:
    Request timed out.
    Request timed out.

    Executing Celery cmd:  ping 10.10.10.10
    TIMEOUT 
    Pinging 10.10.10.10 with 32 bytes of data:
    Request timed out.
    Request timed out.

    This block errors
    Executing Celery cmd:  ping 10.10.10.10
    Executing Celery cmd:  ping 10.10.10.10
    Executing Celery cmd:  ping 10.10.10.10
    Exception in thread Thread-1:
    Traceback (most recent call last):
      File "C:\Python27\lib\threading.py", line 810, in __bootstrap_inner
        self.run()
      File "C:\Python27\lib\threading.py", line 763, in run
        self.__target(*self.__args, **self.__kwargs)
      File "C:/Users/mkr/Documents/work/New_RoD/testing/run.py", line 18, in run
        out = task.info['PROGRESS']
      File "C:\Python27\lib\site-packages\celery\result.py", line 356, in result
        return self._get_task_meta()['result']
      File "C:\Python27\lib\site-packages\celery\result.py", line 339, in _get_task_meta
        return self._maybe_set_cache(self.backend.get_task_meta(self.id))
      File "C:\Python27\lib\site-packages\celery\backends\base.py", line 292, in get_task_meta
        meta = self._get_task_meta_for(task_id)
    AttributeError: 'DisabledBackend' object has no attribute '_get_task_meta_for'

    Exception in thread Thread-2:
    Traceback (most recent call last):
      File "C:\Python27\lib\threading.py", line 810, in __bootstrap_inner
        self.run()
      File "C:\Python27\lib\threading.py", line 763, in run
        self.__target(*self.__args, **self.__kwargs)
      File "C:/Users/mkr/Documents/work/New_RoD/testing/run.py", line 18, in run
        out = task.info['PROGRESS']
      File "C:\Python27\lib\site-packages\celery\result.py", line 356, in result
        return self._get_task_meta()['result']
      File "C:\Python27\lib\site-packages\celery\result.py", line 339, in _get_task_meta
        return self._maybe_set_cache(self.backend.get_task_meta(self.id))
      File "C:\Python27\lib\site-packages\celery\backends\base.py", line 292, in get_task_meta
        meta = self._get_task_meta_for(task_id)
    AttributeError: 'DisabledBackend' object has no attribute '_get_task_meta_for'

    Exception in thread Thread-3:
    Traceback (most recent call last):
      File "C:\Python27\lib\threading.py", line 810, in __bootstrap_inner
        self.run()
      File "C:\Python27\lib\threading.py", line 763, in run
        self.__target(*self.__args, **self.__kwargs)
      File "C:/Users/mkr/Documents/work/New_RoD/testing/run.py", line 18, in run
        out = task.info['PROGRESS']
      File "C:\Python27\lib\site-packages\celery\result.py", line 356, in result
        return self._get_task_meta()['result']
      File "C:\Python27\lib\site-packages\celery\result.py", line 339, in _get_task_meta
        return self._maybe_set_cache(self.backend.get_task_meta(self.id))
      File "C:\Python27\lib\site-packages\celery\backends\base.py", line 292, in get_task_meta
        meta = self._get_task_meta_for(task_id)
    AttributeError: 'DisabledBackend' object has no attribute '_get_task_meta_for'


    Process finished with exit code 0
asked Jun 28 '15 by mylari

2 Answers

Using app.AsyncResult instead of the bare celery.result.AsyncResult worked for me.
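
A minimal sketch of that change applied to the run() helper from the question (the logic is otherwise unchanged; only the way the result object is built differs). The bare celery.result.AsyncResult resolves its Celery app at call time, and in a freshly spawned thread that lookup can fall back to Celery's internal default app, whose result backend is DisabledBackend; app.AsyncResult is bound to the app's configured Redis backend, so it works from any thread:

from celery.exceptions import TimeoutError
from util.tasks import app

def run(cmd, workerName, async=False, timeout=9999999):
    print "Executing Celery cmd: ", cmd
    ret = app.send_task(workerName + '.run_cmd', args=[cmd], kwargs={}, queue=workerName)
    if async:
        return ret
    try:
        return ret.get(timeout=timeout)
    except TimeoutError:
        # app.AsyncResult is tied to the app's configured Redis result backend,
        # so this also works when run() is called from a worker thread.
        task = app.AsyncResult(ret.task_id)
        out = task.info['PROGRESS']
        print 'TIMEOUT', out
        return 'TIMEOUT', out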

answered Sep 28 '22 by mylari

This works for me, as suggested by https://stackoverflow.com/users/2682417/mylari in a comment above:

from celery import Celery

celery1 = Celery('mytasks', backend='redis://localhost:6379/1', broker='redis://localhost:6379/0')

def t_status(id):
    # bound to celery1's configured Redis result backend
    c = celery1.AsyncResult(id)
    return c

Calling method:

@app.route("/tasks/<task_id>", methods=["GET"])
def get_status(task_id):
    task_result = t_status(task_id)
    result = {
        "task_id": task_id,
        "task_status": task_result.status,
        "task_result": task_result.result
    }
    return jsonify(result), 200
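
Both answers boil down to the same fix: build the AsyncResult through a Celery instance that has a result backend configured (app.AsyncResult or celery1.AsyncResult here), rather than through celery.result.AsyncResult with no app attached, which is what leaves the threaded calls looking up task meta on a DisabledBackend.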
answered Sep 28 '22 by Manik Sidana