-update to latest master

- have mg generate task_id

remove
This commit is contained in:
Rodney Ewing 2013-08-19 12:58:00 -07:00
parent 65875f24e4
commit b505952508
4 changed files with 19 additions and 18 deletions

View File

@@ -181,10 +181,8 @@ class BaseProcessingFail(Exception):
return u"%s:%s" % (
self.__class__.__module__, self.__class__.__name__)
def __init__(self, *args, **kwargs):
# next line is REQUIRED to have picklable exceptions if you want
# to be able to pass in custom arguments (see celery docs)
Exception.__init__(self, *args, **metadata)
def __init__(self, **metadata):
self.metadata = metadata or {}
class BadMediaFail(BaseProcessingFail):
"""

View File

@@ -18,13 +18,13 @@ import logging
import urllib
import urllib2
#TODO: newer celeries use from celery import Task. Change when we upgrade
from celery.task import Task
import celery
from celery.registry import tasks
from mediagoblin import mg_globals as mgg
from mediagoblin.db.sql.models import MediaEntry
from mediagoblin.processing import mark_entry_failed, BaseProcessingFail
from mediagoblin.db.models import MediaEntry
from mediagoblin.processing import (mark_entry_failed, BaseProcessingFail,
ProcessingState)
from mediagoblin.tools.processing import json_processing_callback
_log = logging.getLogger(__name__)
@@ -32,7 +32,7 @@ logging.basicConfig()
_log.setLevel(logging.DEBUG)
@task.task(default_retry_delay=2 * 60)
@celery.task(default_retry_delay=2 * 60)
def handle_push_urls(feed_url):
"""Subtask, notifying the PuSH servers of new content
@@ -62,10 +62,14 @@ def handle_push_urls(feed_url):
'Giving up.'.format(feed_url))
return False
################################
# Media processing initial steps
################################
class ProcessMedia(Task):
class ProcessMedia(celery.Task):
"""
Pass this entry off for processing.
"""
track_started=True
def run(self, media_id):
@@ -81,7 +85,6 @@ class ProcessMedia(Task):
# Try to process, and handle expected errors.
try:
entry.state = u'processing'
entry.queued_task_id = self.request.id
entry.save()
_log.debug('Processing {0}'.format(entry))

View File

@@ -21,7 +21,7 @@ from werkzeug.datastructures import FileStorage
from mediagoblin.db.models import MediaEntry
from mediagoblin.processing import mark_entry_failed
from mediagoblin.processing.task import process_media
from mediagoblin.processing.task import ProcessMedia
_log = logging.getLogger(__name__)
@@ -85,7 +85,7 @@ def run_process_media(entry, feed_url=None):
'mediagoblin.user_pages.atom_feed',qualified=True,
user=request.user.username)`"""
try:
process_media.apply_async(
ProcessMedia().apply_async(
[entry.id, feed_url], {},
task_id=entry.queued_task_id)
except BaseException as exc: