- update to latest master
- have mg generate task_id remove
parent 65875f24e4
commit b505952508
@@ -146,7 +146,7 @@ class RequestToken(Base):
     callback = Column(Unicode, nullable=False, default=u"oob")
     created = Column(DateTime, nullable=False, default=datetime.datetime.now)
     updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
 
 
 class AccessToken(Base):
     """
     Model for representing the access tokens
@@ -159,7 +159,7 @@ class AccessToken(Base):
     request_token = Column(Unicode, ForeignKey(RequestToken.token))
     created = Column(DateTime, nullable=False, default=datetime.datetime.now)
     updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
 
 
 class NonceTimestamp(Base):
     """
@@ -646,13 +646,13 @@ with_polymorphic(
     [ProcessingNotification, CommentNotification])
 
 MODELS = [
-    User, Client, RequestToken, AccessToken, NonceTimestamp, MediaEntry, Tag,
-    MediaTag, MediaComment, Collection, CollectionItem, MediaFile, FileKeynames,
+    User, Client, RequestToken, AccessToken, NonceTimestamp, MediaEntry, Tag,
+    MediaTag, MediaComment, Collection, CollectionItem, MediaFile, FileKeynames,
     MediaAttachmentFile, ProcessingMetaData, Notification, CommentNotification,
     ProcessingNotification, CommentSubscription]
 
 """
-Foundations are the default rows that are created immediately after the tables
+Foundations are the default rows that are created immediately after the tables
 are initialized. Each entry to this dictionary should be in the format of:
     ModelConstructorObject:List of Dictionaries
 (Each Dictionary represents a row on the Table to be created, containing each
@@ -181,10 +181,8 @@ class BaseProcessingFail(Exception):
         return u"%s:%s" % (
             self.__class__.__module__, self.__class__.__name__)
 
-    def __init__(self, *args, **kwargs):
-        # next line is REQUIRED to have pickable exceptions if you want
-        # to be able to pass in custom arguments (see celery docs)
-        Exception.__init__(self, *args, **metadata)
+    def __init__(self, **metadata):
         self.metadata = metadata or {}
 
 
 class BadMediaFail(BaseProcessingFail):
     """
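The hunk above moves BaseProcessingFail back to a keyword-only constructor. As a rough illustration of why the removed comment mentions pickling (celery ships exceptions between processes by pickling them), here is a minimal standalone sketch; DemoProcessingFail is a made-up stand-in, not MediaGoblin code:

import pickle


class DemoProcessingFail(Exception):
    # Keyword-only metadata, mirroring BaseProcessingFail.__init__ in the hunk above.
    def __init__(self, **metadata):
        self.metadata = metadata or {}


exc = DemoProcessingFail(file_id=42)
restored = pickle.loads(pickle.dumps(exc))   # round-trip, as a celery worker would
print(restored.metadata)                     # {'file_id': 42} -- the instance dict survives
print(restored.args)                         # () -- no positional args were recorded

Because the constructor accepts zero positional arguments, unpickling (which re-calls the class with exc.args) still succeeds; the concern in the removed comment only bites when __init__ requires arguments that were never passed through to Exception.__init__.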
@@ -18,13 +18,13 @@ import logging
 import urllib
 import urllib2
 
-#TODO: newer celeries use from celery import Task. Change when we upgrade
-from celery.task import Task
+import celery
 from celery.registry import tasks
 
 from mediagoblin import mg_globals as mgg
-from mediagoblin.db.sql.models import MediaEntry
-from mediagoblin.processing import mark_entry_failed, BaseProcessingFail
+from mediagoblin.db.models import MediaEntry
+from mediagoblin.processing import (mark_entry_failed, BaseProcessingFail,
+    ProcessingState)
 from mediagoblin.tools.processing import json_processing_callback
 
 _log = logging.getLogger(__name__)
@@ -32,7 +32,7 @@ logging.basicConfig()
 _log.setLevel(logging.DEBUG)
 
 
-@task.task(default_retry_delay=2 * 60)
+@celery.task(default_retry_delay=2 * 60)
 def handle_push_urls(feed_url):
     """Subtask, notifying the PuSH servers of new content
 
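The two hunks above drop the older from celery.task imports in favour of a plain import celery and the @celery.task decorator form. A minimal sketch of that decorator style against a throwaway app (the app name, broker URL and function name are made up here, not MediaGoblin's configuration):

import celery

# Stand-in app; MediaGoblin wires its own celery app up elsewhere.
app = celery.Celery('sketch', broker='memory://')


@app.task(default_retry_delay=2 * 60)
def notify_push_server(feed_url):
    # Same shape as handle_push_urls above: retried automatically on failure,
    # with two minutes between attempts.
    print('notifying {0}'.format(feed_url))


# In production this would be queued asynchronously, e.g.:
# notify_push_server.delay('http://example.com/feed.atom')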
@@ -62,10 +62,14 @@ def handle_push_urls(feed_url):
                          'Giving up.'.format(feed_url))
             return False
 
 
 ################################
 # Media processing initial steps
 ################################
-class ProcessMedia(Task):
+class ProcessMedia(celery.Task):
     """
     Pass this entry off for processing.
     """
     track_started=True
 
     def run(self, media_id):
@@ -81,7 +85,6 @@ class ProcessMedia(Task):
         # Try to process, and handle expected errors.
         try:
             entry.state = u'processing'
-            entry.queued_task_id = self.request.id
             entry.save()
             _log.debug('Processing {0}'.format(entry))
 
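The removed line above copied celery's own request id onto the entry; per the commit message, the id is now generated on the MediaGoblin side instead (see the apply_async hunk below). For context, a tiny sketch of a class-based task showing where self.request.id lives; DemoTask is hypothetical and not bound to MediaGoblin's app:

import celery


class DemoTask(celery.Task):
    # Hypothetical task, not ProcessMedia.
    def run(self, media_id):
        # Inside run(), self.request.id holds the id this invocation was
        # queued under -- the value the removed line used to store on the entry.
        print('processing {0} as task {1}'.format(media_id, self.request.id))
        return media_id

Under the Celery 3.x line this code targets, subclassing celery.Task with a run() method is, as I read it, enough to register the task; newer Celery releases require explicit registration.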
@@ -21,7 +21,7 @@ from werkzeug.datastructures import FileStorage
 
 from mediagoblin.db.models import MediaEntry
 from mediagoblin.processing import mark_entry_failed
-from mediagoblin.processing.task import process_media
+from mediagoblin.processing.task import ProcessMedia
 
 
 _log = logging.getLogger(__name__)
@@ -85,7 +85,7 @@ def run_process_media(entry, feed_url=None):
             'mediagoblin.user_pages.atom_feed',qualified=True,
             user=request.user.username)`"""
     try:
-        process_media.apply_async(
+        ProcessMedia().apply_async(
             [entry.id, feed_url], {},
             task_id=entry.queued_task_id)
     except BaseException as exc:
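This last hunk switches the caller from the old process_media function task to instantiating ProcessMedia, while still passing task_id=entry.queued_task_id explicitly. Combined with the removal of entry.queued_task_id = self.request.id above, that matches the commit message: the task id is generated by MediaGoblin and handed to celery, not read back from it. A standalone sketch of pre-assigning a task id, using a throwaway app and made-up names (demo_process, the broker URL) rather than MediaGoblin's setup:

import uuid

import celery

app = celery.Celery('sketch', broker='memory://')   # throwaway app, not MediaGoblin's


@app.task
def demo_process(media_id, feed_url=None):
    # Hypothetical stand-in for the ProcessMedia task body.
    return media_id


# Generate the id up front -- roughly what entry.queued_task_id would carry --
# so the caller can record it before the task is ever enqueued.
queued_task_id = str(uuid.uuid4())
result = demo_process.apply_async([27, None], {}, task_id=queued_task_id)
assert result.id == queued_task_id

apply_async accepts the positional-argument list, the keyword-argument dict, and an explicit task_id, exactly the shape used in the hunk above; the returned AsyncResult carries the id that was passed in.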