- update to latest master
- have mg generate task_id remove
parent 65875f24e4
commit b505952508
@@ -146,7 +146,7 @@ class RequestToken(Base):
     callback = Column(Unicode, nullable=False, default=u"oob")
     created = Column(DateTime, nullable=False, default=datetime.datetime.now)
     updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
 
 class AccessToken(Base):
     """
     Model for representing the access tokens
@@ -159,7 +159,7 @@ class AccessToken(Base):
     request_token = Column(Unicode, ForeignKey(RequestToken.token))
     created = Column(DateTime, nullable=False, default=datetime.datetime.now)
     updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
 
 
 class NonceTimestamp(Base):
     """
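The created/updated columns in these models hand SQLAlchemy the callable datetime.datetime.now rather than its result, so the default is evaluated once per inserted row. A self-contained sketch of that pattern; the demo table, integer primary key and in-memory SQLite engine are assumptions for illustration, not the MediaGoblin models:

# Standalone sketch of the Column-default pattern shown above.
import datetime

from sqlalchemy import Column, DateTime, Integer, Unicode, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()


class DemoToken(Base):
    __tablename__ = 'demo_token'

    id = Column(Integer, primary_key=True)
    callback = Column(Unicode, nullable=False, default=u"oob")
    # Passing the callable (no parentheses) makes SQLAlchemy call it per INSERT;
    # datetime.datetime.now() here would freeze one timestamp at import time.
    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
    updated = Column(DateTime, nullable=False, default=datetime.datetime.now)


if __name__ == '__main__':
    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    token = DemoToken()
    session.add(token)
    session.commit()
    print(token.callback, token.created)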
@@ -646,13 +646,13 @@ with_polymorphic(
     [ProcessingNotification, CommentNotification])
 
 MODELS = [
     User, Client, RequestToken, AccessToken, NonceTimestamp, MediaEntry, Tag,
     MediaTag, MediaComment, Collection, CollectionItem, MediaFile, FileKeynames,
     MediaAttachmentFile, ProcessingMetaData, Notification, CommentNotification,
     ProcessingNotification, CommentSubscription]
 
 """
 Foundations are the default rows that are created immediately after the tables
 are initialized. Each entry to this dictionary should be in the format of:
     ModelConstructorObject:List of Dictionaries
 (Each Dictionary represents a row on the Table to be created, containing each
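The docstring in this hunk describes a plain mapping from model constructors to lists of row dictionaries. A hypothetical illustration of that shape, with a stand-in model class and made-up rows rather than the defaults MediaGoblin actually seeds:

# Stand-in model; the real keys would be SQLAlchemy model classes from MODELS.
class DemoUser(object):
    def __init__(self, username, is_admin=False):
        self.username = username
        self.is_admin = is_admin


# ModelConstructorObject: list of dictionaries, one dict per default row.
FOUNDATIONS = {
    DemoUser: [
        {'username': u'admin', 'is_admin': True},
        {'username': u'guest'},
    ],
}


def populate_foundations(foundations):
    # Build one instance per row dict; a real version would add each instance
    # to a database session right after the tables are created.
    created = []
    for model_class, rows in foundations.items():
        for row in rows:
            created.append(model_class(**row))
    return created


print([user.username for user in populate_foundations(FOUNDATIONS)])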
@@ -181,10 +181,8 @@ class BaseProcessingFail(Exception):
         return u"%s:%s" % (
             self.__class__.__module__, self.__class__.__name__)
 
-    def __init__(self, *args, **kwargs):
-        # next line is REQUIRED to have pickable exceptions if you want
-        # to be able to pass in custom arguments (see celery docs)
-        Exception.__init__(self, *args, **metadata)
+    def __init__(self, **metadata):
+        self.metadata = metadata or {}
 
 class BadMediaFail(BaseProcessingFail):
     """
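With the new constructor, a failure carries structured context on self.metadata instead of forwarding arguments to Exception.__init__. A trimmed usage sketch; the file_name key is illustrative and not something this commit defines:

class BaseProcessingFail(Exception):
    # Condensed copy of the class above, just enough to show the metadata flow.
    def __init__(self, **metadata):
        self.metadata = metadata or {}

    @property
    def exception_path(self):
        return u"%s:%s" % (
            self.__class__.__module__, self.__class__.__name__)


class BadMediaFail(BaseProcessingFail):
    """Raised when a submitted file cannot be handled by its media type."""


try:
    raise BadMediaFail(file_name=u'broken.png')
except BaseProcessingFail as exc:
    # Callers can log or persist the structured context alongside the entry.
    print(exc.exception_path, exc.metadata)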
@@ -18,13 +18,13 @@ import logging
 import urllib
 import urllib2
 
-#TODO: newer celeries use from celery import Task. Change when we upgrade
-from celery.task import Task
+import celery
 from celery.registry import tasks
 
 from mediagoblin import mg_globals as mgg
-from mediagoblin.db.sql.models import MediaEntry
-from mediagoblin.processing import mark_entry_failed, BaseProcessingFail
+from mediagoblin.db.models import MediaEntry
+from mediagoblin.processing import (mark_entry_failed, BaseProcessingFail,
+    ProcessingState)
 from mediagoblin.tools.processing import json_processing_callback
 
 _log = logging.getLogger(__name__)
@@ -32,7 +32,7 @@ logging.basicConfig()
 _log.setLevel(logging.DEBUG)
 
 
-@task.task(default_retry_delay=2 * 60)
+@celery.task(default_retry_delay=2 * 60)
 def handle_push_urls(feed_url):
     """Subtask, notifying the PuSH servers of new content
 
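The decorator swap above keeps the same retry delay while dropping the old celery.task module import. A rough equivalent written against the app-based Celery API; the app name, broker URL and notify_push_server helper are placeholders, not how MediaGoblin wires its broker:

from celery import Celery

app = Celery('push_demo', broker='memory://')  # placeholder broker


def notify_push_server(feed_url):
    # Illustrative stand-in for the real PuSH HTTP notification.
    return feed_url


@app.task(bind=True, default_retry_delay=2 * 60)
def handle_push_urls(self, feed_url):
    # On an expected failure, ask Celery to re-run this task; default_retry_delay
    # schedules the retry roughly two minutes later.
    try:
        return notify_push_server(feed_url)
    except IOError as exc:
        raise self.retry(exc=exc)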
@@ -62,10 +62,14 @@ def handle_push_urls(feed_url):
                      'Giving up.'.format(feed_url))
         return False
 
 
 ################################
 # Media processing initial steps
 ################################
-class ProcessMedia(Task):
+class ProcessMedia(celery.Task):
+    """
+    Pass this entry off for processing.
+    """
     track_started=True
 
     def run(self, media_id):
@@ -81,7 +85,6 @@ class ProcessMedia(Task):
         # Try to process, and handle expected errors.
         try:
             entry.state = u'processing'
-            entry.queued_task_id = self.request.id
             entry.save()
             _log.debug('Processing {0}'.format(entry))
 
@@ -21,7 +21,7 @@ from werkzeug.datastructures import FileStorage
 
 from mediagoblin.db.models import MediaEntry
 from mediagoblin.processing import mark_entry_failed
-from mediagoblin.processing.task import process_media
+from mediagoblin.processing.task import ProcessMedia
 
 
 _log = logging.getLogger(__name__)
@@ -85,7 +85,7 @@ def run_process_media(entry, feed_url=None):
         'mediagoblin.user_pages.atom_feed',qualified=True,
         user=request.user.username)`"""
     try:
-        process_media.apply_async(
+        ProcessMedia().apply_async(
             [entry.id, feed_url], {},
             task_id=entry.queued_task_id)
     except BaseException as exc:
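Per the commit message, the task id is now generated by MediaGoblin and handed to apply_async (stored as entry.queued_task_id), rather than the task copying self.request.id onto the entry as the removed run() line did. A hedged sketch of that calling pattern; the process_entry task, uuid-based id and in-memory broker are illustrative:

import uuid

from celery import Celery

app = Celery('dispatch_demo', broker='memory://')  # placeholder broker


@app.task
def process_entry(entry_id, feed_url=None):
    # Stand-in body; the real ProcessMedia task performs the media processing.
    return entry_id


def dispatch(entry_id):
    # Pick the task id up front so it can be written to the database row before
    # the worker ever starts, then pass that same id through apply_async.
    queued_task_id = str(uuid.uuid4())
    process_entry.apply_async(
        [entry_id], {},
        task_id=queued_task_id)
    return queued_task_id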