Simple hack to handle main workflow problem
Remove redundant workflow methods from the other media type's processing.py. Fixes #1
This commit is contained in:
parent
5b91098ca7
commit
33d5ac6c4d
@ -273,8 +273,3 @@ class AsciiProcessingManager(ProcessingManager):
|
||||
super(AsciiProcessingManager, self).__init__()
|
||||
self.add_processor(InitialProcessor)
|
||||
self.add_processor(Resizer)
|
||||
|
||||
def workflow(self, entry_id, feed_url, reprocess_action, reprocess_info=None):
|
||||
ProcessMedia().apply_async(
|
||||
[entry.id, feed_url, reprocess_action, reprocess_info], {},
|
||||
task_id=entry.queued_task_id)
|
||||
|
@ -365,8 +365,3 @@ class AudioProcessingManager(ProcessingManager):
|
||||
self.add_processor(InitialProcessor)
|
||||
self.add_processor(Resizer)
|
||||
self.add_processor(Transcoder)
|
||||
|
||||
def workflow(self, entry_id, feed_url, reprocess_action, reprocess_info=None):
|
||||
ProcessMedia().apply_async(
|
||||
[entry.id, feed_url, reprocess_action, reprocess_info], {},
|
||||
task_id=entry.queued_task_id)
|
||||
|
@ -431,11 +431,6 @@ class ImageProcessingManager(ProcessingManager):
|
||||
self.add_processor(Resizer)
|
||||
self.add_processor(MetadataProcessing)
|
||||
|
||||
def workflow(self, entry_id, feed_url, reprocess_action, reprocess_info=None):
|
||||
ProcessMedia().apply_async(
|
||||
[entry.id, feed_url, reprocess_action, reprocess_info], {},
|
||||
task_id=entry.queued_task_id)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
|
@ -470,8 +470,3 @@ class PdfProcessingManager(ProcessingManager):
|
||||
super(PdfProcessingManager, self).__init__()
|
||||
self.add_processor(InitialProcessor)
|
||||
self.add_processor(Resizer)
|
||||
|
||||
def workflow(self, entry_id, feed_url, reprocess_action, reprocess_info=None):
|
||||
ProcessMedia().apply_async(
|
||||
[entry.id, feed_url, reprocess_action, reprocess_info], {},
|
||||
task_id=entry.queued_task_id)
|
||||
|
@ -80,8 +80,3 @@ class RawImageProcessingManager(ProcessingManager):
|
||||
super(RawImageProcessingManager, self).__init__()
|
||||
self.add_processor(InitialRawProcessor)
|
||||
self.add_processor(Resizer)
|
||||
|
||||
def workflow(self, entry_id, feed_url, reprocess_action, reprocess_info=None):
|
||||
ProcessMedia().apply_async(
|
||||
[entry.id, feed_url, reprocess_action, reprocess_info], {},
|
||||
task_id=entry.queued_task_id)
|
||||
|
@ -368,8 +368,3 @@ class StlProcessingManager(ProcessingManager):
|
||||
super(StlProcessingManager, self).__init__()
|
||||
self.add_processor(InitialProcessor)
|
||||
self.add_processor(Resizer)
|
||||
|
||||
def workflow(self, entry_id, feed_url, reprocess_action, reprocess_info=None):
|
||||
ProcessMedia().apply_async(
|
||||
[entry.id, feed_url, reprocess_action, reprocess_info], {},
|
||||
task_id=entry.queued_task_id)
|
||||
|
@ -30,7 +30,6 @@ from mediagoblin.processing import (
|
||||
ProcessingManager, request_from_args,
|
||||
get_process_filename, store_public,
|
||||
copy_original, get_entry_and_processing_manager)
|
||||
from mediagoblin.processing.task import ProcessMedia
|
||||
from mediagoblin.tools.translate import lazy_pass_to_ugettext as _
|
||||
from mediagoblin.media_types import MissingComponents
|
||||
|
||||
@ -543,7 +542,7 @@ class VideoProcessingManager(ProcessingManager):
|
||||
self.add_processor(Resizer)
|
||||
self.add_processor(Transcoder)
|
||||
|
||||
def workflow(self, entry_id, feed_url, reprocess_action, reprocess_info=None):
|
||||
def workflow(self, entry, feed_url, reprocess_action, reprocess_info=None):
|
||||
|
||||
reprocess_info = reprocess_info or {}
|
||||
if 'vp8_quality' not in reprocess_info:
|
||||
@ -556,25 +555,21 @@ class VideoProcessingManager(ProcessingManager):
|
||||
reprocess_info['thumb_size'] = None
|
||||
|
||||
transcoding_tasks = group([
|
||||
main_task.signature(args=(entry_id, '480p', ACCEPTED_RESOLUTIONS['480p']),
|
||||
main_task.signature(args=(entry.id, '480p', ACCEPTED_RESOLUTIONS['480p']),
|
||||
kwargs=reprocess_info, queue='default',
|
||||
priority=5, immutable=True),
|
||||
complimentary_task.signature(args=(entry_id, '360p', ACCEPTED_RESOLUTIONS['360p']),
|
||||
complimentary_task.signature(args=(entry.id, '360p', ACCEPTED_RESOLUTIONS['360p']),
|
||||
kwargs=reprocess_info, queue='default',
|
||||
priority=4, immutable=True),
|
||||
complimentary_task.signature(args=(entry_id, '720p', ACCEPTED_RESOLUTIONS['720p']),
|
||||
complimentary_task.signature(args=(entry.id, '720p', ACCEPTED_RESOLUTIONS['720p']),
|
||||
kwargs=reprocess_info, queue='default',
|
||||
priority=3, immutable=True),
|
||||
])
|
||||
|
||||
cleanup_task = processing_cleanup.signature(args=(entry_id,),
|
||||
cleanup_task = processing_cleanup.signature(args=(entry.id,),
|
||||
queue='default', immutable=True)
|
||||
|
||||
"""
|
||||
main_task.apply_async(args=(entry_id, '480p', ACCEPTED_RESOLUTIONS['480p']),
|
||||
kwargs=reprocess_info, queue='default',
|
||||
priority=5, immutable=True)
|
||||
processing_cleanup.apply_async(args=(entry_id,), queue='default', immutable=True)
|
||||
"""
|
||||
|
||||
chord(transcoding_tasks)(cleanup_task)
|
||||
|
||||
# Not sure what to return since we are scheduling the task here itself
|
||||
return 1
|
||||
|
@ -257,12 +257,11 @@ class ProcessingManager(object):
|
||||
|
||||
return processor
|
||||
|
||||
def workflow(self, entry_id, feed_url, reprocess_action, reprocess_info=None):
|
||||
def workflow(self, entry, feed_url, reprocess_action, reprocess_info=None):
|
||||
"""
|
||||
Returns the Celery command needed to proceed with media processing
|
||||
*This method has to be implemented in all media types*
|
||||
"""
|
||||
raise NotImplementedError
|
||||
return None
|
||||
|
||||
|
||||
def request_from_args(args, which_args):
|
||||
|
@ -266,7 +266,11 @@ def run_process_media(entry, feed_url=None,
|
||||
entry, manager = get_entry_and_processing_manager(entry.id)
|
||||
|
||||
try:
|
||||
manager.workflow(entry.id, feed_url, reprocess_action, reprocess_info)
|
||||
wf = manager.workflow(entry, feed_url, reprocess_action, reprocess_info)
|
||||
if wf is None:
|
||||
ProcessMedia().apply_async(
|
||||
[entry.id, feed_url, reprocess_action, reprocess_info], {},
|
||||
task_id=entry.queued_task_id)
|
||||
except BaseException as exc:
|
||||
# The purpose of this section is because when running in "lazy"
|
||||
# or always-eager-with-exceptions-propagated celery mode that
|
||||
|
Loading…
x
Reference in New Issue
Block a user