Implement ProcessingState class and use for images

The idea is to have a class that holds the knowledge of the
media entry currently being processed and also provides the
tools for working on it.

The long-term idea is to make reprocessing easier, for
example by hiding how the original file gets into the
processing code.
Elrond 2013-01-23 19:44:28 +01:00
parent 9d7c69fb74
commit 93b14fc300
3 changed files with 44 additions and 12 deletions
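
Roughly, the new flow looks like the sketch below. This is an
illustrative summary distilled from the diffs that follow, not code
taken verbatim from the commit:

    from mediagoblin import mg_globals as mgg
    from mediagoblin.processing import ProcessingState

    # The celery task owns the ProcessingState and the workbench
    # (entry is the MediaEntry the task fetched from the database).
    proc_state = ProcessingState(entry)      # also sets entry.proc_state
    with mgg.workbench_manager.create() as workbench:
        proc_state.set_workbench(workbench)
        entry.media_manager['processor'](entry)   # e.g. process_image(entry)

    # The media-type processor, in turn, only talks to proc_state:
    def process_image(entry):
        proc_state = entry.proc_state
        queued_filename = proc_state.get_queued_filename()  # local copy of the original
        # ... build the processed versions from queued_filename ...
        proc_state.delete_queue_file()   # drop the original from the queue store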


@@ -19,7 +19,6 @@ import os
 import logging
 
 from mediagoblin import mg_globals as mgg
-from mediagoblin.decorators import get_workbench
 from mediagoblin.processing import BadMediaFail, \
     create_pub_filepath, FilenameBuilder
 from mediagoblin.tools.exif import exif_fix_image_orientation, \
@@ -95,21 +94,21 @@ def sniff_handler(media_file, **kw):
     return False
 
 
-@get_workbench
-def process_image(entry, workbench=None):
+def process_image(entry):
     """Code to process an image. Will be run by celery.
 
     A Workbench() represents a local tempory dir. It is automatically
     cleaned up when this function exits.
     """
+    proc_state = entry.proc_state
+    workbench = proc_state.workbench
+
     # Conversions subdirectory to avoid collisions
     conversions_subdir = os.path.join(
         workbench.dir, 'conversions')
     os.mkdir(conversions_subdir)
 
-    queued_filepath = entry.queued_media_file
-    queued_filename = workbench.localized_file(
-        mgg.queue_store, queued_filepath,
-        'source')
+    queued_filename = proc_state.get_queued_filename()
     name_builder = FilenameBuilder(queued_filename)
 
     # EXIF extraction
@@ -147,8 +146,7 @@ def process_image(entry, workbench=None):
         mgg.public_store.copy_local_to_storage(queued_filename, original_filepath)
 
     # Remove queued media file from storage and database
-    mgg.queue_store.delete_file(queued_filepath)
-    entry.queued_media_file = []
+    proc_state.delete_queue_file()
 
     # Insert media file information into database
     media_files_dict = entry.setdefault('media_files', {})


@@ -74,6 +74,37 @@ class FilenameBuilder(object):
             ext=self.ext)
 
 
+class ProcessingState(object):
+    def __init__(self, entry):
+        self.entry = entry
+        self.workbench = None
+        self.queued_filename = None
+
+        # Monkey patch us onto the entry
+        entry.proc_state = self
+
+    def set_workbench(self, wb):
+        self.workbench = wb
+
+    def get_queued_filename(self):
+        """
+        Get the a filename for the original, on local storage
+        """
+        if self.queued_filename is not None:
+            return self.queued_filename
+        queued_filepath = self.entry.queued_media_file
+        queued_filename = self.workbench.localized_file(
+            mgg.queue_store, queued_filepath,
+            'source')
+        self.queued_filename = queued_filename
+        return queued_filename
+
+    def delete_queue_file(self):
+        queued_filepath = self.entry.queued_media_file
+        mgg.queue_store.delete_file(queued_filepath)
+        self.entry.queued_media_file = []
+
+
 def mark_entry_failed(entry_id, exc):
     """
     Mark a media entry as having failed in its conversion.
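
One detail of the new class worth noting: get_queued_filename() caches
its result, so later processing steps can ask for the original again
without localizing it a second time. A minimal sketch of that
behaviour, assuming a proc_state whose workbench has already been set:

    first = proc_state.get_queued_filename()   # copies the queued file into the workbench
    again = proc_state.get_queued_filename()   # short-circuits, returns the cached path
    assert first == again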


@@ -22,7 +22,7 @@ from celery import registry, task
 
 from mediagoblin import mg_globals as mgg
 from mediagoblin.db.models import MediaEntry
-from mediagoblin.processing import mark_entry_failed, BaseProcessingFail
+from . import mark_entry_failed, BaseProcessingFail, ProcessingState
 from mediagoblin.tools.processing import json_processing_callback
 
 _log = logging.getLogger(__name__)
@@ -85,8 +85,11 @@ class ProcessMedia(task.Task):
 
         _log.debug('Processing {0}'.format(entry))
 
-        # run the processing code
-        entry.media_manager['processor'](entry)
+        proc_state = ProcessingState(entry)
+        with mgg.workbench_manager.create() as workbench:
+            proc_state.set_workbench(workbench)
+            # run the processing code
+            entry.media_manager['processor'](entry)
 
         # We set the state to processed and save the entry here so there's
         # no need to save at the end of the processing stage, probably ;)