added image reprocessing

Rodney Ewing 2013-08-02 11:40:41 -07:00
parent 273c79513d
commit 9a2c66ca9e
6 changed files with 139 additions and 46 deletions

View File

@@ -13,6 +13,7 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
+from mediagoblin import mg_globals
 from mediagoblin.db.models import MediaEntry
 from mediagoblin.gmg_commands import util as commands_util
 from mediagoblin.tools.translate import lazy_pass_to_ugettext as _
@@ -143,6 +144,8 @@ def reprocess(args):
     _set_media_state(args)
     _set_media_type(args)
+    import ipdb
+    ipdb.set_trace()
     if not args[0].media_id:
         return _reprocess_all(args)

View File

@@ -15,13 +15,18 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 import argparse
 import datetime
+import logging
 
+from mediagoblin.db.models import MediaEntry
 from mediagoblin.media_types import MediaManagerBase
 from mediagoblin.media_types.image.processing import process_image, \
     sniff_handler
+from mediagoblin.submit.lib import run_process_media
 from mediagoblin.tools import pluginapi
 from mediagoblin.tools.translate import lazy_pass_to_ugettext as _
 
+_log = logging.getLogger(__name__)
 
 ACCEPTED_EXTENSIONS = ["jpg", "jpeg", "png", "gif", "tiff"]
 MEDIA_TYPE = 'mediagoblin.media_types.image'
@@ -69,7 +74,7 @@ def get_media_type_and_manager(ext):
 def reprocess_action(args):
     if args[0].state == 'processed':
         print _('\n Available reprocessing actions for processed images:'
-                '\n \t --resize: thumbnail or medium'
+                '\n \t --resize: thumb or medium'
                 '\n Options:'
                 '\n \t --size: max_width max_height (defaults to config specs)')
         return True
@@ -78,8 +83,7 @@
 def _parser(args):
     parser = argparse.ArgumentParser()
     parser.add_argument(
-        '--resize',
-        action='store_true')
+        '--resize')
     parser.add_argument(
         '--size',
         nargs=2)
@@ -100,6 +104,9 @@ def _check_eligible(entry_args, reprocess_args):
         if reprocess_args.resize:
             raise Exception(_('You can not run --resize on media that has not'
                               ' been processed.'))
+        if reprocess_args.size:
+            _log.warn('With --initial_processing, the --size flag will be'
+                      ' ignored.')
 
     if entry_args.state == 'processing':
         raise Exception(_('We currently do not support reprocessing on media'
@@ -111,8 +118,38 @@ def media_reprocess(args):
     entry_args = args[0]
     _check_eligible(entry_args, reprocess_args)
 
-    import ipdb
-    ipdb.set_trace()
+    if reprocess_args.initial_processing:
+        for id in entry_args.media_id:
+            entry = MediaEntry.query.filter_by(id=id).first()
+            # Should we get the feed_url?
+            run_process_media(entry)
+
+    elif reprocess_args.resize:
+        if reprocess_args.resize == 'medium' or reprocess_args.resize == \
+                'thumb':
+            for id in entry_args.media_id:
+                entry = MediaEntry.query.filter_by(id=id).first()
+
+                # For now we can only reprocess with the original file
+                if not entry.media_files.get('original'):
+                    raise Exception(_('The original file for this media entry'
+                                      ' does not exist.'))
+
+                reprocess_info = {'resize': reprocess_args.resize}
+
+                if reprocess_args.size and len(reprocess_args.size) == 2:
+                    reprocess_info['max_width'] = reprocess_args.size[0]
+                    reprocess_info['max_height'] = reprocess_args.size[1]
+
+                run_process_media(entry, reprocess_info=reprocess_info)
+        else:
+            raise Exception(_('The --resize flag must set either "thumb"'
+                              ' or "medium".'))
+
+    else:
+        _log.warn('You must set either the --resize or the'
+                  ' --initial_processing flag to reprocess an image.')
 
 
 hooks = {
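
The new `media_reprocess` branch above hinges on the `reprocess_info` dict it builds and hands to `run_process_media`. A minimal, self-contained sketch of that construction, with an `argparse.Namespace` standing in for the real parsed arguments (the values here are hypothetical):

import argparse

# Stand-in for the namespace that _parser() produces (hypothetical values).
reprocess_args = argparse.Namespace(resize='thumb', size=['200', '150'])

# Mirror media_reprocess: record the resize target, then attach explicit
# dimensions only when --size supplied both numbers.
reprocess_info = {'resize': reprocess_args.resize}
if reprocess_args.size and len(reprocess_args.size) == 2:
    reprocess_info['max_width'] = reprocess_args.size[0]
    reprocess_info['max_height'] = reprocess_args.size[1]

print(reprocess_info)
# {'resize': 'thumb', 'max_width': '200', 'max_height': '150'}

Note that `--size` is declared with `nargs=2` and no `type=`, so the two values arrive as strings; if the resize code needs integers, adding `type=int` to the argument would be a natural hardening.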

View File

@@ -74,11 +74,13 @@ def resize_image(proc_state, resized, keyname, target_name, new_size,
 def resize_tool(proc_state, force, keyname, target_name,
-                conversions_subdir, exif_tags):
+                conversions_subdir, exif_tags, new_size=None):
     # filename -- the filename of the original image being resized
     filename = proc_state.get_queued_filename()
-    max_width = mgg.global_config['media:' + keyname]['max_width']
-    max_height = mgg.global_config['media:' + keyname]['max_height']
+    if not new_size:
+        max_width = mgg.global_config['media:' + keyname]['max_width']
+        max_height = mgg.global_config['media:' + keyname]['max_height']
+        new_size = (max_width, max_height)
 
     # If the size of the original file exceeds the specified size for the desired
     # file, a target_name file is created and later associated with the media
     # entry.
@@ -93,7 +95,7 @@ def resize_tool(proc_state, force, keyname, target_name,
             or exif_image_needs_rotation(exif_tags):
         resize_image(
             proc_state, im, unicode(keyname), target_name,
-            (max_width, max_height),
+            new_size,
             exif_tags, conversions_subdir)
@@ -119,7 +121,7 @@ def sniff_handler(media_file, **kw):
         return None
 
 
-def process_image(proc_state):
+def process_image(proc_state, reprocess_info=None):
     """Code to process an image. Will be run by celery.
 
     A Workbench() represents a local temporary dir. It is automatically
@@ -127,12 +129,17 @@ def process_image(proc_state):
     """
     entry = proc_state.entry
     workbench = proc_state.workbench
+    import ipdb
+    ipdb.set_trace()
 
     # Conversions subdirectory to avoid collisions
     conversions_subdir = os.path.join(
         workbench.dir, 'conversions')
     os.mkdir(conversions_subdir)
 
-    queued_filename = proc_state.get_queued_filename()
-    name_builder = FilenameBuilder(queued_filename)
+    if reprocess_info:
+        _reprocess_image(proc_state, reprocess_info, conversions_subdir)
+    else:
+        queued_filename = proc_state.get_queued_filename()
+        name_builder = FilenameBuilder(queued_filename)
@@ -168,6 +175,31 @@ def process_image(proc_state):
         entry.media_data_init(**gps_data)
 
 
+def _reprocess_image(proc_state, reprocess_info, conversions_subdir):
+    reprocess_filename = proc_state.get_reprocess_filename()
+    name_builder = FilenameBuilder(reprocess_filename)
+
+    exif_tags = extract_exif(reprocess_filename)
+
+    if reprocess_info.get('max_width'):
+        max_width = reprocess_info['max_width']
+    else:
+        max_width = mgg.global_config \
+            ['media:' + reprocess_info['resize']]['max_width']
+
+    if reprocess_info.get('max_height'):
+        max_height = reprocess_info['max_height']
+    else:
+        max_height = mgg.global_config \
+            ['media:' + reprocess_info['resize']]['max_height']
+
+    new_size = (max_width, max_height)
+
+    resize_tool(proc_state, False, reprocess_info['resize'],
+                name_builder.fill('{basename}.thumbnail{ext}'),
+                conversions_subdir, exif_tags, new_size)
+
+
 if __name__ == '__main__':
     import sys
     import pprint
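
The `new_size` plumbing in this file repeats one pattern twice: use the caller's explicit dimensions when given, otherwise fall back to the per-keyname defaults in `mgg.global_config`. A self-contained sketch of that resolution, with a plain dict standing in for the real config (the sizes are hypothetical):

# Plain-dict stand-in for mgg.global_config (hypothetical sizes).
global_config = {
    'media:thumb': {'max_width': 180, 'max_height': 180},
    'media:medium': {'max_width': 640, 'max_height': 640},
}


def resolve_new_size(keyname, reprocess_info=None):
    # Explicit dimensions from reprocess_info win; config fills the gaps,
    # mirroring the fallbacks in resize_tool and _reprocess_image.
    reprocess_info = reprocess_info or {}
    defaults = global_config['media:' + keyname]
    max_width = reprocess_info.get('max_width', defaults['max_width'])
    max_height = reprocess_info.get('max_height', defaults['max_height'])
    return (max_width, max_height)


print(resolve_new_size('thumb'))                      # (180, 180)
print(resolve_new_size('thumb', {'max_width': 200}))  # (200, 180)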

View File

@@ -87,6 +87,7 @@ class ProcessingState(object):
         self.entry = entry
         self.workbench = None
         self.queued_filename = None
+        self.reprocess_filename = None
 
     def set_workbench(self, wb):
         self.workbench = wb
@@ -128,6 +129,22 @@ class ProcessingState(object):
         mgg.queue_store.delete_dir(queued_filepath[:-1])  # rm dir
         self.entry.queued_media_file = []
 
+    def get_reprocess_filename(self):
+        """
+        Get the filename to use during reprocessing
+        """
+        # Currently only returns the original file, but eventually will return
+        # the highest quality file if the original doesn't exist
+        if self.reprocess_filename is not None:
+            return self.reprocess_filename
+
+        reprocess_filepath = self.entry.media_files['original'][2]
+        reprocess_filename = self.workbench.local_file(
+            mgg.public_store, reprocess_filepath,
+            'original')
+        self.reprocess_filename = reprocess_filename
+
+        return reprocess_filename
 
 def mark_entry_failed(entry_id, exc):
     """

View File

@@ -68,13 +68,15 @@ class ProcessMedia(task.Task):
     """
     Pass this entry off for processing.
     """
-    def run(self, media_id, feed_url):
+    def run(self, media_id, feed_url, reprocess_info=None):
         """
         Pass the media entry off to the appropriate processing function
         (for now just process_image...)
 
         :param feed_url: The feed URL that the PuSH server needs to be
             updated for.
+        :param reprocess_info: A dict containing all of the necessary
+            reprocessing info for the media_type.
         """
         entry = MediaEntry.query.get(media_id)
@@ -89,7 +91,7 @@ class ProcessMedia(task.Task):
         with mgg.workbench_manager.create() as workbench:
             proc_state.set_workbench(workbench)
             # run the processing code
-            entry.media_manager.processor(proc_state)
+            entry.media_manager.processor(proc_state, reprocess_info)
 
         # We set the state to processed and save the entry here so there's
         # no need to save at the end of the processing stage, probably ;)
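
With this change, `reprocess_info` travels the same route as `feed_url`: serialized into the Celery task arguments, unpacked by `ProcessMedia.run`, and forwarded to the media type's processor (`process_image` in this commit). A stubbed-out sketch of the chain; the signatures match the hunks above, while the bodies are placeholders:

def process_image(proc_state, reprocess_info=None):
    # Stand-in for the real processor; just show what arrived.
    print('processing with reprocess_info=%r' % (reprocess_info,))


class ProcessMedia(object):
    def run(self, media_id, feed_url, reprocess_info=None):
        # ... look up the entry, set up the workbench ...
        process_image(proc_state=None, reprocess_info=reprocess_info)


# run_process_media's apply_async([entry.id, feed_url, reprocess_info], {})
# ultimately lands here:
ProcessMedia().run(media_id=42, feed_url=None,
                   reprocess_info={'resize': 'medium'})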

View File

@@ -76,17 +76,19 @@ def prepare_queue_task(app, entry, filename):
     return queue_file
 
 
-def run_process_media(entry, feed_url=None):
+def run_process_media(entry, feed_url=None, reprocess_info=None):
     """Process the media asynchronously
 
     :param entry: MediaEntry() instance to be processed.
     :param feed_url: A string indicating the feed_url that the PuSH servers
         should be notified of. This will be something like: `request.urlgen(
         'mediagoblin.user_pages.atom_feed', qualified=True,
-        user=request.user.username)`"""
+        user=request.user.username)`
+    :param reprocess_info: A dict containing all of the necessary reprocessing
+        info for the given media_type"""
     try:
         process_media.apply_async(
-            [entry.id, feed_url], {},
+            [entry.id, feed_url, reprocess_info], {},
             task_id=entry.queued_task_id)
     except BaseException as exc:
         # This section exists because, when running in "lazy"
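
Taken together, a caller that wants to regenerate a single derivative at explicit dimensions would now invoke the function above along these lines (the entry and sizes are hypothetical; passing `reprocess_info` by keyword keeps it out of the `feed_url` slot):

# entry is a MediaEntry instance; the sizes are illustrative.
run_process_media(entry,
                  reprocess_info={'resize': 'thumb',
                                  'max_width': 200,
                                  'max_height': 150})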