Fixes after merging video branch into master
- Removed debug output from init/celery
- Moved process_media/__init__.py to processing.py
- Centralized the processing.ProcessMedia task class
- Updated the media managers to reference the media type's processing function instead of the ProcessMedia instance
- Updated the new-style image processing to use the newer conversion code from the old-style process_media module (conversions subdirectory, original file extension preserved)
- Updated video transcoding
- Changed how the progress output reads the message, since message.structure['percent'] sometimes raises KeyError
commit 8e5f974684
parent b9e1fa280e
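
The thread running through these hunks is that media processing is now dispatched through the media type's MEDIA_MANAGER rather than hard-coded. Below is a minimal sketch of the new flow, condensed from the ProcessMedia.run changes in this diff; run_processing is a made-up free-function name used only for illustration (the real code is the ProcessMedia Celery task in the mediagoblin.processing module).

    # Sketch only: condensed from the ProcessMedia.run changes in this diff.
    from mediagoblin import mg_globals as mgg
    from mediagoblin.db.util import ObjectId
    from mediagoblin.media_types import get_media_manager
    from mediagoblin.processing import BaseProcessingFail, mark_entry_failed


    def run_processing(media_id):  # hypothetical name for the sketch
        entry = mgg.database.MediaEntry.one({'_id': ObjectId(media_id)})

        try:
            # Look up the media type's MEDIA_MANAGER dict; its "processor"
            # entry is a callable such as process_image or process_video.
            manager = get_media_manager(entry['media_type'])
            manager['processor'](entry)
        except BaseProcessingFail, exc:
            # Handled failure: record exception_path and metadata on the entry
            mark_entry_failed(entry._id, exc)
            return

        entry['state'] = u'processed'
        entry.save()
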
@@ -18,9 +18,7 @@ import os
 import sys
 
 
-MANDATORY_CELERY_IMPORTS = ['mediagoblin.process_media']
+MANDATORY_CELERY_IMPORTS = ['mediagoblin.processing']
 
-print(MANDATORY_CELERY_IMPORTS)
-
 DEFAULT_SETTINGS_MODULE = 'mediagoblin.init.celery.dummy_settings_module'
 
@@ -14,15 +14,13 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
-from mediagoblin.media_types.image.processing import process_media
+from mediagoblin.media_types.image.processing import process_image
 
 
 MEDIA_MANAGER = {
     "human_readable": "Image",
-    "processor": process_media,  # alternately a string,
+    "processor": process_image,  # alternately a string,
                                  # 'mediagoblin.media_types.image.processing'?
     "display_template": "mediagoblin/media_displays/image.html",
     "default_thumb": "images/media_thumbs/image.jpg",
-    "accepted_extensions": ["jpg", "jpeg", "png", "gif", "tiff"],
-    "accepted_mimetypes": [
-        "image/jpeg", "image/png", "image/gif", "image/tiff"]}
+    "accepted_extensions": ["jpg", "jpeg", "png", "gif", "tiff"]}
@@ -15,6 +15,7 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
 import Image
+import os
 
 from celery.task import Task
 from celery import registry
@@ -22,19 +23,9 @@ from celery import registry
 from mediagoblin.db.util import ObjectId
 from mediagoblin import mg_globals as mgg
 
-from mediagoblin.util import lazy_pass_to_ugettext as _
-from mediagoblin.process_media.errors import *
-
-THUMB_SIZE = 180, 180
-MEDIUM_SIZE = 640, 640
-
-
-def create_pub_filepath(entry, filename):
-    return mgg.public_store.get_unique_filepath(
-        ['media_entries',
-         unicode(entry['_id']),
-         filename])
-
+from mediagoblin.processing import BaseProcessingFail, \
+    mark_entry_failed, BadMediaFail, create_pub_filepath, THUMB_SIZE, \
+    MEDIUM_SIZE
 
 ################################
 # Media processing initial steps
@@ -77,67 +68,39 @@ class ProcessMedia(Task):
 process_media = registry.tasks[ProcessMedia.name]
 
 
-def mark_entry_failed(entry_id, exc):
-    """
-    Mark a media entry as having failed in its conversion.
-
-    Uses the exception that was raised to mark more information. If the
-    exception is a derivative of BaseProcessingFail then we can store extra
-    information that can be useful for users telling them why their media failed
-    to process.
-
-    Args:
-     - entry_id: The id of the media entry
-
-    """
-    # Was this a BaseProcessingFail? In other words, was this a
-    # type of error that we know how to handle?
-    if isinstance(exc, BaseProcessingFail):
-        # Looks like yes, so record information about that failure and any
-        # metadata the user might have supplied.
-        mgg.database['media_entries'].update(
-            {'_id': entry_id},
-            {'$set': {u'state': u'failed',
-                      u'fail_error': exc.exception_path,
-                      u'fail_metadata': exc.metadata}})
-    else:
-        # Looks like no, so just mark it as failed and don't record a
-        # failure_error (we'll assume it wasn't handled) and don't record
-        # metadata (in fact overwrite it if somehow it had previous info
-        # here)
-        mgg.database['media_entries'].update(
-            {'_id': entry_id},
-            {'$set': {u'state': u'failed',
-                      u'fail_error': None,
-                      u'fail_metadata': {}}})
-
-
 def process_image(entry):
     """
     Code to process an image
     """
     workbench = mgg.workbench_manager.create_workbench()
+    # Conversions subdirectory to avoid collisions
+    conversions_subdir = os.path.join(
+        workbench.dir, 'conversions')
+    os.mkdir(conversions_subdir)
 
     queued_filepath = entry['queued_media_file']
     queued_filename = workbench.localized_file(
         mgg.queue_store, queued_filepath,
         'source')
 
+    extension = os.path.splitext(queued_filename)[1]
+
     try:
         thumb = Image.open(queued_filename)
     except IOError:
         raise BadMediaFail()
 
     thumb.thumbnail(THUMB_SIZE, Image.ANTIALIAS)
-    # ensure color mode is compatible with jpg
-    if thumb.mode != "RGB":
-        thumb = thumb.convert("RGB")
 
-    thumb_filepath = create_pub_filepath(entry, 'thumbnail.jpg')
-    thumb_file = mgg.public_store.get_file(thumb_filepath, 'w')
-
-    with thumb_file:
-        thumb.save(thumb_file, "JPEG", quality=90)
+    # Copy the thumb to the conversion subdir, then remotely.
+    thumb_filename = 'thumbnail' + extension
+    thumb_filepath = create_pub_filepath(entry, thumb_filename)
+    tmp_thumb_filename = os.path.join(
+        conversions_subdir, thumb_filename)
+    with file(tmp_thumb_filename, 'w') as thumb_file:
+        thumb.save(thumb_file)
+    mgg.public_store.copy_local_to_storage(
+        tmp_thumb_filename, thumb_filepath)
 
     # If the size of the original file exceeds the specified size of a `medium`
     # file, a `medium.jpg` files is created and later associated with the media
@@ -148,15 +111,18 @@ def process_image(entry):
     if medium.size[0] > MEDIUM_SIZE[0] or medium.size[1] > MEDIUM_SIZE[1]:
         medium.thumbnail(MEDIUM_SIZE, Image.ANTIALIAS)
 
-        if medium.mode != "RGB":
-            medium = medium.convert("RGB")
+        medium_filename = 'medium' + extension
+        medium_filepath = create_pub_filepath(entry, medium_filename)
+        tmp_medium_filename = os.path.join(
+            conversions_subdir, medium_filename)
 
-        medium_filepath = create_pub_filepath(entry, 'medium.jpg')
-        medium_file = mgg.public_store.get_file(medium_filepath, 'w')
+        with file(tmp_medium_filename, 'w') as medium_file:
+            medium.save(medium_file)
 
-        with medium_file:
-            medium.save(medium_file, "JPEG", quality=90)
-        medium_processed = True
+        mgg.public_store.copy_local_to_storage(
+            tmp_medium_filename, medium_filepath)
+
+        medium_processed = True
 
     # we have to re-read because unlike PIL, not everything reads
     # things in string representation :)
@@ -165,7 +131,8 @@ def process_image(entry):
     with queued_file:
         original_filepath = create_pub_filepath(entry, queued_filepath[-1])
 
-        with mgg.public_store.get_file(original_filepath, 'wb') as original_file:
+        with mgg.public_store.get_file(original_filepath, 'wb') \
+                as original_file:
             original_file.write(queued_file.read())
 
     mgg.queue_store.delete_file(queued_filepath)
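
The image hunks above replace the old write-a-JPEG-straight-into-public-storage approach with a two-step pattern: render each conversion into a local conversions subdirectory (keeping the original file extension), then push the finished file with copy_local_to_storage(). A self-contained sketch of that pattern using PIL and the standard library follows; shutil.copy() stands in here for mgg.public_store.copy_local_to_storage(), and the function name and arguments are invented for the example.

    import os
    import shutil

    import Image  # PIL, imported the same way the code in this diff does

    THUMB_SIZE = 180, 180


    def make_thumb(queued_filename, workbench_dir, public_dir):
        # Conversions subdirectory to avoid collisions (as in the diff)
        conversions_subdir = os.path.join(workbench_dir, 'conversions')
        os.mkdir(conversions_subdir)

        # Keep the source extension instead of always producing .jpg
        extension = os.path.splitext(queued_filename)[1]
        thumb_filename = 'thumbnail' + extension

        thumb = Image.open(queued_filename)
        thumb.thumbnail(THUMB_SIZE, Image.ANTIALIAS)

        # Write locally first; PIL infers the output format from the
        # extension of the open file's name.
        tmp_thumb_filename = os.path.join(conversions_subdir, thumb_filename)
        with file(tmp_thumb_filename, 'w') as thumb_file:
            thumb.save(thumb_file)

        # ... then copy the finished file out; shutil.copy stands in for
        # mgg.public_store.copy_local_to_storage() here.
        shutil.copy(tmp_thumb_filename,
                    os.path.join(public_dir, thumb_filename))
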
@@ -14,13 +14,14 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
-from mediagoblin.media_types.video.processing import process_media
+from mediagoblin.media_types.video.processing import process_video
 
 
 MEDIA_MANAGER = {
     "human_readable": "Video",
-    "processor": process_media,  # alternately a string,
+    "processor": process_video,  # alternately a string,
                                  # 'mediagoblin.media_types.image.processing'?
     "display_template": "mediagoblin/media_displays/video.html",
     "default_thumb": "images/media_thumbs/video.jpg",
-    "accepted_extensions": ["mp4", "mov", "webm", "avi", "3gp", "3gpp", "mkv", "ogv", "ogg"]}
+    "accepted_extensions": [
+        "mp4", "mov", "webm", "avi", "3gp", "3gpp", "mkv", "ogv", "ogg"]}
@@ -18,21 +18,15 @@ import tempfile
 import logging
 import os
 
-from celery.task import Task
-from celery import registry
-
-from mediagoblin.db.util import ObjectId
 from mediagoblin import mg_globals as mgg
-from mediagoblin.process_media import BaseProcessingFail
-from mediagoblin.process_media import mark_entry_failed
+from mediagoblin.processing import mark_entry_failed, \
+    THUMB_SIZE, MEDIUM_SIZE, create_pub_filepath
 from . import transcoders
 
-THUMB_SIZE = 180, 180
-MEDIUM_SIZE = 640, 640
 
-logger = logging.getLogger(__name__)
 logging.basicConfig()
-logger.setLevel(logging.DEBUG)
+_log = logging.getLogger(__name__)
+_log.setLevel(logging.DEBUG)
 
 
 def process_video(entry):
@@ -73,8 +67,10 @@ def process_video(entry):
     transcoder = transcoders.VideoTranscoder(queued_filename, tmp_dst.name)
 
     # Push transcoded video to public storage
+    _log.debug('Saving medium...')
     mgg.public_store.get_file(medium_filepath, 'wb').write(
         tmp_dst.read())
+    _log.debug('Saved medium')
 
     entry['media_files']['webm_640'] = medium_filepath
@@ -91,8 +87,10 @@ def process_video(entry):
     transcoders.VideoThumbnailer(queued_filename, tmp_thumb.name)
 
     # Push the thumbnail to public storage
+    _log.debug('Saving thumbnail...')
     mgg.public_store.get_file(thumbnail_filepath, 'wb').write(
         tmp_thumb.read())
+    _log.debug('Saved thumbnail')
 
     entry['media_files']['thumb'] = thumbnail_filepath
@@ -107,7 +105,9 @@ def process_video(entry):
 
     with mgg.public_store.get_file(original_filepath, 'wb') as \
             original_file:
+        _log.debug('Saving original...')
         original_file.write(queued_file.read())
+        _log.debug('Saved original')
 
     entry['media_files']['original'] = original_filepath
@@ -116,50 +116,3 @@ def process_video(entry):
 
     # Save the MediaEntry
     entry.save()
-
-def create_pub_filepath(entry, filename):
-    return mgg.public_store.get_unique_filepath(
-        ['media_entries',
-         unicode(entry['_id']),
-         filename])
-
-
-################################
-# Media processing initial steps
-################################
-
-class ProcessMedia(Task):
-    """
-    Pass this entry off for processing.
-    """
-    def run(self, media_id):
-        """
-        Pass the media entry off to the appropriate processing function
-        (for now just process_image...)
-        """
-        entry = mgg.database.MediaEntry.one(
-            {'_id': ObjectId(media_id)})
-
-        # Try to process, and handle expected errors.
-        try:
-            process_video(entry)
-        except BaseProcessingFail, exc:
-            mark_entry_failed(entry[u'_id'], exc)
-            return
-
-        entry['state'] = u'processed'
-        entry.save()
-
-    def on_failure(self, exc, task_id, args, kwargs, einfo):
-        """
-        If the processing failed we should mark that in the database.
-
-        Assuming that the exception raised is a subclass of BaseProcessingFail,
-        we can use that to get more information about the failure and store that
-        for conveying information to users about the failure, etc.
-        """
-        entry_id = args[0]
-        mark_entry_failed(entry_id, exc)
-
-
-process_media = registry.tasks[ProcessMedia.name]
@@ -195,7 +195,6 @@ class VideoThumbnailer:
 
         _log.debug('seek amount: {0}'.format(seek_amount))
 
-
        seek_result = self.thumbnail_pipeline.seek(
            1.0,
            gst.FORMAT_TIME,
@@ -204,14 +203,6 @@ class VideoThumbnailer:
             seek_amount,
             gst.SEEK_TYPE_NONE,
             0)
-        '''
-
-        seek_result = self.thumbnail_pipeline.seek_simple(
-            gst.FORMAT_TIME,
-            gst.SEEK_FLAG_FLUSH | gst.SEEK_FLAG_ACCURATE,
-            seek_amount)
-
-        '''
 
         if not seek_result:
             self.errors.append('COULD_NOT_SEEK')
@@ -576,17 +567,13 @@ class VideoTranscoder:
 
         elif t == gst.MESSAGE_ELEMENT:
             if message.structure.get_name() == 'progress':
-                data = {
-                    'structure': message.structure,
-                    'percent': message.structure['percent'],
-                    'total': message.structure['total'],
-                    'current': message.structure['current']}
+                data = dict(message.structure)
 
                 if self._progress_callback:
                     self._progress_callback(data)
 
                 _log.info('{percent}% done...'.format(
-                    percent=data['percent']))
+                    percent=data.get('percent')))
                 _log.debug(data)
 
         elif t == gst.MESSAGE_ERROR:
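
This is the fix named in the commit message: GStreamer 'progress' messages do not always include a percent field, so message.structure['percent'] could raise KeyError mid-transcode. Copying the structure into a plain dict and reading it with .get() keeps progress reporting alive whatever keys are present. A small self-contained illustration (a plain dict stands in for the gst message structure):

    import logging

    logging.basicConfig()
    _log = logging.getLogger(__name__)
    _log.setLevel(logging.DEBUG)

    # A 'progress' structure that happens to lack the 'percent' field
    structure = {'total': 1024, 'current': 256}

    # Old approach: dies as soon as an expected field is missing
    try:
        data = {'percent': structure['percent'],
                'total': structure['total'],
                'current': structure['current']}
    except KeyError:
        _log.debug('percent missing; the old code stopped here')

    # New approach: take whatever fields exist and read them defensively
    data = dict(structure)
    _log.info('{percent}% done...'.format(percent=data.get('percent')))
    # logs "None% done..." instead of raising an unhandled KeyError
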
@@ -1,45 +0,0 @@
-# GNU MediaGoblin -- federated, autonomous media hosting
-# Copyright (C) 2011 MediaGoblin contributors. See AUTHORS.
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-from mediagoblin.tools.translate import lazy_pass_to_ugettext as _
-
-
-class BaseProcessingFail(Exception):
-    """
-    Base exception that all other processing failure messages should
-    subclass from.
-
-    You shouldn't call this itself; instead you should subclass it
-    and provid the exception_path and general_message applicable to
-    this error.
-    """
-    general_message = u''
-
-    @property
-    def exception_path(self):
-        return u"%s:%s" % (
-            self.__class__.__module__, self.__class__.__name__)
-
-    def __init__(self, **metadata):
-        self.metadata = metadata or {}
-
-
-class BadMediaFail(BaseProcessingFail):
-    """
-    Error that should be raised when an inappropriate file was given
-    for the media type specified.
-    """
-    general_message = _(u'Invalid file given for media type.')
@@ -14,15 +14,14 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
-import os
-
-import Image
 from celery.task import Task
-from celery import registry
 
 from mediagoblin.db.util import ObjectId
 from mediagoblin import mg_globals as mgg
-from mediagoblin.process_media.errors import BaseProcessingFail, BadMediaFail
+
+from mediagoblin.util import lazy_pass_to_ugettext as _
+
+from mediagoblin.media_types import get_media_manager
 
 
 THUMB_SIZE = 180, 180
@@ -42,6 +41,8 @@ def create_pub_filepath(entry, filename):
 
 class ProcessMedia(Task):
     """
+    DEPRECATED -- This now resides in the individual media plugins
+
     Pass this entry off for processing.
     """
     def run(self, media_id):
@@ -54,8 +55,9 @@ class ProcessMedia(Task):
 
         # Try to process, and handle expected errors.
         try:
-            __import__(entry['media_type'])
-            process_image(entry)
+            #__import__(entry['media_type'])
+            manager = get_media_manager(entry['media_type'])
+            manager['processor'](entry)
         except BaseProcessingFail, exc:
             mark_entry_failed(entry._id, exc)
             return
@@ -78,9 +80,6 @@ class ProcessMedia(Task):
         mark_entry_failed(entry_id, exc)
 
 
-process_media = registry.tasks[ProcessMedia.name]
-
-
 def mark_entry_failed(entry_id, exc):
     """
     Mark a media entry as having failed in its conversion.
|
|||||||
u'fail_metadata': {}}})
|
u'fail_metadata': {}}})
|
||||||
|
|
||||||
|
|
||||||
def process_image(entry):
|
class BaseProcessingFail(Exception):
|
||||||
"""
|
"""
|
||||||
Code to process an image
|
Base exception that all other processing failure messages should
|
||||||
|
subclass from.
|
||||||
|
|
||||||
|
You shouldn't call this itself; instead you should subclass it
|
||||||
|
and provid the exception_path and general_message applicable to
|
||||||
|
this error.
|
||||||
"""
|
"""
|
||||||
workbench = mgg.workbench_manager.create_workbench()
|
general_message = u''
|
||||||
# Conversions subdirectory to avoid collisions
|
|
||||||
conversions_subdir = os.path.join(
|
|
||||||
workbench.dir, 'conversions')
|
|
||||||
os.mkdir(conversions_subdir)
|
|
||||||
|
|
||||||
queued_filepath = entry['queued_media_file']
|
@property
|
||||||
queued_filename = workbench.localized_file(
|
def exception_path(self):
|
||||||
mgg.queue_store, queued_filepath,
|
return u"%s:%s" % (
|
||||||
'source')
|
self.__class__.__module__, self.__class__.__name__)
|
||||||
|
|
||||||
extension = os.path.splitext(queued_filename)[1]
|
def __init__(self, **metadata):
|
||||||
|
self.metadata = metadata or {}
|
||||||
|
|
||||||
try:
|
|
||||||
thumb = Image.open(queued_filename)
|
|
||||||
except IOError:
|
|
||||||
raise BadMediaFail()
|
|
||||||
|
|
||||||
thumb.thumbnail(THUMB_SIZE, Image.ANTIALIAS)
|
class BadMediaFail(BaseProcessingFail):
|
||||||
|
"""
|
||||||
# Copy the thumb to the conversion subdir, then remotely.
|
Error that should be raised when an inappropriate file was given
|
||||||
thumb_filename = 'thumbnail' + extension
|
for the media type specified.
|
||||||
thumb_filepath = create_pub_filepath(entry, thumb_filename)
|
"""
|
||||||
tmp_thumb_filename = os.path.join(
|
general_message = _(u'Invalid file given for media type.')
|
||||||
conversions_subdir, thumb_filename)
|
|
||||||
with file(tmp_thumb_filename, 'w') as thumb_file:
|
|
||||||
thumb.save(thumb_file)
|
|
||||||
mgg.public_store.copy_local_to_storage(
|
|
||||||
tmp_thumb_filename, thumb_filepath)
|
|
||||||
|
|
||||||
# If the size of the original file exceeds the specified size of a `medium`
|
|
||||||
# file, a `medium.jpg` files is created and later associated with the media
|
|
||||||
# entry.
|
|
||||||
medium = Image.open(queued_filename)
|
|
||||||
medium_processed = False
|
|
||||||
|
|
||||||
if medium.size[0] > MEDIUM_SIZE[0] or medium.size[1] > MEDIUM_SIZE[1]:
|
|
||||||
medium.thumbnail(MEDIUM_SIZE, Image.ANTIALIAS)
|
|
||||||
|
|
||||||
medium_filename = 'medium' + extension
|
|
||||||
medium_filepath = create_pub_filepath(entry, medium_filename)
|
|
||||||
tmp_medium_filename = os.path.join(
|
|
||||||
conversions_subdir, medium_filename)
|
|
||||||
|
|
||||||
with file(tmp_medium_filename, 'w') as medium_file:
|
|
||||||
medium.save(medium_file)
|
|
||||||
|
|
||||||
mgg.public_store.copy_local_to_storage(
|
|
||||||
tmp_medium_filename, medium_filepath)
|
|
||||||
|
|
||||||
medium_processed = True
|
|
||||||
|
|
||||||
# we have to re-read because unlike PIL, not everything reads
|
|
||||||
# things in string representation :)
|
|
||||||
queued_file = file(queued_filename, 'rb')
|
|
||||||
|
|
||||||
with queued_file:
|
|
||||||
original_filepath = create_pub_filepath(entry, queued_filepath[-1])
|
|
||||||
|
|
||||||
with mgg.public_store.get_file(original_filepath, 'wb') \
|
|
||||||
as original_file:
|
|
||||||
original_file.write(queued_file.read())
|
|
||||||
|
|
||||||
mgg.queue_store.delete_file(queued_filepath)
|
|
||||||
entry['queued_media_file'] = []
|
|
||||||
media_files_dict = entry.setdefault('media_files', {})
|
|
||||||
media_files_dict['thumb'] = thumb_filepath
|
|
||||||
media_files_dict['original'] = original_filepath
|
|
||||||
if medium_processed:
|
|
||||||
media_files_dict['medium'] = medium_filepath
|
|
||||||
|
|
||||||
# clean up workbench
|
|
||||||
workbench.destroy_self()
|
|
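
With BaseProcessingFail and BadMediaFail now living in mediagoblin.processing, a media plugin reports a handled failure by raising a subclass; mark_entry_failed() stores exception_path (module:class) and the keyword metadata on the entry so the failure can be explained to the user. A hedged sketch of defining and inspecting such a subclass follows; VideoTranscodeFail is a made-up example class, and the gettext hook is replaced by a pass-through so the snippet runs on its own.

    # Stand-in for the lazy gettext hook used by the real code
    _ = lambda text: text


    class BaseProcessingFail(Exception):
        """Base class for handled processing failures (as in the diff)."""
        general_message = u''

        @property
        def exception_path(self):
            return u"%s:%s" % (
                self.__class__.__module__, self.__class__.__name__)

        def __init__(self, **metadata):
            self.metadata = metadata or {}


    class VideoTranscodeFail(BaseProcessingFail):
        """Hypothetical subclass a video plugin might raise."""
        general_message = _(u'Could not transcode this video.')


    try:
        raise VideoTranscodeFail(codec='unknown')
    except BaseProcessingFail, exc:
        # mark_entry_failed() would store these two values on the media entry
        print exc.exception_path    # e.g. u'__main__:VideoTranscodeFail'
        print exc.metadata          # {'codec': 'unknown'}
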
@@ -19,6 +19,8 @@ import uuid
 from os.path import splitext
 from cgi import FieldStorage
 
+from celery import registry
+
 from werkzeug.utils import secure_filename
 
 from mediagoblin.db.util import ObjectId
@@ -27,7 +29,7 @@ from mediagoblin.tools.translate import pass_to_ugettext as _
 from mediagoblin.tools.response import render_to_response, redirect
 from mediagoblin.decorators import require_active_login
 from mediagoblin.submit import forms as submit_forms, security
-from mediagoblin.process_media import mark_entry_failed
+from mediagoblin.processing import mark_entry_failed, ProcessMedia
 from mediagoblin.messages import add_message, SUCCESS
 from mediagoblin.media_types import get_media_type_and_manager
 
@@ -104,8 +106,9 @@ def submit_start(request):
             #
             # (... don't change entry after this point to avoid race
             # conditions with changes to the document via processing code)
+            process_media = registry.tasks[ProcessMedia.name]
             try:
-                media_manager['processor'].apply_async(
+                process_media.apply_async(
                     [unicode(entry._id)], {},
                     task_id=task_id)
             except BaseException as exc:
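
The submit view now queues the shared task through Celery's task registry instead of calling the media manager's processor directly (the processor runs inside the task, not as a task of its own). A minimal sketch of that lookup and dispatch against the pre-3.x Celery API used here; the stub task, the ObjectId string and the task_id value are placeholders, and apply_async() needs a configured broker to actually deliver anything.

    from celery.task import Task
    from celery import registry


    class ProcessMedia(Task):
        """Stub standing in for mediagoblin.processing.ProcessMedia."""
        def run(self, media_id):
            print 'would process media entry %s' % media_id


    # Look the task up from the registry by its registered name ...
    process_media = registry.tasks[ProcessMedia.name]

    # ... and queue it the way submit_start() does: positional args,
    # empty kwargs, and an explicit task_id so the entry can track it.
    some_object_id = '4e8ad7a0863d2e0b74000000'  # placeholder ObjectId string
    process_media.apply_async(
        [unicode(some_object_id)], {},
        task_id=u'placeholder-task-id')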