Merge branch 'master' into joar-skip_transcoding

Conflicts:
	mediagoblin/config_spec.ini
Christopher Allan Webber
2013-03-01 16:29:22 -06:00
140 changed files with 10244 additions and 6895 deletions


@@ -49,7 +49,8 @@ def sniff_media(media):
     for media_type, manager in get_media_managers():
         _log.info('Sniffing {0}'.format(media_type))
-        if manager['sniff_handler'](media_file, media=media):
+        if 'sniff_handler' in manager and \
+                manager['sniff_handler'](media_file, media=media):
             _log.info('{0} accepts the file'.format(media_type))
             return media_type, manager
         else:
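
This hunk makes 'sniff_handler' optional in a media manager, so media types that do not register one are skipped instead of raising KeyError. A minimal sketch of that lookup pattern; the MEDIA_MANAGERS table and sniff_media_types name below are illustrative, not the actual get_media_managers() API:

import logging

_log = logging.getLogger(__name__)

# Hypothetical manager table; the real managers come from get_media_managers().
MEDIA_MANAGERS = {
    'mediagoblin.media_types.image': {'sniff_handler': lambda f, media=None: True},
    'mediagoblin.media_types.sometype': {},  # declares no sniff_handler
}

def sniff_media_types(media_file, media=None):
    for media_type, manager in MEDIA_MANAGERS.items():
        _log.info('Sniffing {0}'.format(media_type))
        # Guard first, so managers without a sniff_handler are skipped
        # rather than blowing up with a KeyError.
        if 'sniff_handler' in manager and \
                manager['sniff_handler'](media_file, media=media):
            _log.info('{0} accepts the file'.format(media_type))
            return media_type, manager
    return None, None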


@@ -32,7 +32,8 @@ class AsciiData(Base):
     media_entry = Column(Integer, ForeignKey('core__media_entries.id'),
         primary_key=True)
     get_media_entry = relationship("MediaEntry",
-        backref=backref(BACKREF_NAME, cascade="all, delete-orphan"))
+        backref=backref(BACKREF_NAME, uselist=False,
+                        cascade="all, delete-orphan"))

 DATA_MODEL = AsciiData
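
The uselist=False added to every media-data backref here (and in the audio, image, stl and video models below) turns the reverse relationship into a scalar, so a MediaEntry exposes its type-specific data row as an object rather than a one-element list. A standalone sketch of the same pattern; the table and backref names are stand-ins for whatever BACKREF_NAME expands to:

from sqlalchemy import Column, Integer, ForeignKey, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, backref, sessionmaker

Base = declarative_base()

class MediaEntry(Base):
    __tablename__ = 'core__media_entries'
    id = Column(Integer, primary_key=True)

class AsciiData(Base):
    __tablename__ = 'ascii__media_data'
    media_entry = Column(Integer, ForeignKey('core__media_entries.id'),
                         primary_key=True)
    get_media_entry = relationship(
        "MediaEntry",
        backref=backref('ascii__media_data', uselist=False,
                        cascade="all, delete-orphan"))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

entry = MediaEntry()
entry.ascii__media_data = AsciiData()  # scalar attribute thanks to uselist=False
session.add(entry)
session.commit()
assert not isinstance(entry.ascii__media_data, list)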


@@ -19,7 +19,6 @@ import Image
 import logging

 from mediagoblin import mg_globals as mgg
-from mediagoblin.decorators import get_workbench
 from mediagoblin.processing import create_pub_filepath
 from mediagoblin.media_types.ascii import asciitoimage
@@ -39,13 +38,14 @@ def sniff_handler(media_file, **kw):
     return False


-@get_workbench
-def process_ascii(entry, workbench=None):
+def process_ascii(proc_state):
     """Code to process a txt file. Will be run by celery.

     A Workbench() represents a local tempory dir. It is automatically
     cleaned up when this function exits.
     """
+    entry = proc_state.entry
+    workbench = proc_state.workbench
     ascii_config = mgg.global_config['media_type:mediagoblin.media_types.ascii']
     # Conversions subdirectory to avoid collisions
     conversions_subdir = os.path.join(
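
This is the shape of the refactor repeated for every media type below: the @get_workbench decorator and the (entry, workbench=None) signature give way to a single proc_state argument that carries the entry, the workbench, and the common queue/storage chores. The diff never shows that object, so the following is only a guessed stand-in built from the calls the processors make (get_queued_filename, copy_original, delete_queue_file); the attribute layout and path construction are assumptions:

class FakeProcessingState(object):
    """Rough stand-in for the processing-state object handed to
    process_ascii()/process_image()/etc.  Only the pieces visible in the
    hunks above and below are modelled."""

    def __init__(self, entry, workbench, queue_store, public_store):
        self.entry = entry
        self.workbench = workbench
        self.queue_store = queue_store
        self.public_store = public_store
        self.queued_filepath = entry.queued_media_file

    def get_queued_filename(self):
        # Fetch the queued upload into the local workbench, return its path.
        return self.workbench.localized_file(
            self.queue_store, self.queued_filepath, 'source')

    def copy_original(self, target_name):
        # Publish the queued file under target_name and record it on the entry
        # (the filepath layout here is a guess, not the real create_pub_filepath).
        original_filepath = [u'media_entries', str(self.entry.id), target_name]
        self.public_store.copy_local_to_storage(
            self.get_queued_filename(), original_filepath)
        self.entry.media_files['original'] = original_filepath

    def delete_queue_file(self):
        # Drop the queued copy from storage and forget it on the entry.
        self.queue_store.delete_file(self.queued_filepath)
        self.entry.queued_media_file = []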


@@ -32,7 +32,8 @@ class AudioData(Base):
     media_entry = Column(Integer, ForeignKey('core__media_entries.id'),
         primary_key=True)
     get_media_entry = relationship("MediaEntry",
-        backref=backref(BACKREF_NAME, cascade="all, delete-orphan"))
+        backref=backref(BACKREF_NAME, uselist=False,
+                        cascade="all, delete-orphan"))

 DATA_MODEL = AudioData


@@ -19,7 +19,6 @@ from tempfile import NamedTemporaryFile
 import os

 from mediagoblin import mg_globals as mgg
-from mediagoblin.decorators import get_workbench
 from mediagoblin.processing import (create_pub_filepath, BadMediaFail,
                                     FilenameBuilder, ProgressCallback)
@@ -43,13 +42,14 @@ def sniff_handler(media_file, **kw):
     return False


-@get_workbench
-def process_audio(entry, workbench=None):
+def process_audio(proc_state):
     """Code to process uploaded audio. Will be run by celery.

     A Workbench() represents a local tempory dir. It is automatically
     cleaned up when this function exits.
     """
+    entry = proc_state.entry
+    workbench = proc_state.workbench
     audio_config = mgg.global_config['media_type:mediagoblin.media_types.audio']

     queued_filepath = entry.queued_media_file


@@ -33,7 +33,8 @@ class ImageData(Base):
     media_entry = Column(Integer, ForeignKey('core__media_entries.id'),
         primary_key=True)
     get_media_entry = relationship("MediaEntry",
-        backref=backref(BACKREF_NAME, cascade="all, delete-orphan"))
+        backref=backref(BACKREF_NAME, uselist=False,
+                        cascade="all, delete-orphan"))
     width = Column(Integer)
     height = Column(Integer)


@@ -19,7 +19,6 @@ import os
 import logging

 from mediagoblin import mg_globals as mgg
-from mediagoblin.decorators import get_workbench
 from mediagoblin.processing import BadMediaFail, \
     create_pub_filepath, FilenameBuilder
 from mediagoblin.tools.exif import exif_fix_image_orientation, \
@@ -28,6 +27,12 @@ from mediagoblin.tools.exif import exif_fix_image_orientation, \

 _log = logging.getLogger(__name__)

+PIL_FILTERS = {
+    'NEAREST': Image.NEAREST,
+    'BILINEAR': Image.BILINEAR,
+    'BICUBIC': Image.BICUBIC,
+    'ANTIALIAS': Image.ANTIALIAS}
+

 def resize_image(entry, filename, new_path, exif_tags, workdir, new_size,
                  size_limits=(0, 0)):
@@ -47,7 +52,19 @@ def resize_image(entry, filename, new_path, exif_tags, workdir, new_size,
     except IOError:
         raise BadMediaFail()
     resized = exif_fix_image_orientation(resized, exif_tags)  # Fix orientation
-    resized.thumbnail(new_size, Image.ANTIALIAS)
+
+    filter_config = \
+        mgg.global_config['media_type:mediagoblin.media_types.image']\
+            ['resize_filter']
+
+    try:
+        resize_filter = PIL_FILTERS[filter_config.upper()]
+    except KeyError:
+        raise Exception('Filter "{0}" not found, choose one of {1}'.format(
+            unicode(filter_config),
+            u', '.join(PIL_FILTERS.keys())))
+
+    resized.thumbnail(new_size, resize_filter)

     # Copy the new file to the conversion subdir, then remotely.
     tmp_resized_filename = os.path.join(workdir, new_path[-1])
@@ -77,21 +94,21 @@ def sniff_handler(media_file, **kw):
     return False


-@get_workbench
-def process_image(entry, workbench=None):
+def process_image(proc_state):
     """Code to process an image. Will be run by celery.

     A Workbench() represents a local tempory dir. It is automatically
     cleaned up when this function exits.
     """
+    entry = proc_state.entry
+    workbench = proc_state.workbench
+
     # Conversions subdirectory to avoid collisions
     conversions_subdir = os.path.join(
         workbench.dir, 'conversions')
     os.mkdir(conversions_subdir)

-    queued_filepath = entry.queued_media_file
-    queued_filename = workbench.localized_file(
-        mgg.queue_store, queued_filepath,
-        'source')
+    queued_filename = proc_state.get_queued_filename()
     name_builder = FilenameBuilder(queued_filename)

     # EXIF extraction
@@ -124,18 +141,14 @@ def process_image(entry, workbench=None):
         medium_filepath = None

     # Copy our queued local workbench to its final destination
-    original_filepath = create_pub_filepath(
-        entry, name_builder.fill('{basename}{ext}'))
-    mgg.public_store.copy_local_to_storage(queued_filename, original_filepath)
+    proc_state.copy_original(name_builder.fill('{basename}{ext}'))

     # Remove queued media file from storage and database
-    mgg.queue_store.delete_file(queued_filepath)
-    entry.queued_media_file = []
+    proc_state.delete_queue_file()

-    # Insert media file information into database
-    media_files_dict = entry.setdefault('media_files', {})
-    media_files_dict[u'thumb'] = thumb_filepath
-    media_files_dict[u'original'] = original_filepath
-    if medium_filepath:
-        media_files_dict[u'medium'] = medium_filepath
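
The new PIL_FILTERS table lets the resize filter come from the image media type's resize_filter config option instead of being hard-wired to ANTIALIAS. A cut-down version of that lookup with the config access reduced to a plain dict; pick_resize_filter is an illustrative name, and the import mirrors the classic PIL used by this codebase (modern Pillow would use `from PIL import Image` and Image.LANCZOS):

import Image  # classic PIL, as in the module above

PIL_FILTERS = {
    'NEAREST': Image.NEAREST,
    'BILINEAR': Image.BILINEAR,
    'BICUBIC': Image.BICUBIC,
    'ANTIALIAS': Image.ANTIALIAS}

def pick_resize_filter(config):
    """Map the configured filter name onto a PIL constant, failing loudly
    when the name is unknown."""
    filter_name = config.get('resize_filter', 'ANTIALIAS')
    try:
        return PIL_FILTERS[filter_name.upper()]
    except KeyError:
        raise ValueError('Filter "{0}" not found, choose one of {1}'.format(
            filter_name, ', '.join(PIL_FILTERS.keys())))

# e.g. thumb.thumbnail((180, 180), pick_resize_filter({'resize_filter': 'bicubic'}))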


@@ -32,7 +32,8 @@ class StlData(Base):
     media_entry = Column(Integer, ForeignKey('core__media_entries.id'),
         primary_key=True)
     get_media_entry = relationship("MediaEntry",
-        backref=backref(BACKREF_NAME, cascade="all, delete-orphan"))
+        backref=backref(BACKREF_NAME, uselist=False,
+                        cascade="all, delete-orphan"))
     center_x = Column(Float)
     center_y = Column(Float)


@@ -21,7 +21,6 @@ import subprocess
 import pkg_resources

 from mediagoblin import mg_globals as mgg
-from mediagoblin.decorators import get_workbench
 from mediagoblin.processing import create_pub_filepath, \
     FilenameBuilder
@@ -76,13 +75,15 @@ def blender_render(config):
         env=env)


-@get_workbench
-def process_stl(entry, workbench=None):
+def process_stl(proc_state):
     """Code to process an stl or obj model. Will be run by celery.

     A Workbench() represents a local tempory dir. It is automatically
     cleaned up when this function exits.
     """
+    entry = proc_state.entry
+    workbench = proc_state.workbench
+
     queued_filepath = entry.queued_media_file
     queued_filename = workbench.localized_file(
         mgg.queue_store, queued_filepath, 'source')


@@ -32,7 +32,8 @@ class VideoData(Base):
     media_entry = Column(Integer, ForeignKey('core__media_entries.id'),
         primary_key=True)
     get_media_entry = relationship("MediaEntry",
-        backref=backref(BACKREF_NAME, cascade="all, delete-orphan"))
+        backref=backref(BACKREF_NAME, uselist=False,
+                        cascade="all, delete-orphan"))
     width = Column(SmallInteger)
     height = Column(SmallInteger)


@@ -18,7 +18,6 @@ from tempfile import NamedTemporaryFile
 import logging

 from mediagoblin import mg_globals as mgg
-from mediagoblin.decorators import get_workbench
 from mediagoblin.processing import \
     create_pub_filepath, FilenameBuilder, BaseProcessingFail, ProgressCallback
 from mediagoblin.tools.translate import lazy_pass_to_ugettext as _
@@ -54,8 +53,8 @@ def sniff_handler(media_file, **kw):
     return False


-@get_workbench
-def process_video(entry, workbench=None):
+def process_video(proc_state):
     """
     Process a video entry, transcode the queued media files (originals) and
     create a thumbnail for the entry.
@@ -63,12 +62,12 @@ def process_video(entry, workbench=None):
     A Workbench() represents a local tempory dir. It is automatically
     cleaned up when this function exits.
     """
+    entry = proc_state.entry
+    workbench = proc_state.workbench
     video_config = mgg.global_config['media_type:mediagoblin.media_types.video']

     queued_filepath = entry.queued_media_file
-    queued_filename = workbench.localized_file(
-        mgg.queue_store, queued_filepath,
-        'source')
+    queued_filename = proc_state.get_queued_filename()
     name_builder = FilenameBuilder(queued_filename)

     medium_filepath = create_pub_filepath(
@@ -138,8 +137,7 @@ def process_video(entry, workbench=None):
     if video_config['keep_original']:
         # Push original file to public storage
         _log.debug('Saving original...')
-        original_filepath = create_pub_filepath(entry, queued_filepath[-1])
-        mgg.public_store.copy_local_to_storage(queued_filename, original_filepath)
-        entry.media_files['original'] = original_filepath
+        proc_state.copy_original(queued_filepath[-1])

-    mgg.queue_store.delete_file(queued_filepath)
+    # Remove queued media file from storage and database
+    proc_state.delete_queue_file()
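
The tail of process_video() keeps the keep_original gate but routes the copying and queue cleanup through the proc_state helpers. Roughly, assuming a proc_state like the stand-in sketched after the ascii hunk (finish_video is a made-up name, not a function in this commit):

def finish_video(proc_state, video_config, queued_filepath):
    """Sketch of the end of process_video(): optionally keep the original
    upload in public storage, then always clear the queue."""
    if video_config['keep_original']:
        # Publish the untranscoded upload under its original filename.
        proc_state.copy_original(queued_filepath[-1])

    # Remove queued media file from storage and database.
    proc_state.delete_queue_file()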


@@ -477,8 +477,8 @@ from playbin')
         _log.debug('thumbnail message: {0}'.format(message))

         if message.type == gst.MESSAGE_ERROR:
-            _log.error('thumbnail error: {0}'.format(message))
-            gobject.idle_add(self.on_thumbnail_error)
+            _log.error('thumbnail error: {0}'.format(message.parse_error()))
+            gobject.idle_add(self.on_thumbnail_error, message)

         if message.type == gst.MESSAGE_STATE_CHANGED:
             prev_state, cur_state, pending_state = \
@@ -570,10 +570,37 @@ pending: {2}'.format(
         return False

-    def on_thumbnail_error(self):
-        _log.error('Thumbnailing failed.')
+    def on_thumbnail_error(self, message):
+        scaling_failed = False
+
+        if 'Error calculating the output scaled size - integer overflow' \
+           in message.parse_error()[1]:
+            # GStreamer videoscale sometimes fails to calculate the dimensions
+            # given only one of the destination dimensions and the source
+            # dimensions. This is a workaround in case videoscale returns an
+            # error that indicates this has happened.
+            scaling_failed = True
+            _log.error('Thumbnailing failed because of videoscale integer'
+                       ' overflow. Will retry with fallback.')
+        else:
+            _log.error('Thumbnailing failed: {0}'.format(message.parse_error()))
+
+        # Kill the current mainloop
+        self.disconnect()
+
+        if scaling_failed:
+            # Manually scale the destination dimensions
+            _log.info('Retrying with manually set sizes...')
+
+            info = VideoTranscoder().discover(self.source_path)
+
+            h = info['videoheight']
+            w = info['videowidth']
+            ratio = 180 / int(w)
+            h = int(h * ratio)
+
+            self.__init__(self.source_path, self.dest_path, 180, h)

     def disconnect(self):
         self.state = self.STATE_HALTING
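
The fallback recomputes the thumbnail height from the discovered source size when videoscale overflows. Worth noting: on Python 2, `ratio = 180 / int(w)` is integer division unless the module imports division from __future__, so any source wider than 180 px would give a ratio of 0; the aspect-ratio arithmetic with float division looks like this (THUMB_WIDTH and scaled_height are illustrative names, not part of the transcoder):

from __future__ import division  # make / mean float division on Python 2

THUMB_WIDTH = 180  # fixed target width used by the thumbnailer above

def scaled_height(source_width, source_height, target_width=THUMB_WIDTH):
    """Scale the height to preserve aspect ratio at a fixed target width."""
    ratio = target_width / source_width
    return int(source_height * ratio)

# e.g. a 1280x720 source gives scaled_height(1280, 720) == 101
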
@@ -1009,4 +1036,4 @@ if __name__ == '__main__':
             print('I\'m a callback!')
         transcoder.transcode(*args, progress_callback=cb)
     elif options.action == 'discover':
-        print transcoder.discover(*args).__dict__
+        print transcoder.discover(*args)