Making the API make use of the new submit tooling.
This commit sponsored by Zakkai Kauffman-Rogoff. Thanks! :)
parent c802c41af4
commit 131b749529
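For context: after this change the API's post_entry view reads the same multipart form fields as before (file, title, description, license, and an optional callback_url) but hands them straight to the shared submit tooling. A minimal client-side sketch, assuming a requests-based client, HTTP Basic auth, and an /api/submit endpoint path (the host, path, and auth scheme are assumptions for illustration and are not part of this diff; only the form field names come from the view below):

    # Hypothetical client sketch; endpoint path and auth are assumptions.
    import requests  # third-party HTTP client, assumed available

    def push_media(base_url, username, password):
        # The view reads one file field named 'file' plus optional text
        # fields; callback_url is stored so processing can ping back later.
        with open('photo.png', 'rb') as media:
            response = requests.post(
                base_url + '/api/submit',       # assumed endpoint path
                auth=(username, password),      # assumed auth scheme
                files={'file': media},
                data={
                    'title': 'My photo',
                    'description': 'Uploaded through the API',
                    'license': '',
                    'callback_url': 'https://example.org/hooks/mediagoblin',
                })
        response.raise_for_status()
        # On success the view returns the serialized media entry as JSON.
        return response.json()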
--- a/mediagoblin/plugins/api/views.py
+++ b/mediagoblin/plugins/api/views.py
@@ -17,17 +17,19 @@
 import json
 import logging
 
-from os.path import splitext
-
-from werkzeug.exceptions import BadRequest
+from werkzeug.exceptions import BadRequest, Forbidden
 from werkzeug.wrappers import Response
 
+from mediagoblin.tools.translate import pass_to_ugettext as _
 from mediagoblin.tools.response import json_response
 from mediagoblin.decorators import require_active_login
 from mediagoblin.meddleware.csrf import csrf_exempt
-from mediagoblin.media_types import sniff_media
+from mediagoblin.media_types import \
+    InvalidFileType, FileTypeNotSupported
 from mediagoblin.plugins.api.tools import api_auth, get_entry_serializable
-from mediagoblin.submit.lib import check_file_field, prepare_queue_task, \
-    run_process_media, new_upload_entry
+from mediagoblin.submit.lib import \
+    check_file_field, submit_media, get_upload_file_limits, \
+    FileUploadLimit, UserUploadLimit, UserPastUploadLimit
 
 _log = logging.getLogger(__name__)
@@ -49,45 +51,46 @@ def post_entry(request):
         _log.debug('File field not found')
         raise BadRequest()
 
-    media_file = request.files['file']
-
-    media_type, media_manager = sniff_media(media_file)
-
-    entry = new_upload_entry(request.user)
-    entry.media_type = unicode(media_type)
-    entry.title = unicode(request.form.get('title')
-                          or splitext(media_file.filename)[0])
-
-    entry.description = unicode(request.form.get('description'))
-    entry.license = unicode(request.form.get('license', ''))
-
-    entry.generate_slug()
-
-    # queue appropriately
-    queue_file = prepare_queue_task(request.app, entry, media_file.filename)
-
-    with queue_file:
-        queue_file.write(request.files['file'].stream.read())
-
-    # Save now so we have this data before kicking off processing
-    entry.save()
-
-    if request.form.get('callback_url'):
-        metadata = request.db.ProcessingMetaData()
-        metadata.media_entry = entry
-        metadata.callback_url = unicode(request.form['callback_url'])
-        metadata.save()
-
-    # Pass off to processing
-    #
-    # (... don't change entry after this point to avoid race
-    # conditions with changes to the document via processing code)
-    feed_url = request.urlgen(
-        'mediagoblin.user_pages.atom_feed',
-        qualified=True, user=request.user.username)
-    run_process_media(entry, feed_url)
-
-    return json_response(get_entry_serializable(entry, request.urlgen))
+    upload_limit, max_file_size = get_upload_file_limits(request.user)
+
+    callback_url = request.form.get('callback_url')
+    if callback_url:
+        callback_url = unicode(callback_url)
+
+    try:
+        entry = submit_media(
+            mg_app=request.app, user=request.user,
+            submitted_file=request.files['file'],
+            filename=request.files['file'].filename,
+            title=unicode(request.form.get('title')),
+            description=unicode(request.form.get('description')),
+            license=unicode(request.form.get('license', '')),
+            upload_limit=upload_limit, max_file_size=max_file_size,
+            callback_url=callback_url)
+
+        return json_response(get_entry_serializable(entry, request.urlgen))
+
+    # Handle upload limit issues
+    except FileUploadLimit:
+        raise BadRequest(
+            _(u'Sorry, the file size is too big.'))
+    except UserUploadLimit:
+        raise BadRequest(
+            _('Sorry, uploading this file will put you over your'
+              ' upload limit.'))
+    except UserPastUploadLimit:
+        raise BadRequest(
+            _('Sorry, you have reached your upload limit.'))
+    except Exception as e:
+        '''
+        This section is intended to catch exceptions raised in
+        mediagoblin.media_types
+        '''
+        if isinstance(e, InvalidFileType) or \
+                isinstance(e, FileTypeNotSupported):
+            raise BadRequest(unicode(e))
+        else:
+            raise
 
 
 @api_auth
--- a/mediagoblin/submit/lib.py
+++ b/mediagoblin/submit/lib.py
@@ -23,7 +23,7 @@ from werkzeug.datastructures import FileStorage
 
 from mediagoblin import mg_globals
 from mediagoblin.tools.text import convert_to_tag_list_of_dicts
-from mediagoblin.db.models import MediaEntry
+from mediagoblin.db.models import MediaEntry, ProcessingMetaData
 from mediagoblin.processing import mark_entry_failed
 from mediagoblin.processing.task import ProcessMedia
 from mediagoblin.notifications import add_comment_subscription
@@ -100,8 +100,9 @@ def submit_media(mg_app, user, submitted_file, filename,
                  title=None, description=None,
                  license=None, tags_string=u"",
                  upload_limit=None, max_file_size=None,
+                 callback_url=None,
                  # If provided we'll do the feed_url update, otherwise ignore
-                 urlgen=None):
+                 urlgen=None,):
     """
     Args:
     - mg_app: The MediaGoblinApp instantiated for this process
@@ -118,6 +119,7 @@ def submit_media(mg_app, user, submitted_file, filename,
       with this entry
     - upload_limit: size in megabytes that's the per-user upload limit
     - max_file_size: maximum size each file can be that's uploaded
+    - callback_url: possible post-hook to call after submission
    - urlgen: if provided, used to do the feed_url update
     """
     if upload_limit and user.uploaded >= upload_limit:
@@ -172,6 +174,14 @@ def submit_media(mg_app, user, submitted_file, filename,
     # Save now so we have this data before kicking off processing
     entry.save()
 
+    # Various "submit to stuff" things, callbackurl and this silly urlgen
+    # thing
+    if callback_url:
+        metadata = ProcessingMetaData()
+        metadata.media_entry = entry
+        metadata.callback_url = callback_url
+        metadata.save()
+
     if urlgen:
         feed_url = urlgen(
             'mediagoblin.user_pages.atom_feed',
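The upshot is that the API plugin and the regular submit view now share one submission code path in mediagoblin.submit.lib. A rough sketch of how any other caller could lean on the same tooling; the submit_for wrapper and the way the app, user, and werkzeug FileStorage objects are obtained are hypothetical, only the imported names come from the diff above:

    # Hypothetical caller sketch built on the names imported in this commit.
    from mediagoblin.submit.lib import (
        submit_media, get_upload_file_limits,
        FileUploadLimit, UserUploadLimit, UserPastUploadLimit)

    def submit_for(app, user, file_storage, title=None, callback_url=None):
        # Per-user quota and per-file size limits, as in the API view above.
        upload_limit, max_file_size = get_upload_file_limits(user)
        try:
            return submit_media(
                mg_app=app, user=user,
                submitted_file=file_storage,
                filename=file_storage.filename,
                title=title,
                upload_limit=upload_limit, max_file_size=max_file_size,
                # Stored in ProcessingMetaData so processing can call back.
                callback_url=callback_url)
        except (FileUploadLimit, UserUploadLimit, UserPastUploadLimit):
            # Each caller decides how to report quota problems to its users.
            raise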