Feature 477 - Support Cloud Files public storage

*   Added configuration options to mediagoblin.ini
*   process_media now supports the almost-file-like objects
    returned by python-cloudfiles by wrapping them in a
    contextlib.contextmanager-decorated function.
*   storage now has the CloudFilesStorage class
*   New dependency added to setup.py; `python-cloudfiles`
Joar Wandborg 2011-08-04 01:32:34 +02:00
parent 4d74812dfc
commit 851c51a354
4 changed files with 94 additions and 4 deletions

View File

@@ -1,7 +1,33 @@
[mediagoblin]
##
# BEGIN CloudFiles public storage
##
# Uncomment the following line and fill in your details to enable Cloud Files
# (or OpenStack Object Storage [Swift])
#
# publicstore_storage_class = mediagoblin.storage:CloudFilesStorage
publicstore_cloudfiles_user = user
publicstore_cloudfiles_api_key = 1a2b3c4d5e6f7g8h9i
publicstore_cloudfiles_container = mediagoblin
# Only applicable if you run MediaGoblin on a Rackspace Cloud Server:
# it routes traffic through the internal Rackspace network, which
# means that the bandwidth between the server and Cloud Files is free.
publicstore_cloudfiles_use_servicenet = false
##
# END CloudFiles
##
##
# BEGIN filesystem public storage
##
publicstore_base_dir = %(here)s/user_dev/media/public
publicstore_base_url = /mgoblin_media/
##
# END
##
queuestore_base_dir = %(here)s/user_dev/media/queue
direct_remote_path = /mgoblin_static/
email_sender_address = "notice@mediagoblin.example.org"
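
As an illustration only (not code from this commit), the sketch below shows how the publicstore_cloudfiles_* keys line up with the kwargs.get('cloudfiles_...') lookups in the new CloudFilesStorage class further down; the prefix-stripping step is an assumption about how the config would be wired in.

# Hypothetical illustration: mapping mediagoblin.ini keys onto the kwargs
# that CloudFilesStorage.__init__ reads.  The prefix handling here is an
# assumption; the actual MediaGoblin wiring is not part of this diff.
from mediagoblin.storage import CloudFilesStorage

config = {
    'publicstore_cloudfiles_user': 'user',
    'publicstore_cloudfiles_api_key': '1a2b3c4d5e6f7g8h9i',
    'publicstore_cloudfiles_container': 'mediagoblin',
    'publicstore_cloudfiles_use_servicenet': 'false',
}

# Strip the 'publicstore_' prefix so the keys become 'cloudfiles_user', etc.
storage_kwargs = dict(
    (key[len('publicstore_'):], value)
    for key, value in config.items()
    if key.startswith('publicstore_cloudfiles_'))

# Instantiation opens a Cloud Files connection, so real credentials are
# needed for this line to succeed.
public_store = CloudFilesStorage(**storage_kwargs)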

View File

@@ -19,6 +19,7 @@ from mediagoblin.db.util import ObjectId
from celery.task import task
from mediagoblin import mg_globals as mgg
from contextlib import contextmanager

THUMB_SIZE = 180, 180
@@ -31,6 +32,12 @@ def create_pub_filepath(entry, filename):
             unicode(entry['_id']),
             filename])
@contextmanager
def closing(callback):
    try:
        yield callback
    finally:
        pass


@task
def process_media_initial(media_id):
@@ -53,7 +60,7 @@ def process_media_initial(media_id):
    thumb_filepath = create_pub_filepath(entry, 'thumbnail.jpg')

    thumb_file = mgg.public_store.get_file(thumb_filepath, 'w')
-   with thumb_file:
+   with closing(thumb_file):
        thumb.save(thumb_file, "JPEG", quality=90)
""" """
@ -73,7 +80,7 @@ def process_media_initial(media_id):
medium_filepath = create_pub_filepath(entry, 'medium.jpg') medium_filepath = create_pub_filepath(entry, 'medium.jpg')
medium_file = mgg.public_store.get_file(medium_filepath, 'w') medium_file = mgg.public_store.get_file(medium_filepath, 'w')
with medium_file: with closing(medium_file):
medium.save(medium_file, "JPEG", quality=90) medium.save(medium_file, "JPEG", quality=90)
medium_processed = True medium_processed = True
@@ -84,7 +91,7 @@ def process_media_initial(media_id):
    with queued_file:
        original_filepath = create_pub_filepath(entry, queued_filepath[-1])

-       with mgg.public_store.get_file(original_filepath, 'wb') as original_file:
+       with closing(mgg.public_store.get_file(original_filepath, 'wb')) as original_file:
            original_file.write(queued_file.read())

    mgg.queue_store.delete_file(queued_filepath)
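
The closing() helper above exists because python-cloudfiles hands back objects that are only almost file-like: they lack __enter__/__exit__, so a bare `with thumb_file:` raises AttributeError. A standalone sketch of the same pattern follows, with a FakeCloudObject class invented purely for illustration:

from contextlib import contextmanager

@contextmanager
def closing(callback):
    # Yield the wrapped object so it can be used in a `with` block even
    # though it lacks __enter__/__exit__; nothing is done on exit.
    try:
        yield callback
    finally:
        pass

class FakeCloudObject(object):
    """Stand-in for a python-cloudfiles storage object (illustration only)."""
    def write(self, data):
        print('wrote %d bytes' % len(data))

# `with FakeCloudObject():` would raise AttributeError, but this works:
with closing(FakeCloudObject()) as dest_file:
    dest_file.write('...image data...')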

View File

@@ -19,6 +19,7 @@ import re
import shutil
import urlparse
import uuid
import cloudfiles

from werkzeug.utils import secure_filename
@@ -161,6 +162,61 @@ class StorageInterface(object):
            dest_file.write(source_file.read())

class CloudFilesStorage(StorageInterface):
    def __init__(self, **kwargs):
        self.param_container = kwargs.get('cloudfiles_container')
        self.param_user = kwargs.get('cloudfiles_user')
        self.param_api_key = kwargs.get('cloudfiles_api_key')
        self.param_host = kwargs.get('cloudfiles_host')
        self.param_use_servicenet = kwargs.get('cloudfiles_use_servicenet')

        if not self.param_host:
            print('No CloudFiles host URL specified, defaulting to Rackspace US')

        self.connection = cloudfiles.get_connection(
            username=self.param_user,
            api_key=self.param_api_key,
            servicenet=self.param_use_servicenet in ('true', True))

        try:
            self.container = self.connection.get_container(
                self.param_container)
        except cloudfiles.errors.NoSuchContainer:
            self.container = self.connection.create_container(
                self.param_container)
            self.container.make_public(
                ttl=60 * 60 * 2)

    def _resolve_filepath(self, filepath):
        return '-'.join(
            clean_listy_filepath(filepath))

    def file_exists(self, filepath):
        try:
            self.container.get_object(
                self._resolve_filepath(filepath))
            return True
        except cloudfiles.errors.NoSuchObject:
            return False

    def get_file(self, filepath, mode='r'):
        try:
            obj = self.container.get_object(
                self._resolve_filepath(filepath))
        except cloudfiles.errors.NoSuchObject:
            obj = self.container.create_object(
                self._resolve_filepath(filepath))

        return obj

    def delete_file(self, filepath):
        # TODO: Also delete unused directories if empty (safely, with
        # checks to avoid race conditions).
        self.container.delete_object(
            self._resolve_filepath(filepath))

    def file_url(self, filepath):
        return self.get_file(filepath).public_uri()

class BasicFileStorage(StorageInterface):
    """
    Basic local filesystem implementation of storage API
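
A hedged usage sketch of the new backend, using only the methods defined above; the credentials are placeholders and a real call needs a reachable Cloud Files (or OpenStack Swift) account:

from mediagoblin.storage import CloudFilesStorage

# Placeholder credentials for illustration only.
storage = CloudFilesStorage(
    cloudfiles_user='user',
    cloudfiles_api_key='1a2b3c4d5e6f7g8h9i',
    cloudfiles_container='mediagoblin',
    cloudfiles_use_servicenet='false')

filepath = ['media_entries', 'some_entry_id', 'thumbnail.jpg']

# get_file() transparently creates the object if it does not exist yet;
# the listy filepath is flattened by _resolve_filepath() into one key.
thumb = storage.get_file(filepath, 'w')
thumb.write('...jpeg data...')

print(storage.file_exists(filepath))  # True once the object exists
print(storage.file_url(filepath))     # public CDN URI for the object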

View File

@@ -44,6 +44,7 @@ setup(
        'webtest',
        'ConfigObj',
        'Markdown',
        'python-cloudfiles',
        ## For now we're expecting that users will install this from
        ## their package managers.
        # 'lxml',
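
With python-cloudfiles now listed in setup.py, a short smoke test (placeholder credentials again) confirms the cloudfiles module and the get_connection() call used in storage.py are available:

# Smoke test for the new python-cloudfiles dependency; the credentials are
# placeholders, so get_connection() will only succeed against a real account.
import cloudfiles

connection = cloudfiles.get_connection(
    username='user',
    api_key='1a2b3c4d5e6f7g8h9i',
    servicenet=False)
print([container.name for container in connection.get_all_containers()])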