commit ee91c2b88d
Merge remote-tracking branch 'remotes/nyergler/pep8-ification'

Conflicts:
    mediagoblin/db/migrations.py
    mediagoblin/db/models.py
    mediagoblin/user_pages/views.py
    mediagoblin/util.py
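Nearly everything below is mechanical PEP 8 cleanup: spaces around '=' disappear from keyword arguments, `dict.has_key(key)` becomes `key in dict`, one-line class statements gain a proper indented body, over-long lines and comments are wrapped, and two blank lines separate top-level definitions. A minimal sketch of the keyword-argument spacing rule, with an invented stand-in for the real setup_globals() helper:

    def setup_globals(**kwargs):
        """Stand-in helper; only the call style matters for this example."""
        return kwargs


    # Before (flagged by the pep8 tool as E251, spaces around keyword '='):
    #     setup_globals(app = self, db_connection = connection)
    # After: no spaces around '=' when passing keyword arguments.
    config = setup_globals(app='mediagoblin', db_connection=None)
    print(config)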
@@ -94,7 +94,7 @@ class MediaGoblinApp(object):
 # object.
 #######################################################
 
-setup_globals(app = self)
+setup_globals(app=self)
 
 # Workbench *currently* only used by celery, so this only
 # matters in always eager mode :)
@@ -104,7 +104,6 @@ class MediaGoblinApp(object):
 self.middleware = [common.import_component(m)(self)
 for m in middleware.ENABLED_MIDDLEWARE]
 
-
 def __call__(self, environ, start_response):
 request = Request(environ)
 
@@ -59,9 +59,10 @@ class ForgotPassForm(wtforms.Form):
 'Username or email',
 [wtforms.validators.Required()])
 
-def validate_username(form,field):
-if not (re.match(r'^\w+$',field.data) or
-re.match(r'^.+@[^.].*\.[a-z]{2,10}$',field.data, re.IGNORECASE)):
+def validate_username(form, field):
+if not (re.match(r'^\w+$', field.data) or
+re.match(r'^.+@[^.].*\.[a-z]{2,10}$', field.data,
+re.IGNORECASE)):
 raise wtforms.ValidationError(u'Incorrect input')
 
 
@@ -82,4 +83,3 @@ class ChangePassForm(wtforms.Form):
 token = wtforms.HiddenField(
 '',
 [wtforms.validators.Required()])
-
@@ -94,6 +94,7 @@ EMAIL_VERIFICATION_TEMPLATE = (
 u"http://{host}{uri}?"
 u"userid={userid}&token={verification_key}")
 
+
 def send_verification_email(user, request):
 """
 Send the verification email to users to activate their accounts.
@@ -128,6 +129,7 @@ EMAIL_FP_VERIFICATION_TEMPLATE = (
 u"http://{host}{uri}?"
 u"userid={userid}&token={fp_verification_key}")
 
+
 def send_fp_verification_email(user, request):
 """
 Send the verification email to users to change their password.
@@ -151,4 +153,3 @@ def send_fp_verification_email(user, request):
 [user['email']],
 'GNU MediaGoblin - Change forgotten password!',
 rendered_email)
-
@@ -33,7 +33,8 @@ auth_routes = [
 controller='mediagoblin.views:simple_template_render'),
 Route('mediagoblin.auth.forgot_password', '/forgot_password/',
 controller='mediagoblin.auth.views:forgot_password'),
-Route('mediagoblin.auth.verify_forgot_password', '/forgot_password/verify/',
+Route('mediagoblin.auth.verify_forgot_password',
+'/forgot_password/verify/',
 controller='mediagoblin.auth.views:verify_forgot_password'),
 Route('mediagoblin.auth.fp_changed_success',
 '/forgot_password/changed_success/',
@@ -160,7 +160,7 @@ def verify_email(request):
 you are lucky :)
 """
 # If we don't have userid and token parameters, we can't do anything; 404
-if not request.GET.has_key('userid') or not request.GET.has_key('token'):
+if not 'userid' in request.GET or not 'token' in request.GET:
 return render_404(request)
 
 user = request.db.User.find_one(
@@ -253,8 +253,7 @@ def forgot_password(request):
 request, 'mediagoblin.user_pages.user_home',
 user=user['username'])
 
-# do not reveal whether or not there is a matching user, just move along
-
+# do not reveal whether or not there is a matching user
 return redirect(request, 'mediagoblin.auth.fp_email_sent')
 
 return render_to_response(
@@ -328,6 +327,6 @@ def _process_for_token(request):
 formdata = {
 'vars': formdata_vars,
 'has_userid_and_token':
-formdata_vars.has_key('userid') and formdata_vars.has_key('token')}
+'userid' in formdata_vars and 'token' in formdata_vars}
 
 return formdata
@@ -93,8 +93,9 @@ MEDIAENTRY_INDEXES = {
 ('created', DESCENDING)]},
 
 'state_uploader_tags_created': {
-# Indexing on processed?, media uploader, associated tags, and timestamp
-# Used for showing media items matching a tag search, most recent first.
+# Indexing on processed?, media uploader, associated tags, and
+# timestamp Used for showing media items matching a tag
+# search, most recent first.
 'index': [('state', ASCENDING),
 ('uploader', ASCENDING),
 ('tags.slug', DESCENDING),
@@ -14,7 +14,8 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
-import datetime, uuid
+import datetime
+import uuid
 
 from mongokit import Document
 
@@ -67,17 +68,17 @@ class User(Document):
 'username': unicode,
 'email': unicode,
 'created': datetime.datetime,
 'plugin_data': dict, # plugins can dump stuff here.
 'pw_hash': unicode,
 'email_verified': bool,
 'status': unicode,
 'verification_key': unicode,
 'is_admin': bool,
-'url' : unicode,
-'bio' : unicode, # May contain markdown
+'url': unicode,
+'bio': unicode, # May contain markdown
 'bio_html': unicode, # May contain plaintext, or HTML
 'fp_verification_key': unicode, # forgotten password verification key
-'fp_token_expire': datetime.datetime
+'fp_token_expire': datetime.datetime,
 }
 
 required_fields = ['username', 'created', 'pw_hash', 'email']
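Besides the whitespace fixes, the schema edit above adds a trailing comma after the last entry of the multi-line dict ('fp_token_expire': datetime.datetime,), so that appending a field later only touches the new line in a diff. A tiny stand-alone sketch of the style, using placeholder types rather than the real mongokit fields:

    import datetime

    # Illustration only; the real structure dict lives on the mongokit
    # User document shown above.
    structure = {
        'username': str,                       # unicode in the real schema
        'email': str,
        'fp_token_expire': datetime.datetime,  # note the trailing comma
    }
    print(sorted(structure))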
@@ -182,11 +183,11 @@ class MediaEntry(Document):
 'title': unicode,
 'slug': unicode,
 'created': datetime.datetime,
 'description': unicode, # May contain markdown/up
 'description_html': unicode, # May contain plaintext, or HTML
 'media_type': unicode,
 'media_data': dict, # extra data relevant to this media_type
 'plugin_data': dict, # plugins can dump stuff here.
 'tags': [dict],
 'state': unicode,
 
@@ -218,7 +219,8 @@ class MediaEntry(Document):
 return self.db.MediaComment.find({
 'media_entry': self['_id']}).sort('created', DESCENDING)
 
-def get_display_media(self, media_map, fetch_order=common.DISPLAY_IMAGE_FETCHING_ORDER):
+def get_display_media(self, media_map,
+fetch_order=common.DISPLAY_IMAGE_FETCHING_ORDER):
 """
 Find the best media for display.
 
@@ -271,7 +273,7 @@ class MediaEntry(Document):
 """
 Provide a url to the previous entry from this user, if there is one
 """
-cursor = self.db.MediaEntry.find({'_id' : {"$gt": self['_id']},
+cursor = self.db.MediaEntry.find({'_id': {"$gt": self['_id']},
 'uploader': self['uploader'],
 'state': 'processed'}).sort(
 '_id', ASCENDING).limit(1)
@@ -284,7 +286,7 @@ class MediaEntry(Document):
 """
 Provide a url to the next entry from this user, if there is one
 """
-cursor = self.db.MediaEntry.find({'_id' : {"$lt": self['_id']},
+cursor = self.db.MediaEntry.find({'_id': {"$lt": self['_id']},
 'uploader': self['uploader'],
 'state': 'processed'}).sort(
 '_id', DESCENDING).limit(1)
@@ -351,4 +353,3 @@ def register_models(connection):
 Register all models in REGISTER_MODELS with this connection.
 """
 connection.register(REGISTER_MODELS)
-
@@ -122,7 +122,8 @@ def remove_deprecated_indexes(database, deprecated_indexes=DEPRECATED_INDEXES):
 # Don't set this yourself! RegisterMigration will automatically fill
 # this with stuff via decorating methods in migrations.py
 
-class MissingCurrentMigration(Exception): pass
+class MissingCurrentMigration(Exception):
+pass
 
 
 MIGRATIONS = {}
@@ -147,7 +148,7 @@ class RegisterMigration(object):
 """
 def __init__(self, migration_number, migration_registry=MIGRATIONS):
 assert migration_number > 0, "Migration number must be > 0!"
-assert not migration_registry.has_key(migration_number), \
+assert migration_number not in migration_registry, \
 "Duplicate migration numbers detected! That's not allowed!"
 
 self.migration_number = migration_number
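PEP 8 discourages compound statements that put the body on the same line as the header, which is why `class MissingCurrentMigration(Exception): pass` above is split across two lines (the Error and ImproperlyConfigured classes in mediagoblin's init module get the same treatment further down). A small sketch, with the usage at the end invented for illustration:

    # Discouraged one-liner:
    #     class MissingCurrentMigration(Exception): pass
    # Preferred: body on its own indented line.
    class MissingCurrentMigration(Exception):
        pass


    try:
        raise MissingCurrentMigration('no current migration recorded')
    except MissingCurrentMigration as exc:
        print(exc)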
@@ -119,6 +119,7 @@ def get_user_media_entry(controller):
 
 return _make_safe(wrapper, controller)
 
+
 def get_media_entry_by_id(controller):
 """
 Pass in a MediaEntry based off of a url component
@@ -138,4 +139,3 @@ def get_media_entry_by_id(controller):
 return controller(request, media=media, *args, **kwargs)
 
 return _make_safe(wrapper, controller)
-
@@ -13,5 +13,3 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-
@@ -120,7 +120,7 @@ def edit_attachments(request, media):
 name=request.POST['attachment_name'] \
 or request.POST['attachment_file'].filename,
 filepath=attachment_public_filepath,
-created=datetime.utcnow()
+created=datetime.utcnow(),
 ))
 
 media.save()
@@ -29,7 +29,7 @@ SUBCOMMAND_MAP = {
 'setup': 'mediagoblin.gmg_commands.migrate:migrate_parser_setup',
 'func': 'mediagoblin.gmg_commands.migrate:migrate',
 'help': 'Apply all unapplied bulk migrations to the database'},
-'adduser':{
+'adduser': {
 'setup': 'mediagoblin.gmg_commands.users:adduser_parser_setup',
 'func': 'mediagoblin.gmg_commands.users:adduser',
 'help': 'Creates an user'},
@@ -68,7 +68,7 @@ def main_cli():
 
 subparsers = parser.add_subparsers(help='sub-command help')
 for command_name, command_struct in SUBCOMMAND_MAP.iteritems():
-if command_struct.has_key('help'):
+if 'help' in command_struct:
 subparser = subparsers.add_parser(
 command_name, help=command_struct['help'])
 else:
@@ -94,4 +94,3 @@ def main_cli():
 
 if __name__ == '__main__':
 main_cli()
-
@@ -226,7 +226,8 @@ def env_export(args):
 '''
 if args.cache_path:
 if os.path.exists(args.cache_path):
-_log.error('The cache directory must not exist before you run this script')
+_log.error('The cache directory must not exist '
+'before you run this script')
 _log.error('Cache directory: {0}'.format(args.cache_path))
 
 return False
@@ -38,7 +38,7 @@ def adduser(args):
 db = mg_globals.database
 users_with_username = \
 db.User.find({
-'username': args.username.lower()
+'username': args.username.lower(),
 }).count()
 
 if users_with_username:
@@ -68,7 +68,7 @@ def makeadmin(args):
 
 db = mg_globals.database
 
-user = db.User.one({'username':unicode(args.username.lower())})
+user = db.User.one({'username': unicode(args.username.lower())})
 if user:
 user['is_admin'] = True
 user.save()
@@ -91,11 +91,10 @@ def changepw(args):
 
 db = mg_globals.database
 
-user = db.User.one({'username':unicode(args.username.lower())})
+user = db.User.one({'username': unicode(args.username.lower())})
 if user:
 user['pw_hash'] = auth_lib.bcrypt_gen_password_hash(args.password)
 user.save()
 print 'Password successfully changed'
 else:
 print 'The user doesn\'t exist'
-
@@ -29,8 +29,12 @@ from mediagoblin.workbench import WorkbenchManager
 from mediagoblin.storage import storage_system_from_config
 
 
-class Error(Exception): pass
-class ImproperlyConfigured(Error): pass
+class Error(Exception):
+pass
+
+
+class ImproperlyConfigured(Error):
+pass
 
 
 def setup_global_and_app_config(config_path):
@@ -76,8 +80,8 @@ def setup_database():
 "in fact they appear to be from the future?!")
 
 setup_globals(
-db_connection = connection,
-database = db)
+db_connection=connection,
+database=db)
 
 return connection, db
 
@@ -99,10 +103,10 @@ def get_jinja_loader(user_template_path=None):
 
 
 def get_staticdirector(app_config):
-if app_config.has_key('direct_remote_path'):
+if 'direct_remote_path' in app_config:
 return staticdirect.RemoteStaticDirect(
 app_config['direct_remote_path'].strip())
-elif app_config.has_key('direct_remote_paths'):
+elif 'direct_remote_paths' in app_config:
 direct_remote_path_lines = app_config[
 'direct_remote_paths'].strip().splitlines()
 return staticdirect.MultiRemoteStaticDirect(
@@ -126,8 +130,8 @@ def setup_storage():
 queue_store = storage_system_from_config(global_config[key_long])
 
 setup_globals(
-public_store = public_store,
-queue_store = queue_store)
+public_store=public_store,
+queue_store=queue_store)
 
 return public_store, queue_store
 
@@ -137,7 +141,7 @@ def setup_workbench():
 
 workbench_manager = WorkbenchManager(app_config['workbench_path'])
 
-setup_globals(workbench_manager = workbench_manager)
+setup_globals(workbench_manager=workbench_manager)
 
 
 def setup_beaker_cache():
@@ -40,7 +40,7 @@ def setup_celery_from_config(app_config, global_config,
 - set_environ: if set, this will CELERY_CONFIG_MODULE to the
 settings_module
 """
-if global_config.has_key('celery'):
+if 'celery' in global_config:
 celery_conf = global_config['celery']
 else:
 celery_conf = {}
@@ -49,16 +49,16 @@ def setup_celery_from_config(app_config, global_config,
 
 # set up mongodb stuff
 celery_settings['CELERY_RESULT_BACKEND'] = 'mongodb'
-if not celery_settings.has_key('BROKER_BACKEND'):
+if 'BROKER_BACKEND' not in celery_settings:
 celery_settings['BROKER_BACKEND'] = 'mongodb'
 
 celery_mongo_settings = {}
 
-if app_config.has_key('db_host'):
+if 'db_host' in app_config:
 celery_mongo_settings['host'] = app_config['db_host']
 if celery_settings['BROKER_BACKEND'] == 'mongodb':
 celery_settings['BROKER_HOST'] = app_config['db_host']
-if app_config.has_key('db_port'):
+if 'db_port' in app_config:
 celery_mongo_settings['port'] = app_config['db_port']
 if celery_settings['BROKER_BACKEND'] == 'mongodb':
 celery_settings['BROKER_PORT'] = app_config['db_port']
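The has_key() replacements above and throughout this branch follow the usual advice: dict.has_key(key) exists only on Python 2 dictionaries and was removed in Python 3, while the `key in dict` membership test works everywhere and pairs naturally with `not in`. A short stand-alone sketch (the configuration values here are invented):

    # app_config stands in for the real configuration dict.
    app_config = {'db_host': 'localhost', 'db_port': 27017}

    # Python 2 only (removed in Python 3):
    #     if app_config.has_key('db_host'): ...

    # Preferred membership test:
    if 'db_host' in app_config:
        print('host: %s' % app_config['db_host'])
    if 'celery' not in app_config:
        print('no celery section; falling back to defaults')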
@@ -25,4 +25,3 @@ tag_routes = [
 Route('mediagoblin.listings.tag_atom_feed', "/{tag}/atom/",
 controller="mediagoblin.listings.views:tag_atom_feed"),
 ]
-
@@ -64,6 +64,7 @@ def tag_listing(request, page):
 
 ATOM_DEFAULT_NR_OF_UPDATED_ITEMS = 15
 
+
 def tag_atom_feed(request):
 """
 generates the atom feed with the tag images
@@ -20,11 +20,13 @@ SUCCESS = 'success'
 WARNING = 'warning'
 ERROR = 'error'
 
+
 def add_message(request, level, text):
 messages = request.session.setdefault('messages', [])
 messages.append({'level': level, 'text': text})
 request.session.save()
 
+
 def fetch_messages(request, clear_from_session=True):
 messages = request.session.get('messages')
 if messages and clear_from_session:
@@ -14,6 +14,7 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+
 class NoOpMiddleware(object):
 
 def __init__(self, mg_app):
@@ -66,9 +66,10 @@ class ProcessMedia(Task):
 """
 If the processing failed we should mark that in the database.
 
-Assuming that the exception raised is a subclass of BaseProcessingFail,
-we can use that to get more information about the failure and store that
-for conveying information to users about the failure, etc.
+Assuming that the exception raised is a subclass of
+BaseProcessingFail, we can use that to get more information
+about the failure and store that for conveying information to
+users about the failure, etc.
 """
 entry_id = args[0]
 mark_entry_failed(entry_id, exc)
@@ -81,10 +82,10 @@ def mark_entry_failed(entry_id, exc):
 """
 Mark a media entry as having failed in its conversion.
 
-Uses the exception that was raised to mark more information. If the
-exception is a derivative of BaseProcessingFail then we can store extra
-information that can be useful for users telling them why their media failed
-to process.
+Uses the exception that was raised to mark more information. If
+the exception is a derivative of BaseProcessingFail then we can
+store extra information that can be useful for users telling them
+why their media failed to process.
 
 Args:
 - entry_id: The id of the media entry
@@ -161,7 +162,8 @@ def process_image(entry):
 with queued_file:
 original_filepath = create_pub_filepath(entry, queued_filepath[-1])
 
-with mgg.public_store.get_file(original_filepath, 'wb') as original_file:
+with mgg.public_store.get_file(original_filepath, 'wb') \
+as original_file:
 original_file.write(queued_file.read())
 
 mgg.queue_store.delete_file(queued_filepath)
@@ -16,6 +16,7 @@
 
 from mediagoblin.tools.translate import lazy_pass_to_ugettext as _
 
+
 class BaseProcessingFail(Exception):
 """
 Base exception that all other processing failure messages should
@@ -28,18 +28,18 @@ import urlparse
 import pkg_resources
 import urlparse
 
 
 class StaticDirect(object):
 def __init__(self):
 self.cache = {}
 
 def __call__(self, filepath):
-if self.cache.has_key(filepath):
+if filepath in self.cache:
 return self.cache[filepath]
 
 static_direction = self.cache[filepath] = self.get(filepath)
 return static_direction
 
 
 def get(self, filepath):
 # should be implemented by the individual staticdirector
 pass
@@ -27,6 +27,7 @@ from mediagoblin.storage import StorageInterface, clean_listy_filepath
 import cloudfiles
 import mimetypes
 
+
 class CloudFilesStorage(StorageInterface):
 '''
 OpenStack/Rackspace Cloud's Swift/CloudFiles support
@@ -13,5 +13,3 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-
@@ -16,9 +16,9 @@
 
 from mimetypes import guess_type
 
 
 ALLOWED = ['image/jpeg', 'image/png', 'image/tiff', 'image/gif']
 
 
 def check_filetype(posted_file):
 if not guess_type(posted_file.filename)[0] in ALLOWED:
 return False
@@ -39,7 +39,7 @@ def submit_start(request):
 submit_form = submit_forms.SubmitStartForm(request.POST)
 
 if request.method == 'POST' and submit_form.validate():
-if not (request.POST.has_key('file')
+if not ('file' in request.POST
 and isinstance(request.POST['file'], FieldStorage)
 and request.POST['file'].file):
 submit_form.file.errors.append(
@@ -61,7 +61,7 @@ def submit_start(request):
 entry['description_html'] = cleaned_markdown_conversion(
 entry['description'])
 
 entry['media_type'] = u'image' # heh
 entry['uploader'] = request.user['_id']
 
 # Process the user's folksonomy "tags"
@@ -89,8 +89,10 @@ def submit_start(request):
 
 # We generate this ourselves so we know what the taks id is for
 # retrieval later.
-# (If we got it off the task's auto-generation, there'd be a risk of
-# a race condition when we'd save after sending off the task)
+# (If we got it off the task's auto-generation, there'd be
+# a risk of a race condition when we'd save after sending
+# off the task)
 task_id = unicode(uuid.uuid4())
 entry['queued_task_id'] = task_id
 
@@ -112,8 +114,8 @@ def submit_start(request):
 # expect a lot of users to run things in this way we have to
 # capture stuff here.
 #
-# ... not completely the diaper pattern because the exception is
-# re-raised :)
+# ... not completely the diaper pattern because the
+# exception is re-raised :)
 mark_entry_failed(entry[u'_id'], exc)
 # re-raise the exception
 raise
@@ -121,7 +123,7 @@ def submit_start(request):
 add_message(request, SUCCESS, _('Woohoo! Submitted!'))
 
 return redirect(request, "mediagoblin.user_pages.user_home",
-user = request.user['username'])
+user=request.user['username'])
 
 return render_to_response(
 request,
@@ -21,6 +21,7 @@ DISPLAY_IMAGE_FETCHING_ORDER = [u'medium', u'original', u'thumb']
 global TESTS_ENABLED
 TESTS_ENABLED = False
 
+
 def import_component(import_string):
 """
 Import a module component defined by STRING. Probably a method,
@@ -49,6 +49,7 @@ from mediagoblin.tools import common
 EMAIL_TEST_INBOX = []
 EMAIL_TEST_MBOX_INBOX = []
 
+
 class FakeMhost(object):
 """
 Just a fake mail host so we can capture and test messages
@@ -63,12 +64,14 @@ class FakeMhost(object):
 'to': to_addrs,
 'message': message})
 
+
 def _clear_test_inboxes():
 global EMAIL_TEST_INBOX
 global EMAIL_TEST_MBOX_INBOX
 EMAIL_TEST_INBOX = []
 EMAIL_TEST_MBOX_INBOX = []
 
+
 ### ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 ### </Special email test stuff>
 ### ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -19,8 +19,10 @@ import copy
 from math import ceil, floor
 from itertools import izip, count
 
+
 PAGINATION_DEFAULT_PER_PAGE = 30
 
+
 class Pagination(object):
 """
 Pagination class for mongodb queries.
@@ -38,8 +40,8 @@ class Pagination(object):
 - page: requested page
 - per_page: number of objects per page
 - cursor: db cursor
-- jump_to_id: ObjectId, sets the page to the page containing the object
-with _id == jump_to_id.
+- jump_to_id: ObjectId, sets the page to the page containing the
+object with _id == jump_to_id.
 """
 self.page = page
 self.per_page = per_page
@@ -17,12 +17,14 @@
 from webob import Response, exc
 from mediagoblin.tools.template import render_template
 
+
 def render_to_response(request, template, context, status=200):
 """Much like Django's shortcut.render()"""
 return Response(
 render_template(request, template, context),
 status=status)
 
+
 def render_404(request):
 """
 Render a 404.
@@ -30,6 +32,7 @@ def render_404(request):
 return render_to_response(
 request, 'mediagoblin/404.html', {}, status=400)
 
+
 def redirect(request, *args, **kwargs):
 """Returns a HTTPFound(), takes a request and then urlgen params"""
 
@@ -17,15 +17,16 @@
 from math import ceil
 import jinja2
 from babel.localedata import exists
-from babel.support import LazyProxy
 from mediagoblin import mg_globals
 from mediagoblin import messages
 from mediagoblin.tools import common
 from mediagoblin.tools.translate import setup_gettext
 from mediagoblin.middleware.csrf import render_csrf_form_token
 
+
 SETUP_JINJA_ENVS = {}
 
+
 def get_jinja_env(template_loader, locale):
 """
 Set up the Jinja environment,
@@ -60,6 +61,7 @@ def get_jinja_env(template_loader, locale):
 
 return template_env
 
+
 # We'll store context information here when doing unit tests
 TEMPLATE_TEST_CONTEXT = {}
 
@@ -87,6 +89,7 @@ def clear_test_template_context():
 global TEMPLATE_TEST_CONTEXT
 TEMPLATE_TEST_CONTEXT = {}
 
+
 def gridify_list(this_list, num_cols=5):
 """
 Generates a list of lists where each sub-list's length depends on
@@ -21,6 +21,7 @@ from lxml.html.clean import Cleaner
 from mediagoblin import mg_globals
 from mediagoblin.tools import url
 
+
 # A super strict version of the lxml.html cleaner class
 HTML_CLEANER = Cleaner(
 scripts=True,
@@ -42,6 +43,7 @@ HTML_CLEANER = Cleaner(
 host_whitelist=(),
 whitelist_tags=set([]))
 
+
 def clean_html(html):
 # clean_html barfs on an empty string
 if not html:
@@ -49,6 +51,7 @@ def clean_html(html):
 
 return HTML_CLEANER.clean_html(html)
 
+
 def convert_to_tag_list_of_dicts(tag_string):
 """
 Filter input from incoming string containing user tags,
@@ -73,6 +76,7 @@ def convert_to_tag_list_of_dicts(tag_string):
 'slug': url.slugify(tag.strip())})
 return taglist
 
+
 def media_tags_as_string(media_entry_tags):
 """
 Generate a string from a media item's tags, stored as a list of dicts
@@ -85,9 +89,11 @@ def media_tags_as_string(media_entry_tags):
 [tag['name'] for tag in media_entry_tags])
 return media_tag_string
 
+
 TOO_LONG_TAG_WARNING = \
 u'Tags must be shorter than %s characters. Tags that are too long: %s'
 
+
 def tag_length_validator(form, field):
 """
 Make sure tags do not exceed the maximum tag length.
@@ -105,6 +111,7 @@ def tag_length_validator(form, field):
 
 MARKDOWN_INSTANCE = markdown.Markdown(safe_mode='escape')
 
+
 def cleaned_markdown_conversion(text):
 """
 Take a block of text, run it through MarkDown, and clean its HTML.
@@ -17,8 +17,10 @@
 import re
 import translitcodec
 
+
 _punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
 
+
 def slugify(text, delim=u'-'):
 """
 Generates an ASCII-only slug. Taken from http://flask.pocoo.org/snippets/5/
@@ -13,5 +13,3 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-
@@ -67,6 +67,7 @@ def user_home(request, page):
 'media_entries': media_entries,
 'pagination': pagination})
 
+
 @uses_pagination
 def user_gallery(request, page):
 """'Gallery' of a User()"""
@@ -96,6 +97,7 @@ def user_gallery(request, page):
 
 MEDIA_COMMENTS_PER_PAGE = 50
 
+
 @get_user_media_entry
 @uses_pagination
 def media_home(request, media, page, **kwargs):
@@ -192,6 +194,7 @@ def media_confirm_delete(request, media):
 
 ATOM_DEFAULT_NR_OF_UPDATED_ITEMS = 15
 
+
 def atom_feed(request):
 """
 generates the atom feed with the newest images
@@ -20,6 +20,7 @@ from mediagoblin.tools.response import render_to_response
 from mediagoblin.db.util import DESCENDING
 from mediagoblin.decorators import uses_pagination
 
+
 @uses_pagination
 def root_view(request, page):
 cursor = request.db.MediaEntry.find(
@@ -42,8 +42,10 @@ class Workbench(object):
 
 def __unicode__(self):
 return unicode(self.dir)
+
 def __str__(self):
 return str(self.dir)
+
 def __repr__(self):
 return repr(self.dir)
 
setup.py (12 changed lines)
@@ -29,16 +29,17 @@ def get_version():
 if mo:
 return mo.group(1)
 else:
-raise RuntimeError("Unable to find version string in %s." % VERSIONFILE)
+raise RuntimeError("Unable to find version string in %s." %
+VERSIONFILE)
 
 
 setup(
-name = "mediagoblin",
-version = get_version(),
+name="mediagoblin",
+version=get_version(),
 packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
 zip_safe=False,
 # scripts and dependencies
-install_requires = [
+install_requires=[
 'setuptools',
 'PasteScript',
 'beaker',
@@ -66,7 +67,7 @@ setup(
 # 'lxml',
 ],
 test_suite='nose.collector',
-entry_points = """\
+entry_points="""\
 [console_scripts]
 gmg = mediagoblin.gmg_commands:main_cli
 pybabel = mediagoblin.babel.messages.frontend:main
@@ -83,7 +84,6 @@ setup(
 [babel.extractors]
 jinja2 = jinja2.ext:babel_extract
 """,
-
 license='AGPLv3',
 author='Free Software Foundation and contributors',
 author_email='cwebber@gnu.org',