Apply pyupgrade --py3-plus to remove Python 2 compatibility code.

Ben Sturmfels 2021-03-05 23:12:19 +11:00
parent 5f3a782fef
commit dec47c7102
158 changed files with 1353 additions and 1391 deletions
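pyupgrade rewrites source files in place to use the idioms available on the targeted Python version, so a change of this size is produced mechanically by running the tool with its --py3-plus flag over the project's Python files and committing the result, rather than by hand-editing 158 files. Below is a minimal before/after sketch of the rewrites that recur throughout the hunks; the Widget class is illustrative only, not taken from the MediaGoblin code.

    # Hypothetical Python 2/3 compatible code of the kind this commit removes.
    import six

    class Widget(object):
        def describe(self):
            if not self.has_privilege(u'admin'):
                return u"Widget '{0}' ({1})".format(self.id, six.text_type(self.kind))
            return super(Widget, self).describe()

    # The same code after running pyupgrade --py3-plus. Note that the now-unused
    # "import six" is not removed automatically; that is a separate cleanup.
    class Widget:
        def describe(self):
            if not self.has_privilege('admin'):
                return "Widget '{}' ({})".format(self.id, str(self.kind))
            return super().describe()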

View File

@@ -3,15 +3,12 @@ import warnings
 import six
-if six.PY3:
-    from email.mime.text import MIMEText
-else:
-    from email.MIMEText import MIMEText
+from email.mime.text import MIMEText
 def encode_to_utf8(method):
     def wrapper(self):
-        if six.PY2 and isinstance(method(self), six.text_type):
+        if six.PY2 and isinstance(method(self), str):
             return method(self).encode('utf-8')
         return method(self)
     functools.update_wrapper(wrapper, method, ['__name__', '__doc__'])

View File

@@ -36,7 +36,7 @@ def user_has_privilege(privilege_name):
     @require_active_login
     def wrapper(request, *args, **kwargs):
         if not request.user.has_privilege(privilege_name):
-            error = "User '{0}' needs '{1}' privilege".format(
+            error = "User '{}' needs '{}' privilege".format(
                 request.user.username,
                 privilege_name
             )

View File

@@ -62,7 +62,7 @@ def profile_endpoint(request):
     if user is None:
         username = request.matchdict["username"]
         return json_error(
-            "No such 'user' with username '{0}'".format(username),
+            "No such 'user' with username '{}'".format(username),
             status=404
         )
@@ -77,7 +77,7 @@ def user_endpoint(request):
     if user is None:
         username = request.matchdict["username"]
         return json_error(
-            "No such 'user' with username '{0}'".format(username),
+            "No such 'user' with username '{}'".format(username),
             status=404
         )
@@ -90,14 +90,14 @@ def user_endpoint(request):
 @oauth_required
 @csrf_exempt
-@user_has_privilege(u'uploader')
+@user_has_privilege('uploader')
 def uploads_endpoint(request):
     """ Endpoint for file uploads """
     username = request.matchdict["username"]
     requested_user = LocalUser.query.filter(LocalUser.username==username).first()
     if requested_user is None:
-        return json_error("No such 'user' with id '{0}'".format(username), 404)
+        return json_error("No such 'user' with id '{}'".format(username), 404)
     if request.method == "POST":
         # Ensure that the user is only able to upload to their own
@@ -123,7 +123,7 @@ def uploads_endpoint(request):
         if not filenames:
             return json_error('Unknown mimetype: {}'.format(mimetype),
                               status=415)
-        filename = 'unknown{0}'.format(filenames[0])
+        filename = 'unknown{}'.format(filenames[0])
         file_data = FileStorage(
             stream=io.BytesIO(request.data),
@@ -153,13 +153,13 @@ def inbox_endpoint(request, inbox=None):
     user = LocalUser.query.filter(LocalUser.username==username).first()
     if user is None:
-        return json_error("No such 'user' with id '{0}'".format(username), 404)
+        return json_error("No such 'user' with id '{}'".format(username), 404)
     # Only the user who's authorized should be able to read their inbox
     if user.id != request.user.id:
         return json_error(
-            "Only '{0}' can read this inbox.".format(user.username),
+            "Only '{}' can read this inbox.".format(user.username),
             403
         )
@@ -190,7 +190,7 @@ def inbox_endpoint(request, inbox=None):
     # build the inbox feed
     feed = {
-        "displayName": "Activities for {0}".format(user.username),
+        "displayName": "Activities for {}".format(user.username),
         "author": user.serialize(request),
         "objectTypes": ["activity"],
         "url": request.base_url,
@@ -237,7 +237,7 @@ def feed_endpoint(request, outbox=None):
     # check if the user exists
     if requested_user is None:
-        return json_error("No such 'user' with id '{0}'".format(username), 404)
+        return json_error("No such 'user' with id '{}'".format(username), 404)
     if request.data:
         data = json.loads(request.data.decode())
@@ -270,7 +270,7 @@ def feed_endpoint(request, outbox=None):
         if obj.get("objectType", None) == "comment":
             # post a comment
-            if not request.user.has_privilege(u'commenter'):
+            if not request.user.has_privilege('commenter'):
                 return json_error(
                     "Privilege 'commenter' required to comment.",
                     status=403
@@ -313,7 +313,7 @@ def feed_endpoint(request, outbox=None):
             if media is None:
                 return json_response(
-                    "No such 'image' with id '{0}'".format(media_id),
+                    "No such 'image' with id '{}'".format(media_id),
                     status=404
                 )
@@ -326,7 +326,7 @@ def feed_endpoint(request, outbox=None):
             if not media.unserialize(data["object"]):
                 return json_error(
-                    "Invalid 'image' with id '{0}'".format(media_id)
+                    "Invalid 'image' with id '{}'".format(media_id)
                 )
@@ -346,7 +346,7 @@ def feed_endpoint(request, outbox=None):
             # Oh no! We don't know about this type of object (yet)
             object_type = obj.get("objectType", None)
             return json_error(
-                "Unknown object type '{0}'.".format(object_type)
+                "Unknown object type '{}'.".format(object_type)
             )
     # Updating existing objects
@@ -377,7 +377,7 @@ def feed_endpoint(request, outbox=None):
         # Now try and find object
         if obj["objectType"] == "comment":
-            if not request.user.has_privilege(u'commenter'):
+            if not request.user.has_privilege('commenter'):
                 return json_error(
                     "Privilege 'commenter' required to comment.",
                     status=403
@@ -388,7 +388,7 @@ def feed_endpoint(request, outbox=None):
             ).first()
             if comment is None:
                 return json_error(
-                    "No such 'comment' with id '{0}'.".format(obj_id)
+                    "No such 'comment' with id '{}'.".format(obj_id)
                 )
             # Check that the person trying to update the comment is
@@ -401,7 +401,7 @@ def feed_endpoint(request, outbox=None):
             if not comment.unserialize(data["object"], request):
                 return json_error(
-                    "Invalid 'comment' with id '{0}'".format(obj["id"])
+                    "Invalid 'comment' with id '{}'".format(obj["id"])
                 )
             comment.save()
@@ -423,7 +423,7 @@ def feed_endpoint(request, outbox=None):
             ).first()
             if image is None:
                 return json_error(
-                    "No such 'image' with the id '{0}'.".format(obj["id"])
+                    "No such 'image' with the id '{}'.".format(obj["id"])
                 )
             # Check that the person trying to update the comment is
@@ -436,7 +436,7 @@ def feed_endpoint(request, outbox=None):
             if not image.unserialize(obj):
                 return json_error(
-                    "Invalid 'image' with id '{0}'".format(obj_id)
+                    "Invalid 'image' with id '{}'".format(obj_id)
                 )
             image.generate_slug()
             image.save()
@@ -504,7 +504,7 @@ def feed_endpoint(request, outbox=None):
             if comment is None:
                 return json_error(
-                    "No such 'comment' with id '{0}'.".format(obj_id)
+                    "No such 'comment' with id '{}'.".format(obj_id)
                 )
             # Make a delete activity
@@ -533,7 +533,7 @@ def feed_endpoint(request, outbox=None):
             if entry is None:
                 return json_error(
-                    "No such 'image' with id '{0}'.".format(obj_id)
+                    "No such 'image' with id '{}'.".format(obj_id)
                 )
             # Make the delete activity
@@ -555,7 +555,7 @@ def feed_endpoint(request, outbox=None):
     elif request.method != "GET":
         return json_error(
-            "Unsupported HTTP method {0}".format(request.method),
+            "Unsupported HTTP method {}".format(request.method),
             status=501
         )
@@ -645,7 +645,7 @@ def object_endpoint(request):
     try:
         object_id = request.matchdict["id"]
     except ValueError:
-        error = "Invalid object ID '{0}' for '{1}'".format(
+        error = "Invalid object ID '{}' for '{}'".format(
             request.matchdict["id"],
             object_type
         )
@@ -654,7 +654,7 @@ def object_endpoint(request):
     if object_type not in ["image"]:
         # not sure why this is 404, maybe ask evan. Maybe 400?
         return json_error(
-            "Unknown type: {0}".format(object_type),
+            "Unknown type: {}".format(object_type),
             status=404
         )
@@ -668,7 +668,7 @@ def object_endpoint(request):
     media = MediaEntry.query.filter_by(public_id=public_id).first()
     if media is None:
         return json_error(
-            "Can't find '{0}' with ID '{1}'".format(object_type, object_id),
+            "Can't find '{}' with ID '{}'".format(object_type, object_id),
             status=404
         )
@@ -685,7 +685,7 @@ def object_comments(request):
     )
     media = MediaEntry.query.filter_by(public_id=public_id).first()
     if media is None:
-        return json_error("Can't find '{0}' with ID '{1}'".format(
+        return json_error("Can't find '{}' with ID '{}'".format(
             request.matchdict["object_type"],
             request.matchdict["id"]
         ), 404)
@@ -702,7 +702,7 @@ def object_comments(request):
         )
     })
-    comments["displayName"] = "Replies to {0}".format(comments["url"])
+    comments["displayName"] = "Replies to {}".format(comments["url"])
     comments["links"] = {
         "first": comments["url"],
         "self": comments["url"],
@@ -805,7 +805,7 @@ def lrdd_lookup(request):
         if user is None:
             return json_error(
-                "Can't find 'user' with username '{0}'".format(username))
+                "Can't find 'user' with username '{}'".format(username))
         return json_response([
             {

View File

@@ -52,7 +52,7 @@ from mediagoblin.tools.transition import DISABLE_GLOBALS
 _log = logging.getLogger(__name__)
-class Context(object):
+class Context:
     """
     MediaGoblin context object.
@@ -65,7 +65,7 @@ class Context(object):
     pass
-class MediaGoblinApp(object):
+class MediaGoblinApp:
     """
     WSGI application of MediaGoblin
@@ -359,7 +359,7 @@ def paste_app_factory(global_config, **app_config):
             break
     if not mediagoblin_config:
-        raise IOError("Usable mediagoblin config not found.")
+        raise OSError("Usable mediagoblin config not found.")
     del app_config['config']
     mgoblin_app = MediaGoblinApp(mediagoblin_config)

View File

@@ -47,12 +47,12 @@ def normalize_user_or_email_field(allow_email=True, allow_user=True,
     If is_login is True, does not check the length of username.
     """
-    message = _(u'Invalid User name or email address.')
-    nomail_msg = _(u"This field does not take email addresses.")
-    nouser_msg = _(u"This field requires an email address.")
+    message = _('Invalid User name or email address.')
+    nomail_msg = _("This field does not take email addresses.")
+    nouser_msg = _("This field requires an email address.")
     def _normalize_field(form, field):
-        email = u'@' in field.data
+        email = '@' in field.data
         if email: # normalize email address casing
             if not allow_email:
                 raise wtforms.ValidationError(nomail_msg)
@@ -71,8 +71,8 @@ def normalize_user_or_email_field(allow_email=True, allow_user=True,
 EMAIL_VERIFICATION_TEMPLATE = (
-    u"{uri}?"
-    u"token={verification_key}")
+    "{uri}?"
+    "token={verification_key}")
 def send_verification_email(user, request, email=None,
@@ -121,11 +121,11 @@ def basic_extra_validation(register_form, *args):
     if users_with_username:
         register_form.username.errors.append(
-            _(u'Sorry, a user with that name already exists.'))
+            _('Sorry, a user with that name already exists.'))
         extra_validation_passes = False
     if users_with_email:
         register_form.email.errors.append(
-            _(u'Sorry, a user with that email address already exists.'))
+            _('Sorry, a user with that email address already exists.'))
         extra_validation_passes = False
     return extra_validation_passes
@@ -144,7 +144,7 @@ def register_user(request, register_form):
         user.save()
         # log the user in
-        request.session['user_id'] = six.text_type(user.id)
+        request.session['user_id'] = str(user.id)
         request.session.save()
         # send verification email

View File

@@ -45,7 +45,7 @@ def register(request):
     if 'pass_auth' not in request.template_env.globals:
         redirect_name = hook_handle('auth_no_pass_redirect')
         if redirect_name:
-            return redirect(request, 'mediagoblin.plugins.{0}.register'.format(
+            return redirect(request, 'mediagoblin.plugins.{}.register'.format(
                 redirect_name))
         else:
             return redirect(request, 'index')
@@ -80,7 +80,7 @@ def login(request):
     if 'pass_auth' not in request.template_env.globals:
         redirect_name = hook_handle('auth_no_pass_redirect')
         if redirect_name:
-            return redirect(request, 'mediagoblin.plugins.{0}.login'.format(
+            return redirect(request, 'mediagoblin.plugins.{}.login'.format(
                 redirect_name))
         else:
             return redirect(request, 'index')
@@ -100,7 +100,7 @@ def login(request):
             # set up login in session
             if login_form.stay_logged_in.data:
                 request.session['stay_logged_in'] = True
-            request.session['user_id'] = six.text_type(user.id)
+            request.session['user_id'] = str(user.id)
             request.session.save()
             if request.form.get('next'):
@@ -157,11 +157,11 @@ def verify_email(request):
     user = User.query.filter_by(id=int(token)).first()
-    if user and user.has_privilege(u'active') is False:
+    if user and user.has_privilege('active') is False:
         user.verification_key = None
         user.all_privileges.append(
             Privilege.query.filter(
-                Privilege.privilege_name==u'active').first())
+                Privilege.privilege_name=='active').first())
         user.save()
@@ -196,7 +196,7 @@ def resend_activation(request):
         return redirect(request, 'mediagoblin.auth.login')
-    if request.user.has_privilege(u'active'):
+    if request.user.has_privilege('active'):
         messages.add_message(
             request,
             messages.ERROR,

View File

@@ -25,7 +25,7 @@ if not DISABLE_GLOBALS:
     from sqlalchemy.orm import scoped_session, sessionmaker
     Session = scoped_session(sessionmaker())
-class FakeCursor(object):
+class FakeCursor:
     def __init__ (self, cursor, mapper, filter=None):
         self.cursor = cursor
@@ -50,7 +50,7 @@ class FakeCursor(object):
         r = self.cursor.slice(*args, **kwargs)
         return list(six.moves.filter(self.filter, six.moves.map(self.mapper, r)))
-class GMGTableBase(object):
+class GMGTableBase:
     # Deletion types
     HARD_DELETE = "hard-deletion"
     SOFT_DELETE = "soft-deletion"
@@ -194,7 +194,7 @@ class GMGTableBase(object):
 Base = declarative_base(cls=GMGTableBase)
-class DictReadAttrProxy(object):
+class DictReadAttrProxy:
     """
     Maps read accesses to obj['key'] to obj.key
     and hides all the rest of the obj

View File

@@ -14,7 +14,6 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
-from __future__ import unicode_literals
 import logging
 import os
@@ -36,7 +35,7 @@ class TableAlreadyExists(Exception):
     pass
-class MigrationManager(object):
+class MigrationManager:
     """
     Migration handling tool.
@@ -148,7 +147,7 @@ class MigrationManager(object):
         # Maybe in the future just print out a "Yikes!" or something?
         if model.__table__.exists(self.session.bind):
             raise TableAlreadyExists(
-                u"Intended to create table '%s' but it already exists" %
+                "Intended to create table '%s' but it already exists" %
                 model.__table__.name)
         self.migration_model.metadata.create_all(
@@ -171,26 +170,26 @@ class MigrationManager(object):
         """
         if self.database_current_migration is None:
             self.printer(
-                u'~> Woulda initialized: %s\n' % self.name_for_printing())
-            return u'inited'
+                '~> Woulda initialized: %s\n' % self.name_for_printing())
+            return 'inited'
         migrations_to_run = self.migrations_to_run()
         if migrations_to_run:
             self.printer(
-                u'~> Woulda updated %s:\n' % self.name_for_printing())
+                '~> Woulda updated %s:\n' % self.name_for_printing())
             for migration_number, migration_func in migrations_to_run():
                 self.printer(
-                    u' + Would update %s, "%s"\n' % (
+                    ' + Would update {}, "{}"\n'.format(
                         migration_number, migration_func.func_name))
-            return u'migrated'
+            return 'migrated'
     def name_for_printing(self):
-        if self.name == u'__main__':
-            return u"main mediagoblin tables"
+        if self.name == '__main__':
+            return "main mediagoblin tables"
         else:
-            return u'plugin "%s"' % self.name
+            return 'plugin "%s"' % self.name
     def init_or_migrate(self):
         """
@@ -213,36 +212,36 @@ class MigrationManager(object):
         # - print / inform the user
         # - return 'inited'
         if migration_number is None:
-            self.printer(u"-> Initializing %s... " % self.name_for_printing())
+            self.printer("-> Initializing %s... " % self.name_for_printing())
             self.init_tables()
             # auto-set at latest migration number
             self.create_new_migration_record()
-            self.printer(u"done.\n")
+            self.printer("done.\n")
             self.set_current_migration()
-            return u'inited'
+            return 'inited'
         # Run migrations, if appropriate.
         migrations_to_run = self.migrations_to_run()
         if migrations_to_run:
             self.printer(
-                u'-> Updating %s:\n' % self.name_for_printing())
+                '-> Updating %s:\n' % self.name_for_printing())
             for migration_number, migration_func in migrations_to_run:
                 self.printer(
-                    u' + Running migration %s, "%s"... ' % (
+                    ' + Running migration {}, "{}"... '.format(
                         migration_number, migration_func.__name__))
                 migration_func(self.session)
                 self.set_current_migration(migration_number)
                 self.printer('done.\n')
-            return u'migrated'
+            return 'migrated'
         # Otherwise return None. Well it would do this anyway, but
         # for clarity... ;)
         return None
-class RegisterMigration(object):
+class RegisterMigration:
     """
     Tool for registering migrations
@@ -348,9 +347,9 @@ def populate_table_foundations(session, foundations, name,
     Create the table foundations (default rows) as layed out in FOUNDATIONS
     in mediagoblin.db.models
     """
-    printer(u'Laying foundations for %s:\n' % name)
+    printer('Laying foundations for %s:\n' % name)
     for Model, rows in foundations.items():
-        printer(u' + Laying foundations for %s table\n' %
+        printer(' + Laying foundations for %s table\n' %
                (Model.__name__))
         for parameters in rows:
             new_row = Model(**parameters)

View File

@@ -14,7 +14,6 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
-from __future__ import print_function
 import datetime
 import uuid
@@ -267,11 +266,11 @@ def mediaentry_new_slug_era(db):
     for row in db.execute(media_table.select()):
         # no slug, try setting to an id
         if not row.slug:
-            append_garbage_till_unique(row, six.text_type(row.id))
+            append_garbage_till_unique(row, str(row.id))
         # has "=" or ":" in it... we're getting rid of those
-        elif u"=" in row.slug or u":" in row.slug:
+        elif "=" in row.slug or ":" in row.slug:
             append_garbage_till_unique(
-                row, row.slug.replace(u"=", u"-").replace(u":", u"-"))
+                row, row.slug.replace("=", "-").replace(":", "-"))
     db.commit()
@@ -296,7 +295,7 @@ def unique_collections_slug(db):
             existing_slugs[row.creator].append(row.slug)
     for row_id in slugs_to_change:
-        new_slug = six.text_type(uuid.uuid4())
+        new_slug = str(uuid.uuid4())
         db.execute(collection_table.update().
                    where(collection_table.c.id == row_id).
                    values(slug=new_slug))
@@ -428,9 +427,9 @@ class Client_v0(declarative_base()):
     def __repr__(self):
         if self.application_name:
-            return "<Client {0} - {1}>".format(self.application_name, self.id)
+            return "<Client {} - {}>".format(self.application_name, self.id)
         else:
-            return "<Client {0}>".format(self.id)
+            return "<Client {}>".format(self.id)
 class RequestToken_v0(declarative_base()):
     """
@@ -445,7 +444,7 @@ class RequestToken_v0(declarative_base()):
     used = Column(Boolean, default=False)
     authenticated = Column(Boolean, default=False)
     verifier = Column(Unicode, nullable=True)
-    callback = Column(Unicode, nullable=False, default=u"oob")
+    callback = Column(Unicode, nullable=False, default="oob")
     created = Column(DateTime, nullable=False, default=datetime.datetime.now)
     updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
@@ -589,12 +588,12 @@ class PrivilegeUserAssociation_v0(declarative_base()):
         primary_key=True)
-PRIVILEGE_FOUNDATIONS_v0 = [{'privilege_name':u'admin'},
-                            {'privilege_name':u'moderator'},
-                            {'privilege_name':u'uploader'},
-                            {'privilege_name':u'reporter'},
-                            {'privilege_name':u'commenter'},
-                            {'privilege_name':u'active'}]
+PRIVILEGE_FOUNDATIONS_v0 = [{'privilege_name':'admin'},
+                            {'privilege_name':'moderator'},
+                            {'privilege_name':'uploader'},
+                            {'privilege_name':'reporter'},
+                            {'privilege_name':'commenter'},
+                            {'privilege_name':'active'}]
 # vR1 stands for "version Rename 1". This only exists because we need
 # to deal with dropping some booleans and it's otherwise impossible
@@ -656,11 +655,11 @@ def create_moderation_tables(db):
         db.execute(
             user_table.select().where(
                 user_table.c.is_admin==False).where(
-                user_table.c.status==u"active")).fetchall(),
+                user_table.c.status=="active")).fetchall(),
         db.execute(
             user_table.select().where(
                 user_table.c.is_admin==False).where(
-                user_table.c.status!=u"active")).fetchall())
+                user_table.c.status!="active")).fetchall())
     # Get the ids for each of the privileges so we can reference them ~~~~~~~~~
     (admin_privilege_id, uploader_privilege_id,
@@ -669,7 +668,7 @@ def create_moderation_tables(db):
         db.execute(privileges_table.select().where(
             privileges_table.c.privilege_name==privilege_name)).first()['id']
         for privilege_name in
-        [u"admin",u"uploader",u"reporter",u"commenter",u"active"]
+        ["admin","uploader","reporter","commenter","active"]
     ]
     # Give each user the appopriate privileges depending whether they are an
@@ -854,14 +853,14 @@ def revert_username_index(db):
     """
     metadata = MetaData(bind=db.bind)
     user_table = inspect_table(metadata, "core__users")
-    indexes = dict(
-        [(index.name, index) for index in user_table.indexes])
+    indexes = {
+        index.name: index for index in user_table.indexes}
     # index from unnecessary migration
-    users_uploader_index = indexes.get(u'ix_core__users_uploader')
+    users_uploader_index = indexes.get('ix_core__users_uploader')
     # index created from models.py after (unique=True, index=True)
     # was set in models.py
-    users_username_index = indexes.get(u'ix_core__users_username')
+    users_username_index = indexes.get('ix_core__users_username')
     if users_uploader_index is None and users_username_index is None:
         # We don't need to do anything.
@@ -988,7 +987,7 @@ def activity_migration(db):
     # Get the ID of that generator
     gmg_generator = db.execute(generator_table.select(
-        generator_table.c.name==u"GNU Mediagoblin")).first()
+        generator_table.c.name=="GNU Mediagoblin")).first()
     # Now we want to modify the tables which MAY have an activity at some point

View File

@@ -1,4 +1,3 @@
-from __future__ import with_statement
 from alembic import context
 from sqlalchemy import engine_from_config, pool
 from logging.config import fileConfig

View File

@@ -24,7 +24,7 @@ def upgrade():
     sa.Column('name', sa.Unicode(), nullable=False),
     sa.Column('filepath', PathTupleWithSlashes(), nullable=True),
     sa.Column('created', sa.DateTime(), nullable=False),
-    sa.ForeignKeyConstraint(['media_entry'], [u'core__media_entries.id'], ),
+    sa.ForeignKeyConstraint(['media_entry'], ['core__media_entries.id'], ),
     sa.PrimaryKeyConstraint('id')
     )
     ### end Alembic commands ###

View File

@@ -41,7 +41,7 @@ from mediagoblin.tools.text import cleaned_markdown_conversion
 from mediagoblin.tools.url import slugify
 from mediagoblin.tools.translate import pass_to_ugettext as _
-class CommentingMixin(object):
+class CommentingMixin:
     """
     Mixin that gives classes methods to get and add the comments on/to it
@@ -80,9 +80,9 @@ class CommentingMixin(object):
         link = self.get_comment_link()
         if link is not None:
             link.delete()
-        super(CommentingMixin, self).soft_delete(*args, **kwargs)
+        super().soft_delete(*args, **kwargs)
-class GeneratePublicIDMixin(object):
+class GeneratePublicIDMixin:
     """
     Mixin that ensures that a the public_id field is populated.
@@ -118,7 +118,7 @@ class GeneratePublicIDMixin(object):
         self.save()
         return self.public_id
-class UserMixin(object):
+class UserMixin:
     object_type = "person"
     @property
@@ -132,7 +132,7 @@ class UserMixin(object):
                       user=self.username, **kwargs)
-class GenerateSlugMixin(object):
+class GenerateSlugMixin:
     """
     Mixin to add a generate_slug method to objects.
@@ -179,7 +179,7 @@ class GenerateSlugMixin(object):
             return
         # We don't want any empty string slugs
-        if slug == u"":
+        if slug == "":
             return
         # Otherwise, let's see if this is unique.
@@ -188,7 +188,7 @@ class GenerateSlugMixin(object):
             # Can we just append the object's id to the end?
             if self.id:
-                slug_with_id = u"%s-%s" % (slug, self.id)
+                slug_with_id = "{}-{}".format(slug, self.id)
                 if not self.check_slug_used(slug_with_id):
                     self.slug = slug_with_id
                     return # success!
@@ -284,7 +284,7 @@ class MediaEntryMixin(GenerateSlugMixin, GeneratePublicIDMixin):
         if self.slug:
             return self.slug
         else:
-            return u'id:%s' % self.id
+            return 'id:%s' % self.id
     def url_for_self(self, urlgen, **extra_args):
         """
@@ -306,26 +306,26 @@ class MediaEntryMixin(GenerateSlugMixin, GeneratePublicIDMixin):
         Will return either the real thumbnail or a default fallback icon."""
         # TODO: implement generic fallback in case MEDIA_MANAGER does
         # not specify one?
-        if u'thumb' in self.media_files:
+        if 'thumb' in self.media_files:
             thumb_url = self._app.public_store.file_url(
-                self.media_files[u'thumb'])
+                self.media_files['thumb'])
         else:
             # No thumbnail in media available. Get the media's
             # MEDIA_MANAGER for the fallback icon and return static URL
             # Raises FileTypeNotSupported in case no such manager is enabled
             manager = self.media_manager
-            thumb_url = self._app.staticdirector(manager[u'default_thumb'])
+            thumb_url = self._app.staticdirector(manager['default_thumb'])
         return thumb_url
     @property
     def original_url(self):
         """ Returns the URL for the original image
        will return self.thumb_url if original url doesn't exist"""
-        if u"original" not in self.media_files:
+        if "original" not in self.media_files:
             return self.thumb_url
         return self._app.public_store.file_url(
-            self.media_files[u"original"]
+            self.media_files["original"]
         )
     @property
@@ -442,7 +442,7 @@ class TextCommentMixin(GeneratePublicIDMixin):
         return cleaned_markdown_conversion(self.content)
     def __unicode__(self):
-        return u'<{klass} #{id} {actor} "{comment}">'.format(
+        return '<{klass} #{id} {actor} "{comment}">'.format(
             klass=self.__class__.__name__,
             id=self.id,
             actor=self.get_actor,
@@ -514,7 +514,7 @@ class CollectionMixin(GenerateSlugMixin, GeneratePublicIDMixin):
         item.save(commit=commit)
         return item
-class CollectionItemMixin(object):
+class CollectionItemMixin:
     @property
     def note_html(self):
         """

View File

@@ -18,7 +18,6 @@
 TODO: indexes on foreignkeys, where useful.
 """
-from __future__ import print_function
 import logging
 import datetime
@@ -114,9 +113,9 @@ class GenericModelReference(Base):
             # to prevent circular imports do import here
             registry = dict(Base._decl_class_registry).values()
-            self._TYPE_MAP = dict(
-                ((m.__tablename__, m) for m in registry if hasattr(m, "__tablename__"))
-            )
+            self._TYPE_MAP = {
+                m.__tablename__: m for m in registry if hasattr(m, "__tablename__")
+            }
             setattr(type(self), "_TYPE_MAP", self._TYPE_MAP)
         return self.__class__._TYPE_MAP[model_type]
@@ -271,7 +270,7 @@ class User(Base, UserMixin):
         for activity in Activity.query.filter_by(actor=self.id):
             activity.delete(**kwargs)
-        super(User, self).soft_delete(*args, **kwargs)
+        super().soft_delete(*args, **kwargs)
     def delete(self, *args, **kwargs):
@@ -291,8 +290,8 @@ class User(Base, UserMixin):
         # Delete user, pass through commit=False/True in kwargs
         username = self.username
-        super(User, self).delete(*args, **kwargs)
-        _log.info('Deleted user "{0}" account'.format(username))
+        super().delete(*args, **kwargs)
+        _log.info('Deleted user "{}" account'.format(username))
     def has_privilege(self, privilege, allow_admin=True):
         """
@@ -311,7 +310,7 @@ class User(Base, UserMixin):
         priv = Privilege.query.filter_by(privilege_name=privilege).one()
         if priv in self.all_privileges:
             return True
-        elif allow_admin and self.has_privilege(u'admin', allow_admin=False):
+        elif allow_admin and self.has_privilege('admin', allow_admin=False):
             return True
         return False
@@ -383,15 +382,15 @@ class LocalUser(User):
     # plugin data would be in a separate model
     def __repr__(self):
-        return '<{0} #{1} {2} {3} "{4}">'.format(
+        return '<{} #{} {} {} "{}">'.format(
             self.__class__.__name__,
             self.id,
-            'verified' if self.has_privilege(u'active') else 'non-verified',
-            'admin' if self.has_privilege(u'admin') else 'user',
+            'verified' if self.has_privilege('active') else 'non-verified',
+            'admin' if self.has_privilege('admin') else 'user',
             self.username)
     def get_public_id(self, host):
-        return "acct:{0}@{1}".format(self.username, host)
+        return "acct:{}@{}".format(self.username, host)
     def serialize(self, request):
         user = {
@@ -423,7 +422,7 @@ class LocalUser(User):
             },
         }
-        user.update(super(LocalUser, self).serialize(request))
+        user.update(super().serialize(request))
         return user
 class RemoteUser(User):
@@ -438,7 +437,7 @@ class RemoteUser(User):
     }
     def __repr__(self):
-        return "<{0} #{1} {2}>".format(
+        return "<{} #{} {}>".format(
             self.__class__.__name__,
             self.id,
             self.webfinger
@@ -466,9 +465,9 @@ class Client(Base):
     def __repr__(self):
         if self.application_name:
-            return "<Client {0} - {1}>".format(self.application_name, self.id)
+            return "<Client {} - {}>".format(self.application_name, self.id)
         else:
-            return "<Client {0}>".format(self.id)
+            return "<Client {}>".format(self.id)
 class RequestToken(Base):
     """
@@ -483,7 +482,7 @@ class RequestToken(Base):
     used = Column(Boolean, default=False)
     authenticated = Column(Boolean, default=False)
     verifier = Column(Unicode, nullable=True)
-    callback = Column(Unicode, nullable=False, default=u"oob")
+    callback = Column(Unicode, nullable=False, default="oob")
     created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
     updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
@@ -529,7 +528,7 @@ class MediaEntry(Base, MediaEntryMixin, CommentingMixin):
     slug = Column(Unicode)
     description = Column(UnicodeText) # ??
     media_type = Column(Unicode, nullable=False)
-    state = Column(Unicode, default=u'unprocessed', nullable=False)
+    state = Column(Unicode, default='unprocessed', nullable=False)
         # or use sqlalchemy.types.Enum?
     license = Column(Unicode)
     file_size = Column(Integer, default=0)
@@ -636,7 +635,7 @@ class MediaEntry(Base, MediaEntryMixin, CommentingMixin):
         """get the next 'newer' entry by this user"""
         media = MediaEntry.query.filter(
             (MediaEntry.actor == self.actor)
-            & (MediaEntry.state == u'processed')
+            & (MediaEntry.state == 'processed')
             & (MediaEntry.id > self.id)).order_by(MediaEntry.id).first()
         if media is not None:
@@ -646,7 +645,7 @@ class MediaEntry(Base, MediaEntryMixin, CommentingMixin):
         """get the next 'older' entry by this user"""
         media = MediaEntry.query.filter(
             (MediaEntry.actor == self.actor)
-            & (MediaEntry.state == u'processed')
+            & (MediaEntry.state == 'processed')
             & (MediaEntry.id < self.id)).order_by(desc(MediaEntry.id)).first()
         if media is not None:
@@ -658,7 +657,7 @@ class MediaEntry(Base, MediaEntryMixin, CommentingMixin):
         return the value of the key.
         """
         media_file = MediaFile.query.filter_by(media_entry=self.id,
-                                               name=six.text_type(file_key)).first()
+                                               name=str(file_key)).first()
         if media_file:
             if metadata_key:
@@ -671,11 +670,11 @@ class MediaEntry(Base, MediaEntryMixin, CommentingMixin):
         Update the file_metadata of a MediaFile.
         """
         media_file = MediaFile.query.filter_by(media_entry=self.id,
-                                               name=six.text_type(file_key)).first()
+                                               name=str(file_key)).first()
         file_metadata = media_file.file_metadata or {}
-        for key, value in six.iteritems(kwargs):
+        for key, value in kwargs.items():
             file_metadata[key] = value
         media_file.file_metadata = file_metadata
@@ -700,7 +699,7 @@ class MediaEntry(Base, MediaEntryMixin, CommentingMixin):
             media_data.get_media_entry = self
         else:
             # Update old media data
-            for field, value in six.iteritems(kwargs):
+            for field, value in kwargs.items():
                 setattr(media_data, field, value)
     @memoized_property
@@ -708,10 +707,6 @@ class MediaEntry(Base, MediaEntryMixin, CommentingMixin):
         return import_component(self.media_type + '.models:BACKREF_NAME')
     def __repr__(self):
-        if six.PY2:
-            # obj.__repr__() should return a str on Python 2
-            safe_title = self.title.encode('utf-8', 'replace')
-        else:
-            safe_title = self.title
+        safe_title = self.title
         return '<{classname} {id}: {title}>'.format(
@@ -724,7 +719,7 @@ class MediaEntry(Base, MediaEntryMixin, CommentingMixin):
         for comment in self.get_comments():
             comment.delete(*args, **kwargs)
-        super(MediaEntry, self).soft_delete(*args, **kwargs)
+        super().soft_delete(*args, **kwargs)
     def delete(self, del_orphan_tags=True, **kwargs):
         """Delete MediaEntry and all related files/attachments/comments
@@ -744,7 +739,7 @@ class MediaEntry(Base, MediaEntryMixin, CommentingMixin):
             # Returns list of files we failed to delete
             _log.error('No such files from the user "{1}" to delete: '
                        '{0}'.format(str(error), self.get_actor))
-        _log.info('Deleted Media entry id "{0}"'.format(self.id))
+        _log.info('Deleted Media entry id "{}"'.format(self.id))
         # Related MediaTag's are automatically cleaned, but we might
         # want to clean out unused Tag's too.
         if del_orphan_tags:
@@ -753,7 +748,7 @@ class MediaEntry(Base, MediaEntryMixin, CommentingMixin):
             from mediagoblin.db.util import clean_orphan_tags
             clean_orphan_tags(commit=False)
         # pass through commit=False/True in kwargs
-        super(MediaEntry, self).delete(**kwargs)
+        super().delete(**kwargs)
     def serialize(self, request, show_comments=True):
         """ Unserialize MediaEntry to object """
@@ -864,7 +859,7 @@ class FileKeynames(Base):
     name = Column(Unicode, unique=True)
     def __repr__(self):
-        return "<FileKeyname %r: %r>" % (self.id, self.name)
+        return "<FileKeyname {!r}: {!r}>".format(self.id, self.name)
     @classmethod
     def find_or_new(cls, name):
@@ -893,7 +888,7 @@ class MediaFile(Base):
         {})
     def __repr__(self):
-        return "<MediaFile %s: %r>" % (self.name, self.file_path)
+        return "<MediaFile {}: {!r}>".format(self.name, self.file_path)
     name_helper = relationship(FileKeynames, lazy="joined", innerjoin=True)
     name = association_proxy('name_helper', 'name',
@@ -941,7 +936,7 @@ class Tag(Base):
     slug = Column(Unicode, nullable=False, unique=True)
     def __repr__(self):
-        return "<Tag %r: %r>" % (self.id, self.slug)
+        return "<Tag {!r}: {!r}>".format(self.id, self.slug)
     @classmethod
     def find_or_new(cls, slug):
@@ -1040,7 +1035,7 @@ class Comment(Base):
             # fetch it from self.comment()
             raise AttributeError
         try:
-            _log.debug('Old attr is being accessed: {0}'.format(attr))
+            _log.debug('Old attr is being accessed: {}'.format(attr))
             return getattr(self.comment(), attr) # noqa
         except Exception as e:
             _log.error(e)
@@ -1347,7 +1342,7 @@ class Notification(Base):
             seen='unseen' if not self.seen else 'seen')
     def __unicode__(self):
-        return u'<{klass} #{id}: {user}: {subject} ({seen})>'.format(
+        return '<{klass} #{id}: {user}: {subject} ({seen})>'.format(
            id=self.id,
            klass=self.__class__.__name__,
            user=self.user,
@@ -1603,7 +1598,7 @@ class Activity(Base, ActivityMixin):
     def save(self, set_updated=True, *args, **kwargs):
         if set_updated:
             self.updated = datetime.datetime.now()
-        super(Activity, self).save(*args, **kwargs)
+        super().save(*args, **kwargs)
 class Graveyard(Base):
     """ Where models come to die """
@@ -1663,12 +1658,12 @@ MODELS = [
 FOUNDATIONS = {User:user_foundations}
 """
-privilege_foundations = [{'privilege_name':u'admin'},
-                         {'privilege_name':u'moderator'},
-                         {'privilege_name':u'uploader'},
-                         {'privilege_name':u'reporter'},
-                         {'privilege_name':u'commenter'},
-                         {'privilege_name':u'active'}]
+privilege_foundations = [{'privilege_name':'admin'},
+                         {'privilege_name':'moderator'},
+                         {'privilege_name':'uploader'},
+                         {'privilege_name':'reporter'},
+                         {'privilege_name':'commenter'},
+                         {'privilege_name':'active'}]
 FOUNDATIONS = {Privilege:privilege_foundations}
 ######################################################

View File

@@ -34,14 +34,14 @@ def set_models_as_attributes(obj):
     TODO: This should eventually be deprecated.
     """
-    for k, v in six.iteritems(Base._decl_class_registry):
+    for k, v in Base._decl_class_registry.items():
         setattr(obj, k, v)
 if not DISABLE_GLOBALS:
     from mediagoblin.db.base import Session
-    class DatabaseMaster(object):
+    class DatabaseMaster:
         def __init__(self, engine):
             self.engine = engine
@@ -71,7 +71,7 @@ if not DISABLE_GLOBALS:
 else:
     from sqlalchemy.orm import sessionmaker
-    class DatabaseManager(object):
+    class DatabaseManager:
         """
         Manage database connections.
@@ -136,7 +136,7 @@ def load_models(app_config):
         try:
             __import__(plugin + ".models")
         except ImportError as exc:
-            _log.debug("Could not load {0}.models: {1}".format(
+            _log.debug("Could not load {}.models: {}".format(
                 plugin,
                 exc))

View File

@@ -49,7 +49,7 @@ def media_entries_for_tag_slug(dummy_db, tag_slug):
         .join(MediaEntry.tags_helper) \
         .join(MediaTag.tag_helper) \
         .filter(
-            (MediaEntry.state == u'processed')
+            (MediaEntry.state == 'processed')
             & (Tag.slug == tag_slug))

View File

@@ -58,11 +58,11 @@ def require_active_login(controller):
     @user_not_banned
     def new_controller_func(request, *args, **kwargs):
         if request.user and \
-                not request.user.has_privilege(u'active'):
+                not request.user.has_privilege('active'):
             return redirect(
                 request, 'mediagoblin.user_pages.user_home',
                 user=request.user.username)
-        elif not request.user or not request.user.has_privilege(u'active'):
+        elif not request.user or not request.user.has_privilege('active'):
             next_url = urljoin(
                     request.urlgen('mediagoblin.auth.login',
                                    qualified=True),
@@ -128,7 +128,7 @@ def user_may_delete_media(controller):
     @wraps(controller)
     def wrapper(request, *args, **kwargs):
         uploader_id = kwargs['media'].actor
-        if not (request.user.has_privilege(u'admin') or
+        if not (request.user.has_privilege('admin') or
                 request.user.id == uploader_id):
             raise Forbidden()
@@ -145,7 +145,7 @@ def user_may_alter_collection(controller):
     def wrapper(request, *args, **kwargs):
         creator_id = request.db.LocalUser.query.filter_by(
             username=request.matchdict['user']).first().id
-        if not (request.user.has_privilege(u'admin') or
+        if not (request.user.has_privilege('admin') or
                 request.user.id == creator_id):
             raise Forbidden()
@@ -188,11 +188,11 @@ def get_user_media_entry(controller):
         media_slug = request.matchdict['media']
         # if it starts with id: it actually isn't a slug, it's an id.
-        if media_slug.startswith(u'id:'):
+        if media_slug.startswith('id:'):
             try:
                 media = MediaEntry.query.filter_by(
                     id=int(media_slug[3:]),
-                    state=u'processed',
+                    state='processed',
                     actor=user.id).first()
             except ValueError:
                 raise NotFound()
@@ -200,7 +200,7 @@ def get_user_media_entry(controller):
             # no magical id: stuff? It's a slug!
             media = MediaEntry.query.filter_by(
                 slug=media_slug,
-                state=u'processed',
+                state='processed',
                 actor=user.id).first()
         if not media:
@@ -374,8 +374,8 @@ def require_admin_or_moderator_login(controller):
     @wraps(controller)
     def new_controller_func(request, *args, **kwargs):
         if request.user and \
-            not (request.user.has_privilege(u'admin')
-                 or request.user.has_privilege(u'moderator')):
+            not (request.user.has_privilege('admin')
+                 or request.user.has_privilege('moderator')):
             raise Forbidden()
         elif not request.user:
@@ -419,7 +419,7 @@ def oauth_required(controller):
             return json_response({"error": error}, status=400)
         # Fill user if not already
-        token = authorization[u"oauth_token"]
+        token = authorization["oauth_token"]
         request.access_token = AccessToken.query.filter_by(token=token).first()
         if request.access_token is not None and request.user is None:
             user_id = request.access_token.actor

View File

@@ -29,11 +29,11 @@ class WebsiteField(wtforms.StringField):
     def process_formdata(self, valuelist):
         if valuelist:
             data = valuelist[0]
-            if not data.startswith((u'http://', u'https://')):
-                data = u'http://' + data
+            if not data.startswith(('http://', 'https://')):
+                data = 'http://' + data
             self.data = data
         else:
-            super(WebsiteField, self).process_formdata(valuelist)
+            super().process_formdata(valuelist)
 class EditForm(wtforms.Form):
@@ -143,7 +143,7 @@ class ChangeEmailForm(wtforms.Form):
             "Enter your password to prove you own this account."))
-class MetaDataValidator(object):
+class MetaDataValidator:
     """
     Custom validator which runs form data in a MetaDataForm through a jsonschema
     validator and passes errors recieved in jsonschema to wtforms.
@@ -171,8 +171,8 @@ class MetaDataValidator(object):
 class MetaDataForm(wtforms.Form):
-    identifier = wtforms.StringField(_(u'Identifier'),[MetaDataValidator()])
-    value = wtforms.StringField(_(u'Value'))
+    identifier = wtforms.StringField(_('Identifier'),[MetaDataValidator()])
+    value = wtforms.StringField(_('Value'))
 class EditMetaDataForm(wtforms.Form):

View File

@@ -19,6 +19,6 @@ def may_edit_media(request, media):
     """Check, if the request's user may edit the media details"""
     if media.actor == request.user.id:
         return True
-    if request.user.has_privilege(u'admin'):
+    if request.user.has_privilege('admin'):
         return True
     return False

View File

@@ -1,4 +1,3 @@
 # Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS.
 #
 # This program is free software: you can redistribute it and/or modify
@@ -56,7 +55,7 @@ import mimetypes
 @require_active_login
 def edit_media(request, media):
     # If media is not processed, return NotFound.
-    if not media.state == u'processed':
+    if not media.state == 'processed':
         return render_404(request)
     if not may_edit_media(request, media):
@@ -81,20 +80,20 @@ def edit_media(request, media):
         if slug_used:
             form.slug.errors.append(
-                _(u'An entry with that slug already exists for this user.'))
+                _('An entry with that slug already exists for this user.'))
         else:
             media.title = form.title.data
             media.description = form.description.data
             media.tags = convert_to_tag_list_of_dicts(
                                    form.tags.data)
-            media.license = six.text_type(form.license.data) or None
+            media.license = str(form.license.data) or None
             media.slug = slug
             media.save()
             return redirect_obj(request, media)
-    if request.user.has_privilege(u'admin') \
+    if request.user.has_privilege('admin') \
             and media.actor != request.user.id \
             and request.method != 'POST':
         messages.add_message(
@@ -120,7 +119,7 @@ UNSAFE_MIMETYPES = [
 @require_active_login
 def edit_attachments(request, media):
     # If media is not processed, return NotFound.
-    if not media.state == u'processed':
+    if not media.state == 'processed':
         return render_404(request)
     if mg_globals.app_config['allow_attachments']:
@@ -143,7 +142,7 @@ def edit_attachments(request, media):
             if mimetypes.guess_type(
                     request.files['attachment_file'].filename)[0] in \
                     UNSAFE_MIMETYPES:
-                public_filename = secure_filename('{0}.notsafe'.format(
+                public_filename = secure_filename('{}.notsafe'.format(
                     request.files['attachment_file'].filename))
             else:
                 public_filename = secure_filename(
@@ -151,7 +150,7 @@ def edit_attachments(request, media):
             attachment_public_filepath \
                 = mg_globals.public_store.get_unique_filepath(
-                ['media_entries', six.text_type(media.id), 'attachment',
+                ['media_entries', str(media.id), 'attachment',
                  public_filename])
             attachment_public_file = mg_globals.public_store.get_file(
@@ -201,7 +200,7 @@ def legacy_edit_profile(request):
 def edit_profile(request, url_user=None):
     # admins may edit any user profile
     if request.user.username != url_user.username:
-        if not request.user.has_privilege(u'admin'):
+        if not request.user.has_privilege('admin'):
             raise Forbidden(_("You can only edit your own profile."))
         # No need to warn again if admin just submitted an edited profile
@@ -226,15 +225,15 @@ def edit_profile(request, url_user=None):
         location=location)
     if request.method == 'POST' and form.validate():
-        user.url = six.text_type(form.url.data)
-        user.bio = six.text_type(form.bio.data)
+        user.url = str(form.url.data)
+        user.bio = str(form.bio.data)
         # Save location
         if form.location.data and user.location is None:
-            user.get_location = Location(name=six.text_type(form.location.data))
+            user.get_location = Location(name=str(form.location.data))
         elif form.location.data:
             location = user.get_location
location.name = six.text_type(form.location.data) location.name = str(form.location.data)
location.save() location.save()
else: else:
user.location = None user.location = None
@ -256,8 +255,8 @@ def edit_profile(request, url_user=None):
'form': form}) 'form': form})
EMAIL_VERIFICATION_TEMPLATE = ( EMAIL_VERIFICATION_TEMPLATE = (
u'{uri}?' '{uri}?'
u'token={verification_key}') 'token={verification_key}')
@require_active_login @require_active_login
@ -324,7 +323,7 @@ def delete_account(request):
"""Delete a user completely""" """Delete a user completely"""
user = request.user user = request.user
if request.method == 'POST': if request.method == 'POST':
if request.form.get(u'confirmed'): if request.form.get('confirmed'):
# Form submitted and confirmed. Actually delete the user account # Form submitted and confirmed. Actually delete the user account
# Log out user and delete cookies etc. # Log out user and delete cookies etc.
# TODO: Should we be using MG.auth.views.py:logout for this? # TODO: Should we be using MG.auth.views.py:logout for this?
@ -384,17 +383,17 @@ def edit_collection(request, collection):
form.title.data) form.title.data)
elif slug_used: elif slug_used:
form.slug.errors.append( form.slug.errors.append(
_(u'A collection with that slug already exists for this user.')) _('A collection with that slug already exists for this user.'))
else: else:
collection.title = six.text_type(form.title.data) collection.title = str(form.title.data)
collection.description = six.text_type(form.description.data) collection.description = str(form.description.data)
collection.slug = six.text_type(form.slug.data) collection.slug = str(form.slug.data)
collection.save() collection.save()
return redirect_obj(request, collection) return redirect_obj(request, collection)
if request.user.has_privilege(u'admin') \ if request.user.has_privilege('admin') \
and collection.actor != request.user.id \ and collection.actor != request.user.id \
and request.method != 'POST': and request.method != 'POST':
messages.add_message( messages.add_message(
@ -508,19 +507,19 @@ def change_email(request):
{'form': form, {'form': form,
'user': user}) 'user': user})
@user_has_privilege(u'admin') @user_has_privilege('admin')
@require_active_login @require_active_login
@get_media_entry_by_id @get_media_entry_by_id
def edit_metadata(request, media): def edit_metadata(request, media):
# If media is not processed, return NotFound. # If media is not processed, return NotFound.
if not media.state == u'processed': if not media.state == 'processed':
return render_404(request) return render_404(request)
form = forms.EditMetaDataForm( form = forms.EditMetaDataForm(
request.method == 'POST' and request.form or None) request.method == 'POST' and request.form or None)
if request.method == "POST" and form.validate(): if request.method == "POST" and form.validate():
metadata_dict = dict([(row['identifier'],row['value']) metadata_dict = {row['identifier']:row['value']
for row in form.media_metadata.data]) for row in form.media_metadata.data}
json_ld_metadata = None json_ld_metadata = None
json_ld_metadata = compact_and_validate(metadata_dict) json_ld_metadata = compact_and_validate(metadata_dict)
media.media_metadata = json_ld_metadata media.media_metadata = json_ld_metadata
@ -528,7 +527,7 @@ def edit_metadata(request, media):
return redirect_obj(request, media) return redirect_obj(request, media)
if len(form.media_metadata) == 0: if len(form.media_metadata) == 0:
for identifier, value in six.iteritems(media.media_metadata): for identifier, value in media.media_metadata.items():
if identifier == "@context": continue if identifier == "@context": continue
form.media_metadata.append_entry({ form.media_metadata.append_entry({
'identifier':identifier, 'identifier':identifier,
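The hunk above shows two of the more interesting rewrites: dict() over a list comprehension becomes a dict comprehension, and six.iteritems(d) becomes d.items(). A runnable sketch with invented metadata rows:

rows = [{'identifier': 'dc:title', 'value': 'Goblin'},
        {'identifier': 'dc:creator', 'value': 'Ben'}]

# Python 2 era: build a throwaway list of pairs, then wrap it in dict().
metadata_dict = dict([(row['identifier'], row['value']) for row in rows])

# pyupgrade's form: a dict comprehension, no intermediate list.
metadata_dict = {row['identifier']: row['value'] for row in rows}

# six.iteritems(d) becomes d.items(), which is already a lazy view on Python 3.
for identifier, value in metadata_dict.items():
    print(identifier, value)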

View File

@ -20,15 +20,15 @@ MGOBLIN_ERROR_MESSAGE = """\
<pre> <pre>
.-------------------------. .-------------------------.
| __ _ | | __ _ |
| -, \_,------,_// | | -, \\_,------,_// |
| <\ ,-- --.\ | | <\\ ,-- --.\\ |
| / (x ) ( X ) | | / (x ) ( X ) |
| ' '--, ,--'\ | | ' '--, ,--'\\ |
| / \ -v-v-u-v / | | / \\ -v-v-u-v / |
| . '.__.--__'.\ | | . '.__.--__'.\\ |
| / ',___/ / \__/' | | / ',___/ / \\__/' |
| | | ,'\_'/, || | | | | ,'\\_'/, || |
| \_| | | | | || | | \\_| | | | | || |
| W',_ ||| |||_'' | | W',_ ||| |||_'' |
| | '------'| | | | '------'| |
| |__| |_|_ | | |__| |_|_ |

View File

@ -101,7 +101,7 @@ def main_cli():
"otherwise mediagoblin.ini")) "otherwise mediagoblin.ini"))
subparsers = parser.add_subparsers(help='sub-command help') subparsers = parser.add_subparsers(help='sub-command help')
for command_name, command_struct in six.iteritems(SUBCOMMAND_MAP): for command_name, command_struct in SUBCOMMAND_MAP.items():
if 'help' in command_struct: if 'help' in command_struct:
subparser = subparsers.add_parser( subparser = subparsers.add_parser(
command_name, help=command_struct['help']) command_name, help=command_struct['help'])

View File

@ -14,7 +14,6 @@
# You should have received a copy of the GNU Affero General Public License # You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import os import os
@ -95,7 +94,7 @@ def addmedia(args):
if some_string is None: if some_string is None:
return None return None
if six.PY2: if six.PY2:
return six.text_type(some_string, 'utf-8') return str(some_string, 'utf-8')
return some_string return some_string
try: try:
@ -107,7 +106,7 @@ def addmedia(args):
description=maybe_unicodeify(args.description), description=maybe_unicodeify(args.description),
collection_slug=args.collection_slug, collection_slug=args.collection_slug,
license=maybe_unicodeify(args.license), license=maybe_unicodeify(args.license),
tags_string=maybe_unicodeify(args.tags) or u"") tags_string=maybe_unicodeify(args.tags) or "")
except FileUploadLimit: except FileUploadLimit:
print("This file is larger than the upload limits for this site.") print("This file is larger than the upload limits for this site.")
except UserUploadLimit: except UserUploadLimit:
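One wrinkle in the hunk above: Python 2's str() never accepted an encoding argument, so the rewritten six.PY2 branch only stays correct because Python 2 support is being dropped and the branch becomes dead code. Once the guard goes away entirely, the helper (maybe_unicodeify, per its call sites above) could reduce to a plain decode; the version here is only a sketch, not part of this commit:

def maybe_unicodeify(some_string):
    # Sketch only: with Python 2 gone, text is already str and only bytes
    # would still need decoding.
    if some_string is None:
        return None
    if isinstance(some_string, bytes):
        return some_string.decode('utf-8')
    return some_string

print(maybe_unicodeify(b'caf\xc3\xa9'))   # café
print(maybe_unicodeify('already text'))   # already text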

View File

@ -35,7 +35,7 @@ class FudgedCommandLine(config.CommandLine):
plugins = global_config.get('plugins', {}).keys() plugins = global_config.get('plugins', {}).keys()
for plugin in plugins: for plugin in plugins:
try: try:
import_component('{0}.models:MODELS'.format(plugin)) import_component('{}.models:MODELS'.format(plugin))
except ImportError: except ImportError:
# It doesn't really matter if there's no models to import # It doesn't really matter if there's no models to import
# here. # here.

View File

@ -85,7 +85,7 @@ def link_theme_assets(theme, link_dir, printer=simple_printer):
os.symlink( os.symlink(
theme['assets_dir'].rstrip(os.path.sep), theme['assets_dir'].rstrip(os.path.sep),
link_dir) link_dir)
printer("Linked the theme's asset directory:\n %s\nto:\n %s\n" % ( printer("Linked the theme's asset directory:\n {}\nto:\n {}\n".format(
theme['assets_dir'], link_dir)) theme['assets_dir'], link_dir))
@ -128,7 +128,7 @@ def link_plugin_assets(plugin_static, plugins_link_dir, printer=simple_printer):
os.symlink( os.symlink(
plugin_static.file_path.rstrip(os.path.sep), plugin_static.file_path.rstrip(os.path.sep),
link_dir) link_dir)
printer('Linked asset directory for plugin "%s":\n %s\nto:\n %s\n' % ( printer('Linked asset directory for plugin "{}":\n {}\nto:\n {}\n'.format(
plugin_static.name, plugin_static.name,
plugin_static.file_path.rstrip(os.path.sep), plugin_static.file_path.rstrip(os.path.sep),
link_dir)) link_dir))
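The two hunks above swap printf-style %s placeholders for empty {} fields filled by str.format(). A runnable sketch with invented paths:

assets_dir = '/srv/theme/assets'
link_dir = '/srv/mediagoblin/user_dev/theme_static'

# Old: positional %s placeholders fed from a tuple.
old = "Linked the theme's asset directory:\n  %s\nto:\n  %s\n" % (assets_dir, link_dir)

# New: empty {} fields filled positionally by str.format().
new = "Linked the theme's asset directory:\n  {}\nto:\n  {}\n".format(assets_dir, link_dir)

assert old == new
print(new)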

View File

@ -14,7 +14,6 @@
# You should have received a copy of the GNU Affero General Public License # You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function, unicode_literals
import csv import csv
import os import os
@ -84,7 +83,7 @@ def batchaddmedia(args):
abs_metadata_filename = os.path.abspath(metadata_path) abs_metadata_filename = os.path.abspath(metadata_path)
abs_metadata_dir = os.path.dirname(abs_metadata_filename) abs_metadata_dir = os.path.dirname(abs_metadata_filename)
all_metadata = open(abs_metadata_filename, 'r') all_metadata = open(abs_metadata_filename)
media_metadata = csv.DictReader(all_metadata) media_metadata = csv.DictReader(all_metadata)
for index, file_metadata in enumerate(media_metadata): for index, file_metadata in enumerate(media_metadata):
if six.PY2: if six.PY2:
@ -159,7 +158,7 @@ Metadata was not uploaded.""".format(
file_abs_path = os.path.abspath(file_path) file_abs_path = os.path.abspath(file_path)
try: try:
media_file = open(file_abs_path, 'rb') media_file = open(file_abs_path, 'rb')
except IOError: except OSError:
print(_("""\ print(_("""\
FAIL: Local file {filename} could not be accessed. FAIL: Local file {filename} could not be accessed.
{filename} will not be uploaded.""".format(filename=filename))) {filename} will not be uploaded.""".format(filename=filename)))
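The except clause above changes spelling but not behaviour: since Python 3.3, IOError has been an alias of OSError, and 'r' is already open()'s default mode. A quick check (the path is invented):

assert IOError is OSError

try:
    media_file = open('/no/such/file')   # equivalent to open(..., 'r')
except OSError as exc:                   # catches what IOError used to catch
    print('FAIL: could not access file:', exc)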

View File

@ -36,7 +36,7 @@ def dbupdate_parse_setup(subparser):
pass pass
class DatabaseData(object): class DatabaseData:
def __init__(self, name, models, migrations): def __init__(self, name, models, migrations):
self.name = name self.name = name
self.models = models self.models = models
@ -64,34 +64,34 @@ def gather_database_data(plugins):
managed_dbdata.append( managed_dbdata.append(
DatabaseData( DatabaseData(
u'__main__', MAIN_MODELS, MAIN_MIGRATIONS)) '__main__', MAIN_MODELS, MAIN_MIGRATIONS))
for plugin in plugins: for plugin in plugins:
try: try:
models = import_component('{0}.models:MODELS'.format(plugin)) models = import_component('{}.models:MODELS'.format(plugin))
except ImportError as exc: except ImportError as exc:
_log.debug('No models found for {0}: {1}'.format( _log.debug('No models found for {}: {}'.format(
plugin, plugin,
exc)) exc))
models = [] models = []
except AttributeError as exc: except AttributeError as exc:
_log.warning('Could not find MODELS in {0}.models, have you ' _log.warning('Could not find MODELS in {}.models, have you '
'forgotten to add it? ({1})'.format(plugin, exc)) 'forgotten to add it? ({})'.format(plugin, exc))
models = [] models = []
try: try:
migrations = import_component('{0}.migrations:MIGRATIONS'.format( migrations = import_component('{}.migrations:MIGRATIONS'.format(
plugin)) plugin))
except ImportError as exc: except ImportError as exc:
_log.debug('No migrations found for {0}: {1}'.format( _log.debug('No migrations found for {}: {}'.format(
plugin, plugin,
exc)) exc))
migrations = {} migrations = {}
except AttributeError as exc: except AttributeError as exc:
_log.debug('Could not find MIGRATIONS in {0}.migrations, have you ' _log.debug('Could not find MIGRATIONS in {}.migrations, have you '
'forgotten to add it? ({1})'.format(plugin, exc)) 'forgotten to add it? ({})'.format(plugin, exc))
migrations = {} migrations = {}
if models: if models:
@ -106,7 +106,7 @@ def run_foundations(db, global_config):
Gather foundations data and run it. Gather foundations data and run it.
""" """
from mediagoblin.db.models import FOUNDATIONS as MAIN_FOUNDATIONS from mediagoblin.db.models import FOUNDATIONS as MAIN_FOUNDATIONS
all_foundations = [(u"__main__", MAIN_FOUNDATIONS)] all_foundations = [("__main__", MAIN_FOUNDATIONS)]
Session = sessionmaker(bind=db.engine) Session = sessionmaker(bind=db.engine)
session = Session() session = Session()
@ -116,7 +116,7 @@ def run_foundations(db, global_config):
for plugin in plugins: for plugin in plugins:
try: try:
foundations = import_component( foundations = import_component(
'{0}.models:FOUNDATIONS'.format(plugin)) '{}.models:FOUNDATIONS'.format(plugin))
all_foundations.append((plugin, foundations)) all_foundations.append((plugin, foundations))
except ImportError as exc: except ImportError as exc:
continue continue
@ -215,7 +215,7 @@ def sqam_migrations_to_run(db, app_config, global_config):
# was never installed with any migrations # was never installed with any migrations
from mediagoblin.db.models import MigrationData from mediagoblin.db.models import MigrationData
if Session().query(MigrationData).filter_by( if Session().query(MigrationData).filter_by(
name=u"__main__").first() is None: name="__main__").first() is None:
return False return False
# Setup media managers for all dbdata, run init/migrate and print info # Setup media managers for all dbdata, run init/migrate and print info

View File

@ -14,7 +14,6 @@
# You should have received a copy of the GNU Affero General Public License # You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import sys import sys
from mediagoblin.gmg_commands import util as commands_util from mediagoblin.gmg_commands import util as commands_util
@ -28,7 +27,7 @@ def parser_setup(subparser):
def deletemedia(args): def deletemedia(args):
app = commands_util.setup_app(args) app = commands_util.setup_app(args)
media_ids = set([int(mid) for mid in args.media_ids.split(',') if mid.isdigit()]) media_ids = {int(mid) for mid in args.media_ids.split(',') if mid.isdigit()}
if not media_ids: if not media_ids:
print('Can\'t find any valid media ID(s).') print('Can\'t find any valid media ID(s).')
sys.exit(1) sys.exit(1)
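The rewrite above replaces set() over a list comprehension with a set comprehension. A runnable sketch with an invented ID string:

media_ids_arg = '3,17,foo,42'

# Old: materialise a list, then convert it to a set.
media_ids = set([int(mid) for mid in media_ids_arg.split(',') if mid.isdigit()])

# New: a set comprehension builds the set directly.
media_ids = {int(mid) for mid in media_ids_arg.split(',') if mid.isdigit()}

print(media_ids)   # {3, 17, 42} (order may vary)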

View File

@ -14,7 +14,6 @@
# You should have received a copy of the GNU Affero General Public License # You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import argparse import argparse
import os import os
@ -146,7 +145,7 @@ def available(args):
manager = get_processing_manager_for_type(media_type) manager = get_processing_manager_for_type(media_type)
except ProcessingManagerDoesNotExist: except ProcessingManagerDoesNotExist:
entry = MediaEntry.query.filter_by(id=args.id_or_type).first() entry = MediaEntry.query.filter_by(id=args.id_or_type).first()
print('No such processing manager for {0}'.format(entry.media_type)) print('No such processing manager for {}'.format(entry.media_type))
if args.state: if args.state:
processors = manager.list_all_processors_by_state(args.state) processors = manager.list_all_processors_by_state(args.state)
@ -171,7 +170,7 @@ def available(args):
else: else:
for processor in processors: for processor in processors:
if processor.description: if processor.description:
print(" - %s: %s" % (processor.name, processor.description)) print(" - {}: {}".format(processor.name, processor.description))
else: else:
print(" - %s" % processor.name) print(" - %s" % processor.name)
@ -188,11 +187,11 @@ def run(args, media_id=None):
processor_class = manager.get_processor( processor_class = manager.get_processor(
args.reprocess_command, media_entry) args.reprocess_command, media_entry)
except ProcessorDoesNotExist: except ProcessorDoesNotExist:
print('No such processor "%s" for media with id "%s"' % ( print('No such processor "{}" for media with id "{}"'.format(
args.reprocess_command, media_entry.id)) args.reprocess_command, media_entry.id))
return return
except ProcessorNotEligible: except ProcessorNotEligible:
print('Processor "%s" exists but media "%s" is not eligible' % ( print('Processor "{}" exists but media "{}" is not eligible'.format(
args.reprocess_command, media_entry.id)) args.reprocess_command, media_entry.id))
return return
@ -206,7 +205,7 @@ def run(args, media_id=None):
except ProcessingManagerDoesNotExist: except ProcessingManagerDoesNotExist:
entry = MediaEntry.query.filter_by(id=media_id).first() entry = MediaEntry.query.filter_by(id=media_id).first()
print('No such processing manager for {0}'.format(entry.media_type)) print('No such processing manager for {}'.format(entry.media_type))
def bulk_run(args): def bulk_run(args):
@ -236,11 +235,11 @@ def thumbs(args):
processor_class = manager.get_processor( processor_class = manager.get_processor(
'resize', media_entry) 'resize', media_entry)
except ProcessorDoesNotExist: except ProcessorDoesNotExist:
print('No such processor "%s" for media with id "%s"' % ( print('No such processor "{}" for media with id "{}"'.format(
'resize', media_entry.id)) 'resize', media_entry.id))
return return
except ProcessorNotEligible: except ProcessorNotEligible:
print('Processor "%s" exists but media "%s" is not eligible' % ( print('Processor "{}" exists but media "{}" is not eligible'.format(
'resize', media_entry.id)) 'resize', media_entry.id))
return return
@ -248,7 +247,7 @@ def thumbs(args):
# prepare filetype and size to be passed into reprocess_parser # prepare filetype and size to be passed into reprocess_parser
if args.size: if args.size:
extra_args = 'thumb --{0} {1} {2}'.format( extra_args = 'thumb --{} {} {}'.format(
processor_class.thumb_size, processor_class.thumb_size,
args.size[0], args.size[0],
args.size[1]) args.size[1])
@ -263,7 +262,7 @@ def thumbs(args):
reprocess_info=reprocess_request) reprocess_info=reprocess_request)
except ProcessingManagerDoesNotExist: except ProcessingManagerDoesNotExist:
print('No such processing manager for {0}'.format(entry.media_type)) print('No such processing manager for {}'.format(entry.media_type))
def initial(args): def initial(args):
@ -279,7 +278,7 @@ def initial(args):
media_entry, media_entry,
reprocess_action='initial') reprocess_action='initial')
except ProcessingManagerDoesNotExist: except ProcessingManagerDoesNotExist:
print('No such processing manager for {0}'.format(entry.media_type)) print('No such processing manager for {}'.format(entry.media_type))
def reprocess(args): def reprocess(args):

View File

@ -14,12 +14,11 @@
# You should have received a copy of the GNU Affero General Public License # You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
from paste.deploy import loadapp, loadserver from paste.deploy import loadapp, loadserver
class ServeCommand(object): class ServeCommand:
def loadserver(self, server_spec, name, relative_to, **kwargs): def loadserver(self, server_spec, name, relative_to, **kwargs):
return loadserver(server_spec, name=name, relative_to=relative_to, return loadserver(server_spec, name=name, relative_to=relative_to,

View File

@ -14,7 +14,6 @@
# You should have received a copy of the GNU Affero General Public License # You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import sys import sys
@ -41,7 +40,7 @@ def adduser(args):
#TODO: Lets trust admins this do not validate Emails :) #TODO: Lets trust admins this do not validate Emails :)
commands_util.setup_app(args) commands_util.setup_app(args)
args.username = six.text_type(commands_util.prompt_if_not_set(args.username, "Username:")) args.username = str(commands_util.prompt_if_not_set(args.username, "Username:"))
args.password = commands_util.prompt_if_not_set(args.password, "Password:",True) args.password = commands_util.prompt_if_not_set(args.password, "Password:",True)
args.email = commands_util.prompt_if_not_set(args.email, "Email:") args.email = commands_util.prompt_if_not_set(args.email, "Email:")
@ -52,36 +51,36 @@ def adduser(args):
).count() ).count()
if users_with_username: if users_with_username:
print(u'Sorry, a user with that name already exists.') print('Sorry, a user with that name already exists.')
sys.exit(1) sys.exit(1)
else: else:
# Create the user # Create the user
entry = db.LocalUser() entry = db.LocalUser()
entry.username = six.text_type(args.username.lower()) entry.username = str(args.username.lower())
entry.email = six.text_type(args.email) entry.email = str(args.email)
entry.pw_hash = auth.gen_password_hash(args.password) entry.pw_hash = auth.gen_password_hash(args.password)
default_privileges = [ default_privileges = [
db.Privilege.query.filter( db.Privilege.query.filter(
db.Privilege.privilege_name==u'commenter').one(), db.Privilege.privilege_name=='commenter').one(),
db.Privilege.query.filter( db.Privilege.query.filter(
db.Privilege.privilege_name==u'uploader').one(), db.Privilege.privilege_name=='uploader').one(),
db.Privilege.query.filter( db.Privilege.query.filter(
db.Privilege.privilege_name==u'reporter').one(), db.Privilege.privilege_name=='reporter').one(),
db.Privilege.query.filter( db.Privilege.query.filter(
db.Privilege.privilege_name==u'active').one() db.Privilege.privilege_name=='active').one()
] ]
entry.all_privileges = default_privileges entry.all_privileges = default_privileges
entry.save() entry.save()
print(u"User created (and email marked as verified).") print("User created (and email marked as verified).")
def makeadmin_parser_setup(subparser): def makeadmin_parser_setup(subparser):
subparser.add_argument( subparser.add_argument(
'username', 'username',
help="Username to give admin level", help="Username to give admin level",
type=six.text_type) type=str)
def makeadmin(args): def makeadmin(args):
@ -95,12 +94,12 @@ def makeadmin(args):
if user: if user:
user.all_privileges.append( user.all_privileges.append(
db.Privilege.query.filter( db.Privilege.query.filter(
db.Privilege.privilege_name==u'admin').one() db.Privilege.privilege_name=='admin').one()
) )
user.save() user.save()
print(u'The user %s is now an admin.' % args.username) print('The user %s is now an admin.' % args.username)
else: else:
print(u'The user %s doesn\'t exist.' % args.username) print('The user %s doesn\'t exist.' % args.username)
sys.exit(1) sys.exit(1)
@ -108,7 +107,7 @@ def changepw_parser_setup(subparser):
subparser.add_argument( subparser.add_argument(
'username', 'username',
help="Username used to login", help="Username used to login",
type=six.text_type) type=str)
subparser.add_argument( subparser.add_argument(
'password', 'password',
help="Your NEW supersecret word to login") help="Your NEW supersecret word to login")
@ -125,9 +124,9 @@ def changepw(args):
if user: if user:
user.pw_hash = auth.gen_password_hash(args.password) user.pw_hash = auth.gen_password_hash(args.password)
user.save() user.save()
print(u'Password successfully changed for user %s.' % args.username) print('Password successfully changed for user %s.' % args.username)
else: else:
print(u'The user %s doesn\'t exist.' % args.username) print('The user %s doesn\'t exist.' % args.username)
sys.exit(1) sys.exit(1)
@ -135,7 +134,7 @@ def deleteuser_parser_setup(subparser):
subparser.add_argument( subparser.add_argument(
'username', 'username',
help="Username to delete", help="Username to delete",
type=six.text_type) type=str)
def deleteuser(args): def deleteuser(args):
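six.text_type was only ever an alias for the text type, so the argparse converters above become plain str. A minimal sketch with a made-up parser:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('username', help="Username to delete", type=str)
print(parser.parse_args(['goblin']))   # Namespace(username='goblin')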

View File

@ -35,8 +35,8 @@ def prompt_if_not_set(variable, text, password=False):
""" """
if variable is None: if variable is None:
if not password: if not password:
variable = six.moves.input(text + u' ') variable = six.moves.input(text + ' ')
else: else:
variable=getpass.getpass(text + u' ') variable=getpass.getpass(text + ' ')
return variable return variable

View File

@ -112,8 +112,8 @@ def get_jinja_loader(user_template_path=None, current_theme=None,
# Add plugin template paths next--takes precedence over # Add plugin template paths next--takes precedence over
# core templates. # core templates.
if plugin_template_paths is not None: if plugin_template_paths is not None:
path_list.extend((jinja2.FileSystemLoader(path) path_list.extend(jinja2.FileSystemLoader(path)
for path in plugin_template_paths)) for path in plugin_template_paths)
# Add core templates last. # Add core templates last.
path_list.append(jinja2.PackageLoader('mediagoblin', 'templates')) path_list.append(jinja2.PackageLoader('mediagoblin', 'templates'))
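The parentheses removed above were redundant: a generator expression passed as the sole argument needs no extra wrapping. A tiny sketch with stand-in values instead of jinja2 loaders:

plugin_template_paths = ['/tmp/plugin_a/templates', '/tmp/plugin_b/templates']

old_style = []
old_style.extend((path.upper() for path in plugin_template_paths))   # doubled parentheses

new_style = []
new_style.extend(path.upper() for path in plugin_template_paths)     # generator alone

assert old_style == new_style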
@ -133,7 +133,7 @@ def get_staticdirector(app_config):
# Let plugins load additional paths # Let plugins load additional paths
for plugin_static in hook_runall("static_setup"): for plugin_static in hook_runall("static_setup"):
direct_domains[plugin_static.name] = "%s/%s" % ( direct_domains[plugin_static.name] = "{}/{}".format(
app_config['plugin_web_path'].rstrip('/'), app_config['plugin_web_path'].rstrip('/'),
plugin_static.name) plugin_static.name)

View File

@ -58,7 +58,7 @@ def get_celery_settings_dict(app_config, global_config,
celery_settings = {} celery_settings = {}
# Add all celery settings from config # Add all celery settings from config
for key, value in six.iteritems(celery_conf): for key, value in celery_conf.items():
celery_settings[key] = value celery_settings[key] = value
# TODO: use default result stuff here if it exists # TODO: use default result stuff here if it exists
@ -122,7 +122,7 @@ def setup_celery_from_config(app_config, global_config,
__import__(settings_module) __import__(settings_module)
this_module = sys.modules[settings_module] this_module = sys.modules[settings_module]
for key, value in six.iteritems(celery_settings): for key, value in celery_settings.items():
setattr(this_module, key, value) setattr(this_module, key, value)
if set_environ: if set_environ:
@ -132,8 +132,8 @@ def setup_celery_from_config(app_config, global_config,
# initiated # initiated
from celery import current_app from celery import current_app
_log.info('Setting celery configuration from object "{0}"'.format( _log.info('Setting celery configuration from object "{}"'.format(
settings_module)) settings_module))
current_app.config_from_object(this_module) current_app.config_from_object(this_module)
_log.debug('Celery broker host: {0}'.format(current_app.conf['BROKER_HOST'])) _log.debug('Celery broker host: {}'.format(current_app.conf['BROKER_HOST']))

View File

@ -42,7 +42,7 @@ def setup_logging_from_paste_ini(loglevel, **kw):
'PASTE_CONFIG', logging_conf_file) 'PASTE_CONFIG', logging_conf_file)
if not os.path.exists(logging_conf_file): if not os.path.exists(logging_conf_file):
raise IOError('{0} does not exist. Logging can not be set up.'.format( raise OSError('{} does not exist. Logging can not be set up.'.format(
logging_conf_file)) logging_conf_file))
logging.config.fileConfig(logging_conf_file) logging.config.fileConfig(logging_conf_file)
@ -78,7 +78,7 @@ def setup_self(check_environ_for_conf=True, module_name=OUR_MODULENAME,
mgoblin_conf_file = default_conf_file mgoblin_conf_file = default_conf_file
if not os.path.exists(mgoblin_conf_file): if not os.path.exists(mgoblin_conf_file):
raise IOError( raise OSError(
"MEDIAGOBLIN_CONFIG not set or file does not exist") "MEDIAGOBLIN_CONFIG not set or file does not exist")
# By setting the environment variable here we should ensure that # By setting the environment variable here we should ensure that

View File

@ -149,7 +149,7 @@ def read_mediagoblin_config(config_path, config_spec_path=CONFIG_SPEC_PATH):
return config, validation_result return config, validation_result
REPORT_HEADER = u"""\ REPORT_HEADER = """\
There were validation problems loading this config file: There were validation problems loading this config file:
-------------------------------------------------------- --------------------------------------------------------
""" """
@ -173,17 +173,17 @@ def generate_validation_report(config, validation_result):
if key is not None: if key is not None:
section_list.append(key) section_list.append(key)
else: else:
section_list.append(u'[missing section]') section_list.append('[missing section]')
section_string = u':'.join(section_list) section_string = ':'.join(section_list)
if error == False: if error == False:
# We don't care about missing values for now. # We don't care about missing values for now.
continue continue
report.append(u"%s = %s" % (section_string, error)) report.append("{} = {}".format(section_string, error))
if report: if report:
return REPORT_HEADER + u"\n".join(report) return REPORT_HEADER + "\n".join(report)
else: else:
return None return None

View File

@ -45,7 +45,7 @@ def _get_tag_name_from_entries(media_entries, tag_slug):
@uses_pagination @uses_pagination
def tag_listing(request, page): def tag_listing(request, page):
"""'Gallery'/listing for this tag slug""" """'Gallery'/listing for this tag slug"""
tag_slug = request.matchdict[u'tag'] tag_slug = request.matchdict['tag']
cursor = media_entries_for_tag_slug(request.db, tag_slug) cursor = media_entries_for_tag_slug(request.db, tag_slug)
cursor = cursor.order_by(MediaEntry.created.desc()) cursor = cursor.order_by(MediaEntry.created.desc())
@ -71,7 +71,7 @@ def atom_feed(request):
""" """
generates the atom feed with the tag images generates the atom feed with the tag images
""" """
tag_slug = request.matchdict.get(u'tag') tag_slug = request.matchdict.get('tag')
feed_title = "MediaGoblin Feed" feed_title = "MediaGoblin Feed"
if tag_slug: if tag_slug:
feed_title += " for tag '%s'" % tag_slug feed_title += " for tag '%s'" % tag_slug
@ -81,7 +81,7 @@ def atom_feed(request):
else: # all recent item feed else: # all recent item feed
feed_title += " for all recent items" feed_title += " for all recent items"
link = request.urlgen('index', qualified=True) link = request.urlgen('index', qualified=True)
cursor = MediaEntry.query.filter_by(state=u'processed') cursor = MediaEntry.query.filter_by(state='processed')
cursor = cursor.order_by(MediaEntry.created.desc()) cursor = cursor.order_by(MediaEntry.created.desc())
cursor = cursor.limit(ATOM_DEFAULT_NR_OF_UPDATED_ITEMS) cursor = cursor.limit(ATOM_DEFAULT_NR_OF_UPDATED_ITEMS)
@ -110,7 +110,7 @@ def atom_feed(request):
# Include a thumbnail image in content. # Include a thumbnail image in content.
file_urls = get_media_file_paths(entry.media_files, request.urlgen) file_urls = get_media_file_paths(entry.media_files, request.urlgen)
if 'thumb' in file_urls: if 'thumb' in file_urls:
content = u'<img src="{thumb}" alt="" /> {desc}'.format( content = '<img src="{thumb}" alt="" /> {desc}'.format(
thumb=file_urls['thumb'], desc=entry.description_html) thumb=file_urls['thumb'], desc=entry.description_html)
else: else:
content = entry.description_html content = entry.description_html

View File

@ -19,7 +19,7 @@ ENABLED_MEDDLEWARE = [
] ]
class BaseMeddleware(object): class BaseMeddleware:
def __init__(self, mg_app): def __init__(self, mg_app):
self.app = mg_app self.app = mg_app

View File

@ -116,7 +116,7 @@ class CsrfMeddleware(BaseMeddleware):
def _make_token(self, request): def _make_token(self, request):
"""Generate a new token to use for CSRF protection.""" """Generate a new token to use for CSRF protection."""
return "%s" % (getrandbits(self.CSRF_KEYLEN),) return "{}".format(getrandbits(self.CSRF_KEYLEN))
def verify_tokens(self, request): def verify_tokens(self, request):
"""Verify that the CSRF Cookie exists and that it matches the """Verify that the CSRF Cookie exists and that it matches the

View File

@ -39,7 +39,7 @@ class MissingComponents(FileTypeNotSupported):
pass pass
class MediaManagerBase(object): class MediaManagerBase:
"Base class for all media managers" "Base class for all media managers"
# Please override in actual media managers # Please override in actual media managers
@ -68,13 +68,13 @@ def sniff_media_contents(media_file, filename):
''' '''
media_type = hook_handle('sniff_handler', media_file, filename) media_type = hook_handle('sniff_handler', media_file, filename)
if media_type: if media_type:
_log.info('{0} accepts the file'.format(media_type)) _log.info('{} accepts the file'.format(media_type))
return media_type, hook_handle(('media_manager', media_type)) return media_type, hook_handle(('media_manager', media_type))
else: else:
_log.debug('{0} did not accept the file'.format(media_type)) _log.debug('{} did not accept the file'.format(media_type))
raise FileTypeNotSupported( raise FileTypeNotSupported(
# TODO: Provide information on which file types are supported # TODO: Provide information on which file types are supported
_(u'Sorry, I don\'t support that file type :(')) _('Sorry, I don\'t support that file type :('))
def get_media_type_and_manager(filename): def get_media_type_and_manager(filename):
''' '''
@ -93,11 +93,11 @@ def get_media_type_and_manager(filename):
if hook_handle('get_media_type_and_manager', ext[1:]): if hook_handle('get_media_type_and_manager', ext[1:]):
return hook_handle('get_media_type_and_manager', ext[1:]) return hook_handle('get_media_type_and_manager', ext[1:])
else: else:
_log.info('File {0} has no file extension, let\'s hope the sniffers get it.'.format( _log.info('File {} has no file extension, let\'s hope the sniffers get it.'.format(
filename)) filename))
raise TypeNotFound( raise TypeNotFound(
_(u'Sorry, I don\'t support that file type :(')) _('Sorry, I don\'t support that file type :('))
def type_match_handler(media_file, filename): def type_match_handler(media_file, filename):
'''Check media file by name and then by content '''Check media file by name and then by content
@ -129,11 +129,11 @@ def type_match_handler(media_file, filename):
_log.debug(e) _log.debug(e)
raise raise
else: else:
_log.info('No plugins handled extension {0}'.format(ext)) _log.info('No plugins handled extension {}'.format(ext))
else: else:
_log.info('File {0} has no known file extension, let\'s hope ' _log.info('File {} has no known file extension, let\'s hope '
'the sniffers get it.'.format(filename)) 'the sniffers get it.'.format(filename))
raise TypeNotFound(_(u'Sorry, I don\'t support that file type :(')) raise TypeNotFound(_('Sorry, I don\'t support that file type :('))
def sniff_media(media_file, filename): def sniff_media(media_file, filename):

View File

@ -29,7 +29,7 @@ import os
_log = logging.getLogger(__name__) _log = logging.getLogger(__name__)
class AsciiToImage(object): class AsciiToImage:
''' '''
Converter of ASCII art into image files, preserving whitespace Converter of ASCII art into image files, preserving whitespace
@ -51,7 +51,7 @@ class AsciiToImage(object):
self._font_size, self._font_size,
encoding='unic') encoding='unic')
_log.info('Font set to {0}, size {1}'.format( _log.info('Font set to {}, size {}'.format(
self._font, self._font,
self._font_size)) self._font_size))
@ -68,7 +68,7 @@ class AsciiToImage(object):
# PIL's Image.save will handle both file-likes and paths # PIL's Image.save will handle both file-likes and paths
if im.save(destination): if im.save(destination):
_log.info('Saved image in {0}'.format( _log.info('Saved image in {}'.format(
destination)) destination))
def _create_image(self, text): def _create_image(self, text):
@ -93,7 +93,7 @@ class AsciiToImage(object):
max(line_lengths) * self._if_dims[0], max(line_lengths) * self._if_dims[0],
len(line_lengths) * self._if_dims[1]) len(line_lengths) * self._if_dims[1])
_log.info('Destination image dimensions will be {0}'.format( _log.info('Destination image dimensions will be {}'.format(
im_dims)) im_dims))
im = Image.new( im = Image.new(
@ -108,14 +108,14 @@ class AsciiToImage(object):
for line in lines: for line in lines:
line_length = len(line) line_length = len(line)
_log.debug('Writing line at {0}'.format(char_pos)) _log.debug('Writing line at {}'.format(char_pos))
for _pos in range(0, line_length): for _pos in range(0, line_length):
char = line[_pos] char = line[_pos]
px_pos = self._px_pos(char_pos) px_pos = self._px_pos(char_pos)
_log.debug('Writing character "{0}" at {1} (px pos {2})'.format( _log.debug('Writing character "{}" at {} (px pos {})'.format(
char.encode('ascii', 'replace'), char.encode('ascii', 'replace'),
char_pos, char_pos,
px_pos)) px_pos))

View File

@ -39,7 +39,7 @@ MEDIA_TYPE = 'mediagoblin.media_types.ascii'
def sniff_handler(media_file, filename): def sniff_handler(media_file, filename):
_log.info('Sniffing {0}'.format(MEDIA_TYPE)) _log.info('Sniffing {}'.format(MEDIA_TYPE))
name, ext = os.path.splitext(filename) name, ext = os.path.splitext(filename)
clean_ext = ext[1:].lower() clean_ext = ext[1:].lower()
@ -87,7 +87,7 @@ class CommonAsciiProcessor(MediaProcessor):
else: else:
self.charset = d_charset['encoding'] self.charset = d_charset['encoding']
_log.info('Charset detected: {0}\nWill interpret as: {1}'.format( _log.info('Charset detected: {}\nWill interpret as: {}'.format(
d_charset, d_charset,
self.charset)) self.charset))
@ -106,7 +106,7 @@ class CommonAsciiProcessor(MediaProcessor):
# Encode the unicode instance to ASCII and replace any # Encode the unicode instance to ASCII and replace any
# non-ASCII with an HTML entity (&# # non-ASCII with an HTML entity (&#
unicode_file.write( unicode_file.write(
six.text_type(orig_file.read().decode( str(orig_file.read().decode(
self.charset)).encode( self.charset)).encode(
'ascii', 'ascii',
'xmlcharrefreplace')) 'xmlcharrefreplace'))
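Worth noting for the hunk above: pyupgrade only renames six.text_type to str, so the wrapper call survives even though bytes.decode() already returns str on Python 3. A small sketch with invented data:

raw = 'Hello, ☃'.encode('utf-8')        # stand-in for orig_file.read()
charset = 'utf-8'

decoded = raw.decode(charset)            # already a str on Python 3
assert str(decoded) == decoded           # the outer str() call is a no-op

print(decoded.encode('ascii', 'xmlcharrefreplace'))   # b'Hello, &#9731;'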
@ -270,6 +270,6 @@ class Resizer(CommonAsciiProcessor):
class AsciiProcessingManager(ProcessingManager): class AsciiProcessingManager(ProcessingManager):
def __init__(self): def __init__(self):
super(AsciiProcessingManager, self).__init__() super().__init__()
self.add_processor(InitialProcessor) self.add_processor(InitialProcessor)
self.add_processor(Resizer) self.add_processor(Resizer)

View File

@ -282,7 +282,7 @@ if __name__ == "__main__":
sys.stdout.flush() sys.stdout.flush()
if not (len(sys.argv) == 2 or len(sys.argv) == 3): if not (len(sys.argv) == 2 or len(sys.argv) == 3):
print("Usage:\n{0} input_file [output_file]".format(sys.argv[0])) print("Usage:\n{} input_file [output_file]".format(sys.argv[0]))
exit() exit()
audioFile = sys.argv[1] audioFile = sys.argv[1]
@ -292,6 +292,6 @@ if __name__ == "__main__":
else: else:
outputFile = 'spectrogram.png' outputFile = 'spectrogram.png'
sys.stdout.write("Input : {0}\nOutput : {1}\n".format(audioFile, outputFile)) sys.stdout.write("Input : {}\nOutput : {}\n".format(audioFile, outputFile))
drawSpectrogram(audioFile, outputFile, progressCallback = printProgress) drawSpectrogram(audioFile, outputFile, progressCallback = printProgress)
sys.stdout.write("\nDone!\n") sys.stdout.write("\nDone!\n")

View File

@ -37,11 +37,11 @@ MEDIA_TYPE = 'mediagoblin.media_types.audio'
def sniff_handler(media_file, filename): def sniff_handler(media_file, filename):
_log.info('Sniffing {0}'.format(MEDIA_TYPE)) _log.info('Sniffing {}'.format(MEDIA_TYPE))
try: try:
data = discover(media_file.name) data = discover(media_file.name)
except Exception as e: except Exception as e:
_log.info(six.text_type(e)) _log.info(str(e))
return None return None
if data and data.get_audio_streams() and not data.get_video_streams(): if data and data.get_audio_streams() and not data.get_video_streams():
return MEDIA_TYPE return MEDIA_TYPE
@ -361,7 +361,7 @@ class Transcoder(CommonAudioProcessor):
class AudioProcessingManager(ProcessingManager): class AudioProcessingManager(ProcessingManager):
def __init__(self): def __init__(self):
super(AudioProcessingManager, self).__init__() super().__init__()
self.add_processor(InitialProcessor) self.add_processor(InitialProcessor)
self.add_processor(Resizer) self.add_processor(Resizer)
self.add_processor(Transcoder) self.add_processor(Transcoder)

View File

@ -43,9 +43,9 @@ gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst from gi.repository import GObject, Gst
Gst.init(None) Gst.init(None)
class Python3AudioThumbnailer(object): class Python3AudioThumbnailer:
def __init__(self): def __init__(self):
_log.info('Initializing {0}'.format(self.__class__.__name__)) _log.info('Initializing {}'.format(self.__class__.__name__))
def spectrogram(self, src, dst, **kw): def spectrogram(self, src, dst, **kw):
from mediagoblin.media_types.audio import audiotospectrogram from mediagoblin.media_types.audio import audiotospectrogram
@ -83,9 +83,9 @@ class Python3AudioThumbnailer(object):
AudioThumbnailer = Python3AudioThumbnailer AudioThumbnailer = Python3AudioThumbnailer
class AudioTranscoder(object): class AudioTranscoder:
def __init__(self): def __init__(self):
_log.info('Initializing {0}'.format(self.__class__.__name__)) _log.info('Initializing {}'.format(self.__class__.__name__))
# Instantiate MainLoop # Instantiate MainLoop
self._loop = GObject.MainLoop() self._loop = GObject.MainLoop()
@ -96,10 +96,10 @@ class AudioTranscoder(object):
def _on_pad_added(element, pad, connect_to): def _on_pad_added(element, pad, connect_to):
caps = pad.query_caps(None) caps = pad.query_caps(None)
name = caps.to_string() name = caps.to_string()
_log.debug('on_pad_added: {0}'.format(name)) _log.debug('on_pad_added: {}'.format(name))
if name.startswith('audio') and not connect_to.is_linked(): if name.startswith('audio') and not connect_to.is_linked():
pad.link(connect_to) pad.link(connect_to)
_log.info('Transcoding {0} into {1}'.format(src, dst)) _log.info('Transcoding {} into {}'.format(src, dst))
self.__on_progress = progress_callback self.__on_progress = progress_callback
# Set up pipeline # Set up pipeline
tolerance = 80000000 tolerance = 80000000
@ -155,7 +155,7 @@ class AudioTranscoder(object):
(success, percent) = structure.get_int('percent') (success, percent) = structure.get_int('percent')
if self.__on_progress and success: if self.__on_progress and success:
self.__on_progress(percent) self.__on_progress(percent)
_log.info('{0}% done...'.format(percent)) _log.info('{}% done...'.format(percent))
elif message.type == Gst.MessageType.EOS: elif message.type == Gst.MessageType.EOS:
_log.info('Done') _log.info('Done')
self.halt() self.halt()

View File

@ -24,15 +24,15 @@ def check_blog_slug_used(author_id, slug, ignore_b_id=None):
return does_exist return does_exist
def may_edit_blogpost(request, blog): def may_edit_blogpost(request, blog):
if request.user.has_privilege(u'admin') or request.user.id == blog.author: if request.user.has_privilege('admin') or request.user.id == blog.author:
return True return True
return False return False
def set_blogpost_state(request, blogpost): def set_blogpost_state(request, blogpost):
if request.form['status'] == 'Publish': if request.form['status'] == 'Publish':
blogpost.state = u'processed' blogpost.state = 'processed'
else: else:
blogpost.state = u'failed' blogpost.state = 'failed'
def get_all_blogposts_of_blog(request, blog, state=None): def get_all_blogposts_of_blog(request, blog, state=None):
blog_posts_list = [] blog_posts_list = []

View File

@ -48,7 +48,7 @@ class Blog(Base, BlogMixin):
@property @property
def slug_or_id(self): def slug_or_id(self):
return (self.slug or u'blog_{0}'.format(self.id)) return (self.slug or 'blog_{}'.format(self.id))
def get_all_blog_posts(self, state=None): def get_all_blog_posts(self, state=None):
blog_posts = Session.query(MediaEntry).join(BlogPostData)\ blog_posts = Session.query(MediaEntry).join(BlogPostData)\
@ -63,7 +63,7 @@ class Blog(Base, BlogMixin):
post.delete(del_orphan_tags=False, commit=False) post.delete(del_orphan_tags=False, commit=False)
from mediagoblin.db.util import clean_orphan_tags from mediagoblin.db.util import clean_orphan_tags
clean_orphan_tags(commit=False) clean_orphan_tags(commit=False)
super(Blog, self).delete(**kwargs) super().delete(**kwargs)

View File

@ -76,8 +76,8 @@ def blog_edit(request):
if request.method=='POST' and form.validate(): if request.method=='POST' and form.validate():
_log.info("Here") _log.info("Here")
blog = request.db.Blog() blog = request.db.Blog()
blog.title = six.text_type(form.title.data) blog.title = str(form.title.data)
blog.description = six.text_type(cleaned_markdown_conversion((form.description.data))) blog.description = str(cleaned_markdown_conversion(form.description.data))
blog.author = request.user.id blog.author = request.user.id
blog.generate_slug() blog.generate_slug()
@ -115,8 +115,8 @@ def blog_edit(request):
'app_config': mg_globals.app_config}) 'app_config': mg_globals.app_config})
else: else:
if request.method == 'POST' and form.validate(): if request.method == 'POST' and form.validate():
blog.title = six.text_type(form.title.data) blog.title = str(form.title.data)
blog.description = six.text_type(cleaned_markdown_conversion((form.description.data))) blog.description = str(cleaned_markdown_conversion(form.description.data))
blog.author = request.user.id blog.author = request.user.id
blog.generate_slug() blog.generate_slug()
@ -143,10 +143,10 @@ def blogpost_create(request):
blogpost = request.db.MediaEntry() blogpost = request.db.MediaEntry()
blogpost.media_type = 'mediagoblin.media_types.blogpost' blogpost.media_type = 'mediagoblin.media_types.blogpost'
blogpost.title = six.text_type(form.title.data) blogpost.title = str(form.title.data)
blogpost.description = six.text_type(cleaned_markdown_conversion((form.description.data))) blogpost.description = str(cleaned_markdown_conversion(form.description.data))
blogpost.tags = convert_to_tag_list_of_dicts(form.tags.data) blogpost.tags = convert_to_tag_list_of_dicts(form.tags.data)
blogpost.license = six.text_type(form.license.data) or None blogpost.license = str(form.license.data) or None
blogpost.actor = request.user.id blogpost.actor = request.user.id
blogpost.generate_slug() blogpost.generate_slug()
@ -196,10 +196,10 @@ def blogpost_edit(request):
form = blog_forms.BlogPostEditForm(request.form, **defaults) form = blog_forms.BlogPostEditForm(request.form, **defaults)
if request.method == 'POST' and form.validate(): if request.method == 'POST' and form.validate():
blogpost.title = six.text_type(form.title.data) blogpost.title = str(form.title.data)
blogpost.description = six.text_type(cleaned_markdown_conversion((form.description.data))) blogpost.description = str(cleaned_markdown_conversion(form.description.data))
blogpost.tags = convert_to_tag_list_of_dicts(form.tags.data) blogpost.tags = convert_to_tag_list_of_dicts(form.tags.data)
blogpost.license = six.text_type(form.license.data) blogpost.license = str(form.license.data)
set_blogpost_state(request, blogpost) set_blogpost_state(request, blogpost)
blogpost.generate_slug() blogpost.generate_slug()
blogpost.save() blogpost.save()
@ -233,7 +233,7 @@ def blog_dashboard(request, page, url_user=None):
blogs = request.db.Blog.query.filter_by(author=url_user.id) blogs = request.db.Blog.query.filter_by(author=url_user.id)
config = pluginapi.get_config('mediagoblin.media_types.blog') config = pluginapi.get_config('mediagoblin.media_types.blog')
max_blog_count = config['max_blog_count'] max_blog_count = config['max_blog_count']
if request.user and (request.user.id == url_user.id or request.user.has_privilege(u'admin')): if request.user and (request.user.id == url_user.id or request.user.has_privilege('admin')):
if blog_slug: if blog_slug:
blog = get_blog_by_slug(request, blog_slug) blog = get_blog_by_slug(request, blog_slug)
if not blog: if not blog:
@ -276,7 +276,7 @@ def blog_post_listing(request, page, url_user=None):
if not blog: if not blog:
return render_404(request) return render_404(request)
all_blog_posts = blog.get_all_blog_posts(u'processed').order_by(MediaEntry.created.desc()) all_blog_posts = blog.get_all_blog_posts('processed').order_by(MediaEntry.created.desc())
pagination = Pagination(page, all_blog_posts) pagination = Pagination(page, all_blog_posts)
pagination.per_page = 8 pagination.per_page = 8
blog_posts_on_a_page = pagination() blog_posts_on_a_page = pagination()
@ -297,7 +297,7 @@ def draft_view(request):
blog_post_slug = request.matchdict.get('blog_post_slug', None) blog_post_slug = request.matchdict.get('blog_post_slug', None)
user = request.matchdict.get('user') user = request.matchdict.get('user')
blog = get_blog_by_slug(request, blog_slug, author=request.user.id) blog = get_blog_by_slug(request, blog_slug, author=request.user.id)
blogpost = request.db.MediaEntry.query.filter_by(state = u'failed', actor=request.user.id, slug=blog_post_slug).first() blogpost = request.db.MediaEntry.query.filter_by(state = 'failed', actor=request.user.id, slug=blog_post_slug).first()
if not blog or not blogpost: if not blog or not blogpost:
return render_404(request) return render_404(request)
@ -326,7 +326,7 @@ def blog_delete(request, **kwargs):
return render_404(request) return render_404(request)
form = blog_forms.ConfirmDeleteForm(request.form) form = blog_forms.ConfirmDeleteForm(request.form)
if request.user.id == blog.author or request.user.has_privilege(u'admin'): if request.user.id == blog.author or request.user.has_privilege('admin'):
if request.method == 'POST' and form.validate(): if request.method == 'POST' and form.validate():
if form.confirm.data is True: if form.confirm.data is True:
blog.delete() blog.delete()
@ -345,7 +345,7 @@ def blog_delete(request, **kwargs):
return redirect(request, "mediagoblin.media_types.blog.blog_admin_dashboard", return redirect(request, "mediagoblin.media_types.blog.blog_admin_dashboard",
user=request.user.username) user=request.user.username)
else: else:
if request.user.has_privilege(u'admin'): if request.user.has_privilege('admin'):
messages.add_message( messages.add_message(
request, request,
messages.WARNING, messages.WARNING,
@ -384,7 +384,7 @@ def blog_about_view(request):
return render_404(request) return render_404(request)
else: else:
blog_posts_processed = blog.get_all_blog_posts(u'processed').count() blog_posts_processed = blog.get_all_blog_posts('processed').count()
return render_to_response( return render_to_response(
request, request,
'mediagoblin/blog/blog_about.html', 'mediagoblin/blog/blog_about.html',

View File

@ -32,7 +32,7 @@ class ImageMediaManager(MediaManagerBase):
display_template = "mediagoblin/media_displays/image.html" display_template = "mediagoblin/media_displays/image.html"
default_thumb = "images/media_thumbs/image.png" default_thumb = "images/media_thumbs/image.png"
media_fetch_order = [u'medium', u'original', u'thumb'] media_fetch_order = ['medium', 'original', 'thumb']
def get_original_date(self): def get_original_date(self):
""" """

View File

@ -14,7 +14,6 @@
# You should have received a copy of the GNU Affero General Public License # You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
try: try:
from PIL import Image from PIL import Image
@ -69,9 +68,9 @@ def resize_image(entry, resized, keyname, target_name, new_size,
try: try:
resize_filter = PIL_FILTERS[filter.upper()] resize_filter = PIL_FILTERS[filter.upper()]
except KeyError: except KeyError:
raise Exception('Filter "{0}" not found, choose one of {1}'.format( raise Exception('Filter "{}" not found, choose one of {}'.format(
six.text_type(filter), str(filter),
u', '.join(PIL_FILTERS.keys()))) ', '.join(PIL_FILTERS.keys())))
resized.thumbnail(new_size, resize_filter) resized.thumbnail(new_size, resize_filter)
@ -101,8 +100,8 @@ def resize_tool(entry,
# If thumb or medium is already the same quality and size, then don't # If thumb or medium is already the same quality and size, then don't
# reprocess # reprocess
if _skip_resizing(entry, keyname, new_size, quality, filter): if _skip_resizing(entry, keyname, new_size, quality, filter):
_log.info('{0} of same size and quality already in use, skipping ' _log.info('{} of same size and quality already in use, skipping '
'resizing of media {1}.'.format(keyname, entry.id)) 'resizing of media {}.'.format(keyname, entry.id))
return return
# If the size of the original file exceeds the specified size for the desired # If the size of the original file exceeds the specified size for the desired
@ -111,14 +110,14 @@ def resize_tool(entry,
# Also created if the file needs rotation, or if forced. # Also created if the file needs rotation, or if forced.
try: try:
im = Image.open(orig_file) im = Image.open(orig_file)
except IOError: except OSError:
raise BadMediaFail() raise BadMediaFail()
if force \ if force \
or im.size[0] > new_size[0]\ or im.size[0] > new_size[0]\
or im.size[1] > new_size[1]\ or im.size[1] > new_size[1]\
or exif_image_needs_rotation(exif_tags): or exif_image_needs_rotation(exif_tags):
resize_image( resize_image(
entry, im, six.text_type(keyname), target_name, entry, im, str(keyname), target_name,
tuple(new_size), tuple(new_size),
exif_tags, conversions_subdir, exif_tags, conversions_subdir,
quality, filter) quality, filter)
@ -154,7 +153,7 @@ SUPPORTED_FILETYPES = ['png', 'gif', 'jpg', 'jpeg', 'tiff']
def sniff_handler(media_file, filename): def sniff_handler(media_file, filename):
_log.info('Sniffing {0}'.format(MEDIA_TYPE)) _log.info('Sniffing {}'.format(MEDIA_TYPE))
name, ext = os.path.splitext(filename) name, ext = os.path.splitext(filename)
clean_ext = ext[1:].lower() # Strip the . from ext and make lowercase clean_ext = ext[1:].lower() # Strip the . from ext and make lowercase
@ -162,7 +161,7 @@ def sniff_handler(media_file, filename):
_log.info('Found file extension in supported filetypes') _log.info('Found file extension in supported filetypes')
return MEDIA_TYPE return MEDIA_TYPE
else: else:
_log.debug('Media present, extension not found in {0}'.format( _log.debug('Media present, extension not found in {}'.format(
SUPPORTED_FILETYPES)) SUPPORTED_FILETYPES))
return None return None
@ -241,7 +240,7 @@ class CommonImageProcessor(MediaProcessor):
# Extract file metadata # Extract file metadata
try: try:
im = Image.open(self.process_filename) im = Image.open(self.process_filename)
except IOError: except OSError:
raise BadMediaFail() raise BadMediaFail()
metadata = { metadata = {
@ -426,7 +425,7 @@ class MetadataProcessing(CommonImageProcessor):
class ImageProcessingManager(ProcessingManager): class ImageProcessingManager(ProcessingManager):
def __init__(self): def __init__(self):
super(ImageProcessingManager, self).__init__() super().__init__()
self.add_processor(InitialProcessor) self.add_processor(InitialProcessor)
self.add_processor(Resizer) self.add_processor(Resizer)
self.add_processor(MetadataProcessing) self.add_processor(MetadataProcessing)
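The super(ImageProcessingManager, self).__init__() calls collapse to the zero-argument form, which Python 3 resolves from the enclosing class (PEP 3135). A minimal sketch with hypothetical class names:

    class Manager:
        def __init__(self):
            self.processors = []

    class ImageManagerExample(Manager):        # hypothetical stand-in
        def __init__(self):
            super().__init__()                 # same as super(ImageManagerExample, self).__init__()

    assert ImageManagerExample().processors == []
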

View File

@ -169,7 +169,7 @@ def check_prerequisites():
return True return True
def sniff_handler(media_file, filename): def sniff_handler(media_file, filename):
_log.info('Sniffing {0}'.format(MEDIA_TYPE)) _log.info('Sniffing {}'.format(MEDIA_TYPE))
if not check_prerequisites(): if not check_prerequisites():
return None return None
@ -185,7 +185,7 @@ def create_pdf_thumb(original, thumb_filename, width, height):
executable = where('pdftocairo') executable = where('pdftocairo')
args = [executable, '-scale-to', str(min(width, height)), args = [executable, '-scale-to', str(min(width, height)),
'-singlefile', '-png', original, thumb_filename] '-singlefile', '-png', original, thumb_filename]
_log.debug('calling {0}'.format(repr(' '.join(args)))) _log.debug('calling {}'.format(repr(' '.join(args))))
Popen(executable=executable, args=args).wait() Popen(executable=executable, args=args).wait()
def pdf_info(original): def pdf_info(original):
@ -303,7 +303,7 @@ class CommonPdfProcessor(MediaProcessor):
args = [executable, '-scale-to', str(min(thumb_size)), args = [executable, '-scale-to', str(min(thumb_size)),
'-singlefile', '-png', self.pdf_filename, thumb_filename] '-singlefile', '-png', self.pdf_filename, thumb_filename]
_log.debug('calling {0}'.format(repr(' '.join(args)))) _log.debug('calling {}'.format(repr(' '.join(args))))
Popen(executable=executable, args=args).wait() Popen(executable=executable, args=args).wait()
# since pdftocairo added '.png', we need to include it with the # since pdftocairo added '.png', we need to include it with the
@ -355,7 +355,7 @@ class CommonPdfProcessor(MediaProcessor):
args = [executable, '-scale-to', str(min(size)), args = [executable, '-scale-to', str(min(size)),
'-singlefile', '-png', self.pdf_filename, filename] '-singlefile', '-png', self.pdf_filename, filename]
_log.debug('calling {0}'.format(repr(' '.join(args)))) _log.debug('calling {}'.format(repr(' '.join(args))))
Popen(executable=executable, args=args).wait() Popen(executable=executable, args=args).wait()
# since pdftocairo added '.png', we need to include it with the # since pdftocairo added '.png', we need to include it with the
@ -467,6 +467,6 @@ class Resizer(CommonPdfProcessor):
class PdfProcessingManager(ProcessingManager): class PdfProcessingManager(ProcessingManager):
def __init__(self): def __init__(self):
super(PdfProcessingManager, self).__init__() super().__init__()
self.add_processor(InitialProcessor) self.add_processor(InitialProcessor)
self.add_processor(Resizer) self.add_processor(Resizer)

View File

@ -35,7 +35,7 @@ ACCEPTED_EXTENSIONS = ['nef', 'cr2']
# The entire function have to be copied # The entire function have to be copied
def sniff_handler(media_file, filename): def sniff_handler(media_file, filename):
_log.info('Sniffing {0}'.format(MEDIA_TYPE)) _log.info('Sniffing {}'.format(MEDIA_TYPE))
name, ext = os.path.splitext(filename) name, ext = os.path.splitext(filename)
clean_ext = ext[1:].lower() # Strip the . from ext and make lowercase clean_ext = ext[1:].lower() # Strip the . from ext and make lowercase
@ -43,7 +43,7 @@ def sniff_handler(media_file, filename):
_log.info('Found file extension in supported filetypes') _log.info('Found file extension in supported filetypes')
return MEDIA_TYPE return MEDIA_TYPE
else: else:
_log.debug('Media present, extension not found in {0}'.format( _log.debug('Media present, extension not found in {}'.format(
ACCEPTED_EXTENSIONS)) ACCEPTED_EXTENSIONS))
return None return None
@ -54,7 +54,7 @@ class InitialRawProcessor(InitialProcessor):
""" """
Pull out a full-size JPEG-preview Pull out a full-size JPEG-preview
""" """
super(InitialRawProcessor, self).common_setup() super().common_setup()
self._original_raw = self.process_filename self._original_raw = self.process_filename
@ -68,7 +68,7 @@ class InitialRawProcessor(InitialProcessor):
md.previews[-1].write_to_file( md.previews[-1].write_to_file(
self.process_filename.encode('utf-8')) self.process_filename.encode('utf-8'))
self.process_filename += '.jpg' self.process_filename += '.jpg'
_log.debug(u'Wrote new file from {0} to preview (jpg) {1}'.format( _log.debug('Wrote new file from {} to preview (jpg) {}'.format(
self._original_raw, self.process_filename)) self._original_raw, self.process_filename))
# Override the namebuilder with our new jpg-based name # Override the namebuilder with our new jpg-based name
@ -77,6 +77,6 @@ class InitialRawProcessor(InitialProcessor):
class RawImageProcessingManager(ProcessingManager): class RawImageProcessingManager(ProcessingManager):
def __init__(self): def __init__(self):
super(RawImageProcessingManager, self).__init__() super().__init__()
self.add_processor(InitialRawProcessor) self.add_processor(InitialRawProcessor)
self.add_processor(Resizer) self.add_processor(Resizer)

View File

@ -22,7 +22,7 @@ class ThreeDeeParseError(Exception):
pass pass
class ThreeDee(object): class ThreeDee:
""" """
3D model parser base class. Derrived classes are used for basic 3D model parser base class. Derrived classes are used for basic
analysis of 3D models, and are not intended to be used for 3D analysis of 3D models, and are not intended to be used for 3D
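Dropping the explicit object base (class ThreeDee(object) becoming class ThreeDee) changes nothing on Python 3, where every class is new-style and inherits from object implicitly. An illustrative check with a made-up class name:

    class ThreeDeeExample:                     # hypothetical stand-in for the parser classes
        pass

    assert ThreeDeeExample.__mro__ == (ThreeDeeExample, object)
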

View File

@ -48,7 +48,7 @@ BLEND_SCRIPT = pkg_resources.resource_filename(
def sniff_handler(media_file, filename): def sniff_handler(media_file, filename):
_log.info('Sniffing {0}'.format(MEDIA_TYPE)) _log.info('Sniffing {}'.format(MEDIA_TYPE))
name, ext = os.path.splitext(filename) name, ext = os.path.splitext(filename)
clean_ext = ext[1:].lower() clean_ext = ext[1:].lower()
@ -57,7 +57,7 @@ def sniff_handler(media_file, filename):
_log.info('Found file extension in supported filetypes') _log.info('Found file extension in supported filetypes')
return MEDIA_TYPE return MEDIA_TYPE
else: else:
_log.debug('Media present, extension not found in {0}'.format( _log.debug('Media present, extension not found in {}'.format(
SUPPORTED_FILETYPES)) SUPPORTED_FILETYPES))
return None return None
@ -365,6 +365,6 @@ class Resizer(CommonStlProcessor):
class StlProcessingManager(ProcessingManager): class StlProcessingManager(ProcessingManager):
def __init__(self): def __init__(self):
super(StlProcessingManager, self).__init__() super().__init__()
self.add_processor(InitialProcessor) self.add_processor(InitialProcessor)
self.add_processor(Resizer) self.add_processor(Resizer)

View File

@ -40,7 +40,7 @@ def discover(src):
# init before import to work around https://bugzilla.gnome.org/show_bug.cgi?id=736260 # init before import to work around https://bugzilla.gnome.org/show_bug.cgi?id=736260
from gi.repository import GstPbutils from gi.repository import GstPbutils
_log.info('Discovering {0}...'.format(src)) _log.info('Discovering {}...'.format(src))
uri = 'file://{0}'.format(src) uri = 'file://{}'.format(src)
discoverer = GstPbutils.Discoverer.new(60 * Gst.SECOND) discoverer = GstPbutils.Discoverer.new(60 * Gst.SECOND)
return discoverer.discover_uri(uri) return discoverer.discover_uri(uri)

View File

@ -41,7 +41,7 @@ class VideoMediaManager(MediaManagerBase):
video_res.remove(video_config['default_resolution']) video_res.remove(video_config['default_resolution'])
video_res.insert(0, video_config['default_resolution']) video_res.insert(0, video_config['default_resolution'])
video_res = ['webm_{}'.format(x) for x in video_res] video_res = ['webm_{}'.format(x) for x in video_res]
return ([u'webm_video'] + video_res + [u'original']) return (['webm_video'] + video_res + ['original'])
def get_media_type_and_manager(ext): def get_media_type_and_manager(ext):
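The u'' prefixes stripped throughout this commit are redundant on Python 3, where every str literal is already unicode; the prefix remains legal purely for compatibility. For example:

    assert u'webm_video' == 'webm_video'
    assert type(u'original') is type('original') is str
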

View File

@ -79,12 +79,12 @@ def change_metadata_format(db):
'videolength': 'length', 'videolength': 'length',
} }
new_metadata['video'] = [dict((v, metadata.get(k)) new_metadata['video'] = [{v: metadata.get(k)
for k, v in video_key_map.items() if metadata.get(k))] for k, v in video_key_map.items() if metadata.get(k)}]
new_metadata['audio'] = [dict((v, metadata.get(k)) new_metadata['audio'] = [{v: metadata.get(k)
for k, v in audio_key_map.items() if metadata.get(k))] for k, v in audio_key_map.items() if metadata.get(k)}]
new_metadata['common'] = dict((v, metadata.get(k)) new_metadata['common'] = {v: metadata.get(k)
for k, v in common_key_map.items() if metadata.get(k)) for k, v in common_key_map.items() if metadata.get(k)}
# 'mimetype' should be in tags # 'mimetype' should be in tags
new_metadata['common']['tags'] = {'mimetype': metadata.get('mimetype')} new_metadata['common']['tags'] = {'mimetype': metadata.get('mimetype')}
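The dict((k, v) for ...) calls are rewritten as dict comprehensions, which build the same mapping without the intermediate generator-plus-constructor round trip. An equivalence check with illustrative metadata:

    metadata = {'videoheight': 480, 'videowidth': 640}                  # illustrative values
    video_key_map = {'videoheight': 'height', 'videowidth': 'width'}

    old = dict((v, metadata.get(k)) for k, v in video_key_map.items() if metadata.get(k))
    new = {v: metadata.get(k) for k, v in video_key_map.items() if metadata.get(k)}
    assert old == new == {'height': 480, 'width': 640}
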

View File

@ -86,7 +86,7 @@ class VideoData(Base):
if video_codec == "vp8 video": if video_codec == "vp8 video":
video_codec = "vp8" video_codec = "vp8"
return '%s; codecs="%s, %s"' % ( return '{}; codecs="{}, {}"'.format(
mimetype, video_codec, audio_codec) mimetype, video_codec, audio_codec)
else: else:
return video.VideoMediaManager.default_webm_type return video.VideoMediaManager.default_webm_type
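Percent formatting with %s gives way to str.format() here; the rendered string is identical. A check with illustrative codec values:

    mimetype, video_codec, audio_codec = 'video/webm', 'vp8', 'vorbis'  # illustrative values
    assert ('%s; codecs="%s, %s"' % (mimetype, video_codec, audio_codec)
            == '{}; codecs="{}, {}"'.format(mimetype, video_codec, audio_codec))
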

View File

@ -46,19 +46,19 @@ class VideoTranscodingFail(BaseProcessingFail):
''' '''
Error raised if video transcoding fails Error raised if video transcoding fails
''' '''
general_message = _(u'Video transcoding failed') general_message = _('Video transcoding failed')
def sniffer(media_file): def sniffer(media_file):
'''New style sniffer, used in two-steps check; requires to have .name''' '''New style sniffer, used in two-steps check; requires to have .name'''
_log.info('Sniffing {0}'.format(MEDIA_TYPE)) _log.info('Sniffing {}'.format(MEDIA_TYPE))
try: try:
data = transcoders.discover(media_file.name) data = transcoders.discover(media_file.name)
except Exception as e: except Exception as e:
# this is usually GLib.GError, but we don't really care which one # this is usually GLib.GError, but we don't really care which one
_log.warning(u'GStreamer: {0}'.format(six.text_type(e))) _log.warning('GStreamer: {}'.format(str(e)))
raise MissingComponents(u'GStreamer: {0}'.format(six.text_type(e))) raise MissingComponents('GStreamer: {}'.format(str(e)))
_log.debug('Discovered: {0}'.format(data)) _log.debug('Discovered: {}'.format(data))
if not data.get_video_streams(): if not data.get_video_streams():
raise MissingComponents('No video streams found in this video') raise MissingComponents('No video streams found in this video')
@ -66,17 +66,17 @@ def sniffer(media_file):
if data.get_result() != 0: # it's 0 if success if data.get_result() != 0: # it's 0 if success
try: try:
missing = data.get_misc().get_string('name') missing = data.get_misc().get_string('name')
_log.warning('GStreamer: missing {0}'.format(missing)) _log.warning('GStreamer: missing {}'.format(missing))
except AttributeError as e: except AttributeError as e:
# AttributeError happens here on gstreamer >1.4, when get_misc # AttributeError happens here on gstreamer >1.4, when get_misc
# returns None. There is a special function to get info about # returns None. There is a special function to get info about
# missing plugin. This info should be printed to logs for admin and # missing plugin. This info should be printed to logs for admin and
# showed to the user in a short and nice version # showed to the user in a short and nice version
details = data.get_missing_elements_installer_details() details = data.get_missing_elements_installer_details()
_log.warning('GStreamer: missing: {0}'.format(', '.join(details))) _log.warning('GStreamer: missing: {}'.format(', '.join(details)))
missing = u', '.join([u'{0} ({1})'.format(*d.split('|')[3:]) missing = ', '.join(['{} ({})'.format(*d.split('|')[3:])
for d in details]) for d in details])
raise MissingComponents(u'{0} is missing'.format(missing)) raise MissingComponents('{} is missing'.format(missing))
return MEDIA_TYPE return MEDIA_TYPE
@ -89,13 +89,13 @@ def sniff_handler(media_file, filename):
if clean_ext in EXCLUDED_EXTS: if clean_ext in EXCLUDED_EXTS:
# We don't handle this filetype, though gstreamer might think we can # We don't handle this filetype, though gstreamer might think we can
_log.info('Refused to process {0} due to excluded extension'.format(filename)) _log.info('Refused to process {} due to excluded extension'.format(filename))
return None return None
try: try:
return sniffer(media_file) return sniffer(media_file)
except: except:
_log.error('Could not discover {0}'.format(filename)) _log.error('Could not discover {}'.format(filename))
return None return None
def get_tags(stream_info): def get_tags(stream_info):
@ -111,7 +111,7 @@ def get_tags(stream_info):
# date/datetime should be converted from GDate/GDateTime to strings # date/datetime should be converted from GDate/GDateTime to strings
if 'date' in tags: if 'date' in tags:
date = tags['date'] date = tags['date']
tags['date'] = "%s-%s-%s" % ( tags['date'] = "{}-{}-{}".format(
date.year, date.month, date.day) date.year, date.month, date.day)
if 'datetime' in tags: if 'datetime' in tags:
@ -127,7 +127,7 @@ def get_tags(stream_info):
tags['datetime'] = None tags['datetime'] = None
for k, v in tags.copy().items(): for k, v in tags.copy().items():
# types below are accepted by json; others must not present # types below are accepted by json; others must not present
if not isinstance(v, (dict, list, six.string_types, int, float, bool, if not isinstance(v, (dict, list, (str,), int, float, bool,
type(None))): type(None))):
del tags[k] del tags[k]
return dict(tags) return dict(tags)
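six.string_types is the tuple (str,) on Python 3, and that is what lands inside the isinstance() tuple above. isinstance() accepts nested tuples of types, so the check still works; flattening (str,) to a bare str would be a harmless follow-up. A quick sketch:

    for v in ({'a': 1}, ['x'], 'text', 3, 2.5, True, None):
        assert isinstance(v, (dict, list, (str,), int, float, bool, type(None)))
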
@ -192,10 +192,10 @@ def main_task(entry_id, resolution, medium_size, **process_info):
processor.generate_thumb(thumb_size=process_info['thumb_size']) processor.generate_thumb(thumb_size=process_info['thumb_size'])
processor.store_orig_metadata() processor.store_orig_metadata()
# Make state of entry as processed # Make state of entry as processed
entry.state = u'processed' entry.state = 'processed'
entry.save() entry.save()
_log.info(u'MediaEntry ID {0} is processed (transcoded to default' _log.info('MediaEntry ID {} is processed (transcoded to default'
' resolution): {1}'.format(entry.id, medium_size)) ' resolution): {}'.format(entry.id, medium_size))
_log.debug('MediaEntry processed') _log.debug('MediaEntry processed')
@ -211,7 +211,7 @@ def complementary_task(entry_id, resolution, medium_size, **process_info):
vp8_quality=process_info['vp8_quality'], vp8_quality=process_info['vp8_quality'],
vp8_threads=process_info['vp8_threads'], vp8_threads=process_info['vp8_threads'],
vorbis_quality=process_info['vorbis_quality']) vorbis_quality=process_info['vorbis_quality'])
_log.info(u'MediaEntry ID {0} is transcoded to {1}'.format( _log.info('MediaEntry ID {} is transcoded to {}'.format(
entry.id, medium_size)) entry.id, medium_size))
@ -585,7 +585,7 @@ class Transcoder(CommonVideoProcessor):
class VideoProcessingManager(ProcessingManager): class VideoProcessingManager(ProcessingManager):
def __init__(self): def __init__(self):
super(VideoProcessingManager, self).__init__() super().__init__()
self.add_processor(InitialProcessor) self.add_processor(InitialProcessor)
self.add_processor(Resizer) self.add_processor(Resizer)
self.add_processor(Transcoder) self.add_processor(Transcoder)
@ -596,7 +596,7 @@ class VideoProcessingManager(ProcessingManager):
def_res = video_config['default_resolution'] def_res = video_config['default_resolution']
priority_num = len(video_config['available_resolutions']) + 1 priority_num = len(video_config['available_resolutions']) + 1
entry.state = u'processing' entry.state = 'processing'
entry.save() entry.save()
reprocess_info = reprocess_info or {} reprocess_info = reprocess_info or {}

View File

@ -14,7 +14,6 @@
# You should have received a copy of the GNU Affero General Public License # You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, print_function
import os import os
import sys import sys
@ -63,7 +62,7 @@ def capture_thumb(video_path, dest_path, width=None, height=None, percent=0.5):
'''This is a callback to dynamically add element to pipeline''' '''This is a callback to dynamically add element to pipeline'''
caps = pad.query_caps(None) caps = pad.query_caps(None)
name = caps.to_string() name = caps.to_string()
_log.debug('on_pad_added: {0}'.format(name)) _log.debug('on_pad_added: {}'.format(name))
if name.startswith('video') and not connect_to.is_linked(): if name.startswith('video') and not connect_to.is_linked():
pad.link(connect_to) pad.link(connect_to)
@ -71,7 +70,7 @@ def capture_thumb(video_path, dest_path, width=None, height=None, percent=0.5):
# ! CAPS ! appsink # ! CAPS ! appsink
pipeline = Gst.Pipeline() pipeline = Gst.Pipeline()
uridecodebin = Gst.ElementFactory.make('uridecodebin', None) uridecodebin = Gst.ElementFactory.make('uridecodebin', None)
uridecodebin.set_property('uri', 'file://{0}'.format(video_path)) uridecodebin.set_property('uri', 'file://{}'.format(video_path))
videoconvert = Gst.ElementFactory.make('videoconvert', None) videoconvert = Gst.ElementFactory.make('videoconvert', None)
uridecodebin.connect('pad-added', pad_added, uridecodebin.connect('pad-added', pad_added,
videoconvert.get_static_pad('sink')) videoconvert.get_static_pad('sink'))
@ -105,7 +104,7 @@ def capture_thumb(video_path, dest_path, width=None, height=None, percent=0.5):
# timeout of 3 seconds below was set experimentally # timeout of 3 seconds below was set experimentally
state = pipeline.get_state(Gst.SECOND * 3) state = pipeline.get_state(Gst.SECOND * 3)
if state[0] != Gst.StateChangeReturn.SUCCESS: if state[0] != Gst.StateChangeReturn.SUCCESS:
_log.warning('state change failed, {0}'.format(state)) _log.warning('state change failed, {}'.format(state))
return return
# get duration # get duration
@ -115,7 +114,7 @@ def capture_thumb(video_path, dest_path, width=None, height=None, percent=0.5):
return return
seek_to = int(duration * int(percent * 100) / 100) seek_to = int(duration * int(percent * 100) / 100)
_log.debug('Seeking to {0} of {1}'.format( _log.debug('Seeking to {} of {}'.format(
float(seek_to) / Gst.SECOND, float(duration) / Gst.SECOND)) float(seek_to) / Gst.SECOND, float(duration) / Gst.SECOND))
seek = pipeline.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH, seek_to) seek = pipeline.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH, seek_to)
if not seek: if not seek:
@ -140,13 +139,13 @@ def capture_thumb(video_path, dest_path, width=None, height=None, percent=0.5):
im = Image.frombytes('RGB', (width, height), im = Image.frombytes('RGB', (width, height),
buffer.extract_dup(0, buffer.get_size())) buffer.extract_dup(0, buffer.get_size()))
im.save(dest_path) im.save(dest_path)
_log.info('thumbnail saved to {0}'.format(dest_path)) _log.info('thumbnail saved to {}'.format(dest_path))
# cleanup # cleanup
pipeline.set_state(Gst.State.NULL) pipeline.set_state(Gst.State.NULL)
class VideoTranscoder(object): class VideoTranscoder:
''' '''
Video transcoder Video transcoder
@ -375,7 +374,7 @@ class VideoTranscoder(object):
_log.info('{percent}% of {dest} resolution done..' _log.info('{percent}% of {dest} resolution done..'
'.'.format(percent=percent, dest=self.destination_dimensions)) '.'.format(percent=percent, dest=self.destination_dimensions))
elif message.type == Gst.MessageType.ERROR: elif message.type == Gst.MessageType.ERROR:
_log.error('Got error: {0}'.format(message.parse_error())) _log.error('Got error: {}'.format(message.parse_error()))
self.dst_data = None self.dst_data = None
self.__stop() self.__stop()

View File

@ -43,7 +43,7 @@ def skip_transcode(metadata, size):
# XXX: how were we supposed to use it? # XXX: how were we supposed to use it?
medium_config = mgg.global_config['media:medium'] medium_config = mgg.global_config['media:medium']
_log.debug('skip_transcode config: {0}'.format(config)) _log.debug('skip_transcode config: {}'.format(config))
metadata_tags = metadata.get_tags() metadata_tags = metadata.get_tags()
if not metadata_tags: if not metadata_tags:

View File

@ -65,7 +65,7 @@ def setup_globals(**kwargs):
""" """
from mediagoblin import mg_globals from mediagoblin import mg_globals
for key, value in six.iteritems(kwargs): for key, value in kwargs.items():
if not hasattr(mg_globals, key): if not hasattr(mg_globals, key):
raise AssertionError("Global %s not known" % key) raise AssertionError("Global %s not known" % key)
setattr(mg_globals, key, value) setattr(mg_globals, key, value)
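six.iteritems(kwargs) becomes kwargs.items(), which on Python 3 returns a lazy view rather than a list, matching the old iterator behaviour. A sketch with illustrative keyword arguments:

    kwargs = {'app_config': {}, 'database': None}    # illustrative stand-in for the globals
    for key, value in kwargs.items():                # dict view; replaces six.iteritems(kwargs)
        print(key, '->', value)
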

View File

@ -18,10 +18,10 @@ import wtforms
from mediagoblin.tools.translate import lazy_pass_to_ugettext as _ from mediagoblin.tools.translate import lazy_pass_to_ugettext as _
ACTION_CHOICES = [ ACTION_CHOICES = [
(u'takeaway', _(u'Take away privilege')), ('takeaway', _('Take away privilege')),
(u'userban', _(u'Ban the user')), ('userban', _('Ban the user')),
(u'sendmessage', _(u'Send the user a message')), ('sendmessage', _('Send the user a message')),
(u'delete', _(u'Delete the content'))] ('delete', _('Delete the content'))]
class MultiCheckboxField(wtforms.SelectMultipleField): class MultiCheckboxField(wtforms.SelectMultipleField):
""" """
@ -50,11 +50,11 @@ class BanForm(wtforms.Form):
This form is used by an admin to ban a user directly from their user page. This form is used by an admin to ban a user directly from their user page.
""" """
user_banned_until = wtforms.DateField( user_banned_until = wtforms.DateField(
_(u'User will be banned until:'), _('User will be banned until:'),
format='%Y-%m-%d', format='%Y-%m-%d',
validators=[wtforms.validators.optional()]) validators=[wtforms.validators.optional()])
why_user_was_banned = wtforms.TextAreaField( why_user_was_banned = wtforms.TextAreaField(
_(u'Why are you banning this User?'), _('Why are you banning this User?'),
validators=[wtforms.validators.optional()]) validators=[wtforms.validators.optional()])
# =========== Forms for mediagoblin.moderation.report page ================= # # =========== Forms for mediagoblin.moderation.report page ================= #
@ -106,26 +106,26 @@ class ReportResolutionForm(wtforms.Form):
-ved the report in such a way. -ved the report in such a way.
""" """
action_to_resolve = MultiCheckboxField( action_to_resolve = MultiCheckboxField(
_(u'What action will you take to resolve the report?'), _('What action will you take to resolve the report?'),
validators=[wtforms.validators.optional()], validators=[wtforms.validators.optional()],
choices=ACTION_CHOICES) choices=ACTION_CHOICES)
targeted_user = wtforms.HiddenField('', targeted_user = wtforms.HiddenField('',
validators=[wtforms.validators.required()]) validators=[wtforms.validators.required()])
take_away_privileges = wtforms.SelectMultipleField( take_away_privileges = wtforms.SelectMultipleField(
_(u'What privileges will you take away?'), _('What privileges will you take away?'),
validators=[wtforms.validators.optional()]) validators=[wtforms.validators.optional()])
user_banned_until = wtforms.DateField( user_banned_until = wtforms.DateField(
_(u'User will be banned until:'), _('User will be banned until:'),
format='%Y-%m-%d', format='%Y-%m-%d',
validators=[wtforms.validators.optional()]) validators=[wtforms.validators.optional()])
why_user_was_banned = wtforms.TextAreaField( why_user_was_banned = wtforms.TextAreaField(
_(u'Why user was banned:'), _('Why user was banned:'),
validators=[wtforms.validators.optional()]) validators=[wtforms.validators.optional()])
message_to_user = wtforms.TextAreaField( message_to_user = wtforms.TextAreaField(
_(u'Message to user:'), _('Message to user:'),
validators=[wtforms.validators.optional()]) validators=[wtforms.validators.optional()])
resolution_content = wtforms.TextAreaField( resolution_content = wtforms.TextAreaField(
_(u'Resolution content:')) _('Resolution content:'))
# ======== Forms for mediagoblin.moderation.report_panel page ============== # # ======== Forms for mediagoblin.moderation.report_panel page ============== #

View File

@ -30,24 +30,24 @@ def take_punitive_actions(request, form, report, user):
# The bulk of this action is running through all of the different # The bulk of this action is running through all of the different
# punitive actions that a moderator could take. # punitive actions that a moderator could take.
if u'takeaway' in form.action_to_resolve.data: if 'takeaway' in form.action_to_resolve.data:
for privilege_name in form.take_away_privileges.data: for privilege_name in form.take_away_privileges.data:
take_away_privileges(user.username, privilege_name) take_away_privileges(user.username, privilege_name)
form.resolution_content.data += \ form.resolution_content.data += \
_(u"\n{mod} took away {user}\'s {privilege} privileges.").format( _("\n{mod} took away {user}\'s {privilege} privileges.").format(
mod=request.user.username, mod=request.user.username,
user=user.username, user=user.username,
privilege=privilege_name) privilege=privilege_name)
# If the moderator elects to ban the user, a new instance of user_ban # If the moderator elects to ban the user, a new instance of user_ban
# will be created. # will be created.
if u'userban' in form.action_to_resolve.data: if 'userban' in form.action_to_resolve.data:
user_ban = ban_user(form.targeted_user.data, user_ban = ban_user(form.targeted_user.data,
expiration_date=form.user_banned_until.data, expiration_date=form.user_banned_until.data,
reason=form.why_user_was_banned.data) reason=form.why_user_was_banned.data)
Session.add(user_ban) Session.add(user_ban)
form.resolution_content.data += \ form.resolution_content.data += \
_(u"\n{mod} banned user {user} {expiration_date}.").format( _("\n{mod} banned user {user} {expiration_date}.").format(
mod=request.user.username, mod=request.user.username,
user=user.username, user=user.username,
expiration_date = ( expiration_date = (
@ -59,26 +59,26 @@ def take_punitive_actions(request, form, report, user):
# If the moderator elects to send a warning message. An email will be # If the moderator elects to send a warning message. An email will be
# sent to the email address given at sign up # sent to the email address given at sign up
if u'sendmessage' in form.action_to_resolve.data: if 'sendmessage' in form.action_to_resolve.data:
message_body = form.message_to_user.data message_body = form.message_to_user.data
form.resolution_content.data += \ form.resolution_content.data += \
_(u"\n{mod} sent a warning email to the {user}.").format( _("\n{mod} sent a warning email to the {user}.").format(
mod=request.user.username, mod=request.user.username,
user=user.username) user=user.username)
if u'delete' in form.action_to_resolve.data and \ if 'delete' in form.action_to_resolve.data and \
report.is_comment_report(): report.is_comment_report():
deleted_comment = report.obj() deleted_comment = report.obj()
deleted_comment.delete() deleted_comment.delete()
form.resolution_content.data += \ form.resolution_content.data += \
_(u"\n{mod} deleted the comment.").format( _("\n{mod} deleted the comment.").format(
mod=request.user.username) mod=request.user.username)
elif u'delete' in form.action_to_resolve.data and \ elif 'delete' in form.action_to_resolve.data and \
report.is_media_entry_report(): report.is_media_entry_report():
deleted_media = report.obj() deleted_media = report.obj()
deleted_media.delete() deleted_media.delete()
form.resolution_content.data += \ form.resolution_content.data += \
_(u"\n{mod} deleted the media entry.").format( _("\n{mod} deleted the media entry.").format(
mod=request.user.username) mod=request.user.username)
report.archive( report.archive(
resolver_id=request.user.id, resolver_id=request.user.id,
@ -216,7 +216,7 @@ def parse_report_panel_settings(form):
filters['reported_user_id'] = form.reported_user.data filters['reported_user_id'] = form.reported_user.data
filters['reporter_id'] = form.reporter.data filters['reporter_id'] = form.reporter.data
filters = dict((k, v) filters = {k: v
for k, v in six.iteritems(filters) if v) for k, v in filters.items() if v}
return filters return filters

View File

@ -32,14 +32,14 @@ def moderation_media_processing_panel(request):
''' '''
Show the global media processing panel for this instance Show the global media processing panel for this instance
''' '''
processing_entries = MediaEntry.query.filter_by(state = u'processing').\ processing_entries = MediaEntry.query.filter_by(state = 'processing').\
order_by(MediaEntry.created.desc()) order_by(MediaEntry.created.desc())
# Get media entries which have failed to process # Get media entries which have failed to process
failed_entries = MediaEntry.query.filter_by(state = u'failed').\ failed_entries = MediaEntry.query.filter_by(state = 'failed').\
order_by(MediaEntry.created.desc()) order_by(MediaEntry.created.desc())
processed_entries = MediaEntry.query.filter_by(state = u'processed').\ processed_entries = MediaEntry.query.filter_by(state = 'processed').\
order_by(MediaEntry.created.desc()).limit(10) order_by(MediaEntry.created.desc()).limit(10)
# Render to response # Render to response
@ -163,8 +163,8 @@ def moderation_reports_detail(request):
] ]
if request.method == "POST" and form.validate() and not ( if request.method == "POST" and form.validate() and not (
not request.user.has_privilege(u'admin') and not request.user.has_privilege('admin') and
report.reported_user.has_privilege(u'admin')): report.reported_user.has_privilege('admin')):
user = User.query.get(form.targeted_user.data) user = User.query.get(form.targeted_user.data)
return take_punitive_actions(request, form, report, user) return take_punitive_actions(request, form, report, user)
@ -178,7 +178,7 @@ def moderation_reports_detail(request):
{'report':report, {'report':report,
'form':form}) 'form':form})
@user_has_privilege(u'admin') @user_has_privilege('admin')
@active_user_from_url @active_user_from_url
def give_or_take_away_privilege(request, url_user): def give_or_take_away_privilege(request, url_user):
''' '''
@ -200,7 +200,7 @@ def give_or_take_away_privilege(request, url_user):
'mediagoblin.moderation.users_detail', 'mediagoblin.moderation.users_detail',
user=url_user.username) user=url_user.username)
@user_has_privilege(u'admin') @user_has_privilege('admin')
@active_user_from_url @active_user_from_url
def ban_or_unban(request, url_user): def ban_or_unban(request, url_user):
""" """

View File

@ -90,7 +90,7 @@ def mark_comment_notification_seen(comment_id, user):
object_id=comment_gmr.id object_id=comment_gmr.id
).first() ).first()
_log.debug(u'Marking {0} as seen.'.format(notification)) _log.debug('Marking {} as seen.'.format(notification))
mark_notification_seen(notification) mark_notification_seen(notification)

View File

@ -35,7 +35,7 @@ class EmailNotificationTask(Task):
''' '''
def run(self, notification_id, message): def run(self, notification_id, message):
cn = Notification.query.filter_by(id=notification_id).first() cn = Notification.query.filter_by(id=notification_id).first()
_log.info(u'Sending notification email about {0}'.format(cn)) _log.info('Sending notification email about {}'.format(cn))
return send_email( return send_email(
message['from'], message['from'],

View File

@ -27,7 +27,7 @@ class GMGRequestValidator(RequestValidator):
def __init__(self, data=None, *args, **kwargs): def __init__(self, data=None, *args, **kwargs):
self.POST = data self.POST = data
super(GMGRequestValidator, self).__init__(*args, **kwargs) super().__init__(*args, **kwargs)
def check_nonce(self, nonce): def check_nonce(self, nonce):
""" """
@ -43,15 +43,15 @@ class GMGRequestValidator(RequestValidator):
def save_request_token(self, token, request): def save_request_token(self, token, request):
""" Saves request token in db """ """ Saves request token in db """
client_id = self.POST[u"oauth_consumer_key"] client_id = self.POST["oauth_consumer_key"]
request_token = RequestToken( request_token = RequestToken(
token=token["oauth_token"], token=token["oauth_token"],
secret=token["oauth_token_secret"], secret=token["oauth_token_secret"],
) )
request_token.client = client_id request_token.client = client_id
if u"oauth_callback" in self.POST: if "oauth_callback" in self.POST:
request_token.callback = self.POST[u"oauth_callback"] request_token.callback = self.POST["oauth_callback"]
request_token.save() request_token.save()
def save_verifier(self, token, verifier, request): def save_verifier(self, token, verifier, request):
@ -188,4 +188,4 @@ class GMGRequest(Request):
kwargs["body"] = kwargs.get("body", request.data) kwargs["body"] = kwargs.get("body", request.data)
kwargs["headers"] = kwargs.get("headers", dict(request.headers)) kwargs["headers"] = kwargs.get("headers", dict(request.headers))
super(GMGRequest, self).__init__(*args, **kwargs) super().__init__(*args, **kwargs)

View File

@ -50,7 +50,7 @@ def client_register(request):
error = "Could not decode data." error = "Could not decode data."
return json_response({"error": error}, status=400) return json_response({"error": error}, status=400)
if data is "": if data == "":
error = "Unknown Content-Type" error = "Unknown Content-Type"
return json_response({"error": error}, status=400) return json_response({"error": error}, status=400)
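The notable change in this hunk is the switch from an identity check against a string literal to an equality check. Identity of equal strings depends on interning and has triggered a SyntaxWarning since Python 3.8, so == is the correct comparison; pyupgrade includes this rewrite among its fixes. Illustration:

    data = ''.join([])        # equal to "" but not guaranteed to be the same object
    assert data == ''         # always True
    # 'data is ""' is implementation-dependent and warned about on Python 3.8+
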
@ -128,7 +128,7 @@ def client_register(request):
logo_uri = data.get("logo_uri", client.logo_url) logo_uri = data.get("logo_uri", client.logo_url)
if logo_uri is not None and not validate_url(logo_uri): if logo_uri is not None and not validate_url(logo_uri):
error = "Logo URI {0} is not a valid URI.".format(logo_uri) error = "Logo URI {} is not a valid URI.".format(logo_uri)
return json_response( return json_response(
{"error": error}, {"error": error},
status=400 status=400
@ -140,7 +140,7 @@ def client_register(request):
contacts = data.get("contacts", None) contacts = data.get("contacts", None)
if contacts is not None: if contacts is not None:
if not isinstance(contacts, six.text_type): if not isinstance(contacts, str):
error = "Contacts must be a string of space-seporated email addresses." error = "Contacts must be a string of space-seporated email addresses."
return json_response({"error": error}, status=400) return json_response({"error": error}, status=400)
@ -148,7 +148,7 @@ def client_register(request):
for contact in contacts: for contact in contacts:
if not validate_email(contact): if not validate_email(contact):
# not a valid email # not a valid email
error = "Email {0} is not a valid email.".format(contact) error = "Email {} is not a valid email.".format(contact)
return json_response({"error": error}, status=400) return json_response({"error": error}, status=400)
@ -156,7 +156,7 @@ def client_register(request):
redirect_uris = data.get("redirect_uris", None) redirect_uris = data.get("redirect_uris", None)
if redirect_uris is not None: if redirect_uris is not None:
if not isinstance(redirect_uris, six.text_type): if not isinstance(redirect_uris, str):
error = "redirect_uris must be space-seporated URLs." error = "redirect_uris must be space-seporated URLs."
return json_response({"error": error}, status=400) return json_response({"error": error}, status=400)
@ -165,7 +165,7 @@ def client_register(request):
for uri in redirect_uris: for uri in redirect_uris:
if not validate_url(uri): if not validate_url(uri):
# not a valid uri # not a valid uri
error = "URI {0} is not a valid URI".format(uri) error = "URI {} is not a valid URI".format(uri)
return json_response({"error": error}, status=400) return json_response({"error": error}, status=400)
client.redirect_uri = redirect_uris client.redirect_uri = redirect_uris
@ -198,12 +198,12 @@ def request_token(request):
authorization = decode_authorization_header(data) authorization = decode_authorization_header(data)
if authorization == dict() or u"oauth_consumer_key" not in authorization: if authorization == dict() or "oauth_consumer_key" not in authorization:
error = "Missing required parameter." error = "Missing required parameter."
return json_response({"error": error}, status=400) return json_response({"error": error}, status=400)
# check the client_id # check the client_id
client_id = authorization[u"oauth_consumer_key"] client_id = authorization["oauth_consumer_key"]
client = Client.query.filter_by(id=client_id).first() client = Client.query.filter_by(id=client_id).first()
if client == None: if client == None:
@ -217,8 +217,8 @@ def request_token(request):
tokens = rv.create_request_token(request, authorization) tokens = rv.create_request_token(request, authorization)
# store the nonce & timestamp before we return back # store the nonce & timestamp before we return back
nonce = authorization[u"oauth_nonce"] nonce = authorization["oauth_nonce"]
timestamp = authorization[u"oauth_timestamp"] timestamp = authorization["oauth_timestamp"]
timestamp = datetime.datetime.fromtimestamp(float(timestamp)) timestamp = datetime.datetime.fromtimestamp(float(timestamp))
nc = NonceTimestamp(nonce=nonce, timestamp=timestamp) nc = NonceTimestamp(nonce=nonce, timestamp=timestamp)
@ -309,7 +309,7 @@ def authorize_finish(request):
) )
# okay we need to redirect them then! # okay we need to redirect them then!
querystring = "?oauth_token={0}&oauth_verifier={1}".format( querystring = "?oauth_token={}&oauth_verifier={}".format(
oauth_request.token, oauth_request.token,
oauth_request.verifier oauth_request.verifier
) )

View File

@ -28,7 +28,7 @@ def setup_plugin():
config = pluginapi.get_config(__name__) config = pluginapi.get_config(__name__)
_log.debug('API config: {0}'.format(config)) _log.debug('API config: {}'.format(config))
routes = [ routes = [
('mediagoblin.plugins.api.test', ('mediagoblin.plugins.api.test',

View File

@ -30,7 +30,7 @@ from mediagoblin.storage.filestorage import BasicFileStorage
_log = logging.getLogger(__name__) _log = logging.getLogger(__name__)
class Auth(object): class Auth:
''' '''
An object with two significant methods, 'trigger' and 'run'. An object with two significant methods, 'trigger' and 'run'.
@ -115,7 +115,7 @@ def api_auth(controller):
for auth in PluginManager().get_hook_callables('auth'): for auth in PluginManager().get_hook_callables('auth'):
if auth.trigger(request): if auth.trigger(request):
_log.debug('{0} believes it is capable of authenticating this request.'.format(auth)) _log.debug('{} believes it is capable of authenticating this request.'.format(auth))
auth_candidates.append(auth) auth_candidates.append(auth)
# If we can't find any authentication methods, we should not let them # If we can't find any authentication methods, we should not let them
@ -126,7 +126,7 @@ def api_auth(controller):
# For now, just select the first one in the list # For now, just select the first one in the list
auth = auth_candidates[0] auth = auth_candidates[0]
_log.debug('Using {0} to authorize request {1}'.format( _log.debug('Using {} to authorize request {}'.format(
auth, request.url)) auth, request.url))
if not auth(request, *args, **kw): if not auth(request, *args, **kw):

View File

@ -54,16 +54,16 @@ def post_entry(request):
callback_url = request.form.get('callback_url') callback_url = request.form.get('callback_url')
if callback_url: if callback_url:
callback_url = six.text_type(callback_url) callback_url = str(callback_url)
try: try:
entry = submit_media( entry = submit_media(
mg_app=request.app, user=request.user, mg_app=request.app, user=request.user,
submitted_file=request.files['file'], submitted_file=request.files['file'],
filename=request.files['file'].filename, filename=request.files['file'].filename,
title=six.text_type(request.form.get('title')), title=str(request.form.get('title')),
description=six.text_type(request.form.get('description')), description=str(request.form.get('description')),
license=six.text_type(request.form.get('license', '')), license=str(request.form.get('license', '')),
tags_string=six.text_type(request.form.get('tags', '')), tags_string=str(request.form.get('tags', '')),
callback_url=callback_url) callback_url=callback_url)
return json_response(get_entry_serializable(entry, request.urlgen)) return json_response(get_entry_serializable(entry, request.urlgen))
@ -71,7 +71,7 @@ def post_entry(request):
# Handle upload limit issues # Handle upload limit issues
except FileUploadLimit: except FileUploadLimit:
raise BadRequest( raise BadRequest(
_(u'Sorry, the file size is too big.')) _('Sorry, the file size is too big.'))
except UserUploadLimit: except UserUploadLimit:
raise BadRequest( raise BadRequest(
_('Sorry, uploading this file will put you over your' _('Sorry, uploading this file will put you over your'
@ -99,7 +99,7 @@ def get_entries(request):
entries = request.db.MediaEntry.query entries = request.db.MediaEntry.query
# TODO: Make it possible to fetch unprocessed media, or media in-processing # TODO: Make it possible to fetch unprocessed media, or media in-processing
entries = entries.filter_by(state=u'processed') entries = entries.filter_by(state='processed')
# TODO: Add sort order customization # TODO: Add sort order customization
entries = entries.order_by(request.db.MediaEntry.created.desc()) entries = entries.order_by(request.db.MediaEntry.created.desc())

View File

@ -64,17 +64,17 @@ class FeaturedMedia(Base):
self.save() self.save()
def demote(self): def demote(self):
if self.is_last_of_type() and self.display_type == u'primary': if self.is_last_of_type() and self.display_type == 'primary':
self.display_type = u'secondary' self.display_type = 'secondary'
elif self.is_last_of_type() and self.display_type == u'secondary': elif self.is_last_of_type() and self.display_type == 'secondary':
self.display_type = u'tertiary' self.display_type = 'tertiary'
self.save() self.save()
def promote(self): def promote(self):
if self.is_first_of_type() and self.display_type == u'secondary': if self.is_first_of_type() and self.display_type == 'secondary':
self.display_type = u'primary' self.display_type = 'primary'
elif self.is_first_of_type() and self.display_type == u'tertiary': elif self.is_first_of_type() and self.display_type == 'tertiary':
self.display_type = u'secondary' self.display_type = 'secondary'
self.save() self.save()
def is_first_of_type(self): def is_first_of_type(self):

View File

@ -56,7 +56,7 @@ def parse_url(url):
who uploaded the piece of media, slug is who uploaded the piece of media, slug is
the media entry's url slug. the media entry's url slug.
""" """
url = six.text_type(url) url = str(url)
u_end, m_start, m_end, end = (url.find('/u/') + 3, u_end, m_start, m_end, end = (url.find('/u/') + 3,
url.find('/m/'), url.find('/m/'),
url.find('/m/') + 3, url.find('/m/') + 3,
@ -87,14 +87,14 @@ def split_featured_media_list(featured_media):
or tertiary) or tertiary)
""" """
featured_media = six.text_type(featured_media) featured_media = str(featured_media)
featured_media_list = featured_media.split("\n") featured_media_list = featured_media.split("\n")
display_type = 0 display_type = 0
media_already_featured = [] media_already_featured = []
all_featured_media = [] all_featured_media = []
for line in featured_media_list: for line in featured_media_list:
if line == '' or line.isspace(): continue if line == '' or line.isspace(): continue
elif line.startswith(u'-'): elif line.startswith('-'):
display_type += 1 display_type += 1
elif display_type <= 0 or display_type > 3: continue elif display_type <= 0 or display_type > 3: continue
else: else:
@ -106,9 +106,9 @@ def split_featured_media_list(featured_media):
media_already_featured.append(media) media_already_featured.append(media)
all_featured_media.append((media, all_featured_media.append((media,
[None, [None,
u'primary', 'primary',
u'secondary', 'secondary',
u'tertiary'][display_type])) 'tertiary'][display_type]))
return all_featured_media return all_featured_media
@ -123,24 +123,24 @@ def create_featured_media_textbox():
primaries = FeaturedMedia.query.order_by( primaries = FeaturedMedia.query.order_by(
FeaturedMedia.order.asc()).filter( FeaturedMedia.order.asc()).filter(
FeaturedMedia.display_type == u'primary').all() FeaturedMedia.display_type == 'primary').all()
secondaries = FeaturedMedia.query.order_by( secondaries = FeaturedMedia.query.order_by(
FeaturedMedia.order.asc()).filter( FeaturedMedia.order.asc()).filter(
FeaturedMedia.display_type == u'secondary').all() FeaturedMedia.display_type == 'secondary').all()
tertiaries = FeaturedMedia.query.order_by( tertiaries = FeaturedMedia.query.order_by(
FeaturedMedia.order.asc()).filter( FeaturedMedia.order.asc()).filter(
FeaturedMedia.display_type == u'tertiary').all() FeaturedMedia.display_type == 'tertiary').all()
output_text = u'' output_text = ''
for display_type, feature_list in [ for display_type, feature_list in [
(_(u'Primary'),primaries), (_('Primary'),primaries),
(_(u'Secondary'),secondaries), (_('Secondary'),secondaries),
(_(u'Tertiary'),tertiaries)]: (_('Tertiary'),tertiaries)]:
output_text += _( output_text += _(
u"""-----------{display_type}-Features--------------------------- """-----------{display_type}-Features---------------------------
""").format(display_type=display_type) """).format(display_type=display_type)
for feature in feature_list: for feature in feature_list:
media_entry = feature.media_entry media_entry = feature.media_entry
output_text += u'/u/{uploader_username}/m/{media_slug}/\n'.format( output_text += '/u/{uploader_username}/m/{media_slug}/\n'.format(
uploader_username = media_entry.get_actor.username, uploader_username = media_entry.get_actor.username,
media_slug = media_entry.slug) media_slug = media_entry.slug)
@ -164,9 +164,9 @@ def automatically_add_new_feature(media_entry):
# secondary features, but in the future this should be a variable editable # secondary features, but in the future this should be a variable editable
# by the site admin. # by the site admin.
too_many_primaries = FeaturedMedia.query.filter( too_many_primaries = FeaturedMedia.query.filter(
FeaturedMedia.display_type==u'primary').count() >= 1 FeaturedMedia.display_type=='primary').count() >= 1
too_many_secondaries = FeaturedMedia.query.filter( too_many_secondaries = FeaturedMedia.query.filter(
FeaturedMedia.display_type==u'secondary').count() >= 2 FeaturedMedia.display_type=='secondary').count() >= 2
featured_first_to_last = FeaturedMedia.query.order_by( featured_first_to_last = FeaturedMedia.query.order_by(
FeaturedMedia.order.asc()).all() FeaturedMedia.order.asc()).all()
@ -174,11 +174,11 @@ def automatically_add_new_feature(media_entry):
# Some features have the option to demote or promote themselves to a # Some features have the option to demote or promote themselves to a
# different display_type, based on their position. But all features move # different display_type, based on their position. But all features move
# up and down one step in the stack. # up and down one step in the stack.
if (feature.is_last_of_type() and feature.display_type == u'primary' if (feature.is_last_of_type() and feature.display_type == 'primary'
and too_many_primaries): and too_many_primaries):
feature.demote() feature.demote()
too_many_primaries = False too_many_primaries = False
elif (feature.is_last_of_type() and feature.display_type == u'secondary' elif (feature.is_last_of_type() and feature.display_type == 'secondary'
and too_many_secondaries): and too_many_secondaries):
feature.demote() feature.demote()
too_many_secondaries = False too_many_secondaries = False
@ -188,7 +188,7 @@ def automatically_add_new_feature(media_entry):
# Create the new feature at the top of the stack. # Create the new feature at the top of the stack.
new_feature = FeaturedMedia( new_feature = FeaturedMedia(
media_entry=media_entry, media_entry=media_entry,
display_type=u"primary", display_type="primary",
order=0) order=0)
new_feature.save() new_feature.save()
return new_feature return new_feature
@ -252,10 +252,10 @@ def promote_feature(media_entry):
target_feature.display_type) target_feature.display_type)
above_feature.save() above_feature.save()
# Change the feature's display type to a more prominent one # Change the feature's display type to a more prominent one
elif target_feature.display_type == u'secondary': elif target_feature.display_type == 'secondary':
target_feature.display_type = u'primary' target_feature.display_type = 'primary'
elif target_feature.display_type == u'tertiary': elif target_feature.display_type == 'tertiary':
target_feature.display_type = u'secondary' target_feature.display_type = 'secondary'
target_feature.save() target_feature.save()
def demote_feature(media_entry): def demote_feature(media_entry):
@ -287,8 +287,8 @@ def demote_feature(media_entry):
target_feature.display_type) target_feature.display_type)
below_feature.save() below_feature.save()
# Change the feature's display type to a less prominent one # Change the feature's display type to a less prominent one
elif target_feature.display_type == u'secondary': elif target_feature.display_type == 'secondary':
target_feature.display_type = u'tertiary' target_feature.display_type = 'tertiary'
elif target_feature.display_type == u'primary': elif target_feature.display_type == 'primary':
target_feature.display_type = u'secondary' target_feature.display_type = 'secondary'
target_feature.save() target_feature.save()

View File

@ -40,15 +40,15 @@ def root_view(request):
displaying featured media. displaying featured media.
""" """
featured_media = { featured_media = {
u'primary':FeaturedMedia.query.order_by( 'primary':FeaturedMedia.query.order_by(
FeaturedMedia.order.asc()).filter( FeaturedMedia.order.asc()).filter(
FeaturedMedia.display_type==u'primary').all(), FeaturedMedia.display_type=='primary').all(),
u'secondary':FeaturedMedia.query.order_by( 'secondary':FeaturedMedia.query.order_by(
FeaturedMedia.order.asc()).filter( FeaturedMedia.order.asc()).filter(
FeaturedMedia.display_type==u'secondary').all(), FeaturedMedia.display_type=='secondary').all(),
u'tertiary':FeaturedMedia.query.order_by( 'tertiary':FeaturedMedia.query.order_by(
FeaturedMedia.order.asc()).filter( FeaturedMedia.order.asc()).filter(
FeaturedMedia.display_type==u'tertiary').all()} FeaturedMedia.display_type=='tertiary').all()}
return render_to_response( return render_to_response(
request, 'archivalook/root.html', request, 'archivalook/root.html',
@ -56,7 +56,7 @@ def root_view(request):
'allow_registration': mg_globals.app_config["allow_registration"], 'allow_registration': mg_globals.app_config["allow_registration"],
'feature_template': feature_template}) 'feature_template': feature_template})
@user_has_privilege(u'featurer') @user_has_privilege('featurer')
def featured_media_panel(request): def featured_media_panel(request):
""" """
This is a new administrator panel to manage featured media. This is an This is a new administrator panel to manage featured media. This is an
@ -99,7 +99,7 @@ def recent_media_gallery_view(request, page):
""" """
The replaced homepage is available through this view. The replaced homepage is available through this view.
""" """
cursor = MediaEntry.query.filter_by(state=u'processed').\ cursor = MediaEntry.query.filter_by(state='processed').\
order_by(MediaEntry.created.desc()) order_by(MediaEntry.created.desc())
pagination = Pagination(page, cursor) pagination = Pagination(page, cursor)
@ -117,7 +117,7 @@ def add_featured_media_to_media_home(context):
context['featured_media'] = FeaturedMedia.query context['featured_media'] = FeaturedMedia.query
return context return context
@user_has_privilege(u'featurer') @user_has_privilege('featurer')
@get_user_media_entry @get_user_media_entry
def feature_media(request, media, **kwargs): def feature_media(request, media, **kwargs):
""" """
@ -130,7 +130,7 @@ def feature_media(request, media, **kwargs):
return redirect( return redirect(
request, 'index') request, 'index')
@user_has_privilege(u'featurer') @user_has_privilege('featurer')
@get_user_media_entry @get_user_media_entry
def unfeature_media(request, media, **kwargs): def unfeature_media(request, media, **kwargs):
""" """
@ -143,7 +143,7 @@ def unfeature_media(request, media, **kwargs):
return redirect( return redirect(
request, 'index') request, 'index')
@user_has_privilege(u'featurer') @user_has_privilege('featurer')
@get_user_media_entry @get_user_media_entry
def promote_featured_media(request, media, **kwargs): def promote_featured_media(request, media, **kwargs):
""" """
@ -156,7 +156,7 @@ def promote_featured_media(request, media, **kwargs):
return redirect( return redirect(
request, 'index') request, 'index')
@user_has_privilege(u'featurer') @user_has_privilege('featurer')
@get_user_media_entry @get_user_media_entry
def demote_featured_media(request, media, **kwargs): def demote_featured_media(request, media, **kwargs):
""" """

View File

@ -40,7 +40,7 @@ def bcrypt_check_password(raw_pass, stored_hash, extra_salt=None):
True or False depending on success. True or False depending on success.
""" """
if extra_salt: if extra_salt:
raw_pass = u"%s:%s" % (extra_salt, raw_pass) raw_pass = "{}:{}".format(extra_salt, raw_pass)
hashed_pass = bcrypt.hashpw(raw_pass.encode('utf-8'), stored_hash) hashed_pass = bcrypt.hashpw(raw_pass.encode('utf-8'), stored_hash)
@ -66,9 +66,9 @@ def bcrypt_gen_password_hash(raw_pass, extra_salt=None):
non-database extra salt non-database extra salt
""" """
if extra_salt: if extra_salt:
raw_pass = u"%s:%s" % (extra_salt, raw_pass) raw_pass = "{}:{}".format(extra_salt, raw_pass)
return six.text_type( return str(
bcrypt.hashpw(raw_pass.encode('utf-8'), bcrypt.gensalt())) bcrypt.hashpw(raw_pass.encode('utf-8'), bcrypt.gensalt()))
@ -92,8 +92,8 @@ def fake_login_attempt():
EMAIL_FP_VERIFICATION_TEMPLATE = ( EMAIL_FP_VERIFICATION_TEMPLATE = (
u"{uri}?" "{uri}?"
u"token={fp_verification_key}") "token={fp_verification_key}")
def send_fp_verification_email(user, request): def send_fp_verification_email(user, request):

View File

@ -70,7 +70,7 @@ def forgot_password(request):
success_message = _("An email has been sent with instructions " success_message = _("An email has been sent with instructions "
"on how to change your password.") "on how to change your password.")
if user and user.has_privilege(u'active') is False: if user and user.has_privilege('active') is False:
# Don't send reminder because user is inactive or has no verified email # Don't send reminder because user is inactive or has no verified email
messages.add_message( messages.add_message(
request, request,
@ -128,7 +128,7 @@ def verify_forgot_password(request):
request, 'index') request, 'index')
# check if user active and has email verified # check if user active and has email verified
if user.has_privilege(u'active'): if user.has_privilege('active'):
cp_form = forms.ChangeForgotPassForm(formdata_vars) cp_form = forms.ChangeForgotPassForm(formdata_vars)
if request.method == 'POST' and cp_form.validate(): if request.method == 'POST' and cp_form.validate():

View File

@ -34,7 +34,7 @@ _log = logging.getLogger(__name__)
def print_context(c): def print_context(c):
s = [] s = []
for key, val in c.items(): for key, val in c.items():
s.append('%s: %s' % (key, repr(val))) s.append('{}: {}'.format(key, repr(val)))
return '\n'.join(s) return '\n'.join(s)

View File

@ -42,7 +42,7 @@ class HTTPAuth(Auth):
if not request.authorization: if not request.authorization:
return False return False
user = check_login_simple(six.text_type(request.authorization['username']), user = check_login_simple(str(request.authorization['username']),
request.authorization['password']) request.authorization['password'])
if user: if user:

View File

@ -23,12 +23,12 @@ from mediagoblin.tools import pluginapi
_log = logging.getLogger(__name__) _log = logging.getLogger(__name__)
class LDAP(object): class LDAP:
def __init__(self): def __init__(self):
self.ldap_settings = pluginapi.get_config('mediagoblin.plugins.ldap') self.ldap_settings = pluginapi.get_config('mediagoblin.plugins.ldap')
def _connect(self, server): def _connect(self, server):
_log.info('Connecting to {0}.'.format(server['LDAP_SERVER_URI'])) _log.info('Connecting to {}.'.format(server['LDAP_SERVER_URI']))
self.conn = ldap.initialize(server['LDAP_SERVER_URI']) self.conn = ldap.initialize(server['LDAP_SERVER_URI'])
if server['LDAP_START_TLS'] == 'true': if server['LDAP_START_TLS'] == 'true':
@ -38,7 +38,7 @@ class LDAP(object):
def _get_email(self, server, username): def _get_email(self, server, username):
try: try:
results = self.conn.search_s(server['LDAP_SEARCH_BASE'], results = self.conn.search_s(server['LDAP_SEARCH_BASE'],
ldap.SCOPE_SUBTREE, 'uid={0}' ldap.SCOPE_SUBTREE, 'uid={}'
.format(username), .format(username),
[server['EMAIL_SEARCH_FIELD']]) [server['EMAIL_SEARCH_FIELD']])
@ -49,7 +49,7 @@ class LDAP(object):
return email return email
def login(self, username, password): def login(self, username, password):
for k, v in six.iteritems(self.ldap_settings): for k, v in self.ldap_settings.items():
try: try:
self._connect(v) self._connect(v)
user_dn = v['LDAP_USER_DN_TEMPLATE'].format(username=username) user_dn = v['LDAP_USER_DN_TEMPLATE'].format(username=username)
@ -61,7 +61,7 @@ class LDAP(object):
_log.info(e) _log.info(e)
finally: finally:
_log.info('Unbinding {0}.'.format(v['LDAP_SERVER_URI'])) _log.info('Unbinding {}.'.format(v['LDAP_SERVER_URI']))
self.conn.unbind() self.conn.unbind()
return False, None return False, None
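The six.iteritems() wrapper existed because Python 2's dict.items() materialised a full list; Python 3's dict.items() is already a lazy view, so the plain method call is equivalent. A short sketch with an invented settings dict shaped roughly like the plugin config:

ldap_settings = {
    'server1': {'LDAP_SERVER_URI': 'ldap://ldap.example.org'},   # invented entry
}

for name, server in ldap_settings.items():   # was: six.iteritems(self.ldap_settings)
    print(name, '->', server['LDAP_SERVER_URI'])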

View File

@ -44,7 +44,7 @@ def login(request):
if user: if user:
# set up login in session # set up login in session
request.session['user_id'] = six.text_type(user.id) request.session['user_id'] = str(user.id)
request.session.save() request.session.save()
if request.form.get('next'): if request.form.get('next'):

View File

@ -19,11 +19,11 @@ def rdfa_to_readable(rdfa_predicate):
A simple script to convert rdfa resource descriptors into a form more A simple script to convert rdfa resource descriptors into a form more
accessible for humans. accessible for humans.
""" """
components = rdfa_predicate.split(u":") components = rdfa_predicate.split(":")
if len(components) >= 2: if len(components) >= 2:
readable = components[1].capitalize() readable = components[1].capitalize()
else: else:
readable = u"" readable = ""
return readable return readable
def add_rdfa_to_readable_to_media_home(context): def add_rdfa_to_readable_to_media_home(context):

View File

@ -41,7 +41,7 @@ class Nonce(Base):
salt = Column(Unicode, primary_key=True) salt = Column(Unicode, primary_key=True)
def __unicode__(self): def __unicode__(self):
return u'Nonce: %r, %r' % (self.server_url, self.salt) return 'Nonce: {!r}, {!r}'.format(self.server_url, self.salt)
class Association(Base): class Association(Base):
@ -55,7 +55,7 @@ class Association(Base):
assoc_type = Column(Unicode) assoc_type = Column(Unicode)
def __unicode__(self): def __unicode__(self):
return u'Association: %r, %r' % (self.server_url, self.handle) return 'Association: {!r}, {!r}'.format(self.server_url, self.handle)
MODELS = [ MODELS = [
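The %r placeholders above become {!r} conversion fields, which also route through repr(). A self-contained check with invented values:

server_url = 'https://id.example.org/'   # invented
salt = 'abc123'                          # invented

old = u'Nonce: %r, %r' % (server_url, salt)
new = 'Nonce: {!r}, {!r}'.format(server_url, salt)
assert old == new == "Nonce: 'https://id.example.org/', 'abc123'"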

View File

@ -36,12 +36,12 @@ class SQLAlchemyOpenIDStore(OpenIDStore):
if not assoc: if not assoc:
assoc = Association() assoc = Association()
assoc.server_url = six.text_type(server_url) assoc.server_url = str(server_url)
assoc.handle = association.handle assoc.handle = association.handle
# django uses base64 encoding, python-openid uses a blob field for # django uses base64 encoding, python-openid uses a blob field for
# secret # secret
assoc.secret = six.text_type(base64.encodestring(association.secret)) assoc.secret = str(base64.encodestring(association.secret))
assoc.issued = association.issued assoc.issued = association.issued
assoc.lifetime = association.lifetime assoc.lifetime = association.lifetime
assoc.assoc_type = association.assoc_type assoc.assoc_type = association.assoc_type
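A hedged note on the surrounding code (this hunk leaves it untouched): on Python 3, base64.encodestring() is a deprecated alias of base64.encodebytes() (removed in 3.9) and returns bytes, so str() over the result yields a "b'...'" debug representation rather than the base64 text; decoding is the usual way to get a real str. A sketch of the difference, not the project's code:

import base64

secret = b'\x00\x01binary-secret'                    # invented secret bytes

# str() on bytes gives the debug form, e.g. "b'AAFi...'", not the base64 text.
assert str(base64.encodebytes(secret)).startswith("b'")

# Decoding the bytes yields the plain base64 string instead.
encoded = base64.encodebytes(secret).decode('ascii')
assert isinstance(encoded, str) and not encoded.startswith("b'")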

View File

@ -189,7 +189,7 @@ def finish_login(request):
if user: if user:
# Set up login in session # Set up login in session
request.session['user_id'] = six.text_type(user.id) request.session['user_id'] = str(user.id)
request.session.save() request.session.save()
if request.session.get('next'): if request.session.get('next'):

View File

@ -65,7 +65,7 @@ def login(request):
user = query.user if query else None user = query.user if query else None
if user: if user:
request.session['user_id'] = six.text_type(user.id) request.session['user_id'] = str(user.id)
request.session['persona_login_email'] = email request.session['persona_login_email'] = email
request.session.save() request.session.save()

View File

@ -47,9 +47,9 @@ class PwgNamedArray(list):
def _fill_element_dict(el, data, as_attr=()): def _fill_element_dict(el, data, as_attr=()):
for k, v in six.iteritems(data): for k, v in data.items():
if k in as_attr: if k in as_attr:
if not isinstance(v, six.string_types): if not isinstance(v, str):
v = str(v) v = str(v)
el.set(k, v) el.set(k, v)
else: else:
@ -63,7 +63,7 @@ def _fill_element(el, data):
el.text = "1" el.text = "1"
else: else:
el.text = "0" el.text = "0"
elif isinstance(data, six.string_types): elif isinstance(data, str):
el.text = data el.text = data
elif isinstance(data, int): elif isinstance(data, int):
el.text = str(data) el.text = str(data)
@ -92,7 +92,7 @@ def response_xml(result):
mimetype='text/xml', status=status) mimetype='text/xml', status=status)
class CmdTable(object): class CmdTable:
_cmd_table = {} _cmd_table = {}
def __init__(self, cmd_name, only_post=False): def __init__(self, cmd_name, only_post=False):
@ -131,11 +131,11 @@ def check_form(form):
raise BadRequest() raise BadRequest()
dump = [] dump = []
for f in form: for f in form:
dump.append("%s=%r" % (f.name, f.data)) dump.append("{}={!r}".format(f.name, f.data))
_log.debug("form: %s", " ".join(dump)) _log.debug("form: %s", " ".join(dump))
class PWGSession(object): class PWGSession:
session_manager = None session_manager = None
def __init__(self, request): def __init__(self, request):
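Two Python 3 simplifications visible in this file, shown together in a runnable sketch (the class and function names below are invented stand-ins): isinstance checks against six.string_types collapse to str, and explicit inheritance from object is redundant because every Python 3 class is new-style.

class CmdTableSketch:                     # was: class CmdTable(object)
    pass

def coerce_to_str(v):                     # stand-in for the attribute-coercion branch above
    if not isinstance(v, str):            # was: isinstance(v, six.string_types)
        v = str(v)
    return v

assert coerce_to_str(7) == '7' and coerce_to_str('x') == 'x'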

View File

@ -121,7 +121,7 @@ def pwg_images_addSimple(request):
raise BadRequest() raise BadRequest()
dump = [] dump = []
for f in form: for f in form:
dump.append("%s=%r" % (f.name, f.data)) dump.append("{}={!r}".format(f.name, f.data))
_log.info("addSimple: %r %s %r", request.form, " ".join(dump), _log.info("addSimple: %r %s %r", request.form, " ".join(dump),
request.files) request.files)
@ -133,8 +133,8 @@ def pwg_images_addSimple(request):
mg_app=request.app, user=request.user, mg_app=request.app, user=request.user,
submitted_file=request.files['image'], submitted_file=request.files['image'],
filename=request.files['image'].filename, filename=request.files['image'].filename,
title=six.text_type(form.name.data), title=str(form.name.data),
description=six.text_type(form.comment.data)) description=str(form.comment.data))
collection_id = form.category.data collection_id = form.category.data
if collection_id > 0: if collection_id > 0:
@ -151,7 +151,7 @@ def pwg_images_addSimple(request):
# Handle upload limit issues # Handle upload limit issues
except FileUploadLimit: except FileUploadLimit:
raise BadRequest( raise BadRequest(
_(u'Sorry, the file size is too big.')) _('Sorry, the file size is too big.'))
except UserUploadLimit: except UserUploadLimit:
raise BadRequest( raise BadRequest(
_('Sorry, uploading this file will put you over your' _('Sorry, uploading this file will put you over your'

View File

@ -37,10 +37,10 @@ def make_stats(context):
user = request.user user = request.user
if user: if user:
num_queued = MediaEntry.query.filter_by( num_queued = MediaEntry.query.filter_by(
actor=user.id, state=u'processing').count() actor=user.id, state='processing').count()
context['num_queued'] = num_queued context['num_queued'] = num_queued
num_failed = MediaEntry.query.filter_by( num_failed = MediaEntry.query.filter_by(
actor=user.id, state=u'failed').count() actor=user.id, state='failed').count()
context['num_failed'] = num_failed context['num_failed'] = num_failed
return context return context

View File

@ -31,11 +31,11 @@ def get_client():
client = None client = None
if sentry_dsn: if sentry_dsn:
_log.info('Setting up raven from plugin config: {0}'.format( _log.info('Setting up raven from plugin config: {}'.format(
sentry_dsn)) sentry_dsn))
client = Client(sentry_dsn) client = Client(sentry_dsn)
elif os.environ.get('SENTRY_DSN'): elif os.environ.get('SENTRY_DSN'):
_log.info('Setting up raven from SENTRY_DSN environment variable: {0}'\ _log.info('Setting up raven from SENTRY_DSN environment variable: {}'\
.format(os.environ.get('SENTRY_DSN'))) .format(os.environ.get('SENTRY_DSN')))
client = Client() # Implicitly looks for SENTRY_DSN client = Client() # Implicitly looks for SENTRY_DSN
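pyupgrade also strips explicit positional indices such as {0} when the fields are used in order, since '{}' auto-numbers identically. A quick check with an invented DSN:

sentry_dsn = 'https://key@sentry.example.org/1'   # invented DSN for the demo

assert ('Setting up raven from plugin config: {0}'.format(sentry_dsn)
        == 'Setting up raven from plugin config: {}'.format(sentry_dsn))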

View File

@ -57,7 +57,7 @@ def edit_subtitles(request, media):
if mimetypes.guess_type( if mimetypes.guess_type(
request.files['subtitle_file'].filename)[0] in \ request.files['subtitle_file'].filename)[0] in \
UNSAFE_MIMETYPES: UNSAFE_MIMETYPES:
public_filename = secure_filename('{0}.notsafe'.format( public_filename = secure_filename('{}.notsafe'.format(
request.files['subtitle_file'].filename)) request.files['subtitle_file'].filename))
else: else:
public_filename = secure_filename( public_filename = secure_filename(
@ -72,7 +72,7 @@ def edit_subtitles(request, media):
return redirect(request, return redirect(request,
location=media.url_for_self(request.urlgen)) location=media.url_for_self(request.urlgen))
subtitle_public_filepath = mg_globals.public_store.get_unique_filepath( subtitle_public_filepath = mg_globals.public_store.get_unique_filepath(
['media_entries', six.text_type(media.id), 'subtitle', ['media_entries', str(media.id), 'subtitle',
public_filename]) public_filename])
with mg_globals.public_store.get_file( with mg_globals.public_store.get_file(

View File

@ -13,7 +13,6 @@
# #
# You should have received a copy of the GNU Affero General Public License # You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import logging import logging
import re import re
@ -23,7 +22,7 @@ _log = logging.getLogger(__name__)
class TrimWhiteSpaceMeddleware(meddleware.BaseMeddleware): class TrimWhiteSpaceMeddleware(meddleware.BaseMeddleware):
_setup_plugin_called = 0 _setup_plugin_called = 0
RE_MULTI_WHITESPACE = re.compile(b'(\s)\s+', re.M) RE_MULTI_WHITESPACE = re.compile(br'(\s)\s+', re.M)
def process_response(self, request, response): def process_response(self, request, response):
"""Perform very naive html tidying by removing multiple whitespaces""" """Perform very naive html tidying by removing multiple whitespaces"""
@ -65,7 +64,7 @@ class TrimWhiteSpaceMeddleware(meddleware.BaseMeddleware):
# Append ourselves to the list of enabled Meddlewares # Append ourselves to the list of enabled Meddlewares
meddleware.ENABLED_MEDDLEWARE.append( meddleware.ENABLED_MEDDLEWARE.append(
'{0}:{1}'.format(cls.__module__, cls.__name__)) '{}:{}'.format(cls.__module__, cls.__name__))
hooks = { hooks = {
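Two things happen in this file: the unicode_literals future import disappears (Python 3 string literals are already unicode), and the byte pattern gains an r prefix so the \s escape is stated explicitly instead of relying on unknown-escape passthrough. A small runnable sketch of the resulting regex behaviour:

import re

RE_MULTI_WHITESPACE = re.compile(br'(\s)\s+', re.M)   # raw bytes literal, as in the hunk

# Collapses each run of whitespace down to its first character.
assert RE_MULTI_WHITESPACE.sub(br'\1', b'a  b\n\n\nc') == b'a b\nc'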

View File

@ -35,7 +35,7 @@ from mediagoblin.tools.translate import lazy_pass_to_ugettext as _
_log = logging.getLogger(__name__) _log = logging.getLogger(__name__)
class ProgressCallback(object): class ProgressCallback:
def __init__(self, entry): def __init__(self, entry):
self.entry = entry self.entry = entry
@ -53,11 +53,11 @@ class ProgressCallback(object):
def create_pub_filepath(entry, filename): def create_pub_filepath(entry, filename):
return mgg.public_store.get_unique_filepath( return mgg.public_store.get_unique_filepath(
['media_entries', ['media_entries',
six.text_type(entry.id), str(entry.id),
filename]) filename])
class FilenameBuilder(object): class FilenameBuilder:
"""Easily slice and dice filenames. """Easily slice and dice filenames.
Initialize this class with an original file path, then use the fill() Initialize this class with an original file path, then use the fill()
@ -90,7 +90,7 @@ class FilenameBuilder(object):
class MediaProcessor(object): class MediaProcessor:
"""A particular processor for this media type. """A particular processor for this media type.
While the ProcessingManager handles all types of MediaProcessing While the ProcessingManager handles all types of MediaProcessing
@ -192,7 +192,7 @@ class ProcessingManagerDoesNotExist(ProcessingKeyError): pass
class ProcessingManager(object): class ProcessingManager:
"""Manages all the processing actions available for a media type """Manages all the processing actions available for a media type
Specific processing actions, MediaProcessor subclasses, are added Specific processing actions, MediaProcessor subclasses, are added
@ -290,7 +290,7 @@ def get_processing_manager_for_type(media_type):
manager_class = hook_handle(('reprocess_manager', media_type)) manager_class = hook_handle(('reprocess_manager', media_type))
if not manager_class: if not manager_class:
raise ProcessingManagerDoesNotExist( raise ProcessingManagerDoesNotExist(
"A processing manager does not exist for {0}".format(media_type)) "A processing manager does not exist for {}".format(media_type))
manager = manager_class() manager = manager_class()
return manager return manager
@ -331,19 +331,19 @@ def mark_entry_failed(entry_id, exc):
# metadata the user might have supplied. # metadata the user might have supplied.
atomic_update(mgg.database.MediaEntry, atomic_update(mgg.database.MediaEntry,
{'id': entry_id}, {'id': entry_id},
{u'state': u'failed', {'state': 'failed',
u'fail_error': six.text_type(exc.exception_path), 'fail_error': str(exc.exception_path),
u'fail_metadata': exc.metadata}) 'fail_metadata': exc.metadata})
else: else:
_log.warn("No idea what happened here, but it failed: %r", exc) _log.warn("No idea what happened here, but it failed: %r", exc)
# Looks like no, let's record it so that admin could ask us about the # Looks like no, let's record it so that admin could ask us about the
# reason # reason
atomic_update(mgg.database.MediaEntry, atomic_update(mgg.database.MediaEntry,
{'id': entry_id}, {'id': entry_id},
{u'state': u'failed', {'state': 'failed',
u'fail_error': u'Unhandled exception: {0}'.format( 'fail_error': 'Unhandled exception: {}'.format(
six.text_type(exc)), str(exc)),
u'fail_metadata': {}}) 'fail_metadata': {}})
def get_process_filename(entry, workbench, acceptable_files): def get_process_filename(entry, workbench, acceptable_files):
@ -391,7 +391,7 @@ def store_public(entry, keyname, local_file, target_name=None,
try: try:
mgg.public_store.copy_local_to_storage(local_file, target_filepath) mgg.public_store.copy_local_to_storage(local_file, target_filepath)
except Exception as e: except Exception as e:
_log.error(u'Exception happened: {0}'.format(e)) _log.error('Exception happened: {}'.format(e))
raise PublicStoreFail(keyname=keyname) raise PublicStoreFail(keyname=keyname)
# raise an error if the file failed to copy # raise an error if the file failed to copy
if not mgg.public_store.file_exists(target_filepath): if not mgg.public_store.file_exists(target_filepath):
@ -400,7 +400,7 @@ def store_public(entry, keyname, local_file, target_name=None,
entry.media_files[keyname] = target_filepath entry.media_files[keyname] = target_filepath
def copy_original(entry, orig_filename, target_name, keyname=u"original"): def copy_original(entry, orig_filename, target_name, keyname="original"):
store_public(entry, keyname, orig_filename, target_name) store_public(entry, keyname, orig_filename, target_name)
@ -413,16 +413,16 @@ class BaseProcessingFail(Exception):
and provide the exception_path and general_message applicable to and provide the exception_path and general_message applicable to
this error. this error.
""" """
general_message = u'' general_message = ''
@property @property
def exception_path(self): def exception_path(self):
return u"%s:%s" % ( return "{}:{}".format(
self.__class__.__module__, self.__class__.__name__) self.__class__.__module__, self.__class__.__name__)
def __init__(self, message=None, **metadata): def __init__(self, message=None, **metadata):
if message is not None: if message is not None:
super(BaseProcessingFail, self).__init__(message) super().__init__(message)
metadata['message'] = message metadata['message'] = message
self.metadata = metadata self.metadata = metadata
@ -431,7 +431,7 @@ class BadMediaFail(BaseProcessingFail):
Error that should be raised when an inappropriate file was given Error that should be raised when an inappropriate file was given
for the media type specified. for the media type specified.
""" """
general_message = _(u'Invalid file given for media type.') general_message = _('Invalid file given for media type.')
class PublicStoreFail(BaseProcessingFail): class PublicStoreFail(BaseProcessingFail):
@ -446,4 +446,4 @@ class ProcessFileNotFound(BaseProcessingFail):
Error that should be raised when an acceptable file for processing Error that should be raised when an acceptable file for processing
is not found. is not found.
""" """
general_message = _(u'An acceptable processing file was not found') general_message = _('An acceptable processing file was not found')
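In the BaseProcessingFail hunk above, super(BaseProcessingFail, self).__init__(message) becomes the zero-argument super().__init__(message), which Python 3 resolves from the enclosing class automatically. A self-contained sketch using an invented stand-in class:

class ProcessingFailSketch(Exception):        # stand-in for BaseProcessingFail
    general_message = ''

    def __init__(self, message=None, **metadata):
        if message is not None:
            super().__init__(message)         # was: super(ProcessingFailSketch, self).__init__(message)
        metadata['message'] = message
        self.metadata = metadata

err = ProcessingFailSketch('boom', keyname='thumb')
assert str(err) == 'boom' and err.metadata == {'message': 'boom', 'keyname': 'thumb'}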

View File

@ -38,7 +38,7 @@ def handle_push_urls(feed_url):
Retry 3 times every 2 minutes if run in separate process before failing.""" Retry 3 times every 2 minutes if run in separate process before failing."""
if not mgg.app_config["push_urls"]: if not mgg.app_config["push_urls"]:
return # Nothing to do return # Nothing to do
_log.debug('Notifying Push servers for feed {0}'.format(feed_url)) _log.debug('Notifying Push servers for feed {}'.format(feed_url))
hubparameters = { hubparameters = {
'hub.mode': 'publish', 'hub.mode': 'publish',
'hub.url': feed_url} 'hub.url': feed_url}
@ -57,7 +57,7 @@ def handle_push_urls(feed_url):
return handle_push_urls.retry(exc=exc, throw=False) return handle_push_urls.retry(exc=exc, throw=False)
except Exception as e: except Exception as e:
# All retries failed, Failure is no tragedy here, probably. # All retries failed, Failure is no tragedy here, probably.
_log.warn('Failed to notify PuSH server for feed {0}. ' _log.warn('Failed to notify PuSH server for feed {}. '
'Giving up.'.format(feed_url)) 'Giving up.'.format(feed_url))
return False return False
@ -95,18 +95,18 @@ class ProcessMedia(celery.Task):
with processor_class(manager, entry) as processor: with processor_class(manager, entry) as processor:
# Initial state change has to be here because # Initial state change has to be here because
# the entry.state gets recorded on processor_class init # the entry.state gets recorded on processor_class init
entry.state = u'processing' entry.state = 'processing'
entry.save() entry.save()
_log.debug('Processing {0}'.format(entry)) _log.debug('Processing {}'.format(entry))
try: try:
processor.process(**reprocess_info) processor.process(**reprocess_info)
except Exception as exc: except Exception as exc:
if processor.entry_orig_state == 'processed': if processor.entry_orig_state == 'processed':
_log.error( _log.error(
'Entry {0} failed to process due to the following' 'Entry {} failed to process due to the following'
' error: {1}'.format(entry.id, exc)) ' error: {}'.format(entry.id, exc))
_log.info( _log.info(
'Setting entry.state back to "processed"') 'Setting entry.state back to "processed"')
pass pass
@ -115,7 +115,7 @@ class ProcessMedia(celery.Task):
# We set the state to processed and save the entry here so there's # We set the state to processed and save the entry here so there's
# no need to save at the end of the processing stage, probably ;) # no need to save at the end of the processing stage, probably ;)
entry.state = u'processed' entry.state = 'processed'
entry.save() entry.save()
# Notify the PuSH servers as async task # Notify the PuSH servers as async task
@ -130,7 +130,7 @@ class ProcessMedia(celery.Task):
except ImportError as exc: except ImportError as exc:
_log.error( _log.error(
'Entry {0} failed to process due to an import error: {1}'\ 'Entry {} failed to process due to an import error: {}'\
.format( .format(
entry.title, entry.title,
exc)) exc))
@ -140,7 +140,7 @@ class ProcessMedia(celery.Task):
except Exception as exc: except Exception as exc:
_log.error('An unhandled exception was raised while' _log.error('An unhandled exception was raised while'
+ ' processing {0}'.format( + ' processing {}'.format(
entry)) entry))
mark_entry_failed(entry.id, exc) mark_entry_failed(entry.id, exc)

View File

@ -14,7 +14,6 @@
# You should have received a copy of the GNU Affero General Public License # You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import shutil import shutil
import uuid import uuid
@ -50,7 +49,7 @@ class NotImplementedError(Error):
# Storage interface & basic file implementation # Storage interface & basic file implementation
############################################### ###############################################
class StorageInterface(object): class StorageInterface:
""" """
Interface for the storage API. Interface for the storage API.
@ -148,7 +147,7 @@ class StorageInterface(object):
filepath = clean_listy_filepath(filepath) filepath = clean_listy_filepath(filepath)
if self.file_exists(filepath): if self.file_exists(filepath):
return filepath[:-1] + ["%s-%s" % (uuid.uuid4(), filepath[-1])] return filepath[:-1] + ["{}-{}".format(uuid.uuid4(), filepath[-1])]
else: else:
return filepath return filepath
@ -224,10 +223,10 @@ def clean_listy_filepath(listy_filepath):
A cleaned list of unicode objects. A cleaned list of unicode objects.
""" """
cleaned_filepath = [ cleaned_filepath = [
six.text_type(secure_filename(filepath)) str(secure_filename(filepath))
for filepath in listy_filepath] for filepath in listy_filepath]
if u'' in cleaned_filepath: if '' in cleaned_filepath:
raise InvalidFilepath( raise InvalidFilepath(
"A filename component could not be resolved into a usable name.") "A filename component could not be resolved into a usable name.")
@ -261,7 +260,7 @@ def storage_system_from_config(config_section):
""" """
# This construct is needed, because dict(config) does # This construct is needed, because dict(config) does
# not replace the variables in the config items. # not replace the variables in the config items.
config_params = dict(six.iteritems(config_section)) config_params = dict(config_section.items())
if 'storage_class' in config_params: if 'storage_class' in config_params:
storage_class = config_params['storage_class'] storage_class = config_params['storage_class']

View File

@ -20,7 +20,6 @@ python-cloudfiles one.
http://docs.python.org/whatsnew/2.5.html#pep-328-absolute-and-relative-imports http://docs.python.org/whatsnew/2.5.html#pep-328-absolute-and-relative-imports
''' '''
from __future__ import absolute_import
from mediagoblin.storage import StorageInterface, clean_listy_filepath from mediagoblin.storage import StorageInterface, clean_listy_filepath
@ -58,7 +57,7 @@ class CloudFilesStorage(StorageInterface):
servicenet=True if self.param_use_servicenet == 'true' or \ servicenet=True if self.param_use_servicenet == 'true' or \
self.param_use_servicenet == True else False) self.param_use_servicenet == True else False)
_log.debug('Connected to {0} (auth: {1})'.format( _log.debug('Connected to {} (auth: {})'.format(
self.connection.connection.host, self.connection.connection.host,
self.connection.auth.host)) self.connection.auth.host))
@ -72,7 +71,7 @@ class CloudFilesStorage(StorageInterface):
self.container = self.connection.get_container( self.container = self.connection.get_container(
self.param_container) self.param_container)
_log.debug('Container: {0}'.format( _log.debug('Container: {}'.format(
self.container.name)) self.container.name))
self.container_uri = self.container.public_ssl_uri() self.container_uri = self.container.public_ssl_uri()
@ -162,7 +161,7 @@ class CloudFilesStorage(StorageInterface):
# and bandwidth usage. So, override this method and use the # and bandwidth usage. So, override this method and use the
# Cloudfile's "send" interface instead. # Cloudfile's "send" interface instead.
# TODO: Fixing write() still seems worthwhile though. # TODO: Fixing write() still seems worthwhile though.
_log.debug('Sending {0} to cloudfiles...'.format(filepath)) _log.debug('Sending {} to cloudfiles...'.format(filepath))
with self.get_file(filepath, 'wb') as dest_file: with self.get_file(filepath, 'wb') as dest_file:
with open(filename, 'rb') as source_file: with open(filename, 'rb') as source_file:
# Copy to storage system in 4096 byte chunks # Copy to storage system in 4096 byte chunks
@ -188,7 +187,7 @@ class CloudFilesStorageObjectWrapper():
self.storage_object = storage_object self.storage_object = storage_object
def read(self, *args, **kwargs): def read(self, *args, **kwargs):
_log.debug('Reading {0}'.format( _log.debug('Reading {}'.format(
self.storage_object.name)) self.storage_object.name))
return self.storage_object.read(*args, **kwargs) return self.storage_object.read(*args, **kwargs)

View File

@ -32,7 +32,7 @@ class FileObjectAwareFile(io.FileIO):
# object, which should be saved RAM-friendly way # object, which should be saved RAM-friendly way
shutil.copyfileobj(data, self) shutil.copyfileobj(data, self)
else: else:
super(FileObjectAwareFile, self).write(data) super().write(data)
class BasicFileStorage(StorageInterface): class BasicFileStorage(StorageInterface):

View File

@ -14,7 +14,6 @@
# You should have received a copy of the GNU Affero General Public License # You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import six import six
@ -124,7 +123,7 @@ class MountStorage(StorageInterface):
v = table.get(None) v = table.get(None)
if v: if v:
res.append(" " * len(indent) + repr(indent) + ": " + repr(v)) res.append(" " * len(indent) + repr(indent) + ": " + repr(v))
for k, v in six.iteritems(table): for k, v in table.items():
if k == None: if k == None:
continue continue
res.append(" " * len(indent) + repr(k) + ":") res.append(" " * len(indent) + repr(k) + ":")

View File

@ -27,7 +27,7 @@ def get_submit_start_form(form, **kwargs):
max_file_size = kwargs.get('max_file_size') max_file_size = kwargs.get('max_file_size')
desc = None desc = None
if max_file_size: if max_file_size:
desc = _('Max file size: {0} mb'.format(max_file_size)) desc = _('Max file size: {} mb'.format(max_file_size))
class SubmitStartForm(wtforms.Form): class SubmitStartForm(wtforms.Form):
file = wtforms.FileField( file = wtforms.FileField(

View File

@ -105,7 +105,7 @@ class UserPastUploadLimit(UploadLimitError):
def submit_media(mg_app, user, submitted_file, filename, def submit_media(mg_app, user, submitted_file, filename,
title=None, description=None, collection_slug=None, title=None, description=None, collection_slug=None,
license=None, metadata=None, tags_string=u"", license=None, metadata=None, tags_string="",
callback_url=None, urlgen=None,): callback_url=None, urlgen=None,):
""" """
Args: Args:
@ -132,7 +132,7 @@ def submit_media(mg_app, user, submitted_file, filename,
# If the filename contains non ascii generate a unique name # If the filename contains non ascii generate a unique name
if not all(ord(c) < 128 for c in filename): if not all(ord(c) < 128 for c in filename):
filename = six.text_type(uuid.uuid4()) + splitext(filename)[-1] filename = str(uuid.uuid4()) + splitext(filename)[-1]
# Sniff the submitted media to determine which # Sniff the submitted media to determine which
# media plugin should handle processing # media plugin should handle processing
@ -141,9 +141,9 @@ def submit_media(mg_app, user, submitted_file, filename,
# create entry and save in database # create entry and save in database
entry = new_upload_entry(user) entry = new_upload_entry(user)
entry.media_type = media_type entry.media_type = media_type
entry.title = (title or six.text_type(splitext(filename)[0])) entry.title = (title or str(splitext(filename)[0]))
entry.description = description or u"" entry.description = description or ""
entry.license = license or None entry.license = license or None
@ -163,7 +163,7 @@ def submit_media(mg_app, user, submitted_file, filename,
# Get file size and round to 2 decimal places # Get file size and round to 2 decimal places
file_size = mg_app.queue_store.get_file_size( file_size = mg_app.queue_store.get_file_size(
entry.queued_media_file) / (1024.0 * 1024) entry.queued_media_file) / (1024.0 * 1024)
file_size = float('{0:.2f}'.format(file_size)) file_size = float('{:.2f}'.format(file_size))
# Check if file size is over the limit # Check if file size is over the limit
if max_file_size and file_size >= max_file_size: if max_file_size and file_size >= max_file_size:
@ -233,7 +233,7 @@ def prepare_queue_task(app, entry, filename):
# (If we got it off the task's auto-generation, there'd be # (If we got it off the task's auto-generation, there'd be
# a risk of a race condition when we'd save after sending # a risk of a race condition when we'd save after sending
# off the task) # off the task)
task_id = six.text_type(uuid.uuid4()) task_id = str(uuid.uuid4())
entry.queued_task_id = task_id entry.queued_task_id = task_id
# Now store generate the queueing related filename # Now store generate the queueing related filename
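Dropping the explicit index leaves any format spec intact, so '{0:.2f}' and '{:.2f}' render identically; a one-liner check with an invented size:

file_size = 3.14159                                   # invented size in MB
assert '{0:.2f}'.format(file_size) == '{:.2f}'.format(file_size) == '3.14'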

View File

@ -37,7 +37,7 @@ from mediagoblin.user_pages.lib import add_media_to_collection
@require_active_login @require_active_login
@user_has_privilege(u'uploader') @user_has_privilege('uploader')
def submit_start(request): def submit_start(request):
""" """
First view for submitting a file. First view for submitting a file.
@ -65,16 +65,16 @@ def submit_start(request):
if request.method == 'POST' and submit_form.validate(): if request.method == 'POST' and submit_form.validate():
if not check_file_field(request, 'file'): if not check_file_field(request, 'file'):
submit_form.file.errors.append( submit_form.file.errors.append(
_(u'You must provide a file.')) _('You must provide a file.'))
else: else:
try: try:
media = submit_media( media = submit_media(
mg_app=request.app, user=request.user, mg_app=request.app, user=request.user,
submitted_file=request.files['file'], submitted_file=request.files['file'],
filename=request.files['file'].filename, filename=request.files['file'].filename,
title=six.text_type(submit_form.title.data), title=str(submit_form.title.data),
description=six.text_type(submit_form.description.data), description=str(submit_form.description.data),
license=six.text_type(submit_form.license.data) or None, license=str(submit_form.license.data) or None,
tags_string=submit_form.tags.data, tags_string=submit_form.tags.data,
urlgen=request.urlgen) urlgen=request.urlgen)
@ -97,7 +97,7 @@ def submit_start(request):
# Handle upload limit issues # Handle upload limit issues
except FileUploadLimit: except FileUploadLimit:
submit_form.file.errors.append( submit_form.file.errors.append(
_(u'Sorry, the file size is too big.')) _('Sorry, the file size is too big.'))
except UserUploadLimit: except UserUploadLimit:
submit_form.file.errors.append( submit_form.file.errors.append(
_('Sorry, uploading this file will put you over your' _('Sorry, uploading this file will put you over your'
@ -131,8 +131,8 @@ def add_collection(request, media=None):
if request.method == 'POST' and submit_form.validate(): if request.method == 'POST' and submit_form.validate():
collection = request.db.Collection() collection = request.db.Collection()
collection.title = six.text_type(submit_form.title.data) collection.title = str(submit_form.title.data)
collection.description = six.text_type(submit_form.description.data) collection.description = str(submit_form.description.data)
collection.actor = request.user.id collection.actor = request.user.id
collection.type = request.db.Collection.USER_DEFINED_TYPE collection.type = request.db.Collection.USER_DEFINED_TYPE
collection.generate_slug() collection.generate_slug()

Some files were not shown because too many files have changed in this diff.