Apply pyupgrade --py36-plus.

This removes some 'u' prefixes and converts simple format() calls to f-strings.
This commit is contained in:
Ben Sturmfels 2021-09-23 11:51:04 +10:00
parent f90707e22c
commit 6f48143f4c
No known key found for this signature in database
GPG Key ID: 023C05E2C9C068F0
57 changed files with 160 additions and 162 deletions

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# #
# GNU MediaGoblin documentation build configuration file, created by # GNU MediaGoblin documentation build configuration file, created by
# sphinx-quickstart on Sun Apr 1 01:11:46 2012. # sphinx-quickstart on Sun Apr 1 01:11:46 2012.
@ -40,8 +39,8 @@ source_suffix = '.rst'
master_doc = 'index' master_doc = 'index'
# General information about the project. # General information about the project.
project = u'GNU MediaGoblin' project = 'GNU MediaGoblin'
copyright = u'2011, 2012, GNU MediaGoblin contributors' copyright = '2011, 2012, GNU MediaGoblin contributors'
# The version info for the project you're documenting, acts as replacement for # The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the # |version| and |release|, also used in various other places throughout the
@ -183,8 +182,8 @@ latex_elements = {
# Grouping the document tree into LaTeX files. List of tuples # Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]). # (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [ latex_documents = [
('index', 'GNUMediaGoblin.tex', u'GNU MediaGoblin Documentation', ('index', 'GNUMediaGoblin.tex', 'GNU MediaGoblin Documentation',
u'See AUTHORS', 'manual'), 'See AUTHORS', 'manual'),
] ]
# The name of an image file (relative to this directory) to place at the top of # The name of an image file (relative to this directory) to place at the top of
@ -213,8 +212,8 @@ latex_documents = [
# One entry per manual page. List of tuples # One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section). # (source start file, name, description, authors, manual section).
man_pages = [ man_pages = [
('index', 'gnumediagoblin', u'GNU MediaGoblin Documentation', ('index', 'gnumediagoblin', 'GNU MediaGoblin Documentation',
[u'See AUTHORS'], 1) ['See AUTHORS'], 1)
] ]
# If true, show URL addresses after external links. # If true, show URL addresses after external links.
@ -227,8 +226,8 @@ man_pages = [
# (source start file, target name, title, author, # (source start file, target name, title, author,
# dir menu entry, description, category) # dir menu entry, description, category)
texinfo_documents = [ texinfo_documents = [
('index', 'GNUMediaGoblin', u'GNU MediaGoblin Documentation', ('index', 'GNUMediaGoblin', 'GNU MediaGoblin Documentation',
u'See AUTHORS', 'GNUMediaGoblin', 'One line description of project.', 'See AUTHORS', 'GNUMediaGoblin', 'One line description of project.',
'Miscellaneous'), 'Miscellaneous'),
] ]

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# #
# GNU MediaGoblin documentation build configuration file, created by # GNU MediaGoblin documentation build configuration file, created by
# sphinx-quickstart on Thu Apr 7 20:10:27 2011. # sphinx-quickstart on Thu Apr 7 20:10:27 2011.
@ -42,8 +41,8 @@ source_suffix = '.rst'
master_doc = 'index' master_doc = 'index'
# General information about the project. # General information about the project.
project = u'GNU MediaGoblin' project = 'GNU MediaGoblin'
copyright = u'2011, 2012 GNU MediaGoblin contributors' copyright = '2011, 2012 GNU MediaGoblin contributors'
# The version info for the project you're documenting, acts as replacement for # The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the # |version| and |release|, also used in various other places throughout the
@ -186,8 +185,8 @@ htmlhelp_basename = 'GNUMediaGoblindoc'
# Grouping the document tree into LaTeX files. List of tuples # Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]). # (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [ latex_documents = [
('index', 'GNUMediaGoblin.tex', u'GNU MediaGoblin Documentation', ('index', 'GNUMediaGoblin.tex', 'GNU MediaGoblin Documentation',
u'Chris Webber, et al', 'manual'), 'Chris Webber, et al', 'manual'),
] ]
# The name of an image file (relative to this directory) to place at the top of # The name of an image file (relative to this directory) to place at the top of
@ -219,8 +218,8 @@ latex_documents = [
# One entry per manual page. List of tuples # One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section). # (source start file, name, description, authors, manual section).
man_pages = [ man_pages = [
('index', 'mediagoblin', u'GNU MediaGoblin Documentation', ('index', 'mediagoblin', 'GNU MediaGoblin Documentation',
[u'Chris Webber, et al'], 1) ['Chris Webber, et al'], 1)
] ]
# If true, show URL addresses after external links. # If true, show URL addresses after external links.
@ -233,7 +232,7 @@ man_pages = [
# (source start file, target name, title, author, # (source start file, target name, title, author,
# dir menu entry, description, category) # dir menu entry, description, category)
texinfo_documents = [ texinfo_documents = [
('index', 'mediagoblin', u'GNU MediaGoblin Documentation', u'mediagoblin', ('index', 'mediagoblin', 'GNU MediaGoblin Documentation', 'mediagoblin',
'GNU MediaGoblin', 'Media sharing web application.', 'Miscellaneous'), 'GNU MediaGoblin', 'Media sharing web application.', 'Miscellaneous'),
] ]

View File

@ -62,7 +62,7 @@ def profile_endpoint(request):
if user is None: if user is None:
username = request.matchdict["username"] username = request.matchdict["username"]
return json_error( return json_error(
"No such 'user' with username '{}'".format(username), f"No such 'user' with username '{username}'",
status=404 status=404
) )
@ -77,7 +77,7 @@ def user_endpoint(request):
if user is None: if user is None:
username = request.matchdict["username"] username = request.matchdict["username"]
return json_error( return json_error(
"No such 'user' with username '{}'".format(username), f"No such 'user' with username '{username}'",
status=404 status=404
) )
@ -97,7 +97,7 @@ def uploads_endpoint(request):
requested_user = LocalUser.query.filter(LocalUser.username==username).first() requested_user = LocalUser.query.filter(LocalUser.username==username).first()
if requested_user is None: if requested_user is None:
return json_error("No such 'user' with id '{}'".format(username), 404) return json_error(f"No such 'user' with id '{username}'", 404)
if request.method == "POST": if request.method == "POST":
# Ensure that the user is only able to upload to their own # Ensure that the user is only able to upload to their own
@ -121,9 +121,9 @@ def uploads_endpoint(request):
else: else:
filenames = sorted(mimetypes.guess_all_extensions(mimetype)) filenames = sorted(mimetypes.guess_all_extensions(mimetype))
if not filenames: if not filenames:
return json_error('Unknown mimetype: {}'.format(mimetype), return json_error(f'Unknown mimetype: {mimetype}',
status=415) status=415)
filename = 'unknown{}'.format(filenames[0]) filename = f'unknown{filenames[0]}'
file_data = FileStorage( file_data = FileStorage(
stream=io.BytesIO(request.data), stream=io.BytesIO(request.data),
@ -153,13 +153,13 @@ def inbox_endpoint(request, inbox=None):
user = LocalUser.query.filter(LocalUser.username==username).first() user = LocalUser.query.filter(LocalUser.username==username).first()
if user is None: if user is None:
return json_error("No such 'user' with id '{}'".format(username), 404) return json_error(f"No such 'user' with id '{username}'", 404)
# Only the user who's authorized should be able to read their inbox # Only the user who's authorized should be able to read their inbox
if user.id != request.user.id: if user.id != request.user.id:
return json_error( return json_error(
"Only '{}' can read this inbox.".format(user.username), f"Only '{user.username}' can read this inbox.",
403 403
) )
@ -190,7 +190,7 @@ def inbox_endpoint(request, inbox=None):
# build the inbox feed # build the inbox feed
feed = { feed = {
"displayName": "Activities for {}".format(user.username), "displayName": f"Activities for {user.username}",
"author": user.serialize(request), "author": user.serialize(request),
"objectTypes": ["activity"], "objectTypes": ["activity"],
"url": request.base_url, "url": request.base_url,
@ -237,7 +237,7 @@ def feed_endpoint(request, outbox=None):
# check if the user exists # check if the user exists
if requested_user is None: if requested_user is None:
return json_error("No such 'user' with id '{}'".format(username), 404) return json_error(f"No such 'user' with id '{username}'", 404)
if request.data: if request.data:
data = json.loads(request.data.decode()) data = json.loads(request.data.decode())
@ -313,7 +313,7 @@ def feed_endpoint(request, outbox=None):
if media is None: if media is None:
return json_response( return json_response(
"No such 'image' with id '{}'".format(media_id), f"No such 'image' with id '{media_id}'",
status=404 status=404
) )
@ -326,7 +326,7 @@ def feed_endpoint(request, outbox=None):
if not media.unserialize(data["object"]): if not media.unserialize(data["object"]):
return json_error( return json_error(
"Invalid 'image' with id '{}'".format(media_id) f"Invalid 'image' with id '{media_id}'"
) )
@ -346,7 +346,7 @@ def feed_endpoint(request, outbox=None):
# Oh no! We don't know about this type of object (yet) # Oh no! We don't know about this type of object (yet)
object_type = obj.get("objectType", None) object_type = obj.get("objectType", None)
return json_error( return json_error(
"Unknown object type '{}'.".format(object_type) f"Unknown object type '{object_type}'."
) )
# Updating existing objects # Updating existing objects
@ -388,7 +388,7 @@ def feed_endpoint(request, outbox=None):
).first() ).first()
if comment is None: if comment is None:
return json_error( return json_error(
"No such 'comment' with id '{}'.".format(obj_id) f"No such 'comment' with id '{obj_id}'."
) )
# Check that the person trying to update the comment is # Check that the person trying to update the comment is
@ -436,7 +436,7 @@ def feed_endpoint(request, outbox=None):
if not image.unserialize(obj): if not image.unserialize(obj):
return json_error( return json_error(
"Invalid 'image' with id '{}'".format(obj_id) f"Invalid 'image' with id '{obj_id}'"
) )
image.generate_slug() image.generate_slug()
image.save() image.save()
@ -504,7 +504,7 @@ def feed_endpoint(request, outbox=None):
if comment is None: if comment is None:
return json_error( return json_error(
"No such 'comment' with id '{}'.".format(obj_id) f"No such 'comment' with id '{obj_id}'."
) )
# Make a delete activity # Make a delete activity
@ -533,7 +533,7 @@ def feed_endpoint(request, outbox=None):
if entry is None: if entry is None:
return json_error( return json_error(
"No such 'image' with id '{}'.".format(obj_id) f"No such 'image' with id '{obj_id}'."
) )
# Make the delete activity # Make the delete activity
@ -555,7 +555,7 @@ def feed_endpoint(request, outbox=None):
elif request.method != "GET": elif request.method != "GET":
return json_error( return json_error(
"Unsupported HTTP method {}".format(request.method), f"Unsupported HTTP method {request.method}",
status=501 status=501
) )
@ -654,7 +654,7 @@ def object_endpoint(request):
if object_type not in ["image"]: if object_type not in ["image"]:
# not sure why this is 404, maybe ask evan. Maybe 400? # not sure why this is 404, maybe ask evan. Maybe 400?
return json_error( return json_error(
"Unknown type: {}".format(object_type), f"Unknown type: {object_type}",
status=404 status=404
) )
@ -668,7 +668,7 @@ def object_endpoint(request):
media = MediaEntry.query.filter_by(public_id=public_id).first() media = MediaEntry.query.filter_by(public_id=public_id).first()
if media is None: if media is None:
return json_error( return json_error(
"Can't find '{}' with ID '{}'".format(object_type, object_id), f"Can't find '{object_type}' with ID '{object_id}'",
status=404 status=404
) )
@ -805,7 +805,7 @@ def lrdd_lookup(request):
if user is None: if user is None:
return json_error( return json_error(
"Can't find 'user' with username '{}'".format(username)) f"Can't find 'user' with username '{username}'")
return json_response([ return json_response([
{ {

View File

@ -425,9 +425,9 @@ class Client_v0(declarative_base()):
def __repr__(self): def __repr__(self):
if self.application_name: if self.application_name:
return "<Client {} - {}>".format(self.application_name, self.id) return f"<Client {self.application_name} - {self.id}>"
else: else:
return "<Client {}>".format(self.id) return f"<Client {self.id}>"
class RequestToken_v0(declarative_base()): class RequestToken_v0(declarative_base()):
""" """

View File

@ -188,7 +188,7 @@ class GenerateSlugMixin:
# Can we just append the object's id to the end? # Can we just append the object's id to the end?
if self.id: if self.id:
slug_with_id = "{}-{}".format(slug, self.id) slug_with_id = f"{slug}-{self.id}"
if not self.check_slug_used(slug_with_id): if not self.check_slug_used(slug_with_id):
self.slug = slug_with_id self.slug = slug_with_id
return # success! return # success!

View File

@ -290,7 +290,7 @@ class User(Base, UserMixin):
# Delete user, pass through commit=False/True in kwargs # Delete user, pass through commit=False/True in kwargs
username = self.username username = self.username
super().delete(*args, **kwargs) super().delete(*args, **kwargs)
_log.info('Deleted user "{}" account'.format(username)) _log.info(f'Deleted user "{username}" account')
def has_privilege(self, privilege, allow_admin=True): def has_privilege(self, privilege, allow_admin=True):
""" """
@ -389,7 +389,7 @@ class LocalUser(User):
self.username) self.username)
def get_public_id(self, host): def get_public_id(self, host):
return "acct:{}@{}".format(self.username, host) return f"acct:{self.username}@{host}"
def serialize(self, request): def serialize(self, request):
user = { user = {
@ -464,9 +464,9 @@ class Client(Base):
def __repr__(self): def __repr__(self):
if self.application_name: if self.application_name:
return "<Client {} - {}>".format(self.application_name, self.id) return f"<Client {self.application_name} - {self.id}>"
else: else:
return "<Client {}>".format(self.id) return f"<Client {self.id}>"
class RequestToken(Base): class RequestToken(Base):
""" """
@ -738,7 +738,7 @@ class MediaEntry(Base, MediaEntryMixin, CommentingMixin):
# Returns list of files we failed to delete # Returns list of files we failed to delete
_log.error('No such files from the user "{1}" to delete: ' _log.error('No such files from the user "{1}" to delete: '
'{0}'.format(str(error), self.get_actor)) '{0}'.format(str(error), self.get_actor))
_log.info('Deleted Media entry id "{}"'.format(self.id)) _log.info(f'Deleted Media entry id "{self.id}"')
# Related MediaTag's are automatically cleaned, but we might # Related MediaTag's are automatically cleaned, but we might
# want to clean out unused Tag's too. # want to clean out unused Tag's too.
if del_orphan_tags: if del_orphan_tags:
@ -858,7 +858,7 @@ class FileKeynames(Base):
name = Column(Unicode, unique=True) name = Column(Unicode, unique=True)
def __repr__(self): def __repr__(self):
return "<FileKeyname {!r}: {!r}>".format(self.id, self.name) return f"<FileKeyname {self.id!r}: {self.name!r}>"
@classmethod @classmethod
def find_or_new(cls, name): def find_or_new(cls, name):
@ -887,7 +887,7 @@ class MediaFile(Base):
{}) {})
def __repr__(self): def __repr__(self):
return "<MediaFile {}: {!r}>".format(self.name, self.file_path) return f"<MediaFile {self.name}: {self.file_path!r}>"
name_helper = relationship(FileKeynames, lazy="joined", innerjoin=True) name_helper = relationship(FileKeynames, lazy="joined", innerjoin=True)
name = association_proxy('name_helper', 'name', name = association_proxy('name_helper', 'name',
@ -935,7 +935,7 @@ class Tag(Base):
slug = Column(Unicode, nullable=False, unique=True) slug = Column(Unicode, nullable=False, unique=True)
def __repr__(self): def __repr__(self):
return "<Tag {!r}: {!r}>".format(self.id, self.slug) return f"<Tag {self.id!r}: {self.slug!r}>"
@classmethod @classmethod
def find_or_new(cls, slug): def find_or_new(cls, slug):
@ -1034,7 +1034,7 @@ class Comment(Base):
# fetch it from self.comment() # fetch it from self.comment()
raise AttributeError raise AttributeError
try: try:
_log.debug('Old attr is being accessed: {}'.format(attr)) _log.debug(f'Old attr is being accessed: {attr}')
return getattr(self.comment(), attr) # noqa return getattr(self.comment(), attr) # noqa
except Exception as e: except Exception as e:
_log.error(e) _log.error(e)

View File

@ -35,7 +35,7 @@ class FudgedCommandLine(config.CommandLine):
plugins = global_config.get('plugins', {}).keys() plugins = global_config.get('plugins', {}).keys()
for plugin in plugins: for plugin in plugins:
try: try:
import_component('{}.models:MODELS'.format(plugin)) import_component(f'{plugin}.models:MODELS')
except ImportError: except ImportError:
# It doesn't really matter if there's no models to import # It doesn't really matter if there's no models to import
# here. # here.

View File

@ -67,7 +67,7 @@ def gather_database_data(plugins):
for plugin in plugins: for plugin in plugins:
try: try:
models = import_component('{}.models:MODELS'.format(plugin)) models = import_component(f'{plugin}.models:MODELS')
except ImportError as exc: except ImportError as exc:
_log.debug('No models found for {}: {}'.format( _log.debug('No models found for {}: {}'.format(
plugin, plugin,
@ -115,7 +115,7 @@ def run_foundations(db, global_config):
for plugin in plugins: for plugin in plugins:
try: try:
foundations = import_component( foundations = import_component(
'{}.models:FOUNDATIONS'.format(plugin)) f'{plugin}.models:FOUNDATIONS')
all_foundations.append((plugin, foundations)) all_foundations.append((plugin, foundations))
except ImportError as exc: except ImportError as exc:
continue continue

View File

@ -145,7 +145,7 @@ def available(args):
manager = get_processing_manager_for_type(media_type) manager = get_processing_manager_for_type(media_type)
except ProcessingManagerDoesNotExist: except ProcessingManagerDoesNotExist:
entry = MediaEntry.query.filter_by(id=args.id_or_type).first() entry = MediaEntry.query.filter_by(id=args.id_or_type).first()
print('No such processing manager for {}'.format(entry.media_type)) print(f'No such processing manager for {entry.media_type}')
if args.state: if args.state:
processors = manager.list_all_processors_by_state(args.state) processors = manager.list_all_processors_by_state(args.state)
@ -170,7 +170,7 @@ def available(args):
else: else:
for processor in processors: for processor in processors:
if processor.description: if processor.description:
print(" - {}: {}".format(processor.name, processor.description)) print(f" - {processor.name}: {processor.description}")
else: else:
print(" - %s" % processor.name) print(" - %s" % processor.name)
@ -205,7 +205,7 @@ def run(args, media_id=None):
except ProcessingManagerDoesNotExist: except ProcessingManagerDoesNotExist:
entry = MediaEntry.query.filter_by(id=media_id).first() entry = MediaEntry.query.filter_by(id=media_id).first()
print('No such processing manager for {}'.format(entry.media_type)) print(f'No such processing manager for {entry.media_type}')
def bulk_run(args): def bulk_run(args):
@ -262,7 +262,7 @@ def thumbs(args):
reprocess_info=reprocess_request) reprocess_info=reprocess_request)
except ProcessingManagerDoesNotExist: except ProcessingManagerDoesNotExist:
print('No such processing manager for {}'.format(entry.media_type)) print(f'No such processing manager for {entry.media_type}')
def initial(args): def initial(args):
@ -278,7 +278,7 @@ def initial(args):
media_entry, media_entry,
reprocess_action='initial') reprocess_action='initial')
except ProcessingManagerDoesNotExist: except ProcessingManagerDoesNotExist:
print('No such processing manager for {}'.format(entry.media_type)) print(f'No such processing manager for {entry.media_type}')
def reprocess(args): def reprocess(args):

View File

@ -181,7 +181,7 @@ def generate_validation_report(config, validation_result):
# We don't care about missing values for now. # We don't care about missing values for now.
continue continue
report.append("{} = {}".format(section_string, error)) report.append(f"{section_string} = {error}")
if report: if report:
return REPORT_HEADER + "\n".join(report) return REPORT_HEADER + "\n".join(report)

View File

@ -116,7 +116,7 @@ class CsrfMeddleware(BaseMeddleware):
def _make_token(self, request): def _make_token(self, request):
"""Generate a new token to use for CSRF protection.""" """Generate a new token to use for CSRF protection."""
return "{}".format(getrandbits(self.CSRF_KEYLEN)) return f"{getrandbits(self.CSRF_KEYLEN)}"
def verify_tokens(self, request): def verify_tokens(self, request):
"""Verify that the CSRF Cookie exists and that it matches the """Verify that the CSRF Cookie exists and that it matches the

View File

@ -68,10 +68,10 @@ def sniff_media_contents(media_file, filename):
''' '''
media_type = hook_handle('sniff_handler', media_file, filename) media_type = hook_handle('sniff_handler', media_file, filename)
if media_type: if media_type:
_log.info('{} accepts the file'.format(media_type)) _log.info(f'{media_type} accepts the file')
return media_type, hook_handle(('media_manager', media_type)) return media_type, hook_handle(('media_manager', media_type))
else: else:
_log.debug('{} did not accept the file'.format(media_type)) _log.debug(f'{media_type} did not accept the file')
raise FileTypeNotSupported( raise FileTypeNotSupported(
# TODO: Provide information on which file types are supported # TODO: Provide information on which file types are supported
_('Sorry, I don\'t support that file type :(')) _('Sorry, I don\'t support that file type :('))
@ -129,7 +129,7 @@ def type_match_handler(media_file, filename):
_log.debug(e) _log.debug(e)
raise raise
else: else:
_log.info('No plugins handled extension {}'.format(ext)) _log.info(f'No plugins handled extension {ext}')
else: else:
_log.info('File {} has no known file extension, let\'s hope ' _log.info('File {} has no known file extension, let\'s hope '
'the sniffers get it.'.format(filename)) 'the sniffers get it.'.format(filename))

View File

@ -108,7 +108,7 @@ class AsciiToImage:
for line in lines: for line in lines:
line_length = len(line) line_length = len(line)
_log.debug('Writing line at {}'.format(char_pos)) _log.debug(f'Writing line at {char_pos}')
for _pos in range(0, line_length): for _pos in range(0, line_length):
char = line[_pos] char = line[_pos]

View File

@ -37,7 +37,7 @@ MEDIA_TYPE = 'mediagoblin.media_types.ascii'
def sniff_handler(media_file, filename): def sniff_handler(media_file, filename):
_log.info('Sniffing {}'.format(MEDIA_TYPE)) _log.info(f'Sniffing {MEDIA_TYPE}')
name, ext = os.path.splitext(filename) name, ext = os.path.splitext(filename)
clean_ext = ext[1:].lower() clean_ext = ext[1:].lower()

View File

@ -278,11 +278,11 @@ if __name__ == "__main__":
import sys import sys
def printProgress(p): def printProgress(p):
sys.stdout.write("\rProgress : {}%".format(p)) sys.stdout.write(f"\rProgress : {p}%")
sys.stdout.flush() sys.stdout.flush()
if not (len(sys.argv) == 2 or len(sys.argv) == 3): if not (len(sys.argv) == 2 or len(sys.argv) == 3):
print("Usage:\n{} input_file [output_file]".format(sys.argv[0])) print(f"Usage:\n{sys.argv[0]} input_file [output_file]")
exit() exit()
audioFile = sys.argv[1] audioFile = sys.argv[1]
@ -292,6 +292,6 @@ if __name__ == "__main__":
else: else:
outputFile = 'spectrogram.png' outputFile = 'spectrogram.png'
sys.stdout.write("Input : {}\nOutput : {}\n".format(audioFile, outputFile)) sys.stdout.write(f"Input : {audioFile}\nOutput : {outputFile}\n")
drawSpectrogram(audioFile, outputFile, progressCallback = printProgress) drawSpectrogram(audioFile, outputFile, progressCallback = printProgress)
sys.stdout.write("\nDone!\n") sys.stdout.write("\nDone!\n")

View File

@ -35,7 +35,7 @@ MEDIA_TYPE = 'mediagoblin.media_types.audio'
def sniff_handler(media_file, filename): def sniff_handler(media_file, filename):
_log.info('Sniffing {}'.format(MEDIA_TYPE)) _log.info(f'Sniffing {MEDIA_TYPE}')
try: try:
data = discover(media_file.name) data = discover(media_file.name)
except Exception as e: except Exception as e:

View File

@ -45,7 +45,7 @@ Gst.init(None)
class Python3AudioThumbnailer: class Python3AudioThumbnailer:
def __init__(self): def __init__(self):
_log.info('Initializing {}'.format(self.__class__.__name__)) _log.info(f'Initializing {self.__class__.__name__}')
def spectrogram(self, src, dst, **kw): def spectrogram(self, src, dst, **kw):
from mediagoblin.media_types.audio import audiotospectrogram from mediagoblin.media_types.audio import audiotospectrogram
@ -85,7 +85,7 @@ AudioThumbnailer = Python3AudioThumbnailer
class AudioTranscoder: class AudioTranscoder:
def __init__(self): def __init__(self):
_log.info('Initializing {}'.format(self.__class__.__name__)) _log.info(f'Initializing {self.__class__.__name__}')
# Instantiate MainLoop # Instantiate MainLoop
self._loop = GObject.MainLoop() self._loop = GObject.MainLoop()
@ -96,10 +96,10 @@ class AudioTranscoder:
def _on_pad_added(element, pad, connect_to): def _on_pad_added(element, pad, connect_to):
caps = pad.query_caps(None) caps = pad.query_caps(None)
name = caps.to_string() name = caps.to_string()
_log.debug('on_pad_added: {}'.format(name)) _log.debug(f'on_pad_added: {name}')
if name.startswith('audio') and not connect_to.is_linked(): if name.startswith('audio') and not connect_to.is_linked():
pad.link(connect_to) pad.link(connect_to)
_log.info('Transcoding {} into {}'.format(src, dst)) _log.info(f'Transcoding {src} into {dst}')
self.__on_progress = progress_callback self.__on_progress = progress_callback
# Set up pipeline # Set up pipeline
tolerance = 80000000 tolerance = 80000000
@ -155,7 +155,7 @@ class AudioTranscoder:
(success, percent) = structure.get_int('percent') (success, percent) = structure.get_int('percent')
if self.__on_progress and success: if self.__on_progress and success:
self.__on_progress(percent) self.__on_progress(percent)
_log.info('{}% done...'.format(percent)) _log.info(f'{percent}% done...')
elif message.type == Gst.MessageType.EOS: elif message.type == Gst.MessageType.EOS:
_log.info('Done') _log.info('Done')
self.halt() self.halt()

View File

@ -48,7 +48,7 @@ class Blog(Base, BlogMixin):
@property @property
def slug_or_id(self): def slug_or_id(self):
return (self.slug or 'blog_{}'.format(self.id)) return (self.slug or f'blog_{self.id}')
def get_all_blog_posts(self, state=None): def get_all_blog_posts(self, state=None):
blog_posts = Session.query(MediaEntry).join(BlogPostData)\ blog_posts = Session.query(MediaEntry).join(BlogPostData)\

View File

@ -151,7 +151,7 @@ SUPPORTED_FILETYPES = ['png', 'gif', 'jpg', 'jpeg', 'tiff']
def sniff_handler(media_file, filename): def sniff_handler(media_file, filename):
_log.info('Sniffing {}'.format(MEDIA_TYPE)) _log.info(f'Sniffing {MEDIA_TYPE}')
name, ext = os.path.splitext(filename) name, ext = os.path.splitext(filename)
clean_ext = ext[1:].lower() # Strip the . from ext and make lowercase clean_ext = ext[1:].lower() # Strip the . from ext and make lowercase

View File

@ -169,7 +169,7 @@ def check_prerequisites():
return True return True
def sniff_handler(media_file, filename): def sniff_handler(media_file, filename):
_log.info('Sniffing {}'.format(MEDIA_TYPE)) _log.info(f'Sniffing {MEDIA_TYPE}')
if not check_prerequisites(): if not check_prerequisites():
return None return None

View File

@ -35,7 +35,7 @@ ACCEPTED_EXTENSIONS = ['nef', 'cr2']
# The entire function have to be copied # The entire function have to be copied
def sniff_handler(media_file, filename): def sniff_handler(media_file, filename):
_log.info('Sniffing {}'.format(MEDIA_TYPE)) _log.info(f'Sniffing {MEDIA_TYPE}')
name, ext = os.path.splitext(filename) name, ext = os.path.splitext(filename)
clean_ext = ext[1:].lower() # Strip the . from ext and make lowercase clean_ext = ext[1:].lower() # Strip the . from ext and make lowercase

View File

@ -48,7 +48,7 @@ BLEND_SCRIPT = pkg_resources.resource_filename(
def sniff_handler(media_file, filename): def sniff_handler(media_file, filename):
_log.info('Sniffing {}'.format(MEDIA_TYPE)) _log.info(f'Sniffing {MEDIA_TYPE}')
name, ext = os.path.splitext(filename) name, ext = os.path.splitext(filename)
clean_ext = ext[1:].lower() clean_ext = ext[1:].lower()

View File

@ -40,7 +40,7 @@ def discover(src):
# init before import to work around https://bugzilla.gnome.org/show_bug.cgi?id=736260 # init before import to work around https://bugzilla.gnome.org/show_bug.cgi?id=736260
from gi.repository import GstPbutils from gi.repository import GstPbutils
_log.info('Discovering {}...'.format(src)) _log.info(f'Discovering {src}...')
uri = 'file://{}'.format(src) uri = f'file://{src}'
discoverer = GstPbutils.Discoverer.new(60 * Gst.SECOND) discoverer = GstPbutils.Discoverer.new(60 * Gst.SECOND)
return discoverer.discover_uri(uri) return discoverer.discover_uri(uri)

View File

@ -40,7 +40,7 @@ class VideoMediaManager(MediaManagerBase):
video_res = video_config['available_resolutions'] video_res = video_config['available_resolutions']
video_res.remove(video_config['default_resolution']) video_res.remove(video_config['default_resolution'])
video_res.insert(0, video_config['default_resolution']) video_res.insert(0, video_config['default_resolution'])
video_res = ['webm_{}'.format(x) for x in video_res] video_res = [f'webm_{x}' for x in video_res]
return (['webm_video'] + video_res + ['original']) return (['webm_video'] + video_res + ['original'])

View File

@ -49,14 +49,14 @@ class VideoTranscodingFail(BaseProcessingFail):
def sniffer(media_file): def sniffer(media_file):
'''New style sniffer, used in two-steps check; requires to have .name''' '''New style sniffer, used in two-steps check; requires to have .name'''
_log.info('Sniffing {}'.format(MEDIA_TYPE)) _log.info(f'Sniffing {MEDIA_TYPE}')
try: try:
data = transcoders.discover(media_file.name) data = transcoders.discover(media_file.name)
except Exception as e: except Exception as e:
# this is usually GLib.GError, but we don't really care which one # this is usually GLib.GError, but we don't really care which one
_log.warning('GStreamer: {}'.format(str(e))) _log.warning(f'GStreamer: {str(e)}')
raise MissingComponents('GStreamer: {}'.format(str(e))) raise MissingComponents(f'GStreamer: {str(e)}')
_log.debug('Discovered: {}'.format(data)) _log.debug(f'Discovered: {data}')
if not data.get_video_streams(): if not data.get_video_streams():
raise MissingComponents('No video streams found in this video') raise MissingComponents('No video streams found in this video')
@ -64,7 +64,7 @@ def sniffer(media_file):
if data.get_result() != 0: # it's 0 if success if data.get_result() != 0: # it's 0 if success
try: try:
missing = data.get_misc().get_string('name') missing = data.get_misc().get_string('name')
_log.warning('GStreamer: missing {}'.format(missing)) _log.warning(f'GStreamer: missing {missing}')
except AttributeError as e: except AttributeError as e:
# AttributeError happens here on gstreamer >1.4, when get_misc # AttributeError happens here on gstreamer >1.4, when get_misc
# returns None. There is a special function to get info about # returns None. There is a special function to get info about
@ -74,7 +74,7 @@ def sniffer(media_file):
_log.warning('GStreamer: missing: {}'.format(', '.join(details))) _log.warning('GStreamer: missing: {}'.format(', '.join(details)))
missing = ', '.join(['{} ({})'.format(*d.split('|')[3:]) missing = ', '.join(['{} ({})'.format(*d.split('|')[3:])
for d in details]) for d in details])
raise MissingComponents('{} is missing'.format(missing)) raise MissingComponents(f'{missing} is missing')
return MEDIA_TYPE return MEDIA_TYPE
@ -87,13 +87,13 @@ def sniff_handler(media_file, filename):
if clean_ext in EXCLUDED_EXTS: if clean_ext in EXCLUDED_EXTS:
# We don't handle this filetype, though gstreamer might think we can # We don't handle this filetype, though gstreamer might think we can
            _log.info('Refused to process {} due to excluded extension'.format(filename))             _log.info(f'Refused to process {filename} due to excluded extension')
return None return None
try: try:
return sniffer(media_file) return sniffer(media_file)
except: except:
        _log.error('Could not discover {}'.format(filename))         _log.error(f'Could not discover {filename}')
return None return None
def get_tags(stream_info): def get_tags(stream_info):
@ -258,7 +258,7 @@ class CommonVideoProcessor(MediaProcessor):
# If we didn't transcode, then we need to keep the original # If we didn't transcode, then we need to keep the original
self.did_transcode = False self.did_transcode = False
for each_res in self.video_config['available_resolutions']: for each_res in self.video_config['available_resolutions']:
if 'webm_{}'.format(each_res) in self.entry.media_files: if f'webm_{each_res}' in self.entry.media_files:
self.did_transcode = True self.did_transcode = True
break break
if not self.did_transcode or self.video_config['keep_original']: if not self.did_transcode or self.video_config['keep_original']:

View File

@ -62,7 +62,7 @@ def capture_thumb(video_path, dest_path, width=None, height=None, percent=0.5):
'''This is a callback to dynamically add element to pipeline''' '''This is a callback to dynamically add element to pipeline'''
caps = pad.query_caps(None) caps = pad.query_caps(None)
name = caps.to_string() name = caps.to_string()
_log.debug('on_pad_added: {}'.format(name)) _log.debug(f'on_pad_added: {name}')
if name.startswith('video') and not connect_to.is_linked(): if name.startswith('video') and not connect_to.is_linked():
pad.link(connect_to) pad.link(connect_to)
@ -70,7 +70,7 @@ def capture_thumb(video_path, dest_path, width=None, height=None, percent=0.5):
# ! CAPS ! appsink # ! CAPS ! appsink
pipeline = Gst.Pipeline() pipeline = Gst.Pipeline()
uridecodebin = Gst.ElementFactory.make('uridecodebin', None) uridecodebin = Gst.ElementFactory.make('uridecodebin', None)
uridecodebin.set_property('uri', 'file://{}'.format(video_path)) uridecodebin.set_property('uri', f'file://{video_path}')
videoconvert = Gst.ElementFactory.make('videoconvert', None) videoconvert = Gst.ElementFactory.make('videoconvert', None)
uridecodebin.connect('pad-added', pad_added, uridecodebin.connect('pad-added', pad_added,
videoconvert.get_static_pad('sink')) videoconvert.get_static_pad('sink'))
@ -104,7 +104,7 @@ def capture_thumb(video_path, dest_path, width=None, height=None, percent=0.5):
# timeout of 3 seconds below was set experimentally # timeout of 3 seconds below was set experimentally
state = pipeline.get_state(Gst.SECOND * 3) state = pipeline.get_state(Gst.SECOND * 3)
if state[0] != Gst.StateChangeReturn.SUCCESS: if state[0] != Gst.StateChangeReturn.SUCCESS:
_log.warning('state change failed, {}'.format(state)) _log.warning(f'state change failed, {state}')
return return
# get duration # get duration
@ -139,7 +139,7 @@ def capture_thumb(video_path, dest_path, width=None, height=None, percent=0.5):
im = Image.frombytes('RGB', (width, height), im = Image.frombytes('RGB', (width, height),
buffer.extract_dup(0, buffer.get_size())) buffer.extract_dup(0, buffer.get_size()))
im.save(dest_path) im.save(dest_path)
_log.info('thumbnail saved to {}'.format(dest_path)) _log.info(f'thumbnail saved to {dest_path}')
# cleanup # cleanup
pipeline.set_state(Gst.State.NULL) pipeline.set_state(Gst.State.NULL)
@ -374,7 +374,7 @@ class VideoTranscoder:
_log.info('{percent}% of {dest} resolution done..' _log.info('{percent}% of {dest} resolution done..'
'.'.format(percent=percent, dest=self.destination_dimensions)) '.'.format(percent=percent, dest=self.destination_dimensions))
elif message.type == Gst.MessageType.ERROR: elif message.type == Gst.MessageType.ERROR:
_log.error('Got error: {}'.format(message.parse_error())) _log.error(f'Got error: {message.parse_error()}')
self.dst_data = None self.dst_data = None
self.__stop() self.__stop()

View File

@ -43,7 +43,7 @@ def skip_transcode(metadata, size):
# XXX: how were we supposed to use it? # XXX: how were we supposed to use it?
medium_config = mgg.global_config['media:medium'] medium_config = mgg.global_config['media:medium']
_log.debug('skip_transcode config: {}'.format(config)) _log.debug(f'skip_transcode config: {config}')
metadata_tags = metadata.get_tags() metadata_tags = metadata.get_tags()
if not metadata_tags: if not metadata_tags:

View File

@ -91,7 +91,7 @@ def mark_comment_notification_seen(comment_id, user):
object_id=comment_gmr.id object_id=comment_gmr.id
).first() ).first()
_log.debug('Marking {} as seen.'.format(notification)) _log.debug(f'Marking {notification} as seen.')
mark_notification_seen(notification) mark_notification_seen(notification)

View File

@ -35,7 +35,7 @@ class EmailNotificationTask(Task):
''' '''
def run(self, notification_id, message): def run(self, notification_id, message):
cn = Notification.query.filter_by(id=notification_id).first() cn = Notification.query.filter_by(id=notification_id).first()
_log.info('Sending notification email about {}'.format(cn)) _log.info(f'Sending notification email about {cn}')
return send_email( return send_email(
message['from'], message['from'],

View File

@ -126,7 +126,7 @@ def client_register(request):
logo_uri = data.get("logo_uri", client.logo_url) logo_uri = data.get("logo_uri", client.logo_url)
if logo_uri is not None and not validate_url(logo_uri): if logo_uri is not None and not validate_url(logo_uri):
error = "Logo URI {} is not a valid URI.".format(logo_uri) error = f"Logo URI {logo_uri} is not a valid URI."
return json_response( return json_response(
{"error": error}, {"error": error},
status=400 status=400
@ -146,7 +146,7 @@ def client_register(request):
for contact in contacts: for contact in contacts:
if not validate_email(contact): if not validate_email(contact):
# not a valid email # not a valid email
error = "Email {} is not a valid email.".format(contact) error = f"Email {contact} is not a valid email."
return json_response({"error": error}, status=400) return json_response({"error": error}, status=400)
@ -163,7 +163,7 @@ def client_register(request):
for uri in redirect_uris: for uri in redirect_uris:
if not validate_url(uri): if not validate_url(uri):
# not a valid uri # not a valid uri
error = "URI {} is not a valid URI".format(uri) error = f"URI {uri} is not a valid URI"
return json_response({"error": error}, status=400) return json_response({"error": error}, status=400)
client.redirect_uri = redirect_uris client.redirect_uri = redirect_uris

View File

@ -28,7 +28,7 @@ def setup_plugin():
config = pluginapi.get_config(__name__) config = pluginapi.get_config(__name__)
_log.debug('API config: {}'.format(config)) _log.debug(f'API config: {config}')
routes = [ routes = [
('mediagoblin.plugins.api.test', ('mediagoblin.plugins.api.test',

View File

@ -115,7 +115,7 @@ def api_auth(controller):
for auth in PluginManager().get_hook_callables('auth'): for auth in PluginManager().get_hook_callables('auth'):
if auth.trigger(request): if auth.trigger(request):
_log.debug('{} believes it is capable of authenticating this request.'.format(auth)) _log.debug(f'{auth} believes it is capable of authenticating this request.')
auth_candidates.append(auth) auth_candidates.append(auth)
# If we can't find any authentication methods, we should not let them # If we can't find any authentication methods, we should not let them

View File

@ -38,7 +38,7 @@ def bcrypt_check_password(raw_pass, stored_hash, extra_salt=None):
True or False depending on success. True or False depending on success.
""" """
if extra_salt: if extra_salt:
raw_pass = "{}:{}".format(extra_salt, raw_pass) raw_pass = f"{extra_salt}:{raw_pass}"
hashed_pass = bcrypt.hashpw(raw_pass.encode('utf-8'), stored_hash) hashed_pass = bcrypt.hashpw(raw_pass.encode('utf-8'), stored_hash)
@ -64,7 +64,7 @@ def bcrypt_gen_password_hash(raw_pass, extra_salt=None):
non-database extra salt non-database extra salt
""" """
if extra_salt: if extra_salt:
raw_pass = "{}:{}".format(extra_salt, raw_pass) raw_pass = f"{extra_salt}:{raw_pass}"
return str( return str(
bcrypt.hashpw(raw_pass.encode('utf-8'), bcrypt.gensalt())) bcrypt.hashpw(raw_pass.encode('utf-8'), bcrypt.gensalt()))

View File

@ -34,7 +34,7 @@ _log = logging.getLogger(__name__)
def print_context(c): def print_context(c):
s = [] s = []
for key, val in c.items(): for key, val in c.items():
s.append('{}: {}'.format(key, repr(val))) s.append(f'{key}: {val!r}')
return '\n'.join(s) return '\n'.join(s)

View File

@ -41,7 +41,7 @@ class Nonce(Base):
salt = Column(Unicode, primary_key=True) salt = Column(Unicode, primary_key=True)
def __unicode__(self): def __unicode__(self):
return 'Nonce: {!r}, {!r}'.format(self.server_url, self.salt) return f'Nonce: {self.server_url!r}, {self.salt!r}'
class Association(Base): class Association(Base):
@ -55,7 +55,7 @@ class Association(Base):
assoc_type = Column(Unicode) assoc_type = Column(Unicode)
def __unicode__(self): def __unicode__(self):
return 'Association: {!r}, {!r}'.format(self.server_url, self.handle) return f'Association: {self.server_url!r}, {self.handle!r}'
MODELS = [ MODELS = [

View File

@ -130,7 +130,7 @@ def check_form(form):
raise BadRequest() raise BadRequest()
dump = [] dump = []
for f in form: for f in form:
dump.append("{}={!r}".format(f.name, f.data)) dump.append(f"{f.name}={f.data!r}")
_log.debug("form: %s", " ".join(dump)) _log.debug("form: %s", " ".join(dump))

View File

@ -119,7 +119,7 @@ def pwg_images_addSimple(request):
raise BadRequest() raise BadRequest()
dump = [] dump = []
for f in form: for f in form:
dump.append("{}={!r}".format(f.name, f.data)) dump.append(f"{f.name}={f.data!r}")
_log.info("addSimple: %r %s %r", request.form, " ".join(dump), _log.info("addSimple: %r %s %r", request.form, " ".join(dump),
request.files) request.files)

View File

@ -64,7 +64,7 @@ class TrimWhiteSpaceMeddleware(meddleware.BaseMeddleware):
# Append ourselves to the list of enabled Meddlewares # Append ourselves to the list of enabled Meddlewares
meddleware.ENABLED_MEDDLEWARE.append( meddleware.ENABLED_MEDDLEWARE.append(
'{}:{}'.format(cls.__module__, cls.__name__)) f'{cls.__module__}:{cls.__name__}')
hooks = { hooks = {

View File

@ -288,7 +288,7 @@ def get_processing_manager_for_type(media_type):
manager_class = hook_handle(('reprocess_manager', media_type)) manager_class = hook_handle(('reprocess_manager', media_type))
if not manager_class: if not manager_class:
raise ProcessingManagerDoesNotExist( raise ProcessingManagerDoesNotExist(
"A processing manager does not exist for {}".format(media_type)) f"A processing manager does not exist for {media_type}")
manager = manager_class() manager = manager_class()
return manager return manager
@ -389,7 +389,7 @@ def store_public(entry, keyname, local_file, target_name=None,
try: try:
mgg.public_store.copy_local_to_storage(local_file, target_filepath) mgg.public_store.copy_local_to_storage(local_file, target_filepath)
except Exception as e: except Exception as e:
_log.error('Exception happened: {}'.format(e)) _log.error(f'Exception happened: {e}')
raise PublicStoreFail(keyname=keyname) raise PublicStoreFail(keyname=keyname)
# raise an error if the file failed to copy # raise an error if the file failed to copy
if not mgg.public_store.file_exists(target_filepath): if not mgg.public_store.file_exists(target_filepath):

View File

@ -38,7 +38,7 @@ def handle_push_urls(feed_url):
Retry 3 times every 2 minutes if run in separate process before failing.""" Retry 3 times every 2 minutes if run in separate process before failing."""
if not mgg.app_config["push_urls"]: if not mgg.app_config["push_urls"]:
return # Nothing to do return # Nothing to do
_log.debug('Notifying Push servers for feed {}'.format(feed_url)) _log.debug(f'Notifying Push servers for feed {feed_url}')
hubparameters = { hubparameters = {
'hub.mode': 'publish', 'hub.mode': 'publish',
'hub.url': feed_url} 'hub.url': feed_url}
@ -98,7 +98,7 @@ class ProcessMedia(celery.Task):
entry.state = 'processing' entry.state = 'processing'
entry.save() entry.save()
_log.debug('Processing {}'.format(entry)) _log.debug(f'Processing {entry}')
try: try:
processor.process(**reprocess_info) processor.process(**reprocess_info)

View File

@ -161,7 +161,7 @@ class CloudFilesStorage(StorageInterface):
# and bandwidth usage. So, override this method and use the # and bandwidth usage. So, override this method and use the
# Cloudfile's "send" interface instead. # Cloudfile's "send" interface instead.
# TODO: Fixing write() still seems worthwhile though. # TODO: Fixing write() still seems worthwhile though.
_log.debug('Sending {} to cloudfiles...'.format(filepath)) _log.debug(f'Sending {filepath} to cloudfiles...')
with self.get_file(filepath, 'wb') as dest_file: with self.get_file(filepath, 'wb') as dest_file:
with open(filename, 'rb') as source_file: with open(filename, 'rb') as source_file:
# Copy to storage system in 4096 byte chunks # Copy to storage system in 4096 byte chunks

View File

@ -27,7 +27,7 @@ def get_submit_start_form(form, **kwargs):
max_file_size = kwargs.get('max_file_size') max_file_size = kwargs.get('max_file_size')
desc = None desc = None
if max_file_size: if max_file_size:
desc = _('Max file size: {} mb'.format(max_file_size)) desc = _(f'Max file size: {max_file_size} mb')
class SubmitStartForm(wtforms.Form): class SubmitStartForm(wtforms.Form):
file = wtforms.FileField( file = wtforms.FileField(

View File

@ -161,7 +161,7 @@ def submit_media(mg_app, user, submitted_file, filename,
# Get file size and round to 2 decimal places # Get file size and round to 2 decimal places
file_size = mg_app.queue_store.get_file_size( file_size = mg_app.queue_store.get_file_size(
entry.queued_media_file) / (1024.0 * 1024) entry.queued_media_file) / (1024.0 * 1024)
file_size = float('{:.2f}'.format(file_size)) file_size = float(f'{file_size:.2f}')
# Check if file size is over the limit # Check if file size is over the limit
if max_file_size and file_size >= max_file_size: if max_file_size and file_size >= max_file_size:

View File

@ -55,7 +55,7 @@ class TestAPI:
with self.mock_oauth(): with self.mock_oauth():
response = test_app.post( response = test_app.post(
"/api/user/{}/feed".format(self.active_user.username), f"/api/user/{self.active_user.username}/feed",
json.dumps(activity), json.dumps(activity),
headers=headers headers=headers
) )
@ -75,7 +75,7 @@ class TestAPI:
with self.mock_oauth(): with self.mock_oauth():
response = test_app.post( response = test_app.post(
"/api/user/{}/uploads".format(self.active_user.username), f"/api/user/{self.active_user.username}/uploads",
data, data,
headers=headers headers=headers
) )
@ -192,7 +192,7 @@ class TestAPI:
# Will be self.user trying to upload as self.other_user # Will be self.user trying to upload as self.other_user
with pytest.raises(AppError) as excinfo: with pytest.raises(AppError) as excinfo:
test_app.post( test_app.post(
"/api/user/{}/uploads".format(self.other_user.username), f"/api/user/{self.other_user.username}/uploads",
data, data,
headers=headers headers=headers
) )
@ -215,7 +215,7 @@ class TestAPI:
with self.mock_oauth(): with self.mock_oauth():
with pytest.raises(AppError) as excinfo: with pytest.raises(AppError) as excinfo:
test_app.post( test_app.post(
"/api/user/{}/feed".format(self.other_user.username), f"/api/user/{self.other_user.username}/feed",
json.dumps(activity), json.dumps(activity),
headers=headers headers=headers
) )
@ -250,7 +250,7 @@ class TestAPI:
with self.mock_oauth(): with self.mock_oauth():
with pytest.raises(AppError) as excinfo: with pytest.raises(AppError) as excinfo:
test_app.post( test_app.post(
"/api/user/{}/feed".format(self.user.username), f"/api/user/{self.user.username}/feed",
json.dumps(activity), json.dumps(activity),
headers=headers headers=headers
) )
@ -277,7 +277,7 @@ class TestAPI:
with self.mock_oauth(): with self.mock_oauth():
response = test_app.post( response = test_app.post(
"/api/user/{}/feed".format(self.user.username), f"/api/user/{self.user.username}/feed",
json.dumps(activity), json.dumps(activity),
headers={"Content-Type": "application/json"} headers={"Content-Type": "application/json"}
) )
@ -311,7 +311,7 @@ class TestAPI:
with self.mock_oauth(): with self.mock_oauth():
with pytest.raises(AppError) as excinfo: with pytest.raises(AppError) as excinfo:
test_app.post( test_app.post(
"/api/user/{}/uploads".format(self.user.username), f"/api/user/{self.user.username}/uploads",
data, data,
headers=headers headers=headers
) )
@ -406,7 +406,7 @@ class TestAPI:
with self.mock_oauth(): with self.mock_oauth():
with pytest.raises(AppError) as excinfo: with pytest.raises(AppError) as excinfo:
test_app.post( test_app.post(
"/api/user/{}/feed".format(self.other_user.username), f"/api/user/{self.other_user.username}/feed",
json.dumps(activity), json.dumps(activity),
headers=headers headers=headers
) )
@ -452,7 +452,7 @@ class TestAPI:
with self.mock_oauth(): with self.mock_oauth():
with pytest.raises(AppError) as excinfo: with pytest.raises(AppError) as excinfo:
test_app.post( test_app.post(
"/api/user/{}/feed".format(self.user.username), f"/api/user/{self.user.username}/feed",
json.dumps(activity), json.dumps(activity),
headers=headers headers=headers
) )
@ -461,7 +461,7 @@ class TestAPI:
def test_profile(self, test_app): def test_profile(self, test_app):
""" Tests profile endpoint """ """ Tests profile endpoint """
uri = "/api/user/{}/profile".format(self.user.username) uri = f"/api/user/{self.user.username}/profile"
with self.mock_oauth(): with self.mock_oauth():
response = test_app.get(uri) response = test_app.get(uri)
profile = json.loads(response.body.decode()) profile = json.loads(response.body.decode())
@ -475,7 +475,7 @@ class TestAPI:
def test_user(self, test_app): def test_user(self, test_app):
""" Test the user endpoint """ """ Test the user endpoint """
uri = "/api/user/{}/".format(self.user.username) uri = f"/api/user/{self.user.username}/"
with self.mock_oauth(): with self.mock_oauth():
response = test_app.get(uri) response = test_app.get(uri)
user = json.loads(response.body.decode()) user = json.loads(response.body.decode())
@ -501,7 +501,7 @@ class TestAPI:
response, image_data = self._upload_image(test_app, GOOD_JPG) response, image_data = self._upload_image(test_app, GOOD_JPG)
response, data = self._post_image_to_feed(test_app, image_data) response, data = self._post_image_to_feed(test_app, image_data)
uri = "/api/user/{}/feed".format(self.active_user.username) uri = f"/api/user/{self.active_user.username}/feed"
with self.mock_oauth(): with self.mock_oauth():
response = test_app.get(uri) response = test_app.get(uri)
feed = json.loads(response.body.decode()) feed = json.loads(response.body.decode())
@ -574,7 +574,7 @@ class TestAPI:
self.active_user = self.other_user self.active_user = self.other_user
# Fetch the feed # Fetch the feed
url = "/api/user/{}/feed".format(self.user.username) url = f"/api/user/{self.user.username}/feed"
with self.mock_oauth(): with self.mock_oauth():
response = test_app.get(url) response = test_app.get(url)
feed = json.loads(response.body.decode()) feed = json.loads(response.body.decode())

View File

@ -164,7 +164,7 @@ def test_register_views(test_app):
## Verify the email activation works ## Verify the email activation works
template.clear_test_template_context() template.clear_test_template_context()
response = test_app.get("{}?{}".format(path, get_params)) response = test_app.get(f"{path}?{get_params}")
response.follow() response.follow()
context = template.TEMPLATE_TEST_CONTEXT[ context = template.TEMPLATE_TEST_CONTEXT[
'mediagoblin/user_pages/user.html'] 'mediagoblin/user_pages/user.html']
@ -230,7 +230,7 @@ def test_register_views(test_app):
## Verify step 1 of password-change works -- can see form to change password ## Verify step 1 of password-change works -- can see form to change password
template.clear_test_template_context() template.clear_test_template_context()
response = test_app.get("{}?{}".format(path, get_params)) response = test_app.get(f"{path}?{get_params}")
assert 'mediagoblin/plugins/basic_auth/change_fp.html' in \ assert 'mediagoblin/plugins/basic_auth/change_fp.html' in \
template.TEMPLATE_TEST_CONTEXT template.TEMPLATE_TEST_CONTEXT

View File

@ -168,7 +168,7 @@ class TestUserEdit:
# Verify email activation works # Verify email activation works
template.clear_test_template_context() template.clear_test_template_context()
get_params = urlparse.urlsplit(email_context['verification_url'])[3] get_params = urlparse.urlsplit(email_context['verification_url'])[3]
res = test_app.get('{}?{}'.format(path, get_params)) res = test_app.get(f'{path}?{get_params}')
res.follow() res.follow()
# New email saved? # New email saved?

View File

@ -28,7 +28,7 @@ from .resources import GOOD_JPG, EMPTY_JPG, BAD_JPG, GPS_JPG, BAD_GPS_JPG
def assert_in(a, b): def assert_in(a, b):
assert a in b, "{!r} not in {!r}".format(a, b) assert a in b, f"{a!r} not in {b!r}"
def test_exif_extraction(): def test_exif_extraction():

View File

@ -69,7 +69,7 @@ class TestModerationViews:
# First, test an admin taking away a privilege from a user # First, test an admin taking away a privilege from a user
#---------------------------------------------------------------------- #----------------------------------------------------------------------
response, context = self.do_post({'privilege_name':'commenter'}, response, context = self.do_post({'privilege_name':'commenter'},
url='/mod/users/{}/privilege/'.format(self.user.username)) url=f'/mod/users/{self.user.username}/privilege/')
assert response.status == '302 FOUND' assert response.status == '302 FOUND'
self.query_for_users() self.query_for_users()
assert not self.user.has_privilege('commenter') assert not self.user.has_privilege('commenter')
@ -77,7 +77,7 @@ class TestModerationViews:
# Then, test an admin giving a privilege to a user # Then, test an admin giving a privilege to a user
#---------------------------------------------------------------------- #----------------------------------------------------------------------
response, context = self.do_post({'privilege_name':'commenter'}, response, context = self.do_post({'privilege_name':'commenter'},
url='/mod/users/{}/privilege/'.format(self.user.username)) url=f'/mod/users/{self.user.username}/privilege/')
assert response.status == '302 FOUND' assert response.status == '302 FOUND'
self.query_for_users() self.query_for_users()
assert self.user.has_privilege('commenter') assert self.user.has_privilege('commenter')
@ -90,7 +90,7 @@ class TestModerationViews:
with pytest.raises(AppError) as excinfo: with pytest.raises(AppError) as excinfo:
response, context = self.do_post({'privilege_name':'commenter'}, response, context = self.do_post({'privilege_name':'commenter'},
url='/mod/users/{}/privilege/'.format(self.user.username)) url=f'/mod/users/{self.user.username}/privilege/')
assert 'Bad response: 403 FORBIDDEN' in str(excinfo) assert 'Bad response: 403 FORBIDDEN' in str(excinfo)
self.query_for_users() self.query_for_users()
@ -116,7 +116,7 @@ class TestModerationViews:
response, context = self.do_post({'action_to_resolve':['takeaway'], response, context = self.do_post({'action_to_resolve':['takeaway'],
'take_away_privileges':['commenter'], 'take_away_privileges':['commenter'],
'targeted_user':self.user.id}, 'targeted_user':self.user.id},
url='/mod/reports/{}/'.format(comment_report.id)) url=f'/mod/reports/{comment_report.id}/')
self.query_for_users() self.query_for_users()
comment_report = Report.query.filter( comment_report = Report.query.filter(
@ -137,7 +137,7 @@ class TestModerationViews:
response, context = self.do_post({'action_to_resolve':['sendmessage'], response, context = self.do_post({'action_to_resolve':['sendmessage'],
'message_to_user':'This is your last warning, regular....', 'message_to_user':'This is your last warning, regular....',
'targeted_user':self.user.id}, 'targeted_user':self.user.id},
url='/mod/reports/{}/'.format(comment_report.id)) url=f'/mod/reports/{comment_report.id}/')
self.query_for_users() self.query_for_users()
comment_report = Report.query.filter( comment_report = Report.query.filter(
@ -175,7 +175,7 @@ VGhpcyBpcyB5b3VyIGxhc3Qgd2FybmluZywgcmVndWxhci4uLi4=\n',
'targeted_user':self.user.id, 'targeted_user':self.user.id,
'why_user_was_banned':'', 'why_user_was_banned':'',
'user_banned_until':''}, 'user_banned_until':''},
url='/mod/reports/{}/'.format(comment_report.id)) url=f'/mod/reports/{comment_report.id}/')
assert response.status == '302 FOUND' assert response.status == '302 FOUND'
self.query_for_users() self.query_for_users()
test_user_ban = UserBan.query.filter( test_user_ban = UserBan.query.filter(
@ -196,7 +196,7 @@ VGhpcyBpcyB5b3VyIGxhc3Qgd2FybmluZywgcmVndWxhci4uLi4=\n',
response, context = self.do_post({'action_to_resolve':['takeaway'], response, context = self.do_post({'action_to_resolve':['takeaway'],
'take_away_privileges':['active'], 'take_away_privileges':['active'],
'targeted_user':self.admin_user.id}, 'targeted_user':self.admin_user.id},
url='/mod/reports/{}/'.format(comment_report.id)) url=f'/mod/reports/{comment_report.id}/')
self.query_for_users() self.query_for_users()
assert response.status == '200 OK' assert response.status == '200 OK'
@ -216,7 +216,7 @@ VGhpcyBpcyB5b3VyIGxhc3Qgd2FybmluZywgcmVndWxhci4uLi4=\n',
response = self.test_app.get('/mod/users/') response = self.test_app.get('/mod/users/')
assert response.status == "200 OK" assert response.status == "200 OK"
user_page_url = '/mod/users/{}/'.format(username) user_page_url = f'/mod/users/{username}/'
response = self.test_app.get(user_page_url) response = self.test_app.get(user_page_url)
assert response.status == "200 OK" assert response.status == "200 OK"
@ -227,7 +227,7 @@ VGhpcyBpcyB5b3VyIGxhc3Qgd2FybmluZywgcmVndWxhci4uLi4=\n',
self.login('admin') self.login('admin')
username = self.user.username username = self.user.username
user_id = self.user.id user_id = self.user.id
ban_url = '/mod/users/{}/ban/'.format(username) ban_url = f'/mod/users/{username}/ban/'
response, context = self.do_post({ response, context = self.do_post({
'user_banned_until':'', 'user_banned_until':'',
'why_user_was_banned':'Because I said so'}, 'why_user_was_banned':'Because I said so'},

View File

@ -147,7 +147,7 @@ otherperson@example.com\n\nSGkgb3RoZXJwZXJzb24sCmNocmlzIGNvbW1lbnRlZCBvbiB5b3VyI
self.logout() self.logout()
self.login('otherperson', 'nosreprehto') self.login('otherperson', 'nosreprehto')
self.test_app.get(media_uri_slug + 'c/{}/'.format(comment_id)) self.test_app.get(media_uri_slug + f'c/{comment_id}/')
notification = Notification.query.filter_by(id=notification_id).first() notification = Notification.query.filter_by(id=notification_id).first()

View File

@ -123,7 +123,7 @@ class TestOAuth:
def to_authorize_headers(self, data): def to_authorize_headers(self, data):
headers = "" headers = ""
for key, value in data.items(): for key, value in data.items():
headers += '{}="{}",'.format(key, value) headers += f'{key}="{value}",'
return {"Authorization": "OAuth " + headers[:-1]} return {"Authorization": "OAuth " + headers[:-1]}
def test_request_token(self): def test_request_token(self):

View File

@ -143,7 +143,7 @@ class TestReportFiling:
{'action_to_resolve':['userban', 'delete'], {'action_to_resolve':['userban', 'delete'],
'targeted_user':allie_user.id, 'targeted_user':allie_user.id,
'resolution_content':'This is a test of archiving reports.'}, 'resolution_content':'This is a test of archiving reports.'},
url='/mod/reports/{}/'.format(comment_report.id)) url=f'/mod/reports/{comment_report.id}/')
assert response.status == "302 FOUND" assert response.status == "302 FOUND"
allie_user, natalie_user = self.query_for_users() allie_user, natalie_user = self.query_for_users()

View File

@ -173,7 +173,7 @@ class BaseTestSubmission:
def check_normal_upload(self, title, filename): def check_normal_upload(self, title, filename):
response, context = self.do_post({'title': title}, do_follow=True, response, context = self.do_post({'title': title}, do_follow=True,
**self.upload_data(filename)) **self.upload_data(filename))
self.check_url(response, '/u/{}/'.format(self.our_user().username)) self.check_url(response, f'/u/{self.our_user().username}/')
assert 'mediagoblin/user_pages/user.html' in context assert 'mediagoblin/user_pages/user.html' in context
# Make sure the media view is at least reachable, logged in... # Make sure the media view is at least reachable, logged in...
url = '/u/{}/m/{}/'.format(self.our_user().username, url = '/u/{}/m/{}/'.format(self.our_user().username,
@ -215,7 +215,7 @@ class TestSubmissionBasics(BaseTestSubmission):
# User uploaded should be the same as GOOD_JPG size in Mb # User uploaded should be the same as GOOD_JPG size in Mb
file_size = os.stat(GOOD_JPG).st_size / (1024.0 * 1024) file_size = os.stat(GOOD_JPG).st_size / (1024.0 * 1024)
file_size = float('{:.2f}'.format(file_size)) file_size = float(f'{file_size:.2f}')
# Reload user # Reload user
assert self.our_user().uploaded == file_size assert self.our_user().uploaded == file_size
@ -242,7 +242,7 @@ class TestSubmissionBasics(BaseTestSubmission):
response, context = self.do_post({'title': 'Normal upload 4'}, response, context = self.do_post({'title': 'Normal upload 4'},
do_follow=True, do_follow=True,
**self.upload_data(GOOD_JPG)) **self.upload_data(GOOD_JPG))
self.check_url(response, '/u/{}/'.format(self.our_user().username)) self.check_url(response, f'/u/{self.our_user().username}/')
assert 'mediagoblin/user_pages/user.html' in context assert 'mediagoblin/user_pages/user.html' in context
# Shouldn't have uploaded # Shouldn't have uploaded
@ -257,7 +257,7 @@ class TestSubmissionBasics(BaseTestSubmission):
response, context = self.do_post({'title': 'Normal upload 5'}, response, context = self.do_post({'title': 'Normal upload 5'},
do_follow=True, do_follow=True,
**self.upload_data(GOOD_JPG)) **self.upload_data(GOOD_JPG))
self.check_url(response, '/u/{}/'.format(self.our_user().username)) self.check_url(response, f'/u/{self.our_user().username}/')
assert 'mediagoblin/user_pages/user.html' in context assert 'mediagoblin/user_pages/user.html' in context
# Shouldn't have uploaded # Shouldn't have uploaded
@ -421,7 +421,7 @@ class TestSubmissionBasics(BaseTestSubmission):
# they'll be caught as failures during the processing step. # they'll be caught as failures during the processing step.
response, context = self.do_post({'title': title}, do_follow=True, response, context = self.do_post({'title': title}, do_follow=True,
**self.upload_data(filename)) **self.upload_data(filename))
self.check_url(response, '/u/{}/'.format(self.our_user().username)) self.check_url(response, f'/u/{self.our_user().username}/')
entry = mg_globals.database.MediaEntry.query.filter_by(title=title).first() entry = mg_globals.database.MediaEntry.query.filter_by(title=title).first()
assert entry.state == 'failed' assert entry.state == 'failed'
assert entry.fail_error == 'mediagoblin.processing:BadMediaFail' assert entry.fail_error == 'mediagoblin.processing:BadMediaFail'
@ -583,7 +583,7 @@ class TestSubmissionVideo(BaseTestSubmission):
assert len(result) == len(video_config['available_resolutions']) assert len(result) == len(video_config['available_resolutions'])
for i in range(len(video_config['available_resolutions'])): for i in range(len(video_config['available_resolutions'])):
media_file = MediaFile.query.filter_by(media_entry=media.id, media_file = MediaFile.query.filter_by(media_entry=media.id,
name=('webm_{}'.format(str(result[i][0])))).first() name=(f'webm_{str(result[i][0])}')).first()
# check media_file label # check media_file label
assert result[i][0] == video_config['available_resolutions'][i] assert result[i][0] == video_config['available_resolutions'][i]
# check dimensions of media_file # check dimensions of media_file
@ -771,6 +771,6 @@ class TestSubmissionPDF(BaseTestSubmission):
response, context = self.do_post({'title': 'Normal upload 3 (pdf)'}, response, context = self.do_post({'title': 'Normal upload 3 (pdf)'},
do_follow=True, do_follow=True,
**self.upload_data(GOOD_PDF)) **self.upload_data(GOOD_PDF))
self.check_url(response, '/u/{}/'.format(self.our_user().username)) self.check_url(response, f'/u/{self.our_user().username}/')
assert 'mediagoblin/user_pages/user.html' in context assert 'mediagoblin/user_pages/user.html' in context

View File

@ -128,7 +128,7 @@ class PluginManager:
def register_route(self, route): def register_route(self, route):
"""Registers a single route""" """Registers a single route"""
_log.debug('registering route: {}'.format(route)) _log.debug(f'registering route: {route}')
self.routes.append(route) self.routes.append(route)
def get_routes(self): def get_routes(self):

View File

@ -47,7 +47,7 @@ def json_processing_callback(entry):
Send an HTTP post to the registered callback url, if any. Send an HTTP post to the registered callback url, if any.
''' '''
if not entry.processing_metadata: if not entry.processing_metadata:
_log.debug('No processing callback URL for {}'.format(entry)) _log.debug(f'No processing callback URL for {entry}')
return return
url = entry.processing_metadata[0].callback_url url = entry.processing_metadata[0].callback_url
@ -76,7 +76,7 @@ def json_processing_callback(entry):
try: try:
request.urlopen(request) request.urlopen(request)
_log.debug('Processing callback for {} sent'.format(entry)) _log.debug(f'Processing callback for {entry} sent')
return True return True
except request.HTTPError: except request.HTTPError:

View File

@ -105,7 +105,7 @@ def render_http_exception(request, exc, description):
elif stock_desc and exc.code == 404: elif stock_desc and exc.code == 404:
return render_404(request) return render_404(request)
return render_error(request, title='{} {}'.format(exc.code, exc.name), return render_error(request, title=f'{exc.code} {exc.name}',
err_msg=description, err_msg=description,
status=exc.code) status=exc.code)

View File

@ -49,7 +49,7 @@ def endpoint_to_controller(rule):
endpoint = rule.endpoint endpoint = rule.endpoint
view_func = rule.gmg_controller view_func = rule.gmg_controller
_log.debug('endpoint: {} view_func: {}'.format(endpoint, view_func)) _log.debug(f'endpoint: {endpoint} view_func: {view_func}')
# import the endpoint, or if it's already a callable, call that # import the endpoint, or if it's already a callable, call that
if isinstance(view_func, str): if isinstance(view_func, str):

View File

@ -69,10 +69,10 @@ def locale_to_lower_upper(locale):
""" """
if '-' in locale: if '-' in locale:
lang, country = locale.split('-', 1) lang, country = locale.split('-', 1)
return '{}_{}'.format(lang.lower(), country.upper()) return f'{lang.lower()}_{country.upper()}'
elif '_' in locale: elif '_' in locale:
lang, country = locale.split('_', 1) lang, country = locale.split('_', 1)
return '{}_{}'.format(lang.lower(), country.upper()) return f'{lang.lower()}_{country.upper()}'
else: else:
return locale.lower() return locale.lower()
@ -83,7 +83,7 @@ def locale_to_lower_lower(locale):
""" """
if '_' in locale: if '_' in locale:
lang, country = locale.split('_', 1) lang, country = locale.split('_', 1)
return '{}-{}'.format(lang.lower(), country.lower()) return f'{lang.lower()}-{country.lower()}'
else: else:
return locale.lower() return locale.lower()