From a8cc6112400142c7a69c6d6d2829364ef3b3a66f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jes=C3=BAs?=
Date: Tue, 25 Feb 2020 12:57:22 -0500
Subject: [PATCH] pep8

---
 plugins/compressor/compressor.py             |  5 +--
 plugins/compressor/js_minifer.py             |  4 +--
 plugins/compressor/minify.py                 | 18 ++++++----
 plugins/i18n_subsites/i18n_subsites.py       | 38 ++++++++++++--------
 plugins/i18n_subsites/test_i18n_subsites.py  |  8 ++---
 plugins/pelican-js/custom_js.py              |  8 +++--
 plugins/pelican_comments/pelican_comments.py | 12 +++++--
 plugins/sitemap/sitemap.py                   | 24 +++++++------
 plugins/tipue-search/tipue_search.py         |  3 --
 9 files changed, 74 insertions(+), 46 deletions(-)

diff --git a/plugins/compressor/compressor.py b/plugins/compressor/compressor.py
index daa2ec9..652ac51 100644
--- a/plugins/compressor/compressor.py
+++ b/plugins/compressor/compressor.py
@@ -4,8 +4,6 @@
 css-js-minify wrapper for Pelican
 """
 import glob
-import os
-import sys
 
 from .minify import (
     process_single_css_file,
@@ -14,6 +12,7 @@ from .minify import (
 
 from pelican import signals
 
+
 def main(pelican):
     """ Compiler """
     for file in glob.iglob(pelican.output_path + '/**/*.css', recursive=True):
@@ -21,10 +20,12 @@ def main(pelican):
     for file in glob.iglob(pelican.output_path + '/**/*.js', recursive=True):
         process_single_js_file(file, overwrite=True)
 
+
 def register():
     """ Register """
     signals.finalized.connect(main)
 
+
 SUPPORT_JS = """
 -----------------------------------------------------------------
 COMPRESSOR:
diff --git a/plugins/compressor/js_minifer.py b/plugins/compressor/js_minifer.py
index c03f887..6654b76 100644
--- a/plugins/compressor/js_minifer.py
+++ b/plugins/compressor/js_minifer.py
@@ -13,8 +13,8 @@ def remove_commented_lines(js):
     """Force remove commented out lines from Javascript."""
     result = ""
     for line in js.splitlines():
-        line = re.sub(r"/\*.*\*/", "", line) # (/*COMMENT */)
-        line = re.sub(r"//.*", "", line) # (//COMMENT)
+        line = re.sub(r"/\*.*\*/", "", line)  # (/*COMMENT */)
+        line = re.sub(r"//.*", "", line)  # (//COMMENT)
         result += '\n'+line
     return result
 
diff --git a/plugins/compressor/minify.py b/plugins/compressor/minify.py
index dcf50fe..5837849 100644
--- a/plugins/compressor/minify.py
+++ b/plugins/compressor/minify.py
@@ -27,11 +27,12 @@ __all__ = ('process_multiple_files', 'prefixer_extensioner',
            'make_arguments_parser', 'main')
 
 color = {
-    'cyan' : '\033[1;36m',
-    'end' : '\033[0m',
-    'green' : '\033[1;32m'
+    'cyan': '\033[1;36m',
+    'end': '\033[0m',
+    'green': '\033[1;32m'
 }
 
+
 def process_multiple_files(file_path, watch=False, wrap=False,
                            timestamp=False, comments=False, sort=False,
                            overwrite=False, zipy=False, prefix='', add_hash=False):
@@ -95,7 +96,9 @@ def process_single_css_file(css_file_path, wrap=False, timestamp=False,
                              zipy=False, prefix='', add_hash=False,
                              output_path=None):
     """Process a single CSS file."""
-    print("Processing %sCSS%s file: %s" % (color['cyan'], color['end'], css_file_path))
+    print("Processing %sCSS%s file: %s" % (color['cyan'],
+                                           color['end'],
+                                           css_file_path))
     with open(css_file_path, encoding="utf-8") as css_file:
         original_css = css_file.read()
 
@@ -134,7 +137,9 @@ def process_single_css_file(css_file_path, wrap=False, timestamp=False,
 def process_single_js_file(js_file_path, timestamp=False, overwrite=False,
                            zipy=False, output_path=None):
     """Process a single JS file."""
-    print("Processing %sJS%s file: %s" % (color['green'], color['end'], js_file_path))
+    print("Processing %sJS%s file: %s" % (color['green'],
+                                          color['end'],
+                                          js_file_path))
     with open(js_file_path, encoding="utf-8") as js_file:
         original_js = js_file.read()
     print("INPUT: Reading JS file %s" % js_file_path)
@@ -173,7 +178,8 @@ def make_arguments_parser():
     SHA1 HEX-Digest 11 Chars Hash on Filenames is used for Server Cache.
     CSS Properties are Alpha-Sorted, to help spot cloned ones, Selectors not.
     Watch works for whole folders, with minimum of ~60 Secs between runs.""")
-    # parser.add_argument('--version', action='version', version=css_js_minify.__version__)
+    # parser.add_argument('--version', action='version',
+    #                     version=css_js_minify.__version__)
     parser.add_argument('fullpath', metavar='fullpath', type=str,
                         help='Full path to local file or folder.')
     parser.add_argument('--wrap', action='store_true',
diff --git a/plugins/i18n_subsites/i18n_subsites.py b/plugins/i18n_subsites/i18n_subsites.py
index dc27799..18481dd 100644
--- a/plugins/i18n_subsites/i18n_subsites.py
+++ b/plugins/i18n_subsites/i18n_subsites.py
@@ -32,16 +32,16 @@ except ImportError:
 
 
 # Global vars
-_MAIN_SETTINGS = None # settings dict of the main Pelican instance
-_MAIN_LANG = None # lang of the main Pelican instance
-_MAIN_SITEURL = None # siteurl of the main Pelican instance
-_MAIN_STATIC_FILES = None # list of Static instances the main Pelican instance
-_SUBSITE_QUEUE = {} # map: lang -> settings overrides
-_SITE_DB = OrderedDict() # OrderedDict: lang -> siteurl
+_MAIN_SETTINGS = None  # settings dict of the main Pelican instance
+_MAIN_LANG = None  # lang of the main Pelican instance
+_MAIN_SITEURL = None  # siteurl of the main Pelican instance
+_MAIN_STATIC_FILES = None  # list of Static instances the main Pelican
+_SUBSITE_QUEUE = {}  # map: lang -> settings overrides
+_SITE_DB = OrderedDict()  # OrderedDict: lang -> siteurl
 _SITES_RELPATH_DB = {} # map: (lang, base_lang) -> relpath
 # map: generator -> list of removed contents that need interlinking
 _GENERATOR_DB = {}
-_NATIVE_CONTENT_URL_DB = {} # map: source_path -> content in its native lang
+_NATIVE_CONTENT_URL_DB = {}  # map: source_path -> content in its native lang
 _LOGGER = logging.getLogger(__name__)
 
 
@@ -254,12 +254,16 @@ def filter_contents_translations(generator):
     hiding_func = inspector.hiding_function()
     untrans_policy = inspector.untranslated_policy(default='hide')
     for (contents, other_contents) in inspector.contents_list_pairs():
-        for content in other_contents: # save any hidden native content first
-            if content.lang == current_lang: # in native lang
+        # save any hidden native content first
+        for content in other_contents:
+            # in native lang
+            if content.lang == current_lang:
                 # save the native URL attr formatted in the current locale
                 _NATIVE_CONTENT_URL_DB[content.source_path] = content.url
-        for content in contents[:]: # copy for removing in loop
-            if content.lang == current_lang: # in native lang
+        # copy for removing in loop
+        for content in contents[:]:
+            # in native lang
+            if content.lang == current_lang:
                 # save the native URL attr formatted in the current locale
                 _NATIVE_CONTENT_URL_DB[content.source_path] = content.url
             elif content.lang in langs_with_sites and untrans_policy != 'keep':
@@ -276,7 +280,8 @@ def install_templates_translations(generator):
     Only if the 'jinja2.ext.i18n' jinja2 extension is enabled
     the translations for the current DEFAULT_LANG are installed.
     '''
-    if 'JINJA_ENVIRONMENT' in generator.settings: # pelican 3.7+
+    # pelican 3.7+
+    if 'JINJA_ENVIRONMENT' in generator.settings:
         jinja_extensions = generator.settings['JINJA_ENVIRONMENT'].get(
             'extensions', [])
     else:
@@ -357,16 +362,19 @@ def interlink_removed_content(generator):
 
 def interlink_static_files(generator):
     '''Add links to static files in the main site if necessary'''
+    # customized STATIC_PATHS
     if generator.settings['STATIC_PATHS'] != []:
-        return # customized STATIC_PATHS
-    try: # minimize attr lookup
+        return
+    # minimize attr lookup
+    try:
         static_content = generator.context['static_content']
     except KeyError:
         static_content = generator.context['filenames']
     relpath = relpath_to_site(generator.settings['DEFAULT_LANG'], _MAIN_LANG)
     for staticfile in _MAIN_STATIC_FILES:
         if staticfile.get_relative_source_path() not in static_content:
-            staticfile = copy(staticfile) # prevent override in main site
+            # prevent override in main site
+            staticfile = copy(staticfile)
             staticfile.override_url = posixpath.join(relpath, staticfile.url)
             try:
                 generator.add_source_path(staticfile, static=True)
diff --git a/plugins/i18n_subsites/test_i18n_subsites.py b/plugins/i18n_subsites/test_i18n_subsites.py
index c6c1d6a..07c603a 100644
--- a/plugins/i18n_subsites/test_i18n_subsites.py
+++ b/plugins/i18n_subsites/test_i18n_subsites.py
@@ -42,12 +42,12 @@ class TestSettingsManipulation(unittest.TestCase):
         self.settings['PELICAN_CLASS'] = object
         cls = i18ns.get_pelican_cls(self.settings)
         self.assertIs(cls, object)
-    
+
     def test_get_pelican_cls_str(self):
         '''Test that we get correct class given by string'''
         cls = i18ns.get_pelican_cls(self.settings)
         self.assertIs(cls, Pelican)
-    
+
 
 class TestSitesRelpath(unittest.TestCase):
     '''Test relative path between sites generation'''
@@ -72,7 +72,7 @@ class TestSitesRelpath(unittest.TestCase):
 
         self.assertEqual(i18ns.relpath_to_site('en', 'de'), 'de')
         self.assertEqual(i18ns.relpath_to_site('de', 'en'), '..')
-    
+
 
 class TestRegistration(unittest.TestCase):
     '''Test plugin registration'''
@@ -91,7 +91,7 @@ class TestRegistration(unittest.TestCase):
         self.assertIn(id(handler), sig.receivers)
         # clean up
         sig.disconnect(handler)
-    
+
 
 class TestFullRun(unittest.TestCase):
     '''Test running Pelican with the Plugin'''
diff --git a/plugins/pelican-js/custom_js.py b/plugins/pelican-js/custom_js.py
index 7ebeed9..247dbd0 100644
--- a/plugins/pelican-js/custom_js.py
+++ b/plugins/pelican-js/custom_js.py
@@ -53,7 +53,8 @@ def format_js(gen, metastring, formatter):
     formatted_strings = []
     for i in range(len(metalist)):
         pos = position_regex.search(metalist[i]).group()
-        format_string = formatter.format(site_url, metalist[i][:-len(pos)], pos)
+        format_string = formatter.format(
+            site_url, metalist[i][:-len(pos)], pos)
         formatted_strings.append(format_string)
     return formatted_strings
 
@@ -89,7 +90,10 @@ def add_tags(gen, metadata):
         return head + replace_with + tail
     if 'js' in metadata.keys():
         minification_string = '.min'
-        metadata['js'] = replace_last(metadata['js'], '.js', minification_string + '.js')
+        metadata['js'] = replace_last(
+            metadata['js'],
+            '.js',
+            minification_string + '.js')
         script = '{2}'
         metadata['js'] = format_js(gen, metadata['js'], script)
 
diff --git a/plugins/pelican_comments/pelican_comments.py b/plugins/pelican_comments/pelican_comments.py
index c1c1cc8..80d9ec3 100644
--- a/plugins/pelican_comments/pelican_comments.py
+++ b/plugins/pelican_comments/pelican_comments.py
@@ -14,23 +14,29 @@ import pelican.readers as readers
 from pelican.contents import Content
 
 
-# Python 3 __cmp__ compatibility mixin from https://stackoverflow.com/a/39166382/807307
+# Python 3 compatibility mixin from https://stackoverflow.com/a/39166382/807307
 PY3 = sys.version_info[0] >= 3
 if PY3:
     def cmp(a, b):
         return (a > b) - (a < b)
     # mixin class for Python3 supporting __cmp__
+
     class PY3__cmp__:
         def __eq__(self, other):
             return self.__cmp__(other) == 0
+
         def __ne__(self, other):
             return self.__cmp__(other) != 0
+
        def __gt__(self, other):
            return self.__cmp__(other) > 0
+
        def __lt__(self, other):
            return self.__cmp__(other) < 0
+
        def __ge__(self, other):
            return self.__cmp__(other) >= 0
+
        def __le__(self, other):
            return self.__cmp__(other) <= 0
 else:
@@ -40,7 +46,8 @@ else:
 
 class Comment(Content, PY3__cmp__):
     mandatory_properties = ('post_id', 'author')
-    default_template = 'comment' # this is required, but not used
+    # this is required, but not used
+    default_template = 'comment'
     default_status = 'published'
 
     def __cmp__(self, other):
@@ -98,6 +105,7 @@ class CommentReader(object):
     def get_comments(self, slug):
         return self._comments[slug]
 
+
 def comment_initialization(generator):
     """
     Set up the comment plugin.
diff --git a/plugins/sitemap/sitemap.py b/plugins/sitemap/sitemap.py
index 0cf7227..327ca2b 100644
--- a/plugins/sitemap/sitemap.py
+++ b/plugins/sitemap/sitemap.py
@@ -54,6 +54,7 @@ def format_date(date):
         tz = "-00:00"
     return date.strftime("%Y-%m-%dT%H:%M:%S") + tz
 
+
 class SitemapGenerator(object):
 
     def __init__(self, context, settings, path, theme, output_path, *null):
@@ -63,7 +64,6 @@ class SitemapGenerator(object):
         self.now = datetime.now()
 
         self.siteurl = settings.get('SITEURL')
-
         self.default_timezone = settings.get('TIMEZONE', 'UTC')
         self.timezone = getattr(self, 'timezone', self.default_timezone)
         self.timezone = timezone(self.timezone)
@@ -103,7 +103,7 @@ class SitemapGenerator(object):
 
         valid_keys = ('articles', 'indexes', 'pages')
         valid_chfreqs = ('always', 'hourly', 'daily', 'weekly', 'monthly',
-            'yearly', 'never')
+                         'yearly', 'never')
 
         if isinstance(pris, dict):
             # We use items for Py3k compat. .iteritems() otherwise
@@ -169,7 +169,7 @@ class SitemapGenerator(object):
 
                 pageurl = '' if page.url == 'index.html' else page.url
 
-                #Exclude URLs from the sitemap:
+                # Exclude URLs from the sitemap:
                 if self.format == 'xml':
                     flag = False
                     for regstr in self.sitemapExclude:
@@ -177,7 +177,8 @@ class SitemapGenerator(object):
                             flag = True
                             break
                     if not flag:
-                        fd.write(XML_URL.format(self.siteurl, pageurl, lastmod, chfreq, pri))
+                        fd.write(XML_URL.format(
+                            self.siteurl, pageurl, lastmod, chfreq, pri))
                 else:
                     fd.write(self.siteurl + '/' + pageurl + '\n')
 
@@ -193,9 +194,11 @@ class SitemapGenerator(object):
         for (wrapper, articles) in wrappers:
             lastmod = datetime.min.replace(tzinfo=self.timezone)
             for article in articles:
-                lastmod = max(lastmod, article.date.replace(tzinfo=self.timezone))
+                lastmod = max(lastmod,
+                              article.date.replace(tzinfo=self.timezone))
                 try:
-                    modified = self.get_date_modified(article, datetime.min).replace(tzinfo=self.timezone)
+                    modified = self.get_date_modified(
+                        article, datetime.min).replace(tzinfo=self.timezone)
                     lastmod = max(lastmod, modified)
                 except ValueError:
                     # Supressed: user will be notified.
@@ -203,12 +206,13 @@ class SitemapGenerator(object):
             setattr(wrapper, 'modified', str(lastmod))
 
     def generate_output(self, writer):
-        path = os.path.join(self.output_path, 'sitemap.{0}'.format(self.format))
+        path = os.path.join(
+            self.output_path, 'sitemap.{0}'.format(self.format))
 
         pages = self.context['pages'] + self.context['articles'] \
-            + [ c for (c, a) in self.context['categories']] \
-            + [ t for (t, a) in self.context['tags']] \
-            + [ a for (a, b) in self.context['authors']]
+            + [c for (c, a) in self.context['categories']] \
+            + [t for (t, a) in self.context['tags']] \
+            + [a for (a, b) in self.context['authors']]
 
         self.set_url_wrappers_modification_date(self.context['categories'])
         self.set_url_wrappers_modification_date(self.context['tags'])
diff --git a/plugins/tipue-search/tipue_search.py b/plugins/tipue-search/tipue_search.py
index c5ad06d..2880850 100644
--- a/plugins/tipue-search/tipue_search.py
+++ b/plugins/tipue-search/tipue_search.py
@@ -35,7 +35,6 @@ class Tipue_Search_JSON_Generator(object):
         self.output_path = output_path
         self.json_nodes = []
 
-
     def create_json_node(self, page):
 
         if getattr(page, 'status', 'published') != 'published':
@@ -61,7 +60,6 @@ class Tipue_Search_JSON_Generator(object):
 
         self.json_nodes.append(node)
 
-
     def create_tpage_node(self, srclink):
         srcfile = open(os.path.join(self.output_path, self.tpages[srclink]),
                        encoding='utf-8')
@@ -80,7 +78,6 @@ class Tipue_Search_JSON_Generator(object):
 
         self.json_nodes.append(node)
 
-
    def generate_output(self, writer):
        path = os.path.join(self.output_path, 'tipuesearch_content.json')