Jesús committed 2020-02-25 12:57:22 -05:00
parent a1dd60961f
commit a8cc611240
No known key found for this signature in database
GPG Key ID: F6EE7BC59A315766
9 changed files with 74 additions and 46 deletions

View File

@@ -4,8 +4,6 @@ css-js-minify wrapper for Pelican
"""
import glob
import os
import sys
from .minify import (
process_single_css_file,
@@ -14,6 +12,7 @@ from .minify import (
from pelican import signals
def main(pelican):
""" Compiler """
for file in glob.iglob(pelican.output_path + '/**/*.css', recursive=True):
@@ -21,10 +20,12 @@ def main(pelican):
for file in glob.iglob(pelican.output_path + '/**/*.js', recursive=True):
process_single_js_file(file, overwrite=True)
def register():
""" Register """
signals.finalized.connect(main)
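Note: the wrapper above follows Pelican's standard plugin pattern: register() connects main to the finalized signal, and main then walks the output tree minifying every CSS and JS file. A minimal sketch of enabling it from pelicanconf.py, assuming the plugin directory sits in a local plugins folder and is importable as css_js_minify (both names are assumptions, not taken from this commit):

# pelicanconf.py sketch; the folder and module names below are assumed
PLUGIN_PATHS = ['plugins']
PLUGINS = ['css_js_minify']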
SUPPORT_JS = """
-----------------------------------------------------------------
COMPRESSOR:

View File

@@ -27,11 +27,12 @@ __all__ = ('process_multiple_files', 'prefixer_extensioner',
'make_arguments_parser', 'main')
color = {
'cyan' : '\033[1;36m',
'end' : '\033[0m',
'green' : '\033[1;32m'
'cyan': '\033[1;36m',
'end': '\033[0m',
'green': '\033[1;32m'
}
def process_multiple_files(file_path, watch=False, wrap=False, timestamp=False,
comments=False, sort=False, overwrite=False,
zipy=False, prefix='', add_hash=False):
@@ -95,7 +96,9 @@ def process_single_css_file(css_file_path, wrap=False, timestamp=False,
zipy=False, prefix='', add_hash=False,
output_path=None):
"""Process a single CSS file."""
print("Processing %sCSS%s file: %s" % (color['cyan'], color['end'], css_file_path))
print("Processing %sCSS%s file: %s" % (color['cyan'],
color['end'],
css_file_path))
with open(css_file_path, encoding="utf-8") as css_file:
original_css = css_file.read()
@@ -134,7 +137,9 @@ def process_single_css_file(css_file_path, wrap=False, timestamp=False,
def process_single_js_file(js_file_path, timestamp=False, overwrite=False,
zipy=False, output_path=None):
"""Process a single JS file."""
print("Processing %sJS%s file: %s" % (color['green'], color['end'], js_file_path))
print("Processing %sJS%s file: %s" % (color['green'],
color['end'],
js_file_path))
with open(js_file_path, encoding="utf-8") as js_file:
original_js = js_file.read()
print("INPUT: Reading JS file %s" % js_file_path)
@@ -173,7 +178,8 @@ def make_arguments_parser():
SHA1 HEX-Digest 11 Chars Hash on Filenames is used for Server Cache.
CSS Properties are Alpha-Sorted, to help spot cloned ones, Selectors not.
Watch works for whole folders, with minimum of ~60 Secs between runs.""")
# parser.add_argument('--version', action='version', version=css_js_minify.__version__)
# parser.add_argument('--version', action='version',
# version=css_js_minify.__version__)
parser.add_argument('fullpath', metavar='fullpath', type=str,
help='Full path to local file or folder.')
parser.add_argument('--wrap', action='store_true',

View File

@@ -35,7 +35,7 @@ except ImportError:
_MAIN_SETTINGS = None # settings dict of the main Pelican instance
_MAIN_LANG = None # lang of the main Pelican instance
_MAIN_SITEURL = None # siteurl of the main Pelican instance
_MAIN_STATIC_FILES = None # list of Static instances the main Pelican instance
_MAIN_STATIC_FILES = None # list of Static instances the main Pelican
_SUBSITE_QUEUE = {} # map: lang -> settings overrides
_SITE_DB = OrderedDict() # OrderedDict: lang -> siteurl
_SITES_RELPATH_DB = {} # map: (lang, base_lang) -> relpath
@@ -254,12 +254,16 @@ def filter_contents_translations(generator):
hiding_func = inspector.hiding_function()
untrans_policy = inspector.untranslated_policy(default='hide')
for (contents, other_contents) in inspector.contents_list_pairs():
for content in other_contents: # save any hidden native content first
if content.lang == current_lang: # in native lang
# save any hidden native content first
for content in other_contents:
# in native lang
if content.lang == current_lang:
# save the native URL attr formatted in the current locale
_NATIVE_CONTENT_URL_DB[content.source_path] = content.url
for content in contents[:]: # copy for removing in loop
if content.lang == current_lang: # in native lang
# copy for removing in loop
for content in contents[:]:
# in native lang
if content.lang == current_lang:
# save the native URL attr formatted in the current locale
_NATIVE_CONTENT_URL_DB[content.source_path] = content.url
elif content.lang in langs_with_sites and untrans_policy != 'keep':
@@ -276,7 +280,8 @@ def install_templates_translations(generator):
Only if the 'jinja2.ext.i18n' jinja2 extension is enabled
the translations for the current DEFAULT_LANG are installed.
'''
if 'JINJA_ENVIRONMENT' in generator.settings: # pelican 3.7+
# pelican 3.7+
if 'JINJA_ENVIRONMENT' in generator.settings:
jinja_extensions = generator.settings['JINJA_ENVIRONMENT'].get(
'extensions', [])
else:
@@ -357,16 +362,19 @@ def interlink_removed_content(generator):
def interlink_static_files(generator):
'''Add links to static files in the main site if necessary'''
# customized STATIC_PATHS
if generator.settings['STATIC_PATHS'] != []:
return # customized STATIC_PATHS
try: # minimize attr lookup
return
# minimize attr lookup
try:
static_content = generator.context['static_content']
except KeyError:
static_content = generator.context['filenames']
relpath = relpath_to_site(generator.settings['DEFAULT_LANG'], _MAIN_LANG)
for staticfile in _MAIN_STATIC_FILES:
if staticfile.get_relative_source_path() not in static_content:
staticfile = copy(staticfile) # prevent override in main site
# prevent override in main site
staticfile = copy(staticfile)
staticfile.override_url = posixpath.join(relpath, staticfile.url)
try:
generator.add_source_path(staticfile, static=True)
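The copy(staticfile) call above matters because the Static instances in _MAIN_STATIC_FILES are shared with the main site; setting override_url directly on a shared object would leak the subsite URL into the main build. A small self-contained sketch of that pattern, with a made-up Asset class standing in for Pelican's Static content:

# copy-before-override sketch; Asset is a hypothetical stand-in, not Pelican's class
import posixpath
from copy import copy

class Asset:
    def __init__(self, url):
        self.url = url

shared = Asset('images/logo.png')
localized = copy(shared)                                   # shallow copy, so...
localized.override_url = posixpath.join('..', shared.url)  # ...this stays local
assert not hasattr(shared, 'override_url')                 # main-site object untouched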

View File

@@ -53,7 +53,8 @@ def format_js(gen, metastring, formatter):
formatted_strings = []
for i in range(len(metalist)):
pos = position_regex.search(metalist[i]).group()
format_string = formatter.format(site_url, metalist[i][:-len(pos)], pos)
format_string = formatter.format(
site_url, metalist[i][:-len(pos)], pos)
formatted_strings.append(format_string)
return formatted_strings
@@ -89,7 +90,10 @@ def add_tags(gen, metadata):
return head + replace_with + tail
if 'js' in metadata.keys():
minification_string = '.min'
metadata['js'] = replace_last(metadata['js'], '.js', minification_string + '.js')
metadata['js'] = replace_last(
metadata['js'],
'.js',
minification_string + '.js')
script = '<script src="{0}/vendor/{1}"></script>{2}'
metadata['js'] = format_js(gen, metadata['js'], script)
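replace_last itself is only partly visible here (its closing return head + replace_with + tail), but the call site shows its job: swap only the last occurrence of a substring, here turning *.js names into *.min.js. A minimal sketch of such a helper, reconstructed from the call site and return statement rather than copied from the plugin:

# illustrative reconstruction of a replace_last-style helper; not the plugin's code
def replace_last(text, old, new):
    """Replace only the last occurrence of old in text."""
    index = text.rfind(old)
    if index == -1:
        return text
    head, tail = text[:index], text[index + len(old):]
    return head + new + tail

# e.g. replace_last('vendor/app.js', '.js', '.min.js') -> 'vendor/app.min.js'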

View File

@@ -14,23 +14,29 @@ import pelican.readers as readers
from pelican.contents import Content
# Python 3 __cmp__ compatibility mixin from https://stackoverflow.com/a/39166382/807307
# Python 3 compatibility mixin from https://stackoverflow.com/a/39166382/807307
PY3 = sys.version_info[0] >= 3
if PY3:
def cmp(a, b):
return (a > b) - (a < b)
# mixin class for Python3 supporting __cmp__
class PY3__cmp__:
def __eq__(self, other):
return self.__cmp__(other) == 0
def __ne__(self, other):
return self.__cmp__(other) != 0
def __gt__(self, other):
return self.__cmp__(other) > 0
def __lt__(self, other):
return self.__cmp__(other) < 0
def __ge__(self, other):
return self.__cmp__(other) >= 0
def __le__(self, other):
return self.__cmp__(other) <= 0
else:
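The PY3__cmp__ mixin above derives all six rich comparison methods from a single __cmp__, which is why the Comment class below only needs to define __cmp__. A short usage sketch of the mixin (the Version class is invented for illustration and is not part of this plugin):

# usage sketch for the PY3__cmp__ mixin defined above; Version is hypothetical
class Version(PY3__cmp__):
    def __init__(self, number):
        self.number = number

    def __cmp__(self, other):
        # negative, zero or positive, like Python 2's cmp()
        return (self.number > other.number) - (self.number < other.number)

assert Version(1) < Version(2)
assert Version(3) >= Version(3)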
@@ -40,7 +46,8 @@ else:
class Comment(Content, PY3__cmp__):
mandatory_properties = ('post_id', 'author')
default_template = 'comment' # this is required, but not used
# this is required, but not used
default_template = 'comment'
default_status = 'published'
def __cmp__(self, other):
@@ -98,6 +105,7 @@ class CommentReader(object):
def get_comments(self, slug):
return self._comments[slug]
def comment_initialization(generator):
"""
Set up the comment plugin.

View File

@@ -54,6 +54,7 @@ def format_date(date):
tz = "-00:00"
return date.strftime("%Y-%m-%dT%H:%M:%S") + tz
class SitemapGenerator(object):
def __init__(self, context, settings, path, theme, output_path, *null):
@@ -63,7 +64,6 @@ class SitemapGenerator(object):
self.now = datetime.now()
self.siteurl = settings.get('SITEURL')
self.default_timezone = settings.get('TIMEZONE', 'UTC')
self.timezone = getattr(self, 'timezone', self.default_timezone)
self.timezone = timezone(self.timezone)
@@ -169,7 +169,7 @@ class SitemapGenerator(object):
pageurl = '' if page.url == 'index.html' else page.url
#Exclude URLs from the sitemap:
# Exclude URLs from the sitemap:
if self.format == 'xml':
flag = False
for regstr in self.sitemapExclude:
@@ -177,7 +178,8 @@ class SitemapGenerator(object):
flag = True
break
if not flag:
fd.write(XML_URL.format(self.siteurl, pageurl, lastmod, chfreq, pri))
fd.write(XML_URL.format(
self.siteurl, pageurl, lastmod, chfreq, pri))
else:
fd.write(self.siteurl + '/' + pageurl + '\n')
@@ -193,9 +194,11 @@ class SitemapGenerator(object):
for (wrapper, articles) in wrappers:
lastmod = datetime.min.replace(tzinfo=self.timezone)
for article in articles:
lastmod = max(lastmod, article.date.replace(tzinfo=self.timezone))
lastmod = max(lastmod,
article.date.replace(tzinfo=self.timezone))
try:
modified = self.get_date_modified(article, datetime.min).replace(tzinfo=self.timezone)
modified = self.get_date_modified(
article, datetime.min).replace(tzinfo=self.timezone)
lastmod = max(lastmod, modified)
except ValueError:
# Supressed: user will be notified.
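The aggregation above takes, per wrapper, the newest of its articles' publication dates and, when an article has one, its modified date. A standalone sketch of that logic with plain timezone-aware datetimes (the sample dates are placeholders):

# illustrative sketch of the lastmod aggregation; the dates are made up
from datetime import datetime, timezone

utc = timezone.utc
published = [datetime(2020, 1, 5, tzinfo=utc), datetime(2020, 2, 20, tzinfo=utc)]
modified = datetime(2020, 2, 25, tzinfo=utc)

lastmod = datetime.min.replace(tzinfo=utc)
for date in published:
    lastmod = max(lastmod, date)
lastmod = max(lastmod, modified)
assert lastmod == datetime(2020, 2, 25, tzinfo=utc)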
@@ -203,12 +206,13 @@ class SitemapGenerator(object):
setattr(wrapper, 'modified', str(lastmod))
def generate_output(self, writer):
path = os.path.join(self.output_path, 'sitemap.{0}'.format(self.format))
path = os.path.join(
self.output_path, 'sitemap.{0}'.format(self.format))
pages = self.context['pages'] + self.context['articles'] \
+ [ c for (c, a) in self.context['categories']] \
+ [ t for (t, a) in self.context['tags']] \
+ [ a for (a, b) in self.context['authors']]
+ [c for (c, a) in self.context['categories']] \
+ [t for (t, a) in self.context['tags']] \
+ [a for (a, b) in self.context['authors']]
self.set_url_wrappers_modification_date(self.context['categories'])
self.set_url_wrappers_modification_date(self.context['tags'])

View File

@@ -35,7 +35,6 @@ class Tipue_Search_JSON_Generator(object):
self.output_path = output_path
self.json_nodes = []
def create_json_node(self, page):
if getattr(page, 'status', 'published') != 'published':
@@ -61,7 +60,6 @@ class Tipue_Search_JSON_Generator(object):
self.json_nodes.append(node)
def create_tpage_node(self, srclink):
srcfile = open(os.path.join(self.output_path, self.tpages[srclink]), encoding='utf-8')
@@ -80,7 +78,6 @@ class Tipue_Search_JSON_Generator(object):
self.json_nodes.append(node)
def generate_output(self, writer):
path = os.path.join(self.output_path, 'tipuesearch_content.json')
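For context, a generator like this one typically finishes by dumping the collected nodes as JSON at that path. A hedged sketch of that final step; the write_search_index name, the pages wrapper key and the json.dump call are assumptions about Tipue Search's expected format, not code from this plugin:

# illustrative sketch only; function name, node layout and 'pages' key are assumed
import json
import os

def write_search_index(output_path, json_nodes):
    path = os.path.join(output_path, 'tipuesearch_content.json')
    with open(path, 'w', encoding='utf-8') as fd:
        json.dump({'pages': json_nodes}, fd, ensure_ascii=False)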