pep8
commit a8cc611240
parent a1dd60961f
@@ -4,8 +4,6 @@ css-js-minify wrapper for Pelican
 """
 
 import glob
-import os
-import sys
 
 from .minify import (
     process_single_css_file,
@@ -14,6 +12,7 @@ from .minify import (
 
 from pelican import signals
 
+
 def main(pelican):
     """ Compiler """
     for file in glob.iglob(pelican.output_path + '/**/*.css', recursive=True):
@@ -21,10 +20,12 @@ def main(pelican):
     for file in glob.iglob(pelican.output_path + '/**/*.js', recursive=True):
         process_single_js_file(file, overwrite=True)
 
+
 def register():
     """ Register """
     signals.finalized.connect(main)
 
+
 SUPPORT_JS = """
 -----------------------------------------------------------------
 COMPRESSOR:
@@ -13,8 +13,8 @@ def remove_commented_lines(js):
     """Force remove commented out lines from Javascript."""
     result = ""
     for line in js.splitlines():
         line = re.sub(r"/\*.*\*/", "", line)  # (/*COMMENT */)
         line = re.sub(r"//.*", "", line)  # (//COMMENT)
         result += '\n'+line
     return result
 
@@ -27,11 +27,12 @@ __all__ = ('process_multiple_files', 'prefixer_extensioner',
            'make_arguments_parser', 'main')
 
 color = {
-    'cyan' : '\033[1;36m',
-    'end' : '\033[0m',
-    'green' : '\033[1;32m'
+    'cyan': '\033[1;36m',
+    'end': '\033[0m',
+    'green': '\033[1;32m'
 }
 
+
 def process_multiple_files(file_path, watch=False, wrap=False, timestamp=False,
                            comments=False, sort=False, overwrite=False,
                            zipy=False, prefix='', add_hash=False):
@@ -95,7 +96,9 @@ def process_single_css_file(css_file_path, wrap=False, timestamp=False,
                             zipy=False, prefix='', add_hash=False,
                             output_path=None):
     """Process a single CSS file."""
-    print("Processing %sCSS%s file: %s" % (color['cyan'], color['end'], css_file_path))
+    print("Processing %sCSS%s file: %s" % (color['cyan'],
+                                           color['end'],
+                                           css_file_path))
     with open(css_file_path, encoding="utf-8") as css_file:
         original_css = css_file.read()
 
@@ -134,7 +137,9 @@ def process_single_css_file(css_file_path, wrap=False, timestamp=False,
 def process_single_js_file(js_file_path, timestamp=False, overwrite=False,
                            zipy=False, output_path=None):
     """Process a single JS file."""
-    print("Processing %sJS%s file: %s" % (color['green'], color['end'], js_file_path))
+    print("Processing %sJS%s file: %s" % (color['green'],
+                                          color['end'],
+                                          js_file_path))
     with open(js_file_path, encoding="utf-8") as js_file:
         original_js = js_file.read()
     print("INPUT: Reading JS file %s" % js_file_path)
@@ -173,7 +178,8 @@ def make_arguments_parser():
     SHA1 HEX-Digest 11 Chars Hash on Filenames is used for Server Cache.
     CSS Properties are Alpha-Sorted, to help spot cloned ones, Selectors not.
     Watch works for whole folders, with minimum of ~60 Secs between runs.""")
-    # parser.add_argument('--version', action='version', version=css_js_minify.__version__)
+    # parser.add_argument('--version', action='version',
+    #                     version=css_js_minify.__version__)
     parser.add_argument('fullpath', metavar='fullpath', type=str,
                         help='Full path to local file or folder.')
    parser.add_argument('--wrap', action='store_true',
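For reference, the remove_commented_lines helper touched above strips JavaScript comments with two line-scoped regexes. A minimal, runnable sketch of the same approach (the sample input is invented here), including one limitation of the line-based regex:

import re


def remove_commented_lines(js):
    """Strip commented-out JS, line by line, as in the function above."""
    result = ""
    for line in js.splitlines():
        line = re.sub(r"/\*.*\*/", "", line)  # drop single-line /* ... */ blocks
        line = re.sub(r"//.*", "", line)  # drop // ... to end of line
        result += '\n' + line
    return result


sample = "var x = 1; // counter\n/* setup */ var y = 2;\nvar u = 'http://e.com';"
print(remove_commented_lines(sample))
# The //-pattern is purely textual, so it also truncates the URL string literal
# in the last line ('http://e.com' becomes 'http:'), which is worth knowing
# before running it over real scripts.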
@@ -32,16 +32,16 @@ except ImportError:
 
 
 # Global vars
 _MAIN_SETTINGS = None  # settings dict of the main Pelican instance
 _MAIN_LANG = None  # lang of the main Pelican instance
 _MAIN_SITEURL = None  # siteurl of the main Pelican instance
-_MAIN_STATIC_FILES = None  # list of Static instances the main Pelican instance
+_MAIN_STATIC_FILES = None  # list of Static instances the main Pelican
 _SUBSITE_QUEUE = {}  # map: lang -> settings overrides
 _SITE_DB = OrderedDict()  # OrderedDict: lang -> siteurl
 _SITES_RELPATH_DB = {}  # map: (lang, base_lang) -> relpath
 # map: generator -> list of removed contents that need interlinking
 _GENERATOR_DB = {}
 _NATIVE_CONTENT_URL_DB = {}  # map: source_path -> content in its native lang
 _LOGGER = logging.getLogger(__name__)
 
 
@@ -254,12 +254,16 @@ def filter_contents_translations(generator):
     hiding_func = inspector.hiding_function()
     untrans_policy = inspector.untranslated_policy(default='hide')
     for (contents, other_contents) in inspector.contents_list_pairs():
-        for content in other_contents:  # save any hidden native content first
-            if content.lang == current_lang:  # in native lang
+        # save any hidden native content first
+        for content in other_contents:
+            # in native lang
+            if content.lang == current_lang:
                 # save the native URL attr formatted in the current locale
                 _NATIVE_CONTENT_URL_DB[content.source_path] = content.url
-        for content in contents[:]:  # copy for removing in loop
-            if content.lang == current_lang:  # in native lang
+        # copy for removing in loop
+        for content in contents[:]:
+            # in native lang
+            if content.lang == current_lang:
                 # save the native URL attr formatted in the current locale
                 _NATIVE_CONTENT_URL_DB[content.source_path] = content.url
             elif content.lang in langs_with_sites and untrans_policy != 'keep':
@@ -276,7 +280,8 @@ def install_templates_translations(generator):
     Only if the 'jinja2.ext.i18n' jinja2 extension is enabled
     the translations for the current DEFAULT_LANG are installed.
     '''
-    if 'JINJA_ENVIRONMENT' in generator.settings:  # pelican 3.7+
+    # pelican 3.7+
+    if 'JINJA_ENVIRONMENT' in generator.settings:
         jinja_extensions = generator.settings['JINJA_ENVIRONMENT'].get(
             'extensions', [])
     else:
@@ -357,16 +362,19 @@ def interlink_removed_content(generator):
 
 def interlink_static_files(generator):
     '''Add links to static files in the main site if necessary'''
+    # customized STATIC_PATHS
     if generator.settings['STATIC_PATHS'] != []:
-        return  # customized STATIC_PATHS
-    try:  # minimize attr lookup
+        return
+    # minimize attr lookup
+    try:
         static_content = generator.context['static_content']
     except KeyError:
         static_content = generator.context['filenames']
     relpath = relpath_to_site(generator.settings['DEFAULT_LANG'], _MAIN_LANG)
     for staticfile in _MAIN_STATIC_FILES:
         if staticfile.get_relative_source_path() not in static_content:
-            staticfile = copy(staticfile)  # prevent override in main site
+            # prevent override in main site
+            staticfile = copy(staticfile)
             staticfile.override_url = posixpath.join(relpath, staticfile.url)
             try:
                 generator.add_source_path(staticfile, static=True)
@@ -42,12 +42,12 @@ class TestSettingsManipulation(unittest.TestCase):
         self.settings['PELICAN_CLASS'] = object
         cls = i18ns.get_pelican_cls(self.settings)
         self.assertIs(cls, object)
 
     def test_get_pelican_cls_str(self):
         '''Test that we get correct class given by string'''
         cls = i18ns.get_pelican_cls(self.settings)
         self.assertIs(cls, Pelican)
 
 
 class TestSitesRelpath(unittest.TestCase):
     '''Test relative path between sites generation'''
@@ -72,7 +72,7 @@ class TestSitesRelpath(unittest.TestCase):
         self.assertEqual(i18ns.relpath_to_site('en', 'de'), 'de')
         self.assertEqual(i18ns.relpath_to_site('de', 'en'), '..')
 
 
 class TestRegistration(unittest.TestCase):
     '''Test plugin registration'''
 
@@ -91,7 +91,7 @@ class TestRegistration(unittest.TestCase):
         self.assertIn(id(handler), sig.receivers)
         # clean up
         sig.disconnect(handler)
 
 
 class TestFullRun(unittest.TestCase):
     '''Test running Pelican with the Plugin'''
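The relpath assertions above pin down the expected geometry between subsites. The real relpath_to_site lives in i18n_subsites and is not shown in this diff; a rough, self-contained sketch that reproduces the asserted behaviour, assuming the main 'en' site at the site root and the 'de' subsite in a 'de/' subdirectory (the _SITE_DB layout below is invented for illustration):

import posixpath

# Hypothetical layout: lang -> path of that language's site root.
_SITE_DB = {'en': '.', 'de': 'de'}


def relpath_to_site(lang, target_lang):
    """Relative path from the site generated for `lang` to the one for `target_lang`."""
    return posixpath.relpath(_SITE_DB[target_lang], _SITE_DB[lang])


assert relpath_to_site('en', 'de') == 'de'
assert relpath_to_site('de', 'en') == '..'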
@@ -53,7 +53,8 @@ def format_js(gen, metastring, formatter):
     formatted_strings = []
     for i in range(len(metalist)):
         pos = position_regex.search(metalist[i]).group()
-        format_string = formatter.format(site_url, metalist[i][:-len(pos)], pos)
+        format_string = formatter.format(
+            site_url, metalist[i][:-len(pos)], pos)
         formatted_strings.append(format_string)
     return formatted_strings
 
@@ -89,7 +90,10 @@ def add_tags(gen, metadata):
         return head + replace_with + tail
     if 'js' in metadata.keys():
         minification_string = '.min'
-        metadata['js'] = replace_last(metadata['js'], '.js', minification_string + '.js')
+        metadata['js'] = replace_last(
+            metadata['js'],
+            '.js',
+            minification_string + '.js')
         script = '<script src="{0}/vendor/{1}"></script>{2}'
         metadata['js'] = format_js(gen, metadata['js'], script)
 
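Only the return line of the replace_last helper appears in the hunk above; the call site rewrites a script name from '.js' to '.min.js' by touching the last occurrence only. A plausible sketch of such a helper, assuming last-occurrence semantics (not the plugin's actual implementation), plus the rewrite it enables:

def replace_last(source, target, replace_with):
    """Replace only the final occurrence of `target` in `source` (sketch)."""
    head, sep, tail = source.rpartition(target)
    if not sep:  # target absent: leave the string untouched
        return source
    return head + replace_with + tail


# e.g. 'js.cookie.js' becomes 'js.cookie.min.js': only the trailing '.js' is rewritten.
print(replace_last('js.cookie.js', '.js', '.min' + '.js'))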
@@ -14,23 +14,29 @@ import pelican.readers as readers
 from pelican.contents import Content
 
 
-# Python 3 __cmp__ compatibility mixin from https://stackoverflow.com/a/39166382/807307
+# Python 3 compatibility mixin from https://stackoverflow.com/a/39166382/807307
 PY3 = sys.version_info[0] >= 3
 if PY3:
     def cmp(a, b):
         return (a > b) - (a < b)
     # mixin class for Python3 supporting __cmp__
+
     class PY3__cmp__:
         def __eq__(self, other):
             return self.__cmp__(other) == 0
+
         def __ne__(self, other):
             return self.__cmp__(other) != 0
+
         def __gt__(self, other):
             return self.__cmp__(other) > 0
+
         def __lt__(self, other):
             return self.__cmp__(other) < 0
+
         def __ge__(self, other):
             return self.__cmp__(other) >= 0
+
         def __le__(self, other):
             return self.__cmp__(other) <= 0
 else:
@@ -40,7 +46,8 @@ else:
 
 class Comment(Content, PY3__cmp__):
     mandatory_properties = ('post_id', 'author')
-    default_template = 'comment'  # this is required, but not used
+    # this is required, but not used
+    default_template = 'comment'
     default_status = 'published'
 
     def __cmp__(self, other):
@@ -98,6 +105,7 @@ class CommentReader(object):
     def get_comments(self, slug):
         return self._comments[slug]
 
+
 def comment_initialization(generator):
     """
     Set up the comment plugin.
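The PY3__cmp__ mixin reformatted above lets a class keep a single Python-2-style __cmp__ and still support all six rich comparisons on Python 3. A small usage sketch, assuming the mixin from the hunk above is in scope (the Version class here is invented for illustration):

class Version(PY3__cmp__):
    def __init__(self, number):
        self.number = number

    def __cmp__(self, other):
        # cmp()-style result: negative, zero, or positive
        return (self.number > other.number) - (self.number < other.number)


assert Version(2) > Version(1)
assert Version(3) == Version(3)
assert Version(1) <= Version(2)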
@@ -54,6 +54,7 @@ def format_date(date):
         tz = "-00:00"
     return date.strftime("%Y-%m-%dT%H:%M:%S") + tz
 
+
 class SitemapGenerator(object):
 
     def __init__(self, context, settings, path, theme, output_path, *null):
@@ -63,7 +64,6 @@ class SitemapGenerator(object):
         self.now = datetime.now()
         self.siteurl = settings.get('SITEURL')
 
-
         self.default_timezone = settings.get('TIMEZONE', 'UTC')
         self.timezone = getattr(self, 'timezone', self.default_timezone)
         self.timezone = timezone(self.timezone)
@@ -103,7 +103,7 @@ class SitemapGenerator(object):
 
         valid_keys = ('articles', 'indexes', 'pages')
         valid_chfreqs = ('always', 'hourly', 'daily', 'weekly', 'monthly',
                          'yearly', 'never')
 
         if isinstance(pris, dict):
             # We use items for Py3k compat. .iteritems() otherwise
@@ -169,7 +169,7 @@ class SitemapGenerator(object):
 
             pageurl = '' if page.url == 'index.html' else page.url
 
-            #Exclude URLs from the sitemap:
+            # Exclude URLs from the sitemap:
             if self.format == 'xml':
                 flag = False
                 for regstr in self.sitemapExclude:
@@ -177,7 +177,8 @@ class SitemapGenerator(object):
                         flag = True
                         break
                 if not flag:
-                    fd.write(XML_URL.format(self.siteurl, pageurl, lastmod, chfreq, pri))
+                    fd.write(XML_URL.format(
+                        self.siteurl, pageurl, lastmod, chfreq, pri))
             else:
                 fd.write(self.siteurl + '/' + pageurl + '\n')
 
@@ -193,9 +194,11 @@ class SitemapGenerator(object):
         for (wrapper, articles) in wrappers:
             lastmod = datetime.min.replace(tzinfo=self.timezone)
             for article in articles:
-                lastmod = max(lastmod, article.date.replace(tzinfo=self.timezone))
+                lastmod = max(lastmod,
+                              article.date.replace(tzinfo=self.timezone))
                 try:
-                    modified = self.get_date_modified(article, datetime.min).replace(tzinfo=self.timezone)
+                    modified = self.get_date_modified(
+                        article, datetime.min).replace(tzinfo=self.timezone)
                     lastmod = max(lastmod, modified)
                 except ValueError:
                     # Supressed: user will be notified.
@@ -203,12 +206,13 @@ class SitemapGenerator(object):
             setattr(wrapper, 'modified', str(lastmod))
 
     def generate_output(self, writer):
-        path = os.path.join(self.output_path, 'sitemap.{0}'.format(self.format))
+        path = os.path.join(
+            self.output_path, 'sitemap.{0}'.format(self.format))
 
         pages = self.context['pages'] + self.context['articles'] \
-            + [ c for (c, a) in self.context['categories']] \
-            + [ t for (t, a) in self.context['tags']] \
-            + [ a for (a, b) in self.context['authors']]
+            + [c for (c, a) in self.context['categories']] \
+            + [t for (t, a) in self.context['tags']] \
+            + [a for (a, b) in self.context['authors']]
 
         self.set_url_wrappers_modification_date(self.context['categories'])
         self.set_url_wrappers_modification_date(self.context['tags'])
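The lastmod bookkeeping above folds every article date, and its modified date when one parses, into a single timezone-aware maximum per URL wrapper. A stripped-down sketch of that aggregation, assuming pytz provides the timezone() the plugin calls and using made-up dates:

from datetime import datetime

from pytz import timezone

tz = timezone('UTC')
article_dates = [datetime(2016, 7, 9), datetime(2018, 1, 5), datetime(2017, 3, 1)]

# Start from the smallest representable aware datetime, then keep the maximum.
lastmod = datetime.min.replace(tzinfo=tz)
for date in article_dates:
    lastmod = max(lastmod, date.replace(tzinfo=tz))

print(lastmod)  # 2018-01-05 00:00:00+00:00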
@@ -35,7 +35,6 @@ class Tipue_Search_JSON_Generator(object):
         self.output_path = output_path
         self.json_nodes = []
 
-
     def create_json_node(self, page):
 
         if getattr(page, 'status', 'published') != 'published':
@@ -61,7 +60,6 @@ class Tipue_Search_JSON_Generator(object):
 
         self.json_nodes.append(node)
 
-
     def create_tpage_node(self, srclink):
 
         srcfile = open(os.path.join(self.output_path, self.tpages[srclink]), encoding='utf-8')
@@ -80,7 +78,6 @@ class Tipue_Search_JSON_Generator(object):
 
         self.json_nodes.append(node)
 
-
     def generate_output(self, writer):
         path = os.path.join(self.output_path, 'tipuesearch_content.json')
 