pep8

parent 8ce1426785
commit edcc28d056
@@ -4,8 +4,6 @@ css-js-minify wrapper for Pelican
 """

 import glob
-import os
-import sys

 from .minify import (
     process_single_css_file,
@@ -17,6 +15,7 @@ from pelican import signals
 CSS_DIR = '/theme/css'
 JS_DIR = '/theme/js'

+
 def main(pelican):
     """ Compiler """
     for file in glob.iglob(pelican.output_path + CSS_DIR + '/**/*.css', recursive=True):
@@ -24,10 +23,12 @@ def main(pelican):
     for file in glob.iglob(pelican.output_path + JS_DIR + '/**/*.js', recursive=True):
         process_single_js_file(file, overwrite=True)

+
 def register():
     """ Register """
     signals.finalized.connect(main)

+
 SUPPORT_JS = """
 -----------------------------------------------------------------
 COMPRESSOR:
@@ -13,8 +13,8 @@ def remove_commented_lines(js):
     """Force remove commented out lines from Javascript."""
     result = ""
     for line in js.splitlines():
         line = re.sub(r"/\*.*\*/", "", line)  # (/*COMMENT */)
         line = re.sub(r"//.*", "", line)  # (//COMMENT)
         result += '\n'+line
     return result

@@ -27,11 +27,12 @@ __all__ = ('process_multiple_files', 'prefixer_extensioner',
            'make_arguments_parser', 'main')

 color = {
-    'cyan' : '\033[1;36m',
-    'end' : '\033[0m',
-    'green' : '\033[1;32m'
+    'cyan': '\033[1;36m',
+    'end': '\033[0m',
+    'green': '\033[1;32m'
 }

+
 def process_multiple_files(file_path, watch=False, wrap=False, timestamp=False,
                            comments=False, sort=False, overwrite=False,
                            zipy=False, prefix='', add_hash=False):
@@ -95,7 +96,9 @@ def process_single_css_file(css_file_path, wrap=False, timestamp=False,
                             zipy=False, prefix='', add_hash=False,
                             output_path=None):
     """Process a single CSS file."""
-    print("Processing %sCSS%s file: %s" % (color['cyan'], color['end'], css_file_path))
+    print("Processing %sCSS%s file: %s" % (color['cyan'],
+                                           color['end'],
+                                           css_file_path))
     with open(css_file_path, encoding="utf-8") as css_file:
         original_css = css_file.read()

@@ -134,7 +137,9 @@ def process_single_css_file(css_file_path, wrap=False, timestamp=False,
 def process_single_js_file(js_file_path, timestamp=False, overwrite=False,
                            zipy=False, output_path=None):
     """Process a single JS file."""
-    print("Processing %sJS%s file: %s" % (color['green'], color['end'], js_file_path))
+    print("Processing %sJS%s file: %s" % (color['green'],
+                                          color['end'],
+                                          js_file_path))
     with open(js_file_path, encoding="utf-8") as js_file:
         original_js = js_file.read()
     print("INPUT: Reading JS file %s" % js_file_path)
@@ -173,7 +178,8 @@ def make_arguments_parser():
     SHA1 HEX-Digest 11 Chars Hash on Filenames is used for Server Cache.
     CSS Properties are Alpha-Sorted, to help spot cloned ones, Selectors not.
     Watch works for whole folders, with minimum of ~60 Secs between runs.""")
-    # parser.add_argument('--version', action='version', version=css_js_minify.__version__)
+    # parser.add_argument('--version', action='version',
+    #                     version=css_js_minify.__version__)
     parser.add_argument('fullpath', metavar='fullpath', type=str,
                         help='Full path to local file or folder.')
     parser.add_argument('--wrap', action='store_true',
@@ -32,16 +32,16 @@ except ImportError:


 # Global vars
 _MAIN_SETTINGS = None  # settings dict of the main Pelican instance
 _MAIN_LANG = None  # lang of the main Pelican instance
 _MAIN_SITEURL = None  # siteurl of the main Pelican instance
-_MAIN_STATIC_FILES = None  # list of Static instances the main Pelican instance
+_MAIN_STATIC_FILES = None  # list of Static instances the main Pelican
 _SUBSITE_QUEUE = {}  # map: lang -> settings overrides
 _SITE_DB = OrderedDict()  # OrderedDict: lang -> siteurl
 _SITES_RELPATH_DB = {}  # map: (lang, base_lang) -> relpath
 # map: generator -> list of removed contents that need interlinking
 _GENERATOR_DB = {}
 _NATIVE_CONTENT_URL_DB = {}  # map: source_path -> content in its native lang
 _LOGGER = logging.getLogger(__name__)


@@ -81,30 +81,37 @@ def prepare_site_db_and_overrides():

     _SITE_DB.keys() need to be ready for filter_translations
     '''
+
     _SITE_DB.clear()
     _SITE_DB[_MAIN_LANG] = _MAIN_SITEURL
+
     # make sure it works for both root-relative and absolute
-    main_siteurl = '/' if _MAIN_SITEURL == '' else _MAIN_SITEURL
-    for lang, overrides in _SUBSITE_QUEUE.items():
+
+    main_siteurl = ('/' if _MAIN_SITEURL == '' else _MAIN_SITEURL)
+    for (lang, overrides) in _SUBSITE_QUEUE.items():
         if 'SITEURL' not in overrides:
             overrides['SITEURL'] = posixpath.join(main_siteurl, lang)
         _SITE_DB[lang] = overrides['SITEURL']
+
         # default subsite hierarchy
+
         if 'OUTPUT_PATH' not in overrides:
-            overrides['OUTPUT_PATH'] = os.path.join(
-                _MAIN_SETTINGS['OUTPUT_PATH'], lang)
+            overrides['OUTPUT_PATH'] = \
+                os.path.join(_MAIN_SETTINGS['OUTPUT_PATH'], lang)
         if 'CACHE_PATH' not in overrides:
-            overrides['CACHE_PATH'] = os.path.join(
-                _MAIN_SETTINGS['CACHE_PATH'], lang)
+            overrides['CACHE_PATH'] = \
+                os.path.join(_MAIN_SETTINGS['CACHE_PATH'], lang)
         if 'STATIC_PATHS' not in overrides:
             overrides['STATIC_PATHS'] = []
-        if ('THEME' not in overrides and 'THEME_STATIC_DIR' not in overrides and
-                'THEME_STATIC_PATHS' not in overrides):
+        if 'THEME' not in overrides and 'THEME_STATIC_DIR' \
+                not in overrides and 'THEME_STATIC_PATHS' not in overrides:
             relpath = relpath_to_site(lang, _MAIN_LANG)
             overrides['THEME_STATIC_DIR'] = posixpath.join(
                 relpath, _MAIN_SETTINGS['THEME_STATIC_DIR'])
             overrides['THEME_STATIC_PATHS'] = []
+
         # to change what is perceived as translations
+
         overrides['DEFAULT_LANG'] = lang


@@ -254,12 +261,12 @@ def filter_contents_translations(generator):
     hiding_func = inspector.hiding_function()
     untrans_policy = inspector.untranslated_policy(default='hide')
     for (contents, other_contents) in inspector.contents_list_pairs():
         for content in other_contents:  # save any hidden native content first
             if content.lang == current_lang:  # in native lang
                 # save the native URL attr formatted in the current locale
                 _NATIVE_CONTENT_URL_DB[content.source_path] = content.url
         for content in contents[:]:  # copy for removing in loop
             if content.lang == current_lang:  # in native lang
                 # save the native URL attr formatted in the current locale
                 _NATIVE_CONTENT_URL_DB[content.source_path] = content.url
             elif content.lang in langs_with_sites and untrans_policy != 'keep':
@@ -276,7 +283,7 @@ def install_templates_translations(generator):
     Only if the 'jinja2.ext.i18n' jinja2 extension is enabled
     the translations for the current DEFAULT_LANG are installed.
     '''
     if 'JINJA_ENVIRONMENT' in generator.settings:  # pelican 3.7+
         jinja_extensions = generator.settings['JINJA_ENVIRONMENT'].get(
             'extensions', [])
     else:
@@ -359,14 +366,14 @@ def interlink_static_files(generator):
     '''Add links to static files in the main site if necessary'''
     if generator.settings['STATIC_PATHS'] != []:
         return  # customized STATIC_PATHS
     try:  # minimize attr lookup
         static_content = generator.context['static_content']
     except KeyError:
         static_content = generator.context['filenames']
     relpath = relpath_to_site(generator.settings['DEFAULT_LANG'], _MAIN_LANG)
     for staticfile in _MAIN_STATIC_FILES:
         if staticfile.get_relative_source_path() not in static_content:
             staticfile = copy(staticfile)  # prevent override in main site
             staticfile.override_url = posixpath.join(relpath, staticfile.url)
             try:
                 generator.add_source_path(staticfile, static=True)
@@ -42,12 +42,12 @@ class TestSettingsManipulation(unittest.TestCase):
         self.settings['PELICAN_CLASS'] = object
         cls = i18ns.get_pelican_cls(self.settings)
         self.assertIs(cls, object)

     def test_get_pelican_cls_str(self):
         '''Test that we get correct class given by string'''
         cls = i18ns.get_pelican_cls(self.settings)
         self.assertIs(cls, Pelican)


 class TestSitesRelpath(unittest.TestCase):
     '''Test relative path between sites generation'''
@@ -72,7 +72,7 @@ class TestSitesRelpath(unittest.TestCase):
         self.assertEqual(i18ns.relpath_to_site('en', 'de'), 'de')
         self.assertEqual(i18ns.relpath_to_site('de', 'en'), '..')


 class TestRegistration(unittest.TestCase):
     '''Test plugin registration'''

@@ -91,7 +91,7 @@ class TestRegistration(unittest.TestCase):
             self.assertIn(id(handler), sig.receivers)
             # clean up
             sig.disconnect(handler)


 class TestFullRun(unittest.TestCase):
     '''Test running Pelican with the Plugin'''
@@ -8,6 +8,7 @@ variables to the article's context
 """
 from pelican import signals

+
 def iter3(seq):
     it = iter(seq)
     nxt = None
@@ -17,6 +18,7 @@ def iter3(seq):
         nxt, cur = cur, prv
     yield nxt, cur, None

+
 def get_translation(article, prefered_language):
     if not article:
         return None
@@ -25,18 +27,16 @@ def get_translation(article, prefered_language):
             return translation
     return article

+
 def set_neighbors(articles, next_name, prev_name):
     for nxt, cur, prv in iter3(articles):
         exec("cur.{} = nxt".format(next_name))
         exec("cur.{} = prv".format(prev_name))

         for translation in cur.translations:
-            exec(
-                "translation.{} = get_translation(nxt, translation.lang)".format(
-                    next_name))
-            exec(
-                "translation.{} = get_translation(prv, translation.lang)".format(
-                    prev_name))
+            exec("translation.{} = get_translation(nxt, translation.lang)".format(next_name))
+            exec("translation.{} = get_translation(prv, translation.lang)".format(prev_name))

+
 def neighbors(generator):
     set_neighbors(generator.articles, 'next_article', 'prev_article')
@@ -54,5 +54,6 @@ def neighbors(generator):
             prev_name = 'prev_article_in_subcategory{}'.format(index)
             set_neighbors(articles, next_name, prev_name)

+
 def register():
     signals.article_generator_finalized.connect(neighbors)
@@ -10,6 +10,7 @@ import shutil

 from pelican import signals

+
 def copy_resources(src, dest, file_list):
     """
     Copy files from content folder to output folder
@@ -33,6 +34,7 @@ def copy_resources(src, dest, file_list):
         file_src = os.path.join(src, file_)
         shutil.copy2(file_src, dest)

+
 def add_files(gen, metadata):
     """
     The registered handler for the dynamic resources plugin. It will
@@ -59,6 +61,7 @@ def add_files(gen, metadata):
             htmls.append(html)
         metadata[key] = htmls

+
 def move_resources(gen):
     """
     Move files from js/css folders to output folder
@@ -54,6 +54,7 @@ def format_date(date):
         tz = "-00:00"
     return date.strftime("%Y-%m-%dT%H:%M:%S") + tz

+
 class SitemapGenerator(object):

     def __init__(self, context, settings, path, theme, output_path, *null):
@@ -63,7 +64,6 @@ class SitemapGenerator(object):
         self.now = datetime.now()
         self.siteurl = settings.get('SITEURL')

-
         self.default_timezone = settings.get('TIMEZONE', 'UTC')
         self.timezone = getattr(self, 'timezone', self.default_timezone)
         self.timezone = timezone(self.timezone)
@@ -103,7 +103,7 @@ class SitemapGenerator(object):

         valid_keys = ('articles', 'indexes', 'pages')
         valid_chfreqs = ('always', 'hourly', 'daily', 'weekly', 'monthly',
                          'yearly', 'never')

         if isinstance(pris, dict):
             # We use items for Py3k compat. .iteritems() otherwise
@@ -137,7 +137,7 @@ class SitemapGenerator(object):

         if getattr(page, 'status', 'published') != 'published':
             return

         if getattr(page, 'private', 'False') == 'True':
             return

@@ -169,7 +169,7 @@ class SitemapGenerator(object):

         pageurl = '' if page.url == 'index.html' else page.url

-        #Exclude URLs from the sitemap:
+        # Exclude URLs from the sitemap:
         if self.format == 'xml':
             flag = False
             for regstr in self.sitemapExclude:
@@ -206,9 +206,9 @@ class SitemapGenerator(object):
         path = os.path.join(self.output_path, 'sitemap.{0}'.format(self.format))

         pages = self.context['pages'] + self.context['articles'] \
-            + [ c for (c, a) in self.context['categories']] \
-            + [ t for (t, a) in self.context['tags']] \
-            + [ a for (a, b) in self.context['authors']]
+            + [c for (c, a) in self.context['categories']] \
+            + [t for (t, a) in self.context['tags']] \
+            + [a for (a, b) in self.context['authors']]

         self.set_url_wrappers_modification_date(self.context['categories'])
         self.set_url_wrappers_modification_date(self.context['tags'])