# -*- coding: utf-8 -*-
import calendar
import errno
import fnmatch
import logging
import os
from collections import defaultdict
from functools import partial
from itertools import chain, groupby
from operator import attrgetter

from jinja2 import (BaseLoader, ChoiceLoader, Environment, FileSystemLoader,
                    PrefixLoader, TemplateNotFound)

from pelican.cache import FileStampDataCacher
from pelican.contents import Article, Page, Static
from pelican.plugins import signals
from pelican.readers import Readers
from pelican.utils import (DateFormatter, copy, mkdir_p, order_content,
                           posixize_path, process_translations)


logger = logging.getLogger(__name__)


class PelicanTemplateNotFound(Exception):
    pass


class Generator(object):
    """Baseclass generator"""

    def __init__(self, context, settings, path, theme, output_path,
                 readers_cache_name='', **kwargs):
        self.context = context
        self.settings = settings
        self.path = path
        self.theme = theme
        self.output_path = output_path

        for arg, value in kwargs.items():
            setattr(self, arg, value)

        self.readers = Readers(self.settings, readers_cache_name)

        # templates cache
        self._templates = {}
        self._templates_path = list(self.settings['THEME_TEMPLATES_OVERRIDES'])

        theme_templates_path = os.path.expanduser(
            os.path.join(self.theme, 'templates'))
        self._templates_path.append(theme_templates_path)
        theme_loader = FileSystemLoader(theme_templates_path)

        simple_theme_path = os.path.dirname(os.path.abspath(__file__))
        simple_loader = FileSystemLoader(
            os.path.join(simple_theme_path, "themes", "simple", "templates"))

        self.env = Environment(
            loader=ChoiceLoader([
                FileSystemLoader(self._templates_path),
                simple_loader,  # implicit inheritance
                PrefixLoader({
                    '!simple': simple_loader,
                    '!theme': theme_loader
                })  # explicit ones
            ]),
            **self.settings['JINJA_ENVIRONMENT']
        )

        logger.debug('Template list: %s', self.env.list_templates())

        # provide utils.strftime as a jinja filter
        self.env.filters.update({'strftime': DateFormatter()})

        # get custom Jinja filters from user settings
        custom_filters = self.settings['JINJA_FILTERS']
        self.env.filters.update(custom_filters)
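
        # Illustrative note (not in the original source): JINJA_FILTERS maps
        # filter names to plain callables, e.g. a settings file might define
        #     JINJA_FILTERS = {'shout': lambda text: text.upper()}
        # which templates can then use as {{ article.title|shout }}.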

        # get custom Jinja globals from user settings
        custom_globals = self.settings['JINJA_GLOBALS']
        self.env.globals.update(custom_globals)

        # get custom Jinja tests from user settings
        custom_tests = self.settings['JINJA_TESTS']
        self.env.tests.update(custom_tests)

        signals.generator_init.send(self)

    def get_template(self, name):
        """Return the template by name.

        Use self.theme to get the templates to use, and return the
        requested template, ready to use with Jinja2.
        """
        if name not in self._templates:
            for ext in self.settings['TEMPLATE_EXTENSIONS']:
                try:
                    self._templates[name] = self.env.get_template(name + ext)
                    break
                except TemplateNotFound:
                    continue

            if name not in self._templates:
                raise PelicanTemplateNotFound(
                    '[templates] unable to load {}[{}] from {}'.format(
                        name, ', '.join(self.settings['TEMPLATE_EXTENSIONS']),
                        self._templates_path))

        return self._templates[name]
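
    # Lookup sketch (illustrative): with the default TEMPLATE_EXTENSIONS of
    # ['.html'], get_template('article') resolves to the first 'article.html'
    # found in THEME_TEMPLATES_OVERRIDES, the theme, or the bundled "simple"
    # theme, and the compiled template is memoized in self._templates.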

    def _include_path(self, path, extensions=None):
        """Inclusion logic for .get_files(), returns True/False

        :param path: the path which might be included
        :param extensions: the list of allowed extensions, or False if all
            extensions are allowed
        """
        if extensions is None:
            extensions = tuple(self.readers.extensions)
        basename = os.path.basename(path)

        # check IGNORE_FILES
        ignores = self.settings['IGNORE_FILES']
        if any(fnmatch.fnmatch(basename, ignore) for ignore in ignores):
            return False

        ext = os.path.splitext(basename)[1][1:]
        if extensions is False or ext in extensions:
            return True

        return False
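
    # Filtering sketch (illustrative, assuming the Markdown reader is
    # available): _include_path('posts/intro.md') is True because 'md' is a
    # registered reader extension, while 'posts/notes.txt~' would be dropped
    # first if IGNORE_FILES contained a pattern such as '*~'.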

    def get_files(self, paths, exclude=[], extensions=None):
        """Return a set of files to use, based on rules

        :param paths: the list of paths to search (relative to self.path)
        :param exclude: the list of paths to exclude
        :param extensions: the list of allowed extensions (if False, all
            extensions are allowed)
        """
        # backward compatibility for older generators
        if isinstance(paths, str):
            paths = [paths]

        # group the exclude dir names by parent path, for use with os.walk()
        exclusions_by_dirpath = {}
        for e in exclude:
            parent_path, subdir = os.path.split(os.path.join(self.path, e))
            exclusions_by_dirpath.setdefault(parent_path, set()).add(subdir)

        files = set()
        ignores = self.settings['IGNORE_FILES']
        for path in paths:
            # careful: os.path.join() will add a slash when path == ''.
            root = os.path.join(self.path, path) if path else self.path

            if os.path.isdir(root):
                for dirpath, dirs, temp_files in os.walk(
                        root, topdown=True, followlinks=True):
                    excl = exclusions_by_dirpath.get(dirpath, ())
                    # We copy the `dirs` list as we will modify it in the loop:
                    for d in list(dirs):
                        if (d in excl or
                            any(fnmatch.fnmatch(d, ignore)
                                for ignore in ignores)):
                            if d in dirs:
                                dirs.remove(d)

                    reldir = os.path.relpath(dirpath, self.path)
                    for f in temp_files:
                        fp = os.path.join(reldir, f)
                        if self._include_path(fp, extensions):
                            files.add(fp)
            elif os.path.exists(root) and self._include_path(path, extensions):
                files.add(path)  # can't walk non-directories
        return files
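
    # Usage sketch (taken from the generators below): ArticlesGenerator calls
    #     self.get_files(self.settings['ARTICLE_PATHS'],
    #                    exclude=self.settings['ARTICLE_EXCLUDES'])
    # and receives a set of paths relative to self.path, pruned of excluded
    # directories and of IGNORE_FILES matches.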

    def add_source_path(self, content, static=False):
        """Record a source file path that a Generator found and processed.

        Store a reference to its Content object, for url lookups later.
        """
        location = content.get_relative_source_path()
        key = 'static_content' if static else 'generated_content'
        self.context[key][location] = content

    def _add_failed_source_path(self, path, static=False):
        """Record a source file path that a Generator failed to process.
        (For example, one that was missing mandatory metadata.)
        The path argument is expected to be relative to self.path.
        """
        key = 'static_content' if static else 'generated_content'
        self.context[key][posixize_path(os.path.normpath(path))] = None

    def _is_potential_source_path(self, path, static=False):
        """Return True if path was supposed to be used as a source file.
        (This includes all source files that have been found by generators
        before this method is called, even if they failed to process.)
        The path argument is expected to be relative to self.path.
        """
        key = 'static_content' if static else 'generated_content'
        return (posixize_path(os.path.normpath(path)) in self.context[key])

    def add_static_links(self, content):
        """Add file links in content to context to be processed as Static
        content.
        """
        self.context['static_links'] |= content.get_static_links()

    def _update_context(self, items):
        """Update the context with the given items from the current
        processor.
        """
        for item in items:
            value = getattr(self, item)
            if hasattr(value, 'items'):
                value = list(value.items())  # py3k safeguard for iterators
            self.context[item] = value
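
    # Example (illustrative): when ArticlesGenerator exposes self.tags, a
    # defaultdict(list), via _update_context(('tags',)), the shared context
    # receives a list of (tag, articles) pairs, because dict-like values are
    # converted with list(value.items()) above.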

    def __str__(self):
        # return the name of the class for logging purposes
        return self.__class__.__name__


class CachingGenerator(Generator, FileStampDataCacher):
    '''Subclass of Generator and FileStampDataCacher classes

    enables content caching, either at the generator or reader level
    '''

    def __init__(self, *args, **kwargs):
        '''Initialize the generator, then set up caching

        note the multiple inheritance structure
        '''
        cls_name = self.__class__.__name__
        Generator.__init__(self, *args,
                           readers_cache_name=(cls_name + '-Readers'),
                           **kwargs)

        cache_this_level = \
            self.settings['CONTENT_CACHING_LAYER'] == 'generator'
        caching_policy = cache_this_level and self.settings['CACHE_CONTENT']
        load_policy = cache_this_level and self.settings['LOAD_CONTENT_CACHE']
        FileStampDataCacher.__init__(self, self.settings, cls_name,
                                     caching_policy, load_policy)

    def _get_file_stamp(self, filename):
        '''Get filestamp for path relative to generator.path'''
        filename = os.path.join(self.path, filename)
        return super()._get_file_stamp(filename)
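

# Settings sketch (illustrative): with CACHE_CONTENT = True and
# CONTENT_CACHING_LAYER = 'generator', each CachingGenerator subclass caches
# fully processed content objects; with CONTENT_CACHING_LAYER = 'reader'
# (the default), caching happens inside Readers instead, and the
# generator-level policies above evaluate to False.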


class _FileLoader(BaseLoader):

    def __init__(self, path, basedir):
        self.path = path
        self.fullpath = os.path.join(basedir, path)

    def get_source(self, environment, template):
        if template != self.path or not os.path.exists(self.fullpath):
            raise TemplateNotFound(template)
        mtime = os.path.getmtime(self.fullpath)
        with open(self.fullpath, 'r', encoding='utf-8') as f:
            source = f.read()
        return (source, self.fullpath,
                lambda: mtime == os.path.getmtime(self.fullpath))
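
# Note (illustrative): the (source, filename, uptodate) triple returned by
# get_source() follows the jinja2.BaseLoader contract; the uptodate lambda
# keeps returning True while the file's mtime is unchanged, letting Jinja2
# reuse its compiled template instead of re-reading the source.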


class TemplatePagesGenerator(Generator):

    def generate_output(self, writer):
        for source, dest in self.settings['TEMPLATE_PAGES'].items():
            self.env.loader.loaders.insert(0, _FileLoader(source, self.path))
            try:
                template = self.env.get_template(source)
                rurls = self.settings['RELATIVE_URLS']
                writer.write_file(dest, template, self.context, rurls,
                                  override_output=True, url='')
            finally:
                del self.env.loader.loaders[0]
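
# Settings sketch (illustrative): TEMPLATE_PAGES maps template sources,
# relative to the content path, to output locations, e.g.
#     TEMPLATE_PAGES = {'src/books.html': 'dest/books.html'}
# Each source is rendered with the full site context rather than copied
# verbatim.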


class ArticlesGenerator(CachingGenerator):
    """Generate blog articles"""

    def __init__(self, *args, **kwargs):
        """initialize properties"""
        self.articles = []  # only articles in default language
        self.translations = []
        self.dates = {}
        self.tags = defaultdict(list)
        self.categories = defaultdict(list)
        self.related_posts = []
        self.authors = defaultdict(list)
        self.drafts = []  # only drafts in default language
        self.drafts_translations = []
        super().__init__(*args, **kwargs)
        signals.article_generator_init.send(self)

    def generate_feeds(self, writer):
        """Generate the feeds from the current context, and output files."""

        if self.settings.get('FEED_ATOM'):
            writer.write_feed(
                self.articles,
                self.context,
                self.settings['FEED_ATOM'],
                self.settings.get('FEED_ATOM_URL', self.settings['FEED_ATOM'])
            )

        if self.settings.get('FEED_RSS'):
            writer.write_feed(
                self.articles,
                self.context,
                self.settings['FEED_RSS'],
                self.settings.get('FEED_RSS_URL', self.settings['FEED_RSS']),
                feed_type='rss'
            )

        if (self.settings.get('FEED_ALL_ATOM') or
                self.settings.get('FEED_ALL_RSS')):
            all_articles = list(self.articles)
            for article in self.articles:
                all_articles.extend(article.translations)
            order_content(all_articles,
                          order_by=self.settings['ARTICLE_ORDER_BY'])

            if self.settings.get('FEED_ALL_ATOM'):
                writer.write_feed(
                    all_articles,
                    self.context,
                    self.settings['FEED_ALL_ATOM'],
                    self.settings.get('FEED_ALL_ATOM_URL',
                                      self.settings['FEED_ALL_ATOM'])
                )

            if self.settings.get('FEED_ALL_RSS'):
                writer.write_feed(
                    all_articles,
                    self.context,
                    self.settings['FEED_ALL_RSS'],
                    self.settings.get('FEED_ALL_RSS_URL',
                                      self.settings['FEED_ALL_RSS']),
                    feed_type='rss'
                )

        for cat, arts in self.categories:
            if self.settings.get('CATEGORY_FEED_ATOM'):
                writer.write_feed(
                    arts,
                    self.context,
                    self.settings['CATEGORY_FEED_ATOM'].format(slug=cat.slug),
                    self.settings.get(
                        'CATEGORY_FEED_ATOM_URL',
                        self.settings['CATEGORY_FEED_ATOM']
                    ).format(slug=cat.slug),
                    feed_title=cat.name
                )

            if self.settings.get('CATEGORY_FEED_RSS'):
                writer.write_feed(
                    arts,
                    self.context,
                    self.settings['CATEGORY_FEED_RSS'].format(slug=cat.slug),
                    self.settings.get(
                        'CATEGORY_FEED_RSS_URL',
                        self.settings['CATEGORY_FEED_RSS']
                    ).format(slug=cat.slug),
                    feed_title=cat.name,
                    feed_type='rss'
                )

        for auth, arts in self.authors:
            if self.settings.get('AUTHOR_FEED_ATOM'):
                writer.write_feed(
                    arts,
                    self.context,
                    self.settings['AUTHOR_FEED_ATOM'].format(slug=auth.slug),
                    self.settings.get(
                        'AUTHOR_FEED_ATOM_URL',
                        self.settings['AUTHOR_FEED_ATOM']
                    ).format(slug=auth.slug),
                    feed_title=auth.name
                )

            if self.settings.get('AUTHOR_FEED_RSS'):
                writer.write_feed(
                    arts,
                    self.context,
                    self.settings['AUTHOR_FEED_RSS'].format(slug=auth.slug),
                    self.settings.get(
                        'AUTHOR_FEED_RSS_URL',
                        self.settings['AUTHOR_FEED_RSS']
                    ).format(slug=auth.slug),
                    feed_title=auth.name,
                    feed_type='rss'
                )

        if (self.settings.get('TAG_FEED_ATOM') or
                self.settings.get('TAG_FEED_RSS')):
            for tag, arts in self.tags.items():
                if self.settings.get('TAG_FEED_ATOM'):
                    writer.write_feed(
                        arts,
                        self.context,
                        self.settings['TAG_FEED_ATOM'].format(slug=tag.slug),
                        self.settings.get(
                            'TAG_FEED_ATOM_URL',
                            self.settings['TAG_FEED_ATOM']
                        ).format(slug=tag.slug),
                        feed_title=tag.name
                    )

                if self.settings.get('TAG_FEED_RSS'):
                    writer.write_feed(
                        arts,
                        self.context,
                        self.settings['TAG_FEED_RSS'].format(slug=tag.slug),
                        self.settings.get(
                            'TAG_FEED_RSS_URL',
                            self.settings['TAG_FEED_RSS']
                        ).format(slug=tag.slug),
                        feed_title=tag.name,
                        feed_type='rss'
                    )

        if (self.settings.get('TRANSLATION_FEED_ATOM') or
                self.settings.get('TRANSLATION_FEED_RSS')):
            translations_feeds = defaultdict(list)
            for article in chain(self.articles, self.translations):
                translations_feeds[article.lang].append(article)

            for lang, items in translations_feeds.items():
                items = order_content(
                    items, order_by=self.settings['ARTICLE_ORDER_BY'])
                if self.settings.get('TRANSLATION_FEED_ATOM'):
                    writer.write_feed(
                        items,
                        self.context,
                        self.settings['TRANSLATION_FEED_ATOM']
                        .format(lang=lang),
                        self.settings.get(
                            'TRANSLATION_FEED_ATOM_URL',
                            self.settings['TRANSLATION_FEED_ATOM']
                        ).format(lang=lang),
                    )
                if self.settings.get('TRANSLATION_FEED_RSS'):
                    writer.write_feed(
                        items,
                        self.context,
                        self.settings['TRANSLATION_FEED_RSS']
                        .format(lang=lang),
                        self.settings.get(
                            'TRANSLATION_FEED_RSS_URL',
                            self.settings['TRANSLATION_FEED_RSS']
                        ).format(lang=lang),
                        feed_type='rss'
                    )
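
    # Feed-settings sketch (illustrative): the {slug} placeholder in the feed
    # paths is filled per iteration above, so with
    #     CATEGORY_FEED_ATOM = 'feeds/{slug}.atom.xml'
    # a category with slug 'python' is written to 'feeds/python.atom.xml'.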

    def generate_articles(self, write):
        """Generate the articles."""
        for article in chain(self.translations, self.articles):
            signals.article_generator_write_article.send(self,
                                                         content=article)
            write(article.save_as, self.get_template(article.template),
                  self.context, article=article, category=article.category,
                  override_output=hasattr(article, 'override_save_as'),
                  url=article.url, blog=True)

    def generate_period_archives(self, write):
        """Generate per-year, per-month, and per-day archives."""
        try:
            template = self.get_template('period_archives')
        except PelicanTemplateNotFound:
            template = self.get_template('archives')

        period_save_as = {
            'year': self.settings['YEAR_ARCHIVE_SAVE_AS'],
            'month': self.settings['MONTH_ARCHIVE_SAVE_AS'],
            'day': self.settings['DAY_ARCHIVE_SAVE_AS'],
        }

        period_url = {
            'year': self.settings['YEAR_ARCHIVE_URL'],
            'month': self.settings['MONTH_ARCHIVE_URL'],
            'day': self.settings['DAY_ARCHIVE_URL'],
        }

        period_date_key = {
            'year': attrgetter('date.year'),
            'month': attrgetter('date.year', 'date.month'),
            'day': attrgetter('date.year', 'date.month', 'date.day')
        }
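
        # Grouping-key sketch (illustrative): attrgetter with one attribute
        # yields a scalar and with several yields a tuple, so articles dated
        # 2015-06-16 group under 2015 for 'year', (2015, 6) for 'month', and
        # (2015, 6, 16) for 'day'; that is why the 'year' branch below wraps
        # _period in a one-element tuple.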

        def _generate_period_archives(dates, key, save_as_fmt, url_fmt):
            """Generate period archives from `dates`, grouped by
            `key` and written to paths built from `save_as_fmt` and
            `url_fmt`.
            """
            # `dates` is already sorted by date
            for _period, group in groupby(dates, key=key):
                archive = list(group)
                articles = [a for a in self.articles if a in archive]
                # arbitrarily grab the first date so that the usual
                # format string syntax can be used for specifying the
                # period archive dates
                date = archive[0].date
                save_as = save_as_fmt.format(date=date)
                url = url_fmt.format(date=date)
                context = self.context.copy()

                if key == period_date_key['year']:
                    context["period"] = (_period,)
                else:
                    month_name = calendar.month_name[_period[1]]
                    if key == period_date_key['month']:
                        context["period"] = (_period[0],
                                             month_name)
                    else:
                        context["period"] = (_period[0],
                                             month_name,
                                             _period[2])

                write(save_as, template, context, articles=articles,
                      dates=archive, template_name='period_archives',
                      blog=True, url=url, all_articles=self.articles)

        for period in 'year', 'month', 'day':
            save_as = period_save_as[period]
            url = period_url[period]
            if save_as:
                key = period_date_key[period]
                _generate_period_archives(self.dates, key, save_as, url)

    def generate_direct_templates(self, write):
        """Generate direct templates pages"""
        for template in self.settings['DIRECT_TEMPLATES']:
            save_as = self.settings.get("%s_SAVE_AS" % template.upper(),
                                        '%s.html' % template)
            url = self.settings.get("%s_URL" % template.upper(),
                                    '%s.html' % template)
            if not save_as:
                continue

            write(save_as, self.get_template(template), self.context,
                  articles=self.articles, dates=self.dates, blog=True,
                  template_name=template,
                  page_name=os.path.splitext(save_as)[0], url=url)

    def generate_tags(self, write):
        """Generate Tags pages."""
        tag_template = self.get_template('tag')
        for tag, articles in self.tags.items():
            dates = [article for article in self.dates if article in articles]
            write(tag.save_as, tag_template, self.context, tag=tag,
                  url=tag.url, articles=articles, dates=dates,
                  template_name='tag', blog=True, page_name=tag.page_name,
                  all_articles=self.articles)

    def generate_categories(self, write):
        """Generate category pages."""
        category_template = self.get_template('category')
        for cat, articles in self.categories:
            dates = [article for article in self.dates if article in articles]
            write(cat.save_as, category_template, self.context, url=cat.url,
                  category=cat, articles=articles, dates=dates,
                  template_name='category', blog=True,
                  page_name=cat.page_name, all_articles=self.articles)

    def generate_authors(self, write):
        """Generate Author pages."""
        author_template = self.get_template('author')
        for aut, articles in self.authors:
            dates = [article for article in self.dates if article in articles]
            write(aut.save_as, author_template, self.context,
                  url=aut.url, author=aut, articles=articles, dates=dates,
                  template_name='author', blog=True,
                  page_name=aut.page_name, all_articles=self.articles)

    def generate_drafts(self, write):
        """Generate drafts pages."""
        for draft in chain(self.drafts_translations, self.drafts):
            write(draft.save_as, self.get_template(draft.template),
                  self.context, article=draft, category=draft.category,
                  override_output=hasattr(draft, 'override_save_as'),
                  blog=True, all_articles=self.articles, url=draft.url)

    def generate_pages(self, writer):
        """Generate the pages on the disk"""
        write = partial(writer.write_file,
                        relative_urls=self.settings['RELATIVE_URLS'])

        # to minimize the number of relative path modifications in the
        # writer, articles are written first
        self.generate_articles(write)
        self.generate_period_archives(write)
        self.generate_direct_templates(write)

        # and subfolders after that
        self.generate_tags(write)
        self.generate_categories(write)
        self.generate_authors(write)
        self.generate_drafts(write)

    def generate_context(self):
        """Add the articles into the shared context"""

        all_articles = []
        all_drafts = []
        for f in self.get_files(
                self.settings['ARTICLE_PATHS'],
                exclude=self.settings['ARTICLE_EXCLUDES']):
            article = self.get_cached_data(f, None)
            if article is None:
                try:
                    article = self.readers.read_file(
                        base_path=self.path, path=f, content_class=Article,
                        context=self.context,
                        preread_signal=signals.article_generator_preread,
                        preread_sender=self,
                        context_signal=signals.article_generator_context,
                        context_sender=self)
                except Exception as e:
                    logger.error(
                        'Could not process %s\n%s', f, e,
                        exc_info=self.settings.get('DEBUG', False))
                    self._add_failed_source_path(f)
                    continue

                if not article.is_valid():
                    self._add_failed_source_path(f)
                    continue

                self.cache_data(f, article)

            if article.status == "published":
                all_articles.append(article)
            elif article.status == "draft":
                all_drafts.append(article)
            self.add_source_path(article)
            self.add_static_links(article)

        def _process(arts):
            origs, translations = process_translations(
                arts, translation_id=self.settings['ARTICLE_TRANSLATION_ID'])
            origs = order_content(origs, self.settings['ARTICLE_ORDER_BY'])
            return origs, translations

        self.articles, self.translations = _process(all_articles)
        self.drafts, self.drafts_translations = _process(all_drafts)
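
        # Behavior sketch (illustrative): process_translations() splits each
        # group sharing a translation id (by default the slug) into one
        # original plus its translations, so with DEFAULT_LANG = 'en' an
        # article written in 'en' and 'fr' lands once in self.articles and
        # once in self.translations.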

        signals.article_generator_pretaxonomy.send(self)

        for article in self.articles:
            # only main articles are listed in categories and tags
            # not translations
            self.categories[article.category].append(article)
            if hasattr(article, 'tags'):
                for tag in article.tags:
                    self.tags[tag].append(article)
            for author in getattr(article, 'authors', []):
                self.authors[author].append(article)

        self.dates = list(self.articles)
        self.dates.sort(key=attrgetter('date'),
                        reverse=self.context['NEWEST_FIRST_ARCHIVES'])

        # and generate the output :)

        # order the categories per name
        self.categories = list(self.categories.items())
        self.categories.sort(
            reverse=self.settings['REVERSE_CATEGORY_ORDER'])

        self.authors = list(self.authors.items())
        self.authors.sort()

        self._update_context(('articles', 'dates', 'tags', 'categories',
                              'authors', 'related_posts', 'drafts'))
        self.save_cache()
        self.readers.save_cache()
        signals.article_generator_finalized.send(self)

    def generate_output(self, writer):
        self.generate_feeds(writer)
        self.generate_pages(writer)
        signals.article_writer_finalized.send(self, writer=writer)

    def refresh_metadata_intersite_links(self):
        for e in chain(self.articles,
                       self.translations,
                       self.drafts,
                       self.drafts_translations):
            if hasattr(e, 'refresh_metadata_intersite_links'):
                e.refresh_metadata_intersite_links()


class PagesGenerator(CachingGenerator):
    """Generate pages"""

    def __init__(self, *args, **kwargs):
        self.pages = []
        self.translations = []
        self.hidden_pages = []
        self.hidden_translations = []
        self.draft_pages = []
        self.draft_translations = []
        super().__init__(*args, **kwargs)
        signals.page_generator_init.send(self)

    def generate_context(self):
        all_pages = []
        hidden_pages = []
        draft_pages = []
        for f in self.get_files(
                self.settings['PAGE_PATHS'],
                exclude=self.settings['PAGE_EXCLUDES']):
            page = self.get_cached_data(f, None)
            if page is None:
                try:
                    page = self.readers.read_file(
                        base_path=self.path, path=f, content_class=Page,
                        context=self.context,
                        preread_signal=signals.page_generator_preread,
                        preread_sender=self,
                        context_signal=signals.page_generator_context,
                        context_sender=self)
                except Exception as e:
                    logger.error(
                        'Could not process %s\n%s', f, e,
                        exc_info=self.settings.get('DEBUG', False))
                    self._add_failed_source_path(f)
                    continue

                if not page.is_valid():
                    self._add_failed_source_path(f)
                    continue

                self.cache_data(f, page)

            if page.status == "published":
                all_pages.append(page)
            elif page.status == "hidden":
                hidden_pages.append(page)
            elif page.status == "draft":
                draft_pages.append(page)
            self.add_source_path(page)
            self.add_static_links(page)

        def _process(pages):
            origs, translations = process_translations(
                pages, translation_id=self.settings['PAGE_TRANSLATION_ID'])
            origs = order_content(origs, self.settings['PAGE_ORDER_BY'])
            return origs, translations

        self.pages, self.translations = _process(all_pages)
        self.hidden_pages, self.hidden_translations = _process(hidden_pages)
        self.draft_pages, self.draft_translations = _process(draft_pages)

        self._update_context(('pages', 'hidden_pages', 'draft_pages'))

        self.save_cache()
        self.readers.save_cache()
        signals.page_generator_finalized.send(self)

    def generate_output(self, writer):
        for page in chain(self.translations, self.pages,
                          self.hidden_translations, self.hidden_pages,
                          self.draft_translations, self.draft_pages):
            signals.page_generator_write_page.send(self, content=page)
            writer.write_file(
                page.save_as, self.get_template(page.template),
                self.context, page=page,
                relative_urls=self.settings['RELATIVE_URLS'],
                override_output=hasattr(page, 'override_save_as'),
                url=page.url)
        signals.page_writer_finalized.send(self, writer=writer)

    def refresh_metadata_intersite_links(self):
        for e in chain(self.pages,
                       self.hidden_pages,
                       self.hidden_translations,
                       self.draft_pages,
                       self.draft_translations):
            if hasattr(e, 'refresh_metadata_intersite_links'):
                e.refresh_metadata_intersite_links()


class StaticGenerator(Generator):
    """Copy static paths (what you want to copy, like images, media, etc.)
    to output"""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.fallback_to_symlinks = False
        signals.static_generator_init.send(self)

    def generate_context(self):
        self.staticfiles = []
        linked_files = set(self.context['static_links'])
        found_files = self.get_files(self.settings['STATIC_PATHS'],
                                     exclude=self.settings['STATIC_EXCLUDES'],
                                     extensions=False)
        for f in linked_files | found_files:

            # skip content source files unless the user explicitly wants them
            if self.settings['STATIC_EXCLUDE_SOURCES']:
                if self._is_potential_source_path(f):
                    continue

            static = self.readers.read_file(
                base_path=self.path, path=f, content_class=Static,
                fmt='static', context=self.context,
                preread_signal=signals.static_generator_preread,
                preread_sender=self,
                context_signal=signals.static_generator_context,
                context_sender=self)
            self.staticfiles.append(static)
            self.add_source_path(static, static=True)
        self._update_context(('staticfiles',))
        signals.static_generator_finalized.send(self)

    def generate_output(self, writer):
        self._copy_paths(self.settings['THEME_STATIC_PATHS'], self.theme,
                         self.settings['THEME_STATIC_DIR'], self.output_path,
                         os.curdir)
        for sc in self.context['staticfiles']:
            if self._file_update_required(sc):
                self._link_or_copy_staticfile(sc)
            else:
                logger.debug('%s is up to date, not copying', sc.source_path)

    def _copy_paths(self, paths, source, destination, output_path,
                    final_path=None):
        """Copy all the paths from source to destination"""
        for path in paths:
            source_path = os.path.join(source, path)

            if final_path:
                if os.path.isfile(source_path):
                    destination_path = os.path.join(output_path, destination,
                                                    final_path,
                                                    os.path.basename(path))
                else:
                    destination_path = os.path.join(output_path, destination,
                                                    final_path)
            else:
                destination_path = os.path.join(output_path, destination,
                                                path)

            copy(source_path, destination_path,
                 self.settings['IGNORE_FILES'])

    def _file_update_required(self, staticfile):
        source_path = os.path.join(self.path, staticfile.source_path)
        save_as = os.path.join(self.output_path, staticfile.save_as)
        if not os.path.exists(save_as):
            return True
        elif (self.settings['STATIC_CREATE_LINKS'] and
              os.path.samefile(source_path, save_as)):
            return False
        elif (self.settings['STATIC_CREATE_LINKS'] and
              os.path.realpath(save_as) == source_path):
            return False
        elif not self.settings['STATIC_CHECK_IF_MODIFIED']:
            return True
        else:
            return self._source_is_newer(staticfile)

    def _source_is_newer(self, staticfile):
        source_path = os.path.join(self.path, staticfile.source_path)
        save_as = os.path.join(self.output_path, staticfile.save_as)
        s_mtime = os.path.getmtime(source_path)
        d_mtime = os.path.getmtime(save_as)
        return s_mtime - d_mtime > 0.000001
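
    # Note (illustrative): the 0.000001 margin above treats mtimes that
    # differ by less than a microsecond as equal, presumably to absorb float
    # rounding and coarse filesystem timestamp resolution when copies
    # preserve mtimes.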

    def _link_or_copy_staticfile(self, sc):
        if self.settings['STATIC_CREATE_LINKS']:
            self._link_staticfile(sc)
        else:
            self._copy_staticfile(sc)

    def _copy_staticfile(self, sc):
        source_path = os.path.join(self.path, sc.source_path)
        save_as = os.path.join(self.output_path, sc.save_as)
        self._mkdir(os.path.dirname(save_as))
        copy(source_path, save_as)
        logger.info('Copying %s to %s', sc.source_path, sc.save_as)

    def _link_staticfile(self, sc):
        source_path = os.path.join(self.path, sc.source_path)
        save_as = os.path.join(self.output_path, sc.save_as)
        self._mkdir(os.path.dirname(save_as))
        try:
            if os.path.lexists(save_as):
                os.unlink(save_as)
            logger.info('Linking %s and %s', sc.source_path, sc.save_as)
            if self.fallback_to_symlinks:
                os.symlink(source_path, save_as)
            else:
                os.link(source_path, save_as)
        except OSError as err:
            if err.errno == errno.EXDEV:  # 18: Invalid cross-device link
                logger.debug(
                    "Cross-device links not valid. "
                    "Creating symbolic links instead."
                )
                self.fallback_to_symlinks = True
                self._link_staticfile(sc)
            else:
                raise err

    def _mkdir(self, path):
        if os.path.lexists(path) and not os.path.isdir(path):
            os.unlink(path)
        mkdir_p(path)


class SourceFileGenerator(Generator):

    def generate_context(self):
        self.output_extension = self.settings['OUTPUT_SOURCES_EXTENSION']

    def _create_source(self, obj):
        output_path, _ = os.path.splitext(obj.save_as)
        dest = os.path.join(self.output_path,
                            output_path + self.output_extension)
        copy(obj.source_path, dest)

    def generate_output(self, writer=None):
        logger.info('Generating source files...')
        for obj in chain(self.context['articles'], self.context['pages']):
            self._create_source(obj)
            for obj_trans in obj.translations:
                self._create_source(obj_trans)