# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function

import os
import six
import math
import random
import logging
import shutil
import fnmatch
import calendar

from codecs import open
from collections import defaultdict
from functools import partial
from itertools import chain, groupby
from operator import attrgetter, itemgetter

from jinja2 import (Environment, FileSystemLoader, PrefixLoader, ChoiceLoader,
                    BaseLoader, TemplateNotFound)

from pelican.contents import Article, Draft, Page, Static, is_valid_content
from pelican.readers import Readers
from pelican.utils import (copy, process_translations, mkdir_p, DateFormatter,
                           FileStampDataCacher)
from pelican import signals


logger = logging.getLogger(__name__)


class Generator(object):
    """Baseclass generator"""

    def __init__(self, context, settings, path, theme, output_path,
                 readers_cache_name='', **kwargs):
        self.context = context
        self.settings = settings
        self.path = path
        self.theme = theme
        self.output_path = output_path

        for arg, value in kwargs.items():
            setattr(self, arg, value)

        self.readers = Readers(self.settings, readers_cache_name)

        # templates cache
        self._templates = {}
        self._templates_path = []
        self._templates_path.append(os.path.expanduser(
            os.path.join(self.theme, 'templates')))
        self._templates_path += self.settings['EXTRA_TEMPLATES_PATHS']

        theme_path = os.path.dirname(os.path.abspath(__file__))

        simple_loader = FileSystemLoader(os.path.join(theme_path,
                                         "themes", "simple", "templates"))
        self.env = Environment(
            trim_blocks=True,
            lstrip_blocks=True,
            loader=ChoiceLoader([
                FileSystemLoader(self._templates_path),
                simple_loader,  # implicit inheritance
                PrefixLoader({'!simple': simple_loader})  # explicit one
            ]),
            extensions=self.settings['JINJA_EXTENSIONS'],
        )
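        # illustrative note: the PrefixLoader above lets a theme build on
        # the bundled simple theme explicitly, e.g. a theme template may
        # start with {% extends '!simple/index.html' %}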

        logger.debug('template list: {0}'.format(self.env.list_templates()))

        # provide utils.strftime as a jinja filter
        self.env.filters.update({'strftime': DateFormatter()})

        # get custom Jinja filters from user settings
        custom_filters = self.settings['JINJA_FILTERS']
        self.env.filters.update(custom_filters)

        signals.generator_init.send(self)

    def get_template(self, name):
        """Return the template by name.

        Use self.theme to locate the template, cache it, and return a
        Jinja2 template ready to render.
        """
        if name not in self._templates:
            try:
                self._templates[name] = self.env.get_template(name + '.html')
            except TemplateNotFound:
                raise Exception('[templates] unable to load %s.html from %s'
                                % (name, self._templates_path))
        return self._templates[name]

    def _include_path(self, path, extensions=None):
        """Inclusion logic for .get_files(); returns True or False.

        :param path: the path which might be included
        :param extensions: the list of allowed extensions (if False, all
            extensions are allowed)
        """
        if extensions is None:
            extensions = tuple(self.readers.extensions)
        basename = os.path.basename(path)

        # check IGNORE_FILES
        ignores = self.settings['IGNORE_FILES']
        if any(fnmatch.fnmatch(basename, ignore) for ignore in ignores):
            return False

        if extensions is False or basename.endswith(extensions):
            return True
        return False
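
    # Illustrative example: IGNORE_FILES holds fnmatch patterns matched
    # against the basename, so a hypothetical IGNORE_FILES = ['.#*', '*~']
    # makes _include_path() reject editor lock and backup files such as
    # '.#draft.md' or 'notes.rst~' regardless of their extension.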

    def get_files(self, paths, exclude=[], extensions=None):
        """Return a list of files to use, based on rules

        :param paths: the list of paths to search (relative to self.path)
        :param exclude: the list of paths to exclude
        :param extensions: the list of allowed extensions (if False, all
            extensions are allowed)
        """
        if isinstance(paths, six.string_types):
            paths = [paths]  # backward compatibility for older generators

        files = []
        for path in paths:
            root = os.path.join(self.path, path)

            if os.path.isdir(root):
                for dirpath, dirs, temp_files in os.walk(root,
                                                         followlinks=True):
                    for e in exclude:
                        if e in dirs:
                            dirs.remove(e)
                    reldir = os.path.relpath(dirpath, self.path)
                    for f in temp_files:
                        fp = os.path.join(reldir, f)
                        if self._include_path(fp, extensions):
                            files.append(fp)
            elif os.path.exists(root) and self._include_path(path, extensions):
                files.append(path)  # can't walk non-directories
        return files
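
    # Illustrative usage: assuming articles live under content/posts, a call
    # like self.get_files(['posts'], exclude=['drafts']) walks the tree,
    # skips any 'drafts' subdirectory, and returns paths such as
    # 'posts/2014/foo.md', relative to self.path.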

    def add_source_path(self, content):
        location = content.get_relative_source_path()
        self.context['filenames'][location] = content

    def _update_context(self, items):
        """Update the context with the given items from the current
        processor.
        """
        for item in items:
            value = getattr(self, item)
            if hasattr(value, 'items'):
                value = list(value.items())  # py3k safeguard for iterators
            self.context[item] = value
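
    # Illustrative example: a call like _update_context(('articles', 'tags'))
    # copies self.articles into the shared context as-is, while self.tags
    # (a dict) is flattened into a list of (tag, articles) pairs first.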


class CachingGenerator(Generator, FileStampDataCacher):
    '''Subclass of Generator and FileStampDataCacher classes

    enables content caching, either at the generator or reader level
    '''

    def __init__(self, *args, **kwargs):
        '''Initialize the generator, then set up caching

        note the multiple inheritance structure
        '''
        cls_name = self.__class__.__name__
        Generator.__init__(self, *args,
                           readers_cache_name=(cls_name + '-Readers'),
                           **kwargs)

        cache_this_level = (
            self.settings['CONTENT_CACHING_LAYER'] == 'generator')
        caching_policy = cache_this_level and self.settings['CACHE_CONTENT']
        load_policy = cache_this_level and self.settings['LOAD_CONTENT_CACHE']
        FileStampDataCacher.__init__(self, self.settings, cls_name,
                                     caching_policy, load_policy)
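
        # caching happens at this level only when CONTENT_CACHING_LAYER is
        # 'generator'; with the 'reader' setting both policies above
        # evaluate to False and caching is instead handled by the Readers
        # instance created in Generator.__init__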

    def _get_file_stamp(self, filename):
        '''Get filestamp for path relative to generator.path'''
        filename = os.path.join(self.path, filename)
        return super(CachingGenerator, self)._get_file_stamp(filename)


class _FileLoader(BaseLoader):

    def __init__(self, path, basedir):
        self.path = path
        self.fullpath = os.path.join(basedir, path)

    def get_source(self, environment, template):
        if template != self.path or not os.path.exists(self.fullpath):
            raise TemplateNotFound(template)
        mtime = os.path.getmtime(self.fullpath)
        with open(self.fullpath, 'r', encoding='utf-8') as f:
            source = f.read()
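        # the third element of the tuple below is Jinja2's `uptodate`
        # callable; it turns False as soon as the file's mtime changes,
        # telling Jinja2 that its cached template is stale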
        return (source, self.fullpath,
                lambda: mtime == os.path.getmtime(self.fullpath))


class TemplatePagesGenerator(Generator):

    def generate_output(self, writer):
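        # TEMPLATE_PAGES maps source templates to output paths, e.g. a
        # hypothetical {'src/books.html': 'dest/books.html'} renders
        # src/books.html through Jinja2 and writes it to dest/books.html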
        for source, dest in self.settings['TEMPLATE_PAGES'].items():
            self.env.loader.loaders.insert(0, _FileLoader(source, self.path))
            try:
                template = self.env.get_template(source)
                rurls = self.settings['RELATIVE_URLS']
                writer.write_file(dest, template, self.context, rurls,
                                  override_output=True)
            finally:
                del self.env.loader.loaders[0]


class ArticlesGenerator(CachingGenerator):
    """Generate blog articles"""

    def __init__(self, *args, **kwargs):
        """initialize properties"""
        self.articles = []  # only articles in default language
        self.translations = []
        self.dates = {}
        self.tags = defaultdict(list)
        self.categories = defaultdict(list)
        self.related_posts = []
        self.authors = defaultdict(list)
        self.drafts = []  # only drafts in default language
        self.drafts_translations = []
        super(ArticlesGenerator, self).__init__(*args, **kwargs)
        signals.article_generator_init.send(self)

    def generate_feeds(self, writer):
        """Generate the feeds from the current context, and output files."""

        if self.settings.get('FEED_ATOM'):
            writer.write_feed(self.articles, self.context,
                              self.settings['FEED_ATOM'])

        if self.settings.get('FEED_RSS'):
            writer.write_feed(self.articles, self.context,
                              self.settings['FEED_RSS'], feed_type='rss')

        if (self.settings.get('FEED_ALL_ATOM')
                or self.settings.get('FEED_ALL_RSS')):
            all_articles = list(self.articles)
            for article in self.articles:
                all_articles.extend(article.translations)
            all_articles.sort(key=attrgetter('date'), reverse=True)

            if self.settings.get('FEED_ALL_ATOM'):
                writer.write_feed(all_articles, self.context,
                                  self.settings['FEED_ALL_ATOM'])

            if self.settings.get('FEED_ALL_RSS'):
                writer.write_feed(all_articles, self.context,
                                  self.settings['FEED_ALL_RSS'],
                                  feed_type='rss')

        for cat, arts in self.categories:
            arts.sort(key=attrgetter('date'), reverse=True)
            if self.settings.get('CATEGORY_FEED_ATOM'):
                writer.write_feed(arts, self.context,
                                  self.settings['CATEGORY_FEED_ATOM']
                                  % cat.slug)

            if self.settings.get('CATEGORY_FEED_RSS'):
                writer.write_feed(arts, self.context,
                                  self.settings['CATEGORY_FEED_RSS']
                                  % cat.slug, feed_type='rss')

        for auth, arts in self.authors:
            arts.sort(key=attrgetter('date'), reverse=True)
            if self.settings.get('AUTHOR_FEED_ATOM'):
                writer.write_feed(arts, self.context,
                                  self.settings['AUTHOR_FEED_ATOM']
                                  % auth.slug)

            if self.settings.get('AUTHOR_FEED_RSS'):
                writer.write_feed(arts, self.context,
                                  self.settings['AUTHOR_FEED_RSS']
                                  % auth.slug, feed_type='rss')

        if (self.settings.get('TAG_FEED_ATOM')
                or self.settings.get('TAG_FEED_RSS')):
            for tag, arts in self.tags.items():
                arts.sort(key=attrgetter('date'), reverse=True)
                if self.settings.get('TAG_FEED_ATOM'):
                    writer.write_feed(arts, self.context,
                                      self.settings['TAG_FEED_ATOM']
                                      % tag.slug)

                if self.settings.get('TAG_FEED_RSS'):
                    writer.write_feed(arts, self.context,
                                      self.settings['TAG_FEED_RSS'] % tag.slug,
                                      feed_type='rss')

        if (self.settings.get('TRANSLATION_FEED_ATOM')
                or self.settings.get('TRANSLATION_FEED_RSS')):
            translations_feeds = defaultdict(list)
            for article in chain(self.articles, self.translations):
                translations_feeds[article.lang].append(article)

            for lang, items in translations_feeds.items():
                items.sort(key=attrgetter('date'), reverse=True)
                if self.settings.get('TRANSLATION_FEED_ATOM'):
                    writer.write_feed(
                        items, self.context,
                        self.settings['TRANSLATION_FEED_ATOM'] % lang)
                if self.settings.get('TRANSLATION_FEED_RSS'):
                    writer.write_feed(
                        items, self.context,
                        self.settings['TRANSLATION_FEED_RSS'] % lang,
                        feed_type='rss')

    def generate_articles(self, write):
        """Generate the articles."""
        for article in chain(self.translations, self.articles):
            signals.article_generator_write_article.send(self,
                                                         content=article)
            write(article.save_as, self.get_template(article.template),
                  self.context, article=article, category=article.category,
                  override_output=hasattr(article, 'override_save_as'))

    def generate_period_archives(self, write):
        """Generate per-year, per-month, and per-day archives."""
        try:
            template = self.get_template('period_archives')
        except Exception:
            template = self.get_template('archives')

        period_save_as = {
            'year': self.settings['YEAR_ARCHIVE_SAVE_AS'],
            'month': self.settings['MONTH_ARCHIVE_SAVE_AS'],
            'day': self.settings['DAY_ARCHIVE_SAVE_AS'],
        }
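        # these save_as values are new-style format strings fed with a
        # representative date, e.g. a hypothetical YEAR_ARCHIVE_SAVE_AS =
        # 'posts/{date:%Y}/index.html' yields 'posts/2014/index.html'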

        period_date_key = {
            'year': attrgetter('date.year'),
            'month': attrgetter('date.year', 'date.month'),
            'day': attrgetter('date.year', 'date.month', 'date.day')
        }

        def _generate_period_archives(dates, key, save_as_fmt):
            """Generate period archives from `dates`, grouped by
            `key` and written to paths derived from `save_as_fmt`.
            """
            # `dates` is already sorted by date
            for _period, group in groupby(dates, key=key):
                archive = list(group)
                # arbitrarily grab the first date so that the usual
                # format string syntax can be used for specifying the
                # period archive dates
                date = archive[0].date
                save_as = save_as_fmt.format(date=date)
                context = self.context.copy()

                if key == period_date_key['year']:
                    context["period"] = (_period,)
                else:
                    month_name = calendar.month_name[_period[1]]
                    if not six.PY3:
                        month_name = month_name.decode('utf-8')
                    if key == period_date_key['month']:
                        context["period"] = (_period[0],
                                             month_name)
                    else:
                        context["period"] = (_period[0],
                                             month_name,
                                             _period[2])

                write(save_as, template, context,
                      dates=archive, blog=True)

        for period in 'year', 'month', 'day':
            save_as = period_save_as[period]
            if save_as:
                key = period_date_key[period]
                _generate_period_archives(self.dates, key, save_as)

    def generate_direct_templates(self, write):
        """Generate direct templates pages"""
        PAGINATED_TEMPLATES = self.settings['PAGINATED_DIRECT_TEMPLATES']
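        # each name in DIRECT_TEMPLATES is rendered once, e.g. a
        # hypothetical DIRECT_TEMPLATES = ['index', 'archives'] produces
        # index.html and archives.html from the theme templates of the same
        # names, unless a *_SAVE_AS setting overrides the destination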
        for template in self.settings['DIRECT_TEMPLATES']:
            paginated = {}
            if template in PAGINATED_TEMPLATES:
                paginated = {'articles': self.articles, 'dates': self.dates}
            save_as = self.settings.get("%s_SAVE_AS" % template.upper(),
                                        '%s.html' % template)
            if not save_as:
                continue

            write(save_as, self.get_template(template),
                  self.context, blog=True, paginated=paginated,
                  page_name=os.path.splitext(save_as)[0])

    def generate_tags(self, write):
        """Generate Tags pages."""
        tag_template = self.get_template('tag')
        for tag, articles in self.tags.items():
            articles.sort(key=attrgetter('date'), reverse=True)
            dates = [article for article in self.dates if article in articles]
            write(tag.save_as, tag_template, self.context, tag=tag,
                  articles=articles, dates=dates,
                  paginated={'articles': articles, 'dates': dates},
                  page_name=tag.page_name, all_articles=self.articles)

    def generate_categories(self, write):
        """Generate category pages."""
        category_template = self.get_template('category')
        for cat, articles in self.categories:
            articles.sort(key=attrgetter('date'), reverse=True)
            dates = [article for article in self.dates if article in articles]
            write(cat.save_as, category_template, self.context,
                  category=cat, articles=articles, dates=dates,
                  paginated={'articles': articles, 'dates': dates},
                  page_name=cat.page_name, all_articles=self.articles)

    def generate_authors(self, write):
        """Generate Author pages."""
        author_template = self.get_template('author')
        for aut, articles in self.authors:
            articles.sort(key=attrgetter('date'), reverse=True)
            dates = [article for article in self.dates if article in articles]
            write(aut.save_as, author_template, self.context,
                  author=aut, articles=articles, dates=dates,
                  paginated={'articles': articles, 'dates': dates},
                  page_name=aut.page_name, all_articles=self.articles)

    def generate_drafts(self, write):
        """Generate drafts pages."""
        for draft in chain(self.drafts_translations, self.drafts):
            write(draft.save_as, self.get_template(draft.template),
                  self.context, article=draft, category=draft.category,
                  override_output=hasattr(draft, 'override_save_as'),
                  all_articles=self.articles)

    def generate_pages(self, writer):
        """Generate the pages on the disk"""
        write = partial(writer.write_file,
                        relative_urls=self.settings['RELATIVE_URLS'])

        # to minimize the number of relative path modifications in the
        # writer, articles are written first
        self.generate_articles(write)
        self.generate_period_archives(write)
        self.generate_direct_templates(write)

        # and subfolders after that
        self.generate_tags(write)
        self.generate_categories(write)
        self.generate_authors(write)
        self.generate_drafts(write)

    def generate_context(self):
        """Add the articles into the shared context"""

        all_articles = []
        all_drafts = []
        for f in self.get_files(
                self.settings['ARTICLE_PATHS'],
                exclude=self.settings['ARTICLE_EXCLUDES']):
            article = self.get_cached_data(f, None)
            if article is None:
                try:
                    article = self.readers.read_file(
                        base_path=self.path, path=f, content_class=Article,
                        context=self.context,
                        preread_signal=signals.article_generator_preread,
                        preread_sender=self,
                        context_signal=signals.article_generator_context,
                        context_sender=self)
                except Exception as e:
                    logger.warning('Could not process {}\n{}'.format(f, e))
                    continue

                if not is_valid_content(article, f):
                    continue

                self.cache_data(f, article)

            self.add_source_path(article)

            if article.status.lower() == "published":
                all_articles.append(article)
            elif article.status.lower() == "draft":
                draft = self.readers.read_file(
                    base_path=self.path, path=f, content_class=Draft,
                    context=self.context,
                    preread_signal=signals.article_generator_preread,
                    preread_sender=self,
                    context_signal=signals.article_generator_context,
                    context_sender=self)
                all_drafts.append(draft)
            else:
                logger.warning("Unknown status %s for file %s, skipping it." %
                               (repr(article.status), repr(f)))

        self.articles, self.translations = process_translations(all_articles)
        self.drafts, self.drafts_translations = \
            process_translations(all_drafts)

        signals.article_generator_pretaxonomy.send(self)

        for article in self.articles:
            # only main articles are listed in categories and tags,
            # not translations
            self.categories[article.category].append(article)
            if hasattr(article, 'tags'):
                for tag in article.tags:
                    self.tags[tag].append(article)
            # ignore blank authors as well as undefined
            for author in getattr(article, 'authors', []):
                if author.name != '':
                    self.authors[author].append(article)

        # sort the articles by date
        self.articles.sort(key=attrgetter('date'), reverse=True)
        self.dates = list(self.articles)
        self.dates.sort(key=attrgetter('date'),
                        reverse=self.context['NEWEST_FIRST_ARCHIVES'])

        # create tag cloud
        tag_cloud = defaultdict(int)
        for article in self.articles:
            for tag in getattr(article, 'tags', []):
                tag_cloud[tag] += 1

        tag_cloud = sorted(tag_cloud.items(), key=itemgetter(1), reverse=True)
        tag_cloud = tag_cloud[:self.settings.get('TAG_CLOUD_MAX_ITEMS')]

        tags = list(map(itemgetter(1), tag_cloud))
        if tags:
            max_count = max(tags)
        steps = self.settings.get('TAG_CLOUD_STEPS')

        # calculate word sizes
        self.tag_cloud = [
            (
                tag,
                int(math.floor(steps - (steps - 1) * math.log(count)
                               / (math.log(max_count) or 1)))
            )
            for tag, count in tag_cloud
        ]
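        # worked example: with steps = 4 and max_count = 16, a tag used 16
        # times gets size 1 (largest) and a tag used once gets size 4
        # (smallest), since log(1) == 0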
        # put words in chaos
        random.shuffle(self.tag_cloud)

        # and generate the output :)

        # order the categories per name
        self.categories = list(self.categories.items())
        self.categories.sort(
            reverse=self.settings['REVERSE_CATEGORY_ORDER'])

        self.authors = list(self.authors.items())
        self.authors.sort()

        self._update_context(('articles', 'dates', 'tags', 'categories',
                              'tag_cloud', 'authors', 'related_posts'))
        self.save_cache()
        self.readers.save_cache()
        signals.article_generator_finalized.send(self)

    def generate_output(self, writer):
        self.generate_feeds(writer)
        self.generate_pages(writer)
        signals.article_writer_finalized.send(self, writer=writer)


class PagesGenerator(CachingGenerator):
    """Generate pages"""

    def __init__(self, *args, **kwargs):
        self.pages = []
        self.hidden_pages = []
        self.hidden_translations = []
        super(PagesGenerator, self).__init__(*args, **kwargs)
        signals.page_generator_init.send(self)

    def generate_context(self):
        all_pages = []
        hidden_pages = []
        for f in self.get_files(
                self.settings['PAGE_PATHS'],
                exclude=self.settings['PAGE_EXCLUDES']):
            page = self.get_cached_data(f, None)
            if page is None:
                try:
                    page = self.readers.read_file(
                        base_path=self.path, path=f, content_class=Page,
                        context=self.context,
                        preread_signal=signals.page_generator_preread,
                        preread_sender=self,
                        context_signal=signals.page_generator_context,
                        context_sender=self)
                except Exception as e:
                    logger.warning('Could not process {}\n{}'.format(f, e))
                    continue

                if not is_valid_content(page, f):
                    continue

                self.cache_data(f, page)

            self.add_source_path(page)

            if page.status == "published":
                all_pages.append(page)
            elif page.status == "hidden":
                hidden_pages.append(page)
            else:
                logger.warning("Unknown status %s for file %s, skipping it." %
                               (repr(page.status), repr(f)))

        self.pages, self.translations = process_translations(all_pages)
        self.hidden_pages, self.hidden_translations = (
            process_translations(hidden_pages))

        self._update_context(('pages', ))
        self.context['PAGES'] = self.pages

        self.save_cache()
        self.readers.save_cache()
        signals.page_generator_finalized.send(self)

    def generate_output(self, writer):
        for page in chain(self.translations, self.pages,
                          self.hidden_translations, self.hidden_pages):
            writer.write_file(
                page.save_as, self.get_template(page.template),
                self.context, page=page,
                relative_urls=self.settings['RELATIVE_URLS'],
                override_output=hasattr(page, 'override_save_as'))


class StaticGenerator(Generator):
    """Copy static paths (what you want to copy, like images, media,
    etc.) to the output"""

    def __init__(self, *args, **kwargs):
        super(StaticGenerator, self).__init__(*args, **kwargs)
        signals.static_generator_init.send(self)

    def _copy_paths(self, paths, source, destination, output_path,
                    final_path=None):
        """Copy all the paths from source to destination"""
        for path in paths:
            if final_path:
                copy(os.path.join(source, path),
                     os.path.join(output_path, destination, final_path))
            else:
                copy(os.path.join(source, path),
                     os.path.join(output_path, destination, path))

    def generate_context(self):
        self.staticfiles = []
        for f in self.get_files(self.settings['STATIC_PATHS'],
                                extensions=False):
            static = self.readers.read_file(
                base_path=self.path, path=f, content_class=Static,
                fmt='static', context=self.context,
                preread_signal=signals.static_generator_preread,
                preread_sender=self,
                context_signal=signals.static_generator_context,
                context_sender=self)
            self.staticfiles.append(static)
            self.add_source_path(static)
        self._update_context(('staticfiles',))
        signals.static_generator_finalized.send(self)

    def generate_output(self, writer):
        self._copy_paths(self.settings['THEME_STATIC_PATHS'], self.theme,
                         self.settings['THEME_STATIC_DIR'], self.output_path,
                         os.curdir)
        # copy all Static files
        for sc in self.context['staticfiles']:
            source_path = os.path.join(self.path, sc.source_path)
            save_as = os.path.join(self.output_path, sc.save_as)
            mkdir_p(os.path.dirname(save_as))
            shutil.copy2(source_path, save_as)
            logger.info('copying {} to {}'.format(sc.source_path, sc.save_as))


class SourceFileGenerator(Generator):

    def generate_context(self):
        self.output_extension = self.settings['OUTPUT_SOURCES_EXTENSION']

    def _create_source(self, obj):
        output_path, _ = os.path.splitext(obj.save_as)
        dest = os.path.join(self.output_path,
                            output_path + self.output_extension)
        copy(obj.source_path, dest)

    def generate_output(self, writer=None):
        logger.info(' Generating source files...')
        for obj in chain(self.context['articles'], self.context['pages']):
            self._create_source(obj)
            for obj_trans in obj.translations:
                self._create_source(obj_trans)