# -*- coding: utf-8 -*-
import os
import math
import random
import logging
import datetime
import subprocess

from collections import defaultdict
from functools import partial
from itertools import chain
from operator import attrgetter, itemgetter

from jinja2 import Environment, FileSystemLoader, PrefixLoader, ChoiceLoader
from jinja2.exceptions import TemplateNotFound

from pelican.contents import Article, Page, Category, is_valid_content
from pelican.readers import read_file
from pelican.utils import copy, process_translations, open


logger = logging.getLogger(__name__)


class Generator(object):
    """Base class for the generators"""

    def __init__(self, *args, **kwargs):
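        # positional arguments are mapped onto attributes in this fixed
        # order, so a call would look like (a sketch of the expected call,
        # not a documented API):
        #   Generator(context, settings, path, theme, output_path, markup)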
        for idx, item in enumerate(('context', 'settings', 'path', 'theme',
                'output_path', 'markup')):
            setattr(self, item, args[idx])

        for arg, value in kwargs.items():
            setattr(self, arg, value)

        # templates cache
        self._templates = {}
        self._templates_path = os.path.expanduser(
                os.path.join(self.theme, 'templates'))

        theme_path = os.path.dirname(os.path.abspath(__file__))

        simple_loader = FileSystemLoader(os.path.join(theme_path,
                "themes", "simple", "templates"))
        self._env = Environment(
            loader=ChoiceLoader([
                FileSystemLoader(self._templates_path),
                simple_loader,  # implicit inheritance
                PrefixLoader({'!simple': simple_loader})  # explicit one
            ]),
            extensions=self.settings.get('JINJA_EXTENSIONS', []),
        )

        logger.debug('template list: {0}'.format(self._env.list_templates()))

        # get custom Jinja filters from user settings
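        # e.g. JINJA_FILTERS = {'shout': lambda s: s.upper()} in the user
        # settings would expose a ``shout`` filter to every template
        # (hypothetical filter, shown only to illustrate the mapping)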
        custom_filters = self.settings.get('JINJA_FILTERS', {})
        self._env.filters.update(custom_filters)

    def get_template(self, name):
        """Return the template by name.

        Use self.theme to find the template, and return it ready to use
        with Jinja2; templates are cached after the first lookup.
        """
        if name not in self._templates:
            try:
                self._templates[name] = self._env.get_template(name + '.html')
            except TemplateNotFound:
                raise Exception('[templates] unable to load %s.html from %s'
                        % (name, self._templates_path))
        return self._templates[name]
    def get_files(self, path, exclude=[], extensions=None):
        """Return a list of files to use, based on rules

        :param path: the path to search for files in
        :param exclude: the list of directory names to exclude from the walk
        :param extensions: the file extensions to keep; defaults to
                           self.markup
        """
|
2010-11-20 02:25:42 +00:00
|
|
|
if not extensions:
|
|
|
|
|
extensions = self.markup
|
|
|
|
|
|
2010-10-30 20:17:23 +01:00
|
|
|
files = []
|
2011-05-06 17:01:34 +06:00
|
|
|
|
|
|
|
|
try:
|
|
|
|
|
iter = os.walk(path, followlinks=True)
|
2012-03-09 16:21:38 +01:00
|
|
|
except TypeError: # python 2.5 does not support followlinks
|
2011-05-06 17:01:34 +06:00
|
|
|
iter = os.walk(path)
|
|
|
|
|
|
|
|
|
|
for root, dirs, temp_files in iter:
|
2010-10-30 20:17:23 +01:00
|
|
|
for e in exclude:
|
|
|
|
|
if e in dirs:
|
|
|
|
|
dirs.remove(e)
|
|
|
|
|
files.extend([os.sep.join((root, f)) for f in temp_files
|
2010-11-20 02:25:42 +00:00
|
|
|
if True in [f.endswith(ext) for ext in extensions]])
|
2010-10-30 20:17:23 +01:00
|
|
|
return files

    def _update_context(self, items):
        """Update the context with the given items from the current
        processor.
        """
        for item in items:
            value = getattr(self, item)
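            # mappings are exposed to the templates as lists of
            # (key, value) pairs rather than as dicts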
            if hasattr(value, 'items'):
                value = value.items()
            self.context[item] = value


class ArticlesGenerator(Generator):
    """Generate blog articles"""

    def __init__(self, *args, **kwargs):
        """initialize properties"""
        self.articles = []  # only articles in default language
        self.translations = []
        self.dates = {}
        self.tags = defaultdict(list)
        self.categories = defaultdict(list)
        self.authors = defaultdict(list)
        super(ArticlesGenerator, self).__init__(*args, **kwargs)
        self.drafts = []

    def generate_feeds(self, writer):
        """Generate the feeds from the current context, and output files."""
        if self.settings.get('FEED'):
            writer.write_feed(self.articles, self.context,
                    self.settings['FEED'])

        if self.settings.get('FEED_RSS'):
            writer.write_feed(self.articles, self.context,
                    self.settings['FEED_RSS'], feed_type='rss')

        for cat, arts in self.categories:
            arts.sort(key=attrgetter('date'), reverse=True)
            if self.settings.get('CATEGORY_FEED'):
                writer.write_feed(arts, self.context,
                        self.settings['CATEGORY_FEED'] % cat)

            if self.settings.get('CATEGORY_FEED_RSS'):
                writer.write_feed(arts, self.context,
                        self.settings['CATEGORY_FEED_RSS'] % cat,
                        feed_type='rss')

        if self.settings.get('TAG_FEED') or self.settings.get('TAG_FEED_RSS'):
            for tag, arts in self.tags.items():
                arts.sort(key=attrgetter('date'), reverse=True)
                if self.settings.get('TAG_FEED'):
                    writer.write_feed(arts, self.context,
                            self.settings['TAG_FEED'] % tag)

                if self.settings.get('TAG_FEED_RSS'):
                    writer.write_feed(arts, self.context,
                            self.settings['TAG_FEED_RSS'] % tag,
                            feed_type='rss')

        if self.settings.get('TRANSLATION_FEED'):
            translations_feeds = defaultdict(list)
            for article in chain(self.articles, self.translations):
                translations_feeds[article.lang].append(article)

            for lang, items in translations_feeds.items():
                items.sort(key=attrgetter('date'), reverse=True)
                writer.write_feed(items, self.context,
                        self.settings['TRANSLATION_FEED'] % lang)

    def generate_pages(self, writer):
        """Generate the pages on the disk"""

        write = partial(writer.write_file,
                relative_urls=self.settings.get('RELATIVE_URLS'))

        # to minimize the amount of relative-path handling in the writer,
        # the articles are written first
        article_template = self.get_template('article')
        for article in chain(self.translations, self.articles):
            write(article.save_as,
                    article_template, self.context, article=article,
                    category=article.category)

        PAGINATED_TEMPLATES = self.settings.get('PAGINATED_DIRECT_TEMPLATES')
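        # each DIRECT_TEMPLATE is rendered once, straight to <name>.html,
        # e.g. something like ('index', 'tags', 'categories', 'archives')
        # (an assumed typical value, not read from this file)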
        for template in self.settings.get('DIRECT_TEMPLATES'):
            paginated = {}
            if template in PAGINATED_TEMPLATES:
                paginated = {'articles': self.articles, 'dates': self.dates}

            write('%s.html' % template, self.get_template(template),
                    self.context, blog=True, paginated=paginated,
                    page_name=template)

        # and subfolders after that
        tag_template = self.get_template('tag')
        for tag, articles in self.tags.items():
            articles.sort(key=attrgetter('date'), reverse=True)
            dates = [article for article in self.dates if article in articles]
            write(tag.save_as, tag_template, self.context, tag=tag,
                    articles=articles, dates=dates,
                    paginated={'articles': articles, 'dates': dates},
                    page_name=u'tag/%s' % tag)

        category_template = self.get_template('category')
        for cat, articles in self.categories:
            dates = [article for article in self.dates if article in articles]
            write(cat.save_as, category_template, self.context,
                    category=cat, articles=articles, dates=dates,
                    paginated={'articles': articles, 'dates': dates},
                    page_name=u'category/%s' % cat)

        author_template = self.get_template('author')
        for aut, articles in self.authors:
            dates = [article for article in self.dates if article in articles]
            write(aut.save_as, author_template, self.context,
                    author=aut, articles=articles, dates=dates,
                    paginated={'articles': articles, 'dates': dates},
                    page_name=u'author/%s' % aut)

        for article in self.drafts:
            write('drafts/%s.html' % article.slug, article_template,
                    self.context, article=article, category=article.category)

    def generate_context(self):
        """change the context"""

        article_path = os.path.join(self.path, self.settings['ARTICLE_DIR'])
        all_articles = []
        for f in self.get_files(
                article_path,
                exclude=self.settings['ARTICLE_EXCLUDES']):
            try:
                content, metadata = read_file(f, settings=self.settings)
            except Exception, e:
                logger.warning(u'Could not process %s\n%s' % (f, str(e)))
                continue

            # if no category is set, use the name of the path as a category
            if 'category' not in metadata:
                if os.path.dirname(f) == article_path:
                    category = self.settings['DEFAULT_CATEGORY']
                else:
                    category = os.path.basename(os.path.dirname(f))\
                            .decode('utf-8')

                if category != '':
                    metadata['category'] = Category(category, self.settings)

            if 'date' not in metadata and self.settings['FALLBACK_ON_FS_DATE']:
                metadata['date'] = datetime.datetime.fromtimestamp(
                        os.stat(f).st_ctime)

            article = Article(content, metadata, settings=self.settings,
                    filename=f)
            if not is_valid_content(article, f):
                continue

            if article.status == "published":
                if hasattr(article, 'tags'):
                    for tag in article.tags:
                        self.tags[tag].append(article)
                all_articles.append(article)
            elif article.status == "draft":
                self.drafts.append(article)

        self.articles, self.translations = process_translations(all_articles)
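        # process_translations() is expected to split the list in two:
        # content in the default language, and the translations of it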

        for article in self.articles:
            # only main articles are listed in categories, not translations
            self.categories[article.category].append(article)
            self.authors[article.author].append(article)

        # sort the articles by date
        self.articles.sort(key=attrgetter('date'), reverse=True)
        self.dates = list(self.articles)
        self.dates.sort(key=attrgetter('date'),
                reverse=self.context['REVERSE_ARCHIVE_ORDER'])

        # create tag cloud
        tag_cloud = defaultdict(int)
        for article in self.articles:
            for tag in getattr(article, 'tags', []):
                tag_cloud[tag] += 1

        tag_cloud = sorted(tag_cloud.items(), key=itemgetter(1), reverse=True)
        tag_cloud = tag_cloud[:self.settings.get('TAG_CLOUD_MAX_ITEMS')]

        tags = map(itemgetter(1), tag_cloud)
        if tags:
            max_count = max(tags)
            steps = self.settings.get('TAG_CLOUD_STEPS')

            # calculate word sizes
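            # sizes run from 1 (most used tag) to ``steps`` (least used):
            # e.g. with steps=4 and max_count=10, count=10 yields
            # floor(4 - 3 * log(10)/log(10)) = 1 and count=1 yields
            # floor(4 - 3 * 0) = 4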
            self.tag_cloud = [
                (
                    tag,
                    int(math.floor(steps - (steps - 1) * math.log(count)
                        / (math.log(max_count) or 1)))
                )
                for tag, count in tag_cloud
            ]
            # put words in chaos
            random.shuffle(self.tag_cloud)

        # and generate the output :)

        # order the categories per name
        self.categories = list(self.categories.items())
        self.categories.sort(reverse=self.settings['REVERSE_CATEGORY_ORDER'])

        self.authors = list(self.authors.items())
        self.authors.sort()

        self._update_context(('articles', 'dates', 'tags', 'categories',
                'tag_cloud', 'authors'))

    def generate_output(self, writer):
        self.generate_feeds(writer)
        self.generate_pages(writer)


class PagesGenerator(Generator):
    """Generate pages"""

    def __init__(self, *args, **kwargs):
        self.pages = []
        super(PagesGenerator, self).__init__(*args, **kwargs)

    def generate_context(self):
        all_pages = []
        for f in self.get_files(
                os.path.join(self.path, self.settings['PAGE_DIR']),
                exclude=self.settings['PAGE_EXCLUDES']):
            try:
                content, metadata = read_file(f)
            except Exception, e:
                logger.error(u'Could not process %s\n%s' % (f, str(e)))
                continue
            page = Page(content, metadata, settings=self.settings,
                    filename=f)
            if not is_valid_content(page, f):
                continue
            all_pages.append(page)

        self.pages, self.translations = process_translations(all_pages)

        self._update_context(('pages', ))
        self.context['PAGES'] = self.pages

    def generate_output(self, writer):
        for page in chain(self.translations, self.pages):
            writer.write_file(page.save_as, self.get_template('page'),
                    self.context, page=page,
                    relative_urls=self.settings.get('RELATIVE_URLS'))


class StaticGenerator(Generator):
    """Copy static paths (what you want to copy, like images, media files,
    etc.) to output"""

    def _copy_paths(self, paths, source, destination, output_path,
                    final_path=None):
        """Copy all the paths from source to destination"""
        for path in paths:
            copy(path, source, os.path.join(output_path, destination),
                    final_path, overwrite=True)

    def generate_output(self, writer):
        self._copy_paths(self.settings['STATIC_PATHS'], self.path,
                'static', self.output_path)
        self._copy_paths(self.settings['THEME_STATIC_PATHS'], self.theme,
                'theme', self.output_path, '.')

        # copy all the files needed
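        # FILES_TO_COPY is expected to hold (source, destination) pairs,
        # e.g. (('extra/robots.txt', 'robots.txt'),) -- an assumed example,
        # not a value read from this file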
        for source, destination in self.settings['FILES_TO_COPY']:
            copy(source, self.path, self.output_path, destination,
                    overwrite=True)


class PdfGenerator(Generator):
    """Generate PDFs in the output dir, for all articles and pages coming
    from rst"""

    def __init__(self, *args, **kwargs):
        try:
            from rst2pdf.createpdf import RstToPdf
            self.pdfcreator = RstToPdf(breakside=0,
                    stylesheets=['twelvepoint'])
        except ImportError:
            raise Exception("unable to find rst2pdf")
        super(PdfGenerator, self).__init__(*args, **kwargs)

    def _create_pdf(self, obj, output_path):
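        # only reST sources can be handed to rst2pdf; anything else
        # (Markdown, for instance) is silently skipped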
        if obj.filename.endswith(".rst"):
            filename = obj.slug + ".pdf"
            output_pdf = os.path.join(output_path, filename)
            # print "Generating pdf for", obj.filename, " in ", output_pdf
            with open(obj.filename) as f:
                self.pdfcreator.createPdf(text=f, output=output_pdf)
            logger.info(u' [ok] writing %s' % output_pdf)

    def generate_context(self):
        pass

    def generate_output(self, writer=None):
        # we don't use the writer passed as argument here,
        # since we write our own files
        logger.info(u' Generating PDF files...')
        pdf_path = os.path.join(self.output_path, 'pdf')
        if not os.path.exists(pdf_path):
            try:
                os.mkdir(pdf_path)
            except OSError:
                logger.error("Couldn't create the pdf output folder in " +
                        pdf_path)

        for article in self.context['articles']:
            self._create_pdf(article, pdf_path)

        for page in self.context['pages']:
            self._create_pdf(page, pdf_path)


class LessCSSGenerator(Generator):
    """Compile Less CSS files. This assumes the compiler named by the
    LESS_COMPILER setting (e.g. `lessc`) is in our PATH."""

    def generate_context(self):
        pass

    def _compile(self, less_file, source_dir, dest_dir):
        base = os.path.relpath(less_file, source_dir)
        target = os.path.splitext(
                os.path.join(dest_dir, base))[0] + '.css'
        target_dir = os.path.dirname(target)

        if not os.path.exists(target_dir):
            try:
                os.makedirs(target_dir)
            except OSError:
                logger.error("Couldn't create the css output folder in " +
                        target_dir)
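
        # LESS_COMPILER names the external binary, e.g. 'lessc' (an assumed
        # typical value); it is invoked as ``<compiler> <source> <target>``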
        cmd = ' '.join([self.settings['LESS_COMPILER'], less_file, target])
        subprocess.call(cmd, shell=True)
        logger.info(u' [ok] compiled %s' % base)

    def generate_output(self, writer=None):
        logger.info(u' Compiling less css')

        for static_path in self.settings['STATIC_PATHS']:
            for f in self.get_files(
                    os.path.join(self.path, static_path),
                    extensions=['less']):

                self._compile(f, self.path, self.output_path)