forked from github/pelican
Refactoring, again.
Separate the Generator from the Processors. This clears the way for upcoming changes to support pages-only websites powered by Pelican!
parent d2a3b5380c
commit c989db50c9
5 changed files with 189 additions and 205 deletions
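For orientation, here is a minimal sketch of how the build is driven after this commit, mirroring the updated bin/pelican shown below; the settings file, content path, theme path and output directory are placeholder values.

from pelican.generators import Generator
from pelican.processors import (ArticlesProcessor, PagesProcessor,
                                StaticProcessor)
from pelican.utils import clean_output_dir

# Placeholder paths -- adapt them to a real project layout.
generator = Generator(settings='settings.py', path='content',
                      theme='themes/notmyidea', output_path='output',
                      markup=['rst'])

# Wipe the previous build, then let the generator drive the processors:
# run() instantiates each class, calls preprocess() on the ones that
# define it, then calls process() on all of them, sharing one context.
clean_output_dir(generator.output_path)
generator.run([ArticlesProcessor, PagesProcessor, StaticProcessor])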
21 bin/pelican
@@ -1,7 +1,10 @@
#!/usr/bin/env python
import argparse

from pelican.generators import ArticlesGenerator, PagesGenerator
from pelican.utils import clean_output_dir
from pelican.generators import Generator
from pelican.processors import (ArticlesProcessor, PagesProcessor,
                                StaticProcessor)

parser = argparse.ArgumentParser(description="""A tool to generate a
    static blog, with restructured text input files.""")
@@ -23,16 +26,10 @@ parser.add_argument('-s', '--settings', dest='settings',

if __name__ == '__main__':
    args = parser.parse_args()
    markup = [a.split()[0] for a in args.markup.split(',')]

    articles = ArticlesGenerator(args.settings)
    pages = PagesGenerator(args.settings)
    context = {}

    for gen in articles, pages:
        markup = [a.split()[0] for a in args.markup.split(',')]
        context.update(gen.create_context(args.path, args.theme, args.output,
                                          markup))

    for gen in articles, pages:
        gen.generate(context)
    generator = Generator(args.settings, args.path, args.theme,
                          args.output, markup)
    clean_output_dir(args.output)
    generator.run([ArticlesProcessor, PagesProcessor, StaticProcessor])
    print "Enjoy !"
pelican/contents.py
@@ -40,3 +40,13 @@ class Article(Page):

class Quote(Page):
    base_properties = ('author', 'date')


def is_valid_content(content, f):
    try:
        content.check_properties()
        return True
    except NameError as e:
        print u" [info] Skipping %s: impossible to find informations about '%s'" % (f, e)
        return False
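is_valid_content() is now a module-level helper so the new processors can share it. A small sketch of the intended call pattern, assuming Pelican is importable; the file path and the empty settings dict are placeholders.

from pelican.contents import Page, is_valid_content
from pelican.readers import read_file

pages = []
content, metadatas = read_file('pages/about.rst')  # placeholder path
page = Page(content, metadatas, settings={})

# check_properties() raises NameError when a mandatory property is missing;
# is_valid_content() turns that into an "[info] Skipping ..." message and
# returns False, so invalid sources are skipped instead of aborting the build.
if is_valid_content(page, 'pages/about.rst'):
    pages.append(page)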
pelican/generators.py
@@ -1,54 +1,50 @@
# -*- coding: utf-8 -*-
import os
import shutil
from codecs import open
from operator import attrgetter

from jinja2 import Environment, FileSystemLoader
from jinja2.exceptions import TemplateNotFound
from feedgenerator import Atom1Feed

from pelican.utils import update_dict
from pelican.settings import read_settings
from pelican.contents import Article, Page
from pelican.readers import read_file

## Constants ##########################################################

_TEMPLATES = ('index', 'tag', 'tags', 'article', 'category', 'categories',
              'archives', 'page')

_DIRECT_TEMPLATES = ('index', 'tags', 'categories', 'archives')


class Generator(object):
    """Base class generator"""

    def __init__(self, settings):
        self.settings = read_settings(settings)

    def _init_params(self, path=None, theme=None, output_path=None,
    def __init__(self, settings=None, path=None, theme=None, output_path=None,
                 markup=None):
        """Initialize parameters for this object.

        :param path: the path where to find the files to parse
        :param theme: where to search for templates
        :param output_path: where to output the generated files
        :param settings: the settings file to use
        :param markup: the markup of the files to read. It's a list.
        """
        # get the settings
        if settings is None:
            settings = {}
        self.settings = read_settings(settings)
        self.path = path or self.settings['PATH']
        self.theme = theme or self.settings['THEME']
        output_path = output_path or self.settings['OUTPUT_PATH']
        self.output_path = os.path.realpath(output_path)
        self.markup = markup or self.settings['MARKUP']

        if 'SITEURL' not in self.settings:
            self.settings['SITEURL'] = self.output_path

        # get the list of files to parse
        if not path:
            raise Exception('you need to specify a path to search the docs on !')

    def generate_feed(self, elements, context, output_path=None, filename=None):
    def run(self, processors):
        context = self.settings.copy()
        processors = [p() for p in processors]

        for p in processors:
            if hasattr(p, 'preprocess'):
                p.preprocess(context, self)

        for p in processors:
            p.process(context, self)

    def generate_feed(self, elements, context, filename=None):
        """Generate a feed with the list of articles provided

        Return the feed. If no output_path or filename is specified, just return
@@ -72,8 +68,8 @@ class Generator(object):
                author_name=getattr(element, 'author', 'John Doe'),
                pubdate=element.date)

        if output_path and filename:
            complete_path = os.path.join(output_path, filename)
        if filename:
            complete_path = os.path.join(self.output_path, filename)
            try:
                os.makedirs(os.path.dirname(complete_path))
            except Exception:
@@ -105,12 +101,12 @@ class Generator(object):
            f.write(output)
        print u' [ok] writing %s' % filename

    def get_templates(self, path=None):
    def get_templates(self):
        """Return the templates to use.

        :param path: the path to load the templates from
        """
        path = os.path.expanduser(os.path.join(path, 'templates'))
        path = os.path.expanduser(os.path.join(self.theme, 'templates'))
        env = Environment(loader=FileSystemLoader(path))
        templates = {}
        for template in _TEMPLATES:
@@ -121,32 +117,6 @@ class Generator(object):
                                template, path))
        return templates

    def clean_output_dir(self):
        """Remove all the files from the output directory"""

        # remove all the existing content from the output folder
        try:
            shutil.rmtree(os.path.join(self.output_path))
        except:
            pass

    def _get_context(self, items):
        """Return the context to be used in templates"""

        # create a new context only if none currently exists.
        if not hasattr(self, "context"):
            context = self.settings.copy()
        else:
            context = self.context

        # put all we need in the context, to generate the output
        for item in items:
            value = getattr(self, item)
            if hasattr(value, 'items'):
                value = value.items()
            context[item] = value
        return context

    def get_files(self, path, exclude=[]):
        """Return the files to use to use in this generator

@@ -161,143 +131,3 @@ class Generator(object):
            files.extend([os.sep.join((root, f)) for f in temp_files
                          if True in [f.endswith(markup) for markup in self.markup]])
        return files

    def is_valid_content(self, content, f):
        try:
            content.check_properties()
            return True
        except NameError as e:
            print u" [info] Skipping %s: impossible to find informations about '%s'" % (f, e)
            return False


class ArticlesGenerator(Generator):

    def __init__(self, settings=None):
        super(ArticlesGenerator, self).__init__(settings)
        self.articles = []
        self.dates = {}
        self.years = {}
        self.tags = {}
        self.categories = {}

    def process_files(self, files):
        """Process all the files and build the lists and dicts of
        articles/categories/etc.
        """
        for f in files:
            content, metadatas = read_file(f)
            if 'category' not in metadatas.keys():
                category = os.path.dirname(f).replace(
                    os.path.expanduser(self.path)+'/', '')

                if category != '':
                    metadatas['category'] = unicode(category)

            article = Article(content, metadatas, settings=self.settings)
            if not self.is_valid_content(article, f):
                continue

            update_dict(self.dates, article.date.strftime('%Y-%m-%d'), article)
            update_dict(self.years, article.date.year, article)
            update_dict(self.categories, article.category, article)
            if hasattr(article, 'tags'):
                for tag in article.tags:
                    update_dict(self.tags, tag, article)
            self.articles.append(article)

    def generate_feeds(self, context):
        """Generate the feeds from the current context, and output files."""

        if 'SITEURL' not in context:
            context['SITEURL'] = self.output_path

        self.generate_feed(self.articles, context, self.output_path,
                           context['FEED'])

        for cat, arts in self.categories.items():
            arts.sort(key=attrgetter('date'), reverse=True)
            self.generate_feed(arts, context, self.output_path,
                               context['CATEGORY_FEED'] % cat)

    def generate_pages(self, context):
        """Generate the pages on the disk"""

        templates = self.get_templates(self.theme)
        generate = self.generate_file
        for template in _DIRECT_TEMPLATES:
            generate('%s.html' % template, templates[template], context, blog=True)
        for tag in self.tags:
            generate('tag/%s.html' % tag, templates['tag'], context, tag=tag)
        for cat in self.categories:
            generate('category/%s.html' % cat, templates['category'], context,
                     category=cat, articles=self.categories[cat])
        for article in self.articles:
            generate('%s' % article.url,
                     templates['article'], context, article=article,
                     category=article.category)

    def generate_static_content(self):
        """copy static paths to output"""

        for path in self.settings['STATIC_PATHS']:
            try:
                fromp = os.path.expanduser(os.path.join(self.theme, path))
                to = os.path.expanduser(os.path.join(self.output_path, path))
                shutil.copytree(fromp, to)
                print u' [ok] copying %s' % fromp

            except OSError:
                pass

    def create_context(self, path=None, theme=None, output_path=None,
                       markup=None):
        self._init_params(path, theme, output_path, markup)

        # build the list of articles / categories / etc.
        self.process_files(self.get_files(path, ['pages',]))

        # sort the articles by date
        self.articles.sort(key=attrgetter('date'), reverse=True)
        # and generate the output :)
        return self._get_context(('articles', 'dates', 'years', 'tags',
                                  'categories'))

    def generate(self, context):
        self.generate_feeds(context)
        self.generate_pages(context)
        self.generate_static_content()


class PagesGenerator(Generator):
    """Generate pages"""

    def __init__(self, settings=None):
        super(PagesGenerator, self).__init__(settings)
        self.pages = []

    def process_files(self, files):
        """Process all the files and build the lists and dicts of
        articles/categories/etc.
        """
        for f in files:
            content, metadatas = read_file(f)
            page = Page(content, metadatas, settings=self.settings)
            if not self.is_valid_content(page, f):
                continue
            self.pages.append(page)

    def generate_pages(self, context):
        templates = self.get_templates(self.theme)
        for page in self.pages:
            self.generate_file('pages/%s' % page.url,
                               templates['page'], context, page=page)

    def create_context(self, path=None, theme=None, output_path=None,
                       markup=None):
        self._init_params(path, theme, output_path, markup)
        self.process_files(self.get_files(os.sep.join((path, 'pages'))))
        return self._get_context(('pages',))

    def generate(self, context):
        self.generate_pages(context)
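With the generator reduced to shared state plus run(), adding behaviour now means writing a processor rather than another Generator subclass. Below is a hypothetical example (not part of this commit) that lists every article URL on one extra page; it assumes ArticlesProcessor appears earlier in the list so that 'articles' is already in the shared context, reuses the 'index' template purely for illustration, and uses the same placeholder paths as above.

from pelican.generators import Generator
from pelican.processors import (ArticlesProcessor, PagesProcessor,
                                StaticProcessor)


class SitemapProcessor(object):
    """Hypothetical processor: renders a flat list of article URLs.

    run() only relies on duck typing, so subclassing Processor is optional.
    """

    def preprocess(self, context, generator):
        # All preprocess() hooks run before any process(), in list order,
        # so ArticlesProcessor has already put 'articles' into the context.
        self.urls = [article.url for article in context.get('articles', [])]
        context['sitemap_urls'] = self.urls

    def process(self, context, generator):
        templates = generator.get_templates()
        generator.generate_file('sitemap.html', templates['index'],
                                context, urls=self.urls)


generator = Generator('settings.py', 'content', 'themes/notmyidea',
                      'output', ['rst'])
generator.run([ArticlesProcessor, PagesProcessor, StaticProcessor,
               SitemapProcessor])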
125 pelican/processors.py Normal file
@@ -0,0 +1,125 @@
from operator import attrgetter
import os

from pelican.utils import update_dict, copytree
from pelican.contents import Article, Page, is_valid_content
from pelican.readers import read_file

_DIRECT_TEMPLATES = ('index', 'tags', 'categories', 'archives')


class Processor(object):

    def _update_context(self, context, items):
        """Update the context with the given items from the currrent
        processor.
        """
        for item in items:
            value = getattr(self, item)
            if hasattr(value, 'items'):
                value = value.items()
            context[item] = value


class ArticlesProcessor(Processor):

    def __init__(self, settings=None):
        self.articles = []
        self.dates = {}
        self.years = {}
        self.tags = {}
        self.categories = {}

    def generate_feeds(self, context, generator):
        """Generate the feeds from the current context, and output files."""

        generator.generate_feed(self.articles, context, context['FEED'])

        for cat, arts in self.categories.items():
            arts.sort(key=attrgetter('date'), reverse=True)
            generator.generate_feed(arts, context,
                                    context['CATEGORY_FEED'] % cat)

    def generate_pages(self, context, generator):
        """Generate the pages on the disk"""

        templates = generator.get_templates()
        generate = generator.generate_file
        for template in _DIRECT_TEMPLATES:
            generate('%s.html' % template, templates[template], context, blog=True)
        for tag in self.tags:
            generate('tag/%s.html' % tag, templates['tag'], context, tag=tag)
        for cat in self.categories:
            generate('category/%s.html' % cat, templates['category'], context,
                     category=cat, articles=self.categories[cat])
        for article in self.articles:
            generate('%s' % article.url,
                     templates['article'], context, article=article,
                     category=article.category)

    def preprocess(self, context, generator):

        # build the list of articles / categories / etc.
        files = generator.get_files(generator.path, exclude=['pages',])
        for f in files:
            content, metadatas = read_file(f)
            if 'category' not in metadatas.keys():
                category = os.path.dirname(f).replace(
                    os.path.expanduser(generator.path)+'/', '')

                if category != '':
                    metadatas['category'] = unicode(category)

            article = Article(content, metadatas, settings=generator.settings)
            if not is_valid_content(article, f):
                continue

            update_dict(self.dates, article.date.strftime('%Y-%m-%d'), article)
            update_dict(self.years, article.date.year, article)
            update_dict(self.categories, article.category, article)
            if hasattr(article, 'tags'):
                for tag in article.tags:
                    update_dict(self.tags, tag, article)
            self.articles.append(article)

        # sort the articles by date
        self.articles.sort(key=attrgetter('date'), reverse=True)
        # and generate the output :)
        self._update_context(context, ('articles', 'dates', 'years',
                                       'tags', 'categories'))

    def process(self, context, generator):
        self.generate_feeds(context, generator)
        self.generate_pages(context, generator)


class PagesProcessor(Processor):
    """Generate pages"""

    def __init__(self):
        self.pages = []

    def preprocess(self, context, generator):
        for f in generator.get_files(os.sep.join((generator.path, 'pages'))):
            content, metadatas = read_file(f)
            page = Page(content, metadatas, settings=generator.settings)
            if not is_valid_content(page, f):
                continue
            self.pages.append(page)

    def process(self, context, generator):
        templates = generator.get_templates()
        for page in self.pages:
            generator.generate_file('pages/%s' % page.url,
                                    templates['page'], context, page=page)
        self._update_context(context, ('pages',))


class StaticProcessor(Processor):
    """copy static paths to output"""

    def process(self, context, generator):
        for path in generator.settings['STATIC_PATHS']:
            copytree(path, generator.theme, generator.output_path)
        copytree('pics', generator.path, generator.output_path)
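One detail worth noting in Processor._update_context(): attributes that look like dicts (dates, tags, categories) are flattened with .items() before landing in the template context, while lists are passed through untouched. A quick illustrative sketch; the attribute values are made up.

from pelican.processors import Processor


class DummyProcessor(Processor):
    pass


p = DummyProcessor()
p.pages = ['about', 'contact']                  # lists are stored as-is
p.tags = {'python': ['art1'], 'web': ['art2']}  # dicts become lists of pairs

context = {}
p._update_context(context, ('pages', 'tags'))
# context['pages'] -> ['about', 'contact']
# context['tags']  -> [('python', ['art1']), ('web', ['art2'])] (in any order)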
pelican/utils.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
import re
import os
import shutil
from datetime import datetime
from codecs import open as _open

@@ -48,6 +50,26 @@ def slugify(value):
    value = unicode(re.sub('[^\w\s-]', '', value).strip().lower())
    return re.sub('[-\s]+', '-', value)

def copytree(path, origin, destination):
    """Copy path from origin to destination, silent any errors"""

    try:
        fromp = os.path.expanduser(os.path.join(origin, path))
        to = os.path.expanduser(os.path.join(destination, path))
        shutil.copytree(fromp, to)
        print u' [ok] copying %s' % fromp

    except OSError:
        pass

def clean_output_dir(path):
    """Remove all the files from the output directory"""

    # remove all the existing content from the output folder
    try:
        shutil.rmtree(path)
    except Exception as e:
        pass

def truncate_html_words(s, num, end_text='...'):
    """Truncates HTML to a certain number of words (not counting tags and