Mirror of https://github.com/getpelican/pelican.git (synced 2025-10-15 20:28:56 +02:00)
PEP8-ify.
Wrap to 80 chars, sanitize imports.
This commit is contained in: parent df25dec30a, commit 6cde7fd27a
8 changed files with 142 additions and 110 deletions
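The recurring changes below are mechanical: one module per import line instead of `import os, sys`, explicit names instead of `from pelican.log import *`, no spaces around `=` in keyword arguments, and long statements wrapped to stay within PEP 8's 79-character limit. As a rough illustration of that last constraint, a few lines of stand-alone Python (my own sketch, not a tool used by the project) are enough to flag the kind of lines this commit rewraps:

    import sys

    MAX_LINE_LENGTH = 79  # the PEP 8 limit this commit wraps lines to respect


    def long_lines(path, limit=MAX_LINE_LENGTH):
        """Yield (line_number, length) for every line exceeding the limit."""
        with open(path) as source:
            for number, line in enumerate(source, 1):
                length = len(line.rstrip('\n'))
                if length > limit:
                    yield number, length


    if __name__ == '__main__':
        for filename in sys.argv[1:]:
            for number, length in long_lines(filename):
                print('%s:%d: %d characters' % (filename, number, length))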
pelican/__init__.py

@@ -1,5 +1,6 @@
 import argparse
-import os, sys
+import os
+import sys
 import re
 import time

@@ -69,7 +70,8 @@ class Pelican(object):
         output_path = output_path or settings['OUTPUT_PATH']
         self.output_path = os.path.realpath(output_path)
         self.markup = markup or settings['MARKUP']
-        self.delete_outputdir = delete_outputdir or settings['DELETE_OUTPUT_DIRECTORY']
+        self.delete_outputdir = delete_outputdir \
+                or settings['DELETE_OUTPUT_DIRECTORY']

         # find the theme in pelican.theme if the given one does not exists
         if not os.path.exists(self.theme):
@@ -112,7 +114,6 @@ class Pelican(object):
             if hasattr(p, 'generate_output'):
                 p.generate_output(writer)

-
     def get_generator_classes(self):
         generators = [ArticlesGenerator, PagesGenerator, StaticGenerator]
         if self.settings['PDF_GENERATOR']:
@@ -123,7 +124,6 @@ class Pelican(object):
         return Writer(self.output_path, settings=self.settings)


-
 def main():
     parser = argparse.ArgumentParser(description="""A tool to generate a
     static blog, with restructured text input files.""")
@@ -134,32 +134,38 @@ def main():
         help='Path where to find the theme templates. If not specified, it'
              'will use the default one included with pelican.')
     parser.add_argument('-o', '--output', dest='output',
-        help='Where to output the generated files. If not specified, a directory'
-             ' will be created, named "output" in the current path.')
+        help='Where to output the generated files. If not specified, a '
+             'directory will be created, named "output" in the current path.')
     parser.add_argument('-m', '--markup', default=None, dest='markup',
         help='the list of markup language to use (rst or md). Please indicate '
              'them separated by commas')
     parser.add_argument('-s', '--settings', dest='settings', default='',
         help='the settings of the application. Default to False.')
-    parser.add_argument('-d', '--delete-output-directory', dest='delete_outputdir',
+    parser.add_argument('-d', '--delete-output-directory',
+        dest='delete_outputdir',
         action='store_true', help='Delete the output directory.')
-    parser.add_argument('-v', '--verbose', action='store_const', const=log.INFO, dest='verbosity',
-        help='Show all messages')
-    parser.add_argument('-q', '--quiet', action='store_const', const=log.CRITICAL, dest='verbosity',
-        help='Show only critical errors')
-    parser.add_argument('-D', '--debug', action='store_const', const=log.DEBUG, dest='verbosity',
-        help='Show all message, including debug messages')
+    parser.add_argument('-v', '--verbose', action='store_const',
+        const=log.INFO, dest='verbosity',
+        help='Show all messages')
+    parser.add_argument('-q', '--quiet', action='store_const',
+        const=log.CRITICAL, dest='verbosity',
+        help='Show only critical errors')
+    parser.add_argument('-D', '--debug', action='store_const',
+        const=log.DEBUG, dest='verbosity',
+        help='Show all message, including debug messages')
     parser.add_argument('--version', action='version', version=__version__,
         help='Print the pelican version and exit')
-    parser.add_argument('-r', '--autoreload', dest='autoreload', action='store_true',
-        help="Relaunch pelican each time a modification occurs on the content"
-             "files")
+    parser.add_argument('-r', '--autoreload', dest='autoreload',
+        action='store_true',
+        help="Relaunch pelican each time a modification occurs"
+             " on the content files")
     args = parser.parse_args()

     log.init(args.verbosity)
-    # Split the markup languages only if some have been given. Otherwise, populate
-    # the variable with None.
-    markup = [a.strip().lower() for a in args.markup.split(',')] if args.markup else None
+    # Split the markup languages only if some have been given. Otherwise,
+    # populate the variable with None.
+    markup = [a.strip().lower() for a in args.markup.split(',')]\
+        if args.markup else None

     settings = read_settings(args.settings)
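The main() hunk above settles on one wrapping style for argparse: the option strings stay on the first line and the remaining keyword arguments move to continuation lines. A self-contained sketch of that style, with logging.INFO standing in for pelican's log.INFO and only two of the options reproduced:

    import argparse
    import logging


    def build_parser():
        # Keyword arguments continue on the next line so that each
        # add_argument() call stays under 80 characters.
        parser = argparse.ArgumentParser(
            description='A tool to generate a static blog, with restructured '
                        'text input files.')
        parser.add_argument('-d', '--delete-output-directory',
                            dest='delete_outputdir', action='store_true',
                            help='Delete the output directory.')
        parser.add_argument('-v', '--verbose', action='store_const',
                            const=logging.INFO, dest='verbosity',
                            help='Show all messages')
        return parser


    if __name__ == '__main__':
        args = build_parser().parse_args(['-v', '-d'])
        print(args.delete_outputdir)  # True
        print(args.verbosity)         # 20, i.e. logging.INFO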
pelican/contents.py

@@ -1,13 +1,14 @@
 # -*- coding: utf-8 -*-
-import datetime
+from datetime import datetime
 from os import getenv
 from sys import platform, stdin
 import locale

-from pelican.log import *
+from pelican.log import warning, error
 from pelican.settings import _DEFAULT_CONFIG
 from pelican.utils import slugify, truncate_html_words


 class Page(object):
     """Represents a page
     Given a content, and metadata, create an adequate object.
@@ -41,7 +42,8 @@ class Page(object):
             self.author = Author(settings['AUTHOR'], settings)
         else:
             self.author = Author(getenv('USER', 'John Doe'), settings)
-            warning(u"Author of `{0}' unknow, assuming that his name is `{1}'".format(filename or self.title, self.author))
+            warning(u"Author of `{0}' unknow, assuming that his name is "
+                    "`{1}'".format(filename or self.title, self.author))

         # manage languages
         self.in_default_lang = True
@@ -71,16 +73,19 @@ class Page(object):
             self.date_format = self.date_format[1]

         if hasattr(self, 'date'):
+            encoded_date = self.date.strftime(
+                    self.date_format.encode('ascii', 'xmlcharrefreplace'))
+
             if platform == 'win32':
-                self.locale_date = self.date.strftime(self.date_format.encode('ascii','xmlcharrefreplace')).decode(stdin.encoding)
+                self.locale_date = encoded_date.decode(stdin.encoding)
             else:
-                self.locale_date = self.date.strftime(self.date_format.encode('ascii','xmlcharrefreplace')).decode('utf')
+                self.locale_date = encoded_date.decode('utf')

         # manage status
         if not hasattr(self, 'status'):
             self.status = settings['DEFAULT_STATUS']
             if not settings['WITH_FUTURE_DATES']:
-                if hasattr(self, 'date') and self.date > datetime.datetime.now():
+                if hasattr(self, 'date') and self.date > datetime.now():
                     self.status = 'draft'

         # set summary
@@ -98,7 +103,7 @@ class Page(object):
         return {
             'slug': getattr(self, 'slug', ''),
             'lang': getattr(self, 'lang', 'en'),
-            'date': getattr(self, 'date', datetime.datetime.now()),
+            'date': getattr(self, 'date', datetime.now()),
             'author': self.author,
             'category': getattr(self, 'category', 'misc'),
         }
@@ -133,8 +138,8 @@ class Page(object):
         """Dummy function"""
         pass

-    summary = property(_get_summary, _set_summary,
-        "Summary of the article. Based on the content. Can't be set")
+    summary = property(_get_summary, _set_summary, "Summary of the article."
+                       "Based on the content. Can't be set")


 class Article(Page):
@@ -214,5 +219,6 @@ def is_valid_content(content, f):
         content.check_properties()
         return True
     except NameError, e:
-        error(u"Skipping %s: impossible to find informations about '%s'" % (f, e))
+        error(u"Skipping %s: impossible to find informations about '%s'"\
+                % (f, e))
         return False
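Two things are worth noting in the Page hunks above: `from datetime import datetime` is what turns `datetime.datetime.now()` into the shorter `datetime.now()`, and the rewrapped status block still implements the same rule for future-dated content. That rule reads more easily outside the class; the sketch below restates it with simplified module-level names rather than pelican's actual settings object:

    from datetime import datetime, timedelta

    DEFAULT_STATUS = 'published'
    WITH_FUTURE_DATES = False


    def effective_status(date, status=DEFAULT_STATUS):
        # Same rule as in the hunk: unless WITH_FUTURE_DATES is enabled,
        # content dated in the future is demoted to a draft.
        if not WITH_FUTURE_DATES and date > datetime.now():
            return 'draft'
        return status


    print(effective_status(datetime.now() - timedelta(days=1)))  # published
    print(effective_status(datetime.now() + timedelta(days=1)))  # draft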
pelican/generators.py

@@ -3,7 +3,6 @@ import os
 import datetime
 import math
 import random
-import urlparse

 from collections import defaultdict
 from functools import partial
@@ -14,10 +13,9 @@ from jinja2 import Environment, FileSystemLoader, PrefixLoader, ChoiceLoader
 from jinja2.exceptions import TemplateNotFound

 from pelican.contents import Article, Page, Category, is_valid_content
-from pelican.log import *
+from pelican.log import warning, error, debug, info
 from pelican.readers import read_file
 from pelican.utils import copy, process_translations, open
-from pelican.utils import slugify


 class Generator(object):
@@ -33,17 +31,23 @@ class Generator(object):

         # templates cache
         self._templates = {}
-        self._templates_path = os.path.expanduser(os.path.join(self.theme, 'templates'))
-        simple_loader = FileSystemLoader(os.path.join(os.path.dirname(os.path.abspath(__file__)), "themes", "simple", "templates"))
+        self._templates_path = os.path.expanduser(
+                os.path.join(self.theme, 'templates'))
+
+        theme_path = os.path.join(os.path.dirname(os.path.abspath(__file__)))
+
+        simple_loader = FileSystemLoader(theme_path,
+                "themes", "simple", "templates")
         self._env = Environment(
             loader=ChoiceLoader([
                 FileSystemLoader(self._templates_path),
                 simple_loader,  # implicit inheritance
-                PrefixLoader({'!simple' : simple_loader})  # explicit inheritance
+                PrefixLoader({'!simple': simple_loader})  # explicit one
             ]),
             extensions=self.settings.get('JINJA_EXTENSIONS', []),
         )
-        debug('self._env.list_templates(): {0}'.format(self._env.list_templates()))
+        debug('template list: {0}'.format(self._env.list_templates()))

         # get custom Jinja filters from user settings
         custom_filters = self.settings.get('JINJA_FILTERS', {})
@@ -58,8 +62,8 @@ class Generator(object):
         try:
             self._templates[name] = self._env.get_template(name + '.html')
         except TemplateNotFound:
-            raise Exception('[templates] unable to load %s.html from %s' % (
-                name, self._templates_path))
+            raise Exception('[templates] unable to load %s.html from %s' \
+                    % (name, self._templates_path))
         return self._templates[name]

     def get_files(self, path, exclude=[], extensions=None):
@@ -75,7 +79,7 @@ class Generator(object):

         try:
             iter = os.walk(path, followlinks=True)
-        except TypeError: # python 2.5 does not support followlinks
+        except TypeError:  # python 2.5 does not support followlinks
             iter = os.walk(path)

         for root, dirs, temp_files in iter:
@@ -102,7 +106,7 @@ class ArticlesGenerator(Generator):

     def __init__(self, *args, **kwargs):
         """initialize properties"""
-        self.articles = [] # only articles in default language
+        self.articles = []  # only articles in default language
         self.translations = []
         self.dates = {}
         self.tags = defaultdict(list)
@@ -138,7 +142,8 @@ class ArticlesGenerator(Generator):

             if 'TAG_FEED_RSS' in self.settings:
                 writer.write_feed(arts, self.context,
-                        self.settings['TAG_FEED_RSS'] % tag, feed_type='rss')
+                        self.settings['TAG_FEED_RSS'] % tag,
+                        feed_type='rss')

         translations_feeds = defaultdict(list)
         for article in chain(self.articles, self.translations):
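The Generator hunk above layers three Jinja2 loaders: the theme's own templates, the bundled "simple" theme as an implicit fallback, and the same fallback reachable explicitly through the `!simple` prefix. The standalone sketch below shows that ChoiceLoader/PrefixLoader arrangement with DictLoader substituted for the on-disk template directories, so it runs without a theme installed:

    from jinja2 import ChoiceLoader, DictLoader, Environment, PrefixLoader

    theme_loader = DictLoader({'article.html': 'theme: {{ title }}'})
    simple_loader = DictLoader({'article.html': 'simple: {{ title }}',
                                'tag.html': 'simple tag: {{ tag }}'})

    env = Environment(loader=ChoiceLoader([
        theme_loader,                              # the configured theme
        simple_loader,                             # implicit inheritance
        PrefixLoader({'!simple': simple_loader}),  # explicit inheritance
    ]))

    print(env.get_template('article.html').render(title='x'))  # theme wins
    print(env.get_template('tag.html').render(tag='y'))        # falls back
    print(env.get_template('!simple/article.html').render(title='x'))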
@@ -149,14 +154,11 @@ class ArticlesGenerator(Generator):
             writer.write_feed(items, self.context,
                     self.settings['TRANSLATION_FEED'] % lang)

-
     def generate_pages(self, writer):
         """Generate the pages on the disk"""

-        write = partial(
-            writer.write_file,
-            relative_urls = self.settings.get('RELATIVE_URLS')
-        )
+        write = partial(writer.write_file,
+                relative_urls=self.settings.get('RELATIVE_URLS'))

         # to minimize the number of relative path stuff modification
         # in writer, articles pass first
@@ -171,8 +173,10 @@ class ArticlesGenerator(Generator):
             paginated = {}
             if template in PAGINATED_TEMPLATES:
                 paginated = {'articles': self.articles, 'dates': self.dates}
-            write('%s.html' % template, self.get_template(template), self.context,
-                blog=True, paginated=paginated, page_name=template)
+            write('%s.html' % template, self.get_template(template),
+                    self.context, blog=True, paginated=paginated,
+                    page_name=template)

         # and subfolders after that
         tag_template = self.get_template('tag')
@@ -201,15 +205,14 @@ class ArticlesGenerator(Generator):
                     page_name=u'author/%s' % aut)

         for article in self.drafts:
-            write('drafts/%s.html' % article.slug, article_template, self.context,
-                article=article, category=article.category)
+            write('drafts/%s.html' % article.slug, article_template,
+                    self.context, article=article, category=article.category)

-
     def generate_context(self):
         """change the context"""

         # return the list of files to use
-        files = self.get_files(self.path, exclude=['pages',])
+        files = self.get_files(self.path, exclude=['pages', ])
         all_articles = []
         for f in files:
             try:
@@ -224,14 +227,16 @@ class ArticlesGenerator(Generator):
                 if os.path.dirname(f) == self.path:
                     category = self.settings['DEFAULT_CATEGORY']
                 else:
-                    category = os.path.basename(os.path.dirname(f)).decode('utf-8')
+                    category = os.path.basename(os.path.dirname(f))\
+                                 .decode('utf-8')

                 if category != '':
                     metadata['category'] = Category(category, self.settings)

                 if 'date' not in metadata.keys()\
                     and self.settings['FALLBACK_ON_FS_DATE']:
-                    metadata['date'] = datetime.datetime.fromtimestamp(os.stat(f).st_ctime)
+                    metadata['date'] = datetime.datetime.fromtimestamp(
+                            os.stat(f).st_ctime)

             article = Article(content, metadata, settings=self.settings,
                               filename=f)
@@ -265,7 +270,7 @@ class ArticlesGenerator(Generator):
             for tag in getattr(article, 'tags', []):
                 tag_cloud[tag] += 1

-        tag_cloud = sorted(tag_cloud.items(), key = itemgetter(1), reverse = True)
+        tag_cloud = sorted(tag_cloud.items(), key=itemgetter(1), reverse=True)
         tag_cloud = tag_cloud[:self.settings.get('TAG_CLOUD_MAX_ITEMS')]

         tags = map(itemgetter(1), tag_cloud)
@@ -277,9 +282,8 @@ class ArticlesGenerator(Generator):
         self.tag_cloud = [
             (
                 tag,
-                int(
-                    math.floor(steps - (steps - 1) * math.log(count) / (math.log(max_count)or 1))
-                )
+                int(math.floor(steps - (steps - 1) * math.log(count)
+                    / (math.log(max_count)or 1)))
             )
             for tag, count in tag_cloud
         ]
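The tag-cloud hunk above only rewraps the bucketing expression, so it is worth reading on its own: a tag's step grows logarithmically as its count falls behind the most-used tag, and `math.log(max_count) or 1` guards against dividing by zero when the largest count is 1. A small worked example, with a step count of 4 chosen purely for illustration:

    import math


    def tag_cloud_step(count, max_count, steps=4):
        # Same expression as in the hunk above.
        return int(math.floor(steps - (steps - 1) * math.log(count)
                              / (math.log(max_count) or 1)))


    counts = {'python': 40, 'django': 10, 'rst': 2, 'misc': 1}
    max_count = max(counts.values())
    for tag, count in sorted(counts.items(), key=lambda item: -item[1]):
        step = tag_cloud_step(count, max_count)
        print('%-6s count=%-2d step=%d' % (tag, count, step))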
@@ -290,14 +294,13 @@ class ArticlesGenerator(Generator):

         # order the categories per name
         self.categories = list(self.categories.items())
-        self.categories.sort(reverse=self.settings.get('REVERSE_CATEGORY_ORDER'))
+        self.categories.sort(reverse=self.settings['REVERSE_CATEGORY_ORDER'])

         self.authors = list(self.authors.items())
         self.authors.sort()

-        self._update_context(('articles', 'dates', 'tags', 'categories', 'tag_cloud', 'authors'))
+        self._update_context(('articles', 'dates', 'tags', 'categories',
+                              'tag_cloud', 'authors'))

-
     def generate_output(self, writer):
         self.generate_feeds(writer)
@@ -334,7 +337,7 @@ class PagesGenerator(Generator):
         for page in chain(self.translations, self.pages):
             writer.write_file(page.save_as, self.get_template('page'),
                     self.context, page=page,
-                    relative_urls = self.settings.get('RELATIVE_URLS'))
+                    relative_urls=self.settings.get('RELATIVE_URLS'))


 class StaticGenerator(Generator):
@@ -345,8 +348,8 @@ class StaticGenerator(Generator):
                     final_path=None):
         """Copy all the paths from source to destination"""
         for path in paths:
-            copy(path, source, os.path.join(output_path, destination), final_path,
-                overwrite=True)
+            copy(path, source, os.path.join(output_path, destination),
+                    final_path, overwrite=True)

     def generate_output(self, writer):
         self._copy_paths(self.settings['STATIC_PATHS'], self.path,
@@ -356,7 +359,8 @@ class StaticGenerator(Generator):

         # copy all the files needed
         for source, destination in self.settings['FILES_TO_COPY']:
-            copy(source, self.path, self.output_path, destination, overwrite=True)
+            copy(source, self.path, self.output_path, destination,
+                    overwrite=True)


 class PdfGenerator(Generator):
@@ -365,7 +369,8 @@ class PdfGenerator(Generator):
     def __init__(self, *args, **kwargs):
         try:
             from rst2pdf.createpdf import RstToPdf
-            self.pdfcreator = RstToPdf(breakside=0, stylesheets=['twelvepoint'])
+            self.pdfcreator = RstToPdf(breakside=0,
+                    stylesheets=['twelvepoint'])
         except ImportError:
             raise Exception("unable to find rst2pdf")
         super(PdfGenerator, self).__init__(*args, **kwargs)
@@ -373,7 +378,7 @@ class PdfGenerator(Generator):
     def _create_pdf(self, obj, output_path):
         if obj.filename.endswith(".rst"):
             filename = obj.slug + ".pdf"
-            output_pdf=os.path.join(output_path, filename)
+            output_pdf = os.path.join(output_path, filename)
             # print "Generating pdf for", obj.filename, " in ", output_pdf
             with open(obj.filename) as f:
                 self.pdfcreator.createPdf(text=f, output=output_pdf)
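The PdfGenerator hunks above only rewrap two rst2pdf calls. Pulled out of the class, and with the arguments kept exactly as they appear in the diff, the same calls look like this (assuming rst2pdf is installed and an article.rst file exists next to the script):

    try:
        from rst2pdf.createpdf import RstToPdf
    except ImportError:
        raise Exception("unable to find rst2pdf")

    pdfcreator = RstToPdf(breakside=0, stylesheets=['twelvepoint'])

    with open('article.rst') as source:
        pdfcreator.createPdf(text=source, output='article.pdf')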
pelican/paginator.py

@@ -1,6 +1,7 @@
 # From django.core.paginator
 from math import ceil

+
 class Paginator(object):
     def __init__(self, object_list, per_page, orphans=0):
         self.object_list = object_list
@@ -39,6 +40,7 @@ class Paginator(object):
         return range(1, self.num_pages + 1)
     page_range = property(_get_page_range)

+
 class Page(object):
     def __init__(self, object_list, number, paginator):
         self.object_list = object_list
@@ -82,4 +84,3 @@ class Page(object):
         if self.number == self.paginator.num_pages:
             return self.paginator.count
         return self.number * self.paginator.per_page
-
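The paginator above is adapted from django.core.paginator, and the rest of the diff only relies on a handful of its attributes: num_pages, page_range, page(), object_list, count and per_page. A short usage sketch, assuming pelican itself is importable:

    from pelican.paginator import Paginator

    articles = ['article-%d' % i for i in range(1, 11)]  # ten fake items

    paginator = Paginator(articles, per_page=4)  # orphans defaults to 0
    print(paginator.num_pages)         # 3 pages of at most 4 items each
    print(list(paginator.page_range))  # [1, 2, 3]

    for page_num in paginator.page_range:
        page = paginator.page(page_num)
        print('%d: %r' % (page_num, page.object_list))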
pelican/readers.py

@@ -6,27 +6,28 @@ try:
     from docutils.writers.html4css1 import HTMLTranslator

     # import the directives to have pygments support
-    from pelican import rstdirectives
+    from pelican import rstdirectives  # NOQA
 except ImportError:
     core = False
 try:
     from markdown import Markdown
 except ImportError:
-    Markdown = False
+    Markdown = False  # NOQA
 import re

-from pelican.contents import Category, Tag, Author, URLWrapper
+from pelican.contents import Category, Tag, Author
 from pelican.utils import get_date, open


 _METADATA_PROCESSORS = {
     'tags': lambda x, y: [Tag(tag, y) for tag in unicode(x).split(',')],
     'date': lambda x, y: get_date(x),
-    'status': lambda x,y: unicode.strip(x),
+    'status': lambda x, y: unicode.strip(x),
     'category': Category,
     'author': Author,
 }


 class Reader(object):
     enabled = True
     extensions = None
@@ -39,6 +40,7 @@ class Reader(object):
             return _METADATA_PROCESSORS[name.lower()](value, self.settings)
         return value

+
 class _FieldBodyTranslator(HTMLTranslator):

     def astext(self):
@@ -56,6 +58,7 @@ def render_node_to_html(document, node):
     node.walkabout(visitor)
     return visitor.astext()

+
 class RstReader(Reader):
     enabled = bool(docutils)
     extension = "rst"
@@ -65,11 +68,11 @@ class RstReader(Reader):
         output = {}
         for docinfo in document.traverse(docutils.nodes.docinfo):
             for element in docinfo.children:
-                if element.tagname == 'field': # custom fields (e.g. summary)
+                if element.tagname == 'field':  # custom fields (e.g. summary)
                     name_elem, body_elem = element.children
                     name = name_elem.astext()
                     value = render_node_to_html(document, body_elem)
-                else: # standard fields (e.g. address)
+                else:  # standard fields (e.g. address)
                     name = element.tagname
                     value = element.astext()

@@ -78,7 +81,8 @@ class RstReader(Reader):

     def _get_publisher(self, filename):
         extra_params = {'initial_header_level': '2'}
-        pub = docutils.core.Publisher(destination_class=docutils.io.StringOutput)
+        pub = docutils.core.Publisher(
+                destination_class=docutils.io.StringOutput)
         pub.set_components('standalone', 'restructuredtext', 'html')
         pub.process_programmatic_settings(None, extra_params, None)
         pub.set_source(source_path=filename)
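The `_METADATA_PROCESSORS` table above is a plain dispatch: each known metadata field name maps to a callable that normalises the raw value, and anything unknown passes through untouched. A trimmed, dependency-free version of the same pattern (Tag, Category, Author and the settings argument are left out):

    from datetime import datetime

    METADATA_PROCESSORS = {
        'tags': lambda x: [tag.strip() for tag in x.split(',')],
        'date': lambda x: datetime.strptime(x, '%Y-%m-%d %H:%M'),
        'status': lambda x: x.strip(),
    }


    def process_metadata(name, value):
        if name.lower() in METADATA_PROCESSORS:
            return METADATA_PROCESSORS[name.lower()](value)
        return value


    print(process_metadata('Tags', 'python, pelican'))
    print(process_metadata('date', '2010-11-20 10:30'))
    print(process_metadata('Title', 'Hello world'))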
pelican/settings.py

@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 import os
+from os.path import isabs
 import locale

 from pelican import log
@@ -10,8 +11,8 @@ _DEFAULT_CONFIG = {'PATH': None,
                    'THEME': DEFAULT_THEME,
                    'OUTPUT_PATH': 'output/',
                    'MARKUP': ('rst', 'md'),
-                   'STATIC_PATHS': ['images',],
-                   'THEME_STATIC_PATHS': ['static',],
+                   'STATIC_PATHS': ['images', ],
+                   'THEME_STATIC_PATHS': ['static', ],
                    'FEED': 'feeds/all.atom.xml',
                    'CATEGORY_FEED': 'feeds/%s.atom.xml',
                    'TRANSLATION_FEED': 'feeds/all-%s.atom.xml',
@@ -44,7 +45,7 @@ _DEFAULT_CONFIG = {'PATH': None,
                    'DEFAULT_DATE_FORMAT': '%a %d %B %Y',
                    'DATE_FORMATS': {},
                    'JINJA_EXTENSIONS': [],
-                   'LOCALE': '', # default to user locale
+                   'LOCALE': '',  # default to user locale
                    'DEFAULT_PAGINATION': False,
                    'DEFAULT_ORPHANS': 0,
                    'DEFAULT_METADATA': (),
@@ -53,6 +54,7 @@ _DEFAULT_CONFIG = {'PATH': None,
                    'ARTICLE_PERMALINK_STRUCTURE': ''
                    }

+
 def read_settings(filename):
     """Load a Python file into a dictionary.
     """
@@ -67,9 +69,10 @@ def read_settings(filename):
     # Make the paths relative to the settings file
     for path in ['PATH', 'OUTPUT_PATH']:
         if path in context:
-            if context[path] is not None and not os.path.isabs(context[path]):
-                # FIXME:
-                context[path] = os.path.abspath(os.path.normpath(os.path.join(os.path.dirname(filename), context[path])))
+            if context[path] is not None and not isabs(context[path]):
+                context[path] = os.path.abspath(os.path.normpath(
+                    os.path.join(os.path.dirname(filename), context[path]))
+                    )

     # if locales is not a list, make it one
     locales = context['LOCALE']
@@ -84,17 +87,17 @@ def read_settings(filename):
     for locale_ in locales:
         try:
             locale.setlocale(locale.LC_ALL, locale_)
             break  # break if it is successfull
         except locale.Error:
             pass
     else:
         log.warn("LOCALE option doesn't contain a correct value")

     if not 'TIMEZONE' in context:
-        log.warn("No timezone information specified in the settings. Assuming your "\
-                "timezone is UTC for feed generation. "\
-                "Check http://docs.notmyidea.org/alexis/pelican/settings.html#timezone "\
+        log.warn("No timezone information specified in the settings. Assuming"
+                " your timezone is UTC for feed generation. Check "
+                "http://docs.notmyidea.org/alexis/pelican/settings.html#timezone "
                 "for more information")

     # set the locale
     return context
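The read_settings() hunk above rewrites relative PATH and OUTPUT_PATH values so that they are resolved against the directory of the settings file rather than the current working directory. The same expression, restated as a small helper with names of my own choosing:

    import os


    def resolve_against_settings(settings_filename, value):
        if value is not None and not os.path.isabs(value):
            return os.path.abspath(os.path.normpath(
                os.path.join(os.path.dirname(settings_filename), value)))
        return value


    print(resolve_against_settings('/home/user/blog/settings.py', 'output/'))
    print(resolve_against_settings('/home/user/blog/settings.py', '/tmp/out'))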
pelican/utils.py

@@ -19,7 +19,7 @@ def get_date(string):
     string = re.sub(' +', ' ', string)
     formats = ['%Y-%m-%d %H:%M', '%Y/%m/%d %H:%M',
                '%Y-%m-%d', '%Y/%m/%d',
-               '%d-%m-%Y', '%Y-%d-%m', # Weird ones
+               '%d-%m-%Y', '%Y-%d-%m',  # Weird ones
                '%d/%m/%Y', '%d.%m.%Y',
                '%d.%m.%Y %H:%M', '%Y-%m-%d %H:%M:%S']
     for date_format in formats:
@@ -48,6 +48,7 @@ def slugify(value):
     value = unicode(re.sub('[^\w\s-]', '', value).strip().lower())
     return re.sub('[-\s]+', '-', value)

+
 def copy(path, source, destination, destination_path=None, overwrite=False):
     """Copy path from origin to destination.

@@ -57,8 +58,8 @@ def copy(path, source, destination, destination_path=None, overwrite=False):
     :param source: the source dir
     :param destination: the destination dir
     :param destination_path: the destination path (optional)
-    :param overwrite: wether to overwrite the destination if already exists or not
+    :param overwrite: wether to overwrite the destination if already exists or
+        not
     """
     if not destination_path:
         destination_path = path
@@ -109,7 +110,8 @@ def truncate_html_words(s, num, end_text='...'):
     length = int(num)
     if length <= 0:
         return u''
-    html4_singlets = ('br', 'col', 'link', 'base', 'img', 'param', 'area', 'hr', 'input')
+    html4_singlets = ('br', 'col', 'link', 'base', 'img', 'param', 'area',
+                      'hr', 'input')

     # Set up regular expressions
     re_words = re.compile(r'&.*?;|<.*?>|(\w[\w-]*)', re.U)
@@ -147,8 +149,9 @@ def truncate_html_words(s, num, end_text='...'):
             except ValueError:
                 pass
             else:
-                # SGML: An end tag closes, back to the matching start tag, all unclosed intervening start tags with omitted end tags
-                open_tags = open_tags[i+1:]
+                # SGML: An end tag closes, back to the matching start tag,
+                # all unclosed intervening start tags with omitted end tags
+                open_tags = open_tags[i + 1:]
         else:
             # Add it to the start of the open tags list
             open_tags.insert(0, tagname)
@@ -195,7 +198,7 @@ def process_translations(content_list):
         default_lang_items = items[:1]

         if not slug:
-            warning('empty slug for %r' %( default_lang_items[0].filename,))
+            warning('empty slug for %r' % (default_lang_items[0].filename,))
         index.extend(default_lang_items)
         translations.extend(filter(
             lambda x: x not in default_lang_items,
@@ -233,7 +236,8 @@ def files_changed(path, extensions):

 def set_date_tzinfo(d, tz_name=None):
     """ Date without tzinfo shoudbe utc.
-    This function set the right tz to date that aren't utc and don't have tzinfo
+    This function set the right tz to date that aren't utc and don't have
+    tzinfo.
     """
     if tz_name is not None:
         tz = pytz.timezone(tz_name)
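get_date() above tries each accepted format in turn; only the format list is visible in the hunk, so the loop below is a plausible stand-in built on datetime.strptime rather than pelican's exact implementation:

    from datetime import datetime

    FORMATS = ['%Y-%m-%d %H:%M', '%Y/%m/%d %H:%M',
               '%Y-%m-%d', '%Y/%m/%d',
               '%d-%m-%Y', '%Y-%d-%m',  # Weird ones
               '%d/%m/%Y', '%d.%m.%Y',
               '%d.%m.%Y %H:%M', '%Y-%m-%d %H:%M:%S']


    def get_date(string):
        for date_format in FORMATS:
            try:
                return datetime.strptime(string, date_format)
            except ValueError:
                continue
        raise ValueError("'%s' is not a valid date" % string)


    print(get_date('2010-11-20 10:30'))
    print(get_date('20.11.2010'))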
pelican/writers.py

@@ -8,7 +8,7 @@ import re

 from feedgenerator import Atom1Feed, Rss201rev2Feed
 from pelican.paginator import Paginator
-from pelican.log import *
+from pelican.log import info
 from pelican.utils import get_relative_path, set_date_tzinfo


@@ -28,7 +28,6 @@ class Writer(object):
                     description=context.get('SITESUBTITLE', ''))
         return feed

-
     def _add_item_to_the_feed(self, feed, item):

         feed.add_item(
@@ -44,8 +43,8 @@ class Writer(object):
     def write_feed(self, elements, context, filename=None, feed_type='atom'):
         """Generate a feed with the list of articles provided

-        Return the feed. If no output_path or filename is specified, just return
-        the feed object.
+        Return the feed. If no output_path or filename is specified, just
+        return the feed object.

         :param elements: the articles to put on the feed.
         :param context: the context to get the feed metadata.
@@ -56,7 +55,7 @@ class Writer(object):
         locale.setlocale(locale.LC_ALL, 'C')
         try:
             self.site_url = context.get('SITEURL', get_relative_path(filename))
-            self.feed_url= '%s/%s' % (self.site_url, filename)
+            self.feed_url = '%s/%s' % (self.site_url, filename)

             feed = self._create_new_feed(feed_type, context)

@@ -132,7 +131,7 @@ class Writer(object):
                             self.settings.get('DEFAULT_PAGINATION'),
                             self.settings.get('DEFAULT_ORPHANS'))
                 else:
-                    paginators[key] = Paginator(object_list, len(object_list), 0)
+                    paginators[key] = Paginator(object_list, len(object_list))

                 # generated pages, and write
                 for page_num in range(paginators.values()[0].num_pages):
@@ -140,9 +139,10 @@ class Writer(object):
                     paginated_name = name
                     for key in paginators.iterkeys():
                         paginator = paginators[key]
-                        page = paginator.page(page_num+1)
-                        paginated_localcontext.update({'%s_paginator' % key: paginator,
-                                                       '%s_page' % key: page})
+                        page = paginator.page(page_num + 1)
+                        paginated_localcontext.update(
+                                {'%s_paginator' % key: paginator,
+                                 '%s_page' % key: page})
                     if page_num > 0:
                         ext = '.' + paginated_name.rsplit('.')[-1]
                         paginated_name = paginated_name.replace(ext,
@@ -160,8 +160,8 @@ class Writer(object):
         relative paths.

         :param name: name of the file to output.
-        :param context: dict that will be passed to the templates, which need to
-            be updated.
+        :param context: dict that will be passed to the templates, which need
+            to be updated.
         """
         def _update_content(name, input):
             """Change all the relatives paths of the input content to relatives
@@ -184,9 +184,12 @@ class Writer(object):

         def replacer(m):
             relative_path = m.group('path')
-            dest_path = os.path.normpath( os.sep.join( (get_relative_path(name),
-                "static", relative_path) ) )
-            return m.group('markup') + m.group('quote') + dest_path + m.group('quote')
+            dest_path = os.path.normpath(
+                    os.sep.join((get_relative_path(name), "static",
+                        relative_path)))
+
+            return m.group('markup') + m.group('quote') + dest_path \
+                    + m.group('quote')

         return hrefs.sub(replacer, content)
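The replacer() hunk above rewrites quoted paths in generated markup so that they point at the static directory relative to the page being written. The `hrefs` pattern itself sits outside the hunk; the regex below is only a guess that supplies the three named groups the code relies on (markup, quote, path), and the relative prefix is passed in where pelican uses get_relative_path(name):

    import os
    import re

    hrefs = re.compile(r'(?P<markup><a\s+href=|<img\s+src=)(?P<quote>["\'])'
                       r'(?P<path>[^"\']+)(?P=quote)')


    def replacer(m, relative_prefix='..'):
        # A page one directory below the site root gets a '..' prefix.
        dest_path = os.path.normpath(
            os.sep.join((relative_prefix, 'static', m.group('path'))))
        return m.group('markup') + m.group('quote') + dest_path \
            + m.group('quote')


    content = '<img src="images/logo.png"> and <a href="docs/notes.txt">'
    print(hrefs.sub(replacer, content))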