Mirror of https://github.com/getpelican/pelican.git (synced 2025-10-15 20:28:56 +02:00)

Apply code style to project via: ruff format .

parent 8ea27b82f6
commit cabdb26cee

41 changed files with 6505 additions and 5163 deletions
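
Note on the change itself: ruff is a Python linter/formatter, and `ruff format` is its Black-compatible formatter, so this commit is purely mechanical and changes no runtime behavior. A minimal sketch of the kind of rewrite it applies, mirroring the hunks below (illustrative only, not an exhaustive list of its rules):

    # Before: single-quoted strings, hand-aligned continuation lines
    LINKS = (('Biologeek', 'http://biologeek.org'),
             ('Filyb', "http://filyb.info/"),)

    # After running `ruff format .`: double quotes, one element per
    # line, and a trailing comma that keeps the block exploded
    LINKS = (
        ("Biologeek", "http://biologeek.org"),
        ("Filyb", "http://filyb.info/"),
    )

These few rules (quote normalization, exploding calls and literals that exceed the line length, collapsing ones that fit) account for nearly every hunk that follows.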
@@ -1,43 +1,47 @@
-AUTHOR = 'Alexis Métaireau'
+AUTHOR = "Alexis Métaireau"
 SITENAME = "Alexis' log"
-SITEURL = 'http://blog.notmyidea.org'
-TIMEZONE = 'UTC'
+SITEURL = "http://blog.notmyidea.org"
+TIMEZONE = "UTC"

-GITHUB_URL = 'http://github.com/ametaireau/'
+GITHUB_URL = "http://github.com/ametaireau/"
 DISQUS_SITENAME = "blog-notmyidea"
 PDF_GENERATOR = False
 REVERSE_CATEGORY_ORDER = True
 DEFAULT_PAGINATION = 2

-FEED_RSS = 'feeds/all.rss.xml'
-CATEGORY_FEED_RSS = 'feeds/{slug}.rss.xml'
+FEED_RSS = "feeds/all.rss.xml"
+CATEGORY_FEED_RSS = "feeds/{slug}.rss.xml"

-LINKS = (('Biologeek', 'http://biologeek.org'),
-         ('Filyb', "http://filyb.info/"),
-         ('Libert-fr', "http://www.libert-fr.com"),
-         ('N1k0', "http://prendreuncafe.com/blog/"),
-         ('Tarek Ziadé', "http://ziade.org/blog"),
-         ('Zubin Mithra', "http://zubin71.wordpress.com/"),)
+LINKS = (
+    ("Biologeek", "http://biologeek.org"),
+    ("Filyb", "http://filyb.info/"),
+    ("Libert-fr", "http://www.libert-fr.com"),
+    ("N1k0", "http://prendreuncafe.com/blog/"),
+    ("Tarek Ziadé", "http://ziade.org/blog"),
+    ("Zubin Mithra", "http://zubin71.wordpress.com/"),
+)

-SOCIAL = (('twitter', 'http://twitter.com/ametaireau'),
-          ('lastfm', 'http://lastfm.com/user/akounet'),
-          ('github', 'http://github.com/ametaireau'),)
+SOCIAL = (
+    ("twitter", "http://twitter.com/ametaireau"),
+    ("lastfm", "http://lastfm.com/user/akounet"),
+    ("github", "http://github.com/ametaireau"),
+)

 # global metadata to all the contents
-DEFAULT_METADATA = {'yeah': 'it is'}
+DEFAULT_METADATA = {"yeah": "it is"}

 # path-specific metadata
 EXTRA_PATH_METADATA = {
-    'extra/robots.txt': {'path': 'robots.txt'},
+    "extra/robots.txt": {"path": "robots.txt"},
 }

 # static paths will be copied without parsing their contents
 STATIC_PATHS = [
-    'pictures',
-    'extra/robots.txt',
+    "pictures",
+    "extra/robots.txt",
 ]

-FORMATTED_FIELDS = ['summary', 'custom_formatted_field']
+FORMATTED_FIELDS = ["summary", "custom_formatted_field"]

 # foobar will not be used, because it's not in caps. All configuration keys
 # have to be in caps

@@ -1,4 +1,4 @@
-NAME = 'namespace plugin'
+NAME = "namespace plugin"


 def register():

@@ -16,7 +16,10 @@ from pelican.contents import Article
 from pelican.readers import default_metadata
 from pelican.settings import DEFAULT_CONFIG

-__all__ = ['get_article', 'unittest', ]
+__all__ = [
+    "get_article",
+    "unittest",
+]


 @contextmanager
@@ -51,7 +54,7 @@ def isplit(s, sep=None):
     True

     """
-    sep, hardsep = r'\s+' if sep is None else re.escape(sep), sep is not None
+    sep, hardsep = r"\s+" if sep is None else re.escape(sep), sep is not None
     exp, pos, length = re.compile(sep), 0, len(s)
     while True:
         m = exp.search(s, pos)
@@ -89,10 +92,8 @@ def mute(returns_output=False):
     """

     def decorator(func):
-
         @wraps(func)
         def wrapper(*args, **kwargs):
-
             saved_stdout = sys.stdout
             sys.stdout = StringIO()

@@ -112,7 +113,7 @@ def mute(returns_output=False):

 def get_article(title, content, **extra_metadata):
     metadata = default_metadata(settings=DEFAULT_CONFIG)
-    metadata['title'] = title
+    metadata["title"] = title
     if extra_metadata:
         metadata.update(extra_metadata)
     return Article(content, metadata=metadata)
@@ -125,14 +126,14 @@ def skipIfNoExecutable(executable):
     and skips the tests if not found (if subprocess raises a `OSError`).
     """

-    with open(os.devnull, 'w') as fnull:
+    with open(os.devnull, "w") as fnull:
         try:
             res = subprocess.call(executable, stdout=fnull, stderr=fnull)
         except OSError:
             res = None

     if res is None:
-        return unittest.skip('{} executable not found'.format(executable))
+        return unittest.skip("{} executable not found".format(executable))

     return lambda func: func

@@ -164,10 +165,7 @@ def can_symlink():
     res = True
     try:
         with temporary_folder() as f:
-            os.symlink(
-                f,
-                os.path.join(f, 'symlink')
-            )
+            os.symlink(f, os.path.join(f, "symlink"))
     except OSError:
         res = False
     return res
@@ -186,9 +184,9 @@ def get_settings(**kwargs):

 def get_context(settings=None, **kwargs):
     context = settings.copy() if settings else {}
-    context['generated_content'] = {}
-    context['static_links'] = set()
-    context['static_content'] = {}
+    context["generated_content"] = {}
+    context["static_links"] = set()
+    context["static_content"] = {}
     context.update(kwargs)
     return context

@@ -200,22 +198,24 @@ class LogCountHandler(BufferingHandler):
         super().__init__(capacity)

     def count_logs(self, msg=None, level=None):
-        return len([
-            rec
-            for rec
-            in self.buffer
-            if (msg is None or re.match(msg, rec.getMessage())) and
-            (level is None or rec.levelno == level)
-        ])
+        return len(
+            [
+                rec
+                for rec in self.buffer
+                if (msg is None or re.match(msg, rec.getMessage()))
+                and (level is None or rec.levelno == level)
+            ]
+        )

     def count_formatted_logs(self, msg=None, level=None):
-        return len([
-            rec
-            for rec
-            in self.buffer
-            if (msg is None or re.search(msg, self.format(rec))) and
-            (level is None or rec.levelno == level)
-        ])
+        return len(
+            [
+                rec
+                for rec in self.buffer
+                if (msg is None or re.search(msg, self.format(rec)))
+                and (level is None or rec.levelno == level)
+            ]
+        )


 def diff_subproc(first, second):
@@ -228,8 +228,16 @@ def diff_subproc(first, second):
     >>> didCheckFail = proc.returnCode != 0
     """
     return subprocess.Popen(
-        ['git', '--no-pager', 'diff', '--no-ext-diff', '--exit-code',
-         '-w', first, second],
+        [
+            "git",
+            "--no-pager",
+            "diff",
+            "--no-ext-diff",
+            "--exit-code",
+            "-w",
+            first,
+            second,
+        ],
         stdout=subprocess.PIPE,
         stderr=subprocess.PIPE,
         text=True,
@@ -251,9 +259,12 @@ class LoggedTestCase(unittest.TestCase):
     def assertLogCountEqual(self, count=None, msg=None, **kwargs):
         actual = self._logcount_handler.count_logs(msg=msg, **kwargs)
         self.assertEqual(
-            actual, count,
-            msg='expected {} occurrences of {!r}, but found {}'.format(
-                count, msg, actual))
+            actual,
+            count,
+            msg="expected {} occurrences of {!r}, but found {}".format(
+                count, msg, actual
+            ),
+        )


 class TestCaseWithCLocale(unittest.TestCase):
@@ -261,9 +272,10 @@ class TestCaseWithCLocale(unittest.TestCase):

     Use utils.temporary_locale if you want a context manager ("with" statement).
     """
+
     def setUp(self):
         self.old_locale = locale.setlocale(locale.LC_ALL)
-        locale.setlocale(locale.LC_ALL, 'C')
+        locale.setlocale(locale.LC_ALL, "C")

     def tearDown(self):
         locale.setlocale(locale.LC_ALL, self.old_locale)

@@ -8,31 +8,30 @@ from pelican.tests.support import get_context, get_settings, unittest


 CUR_DIR = os.path.dirname(__file__)
-CONTENT_DIR = os.path.join(CUR_DIR, 'content')
+CONTENT_DIR = os.path.join(CUR_DIR, "content")


 class TestCache(unittest.TestCase):
-
     def setUp(self):
-        self.temp_cache = mkdtemp(prefix='pelican_cache.')
+        self.temp_cache = mkdtemp(prefix="pelican_cache.")

     def tearDown(self):
         rmtree(self.temp_cache)

     def _get_cache_enabled_settings(self):
         settings = get_settings()
-        settings['CACHE_CONTENT'] = True
-        settings['LOAD_CONTENT_CACHE'] = True
-        settings['CACHE_PATH'] = self.temp_cache
+        settings["CACHE_CONTENT"] = True
+        settings["LOAD_CONTENT_CACHE"] = True
+        settings["CACHE_PATH"] = self.temp_cache
         return settings

     def test_generator_caching(self):
         """Test that cached and uncached content is same in generator level"""
         settings = self._get_cache_enabled_settings()
-        settings['CONTENT_CACHING_LAYER'] = 'generator'
-        settings['PAGE_PATHS'] = ['TestPages']
-        settings['DEFAULT_DATE'] = (1970, 1, 1)
-        settings['READERS'] = {'asc': None}
+        settings["CONTENT_CACHING_LAYER"] = "generator"
+        settings["PAGE_PATHS"] = ["TestPages"]
+        settings["DEFAULT_DATE"] = (1970, 1, 1)
+        settings["READERS"] = {"asc": None}
         context = get_context(settings)

         def sorted_titles(items):
@@ -40,15 +39,23 @@ class TestCache(unittest.TestCase):

         # Articles
         generator = ArticlesGenerator(
-            context=context.copy(), settings=settings,
-            path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CONTENT_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.generate_context()
         uncached_articles = sorted_titles(generator.articles)
         uncached_drafts = sorted_titles(generator.drafts)

         generator = ArticlesGenerator(
-            context=context.copy(), settings=settings,
-            path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CONTENT_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.generate_context()
         cached_articles = sorted_titles(generator.articles)
         cached_drafts = sorted_titles(generator.drafts)
@@ -58,16 +65,24 @@ class TestCache(unittest.TestCase):

         # Pages
         generator = PagesGenerator(
-            context=context.copy(), settings=settings,
-            path=CUR_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CUR_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.generate_context()
         uncached_pages = sorted_titles(generator.pages)
         uncached_hidden_pages = sorted_titles(generator.hidden_pages)
         uncached_draft_pages = sorted_titles(generator.draft_pages)

         generator = PagesGenerator(
-            context=context.copy(), settings=settings,
-            path=CUR_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CUR_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.generate_context()
         cached_pages = sorted_titles(generator.pages)
         cached_hidden_pages = sorted_titles(generator.hidden_pages)
@@ -80,10 +95,10 @@ class TestCache(unittest.TestCase):
     def test_reader_caching(self):
         """Test that cached and uncached content is same in reader level"""
         settings = self._get_cache_enabled_settings()
-        settings['CONTENT_CACHING_LAYER'] = 'reader'
-        settings['PAGE_PATHS'] = ['TestPages']
-        settings['DEFAULT_DATE'] = (1970, 1, 1)
-        settings['READERS'] = {'asc': None}
+        settings["CONTENT_CACHING_LAYER"] = "reader"
+        settings["PAGE_PATHS"] = ["TestPages"]
+        settings["DEFAULT_DATE"] = (1970, 1, 1)
+        settings["READERS"] = {"asc": None}
         context = get_context(settings)

         def sorted_titles(items):
@@ -91,15 +106,23 @@ class TestCache(unittest.TestCase):

         # Articles
         generator = ArticlesGenerator(
-            context=context.copy(), settings=settings,
-            path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CONTENT_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.generate_context()
         uncached_articles = sorted_titles(generator.articles)
         uncached_drafts = sorted_titles(generator.drafts)

         generator = ArticlesGenerator(
-            context=context.copy(), settings=settings,
-            path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CONTENT_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.generate_context()
         cached_articles = sorted_titles(generator.articles)
         cached_drafts = sorted_titles(generator.drafts)
@@ -109,15 +132,23 @@ class TestCache(unittest.TestCase):

         # Pages
         generator = PagesGenerator(
-            context=context.copy(), settings=settings,
-            path=CUR_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CUR_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.generate_context()
         uncached_pages = sorted_titles(generator.pages)
         uncached_hidden_pages = sorted_titles(generator.hidden_pages)

         generator = PagesGenerator(
-            context=context.copy(), settings=settings,
-            path=CUR_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CUR_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.generate_context()
         cached_pages = sorted_titles(generator.pages)
         cached_hidden_pages = sorted_titles(generator.hidden_pages)
@@ -128,20 +159,28 @@ class TestCache(unittest.TestCase):
     def test_article_object_caching(self):
         """Test Article objects caching at the generator level"""
         settings = self._get_cache_enabled_settings()
-        settings['CONTENT_CACHING_LAYER'] = 'generator'
-        settings['DEFAULT_DATE'] = (1970, 1, 1)
-        settings['READERS'] = {'asc': None}
+        settings["CONTENT_CACHING_LAYER"] = "generator"
+        settings["DEFAULT_DATE"] = (1970, 1, 1)
+        settings["READERS"] = {"asc": None}
         context = get_context(settings)

         generator = ArticlesGenerator(
-            context=context.copy(), settings=settings,
-            path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CONTENT_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.generate_context()
-        self.assertTrue(hasattr(generator, '_cache'))
+        self.assertTrue(hasattr(generator, "_cache"))

         generator = ArticlesGenerator(
-            context=context.copy(), settings=settings,
-            path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CONTENT_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.readers.read_file = MagicMock()
         generator.generate_context()
         """
@@ -158,18 +197,26 @@ class TestCache(unittest.TestCase):
     def test_article_reader_content_caching(self):
         """Test raw article content caching at the reader level"""
         settings = self._get_cache_enabled_settings()
-        settings['READERS'] = {'asc': None}
+        settings["READERS"] = {"asc": None}
         context = get_context(settings)

         generator = ArticlesGenerator(
-            context=context.copy(), settings=settings,
-            path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CONTENT_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.generate_context()
-        self.assertTrue(hasattr(generator.readers, '_cache'))
+        self.assertTrue(hasattr(generator.readers, "_cache"))

         generator = ArticlesGenerator(
-            context=context.copy(), settings=settings,
-            path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CONTENT_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         readers = generator.readers.readers
         for reader in readers.values():
             reader.read = MagicMock()
@@ -182,44 +229,58 @@ class TestCache(unittest.TestCase):

        used in --ignore-cache or autoreload mode"""
         settings = self._get_cache_enabled_settings()
-        settings['READERS'] = {'asc': None}
+        settings["READERS"] = {"asc": None}
         context = get_context(settings)

         generator = ArticlesGenerator(
-            context=context.copy(), settings=settings,
-            path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CONTENT_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.readers.read_file = MagicMock()
         generator.generate_context()
-        self.assertTrue(hasattr(generator, '_cache_open'))
+        self.assertTrue(hasattr(generator, "_cache_open"))
         orig_call_count = generator.readers.read_file.call_count

-        settings['LOAD_CONTENT_CACHE'] = False
+        settings["LOAD_CONTENT_CACHE"] = False
         generator = ArticlesGenerator(
-            context=context.copy(), settings=settings,
-            path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CONTENT_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.readers.read_file = MagicMock()
         generator.generate_context()
-        self.assertEqual(
-            generator.readers.read_file.call_count,
-            orig_call_count)
+        self.assertEqual(generator.readers.read_file.call_count, orig_call_count)

     def test_page_object_caching(self):
         """Test Page objects caching at the generator level"""
         settings = self._get_cache_enabled_settings()
-        settings['CONTENT_CACHING_LAYER'] = 'generator'
-        settings['PAGE_PATHS'] = ['TestPages']
-        settings['READERS'] = {'asc': None}
+        settings["CONTENT_CACHING_LAYER"] = "generator"
+        settings["PAGE_PATHS"] = ["TestPages"]
+        settings["READERS"] = {"asc": None}
         context = get_context(settings)

         generator = PagesGenerator(
-            context=context.copy(), settings=settings,
-            path=CUR_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CUR_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.generate_context()
-        self.assertTrue(hasattr(generator, '_cache'))
+        self.assertTrue(hasattr(generator, "_cache"))

         generator = PagesGenerator(
-            context=context.copy(), settings=settings,
-            path=CUR_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CUR_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.readers.read_file = MagicMock()
         generator.generate_context()
         """
@@ -231,19 +292,27 @@ class TestCache(unittest.TestCase):
     def test_page_reader_content_caching(self):
         """Test raw page content caching at the reader level"""
         settings = self._get_cache_enabled_settings()
-        settings['PAGE_PATHS'] = ['TestPages']
-        settings['READERS'] = {'asc': None}
+        settings["PAGE_PATHS"] = ["TestPages"]
+        settings["READERS"] = {"asc": None}
         context = get_context(settings)

         generator = PagesGenerator(
-            context=context.copy(), settings=settings,
-            path=CUR_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CUR_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.generate_context()
-        self.assertTrue(hasattr(generator.readers, '_cache'))
+        self.assertTrue(hasattr(generator.readers, "_cache"))

         generator = PagesGenerator(
-            context=context.copy(), settings=settings,
-            path=CUR_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CUR_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         readers = generator.readers.readers
         for reader in readers.values():
             reader.read = MagicMock()
@@ -256,24 +325,30 @@ class TestCache(unittest.TestCase):

        used in --ignore_cache or autoreload mode"""
         settings = self._get_cache_enabled_settings()
-        settings['PAGE_PATHS'] = ['TestPages']
-        settings['READERS'] = {'asc': None}
+        settings["PAGE_PATHS"] = ["TestPages"]
+        settings["READERS"] = {"asc": None}
         context = get_context(settings)

         generator = PagesGenerator(
-            context=context.copy(), settings=settings,
-            path=CUR_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CUR_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.readers.read_file = MagicMock()
         generator.generate_context()
-        self.assertTrue(hasattr(generator, '_cache_open'))
+        self.assertTrue(hasattr(generator, "_cache_open"))
         orig_call_count = generator.readers.read_file.call_count

-        settings['LOAD_CONTENT_CACHE'] = False
+        settings["LOAD_CONTENT_CACHE"] = False
         generator = PagesGenerator(
-            context=context.copy(), settings=settings,
-            path=CUR_DIR, theme=settings['THEME'], output_path=None)
+            context=context.copy(),
+            settings=settings,
+            path=CUR_DIR,
+            theme=settings["THEME"],
+            output_path=None,
+        )
         generator.readers.read_file = MagicMock()
         generator.generate_context()
-        self.assertEqual(
-            generator.readers.read_file.call_count,
-            orig_call_count)
+        self.assertEqual(generator.readers.read_file.call_count, orig_call_count)

@@ -5,68 +5,77 @@ from pelican import get_config, parse_arguments

 class TestParseOverrides(unittest.TestCase):
     def test_flags(self):
-        for flag in ['-e', '--extra-settings']:
-            args = parse_arguments([flag, 'k=1'])
-            self.assertDictEqual(args.overrides, {'k': 1})
+        for flag in ["-e", "--extra-settings"]:
+            args = parse_arguments([flag, "k=1"])
+            self.assertDictEqual(args.overrides, {"k": 1})

     def test_parse_multiple_items(self):
-        args = parse_arguments('-e k1=1 k2=2'.split())
-        self.assertDictEqual(args.overrides, {'k1': 1, 'k2': 2})
+        args = parse_arguments("-e k1=1 k2=2".split())
+        self.assertDictEqual(args.overrides, {"k1": 1, "k2": 2})

     def test_parse_valid_json(self):
         json_values_python_values_map = {
-            '""': '',
-            'null': None,
-            '"string"': 'string',
-            '["foo", 12, "4", {}]': ['foo', 12, '4', {}]
+            '""': "",
+            "null": None,
+            '"string"': "string",
+            '["foo", 12, "4", {}]': ["foo", 12, "4", {}],
         }
         for k, v in json_values_python_values_map.items():
-            args = parse_arguments(['-e', 'k=' + k])
-            self.assertDictEqual(args.overrides, {'k': v})
+            args = parse_arguments(["-e", "k=" + k])
+            self.assertDictEqual(args.overrides, {"k": v})

     def test_parse_invalid_syntax(self):
-        invalid_items = ['k= 1', 'k =1', 'k', 'k v']
+        invalid_items = ["k= 1", "k =1", "k", "k v"]
         for item in invalid_items:
             with self.assertRaises(ValueError):
-                parse_arguments(f'-e {item}'.split())
+                parse_arguments(f"-e {item}".split())

     def test_parse_invalid_json(self):
         invalid_json = {
-            '', 'False', 'True', 'None', 'some other string',
-            '{"foo": bar}', '[foo]'
+            "",
+            "False",
+            "True",
+            "None",
+            "some other string",
+            '{"foo": bar}',
+            "[foo]",
         }
         for v in invalid_json:
             with self.assertRaises(ValueError):
-                parse_arguments(['-e ', 'k=' + v])
+                parse_arguments(["-e ", "k=" + v])


 class TestGetConfigFromArgs(unittest.TestCase):
     def test_overrides_known_keys(self):
-        args = parse_arguments([
-            '-e',
-            'DELETE_OUTPUT_DIRECTORY=false',
-            'OUTPUT_RETENTION=["1.txt"]',
-            'SITENAME="Title"'
-        ])
+        args = parse_arguments(
+            [
+                "-e",
+                "DELETE_OUTPUT_DIRECTORY=false",
+                'OUTPUT_RETENTION=["1.txt"]',
+                'SITENAME="Title"',
+            ]
+        )
         config = get_config(args)
         config_must_contain = {
-            'DELETE_OUTPUT_DIRECTORY': False,
-            'OUTPUT_RETENTION': ['1.txt'],
-            'SITENAME': 'Title'
+            "DELETE_OUTPUT_DIRECTORY": False,
+            "OUTPUT_RETENTION": ["1.txt"],
+            "SITENAME": "Title",
         }
         self.assertDictEqual(config, {**config, **config_must_contain})

     def test_overrides_non_default_type(self):
-        args = parse_arguments([
-            '-e',
-            'DISPLAY_PAGES_ON_MENU=123',
-            'PAGE_TRANSLATION_ID=null',
-            'TRANSLATION_FEED_RSS_URL="someurl"'
-        ])
+        args = parse_arguments(
+            [
+                "-e",
+                "DISPLAY_PAGES_ON_MENU=123",
+                "PAGE_TRANSLATION_ID=null",
+                'TRANSLATION_FEED_RSS_URL="someurl"',
+            ]
+        )
         config = get_config(args)
         config_must_contain = {
-            'DISPLAY_PAGES_ON_MENU': 123,
-            'PAGE_TRANSLATION_ID': None,
-            'TRANSLATION_FEED_RSS_URL': 'someurl'
+            "DISPLAY_PAGES_ON_MENU": 123,
+            "PAGE_TRANSLATION_ID": None,
+            "TRANSLATION_FEED_RSS_URL": "someurl",
         }
         self.assertDictEqual(config, {**config, **config_must_contain})

File diff suppressed because it is too large
File diff suppressed because it is too large

@@ -4,26 +4,35 @@ from posixpath import join as posix_join
 from unittest.mock import patch

 from pelican.settings import DEFAULT_CONFIG
-from pelican.tests.support import (mute, skipIfNoExecutable, temporary_folder,
-                                   unittest, TestCaseWithCLocale)
-from pelican.tools.pelican_import import (blogger2fields, build_header,
-                                          build_markdown_header,
-                                          decode_wp_content,
-                                          download_attachments, fields2pelican,
-                                          get_attachments, tumblr2fields,
-                                          wp2fields,
-                                          )
+from pelican.tests.support import (
+    mute,
+    skipIfNoExecutable,
+    temporary_folder,
+    unittest,
+    TestCaseWithCLocale,
+)
+from pelican.tools.pelican_import import (
+    blogger2fields,
+    build_header,
+    build_markdown_header,
+    decode_wp_content,
+    download_attachments,
+    fields2pelican,
+    get_attachments,
+    tumblr2fields,
+    wp2fields,
+)
 from pelican.utils import path_to_file_url, slugify

 CUR_DIR = os.path.abspath(os.path.dirname(__file__))
-BLOGGER_XML_SAMPLE = os.path.join(CUR_DIR, 'content', 'bloggerexport.xml')
-WORDPRESS_XML_SAMPLE = os.path.join(CUR_DIR, 'content', 'wordpressexport.xml')
-WORDPRESS_ENCODED_CONTENT_SAMPLE = os.path.join(CUR_DIR,
-                                                'content',
-                                                'wordpress_content_encoded')
-WORDPRESS_DECODED_CONTENT_SAMPLE = os.path.join(CUR_DIR,
-                                                'content',
-                                                'wordpress_content_decoded')
+BLOGGER_XML_SAMPLE = os.path.join(CUR_DIR, "content", "bloggerexport.xml")
+WORDPRESS_XML_SAMPLE = os.path.join(CUR_DIR, "content", "wordpressexport.xml")
+WORDPRESS_ENCODED_CONTENT_SAMPLE = os.path.join(
+    CUR_DIR, "content", "wordpress_content_encoded"
+)
+WORDPRESS_DECODED_CONTENT_SAMPLE = os.path.join(
+    CUR_DIR, "content", "wordpress_content_decoded"
+)

 try:
     from bs4 import BeautifulSoup
@@ -36,10 +45,9 @@ except ImportError:
     LXML = False


-@skipIfNoExecutable(['pandoc', '--version'])
-@unittest.skipUnless(BeautifulSoup, 'Needs BeautifulSoup module')
+@skipIfNoExecutable(["pandoc", "--version"])
+@unittest.skipUnless(BeautifulSoup, "Needs BeautifulSoup module")
 class TestBloggerXmlImporter(TestCaseWithCLocale):
-
     def setUp(self):
         super().setUp()
         self.posts = blogger2fields(BLOGGER_XML_SAMPLE)
@@ -50,16 +58,17 @@ class TestBloggerXmlImporter(TestCaseWithCLocale):
         """
         test_posts = list(self.posts)
         kinds = {x[8] for x in test_posts}
-        self.assertEqual({'page', 'article', 'comment'}, kinds)
-        page_titles = {x[0] for x in test_posts if x[8] == 'page'}
-        self.assertEqual({'Test page', 'Test page 2'}, page_titles)
-        article_titles = {x[0] for x in test_posts if x[8] == 'article'}
-        self.assertEqual({'Black as Egypt\'s Night', 'The Steel Windpipe'},
-                         article_titles)
-        comment_titles = {x[0] for x in test_posts if x[8] == 'comment'}
-        self.assertEqual({'Mishka, always a pleasure to read your '
-                          'adventures!...'},
-                         comment_titles)
+        self.assertEqual({"page", "article", "comment"}, kinds)
+        page_titles = {x[0] for x in test_posts if x[8] == "page"}
+        self.assertEqual({"Test page", "Test page 2"}, page_titles)
+        article_titles = {x[0] for x in test_posts if x[8] == "article"}
+        self.assertEqual(
+            {"Black as Egypt's Night", "The Steel Windpipe"}, article_titles
+        )
+        comment_titles = {x[0] for x in test_posts if x[8] == "comment"}
+        self.assertEqual(
+            {"Mishka, always a pleasure to read your " "adventures!..."}, comment_titles
+        )

     def test_recognise_status_with_correct_filename(self):
         """Check that importerer outputs only statuses 'published' and 'draft',
@@ -67,24 +76,25 @@ class TestBloggerXmlImporter(TestCaseWithCLocale):
         """
         test_posts = list(self.posts)
         statuses = {x[7] for x in test_posts}
-        self.assertEqual({'published', 'draft'}, statuses)
+        self.assertEqual({"published", "draft"}, statuses)

-        draft_filenames = {x[2] for x in test_posts if x[7] == 'draft'}
+        draft_filenames = {x[2] for x in test_posts if x[7] == "draft"}
         # draft filenames are id-based
-        self.assertEqual({'page-4386962582497458967',
-                          'post-1276418104709695660'}, draft_filenames)
+        self.assertEqual(
+            {"page-4386962582497458967", "post-1276418104709695660"}, draft_filenames
+        )

-        published_filenames = {x[2] for x in test_posts if x[7] == 'published'}
+        published_filenames = {x[2] for x in test_posts if x[7] == "published"}
         # published filenames are url-based, except comments
-        self.assertEqual({'the-steel-windpipe',
-                          'test-page',
-                          'post-5590533389087749201'}, published_filenames)
+        self.assertEqual(
+            {"the-steel-windpipe", "test-page", "post-5590533389087749201"},
+            published_filenames,
+        )


-@skipIfNoExecutable(['pandoc', '--version'])
-@unittest.skipUnless(BeautifulSoup, 'Needs BeautifulSoup module')
+@skipIfNoExecutable(["pandoc", "--version"])
+@unittest.skipUnless(BeautifulSoup, "Needs BeautifulSoup module")
 class TestWordpressXmlImporter(TestCaseWithCLocale):
-
     def setUp(self):
         super().setUp()
         self.posts = wp2fields(WORDPRESS_XML_SAMPLE)
@@ -92,30 +102,49 @@ class TestWordpressXmlImporter(TestCaseWithCLocale):

     def test_ignore_empty_posts(self):
         self.assertTrue(self.posts)
-        for (title, content, fname, date, author,
-             categ, tags, status, kind, format) in self.posts:
+        for (
+            title,
+            content,
+            fname,
+            date,
+            author,
+            categ,
+            tags,
+            status,
+            kind,
+            format,
+        ) in self.posts:
             self.assertTrue(title.strip())

     def test_recognise_page_kind(self):
-        """ Check that we recognise pages in wordpress, as opposed to posts """
+        """Check that we recognise pages in wordpress, as opposed to posts"""
         self.assertTrue(self.posts)
         # Collect (title, filename, kind) of non-empty posts recognised as page
         pages_data = []
-        for (title, content, fname, date, author,
-             categ, tags, status, kind, format) in self.posts:
-            if kind == 'page':
+        for (
+            title,
+            content,
+            fname,
+            date,
+            author,
+            categ,
+            tags,
+            status,
+            kind,
+            format,
+        ) in self.posts:
+            if kind == "page":
                 pages_data.append((title, fname))
         self.assertEqual(2, len(pages_data))
-        self.assertEqual(('Page', 'contact'), pages_data[0])
-        self.assertEqual(('Empty Page', 'empty'), pages_data[1])
+        self.assertEqual(("Page", "contact"), pages_data[0])
+        self.assertEqual(("Empty Page", "empty"), pages_data[1])

     def test_dirpage_directive_for_page_kind(self):
         silent_f2p = mute(True)(fields2pelican)
         test_post = filter(lambda p: p[0].startswith("Empty Page"), self.posts)
         with temporary_folder() as temp:
-            fname = list(silent_f2p(test_post, 'markdown',
-                                    temp, dirpage=True))[0]
-            self.assertTrue(fname.endswith('pages%sempty.md' % os.path.sep))
+            fname = list(silent_f2p(test_post, "markdown", temp, dirpage=True))[0]
+            self.assertTrue(fname.endswith("pages%sempty.md" % os.path.sep))

     def test_dircat(self):
         silent_f2p = mute(True)(fields2pelican)
@@ -125,14 +154,13 @@ class TestWordpressXmlImporter(TestCaseWithCLocale):
             if len(post[5]) > 0:  # Has a category
                 test_posts.append(post)
         with temporary_folder() as temp:
-            fnames = list(silent_f2p(test_posts, 'markdown',
-                                     temp, dircat=True))
-        subs = DEFAULT_CONFIG['SLUG_REGEX_SUBSTITUTIONS']
+            fnames = list(silent_f2p(test_posts, "markdown", temp, dircat=True))
+        subs = DEFAULT_CONFIG["SLUG_REGEX_SUBSTITUTIONS"]
         index = 0
         for post in test_posts:
             name = post[2]
             category = slugify(post[5][0], regex_subs=subs, preserve_case=True)
-            name += '.md'
+            name += ".md"
             filename = os.path.join(category, name)
             out_name = fnames[index]
             self.assertTrue(out_name.endswith(filename))
@@ -141,9 +169,19 @@ class TestWordpressXmlImporter(TestCaseWithCLocale):
     def test_unless_custom_post_all_items_should_be_pages_or_posts(self):
         self.assertTrue(self.posts)
         pages_data = []
-        for (title, content, fname, date, author, categ,
-             tags, status, kind, format) in self.posts:
-            if kind == 'page' or kind == 'article':
+        for (
+            title,
+            content,
+            fname,
+            date,
+            author,
+            categ,
+            tags,
+            status,
+            kind,
+            format,
+        ) in self.posts:
+            if kind == "page" or kind == "article":
                 pass
             else:
                 pages_data.append((title, fname))
@@ -152,40 +190,45 @@ class TestWordpressXmlImporter(TestCaseWithCLocale):
     def test_recognise_custom_post_type(self):
         self.assertTrue(self.custposts)
         cust_data = []
-        for (title, content, fname, date, author, categ,
-             tags, status, kind, format) in self.custposts:
-            if kind == 'article' or kind == 'page':
+        for (
+            title,
+            content,
+            fname,
+            date,
+            author,
+            categ,
+            tags,
+            status,
+            kind,
+            format,
+        ) in self.custposts:
+            if kind == "article" or kind == "page":
                 pass
             else:
                 cust_data.append((title, kind))
         self.assertEqual(3, len(cust_data))
-        self.assertEqual(
-            ('A custom post in category 4', 'custom1'),
-            cust_data[0])
-        self.assertEqual(
-            ('A custom post in category 5', 'custom1'),
-            cust_data[1])
-        self.assertEqual(
-            ('A 2nd custom post type also in category 5', 'custom2'),
-            cust_data[2])
+        self.assertEqual(("A custom post in category 4", "custom1"), cust_data[0])
+        self.assertEqual(("A custom post in category 5", "custom1"), cust_data[1])
+        self.assertEqual(
+            ("A 2nd custom post type also in category 5", "custom2"), cust_data[2]
+        )

     def test_custom_posts_put_in_own_dir(self):
         silent_f2p = mute(True)(fields2pelican)
         test_posts = []
         for post in self.custposts:
             # check post kind
-            if post[8] == 'article' or post[8] == 'page':
+            if post[8] == "article" or post[8] == "page":
                 pass
             else:
                 test_posts.append(post)
         with temporary_folder() as temp:
-            fnames = list(silent_f2p(test_posts, 'markdown',
-                                     temp, wp_custpost=True))
+            fnames = list(silent_f2p(test_posts, "markdown", temp, wp_custpost=True))
         index = 0
         for post in test_posts:
             name = post[2]
             kind = post[8]
-            name += '.md'
+            name += ".md"
             filename = os.path.join(kind, name)
             out_name = fnames[index]
             self.assertTrue(out_name.endswith(filename))
@@ -196,20 +239,21 @@ class TestWordpressXmlImporter(TestCaseWithCLocale):
         test_posts = []
         for post in self.custposts:
             # check post kind
-            if post[8] == 'article' or post[8] == 'page':
+            if post[8] == "article" or post[8] == "page":
                 pass
             else:
                 test_posts.append(post)
         with temporary_folder() as temp:
-            fnames = list(silent_f2p(test_posts, 'markdown', temp,
-                                     wp_custpost=True, dircat=True))
-        subs = DEFAULT_CONFIG['SLUG_REGEX_SUBSTITUTIONS']
+            fnames = list(
+                silent_f2p(test_posts, "markdown", temp, wp_custpost=True, dircat=True)
+            )
+        subs = DEFAULT_CONFIG["SLUG_REGEX_SUBSTITUTIONS"]
         index = 0
         for post in test_posts:
             name = post[2]
             kind = post[8]
             category = slugify(post[5][0], regex_subs=subs, preserve_case=True)
-            name += '.md'
+            name += ".md"
             filename = os.path.join(kind, category, name)
             out_name = fnames[index]
             self.assertTrue(out_name.endswith(filename))
@@ -221,16 +265,19 @@ class TestWordpressXmlImporter(TestCaseWithCLocale):
         test_posts = []
         for post in self.custposts:
             # check post kind
-            if post[8] == 'page':
+            if post[8] == "page":
                 test_posts.append(post)
         with temporary_folder() as temp:
-            fnames = list(silent_f2p(test_posts, 'markdown', temp,
-                                     wp_custpost=True, dirpage=False))
+            fnames = list(
+                silent_f2p(
+                    test_posts, "markdown", temp, wp_custpost=True, dirpage=False
+                )
+            )
         index = 0
         for post in test_posts:
             name = post[2]
-            name += '.md'
-            filename = os.path.join('pages', name)
+            name += ".md"
+            filename = os.path.join("pages", name)
             out_name = fnames[index]
             self.assertFalse(out_name.endswith(filename))

|
@ -238,117 +285,114 @@ class TestWordpressXmlImporter(TestCaseWithCLocale):
|
|||
test_posts = list(self.posts)
|
||||
|
||||
def r(f):
|
||||
with open(f, encoding='utf-8') as infile:
|
||||
with open(f, encoding="utf-8") as infile:
|
||||
return infile.read()
|
||||
|
||||
silent_f2p = mute(True)(fields2pelican)
|
||||
|
||||
with temporary_folder() as temp:
|
||||
|
||||
rst_files = (r(f) for f
|
||||
in silent_f2p(test_posts, 'markdown', temp))
|
||||
self.assertTrue(any('<iframe' in rst for rst in rst_files))
|
||||
rst_files = (r(f) for f
|
||||
in silent_f2p(test_posts, 'markdown',
|
||||
temp, strip_raw=True))
|
||||
self.assertFalse(any('<iframe' in rst for rst in rst_files))
|
||||
rst_files = (r(f) for f in silent_f2p(test_posts, "markdown", temp))
|
||||
self.assertTrue(any("<iframe" in rst for rst in rst_files))
|
||||
rst_files = (
|
||||
r(f) for f in silent_f2p(test_posts, "markdown", temp, strip_raw=True)
|
||||
)
|
||||
self.assertFalse(any("<iframe" in rst for rst in rst_files))
|
||||
# no effect in rst
|
||||
rst_files = (r(f) for f in silent_f2p(test_posts, 'rst', temp))
|
||||
self.assertFalse(any('<iframe' in rst for rst in rst_files))
|
||||
rst_files = (r(f) for f in silent_f2p(test_posts, 'rst', temp,
|
||||
strip_raw=True))
|
||||
self.assertFalse(any('<iframe' in rst for rst in rst_files))
|
||||
rst_files = (r(f) for f in silent_f2p(test_posts, "rst", temp))
|
||||
self.assertFalse(any("<iframe" in rst for rst in rst_files))
|
||||
rst_files = (
|
||||
r(f) for f in silent_f2p(test_posts, "rst", temp, strip_raw=True)
|
||||
)
|
||||
self.assertFalse(any("<iframe" in rst for rst in rst_files))
|
||||
|
||||
def test_decode_html_entities_in_titles(self):
|
||||
test_posts = [post for post
|
||||
in self.posts if post[2] == 'html-entity-test']
|
||||
test_posts = [post for post in self.posts if post[2] == "html-entity-test"]
|
||||
self.assertEqual(len(test_posts), 1)
|
||||
|
||||
post = test_posts[0]
|
||||
title = post[0]
|
||||
self.assertTrue(title, "A normal post with some <html> entities in "
|
||||
"the title. You can't miss them.")
|
||||
self.assertNotIn('&', title)
|
||||
self.assertTrue(
|
||||
title,
|
||||
"A normal post with some <html> entities in "
|
||||
"the title. You can't miss them.",
|
||||
)
|
||||
self.assertNotIn("&", title)
|
||||
|
||||
def test_decode_wp_content_returns_empty(self):
|
||||
""" Check that given an empty string we return an empty string."""
|
||||
"""Check that given an empty string we return an empty string."""
|
||||
self.assertEqual(decode_wp_content(""), "")
|
||||
|
||||
def test_decode_wp_content(self):
|
||||
""" Check that we can decode a wordpress content string."""
|
||||
"""Check that we can decode a wordpress content string."""
|
||||
with open(WORDPRESS_ENCODED_CONTENT_SAMPLE) as encoded_file:
|
||||
encoded_content = encoded_file.read()
|
||||
with open(WORDPRESS_DECODED_CONTENT_SAMPLE) as decoded_file:
|
||||
decoded_content = decoded_file.read()
|
||||
self.assertEqual(
|
||||
decode_wp_content(encoded_content, br=False),
|
||||
decoded_content)
|
||||
decode_wp_content(encoded_content, br=False), decoded_content
|
||||
)
|
||||
|
||||
def test_preserve_verbatim_formatting(self):
|
||||
def r(f):
|
||||
with open(f, encoding='utf-8') as infile:
|
||||
with open(f, encoding="utf-8") as infile:
|
||||
return infile.read()
|
||||
silent_f2p = mute(True)(fields2pelican)
|
||||
test_post = filter(
|
||||
lambda p: p[0].startswith("Code in List"),
|
||||
self.posts)
|
||||
with temporary_folder() as temp:
|
||||
md = [r(f) for f in silent_f2p(test_post, 'markdown', temp)][0]
|
||||
self.assertTrue(re.search(r'\s+a = \[1, 2, 3\]', md))
|
||||
self.assertTrue(re.search(r'\s+b = \[4, 5, 6\]', md))
|
||||
|
||||
for_line = re.search(r'\s+for i in zip\(a, b\):', md).group(0)
|
||||
print_line = re.search(r'\s+print i', md).group(0)
|
||||
self.assertTrue(
|
||||
for_line.rindex('for') < print_line.rindex('print'))
|
||||
silent_f2p = mute(True)(fields2pelican)
|
||||
test_post = filter(lambda p: p[0].startswith("Code in List"), self.posts)
|
||||
with temporary_folder() as temp:
|
||||
md = [r(f) for f in silent_f2p(test_post, "markdown", temp)][0]
|
||||
self.assertTrue(re.search(r"\s+a = \[1, 2, 3\]", md))
|
||||
self.assertTrue(re.search(r"\s+b = \[4, 5, 6\]", md))
|
||||
|
||||
for_line = re.search(r"\s+for i in zip\(a, b\):", md).group(0)
|
||||
print_line = re.search(r"\s+print i", md).group(0)
|
||||
self.assertTrue(for_line.rindex("for") < print_line.rindex("print"))
|
||||
|
||||
def test_code_in_list(self):
|
||||
def r(f):
|
||||
with open(f, encoding='utf-8') as infile:
|
||||
with open(f, encoding="utf-8") as infile:
|
||||
return infile.read()
|
||||
|
||||
silent_f2p = mute(True)(fields2pelican)
|
||||
test_post = filter(
|
||||
lambda p: p[0].startswith("Code in List"),
|
||||
self.posts)
|
||||
test_post = filter(lambda p: p[0].startswith("Code in List"), self.posts)
|
||||
with temporary_folder() as temp:
|
||||
md = [r(f) for f in silent_f2p(test_post, 'markdown', temp)][0]
|
||||
sample_line = re.search(r'- This is a code sample', md).group(0)
|
||||
code_line = re.search(r'\s+a = \[1, 2, 3\]', md).group(0)
|
||||
self.assertTrue(sample_line.rindex('This') < code_line.rindex('a'))
|
||||
md = [r(f) for f in silent_f2p(test_post, "markdown", temp)][0]
|
||||
sample_line = re.search(r"- This is a code sample", md).group(0)
|
||||
code_line = re.search(r"\s+a = \[1, 2, 3\]", md).group(0)
|
||||
self.assertTrue(sample_line.rindex("This") < code_line.rindex("a"))
|
||||
|
||||
def test_dont_use_smart_quotes(self):
|
||||
def r(f):
|
||||
with open(f, encoding='utf-8') as infile:
|
||||
with open(f, encoding="utf-8") as infile:
|
||||
return infile.read()
|
||||
|
||||
silent_f2p = mute(True)(fields2pelican)
|
||||
test_post = filter(
|
||||
lambda p: p[0].startswith("Post with raw data"),
|
||||
self.posts)
|
||||
test_post = filter(lambda p: p[0].startswith("Post with raw data"), self.posts)
|
||||
with temporary_folder() as temp:
|
||||
md = [r(f) for f in silent_f2p(test_post, 'markdown', temp)][0]
|
||||
md = [r(f) for f in silent_f2p(test_post, "markdown", temp)][0]
|
||||
escaped_quotes = re.search(r'\\[\'"“”‘’]', md)
|
||||
self.assertFalse(escaped_quotes)
|
||||
|
||||
def test_convert_caption_to_figure(self):
|
||||
def r(f):
|
||||
with open(f, encoding='utf-8') as infile:
|
||||
with open(f, encoding="utf-8") as infile:
|
||||
return infile.read()
|
||||
silent_f2p = mute(True)(fields2pelican)
|
||||
test_post = filter(
|
||||
lambda p: p[0].startswith("Caption on image"),
|
||||
self.posts)
|
||||
with temporary_folder() as temp:
|
||||
md = [r(f) for f in silent_f2p(test_post, 'markdown', temp)][0]
|
||||
|
||||
caption = re.search(r'\[caption', md)
|
||||
silent_f2p = mute(True)(fields2pelican)
|
||||
test_post = filter(lambda p: p[0].startswith("Caption on image"), self.posts)
|
||||
with temporary_folder() as temp:
|
||||
md = [r(f) for f in silent_f2p(test_post, "markdown", temp)][0]
|
||||
|
||||
caption = re.search(r"\[caption", md)
|
||||
self.assertFalse(caption)
|
||||
|
||||
for occurence in [
|
||||
'/theme/img/xpelican.png.pagespeed.ic.Rjep0025-y.png',
|
||||
'/theme/img/xpelican-3.png.pagespeed.ic.m-NAIdRCOM.png',
|
||||
'/theme/img/xpelican.png.pagespeed.ic.Rjep0025-y.png',
|
||||
'This is a pelican',
|
||||
'This also a pelican',
|
||||
'Yet another pelican',
|
||||
"/theme/img/xpelican.png.pagespeed.ic.Rjep0025-y.png",
|
||||
"/theme/img/xpelican-3.png.pagespeed.ic.m-NAIdRCOM.png",
|
||||
"/theme/img/xpelican.png.pagespeed.ic.Rjep0025-y.png",
|
||||
"This is a pelican",
|
||||
"This also a pelican",
|
||||
"Yet another pelican",
|
||||
]:
|
||||
# pandoc 2.x converts into 
|
||||
# pandoc 3.x converts into <figure>src<figcaption>text</figcaption></figure>
|
||||
|
|
@@ -357,70 +401,97 @@ class TestWordpressXmlImporter(TestCaseWithCLocale):

 class TestBuildHeader(unittest.TestCase):
     def test_build_header(self):
-        header = build_header('test', None, None, None, None, None)
-        self.assertEqual(header, 'test\n####\n\n')
+        header = build_header("test", None, None, None, None, None)
+        self.assertEqual(header, "test\n####\n\n")

     def test_build_header_with_fields(self):
         header_data = [
-            'Test Post',
-            '2014-11-04',
-            'Alexis Métaireau',
-            ['Programming'],
-            ['Pelican', 'Python'],
-            'test-post',
+            "Test Post",
+            "2014-11-04",
+            "Alexis Métaireau",
+            ["Programming"],
+            ["Pelican", "Python"],
+            "test-post",
         ]

-        expected_docutils = '\n'.join([
-            'Test Post',
-            '#########',
-            ':date: 2014-11-04',
-            ':author: Alexis Métaireau',
-            ':category: Programming',
-            ':tags: Pelican, Python',
-            ':slug: test-post',
-            '\n',
-        ])
+        expected_docutils = "\n".join(
+            [
+                "Test Post",
+                "#########",
+                ":date: 2014-11-04",
+                ":author: Alexis Métaireau",
+                ":category: Programming",
+                ":tags: Pelican, Python",
+                ":slug: test-post",
+                "\n",
+            ]
+        )

-        expected_md = '\n'.join([
-            'Title: Test Post',
-            'Date: 2014-11-04',
-            'Author: Alexis Métaireau',
-            'Category: Programming',
-            'Tags: Pelican, Python',
-            'Slug: test-post',
-            '\n',
-        ])
+        expected_md = "\n".join(
+            [
+                "Title: Test Post",
+                "Date: 2014-11-04",
+                "Author: Alexis Métaireau",
+                "Category: Programming",
+                "Tags: Pelican, Python",
+                "Slug: test-post",
+                "\n",
+            ]
+        )

         self.assertEqual(build_header(*header_data), expected_docutils)
         self.assertEqual(build_markdown_header(*header_data), expected_md)

     def test_build_header_with_east_asian_characters(self):
-        header = build_header('これは広い幅の文字だけで構成されたタイトルです',
-                              None, None, None, None, None)
+        header = build_header(
+            "これは広い幅の文字だけで構成されたタイトルです",
+            None,
+            None,
+            None,
+            None,
+            None,
+        )

-        self.assertEqual(header,
-                         ('これは広い幅の文字だけで構成されたタイトルです\n'
-                          '##############################################'
-                          '\n\n'))
-
-    def test_galleries_added_to_header(self):
-        header = build_header('test', None, None, None, None, None,
-                              attachments=['output/test1', 'output/test2'])
-        self.assertEqual(header, ('test\n####\n'
-                                  ':attachments: output/test1, '
-                                  'output/test2\n\n'))
-
-    def test_galleries_added_to_markdown_header(self):
-        header = build_markdown_header('test', None, None, None, None, None,
-                                       attachments=['output/test1',
-                                                    'output/test2'])
-        self.assertEqual(
-            header,
-            'Title: test\nAttachments: output/test1, output/test2\n\n')
+        self.assertEqual(
+            header,
+            (
+                "これは広い幅の文字だけで構成されたタイトルです\n"
+                "##############################################"
+                "\n\n"
+            ),
+        )
+
+    def test_galleries_added_to_header(self):
+        header = build_header(
+            "test",
+            None,
+            None,
+            None,
+            None,
+            None,
+            attachments=["output/test1", "output/test2"],
+        )
+        self.assertEqual(
+            header, ("test\n####\n" ":attachments: output/test1, " "output/test2\n\n")
+        )
+
+    def test_galleries_added_to_markdown_header(self):
+        header = build_markdown_header(
+            "test",
+            None,
+            None,
+            None,
+            None,
+            None,
+            attachments=["output/test1", "output/test2"],
+        )
+        self.assertEqual(
+            header, "Title: test\nAttachments: output/test1, output/test2\n\n"
+        )


-@unittest.skipUnless(BeautifulSoup, 'Needs BeautifulSoup module')
-@unittest.skipUnless(LXML, 'Needs lxml module')
+@unittest.skipUnless(BeautifulSoup, "Needs BeautifulSoup module")
+@unittest.skipUnless(LXML, "Needs lxml module")
 class TestWordpressXMLAttachements(TestCaseWithCLocale):
     def setUp(self):
         super().setUp()
@@ -435,38 +506,45 @@ class TestWordpressXMLAttachements(TestCaseWithCLocale):
         for post in self.attachments.keys():
             if post is None:
                 expected = {
-                    ('https://upload.wikimedia.org/wikipedia/commons/'
-                     'thumb/2/2c/Pelican_lakes_entrance02.jpg/'
-                     '240px-Pelican_lakes_entrance02.jpg')
+                    (
+                        "https://upload.wikimedia.org/wikipedia/commons/"
+                        "thumb/2/2c/Pelican_lakes_entrance02.jpg/"
+                        "240px-Pelican_lakes_entrance02.jpg"
+                    )
                 }
                 self.assertEqual(self.attachments[post], expected)
-            elif post == 'with-excerpt':
-                expected_invalid = ('http://thisurlisinvalid.notarealdomain/'
-                                    'not_an_image.jpg')
-                expected_pelikan = ('http://en.wikipedia.org/wiki/'
-                                    'File:Pelikan_Walvis_Bay.jpg')
-                self.assertEqual(self.attachments[post],
-                                 {expected_invalid, expected_pelikan})
-            elif post == 'with-tags':
-                expected_invalid = ('http://thisurlisinvalid.notarealdomain')
+            elif post == "with-excerpt":
+                expected_invalid = (
+                    "http://thisurlisinvalid.notarealdomain/" "not_an_image.jpg"
+                )
+                expected_pelikan = (
+                    "http://en.wikipedia.org/wiki/" "File:Pelikan_Walvis_Bay.jpg"
+                )
+                self.assertEqual(
+                    self.attachments[post], {expected_invalid, expected_pelikan}
+                )
+            elif post == "with-tags":
+                expected_invalid = "http://thisurlisinvalid.notarealdomain"
                 self.assertEqual(self.attachments[post], {expected_invalid})
             else:
-                self.fail('all attachments should match to a '
-                          'filename or None, {}'
-                          .format(post))
+                self.fail(
+                    "all attachments should match to a " "filename or None, {}".format(
+                        post
+                    )
+                )

     def test_download_attachments(self):
-        real_file = os.path.join(CUR_DIR, 'content/article.rst')
+        real_file = os.path.join(CUR_DIR, "content/article.rst")
         good_url = path_to_file_url(real_file)
-        bad_url = 'http://localhost:1/not_a_file.txt'
+        bad_url = "http://localhost:1/not_a_file.txt"
         silent_da = mute()(download_attachments)
         with temporary_folder() as temp:
             locations = list(silent_da(temp, [good_url, bad_url]))
             self.assertEqual(1, len(locations))
             directory = locations[0]
             self.assertTrue(
-                directory.endswith(posix_join('content', 'article.rst')),
-                directory)
+                directory.endswith(posix_join("content", "article.rst")), directory
+            )


 class TestTumblrImporter(TestCaseWithCLocale):
@@ -484,32 +562,42 @@ class TestTumblrImporter(TestCaseWithCLocale):
                     "timestamp": 1573162000,
                     "format": "html",
                     "slug": "a-slug",
-                    "tags": [
-                        "economics"
-                    ],
+                    "tags": ["economics"],
                     "state": "published",

                     "photos": [
                         {
                             "caption": "",
                             "original_size": {
                                 "url": "https://..fccdc2360ba7182a.jpg",
                                 "width": 634,
-                                "height": 789
+                                "height": 789,
                             },
-                        }]
+                        }
+                    ],
                 }
             ]

         get.side_effect = get_posts

         posts = list(tumblr2fields("api_key", "blogname"))
         self.assertEqual(
-            [('Photo',
-              '<img alt="" src="https://..fccdc2360ba7182a.jpg" />\n',
-              '2019-11-07-a-slug', '2019-11-07 21:26:40+0000', 'testy', ['photo'],
-              ['economics'], 'published', 'article', 'html')],
+            [
+                (
+                    "Photo",
+                    '<img alt="" src="https://..fccdc2360ba7182a.jpg" />\n',
+                    "2019-11-07-a-slug",
+                    "2019-11-07 21:26:40+0000",
+                    "testy",
+                    ["photo"],
+                    ["economics"],
+                    "published",
+                    "article",
+                    "html",
+                )
+            ],
             posts,
-            posts)
+            posts,
+        )

     @patch("pelican.tools.pelican_import._get_tumblr_posts")
     def test_video_embed(self, get):
@@ -531,40 +619,39 @@ class TestTumblrImporter(TestCaseWithCLocale):
                     "source_title": "youtube.com",
                     "caption": "<p>Caption</p>",
                     "player": [
-                        {
-                            "width": 250,
-                            "embed_code":
-                                "<iframe>1</iframe>"
-                        },
-                        {
-                            "width": 400,
-                            "embed_code":
-                                "<iframe>2</iframe>"
-                        },
-                        {
-                            "width": 500,
-                            "embed_code":
-                                "<iframe>3</iframe>"
-                        }
+                        {"width": 250, "embed_code": "<iframe>1</iframe>"},
+                        {"width": 400, "embed_code": "<iframe>2</iframe>"},
+                        {"width": 500, "embed_code": "<iframe>3</iframe>"},
                     ],
                     "video_type": "youtube",
                 }
             ]

         get.side_effect = get_posts

         posts = list(tumblr2fields("api_key", "blogname"))
         self.assertEqual(
-            [('youtube.com',
-              '<p><a href="https://href.li/?'
-              'https://www.youtube.com/a">via</a></p>\n<p>Caption</p>'
-              '<iframe>1</iframe>\n'
-              '<iframe>2</iframe>\n'
-              '<iframe>3</iframe>\n',
-              '2017-07-07-the-slug',
-              '2017-07-07 20:31:41+0000', 'testy', ['video'], [], 'published',
-              'article', 'html')],
+            [
+                (
+                    "youtube.com",
+                    '<p><a href="https://href.li/?'
+                    'https://www.youtube.com/a">via</a></p>\n<p>Caption</p>'
+                    "<iframe>1</iframe>\n"
+                    "<iframe>2</iframe>\n"
+                    "<iframe>3</iframe>\n",
+                    "2017-07-07-the-slug",
+                    "2017-07-07 20:31:41+0000",
+                    "testy",
+                    ["video"],
+                    [],
+                    "published",
+                    "article",
+                    "html",
+                )
+            ],
             posts,
-            posts)
+            posts,
+        )

     @patch("pelican.tools.pelican_import._get_tumblr_posts")
     def test_broken_video_embed(self, get):
@ -581,42 +668,43 @@ class TestTumblrImporter(TestCaseWithCLocale):
"timestamp": 1471192655,
"state": "published",
"format": "html",
"tags": [
"interviews"
],
"source_url":
"https://href.li/?https://www.youtube.com/watch?v=b",
"tags": ["interviews"],
"source_url": "https://href.li/?https://www.youtube.com/watch?v=b",
"source_title": "youtube.com",
"caption":
"<p>Caption</p>",
"caption": "<p>Caption</p>",
"player": [
{
"width": 250,
# If video is gone, embed_code is False
"embed_code": False
"embed_code": False,
},
{
"width": 400,
"embed_code": False
},
{
"width": 500,
"embed_code": False
}
{"width": 400, "embed_code": False},
{"width": 500, "embed_code": False},
],
"video_type": "youtube",
}
]

get.side_effect = get_posts

posts = list(tumblr2fields("api_key", "blogname"))
self.assertEqual(
[('youtube.com',
'<p><a href="https://href.li/?https://www.youtube.com/watch?'
'v=b">via</a></p>\n<p>Caption</p>'
'<p>(This video isn\'t available anymore.)</p>\n',
'2016-08-14-the-slug',
'2016-08-14 16:37:35+0000', 'testy', ['video'], ['interviews'],
'published', 'article', 'html')],
[
(
"youtube.com",
'<p><a href="https://href.li/?https://www.youtube.com/watch?'
'v=b">via</a></p>\n<p>Caption</p>'
"<p>(This video isn't available anymore.)</p>\n",
"2016-08-14-the-slug",
"2016-08-14 16:37:35+0000",
"testy",
["video"],
["interviews"],
"published",
"article",
"html",
)
],
posts,
posts)
posts,
)
@ -35,48 +35,41 @@ class TestLog(unittest.TestCase):
def test_log_filter(self):
def do_logging():
for i in range(5):
self.logger.warning('Log %s', i)
self.logger.warning('Another log %s', i)
self.logger.warning("Log %s", i)
self.logger.warning("Another log %s", i)

# no filter
with self.reset_logger():
do_logging()
self.assertEqual(self.handler.count_logs("Log \\d", logging.WARNING), 5)
self.assertEqual(
self.handler.count_logs('Log \\d', logging.WARNING),
5)
self.assertEqual(
self.handler.count_logs('Another log \\d', logging.WARNING),
5)
self.handler.count_logs("Another log \\d", logging.WARNING), 5
)

# filter by template
with self.reset_logger():
log.LimitFilter._ignore.add((logging.WARNING, 'Log %s'))
log.LimitFilter._ignore.add((logging.WARNING, "Log %s"))
do_logging()
self.assertEqual(self.handler.count_logs("Log \\d", logging.WARNING), 0)
self.assertEqual(
self.handler.count_logs('Log \\d', logging.WARNING),
0)
self.assertEqual(
self.handler.count_logs('Another log \\d', logging.WARNING),
5)
self.handler.count_logs("Another log \\d", logging.WARNING), 5
)

# filter by exact message
with self.reset_logger():
log.LimitFilter._ignore.add((logging.WARNING, 'Log 3'))
log.LimitFilter._ignore.add((logging.WARNING, "Log 3"))
do_logging()
self.assertEqual(self.handler.count_logs("Log \\d", logging.WARNING), 4)
self.assertEqual(
self.handler.count_logs('Log \\d', logging.WARNING),
4)
self.assertEqual(
self.handler.count_logs('Another log \\d', logging.WARNING),
5)
self.handler.count_logs("Another log \\d", logging.WARNING), 5
)

# filter by both
with self.reset_logger():
log.LimitFilter._ignore.add((logging.WARNING, 'Log 3'))
log.LimitFilter._ignore.add((logging.WARNING, 'Another log %s'))
log.LimitFilter._ignore.add((logging.WARNING, "Log 3"))
log.LimitFilter._ignore.add((logging.WARNING, "Another log %s"))
do_logging()
self.assertEqual(self.handler.count_logs("Log \\d", logging.WARNING), 4)
self.assertEqual(
self.handler.count_logs('Log \\d', logging.WARNING),
4)
self.assertEqual(
self.handler.count_logs('Another log \\d', logging.WARNING),
0)
self.handler.count_logs("Another log \\d", logging.WARNING), 0
)
@ -17,17 +17,17 @@ class TestPage(unittest.TestCase):
def setUp(self):
super().setUp()
self.old_locale = locale.setlocale(locale.LC_ALL)
locale.setlocale(locale.LC_ALL, 'C')
locale.setlocale(locale.LC_ALL, "C")
self.page_kwargs = {
'content': TEST_CONTENT,
'context': {
'localsiteurl': '',
"content": TEST_CONTENT,
"context": {
"localsiteurl": "",
},
'metadata': {
'summary': TEST_SUMMARY,
'title': 'foo bar',
"metadata": {
"summary": TEST_SUMMARY,
"title": "foo bar",
},
'source_path': '/path/to/file/foo.ext'
"source_path": "/path/to/file/foo.ext",
}

def tearDown(self):
@ -37,68 +37,79 @@ class TestPage(unittest.TestCase):
settings = get_settings()
# fix up pagination rules
from pelican.paginator import PaginationRule

pagination_rules = [
PaginationRule(*r) for r in settings.get(
'PAGINATION_PATTERNS',
DEFAULT_CONFIG['PAGINATION_PATTERNS'],
PaginationRule(*r)
for r in settings.get(
"PAGINATION_PATTERNS",
DEFAULT_CONFIG["PAGINATION_PATTERNS"],
)
]
settings['PAGINATION_PATTERNS'] = sorted(
settings["PAGINATION_PATTERNS"] = sorted(
pagination_rules,
key=lambda r: r[0],
)

self.page_kwargs['metadata']['author'] = Author('Blogger', settings)
object_list = [Article(**self.page_kwargs),
Article(**self.page_kwargs)]
paginator = Paginator('foobar.foo', 'foobar/foo', object_list,
settings)
self.page_kwargs["metadata"]["author"] = Author("Blogger", settings)
object_list = [Article(**self.page_kwargs), Article(**self.page_kwargs)]
paginator = Paginator("foobar.foo", "foobar/foo", object_list, settings)
page = paginator.page(1)
self.assertEqual(page.save_as, 'foobar.foo')
self.assertEqual(page.save_as, "foobar.foo")

def test_custom_pagination_pattern(self):
from pelican.paginator import PaginationRule
settings = get_settings()
settings['PAGINATION_PATTERNS'] = [PaginationRule(*r) for r in [
(1, '/{url}', '{base_name}/index.html'),
(2, '/{url}{number}/', '{base_name}/{number}/index.html')
]]

self.page_kwargs['metadata']['author'] = Author('Blogger', settings)
object_list = [Article(**self.page_kwargs),
Article(**self.page_kwargs)]
paginator = Paginator('blog/index.html', '//blog.my.site/',
object_list, settings, 1)
settings = get_settings()
settings["PAGINATION_PATTERNS"] = [
PaginationRule(*r)
for r in [
(1, "/{url}", "{base_name}/index.html"),
(2, "/{url}{number}/", "{base_name}/{number}/index.html"),
]
]

self.page_kwargs["metadata"]["author"] = Author("Blogger", settings)
object_list = [Article(**self.page_kwargs), Article(**self.page_kwargs)]
paginator = Paginator(
"blog/index.html", "//blog.my.site/", object_list, settings, 1
)
# The URL *has to* stay absolute (with // in the front), so verify that
page1 = paginator.page(1)
self.assertEqual(page1.save_as, 'blog/index.html')
self.assertEqual(page1.url, '//blog.my.site/')
self.assertEqual(page1.save_as, "blog/index.html")
self.assertEqual(page1.url, "//blog.my.site/")
page2 = paginator.page(2)
self.assertEqual(page2.save_as, 'blog/2/index.html')
self.assertEqual(page2.url, '//blog.my.site/2/')
self.assertEqual(page2.save_as, "blog/2/index.html")
self.assertEqual(page2.url, "//blog.my.site/2/")

def test_custom_pagination_pattern_last_page(self):
from pelican.paginator import PaginationRule
settings = get_settings()
settings['PAGINATION_PATTERNS'] = [PaginationRule(*r) for r in [
(1, '/{url}1/', '{base_name}/1/index.html'),
(2, '/{url}{number}/', '{base_name}/{number}/index.html'),
(-1, '/{url}', '{base_name}/index.html'),
]]

self.page_kwargs['metadata']['author'] = Author('Blogger', settings)
object_list = [Article(**self.page_kwargs),
Article(**self.page_kwargs),
Article(**self.page_kwargs)]
paginator = Paginator('blog/index.html', '//blog.my.site/',
object_list, settings, 1)
settings = get_settings()
settings["PAGINATION_PATTERNS"] = [
PaginationRule(*r)
for r in [
(1, "/{url}1/", "{base_name}/1/index.html"),
(2, "/{url}{number}/", "{base_name}/{number}/index.html"),
(-1, "/{url}", "{base_name}/index.html"),
]
]

self.page_kwargs["metadata"]["author"] = Author("Blogger", settings)
object_list = [
Article(**self.page_kwargs),
Article(**self.page_kwargs),
Article(**self.page_kwargs),
]
paginator = Paginator(
"blog/index.html", "//blog.my.site/", object_list, settings, 1
)
# The URL *has to* stay absolute (with // in the front), so verify that
page1 = paginator.page(1)
self.assertEqual(page1.save_as, 'blog/1/index.html')
self.assertEqual(page1.url, '//blog.my.site/1/')
self.assertEqual(page1.save_as, "blog/1/index.html")
self.assertEqual(page1.url, "//blog.my.site/1/")
page2 = paginator.page(2)
self.assertEqual(page2.save_as, 'blog/2/index.html')
self.assertEqual(page2.url, '//blog.my.site/2/')
self.assertEqual(page2.save_as, "blog/2/index.html")
self.assertEqual(page2.url, "//blog.my.site/2/")
page3 = paginator.page(3)
self.assertEqual(page3.save_as, 'blog/index.html')
self.assertEqual(page3.url, '//blog.my.site/')
self.assertEqual(page3.save_as, "blog/index.html")
self.assertEqual(page3.url, "//blog.my.site/")
@ -20,9 +20,10 @@ from pelican.tests.support import (
)

CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
SAMPLES_PATH = os.path.abspath(os.path.join(
CURRENT_DIR, os.pardir, os.pardir, 'samples'))
OUTPUT_PATH = os.path.abspath(os.path.join(CURRENT_DIR, 'output'))
SAMPLES_PATH = os.path.abspath(
os.path.join(CURRENT_DIR, os.pardir, os.pardir, "samples")
)
OUTPUT_PATH = os.path.abspath(os.path.join(CURRENT_DIR, "output"))

INPUT_PATH = os.path.join(SAMPLES_PATH, "content")
SAMPLE_CONFIG = os.path.join(SAMPLES_PATH, "pelican.conf.py")
@ -31,9 +32,9 @@ SAMPLE_FR_CONFIG = os.path.join(SAMPLES_PATH, "pelican.conf_FR.py")

def recursiveDiff(dcmp):
diff = {
'diff_files': [os.path.join(dcmp.right, f) for f in dcmp.diff_files],
'left_only': [os.path.join(dcmp.right, f) for f in dcmp.left_only],
'right_only': [os.path.join(dcmp.right, f) for f in dcmp.right_only],
"diff_files": [os.path.join(dcmp.right, f) for f in dcmp.diff_files],
"left_only": [os.path.join(dcmp.right, f) for f in dcmp.left_only],
"right_only": [os.path.join(dcmp.right, f) for f in dcmp.right_only],
}
for sub_dcmp in dcmp.subdirs.values():
for k, v in recursiveDiff(sub_dcmp).items():
@ -47,11 +48,11 @@ class TestPelican(LoggedTestCase):

def setUp(self):
super().setUp()
self.temp_path = mkdtemp(prefix='pelicantests.')
self.temp_cache = mkdtemp(prefix='pelican_cache.')
self.temp_path = mkdtemp(prefix="pelicantests.")
self.temp_cache = mkdtemp(prefix="pelican_cache.")
self.maxDiff = None
self.old_locale = locale.setlocale(locale.LC_ALL)
locale.setlocale(locale.LC_ALL, 'C')
locale.setlocale(locale.LC_ALL, "C")

def tearDown(self):
read_settings() # cleanup PYGMENTS_RST_OPTIONS
@ -70,8 +71,8 @@ class TestPelican(LoggedTestCase):
if proc.returncode != 0:
msg = self._formatMessage(
msg,
"%s and %s differ:\nstdout:\n%s\nstderr\n%s" %
(left_path, right_path, out, err)
"%s and %s differ:\nstdout:\n%s\nstderr\n%s"
% (left_path, right_path, out, err),
)
raise self.failureException(msg)

@ -85,136 +86,154 @@ class TestPelican(LoggedTestCase):

self.assertTrue(
generator_classes[-1] is StaticGenerator,
"StaticGenerator must be the last generator, but it isn't!")
"StaticGenerator must be the last generator, but it isn't!",
)
self.assertIsInstance(
generator_classes, Sequence,
"_get_generator_classes() must return a Sequence to preserve order")
generator_classes,
Sequence,
"_get_generator_classes() must return a Sequence to preserve order",
)

@skipIfNoExecutable(['git', '--version'])
@skipIfNoExecutable(["git", "--version"])
def test_basic_generation_works(self):
# when running pelican without settings, it should pick up the default
# ones and generate correct output without raising any exception
settings = read_settings(path=None, override={
'PATH': INPUT_PATH,
'OUTPUT_PATH': self.temp_path,
'CACHE_PATH': self.temp_cache,
'LOCALE': locale.normalize('en_US'),
})
settings = read_settings(
path=None,
override={
"PATH": INPUT_PATH,
"OUTPUT_PATH": self.temp_path,
"CACHE_PATH": self.temp_cache,
"LOCALE": locale.normalize("en_US"),
},
)
pelican = Pelican(settings=settings)
mute(True)(pelican.run)()
self.assertDirsEqual(
self.temp_path, os.path.join(OUTPUT_PATH, 'basic')
)
self.assertDirsEqual(self.temp_path, os.path.join(OUTPUT_PATH, "basic"))
self.assertLogCountEqual(
count=1,
msg="Unable to find.*skipping url replacement",
level=logging.WARNING)
level=logging.WARNING,
)

@skipIfNoExecutable(['git', '--version'])
@skipIfNoExecutable(["git", "--version"])
def test_custom_generation_works(self):
# the same thing with a specified set of settings should work
settings = read_settings(path=SAMPLE_CONFIG, override={
'PATH': INPUT_PATH,
'OUTPUT_PATH': self.temp_path,
'CACHE_PATH': self.temp_cache,
'LOCALE': locale.normalize('en_US.UTF-8'),
})
settings = read_settings(
path=SAMPLE_CONFIG,
override={
"PATH": INPUT_PATH,
"OUTPUT_PATH": self.temp_path,
"CACHE_PATH": self.temp_cache,
"LOCALE": locale.normalize("en_US.UTF-8"),
},
)
pelican = Pelican(settings=settings)
mute(True)(pelican.run)()
self.assertDirsEqual(
self.temp_path, os.path.join(OUTPUT_PATH, 'custom')
)
self.assertDirsEqual(self.temp_path, os.path.join(OUTPUT_PATH, "custom"))

@skipIfNoExecutable(['git', '--version'])
@unittest.skipUnless(locale_available('fr_FR.UTF-8') or
locale_available('French'), 'French locale needed')
@skipIfNoExecutable(["git", "--version"])
@unittest.skipUnless(
locale_available("fr_FR.UTF-8") or locale_available("French"),
"French locale needed",
)
def test_custom_locale_generation_works(self):
'''Test that generation with fr_FR.UTF-8 locale works'''
if sys.platform == 'win32':
our_locale = 'French'
"""Test that generation with fr_FR.UTF-8 locale works"""
if sys.platform == "win32":
our_locale = "French"
else:
our_locale = 'fr_FR.UTF-8'
our_locale = "fr_FR.UTF-8"

settings = read_settings(path=SAMPLE_FR_CONFIG, override={
'PATH': INPUT_PATH,
'OUTPUT_PATH': self.temp_path,
'CACHE_PATH': self.temp_cache,
'LOCALE': our_locale,
})
settings = read_settings(
path=SAMPLE_FR_CONFIG,
override={
"PATH": INPUT_PATH,
"OUTPUT_PATH": self.temp_path,
"CACHE_PATH": self.temp_cache,
"LOCALE": our_locale,
},
)
pelican = Pelican(settings=settings)
mute(True)(pelican.run)()
self.assertDirsEqual(
self.temp_path, os.path.join(OUTPUT_PATH, 'custom_locale')
)
self.assertDirsEqual(self.temp_path, os.path.join(OUTPUT_PATH, "custom_locale"))

def test_theme_static_paths_copy(self):
# the same thing with a specified set of settings should work
settings = read_settings(path=SAMPLE_CONFIG, override={
'PATH': INPUT_PATH,
'OUTPUT_PATH': self.temp_path,
'CACHE_PATH': self.temp_cache,
'THEME_STATIC_PATHS': [os.path.join(SAMPLES_PATH, 'very'),
os.path.join(SAMPLES_PATH, 'kinda'),
os.path.join(SAMPLES_PATH,
'theme_standard')]
})
settings = read_settings(
path=SAMPLE_CONFIG,
override={
"PATH": INPUT_PATH,
"OUTPUT_PATH": self.temp_path,
"CACHE_PATH": self.temp_cache,
"THEME_STATIC_PATHS": [
os.path.join(SAMPLES_PATH, "very"),
os.path.join(SAMPLES_PATH, "kinda"),
os.path.join(SAMPLES_PATH, "theme_standard"),
],
},
)
pelican = Pelican(settings=settings)
mute(True)(pelican.run)()
theme_output = os.path.join(self.temp_path, 'theme')
extra_path = os.path.join(theme_output, 'exciting', 'new', 'files')
theme_output = os.path.join(self.temp_path, "theme")
extra_path = os.path.join(theme_output, "exciting", "new", "files")

for file in ['a_stylesheet', 'a_template']:
for file in ["a_stylesheet", "a_template"]:
self.assertTrue(os.path.exists(os.path.join(theme_output, file)))

for file in ['wow!', 'boom!', 'bap!', 'zap!']:
for file in ["wow!", "boom!", "bap!", "zap!"]:
self.assertTrue(os.path.exists(os.path.join(extra_path, file)))

def test_theme_static_paths_copy_single_file(self):
# the same thing with a specified set of settings should work
settings = read_settings(path=SAMPLE_CONFIG, override={
'PATH': INPUT_PATH,
'OUTPUT_PATH': self.temp_path,
'CACHE_PATH': self.temp_cache,
'THEME_STATIC_PATHS': [os.path.join(SAMPLES_PATH,
'theme_standard')]
})
settings = read_settings(
path=SAMPLE_CONFIG,
override={
"PATH": INPUT_PATH,
"OUTPUT_PATH": self.temp_path,
"CACHE_PATH": self.temp_cache,
"THEME_STATIC_PATHS": [os.path.join(SAMPLES_PATH, "theme_standard")],
},
)

pelican = Pelican(settings=settings)
mute(True)(pelican.run)()
theme_output = os.path.join(self.temp_path, 'theme')
theme_output = os.path.join(self.temp_path, "theme")

for file in ['a_stylesheet', 'a_template']:
for file in ["a_stylesheet", "a_template"]:
self.assertTrue(os.path.exists(os.path.join(theme_output, file)))

def test_write_only_selected(self):
"""Test that only the selected files are written"""
settings = read_settings(path=None, override={
'PATH': INPUT_PATH,
'OUTPUT_PATH': self.temp_path,
'CACHE_PATH': self.temp_cache,
'WRITE_SELECTED': [
os.path.join(self.temp_path, 'oh-yeah.html'),
os.path.join(self.temp_path, 'categories.html'),
],
'LOCALE': locale.normalize('en_US'),
})
settings = read_settings(
path=None,
override={
"PATH": INPUT_PATH,
"OUTPUT_PATH": self.temp_path,
"CACHE_PATH": self.temp_cache,
"WRITE_SELECTED": [
os.path.join(self.temp_path, "oh-yeah.html"),
os.path.join(self.temp_path, "categories.html"),
],
"LOCALE": locale.normalize("en_US"),
},
)
pelican = Pelican(settings=settings)
logger = logging.getLogger()
orig_level = logger.getEffectiveLevel()
logger.setLevel(logging.INFO)
mute(True)(pelican.run)()
logger.setLevel(orig_level)
self.assertLogCountEqual(
count=2,
msg="Writing .*",
level=logging.INFO)
self.assertLogCountEqual(count=2, msg="Writing .*", level=logging.INFO)

def test_cyclic_intersite_links_no_warnings(self):
settings = read_settings(path=None, override={
'PATH': os.path.join(CURRENT_DIR, 'cyclic_intersite_links'),
'OUTPUT_PATH': self.temp_path,
'CACHE_PATH': self.temp_cache,
})
settings = read_settings(
path=None,
override={
"PATH": os.path.join(CURRENT_DIR, "cyclic_intersite_links"),
"OUTPUT_PATH": self.temp_path,
"CACHE_PATH": self.temp_cache,
},
)
pelican = Pelican(settings=settings)
mute(True)(pelican.run)()
# There are four different intersite links:
@ -230,41 +249,48 @@ class TestPelican(LoggedTestCase):
self.assertLogCountEqual(
count=1,
msg="Unable to find '.*\\.rst', skipping url replacement.",
level=logging.WARNING)
level=logging.WARNING,
)

def test_md_extensions_deprecation(self):
"""Test that a warning is issued if MD_EXTENSIONS is used"""
settings = read_settings(path=None, override={
'PATH': INPUT_PATH,
'OUTPUT_PATH': self.temp_path,
'CACHE_PATH': self.temp_cache,
'MD_EXTENSIONS': {},
})
settings = read_settings(
path=None,
override={
"PATH": INPUT_PATH,
"OUTPUT_PATH": self.temp_path,
"CACHE_PATH": self.temp_cache,
"MD_EXTENSIONS": {},
},
)
pelican = Pelican(settings=settings)
mute(True)(pelican.run)()
self.assertLogCountEqual(
count=1,
msg="MD_EXTENSIONS is deprecated use MARKDOWN instead.",
level=logging.WARNING)
level=logging.WARNING,
)

def test_parse_errors(self):
# Verify that just an error is printed and the application doesn't
# abort, exit or something.
settings = read_settings(path=None, override={
'PATH': os.path.abspath(os.path.join(CURRENT_DIR, 'parse_error')),
'OUTPUT_PATH': self.temp_path,
'CACHE_PATH': self.temp_cache,
})
settings = read_settings(
path=None,
override={
"PATH": os.path.abspath(os.path.join(CURRENT_DIR, "parse_error")),
"OUTPUT_PATH": self.temp_path,
"CACHE_PATH": self.temp_cache,
},
)
pelican = Pelican(settings=settings)
mute(True)(pelican.run)()
self.assertLogCountEqual(
count=1,
msg="Could not process .*parse_error.rst",
level=logging.ERROR)
count=1, msg="Could not process .*parse_error.rst", level=logging.ERROR
)

def test_module_load(self):
"""Test loading via python -m pelican --help displays the help"""
output = subprocess.check_output([
sys.executable, '-m', 'pelican', '--help'
]).decode('ascii', 'replace')
assert 'usage:' in output
output = subprocess.check_output(
[sys.executable, "-m", "pelican", "--help"]
).decode("ascii", "replace")
assert "usage:" in output
@ -2,27 +2,26 @@ import os
from contextlib import contextmanager

import pelican.tests.dummy_plugins.normal_plugin.normal_plugin as normal_plugin
from pelican.plugins._utils import (get_namespace_plugins, get_plugin_name,
load_plugins)
from pelican.plugins._utils import get_namespace_plugins, get_plugin_name, load_plugins
from pelican.tests.support import unittest


@contextmanager
def tmp_namespace_path(path):
'''Context manager for temporarily appending namespace plugin packages
"""Context manager for temporarily appending namespace plugin packages

path: path containing the `pelican` folder

This modifies the `pelican.__path__` and lets the `pelican.plugins`
namespace package resolve it from that.
'''
"""
# This avoids calls to internal `pelican.plugins.__path__._recalculate()`
# as it should not be necessary
import pelican

old_path = pelican.__path__[:]
try:
pelican.__path__.append(os.path.join(path, 'pelican'))
pelican.__path__.append(os.path.join(path, "pelican"))
yield
finally:
pelican.__path__ = old_path
@ -30,38 +29,38 @@ def tmp_namespace_path(path):

class PluginTest(unittest.TestCase):
_PLUGIN_FOLDER = os.path.join(
os.path.abspath(os.path.dirname(__file__)),
'dummy_plugins')
_NS_PLUGIN_FOLDER = os.path.join(_PLUGIN_FOLDER, 'namespace_plugin')
_NORMAL_PLUGIN_FOLDER = os.path.join(_PLUGIN_FOLDER, 'normal_plugin')
os.path.abspath(os.path.dirname(__file__)), "dummy_plugins"
)
_NS_PLUGIN_FOLDER = os.path.join(_PLUGIN_FOLDER, "namespace_plugin")
_NORMAL_PLUGIN_FOLDER = os.path.join(_PLUGIN_FOLDER, "normal_plugin")

def test_namespace_path_modification(self):
import pelican
import pelican.plugins

old_path = pelican.__path__[:]

# not existing path
path = os.path.join(self._PLUGIN_FOLDER, 'foo')
path = os.path.join(self._PLUGIN_FOLDER, "foo")
with tmp_namespace_path(path):
self.assertIn(
os.path.join(path, 'pelican'),
pelican.__path__)
self.assertIn(os.path.join(path, "pelican"), pelican.__path__)
# foo/pelican does not exist, so it won't propagate
self.assertNotIn(
os.path.join(path, 'pelican', 'plugins'),
pelican.plugins.__path__)
os.path.join(path, "pelican", "plugins"), pelican.plugins.__path__
)
# verify that we restored path back
self.assertEqual(pelican.__path__, old_path)

# existing path
with tmp_namespace_path(self._NS_PLUGIN_FOLDER):
self.assertIn(
os.path.join(self._NS_PLUGIN_FOLDER, 'pelican'),
pelican.__path__)
os.path.join(self._NS_PLUGIN_FOLDER, "pelican"), pelican.__path__
)
# /namespace_plugin/pelican exists, so it should be in
self.assertIn(
os.path.join(self._NS_PLUGIN_FOLDER, 'pelican', 'plugins'),
pelican.plugins.__path__)
os.path.join(self._NS_PLUGIN_FOLDER, "pelican", "plugins"),
pelican.plugins.__path__,
)
self.assertEqual(pelican.__path__, old_path)

def test_get_namespace_plugins(self):
@ -71,11 +70,11 @@ class PluginTest(unittest.TestCase):
# with plugin
with tmp_namespace_path(self._NS_PLUGIN_FOLDER):
ns_plugins = get_namespace_plugins()
self.assertEqual(len(ns_plugins), len(existing_ns_plugins)+1)
self.assertIn('pelican.plugins.ns_plugin', ns_plugins)
self.assertEqual(len(ns_plugins), len(existing_ns_plugins) + 1)
self.assertIn("pelican.plugins.ns_plugin", ns_plugins)
self.assertEqual(
ns_plugins['pelican.plugins.ns_plugin'].NAME,
'namespace plugin')
ns_plugins["pelican.plugins.ns_plugin"].NAME, "namespace plugin"
)

# should be back to existing namespace plugins outside `with`
ns_plugins = get_namespace_plugins()
@ -91,15 +90,14 @@ class PluginTest(unittest.TestCase):
with tmp_namespace_path(self._NS_PLUGIN_FOLDER):
# with no `PLUGINS` setting, load namespace plugins
plugins = load_plugins({})
self.assertEqual(len(plugins), len(existing_ns_plugins)+1, plugins)
self.assertEqual(len(plugins), len(existing_ns_plugins) + 1, plugins)
self.assertEqual(
{'pelican.plugins.ns_plugin'} | get_plugin_names(existing_ns_plugins),
get_plugin_names(plugins))
{"pelican.plugins.ns_plugin"} | get_plugin_names(existing_ns_plugins),
get_plugin_names(plugins),
)

# disable namespace plugins with `PLUGINS = []`
SETTINGS = {
'PLUGINS': []
}
SETTINGS = {"PLUGINS": []}
plugins = load_plugins(SETTINGS)
self.assertEqual(len(plugins), 0, plugins)

@ -107,34 +105,35 @@ class PluginTest(unittest.TestCase):

# normal plugin
SETTINGS = {
'PLUGINS': ['normal_plugin'],
'PLUGIN_PATHS': [self._NORMAL_PLUGIN_FOLDER]
"PLUGINS": ["normal_plugin"],
"PLUGIN_PATHS": [self._NORMAL_PLUGIN_FOLDER],
}
plugins = load_plugins(SETTINGS)
self.assertEqual(len(plugins), 1, plugins)
self.assertEqual(
{'normal_plugin'},
get_plugin_names(plugins))
self.assertEqual({"normal_plugin"}, get_plugin_names(plugins))

# normal submodule/subpackage plugins
SETTINGS = {
'PLUGINS': [
'normal_submodule_plugin.subplugin',
'normal_submodule_plugin.subpackage.subpackage',
"PLUGINS": [
"normal_submodule_plugin.subplugin",
"normal_submodule_plugin.subpackage.subpackage",
],
'PLUGIN_PATHS': [self._NORMAL_PLUGIN_FOLDER]
"PLUGIN_PATHS": [self._NORMAL_PLUGIN_FOLDER],
}
plugins = load_plugins(SETTINGS)
self.assertEqual(len(plugins), 2, plugins)
self.assertEqual(
{'normal_submodule_plugin.subplugin',
'normal_submodule_plugin.subpackage.subpackage'},
get_plugin_names(plugins))
{
"normal_submodule_plugin.subplugin",
"normal_submodule_plugin.subpackage.subpackage",
},
get_plugin_names(plugins),
)

# ensure normal plugins are loaded only once
SETTINGS = {
'PLUGINS': ['normal_plugin'],
'PLUGIN_PATHS': [self._NORMAL_PLUGIN_FOLDER],
"PLUGINS": ["normal_plugin"],
"PLUGIN_PATHS": [self._NORMAL_PLUGIN_FOLDER],
}
plugins = load_plugins(SETTINGS)
for plugin in load_plugins(SETTINGS):
@ -143,40 +142,33 @@ class PluginTest(unittest.TestCase):
self.assertIn(plugin, plugins)

# namespace plugin short
SETTINGS = {
'PLUGINS': ['ns_plugin']
}
SETTINGS = {"PLUGINS": ["ns_plugin"]}
plugins = load_plugins(SETTINGS)
self.assertEqual(len(plugins), 1, plugins)
self.assertEqual(
{'pelican.plugins.ns_plugin'},
get_plugin_names(plugins))
self.assertEqual({"pelican.plugins.ns_plugin"}, get_plugin_names(plugins))

# namespace plugin long
SETTINGS = {
'PLUGINS': ['pelican.plugins.ns_plugin']
}
SETTINGS = {"PLUGINS": ["pelican.plugins.ns_plugin"]}
plugins = load_plugins(SETTINGS)
self.assertEqual(len(plugins), 1, plugins)
self.assertEqual(
{'pelican.plugins.ns_plugin'},
get_plugin_names(plugins))
self.assertEqual({"pelican.plugins.ns_plugin"}, get_plugin_names(plugins))

# normal and namespace plugin
SETTINGS = {
'PLUGINS': ['normal_plugin', 'ns_plugin'],
'PLUGIN_PATHS': [self._NORMAL_PLUGIN_FOLDER]
"PLUGINS": ["normal_plugin", "ns_plugin"],
"PLUGIN_PATHS": [self._NORMAL_PLUGIN_FOLDER],
}
plugins = load_plugins(SETTINGS)
self.assertEqual(len(plugins), 2, plugins)
self.assertEqual(
{'normal_plugin', 'pelican.plugins.ns_plugin'},
get_plugin_names(plugins))
{"normal_plugin", "pelican.plugins.ns_plugin"},
get_plugin_names(plugins),
)

def test_get_plugin_name(self):
self.assertEqual(
get_plugin_name(normal_plugin),
'pelican.tests.dummy_plugins.normal_plugin.normal_plugin',
"pelican.tests.dummy_plugins.normal_plugin.normal_plugin",
)

class NoopPlugin:
@ -185,7 +177,9 @@ class PluginTest(unittest.TestCase):

self.assertEqual(
get_plugin_name(NoopPlugin),
'PluginTest.test_get_plugin_name.<locals>.NoopPlugin')
"PluginTest.test_get_plugin_name.<locals>.NoopPlugin",
)
self.assertEqual(
get_plugin_name(NoopPlugin()),
'PluginTest.test_get_plugin_name.<locals>.NoopPlugin')
"PluginTest.test_get_plugin_name.<locals>.NoopPlugin",
)

File diff suppressed because it is too large
@ -6,11 +6,11 @@ from pelican.tests.support import unittest
class Test_abbr_role(unittest.TestCase):
def call_it(self, text):
from pelican.rstdirectives import abbr_role

rawtext = text
lineno = 42
inliner = Mock(name='inliner')
nodes, system_messages = abbr_role(
'abbr', rawtext, text, lineno, inliner)
inliner = Mock(name="inliner")
nodes, system_messages = abbr_role("abbr", rawtext, text, lineno, inliner)
self.assertEqual(system_messages, [])
self.assertEqual(len(nodes), 1)
return nodes[0]
@ -18,14 +18,14 @@ class Test_abbr_role(unittest.TestCase):
def test(self):
node = self.call_it("Abbr (Abbreviation)")
self.assertEqual(node.astext(), "Abbr")
self.assertEqual(node['explanation'], "Abbreviation")
self.assertEqual(node["explanation"], "Abbreviation")

def test_newlines_in_explanation(self):
node = self.call_it("CUL (See you\nlater)")
self.assertEqual(node.astext(), "CUL")
self.assertEqual(node['explanation'], "See you\nlater")
self.assertEqual(node["explanation"], "See you\nlater")

def test_newlines_in_abbr(self):
node = self.call_it("US of\nA \n (USA)")
self.assertEqual(node.astext(), "US of\nA")
self.assertEqual(node['explanation'], "USA")
self.assertEqual(node["explanation"], "USA")
@ -17,10 +17,9 @@ class MockServer:


class TestServer(unittest.TestCase):

def setUp(self):
self.server = MockServer()
self.temp_output = mkdtemp(prefix='pelicantests.')
self.temp_output = mkdtemp(prefix="pelicantests.")
self.old_cwd = os.getcwd()
os.chdir(self.temp_output)

@ -29,32 +28,33 @@ class TestServer(unittest.TestCase):
rmtree(self.temp_output)

def test_get_path_that_exists(self):
handler = ComplexHTTPRequestHandler(MockRequest(), ('0.0.0.0', 8888),
self.server)
handler = ComplexHTTPRequestHandler(
MockRequest(), ("0.0.0.0", 8888), self.server
)
handler.base_path = self.temp_output

open(os.path.join(self.temp_output, 'foo.html'), 'a').close()
os.mkdir(os.path.join(self.temp_output, 'foo'))
open(os.path.join(self.temp_output, 'foo', 'index.html'), 'a').close()
open(os.path.join(self.temp_output, "foo.html"), "a").close()
os.mkdir(os.path.join(self.temp_output, "foo"))
open(os.path.join(self.temp_output, "foo", "index.html"), "a").close()

os.mkdir(os.path.join(self.temp_output, 'bar'))
open(os.path.join(self.temp_output, 'bar', 'index.html'), 'a').close()
os.mkdir(os.path.join(self.temp_output, "bar"))
open(os.path.join(self.temp_output, "bar", "index.html"), "a").close()

os.mkdir(os.path.join(self.temp_output, 'baz'))
os.mkdir(os.path.join(self.temp_output, "baz"))

for suffix in ['', '/']:
for suffix in ["", "/"]:
# foo.html has precedence over foo/index.html
path = handler.get_path_that_exists('foo' + suffix)
self.assertEqual(path, 'foo.html')
path = handler.get_path_that_exists("foo" + suffix)
self.assertEqual(path, "foo.html")

# folder with index.html should return folder/index.html
path = handler.get_path_that_exists('bar' + suffix)
self.assertEqual(path, 'bar/index.html')
path = handler.get_path_that_exists("bar" + suffix)
self.assertEqual(path, "bar/index.html")

# folder without index.html should return same as input
path = handler.get_path_that_exists('baz' + suffix)
self.assertEqual(path, 'baz' + suffix)
path = handler.get_path_that_exists("baz" + suffix)
self.assertEqual(path, "baz" + suffix)

# not existing path should return None
path = handler.get_path_that_exists('quux' + suffix)
path = handler.get_path_that_exists("quux" + suffix)
self.assertIsNone(path)
@ -4,10 +4,14 @@ import os
from os.path import abspath, dirname, join


from pelican.settings import (DEFAULT_CONFIG, DEFAULT_THEME,
_printf_s_to_format_field,
configure_settings,
handle_deprecated_settings, read_settings)
from pelican.settings import (
DEFAULT_CONFIG,
DEFAULT_THEME,
_printf_s_to_format_field,
configure_settings,
handle_deprecated_settings,
read_settings,
)
from pelican.tests.support import unittest


@ -16,40 +20,39 @@ class TestSettingsConfiguration(unittest.TestCase):
append new values to the settings (if any), and apply basic settings
optimizations.
"""

def setUp(self):
self.old_locale = locale.setlocale(locale.LC_ALL)
locale.setlocale(locale.LC_ALL, 'C')
locale.setlocale(locale.LC_ALL, "C")
self.PATH = abspath(dirname(__file__))
default_conf = join(self.PATH, 'default_conf.py')
default_conf = join(self.PATH, "default_conf.py")
self.settings = read_settings(default_conf)

def tearDown(self):
locale.setlocale(locale.LC_ALL, self.old_locale)

def test_overwrite_existing_settings(self):
self.assertEqual(self.settings.get('SITENAME'), "Alexis' log")
self.assertEqual(
self.settings.get('SITEURL'),
'http://blog.notmyidea.org')
self.assertEqual(self.settings.get("SITENAME"), "Alexis' log")
self.assertEqual(self.settings.get("SITEURL"), "http://blog.notmyidea.org")

def test_keep_default_settings(self):
# Keep default settings if not defined.
self.assertEqual(
self.settings.get('DEFAULT_CATEGORY'),
DEFAULT_CONFIG['DEFAULT_CATEGORY'])
self.settings.get("DEFAULT_CATEGORY"), DEFAULT_CONFIG["DEFAULT_CATEGORY"]
)

def test_dont_copy_small_keys(self):
# Do not copy keys not in caps.
self.assertNotIn('foobar', self.settings)
self.assertNotIn("foobar", self.settings)

def test_read_empty_settings(self):
# Ensure an empty settings file results in default settings.
settings = read_settings(None)
expected = copy.deepcopy(DEFAULT_CONFIG)
# Added by configure settings
expected['FEED_DOMAIN'] = ''
expected['ARTICLE_EXCLUDES'] = ['pages']
expected['PAGE_EXCLUDES'] = ['']
expected["FEED_DOMAIN"] = ""
expected["ARTICLE_EXCLUDES"] = ["pages"]
expected["PAGE_EXCLUDES"] = [""]
self.maxDiff = None
self.assertDictEqual(settings, expected)

@ -57,250 +60,265 @@ class TestSettingsConfiguration(unittest.TestCase):
# Make sure that the results from one settings call doesn't
# effect past or future instances.
self.PATH = abspath(dirname(__file__))
default_conf = join(self.PATH, 'default_conf.py')
default_conf = join(self.PATH, "default_conf.py")
settings = read_settings(default_conf)
settings['SITEURL'] = 'new-value'
settings["SITEURL"] = "new-value"
new_settings = read_settings(default_conf)
self.assertNotEqual(new_settings['SITEURL'], settings['SITEURL'])
self.assertNotEqual(new_settings["SITEURL"], settings["SITEURL"])

def test_defaults_not_overwritten(self):
# This assumes 'SITENAME': 'A Pelican Blog'
settings = read_settings(None)
settings['SITENAME'] = 'Not a Pelican Blog'
self.assertNotEqual(settings['SITENAME'], DEFAULT_CONFIG['SITENAME'])
settings["SITENAME"] = "Not a Pelican Blog"
self.assertNotEqual(settings["SITENAME"], DEFAULT_CONFIG["SITENAME"])

def test_static_path_settings_safety(self):
# Disallow static paths from being strings
settings = {
'STATIC_PATHS': 'foo/bar',
'THEME_STATIC_PATHS': 'bar/baz',
"STATIC_PATHS": "foo/bar",
"THEME_STATIC_PATHS": "bar/baz",
# These 4 settings are required to run configure_settings
'PATH': '.',
'THEME': DEFAULT_THEME,
'SITEURL': 'http://blog.notmyidea.org/',
'LOCALE': '',
"PATH": ".",
"THEME": DEFAULT_THEME,
"SITEURL": "http://blog.notmyidea.org/",
"LOCALE": "",
}
configure_settings(settings)
self.assertEqual(settings["STATIC_PATHS"], DEFAULT_CONFIG["STATIC_PATHS"])
self.assertEqual(
settings['STATIC_PATHS'],
DEFAULT_CONFIG['STATIC_PATHS'])
self.assertEqual(
settings['THEME_STATIC_PATHS'],
DEFAULT_CONFIG['THEME_STATIC_PATHS'])
settings["THEME_STATIC_PATHS"], DEFAULT_CONFIG["THEME_STATIC_PATHS"]
)

def test_configure_settings(self):
# Manipulations to settings should be applied correctly.
settings = {
'SITEURL': 'http://blog.notmyidea.org/',
'LOCALE': '',
'PATH': os.curdir,
'THEME': DEFAULT_THEME,
"SITEURL": "http://blog.notmyidea.org/",
"LOCALE": "",
"PATH": os.curdir,
"THEME": DEFAULT_THEME,
}
configure_settings(settings)

# SITEURL should not have a trailing slash
self.assertEqual(settings['SITEURL'], 'http://blog.notmyidea.org')
self.assertEqual(settings["SITEURL"], "http://blog.notmyidea.org")

# FEED_DOMAIN, if undefined, should default to SITEURL
self.assertEqual(settings['FEED_DOMAIN'], 'http://blog.notmyidea.org')
self.assertEqual(settings["FEED_DOMAIN"], "http://blog.notmyidea.org")

settings['FEED_DOMAIN'] = 'http://feeds.example.com'
settings["FEED_DOMAIN"] = "http://feeds.example.com"
configure_settings(settings)
self.assertEqual(settings['FEED_DOMAIN'], 'http://feeds.example.com')
self.assertEqual(settings["FEED_DOMAIN"], "http://feeds.example.com")

def test_theme_settings_exceptions(self):
settings = self.settings

# Check that theme lookup in "pelican/themes" functions as expected
settings['THEME'] = os.path.split(settings['THEME'])[1]
settings["THEME"] = os.path.split(settings["THEME"])[1]
configure_settings(settings)
self.assertEqual(settings['THEME'], DEFAULT_THEME)
self.assertEqual(settings["THEME"], DEFAULT_THEME)

# Check that non-existent theme raises exception
settings['THEME'] = 'foo'
settings["THEME"] = "foo"
self.assertRaises(Exception, configure_settings, settings)

def test_deprecated_dir_setting(self):
settings = self.settings

settings['ARTICLE_DIR'] = 'foo'
settings['PAGE_DIR'] = 'bar'
settings["ARTICLE_DIR"] = "foo"
settings["PAGE_DIR"] = "bar"

settings = handle_deprecated_settings(settings)

self.assertEqual(settings['ARTICLE_PATHS'], ['foo'])
self.assertEqual(settings['PAGE_PATHS'], ['bar'])
self.assertEqual(settings["ARTICLE_PATHS"], ["foo"])
self.assertEqual(settings["PAGE_PATHS"], ["bar"])

with self.assertRaises(KeyError):
settings['ARTICLE_DIR']
settings['PAGE_DIR']
settings["ARTICLE_DIR"]
settings["PAGE_DIR"]

def test_default_encoding(self):
# Test that the user locale is set if not specified in settings

locale.setlocale(locale.LC_ALL, 'C')
locale.setlocale(locale.LC_ALL, "C")
# empty string = user system locale
self.assertEqual(self.settings['LOCALE'], [''])
self.assertEqual(self.settings["LOCALE"], [""])

configure_settings(self.settings)
lc_time = locale.getlocale(locale.LC_TIME) # should be set to user locale

# explicitly set locale to user pref and test
locale.setlocale(locale.LC_TIME, '')
locale.setlocale(locale.LC_TIME, "")
self.assertEqual(lc_time, locale.getlocale(locale.LC_TIME))

def test_invalid_settings_throw_exception(self):
# Test that the path name is valid

# test that 'PATH' is set
settings = {
}
settings = {}

self.assertRaises(Exception, configure_settings, settings)

# Test that 'PATH' is valid
settings['PATH'] = ''
settings["PATH"] = ""
self.assertRaises(Exception, configure_settings, settings)

# Test nonexistent THEME
settings['PATH'] = os.curdir
settings['THEME'] = 'foo'
settings["PATH"] = os.curdir
settings["THEME"] = "foo"

self.assertRaises(Exception, configure_settings, settings)

def test__printf_s_to_format_field(self):
for s in ('%s', '{%s}', '{%s'):
option = 'foo/{}/bar.baz'.format(s)
result = _printf_s_to_format_field(option, 'slug')
expected = option % 'qux'
found = result.format(slug='qux')
for s in ("%s", "{%s}", "{%s"):
option = "foo/{}/bar.baz".format(s)
result = _printf_s_to_format_field(option, "slug")
expected = option % "qux"
found = result.format(slug="qux")
self.assertEqual(expected, found)

def test_deprecated_extra_templates_paths(self):
settings = self.settings
settings['EXTRA_TEMPLATES_PATHS'] = ['/foo/bar', '/ha']
settings["EXTRA_TEMPLATES_PATHS"] = ["/foo/bar", "/ha"]

settings = handle_deprecated_settings(settings)

self.assertEqual(settings['THEME_TEMPLATES_OVERRIDES'],
['/foo/bar', '/ha'])
self.assertNotIn('EXTRA_TEMPLATES_PATHS', settings)
self.assertEqual(settings["THEME_TEMPLATES_OVERRIDES"], ["/foo/bar", "/ha"])
self.assertNotIn("EXTRA_TEMPLATES_PATHS", settings)

def test_deprecated_paginated_direct_templates(self):
settings = self.settings
settings['PAGINATED_DIRECT_TEMPLATES'] = ['index', 'archives']
settings['PAGINATED_TEMPLATES'] = {'index': 10, 'category': None}
settings["PAGINATED_DIRECT_TEMPLATES"] = ["index", "archives"]
settings["PAGINATED_TEMPLATES"] = {"index": 10, "category": None}
settings = handle_deprecated_settings(settings)
self.assertEqual(settings['PAGINATED_TEMPLATES'],
{'index': 10, 'category': None, 'archives': None})
self.assertNotIn('PAGINATED_DIRECT_TEMPLATES', settings)
self.assertEqual(
settings["PAGINATED_TEMPLATES"],
{"index": 10, "category": None, "archives": None},
)
self.assertNotIn("PAGINATED_DIRECT_TEMPLATES", settings)

def test_deprecated_paginated_direct_templates_from_file(self):
# This is equivalent to reading a settings file that has
# PAGINATED_DIRECT_TEMPLATES defined but no PAGINATED_TEMPLATES.
settings = read_settings(None, override={
'PAGINATED_DIRECT_TEMPLATES': ['index', 'archives']
})
self.assertEqual(settings['PAGINATED_TEMPLATES'], {
'archives': None,
'author': None,
'index': None,
'category': None,
'tag': None})
self.assertNotIn('PAGINATED_DIRECT_TEMPLATES', settings)
settings = read_settings(
None, override={"PAGINATED_DIRECT_TEMPLATES": ["index", "archives"]}
)
self.assertEqual(
settings["PAGINATED_TEMPLATES"],
{
"archives": None,
"author": None,
"index": None,
"category": None,
"tag": None,
},
)
self.assertNotIn("PAGINATED_DIRECT_TEMPLATES", settings)

def test_theme_and_extra_templates_exception(self):
settings = self.settings
settings['EXTRA_TEMPLATES_PATHS'] = ['/ha']
settings['THEME_TEMPLATES_OVERRIDES'] = ['/foo/bar']
settings["EXTRA_TEMPLATES_PATHS"] = ["/ha"]
settings["THEME_TEMPLATES_OVERRIDES"] = ["/foo/bar"]

self.assertRaises(Exception, handle_deprecated_settings, settings)

def test_slug_and_slug_regex_substitutions_exception(self):
settings = {}
settings['SLUG_REGEX_SUBSTITUTIONS'] = [('C++', 'cpp')]
settings['TAG_SUBSTITUTIONS'] = [('C#', 'csharp')]
settings["SLUG_REGEX_SUBSTITUTIONS"] = [("C++", "cpp")]
settings["TAG_SUBSTITUTIONS"] = [("C#", "csharp")]

self.assertRaises(Exception, handle_deprecated_settings, settings)

def test_deprecated_slug_substitutions(self):
default_slug_regex_subs = self.settings['SLUG_REGEX_SUBSTITUTIONS']
default_slug_regex_subs = self.settings["SLUG_REGEX_SUBSTITUTIONS"]

# If no deprecated setting is set, don't set new ones
settings = {}
settings = handle_deprecated_settings(settings)
self.assertNotIn('SLUG_REGEX_SUBSTITUTIONS', settings)
self.assertNotIn('TAG_REGEX_SUBSTITUTIONS', settings)
self.assertNotIn('CATEGORY_REGEX_SUBSTITUTIONS', settings)
self.assertNotIn('AUTHOR_REGEX_SUBSTITUTIONS', settings)
self.assertNotIn("SLUG_REGEX_SUBSTITUTIONS", settings)
self.assertNotIn("TAG_REGEX_SUBSTITUTIONS", settings)
self.assertNotIn("CATEGORY_REGEX_SUBSTITUTIONS", settings)
self.assertNotIn("AUTHOR_REGEX_SUBSTITUTIONS", settings)

# If SLUG_SUBSTITUTIONS is set, set {SLUG, AUTHOR}_REGEX_SUBSTITUTIONS
# correctly, don't set {CATEGORY, TAG}_REGEX_SUBSTITUTIONS
settings = {}
settings['SLUG_SUBSTITUTIONS'] = [('C++', 'cpp')]
settings["SLUG_SUBSTITUTIONS"] = [("C++", "cpp")]
settings = handle_deprecated_settings(settings)
self.assertEqual(settings.get('SLUG_REGEX_SUBSTITUTIONS'),
[(r'C\+\+', 'cpp')] + default_slug_regex_subs)
self.assertNotIn('TAG_REGEX_SUBSTITUTIONS', settings)
self.assertNotIn('CATEGORY_REGEX_SUBSTITUTIONS', settings)
self.assertEqual(settings.get('AUTHOR_REGEX_SUBSTITUTIONS'),
default_slug_regex_subs)
self.assertEqual(
settings.get("SLUG_REGEX_SUBSTITUTIONS"),
[(r"C\+\+", "cpp")] + default_slug_regex_subs,
)
self.assertNotIn("TAG_REGEX_SUBSTITUTIONS", settings)
self.assertNotIn("CATEGORY_REGEX_SUBSTITUTIONS", settings)
self.assertEqual(
settings.get("AUTHOR_REGEX_SUBSTITUTIONS"), default_slug_regex_subs
)

# If {CATEGORY, TAG, AUTHOR}_SUBSTITUTIONS are set, set
|
||||
# {CATEGORY, TAG, AUTHOR}_REGEX_SUBSTITUTIONS correctly, don't set
|
||||
# SLUG_REGEX_SUBSTITUTIONS
|
||||
settings = {}
|
||||
settings['TAG_SUBSTITUTIONS'] = [('C#', 'csharp')]
|
||||
settings['CATEGORY_SUBSTITUTIONS'] = [('C#', 'csharp')]
|
||||
settings['AUTHOR_SUBSTITUTIONS'] = [('Alexander Todorov', 'atodorov')]
|
||||
settings["TAG_SUBSTITUTIONS"] = [("C#", "csharp")]
|
||||
settings["CATEGORY_SUBSTITUTIONS"] = [("C#", "csharp")]
|
||||
settings["AUTHOR_SUBSTITUTIONS"] = [("Alexander Todorov", "atodorov")]
|
||||
settings = handle_deprecated_settings(settings)
|
||||
self.assertNotIn('SLUG_REGEX_SUBSTITUTIONS', settings)
|
||||
self.assertEqual(settings['TAG_REGEX_SUBSTITUTIONS'],
|
||||
[(r'C\#', 'csharp')] + default_slug_regex_subs)
|
||||
self.assertEqual(settings['CATEGORY_REGEX_SUBSTITUTIONS'],
|
||||
[(r'C\#', 'csharp')] + default_slug_regex_subs)
|
||||
self.assertEqual(settings['AUTHOR_REGEX_SUBSTITUTIONS'],
|
||||
[(r'Alexander\ Todorov', 'atodorov')] +
|
||||
default_slug_regex_subs)
|
||||
self.assertNotIn("SLUG_REGEX_SUBSTITUTIONS", settings)
|
||||
self.assertEqual(
|
||||
settings["TAG_REGEX_SUBSTITUTIONS"],
|
||||
[(r"C\#", "csharp")] + default_slug_regex_subs,
|
||||
)
|
||||
self.assertEqual(
|
||||
settings["CATEGORY_REGEX_SUBSTITUTIONS"],
|
||||
[(r"C\#", "csharp")] + default_slug_regex_subs,
|
||||
)
|
||||
+        self.assertEqual(
+            settings["AUTHOR_REGEX_SUBSTITUTIONS"],
+            [(r"Alexander\ Todorov", "atodorov")] + default_slug_regex_subs,
+        )
 
         # If {SLUG, CATEGORY, TAG, AUTHOR}_SUBSTITUTIONS are set, set
         # {SLUG, CATEGORY, TAG, AUTHOR}_REGEX_SUBSTITUTIONS correctly
         settings = {}
-        settings['SLUG_SUBSTITUTIONS'] = [('C++', 'cpp')]
-        settings['TAG_SUBSTITUTIONS'] = [('C#', 'csharp')]
-        settings['CATEGORY_SUBSTITUTIONS'] = [('C#', 'csharp')]
-        settings['AUTHOR_SUBSTITUTIONS'] = [('Alexander Todorov', 'atodorov')]
+        settings["SLUG_SUBSTITUTIONS"] = [("C++", "cpp")]
+        settings["TAG_SUBSTITUTIONS"] = [("C#", "csharp")]
+        settings["CATEGORY_SUBSTITUTIONS"] = [("C#", "csharp")]
+        settings["AUTHOR_SUBSTITUTIONS"] = [("Alexander Todorov", "atodorov")]
         settings = handle_deprecated_settings(settings)
-        self.assertEqual(settings['TAG_REGEX_SUBSTITUTIONS'],
-                         [(r'C\+\+', 'cpp')] + [(r'C\#', 'csharp')] +
-                         default_slug_regex_subs)
-        self.assertEqual(settings['CATEGORY_REGEX_SUBSTITUTIONS'],
-                         [(r'C\+\+', 'cpp')] + [(r'C\#', 'csharp')] +
-                         default_slug_regex_subs)
-        self.assertEqual(settings['AUTHOR_REGEX_SUBSTITUTIONS'],
-                         [(r'Alexander\ Todorov', 'atodorov')] +
-                         default_slug_regex_subs)
+        self.assertEqual(
+            settings["TAG_REGEX_SUBSTITUTIONS"],
+            [(r"C\+\+", "cpp")] + [(r"C\#", "csharp")] + default_slug_regex_subs,
+        )
+        self.assertEqual(
+            settings["CATEGORY_REGEX_SUBSTITUTIONS"],
+            [(r"C\+\+", "cpp")] + [(r"C\#", "csharp")] + default_slug_regex_subs,
+        )
+        self.assertEqual(
+            settings["AUTHOR_REGEX_SUBSTITUTIONS"],
+            [(r"Alexander\ Todorov", "atodorov")] + default_slug_regex_subs,
+        )
 
         # Handle old 'skip' flags correctly
         settings = {}
-        settings['SLUG_SUBSTITUTIONS'] = [('C++', 'cpp', True)]
-        settings['AUTHOR_SUBSTITUTIONS'] = [('Alexander Todorov', 'atodorov',
-                                             False)]
+        settings["SLUG_SUBSTITUTIONS"] = [("C++", "cpp", True)]
+        settings["AUTHOR_SUBSTITUTIONS"] = [("Alexander Todorov", "atodorov", False)]
         settings = handle_deprecated_settings(settings)
-        self.assertEqual(settings.get('SLUG_REGEX_SUBSTITUTIONS'),
-                         [(r'C\+\+', 'cpp')] +
-                         [(r'(?u)\A\s*', ''), (r'(?u)\s*\Z', '')])
-        self.assertEqual(settings['AUTHOR_REGEX_SUBSTITUTIONS'],
-                         [(r'Alexander\ Todorov', 'atodorov')] +
-                         default_slug_regex_subs)
+        self.assertEqual(
+            settings.get("SLUG_REGEX_SUBSTITUTIONS"),
+            [(r"C\+\+", "cpp")] + [(r"(?u)\A\s*", ""), (r"(?u)\s*\Z", "")],
+        )
+        self.assertEqual(
+            settings["AUTHOR_REGEX_SUBSTITUTIONS"],
+            [(r"Alexander\ Todorov", "atodorov")] + default_slug_regex_subs,
+        )
 
     def test_deprecated_slug_substitutions_from_file(self):
         # This is equivalent to reading a settings file that has
         # SLUG_SUBSTITUTIONS defined but no SLUG_REGEX_SUBSTITUTIONS.
-        settings = read_settings(None, override={
-            'SLUG_SUBSTITUTIONS': [('C++', 'cpp')]
-        })
-        self.assertEqual(settings['SLUG_REGEX_SUBSTITUTIONS'],
-                         [(r'C\+\+', 'cpp')] +
-                         self.settings['SLUG_REGEX_SUBSTITUTIONS'])
-        self.assertNotIn('SLUG_SUBSTITUTIONS', settings)
+        settings = read_settings(
+            None, override={"SLUG_SUBSTITUTIONS": [("C++", "cpp")]}
+        )
+        self.assertEqual(
+            settings["SLUG_REGEX_SUBSTITUTIONS"],
+            [(r"C\+\+", "cpp")] + self.settings["SLUG_REGEX_SUBSTITUTIONS"],
+        )
+        self.assertNotIn("SLUG_SUBSTITUTIONS", settings)
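Context for the hunk above: these tests pin down how handle_deprecated_settings rewrites the deprecated plain-string *_SUBSTITUTIONS settings into *_REGEX_SUBSTITUTIONS. Below is a minimal sketch of that migration, not Pelican's actual implementation; migrate_substitutions and DEFAULT_SLUG_REGEX_SUBS are hypothetical names, and the real helper additionally folds the SLUG pairs into TAG/CATEGORY and honours the old skip flag, as the assertions show.

import re

# Pelican's default slug regex substitutions: strip punctuation, trim
# surrounding whitespace, collapse runs of dashes/spaces.
DEFAULT_SLUG_REGEX_SUBS = [
    (r"[^\w\s-]", ""),
    (r"(?u)\A\s*", ""),
    (r"(?u)\s*\Z", ""),
    (r"[-\s]+", "-"),
]


def migrate_substitutions(settings):
    # Escape each literal pattern and append the defaults.  The real
    # handle_deprecated_settings() also prepends the SLUG pairs to the
    # TAG/CATEGORY lists and maps an old-style skip=True entry to just
    # the whitespace trimmers.
    for kind in ("SLUG", "TAG", "CATEGORY", "AUTHOR"):
        old_key = f"{kind}_SUBSTITUTIONS"
        new_key = f"{kind}_REGEX_SUBSTITUTIONS"
        if old_key in settings and new_key not in settings:
            escaped = [(re.escape(src), dst) for src, dst, *_ in settings.pop(old_key)]
            settings[new_key] = escaped + DEFAULT_SLUG_REGEX_SUBS
    return settings


print(migrate_substitutions({"SLUG_SUBSTITUTIONS": [("C++", "cpp")]}))
# -> {'SLUG_REGEX_SUBSTITUTIONS': [('C\\+\\+', 'cpp'), ...defaults...]}

Note that re.escape() produces exactly the escaped forms asserted above, e.g. "C++" becomes r"C\+\+" and "Alexander Todorov" becomes r"Alexander\ Todorov".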
@@ -4,7 +4,6 @@ from pelican.tests.support import unittest
 
 
 class TestSuiteTest(unittest.TestCase):
-
     def test_error_on_warning(self):
         with self.assertRaises(UserWarning):
-            warnings.warn('test warning')
+            warnings.warn("test warning")
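The assertRaises(UserWarning) above only passes because the suite promotes warnings to errors. A minimal sketch of such a filter, using only the standard-library warnings module; how Pelican's suite actually installs it may differ.

import warnings

# Promote every warning to an exception, so a stray warnings.warn()
# in the code under test fails the run instead of scrolling by.
warnings.simplefilter("error")

try:
    warnings.warn("test warning")  # now raises UserWarning
except UserWarning as exc:
    print(f"caught as error: {exc}")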
@@ -5,22 +5,22 @@ from pelican.urlwrappers import Author, Category, Tag, URLWrapper
 class TestURLWrapper(unittest.TestCase):
     def test_ordering(self):
         # URLWrappers are sorted by name
-        wrapper_a = URLWrapper(name='first', settings={})
-        wrapper_b = URLWrapper(name='last', settings={})
+        wrapper_a = URLWrapper(name="first", settings={})
+        wrapper_b = URLWrapper(name="last", settings={})
         self.assertFalse(wrapper_a > wrapper_b)
         self.assertFalse(wrapper_a >= wrapper_b)
         self.assertFalse(wrapper_a == wrapper_b)
         self.assertTrue(wrapper_a != wrapper_b)
         self.assertTrue(wrapper_a <= wrapper_b)
         self.assertTrue(wrapper_a < wrapper_b)
-        wrapper_b.name = 'first'
+        wrapper_b.name = "first"
         self.assertFalse(wrapper_a > wrapper_b)
         self.assertTrue(wrapper_a >= wrapper_b)
         self.assertTrue(wrapper_a == wrapper_b)
         self.assertFalse(wrapper_a != wrapper_b)
         self.assertTrue(wrapper_a <= wrapper_b)
         self.assertFalse(wrapper_a < wrapper_b)
-        wrapper_a.name = 'last'
+        wrapper_a.name = "last"
         self.assertTrue(wrapper_a > wrapper_b)
         self.assertTrue(wrapper_a >= wrapper_b)
         self.assertFalse(wrapper_a == wrapper_b)
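For readers of the ordering assertions above: URLWrapper comparisons are driven by the name. A hypothetical, simplified stand-in (NamedWrapper is not Pelican's class) that reproduces the contract these tests exercise:

from functools import total_ordering


@total_ordering
class NamedWrapper:
    # Equality and ordering both derive from a normalized name, so two
    # wrappers with the same name (ignoring case) compare equal.
    def __init__(self, name):
        self.name = name

    def _key(self):
        return self.name.lower()

    def __eq__(self, other):
        return self._key() == other._key()

    def __lt__(self, other):
        return self._key() < other._key()


first, last = NamedWrapper("first"), NamedWrapper("last")
assert first < last and first != last
last.name = "first"
assert first == last and first <= last

total_ordering fills in <=, >, >= and != from __eq__ and __lt__, which is why the tests can probe all six operators against a single ordering key.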
@@ -29,57 +29,68 @@ class TestURLWrapper(unittest.TestCase):
         self.assertFalse(wrapper_a < wrapper_b)
 
     def test_equality(self):
-        tag = Tag('test', settings={})
-        cat = Category('test', settings={})
-        author = Author('test', settings={})
+        tag = Tag("test", settings={})
+        cat = Category("test", settings={})
+        author = Author("test", settings={})
 
         # same name, but different class
         self.assertNotEqual(tag, cat)
         self.assertNotEqual(tag, author)
 
         # should be equal vs text representing the same name
-        self.assertEqual(tag, 'test')
+        self.assertEqual(tag, "test")
 
         # should not be equal vs binary
-        self.assertNotEqual(tag, b'test')
+        self.assertNotEqual(tag, b"test")
 
         # Tags describing the same should be equal
-        tag_equal = Tag('Test', settings={})
+        tag_equal = Tag("Test", settings={})
         self.assertEqual(tag, tag_equal)
 
         # Author describing the same should be equal
-        author_equal = Author('Test', settings={})
+        author_equal = Author("Test", settings={})
         self.assertEqual(author, author_equal)
 
-        cat_ascii = Category('指導書', settings={})
-        self.assertEqual(cat_ascii, 'zhi dao shu')
+        cat_ascii = Category("指導書", settings={})
+        self.assertEqual(cat_ascii, "zhi dao shu")
 
     def test_slugify_with_substitutions_and_dots(self):
-        tag = Tag('Tag Dot', settings={'TAG_REGEX_SUBSTITUTIONS': [
-            ('Tag Dot', 'tag.dot'),
-        ]})
-        cat = Category('Category Dot',
-                       settings={'CATEGORY_REGEX_SUBSTITUTIONS': [
-                           ('Category Dot', 'cat.dot'),
-                       ]})
+        tag = Tag(
+            "Tag Dot",
+            settings={
+                "TAG_REGEX_SUBSTITUTIONS": [
+                    ("Tag Dot", "tag.dot"),
+                ]
+            },
+        )
+        cat = Category(
+            "Category Dot",
+            settings={
+                "CATEGORY_REGEX_SUBSTITUTIONS": [
+                    ("Category Dot", "cat.dot"),
+                ]
+            },
+        )
 
-        self.assertEqual(tag.slug, 'tag.dot')
-        self.assertEqual(cat.slug, 'cat.dot')
+        self.assertEqual(tag.slug, "tag.dot")
+        self.assertEqual(cat.slug, "cat.dot")
 
     def test_author_slug_substitutions(self):
-        settings = {'AUTHOR_REGEX_SUBSTITUTIONS': [
-            ('Alexander Todorov', 'atodorov'),
-            ('Krasimir Tsonev', 'krasimir'),
-            (r'[^\w\s-]', ''),
-            (r'(?u)\A\s*', ''),
-            (r'(?u)\s*\Z', ''),
-            (r'[-\s]+', '-'),
-        ]}
+        settings = {
+            "AUTHOR_REGEX_SUBSTITUTIONS": [
+                ("Alexander Todorov", "atodorov"),
+                ("Krasimir Tsonev", "krasimir"),
+                (r"[^\w\s-]", ""),
+                (r"(?u)\A\s*", ""),
+                (r"(?u)\s*\Z", ""),
+                (r"[-\s]+", "-"),
+            ]
+        }
 
-        author1 = Author('Mr. Senko', settings=settings)
-        author2 = Author('Alexander Todorov', settings=settings)
-        author3 = Author('Krasimir Tsonev', settings=settings)
+        author1 = Author("Mr. Senko", settings=settings)
+        author2 = Author("Alexander Todorov", settings=settings)
+        author3 = Author("Krasimir Tsonev", settings=settings)
 
-        self.assertEqual(author1.slug, 'mr-senko')
-        self.assertEqual(author2.slug, 'atodorov')
-        self.assertEqual(author3.slug, 'krasimir')
+        self.assertEqual(author1.slug, "mr-senko")
+        self.assertEqual(author2.slug, "atodorov")
+        self.assertEqual(author3.slug, "krasimir")
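The slug assertions above combine transliteration with ordered regex substitution, which is also why Category("指導書") compares equal to "zhi dao shu". A simplified sketch of that pipeline, assuming the third-party unidecode package; Pelican's real slugify lives in pelican/utils.py and differs in details.

import re

from unidecode import unidecode  # third-party transliteration library


def slugify(value, regex_subs):
    # Transliterate to ASCII first ("指導書" becomes roughly "Zhi Dao Shu"),
    # then apply each (pattern, replacement) pair in order, then lowercase.
    value = unidecode(value)
    for pattern, replacement in regex_subs:
        value = re.sub(pattern, replacement, value)
    return value.lower()


subs = [
    (r"[^\w\s-]", ""),   # drop punctuation such as the "." in "Mr. Senko"
    (r"(?u)\A\s*", ""),  # trim leading whitespace
    (r"(?u)\s*\Z", ""),  # trim trailing whitespace
    (r"[-\s]+", "-"),    # collapse spaces and dashes into a single dash
]
print(slugify("Mr. Senko", subs))  # -> mr-senko

Because the user-supplied pairs come first in the list, an exact match such as ("Alexander Todorov", "atodorov") wins before the generic cleanup patterns run, which is what test_author_slug_substitutions relies on.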
File diff suppressed because it is too large