Mirror of https://github.com/getpelican/pelican.git
Merge pull request #2058 from avaris/fix_generator_caching

Fix generator caching

commit 44bc69b320
2 changed files with 104 additions and 8 deletions
pelican/generators.py

@@ -533,7 +533,7 @@ class ArticlesGenerator(CachingGenerator):
                     continue

                 if article_or_draft.status.lower() == "published":
-                    all_articles.append(article_or_draft)
+                    pass
                 elif article_or_draft.status.lower() == "draft":
                     article_or_draft = self.readers.read_file(
                         base_path=self.path, path=f, content_class=Draft,
@@ -542,8 +542,6 @@ class ArticlesGenerator(CachingGenerator):
                         preread_sender=self,
                         context_signal=signals.article_generator_context,
                         context_sender=self)
-                    self.add_source_path(article_or_draft)
-                    all_drafts.append(article_or_draft)
                 else:
                     logger.error(
                         "Unknown status '%s' for file %s, skipping it.",
@@ -553,6 +551,10 @@ class ArticlesGenerator(CachingGenerator):

                 self.cache_data(f, article_or_draft)

+            if article_or_draft.status.lower() == "published":
+                all_articles.append(article_or_draft)
+            else:
+                all_drafts.append(article_or_draft)
             self.add_source_path(article_or_draft)

         self.articles, self.translations = process_translations(
@@ -636,11 +638,7 @@ class PagesGenerator(CachingGenerator):
                     self._add_failed_source_path(f)
                     continue

-                if page.status.lower() == "published":
-                    all_pages.append(page)
-                elif page.status.lower() == "hidden":
-                    hidden_pages.append(page)
-                else:
+                if page.status.lower() not in ("published", "hidden"):
                     logger.error(
                         "Unknown status '%s' for file %s, skipping it.",
                         page.status, f)
@@ -649,6 +647,10 @@ class PagesGenerator(CachingGenerator):

                 self.cache_data(f, page)

+            if page.status.lower() == "published":
+                all_pages.append(page)
+            elif page.status.lower() == "hidden":
+                hidden_pages.append(page)
             self.add_source_path(page)

         self.pages, self.translations = process_translations(
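Both generators get the same treatment: the published/draft (or published/hidden) classification moves out of the cache-miss branch, so objects returned by get_cached_data are appended just like freshly read ones. The sketch below is a minimal, self-contained illustration of that pattern, not Pelican's code; SOURCE, the dict stand-ins for articles, and this generate_context() are invented for the example, while the cache-then-classify ordering mirrors the hunks above.

# Minimal, self-contained sketch of the pattern these hunks establish; it is
# not Pelican's code. SOURCE and the dict "articles" are invented stand-ins;
# the cache-then-classify ordering mirrors the diff above.

cache = {}                                            # stands in for the generator-level cache
SOURCE = {"a.rst": "published", "b.rst": "draft"}     # fake content files

def generate_context(paths):
    all_articles, all_drafts = [], []
    for f in paths:
        article = cache.get(f)                        # cf. self.get_cached_data(f, None)
        if article is None:
            article = {"path": f, "status": SOURCE[f]}    # cf. self.readers.read_file(...)
            cache[f] = article                        # cf. self.cache_data(f, article)
        # The appends now sit outside the cache-miss branch; before this change
        # they ran only when the file was actually read, so a warm cache
        # produced empty article and draft lists.
        if article["status"] == "published":
            all_articles.append(article)
        else:
            all_drafts.append(article)
    return all_articles, all_drafts

cold = generate_context(SOURCE)   # first build: every file is a cache miss
warm = generate_context(SOURCE)   # second build: everything comes from the cache
assert cold == warm               # both builds see the same articles and drafts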
pelican/tests/test_cache.py

@@ -36,6 +36,100 @@ class TestCache(unittest.TestCase):
         settings['CACHE_PATH'] = self.temp_cache
         return settings

+    def test_generator_caching(self):
+        """Test that cached and uncached content is same in generator level"""
+        settings = self._get_cache_enabled_settings()
+        settings['CONTENT_CACHING_LAYER'] = 'generator'
+        settings['PAGE_PATHS'] = ['TestPages']
+        settings['DEFAULT_DATE'] = (1970, 1, 1)
+        settings['READERS'] = {'asc': None}
+
+        def sorted_titles(items):
+            return sorted(item.title for item in items)
+
+        # Articles
+        generator = ArticlesGenerator(
+            context=settings.copy(), settings=settings,
+            path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+        generator.generate_context()
+        uncached_articles = sorted_titles(generator.articles)
+        uncached_drafts = sorted_titles(generator.drafts)
+
+        generator = ArticlesGenerator(
+            context=settings.copy(), settings=settings,
+            path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+        generator.generate_context()
+        cached_articles = sorted_titles(generator.articles)
+        cached_drafts = sorted_titles(generator.drafts)
+
+        self.assertEqual(uncached_articles, cached_articles)
+        self.assertEqual(uncached_drafts, cached_drafts)
+
+        # Pages
+        generator = PagesGenerator(
+            context=settings.copy(), settings=settings,
+            path=CUR_DIR, theme=settings['THEME'], output_path=None)
+        generator.generate_context()
+        uncached_pages = sorted_titles(generator.pages)
+        uncached_hidden_pages = sorted_titles(generator.hidden_pages)
+
+        generator = PagesGenerator(
+            context=settings.copy(), settings=settings,
+            path=CUR_DIR, theme=settings['THEME'], output_path=None)
+        generator.generate_context()
+        cached_pages = sorted_titles(generator.pages)
+        cached_hidden_pages = sorted_titles(generator.hidden_pages)
+
+        self.assertEqual(uncached_pages, cached_pages)
+        self.assertEqual(uncached_hidden_pages, cached_hidden_pages)
+
+    def test_reader_caching(self):
+        """Test that cached and uncached content is same in reader level"""
+        settings = self._get_cache_enabled_settings()
+        settings['CONTENT_CACHING_LAYER'] = 'reader'
+        settings['PAGE_PATHS'] = ['TestPages']
+        settings['DEFAULT_DATE'] = (1970, 1, 1)
+        settings['READERS'] = {'asc': None}
+
+        def sorted_titles(items):
+            return sorted(item.title for item in items)
+
+        # Articles
+        generator = ArticlesGenerator(
+            context=settings.copy(), settings=settings,
+            path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+        generator.generate_context()
+        uncached_articles = sorted_titles(generator.articles)
+        uncached_drafts = sorted_titles(generator.drafts)
+
+        generator = ArticlesGenerator(
+            context=settings.copy(), settings=settings,
+            path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+        generator.generate_context()
+        cached_articles = sorted_titles(generator.articles)
+        cached_drafts = sorted_titles(generator.drafts)
+
+        self.assertEqual(uncached_articles, cached_articles)
+        self.assertEqual(uncached_drafts, cached_drafts)
+
+        # Pages
+        generator = PagesGenerator(
+            context=settings.copy(), settings=settings,
+            path=CUR_DIR, theme=settings['THEME'], output_path=None)
+        generator.generate_context()
+        uncached_pages = sorted_titles(generator.pages)
+        uncached_hidden_pages = sorted_titles(generator.hidden_pages)
+
+        generator = PagesGenerator(
+            context=settings.copy(), settings=settings,
+            path=CUR_DIR, theme=settings['THEME'], output_path=None)
+        generator.generate_context()
+        cached_pages = sorted_titles(generator.pages)
+        cached_hidden_pages = sorted_titles(generator.hidden_pages)
+
+        self.assertEqual(uncached_pages, cached_pages)
+        self.assertEqual(uncached_hidden_pages, cached_hidden_pages)
+
     @unittest.skipUnless(MagicMock, 'Needs Mock module')
     def test_article_object_caching(self):
         """Test Article objects caching at the generator level"""
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue