forked from github/pelican
Merge pull request #1318 from smartass101/fix_cacher_open_func
set _cache_open func even if not loading cache, fixes autoreload
This commit is contained in:
commit
06080dd873
2 changed files with 65 additions and 19 deletions
|
|
@ -307,6 +307,30 @@ class TestArticlesGenerator(unittest.TestCase):
|
|||
generator.generate_context()
|
||||
generator.readers.read_file.assert_called_count == 0
|
||||
|
||||
def test_full_rebuild(self):
    """Test that all the articles are read again when not loading cache

    used in --full-rebuild or autoreload mode"""
    settings = get_settings(filenames={})
    settings['CACHE_DIRECTORY'] = self.temp_cache
    settings['READERS'] = {'asc': None}

    # First pass with caching enabled: populates the cache and must
    # expose the _cache_open function used for later cache writes.
    generator = ArticlesGenerator(
        context=settings.copy(), settings=settings,
        path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
    generator.readers.read_file = MagicMock()
    generator.generate_context()
    self.assertTrue(hasattr(generator, '_cache_open'))
    orig_call_count = generator.readers.read_file.call_count

    # Second pass with cache loading disabled: every article must be
    # read from disk again, i.e. the same number of read_file calls.
    settings['LOAD_CONTENT_CACHE'] = False
    generator = ArticlesGenerator(
        context=settings.copy(), settings=settings,
        path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
    generator.readers.read_file = MagicMock()
    generator.generate_context()
    # The original line `...read_file.assert_called_count == orig_call_count`
    # was a no-op: `assert_called_count` is not a Mock method, and the
    # comparison result was discarded, so nothing was actually asserted.
    self.assertEqual(generator.readers.read_file.call_count, orig_call_count)
|
||||
|
||||
|
||||
class TestPageGenerator(unittest.TestCase):
|
||||
# Note: Every time you want to test for a new field; Make sure the test
|
||||
|
|
@ -372,6 +396,30 @@ class TestPageGenerator(unittest.TestCase):
|
|||
generator.generate_context()
|
||||
generator.readers.read_file.assert_called_count == 0
|
||||
|
||||
def test_full_rebuild(self):
    """Test that all the pages are read again when not loading cache

    used in --full-rebuild or autoreload mode"""
    settings = get_settings(filenames={})
    settings['CACHE_DIRECTORY'] = self.temp_cache
    settings['READERS'] = {'asc': None}

    # First pass with caching enabled: populates the cache and must
    # expose the _cache_open function used for later cache writes.
    generator = PagesGenerator(
        context=settings.copy(), settings=settings,
        path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
    generator.readers.read_file = MagicMock()
    generator.generate_context()
    self.assertTrue(hasattr(generator, '_cache_open'))
    orig_call_count = generator.readers.read_file.call_count

    # Second pass with cache loading disabled: every page must be
    # read from disk again, i.e. the same number of read_file calls.
    settings['LOAD_CONTENT_CACHE'] = False
    generator = PagesGenerator(
        context=settings.copy(), settings=settings,
        path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
    generator.readers.read_file = MagicMock()
    generator.generate_context()
    # The original line `...read_file.assert_called_count == orig_call_count`
    # was a no-op: `assert_called_count` is not a Mock method, and the
    # comparison result was discarded, so nothing was actually asserted.
    self.assertEqual(generator.readers.read_file.call_count, orig_call_count)
|
||||
|
||||
|
||||
class TestTemplatePagesGenerator(unittest.TestCase):
|
||||
|
||||
|
|
|
|||
|
|
@ -564,25 +564,24 @@ class FileDataCacher(object):
|
|||
name = self.__class__.__name__
|
||||
self._cache_path = os.path.join(self.settings['CACHE_DIRECTORY'], name)
|
||||
self._cache_data_policy = self.settings[cache_policy_key]
|
||||
if not self.settings[load_policy_key]:
|
||||
self._cache = {}
|
||||
return
|
||||
if self.settings['GZIP_CACHE']:
|
||||
import gzip
|
||||
self._cache_open = gzip.open
|
||||
else:
|
||||
self._cache_open = open
|
||||
try:
|
||||
with self._cache_open(self._cache_path, 'rb') as f:
|
||||
self._cache = pickle.load(f)
|
||||
except Exception as e:
|
||||
if self.settings[load_policy_key]:
|
||||
try:
|
||||
with self._cache_open(self._cache_path, 'rb') as f:
|
||||
self._cache = pickle.load(f)
|
||||
except Exception as e:
|
||||
self._cache = {}
|
||||
else:
|
||||
self._cache = {}
|
||||
|
||||
def cache_data(self, filename, data):
    '''Cache data for given file

    Stores *data* under *filename* in the in-memory cache dict.
    Only caches when the cache-data policy is enabled; otherwise a
    silent no-op.
    '''
    # NOTE(review): the source span contained both the pre- and
    # post-merge variants of this body concatenated (a stripped-diff
    # artifact), which assigned the entry twice and kept a dead
    # early-return guard. This keeps the single, guarded assignment.
    if self._cache_data_policy:
        self._cache[filename] = data
|
||||
|
||||
def get_cached_data(self, filename, default={}):
|
||||
'''Get cached data for the given file
|
||||
|
|
@ -593,15 +592,14 @@ class FileDataCacher(object):
|
|||
|
||||
def save_cache(self):
    '''Save the updated cache to disk

    Pickles the in-memory cache dict to the cache file, creating the
    cache directory if needed. A no-op when the cache-data policy is
    disabled. Failures are logged as warnings, never raised, so a
    broken cache write cannot abort site generation.
    '''
    # NOTE(review): the source span contained both the pre- and
    # post-merge variants of this body concatenated (a stripped-diff
    # artifact), duplicating the whole try block. This keeps the
    # single, policy-guarded save.
    if self._cache_data_policy:
        try:
            mkdir_p(self.settings['CACHE_DIRECTORY'])
            with self._cache_open(self._cache_path, 'wb') as f:
                pickle.dump(self._cache, f)
        except Exception as e:
            logger.warning('Could not save cache {}\n{}'.format(
                self._cache_path, e))
|
||||
|
||||
|
||||
class FileStampDataCacher(FileDataCacher):
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue