Merge pull request #1318 from smartass101/fix_cacher_open_func

Set the _cache_open function even when not loading the cache; fixes autoreload.
This commit is contained in:
Justin Mayer 2014-04-18 11:27:22 -07:00
commit 06080dd873
2 changed files with 65 additions and 19 deletions

View file

@ -307,6 +307,30 @@ class TestArticlesGenerator(unittest.TestCase):
generator.generate_context() generator.generate_context()
generator.readers.read_file.assert_called_count == 0 generator.readers.read_file.assert_called_count == 0
def test_full_rebuild(self):
    """Test that all the articles are read again when not loading cache.

    Used in --full-rebuild or autoreload mode.
    """
    settings = get_settings(filenames={})
    settings['CACHE_DIRECTORY'] = self.temp_cache
    settings['READERS'] = {'asc': None}
    generator = ArticlesGenerator(
        context=settings.copy(), settings=settings,
        path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
    generator.readers.read_file = MagicMock()
    generator.generate_context()
    # _cache_open must be set even on a cold run so save_cache() works later.
    self.assertTrue(hasattr(generator, '_cache_open'))
    orig_call_count = generator.readers.read_file.call_count

    # With cache loading disabled, every article must be re-read from disk,
    # so the reader is invoked exactly as many times as on the cold run.
    settings['LOAD_CONTENT_CACHE'] = False
    generator = ArticlesGenerator(
        context=settings.copy(), settings=settings,
        path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
    generator.readers.read_file = MagicMock()
    generator.generate_context()
    # NOTE: the original line `...read_file.assert_called_count == orig_call_count`
    # was a no-op: `assert_called_count` is an auto-created Mock attribute and
    # the comparison result was discarded, so the test could never fail.
    self.assertEqual(generator.readers.read_file.call_count, orig_call_count)
class TestPageGenerator(unittest.TestCase): class TestPageGenerator(unittest.TestCase):
# Note: Every time you want to test for a new field; Make sure the test # Note: Every time you want to test for a new field; Make sure the test
@ -372,6 +396,30 @@ class TestPageGenerator(unittest.TestCase):
generator.generate_context() generator.generate_context()
generator.readers.read_file.assert_called_count == 0 generator.readers.read_file.assert_called_count == 0
def test_full_rebuild(self):
    """Test that all the pages are read again when not loading cache.

    Used in --full-rebuild or autoreload mode.
    """
    settings = get_settings(filenames={})
    settings['CACHE_DIRECTORY'] = self.temp_cache
    settings['READERS'] = {'asc': None}
    generator = PagesGenerator(
        context=settings.copy(), settings=settings,
        path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
    generator.readers.read_file = MagicMock()
    generator.generate_context()
    # _cache_open must be set even on a cold run so save_cache() works later.
    self.assertTrue(hasattr(generator, '_cache_open'))
    orig_call_count = generator.readers.read_file.call_count

    # With cache loading disabled, every page must be re-read from disk,
    # so the reader is invoked exactly as many times as on the cold run.
    settings['LOAD_CONTENT_CACHE'] = False
    generator = PagesGenerator(
        context=settings.copy(), settings=settings,
        path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
    generator.readers.read_file = MagicMock()
    generator.generate_context()
    # NOTE: the original line `...read_file.assert_called_count == orig_call_count`
    # was a no-op: `assert_called_count` is an auto-created Mock attribute and
    # the comparison result was discarded, so the test could never fail.
    self.assertEqual(generator.readers.read_file.call_count, orig_call_count)
class TestTemplatePagesGenerator(unittest.TestCase): class TestTemplatePagesGenerator(unittest.TestCase):

View file

@ -564,25 +564,24 @@ class FileDataCacher(object):
name = self.__class__.__name__ name = self.__class__.__name__
self._cache_path = os.path.join(self.settings['CACHE_DIRECTORY'], name) self._cache_path = os.path.join(self.settings['CACHE_DIRECTORY'], name)
self._cache_data_policy = self.settings[cache_policy_key] self._cache_data_policy = self.settings[cache_policy_key]
if not self.settings[load_policy_key]:
self._cache = {}
return
if self.settings['GZIP_CACHE']: if self.settings['GZIP_CACHE']:
import gzip import gzip
self._cache_open = gzip.open self._cache_open = gzip.open
else: else:
self._cache_open = open self._cache_open = open
try: if self.settings[load_policy_key]:
with self._cache_open(self._cache_path, 'rb') as f: try:
self._cache = pickle.load(f) with self._cache_open(self._cache_path, 'rb') as f:
except Exception as e: self._cache = pickle.load(f)
except Exception as e:
self._cache = {}
else:
self._cache = {} self._cache = {}
def cache_data(self, filename, data): def cache_data(self, filename, data):
'''Cache data for given file''' '''Cache data for given file'''
if not self._cache_data_policy: if self._cache_data_policy:
return self._cache[filename] = data
self._cache[filename] = data
def get_cached_data(self, filename, default={}): def get_cached_data(self, filename, default={}):
'''Get cached data for the given file '''Get cached data for the given file
@ -593,15 +592,14 @@ class FileDataCacher(object):
def save_cache(self): def save_cache(self):
'''Save the updated cache''' '''Save the updated cache'''
if not self._cache_data_policy: if self._cache_data_policy:
return try:
try: mkdir_p(self.settings['CACHE_DIRECTORY'])
mkdir_p(self.settings['CACHE_DIRECTORY']) with self._cache_open(self._cache_path, 'wb') as f:
with self._cache_open(self._cache_path, 'wb') as f: pickle.dump(self._cache, f)
pickle.dump(self._cache, f) except Exception as e:
except Exception as e: logger.warning('Could not save cache {}\n{}'.format(
logger.warning('Could not save cache {}\n{}'.format( self._cache_path, e))
self._cache_path, e))
class FileStampDataCacher(FileDataCacher): class FileStampDataCacher(FileDataCacher):