Mirror of https://github.com/getpelican/pelican.git (synced 2025-10-15 20:28:56 +02:00)
Commit 8cd0939bfe
4 changed files with 23 additions and 9 deletions
@@ -227,7 +227,7 @@ class ArticlesGenerator(Generator):
                 continue

             # if no category is set, use the name of the path as a category
-            if 'category' not in metadata.keys():
+            if 'category' not in metadata:
                 if os.path.dirname(f) == self.path:
                     category = self.settings['DEFAULT_CATEGORY']
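Dropping the .keys() call here is the idiomatic cleanup: a membership test works directly on the dict, and on Python 2 it also avoids building an intermediate list of keys. A minimal illustration (not part of the commit, the dict contents are made up):

    metadata = {'title': 'Hello', 'category': 'misc'}

    'category' in metadata.keys()   # same result, but builds a key list first on Python 2
    'category' in metadata          # idiomatic form, constant-time hash lookup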
@@ -238,8 +238,7 @@ class ArticlesGenerator(Generator):
                 if category != '':
                     metadata['category'] = Category(category, self.settings)

-            if 'date' not in metadata.keys()\
-                    and self.settings['FALLBACK_ON_FS_DATE']:
+            if 'date' not in metadata and self.settings['FALLBACK_ON_FS_DATE']:
                 metadata['date'] = datetime.datetime.fromtimestamp(
                     os.stat(f).st_ctime)
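Collapsing the backslash continuation into one line does not change behaviour; the date fallback still only runs when FALLBACK_ON_FS_DATE is enabled. As context, a small self-contained sketch of what that fallback computes (fs_date is an illustrative name, not Pelican's API):

    import datetime
    import os

    def fs_date(path):
        # Use the file's ctime as the article date when no 'date' metadata is set
        return datetime.datetime.fromtimestamp(os.stat(path).st_ctime)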
@@ -145,7 +145,7 @@ def read_file(filename, fmt=None, settings=None):
     if not fmt:
         fmt = filename.split('.')[-1]

-    if fmt not in _EXTENSIONS.keys():
+    if fmt not in _EXTENSIONS:
         raise TypeError('Pelican does not know how to parse %s' % filename)

     reader = _EXTENSIONS[fmt](settings)
@@ -210,9 +210,6 @@ LAST_MTIME = 0
 def files_changed(path, extensions):
     """Return True if the files have changed since the last check"""

-    def with_extension(f):
-        return any(f.endswith(ext) for ext in extensions)
-
     def file_times(path):
         """Return the last time files have been modified"""
         for root, dirs, files in os.walk(path):
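This hunk only removes the unused with_extension() helper; the rest of files_changed() is not shown in the diff. A rough sketch of the mtime-polling idea behind pelican.utils.files_changed(), assuming it walks the tree and compares the newest modification time against the module-level LAST_MTIME (an approximation, not the exact Pelican code):

    import os

    LAST_MTIME = 0

    def files_changed(path, extensions):
        """Return True if the files have changed since the last check"""

        def file_times(path):
            # Yield the modification time of every matching file under path
            for root, dirs, files in os.walk(path):
                for f in files:
                    if f.endswith(extensions):
                        yield os.stat(os.path.join(root, f)).st_mtime

        global LAST_MTIME
        mtime = max(file_times(path))
        if mtime > LAST_MTIME:
            LAST_MTIME = mtime
            return True
        return False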
@@ -3,11 +3,12 @@ try:
     import unittest2 as unittest
 except ImportError:
     import unittest  # NOQA

+import os
 import datetime
+import time

 from pelican import utils
-from pelican.contents import Article

 from support import get_article
@@ -73,3 +74,20 @@ class TestUtils(unittest.TestCase):
         self.assertIn(fr_article1, trans)
         self.assertNotIn(en_article1, trans)
         self.assertNotIn(fr_article1, index)
+
+    def test_files_changed(self):
+        "Test if file changes are correctly detected"
+
+        path = os.path.join(os.path.dirname(__file__), 'content')
+        filename = os.path.join(path, 'article_with_metadata.rst')
+        changed = utils.files_changed(path, 'rst')
+        self.assertEquals(changed, True)
+
+        changed = utils.files_changed(path, 'rst')
+        self.assertEquals(changed, False)
+
+        t = time.time()
+        os.utime(filename, (t, t))
+        changed = utils.files_changed(path, 'rst')
+        self.assertEquals(changed, True)
+        self.assertAlmostEqual(utils.LAST_MTIME, t, places=2)
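The new test leans on os.utime() to touch the fixture file so that a later files_changed() call reports a change again: setting both timestamps to time.time() pushes the file's mtime past LAST_MTIME. A standalone illustration using the same fixture name as the test (the file must already exist for utime to succeed):

    import os
    import time

    t = time.time()
    os.utime('article_with_metadata.rst', (t, t))  # set atime and mtime to "now"
    # os.stat('article_with_metadata.rst').st_mtime is now approximately t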