import locale
import logging
import os
import shutil
from datetime import timezone
from sys import platform
from tempfile import mkdtemp

try:
    from zoneinfo import ZoneInfo
except ModuleNotFoundError:
    from backports.zoneinfo import ZoneInfo

from pelican import utils
from pelican.generators import TemplatePagesGenerator
from pelican.settings import read_settings
from pelican.tests.support import (
    LoggedTestCase,
    get_article,
    locale_available,
    unittest,
)
from pelican.writers import Writer


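# Helper used by TestUtils.test_deprecated_attribute below: reading the
# decorated _old_attribute is expected to return _new_attribute while logging
# a deprecation warning through utils.deprecated_attribute.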
class ClassDeprAttr:
    _new_attribute = "new_value"

    @utils.deprecated_attribute(
        old="_old_attribute", new="_new_attribute", since=(3, 1, 0), remove=(4, 1, 3)
    )
    def _old_attribute():
        return None


class TestUtils(LoggedTestCase):
    def setUp(self):
        super().setUp()
        self.temp_output = mkdtemp(prefix="pelicantests.")

    def tearDown(self):
        super().tearDown()
        shutil.rmtree(self.temp_output)

    def test_deprecated_attribute(self):
        test_class = ClassDeprAttr()
        value = test_class._old_attribute
        self.assertEqual(value, test_class._new_attribute)
        self.assertLogCountEqual(
            count=1,
            msg=(
                "_old_attribute has been deprecated since 3.1.0 and will be "
                "removed by version 4.1.3. Use _new_attribute instead"
            ),
            level=logging.WARNING,
        )

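    # get_date() parses a free-form date string into a SafeDatetime. The dicts
    # below map accepted input strings to the values they should parse to;
    # strings that cannot be parsed must raise ValueError.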
    def test_get_date(self):
        # valid ones
        date = utils.SafeDatetime(year=2012, month=11, day=22)
        date_hour = utils.SafeDatetime(year=2012, month=11, day=22, hour=22, minute=11)
        date_hour_z = utils.SafeDatetime(
            year=2012, month=11, day=22, hour=22, minute=11, tzinfo=timezone.utc
        )
        date_hour_est = utils.SafeDatetime(
            year=2012, month=11, day=22, hour=22, minute=11, tzinfo=ZoneInfo("EST")
        )
        date_hour_sec = utils.SafeDatetime(
            year=2012, month=11, day=22, hour=22, minute=11, second=10
        )
        date_hour_sec_z = utils.SafeDatetime(
            year=2012,
            month=11,
            day=22,
            hour=22,
            minute=11,
            second=10,
            tzinfo=timezone.utc,
        )
        date_hour_sec_est = utils.SafeDatetime(
            year=2012,
            month=11,
            day=22,
            hour=22,
            minute=11,
            second=10,
            tzinfo=ZoneInfo("EST"),
        )
        date_hour_sec_frac_z = utils.SafeDatetime(
            year=2012,
            month=11,
            day=22,
            hour=22,
            minute=11,
            second=10,
            microsecond=123000,
            tzinfo=timezone.utc,
        )
        dates = {
            "2012-11-22": date,
            "2012/11/22": date,
            "2012-11-22 22:11": date_hour,
            "2012/11/22 22:11": date_hour,
            "22-11-2012": date,
            "22/11/2012": date,
            "22.11.2012": date,
            "22.11.2012 22:11": date_hour,
            "2012-11-22T22:11Z": date_hour_z,
            "2012-11-22T22:11-0500": date_hour_est,
            "2012-11-22 22:11:10": date_hour_sec,
            "2012-11-22T22:11:10Z": date_hour_sec_z,
            "2012-11-22T22:11:10-0500": date_hour_sec_est,
            "2012-11-22T22:11:10.123Z": date_hour_sec_frac_z,
        }

        # examples from http://www.w3.org/TR/NOTE-datetime
        iso_8601_date = utils.SafeDatetime(year=1997, month=7, day=16)
        iso_8601_date_hour_tz = utils.SafeDatetime(
            year=1997,
            month=7,
            day=16,
            hour=19,
            minute=20,
            tzinfo=ZoneInfo("Europe/London"),
        )
        iso_8601_date_hour_sec_tz = utils.SafeDatetime(
            year=1997,
            month=7,
            day=16,
            hour=19,
            minute=20,
            second=30,
            tzinfo=ZoneInfo("Europe/London"),
        )
        iso_8601_date_hour_sec_ms_tz = utils.SafeDatetime(
            year=1997,
            month=7,
            day=16,
            hour=19,
            minute=20,
            second=30,
            microsecond=450000,
            tzinfo=ZoneInfo("Europe/London"),
        )
        iso_8601 = {
            "1997-07-16": iso_8601_date,
            "1997-07-16T19:20+01:00": iso_8601_date_hour_tz,
            "1997-07-16T19:20:30+01:00": iso_8601_date_hour_sec_tz,
            "1997-07-16T19:20:30.45+01:00": iso_8601_date_hour_sec_ms_tz,
        }

        # invalid ones
        invalid_dates = ["2010-110-12", "yay"]

        for value, expected in dates.items():
            self.assertEqual(utils.get_date(value), expected, value)

        for value, expected in iso_8601.items():
            self.assertEqual(utils.get_date(value), expected, value)

        for item in invalid_dates:
            self.assertRaises(ValueError, utils.get_date, item)

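    # slugify() builds a URL-safe slug from a title using the
    # SLUG_REGEX_SUBSTITUTIONS setting: runs of whitespace, symbols and dashes
    # collapse to single hyphens and, as the samples below show, non-Latin
    # text is transliterated unless use_unicode is requested.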
    def test_slugify(self):
        samples = (
            ("this is a test", "this-is-a-test"),
            ("this        is a test", "this-is-a-test"),
            ("this → is ← a ↑ test", "this-is-a-test"),
            ("this--is---a test", "this-is-a-test"),
            (
                "unicode測試許功蓋,你看到了嗎?",
                "unicodece-shi-xu-gong-gai-ni-kan-dao-liao-ma",
            ),
            (
                "大飯原発4号機、18日夜起動へ",
                "da-fan-yuan-fa-4hao-ji-18ri-ye-qi-dong-he",
            ),
        )

        settings = read_settings()
        subs = settings["SLUG_REGEX_SUBSTITUTIONS"]

        for value, expected in samples:
            self.assertEqual(utils.slugify(value, regex_subs=subs), expected)

        self.assertEqual(utils.slugify("Cat", regex_subs=subs), "cat")
        self.assertEqual(
            utils.slugify("Cat", regex_subs=subs, preserve_case=False), "cat"
        )
        self.assertEqual(
            utils.slugify("Cat", regex_subs=subs, preserve_case=True), "Cat"
        )

    def test_slugify_use_unicode(self):
        samples = (
            ("this is a test", "this-is-a-test"),
            ("this        is a test", "this-is-a-test"),
            ("this → is ← a ↑ test", "this-is-a-test"),
            ("this--is---a test", "this-is-a-test"),
            ("unicode測試許功蓋,你看到了嗎?", "unicode測試許功蓋你看到了嗎"),
            ("Çığ", "çığ"),
        )

        settings = read_settings()
        subs = settings["SLUG_REGEX_SUBSTITUTIONS"]

        for value, expected in samples:
            self.assertEqual(
                utils.slugify(value, regex_subs=subs, use_unicode=True), expected
            )

        # check with preserve case
        self.assertEqual(
            utils.slugify("Çığ", regex_subs=subs, preserve_case=True, use_unicode=True),
            "Çığ",
        )

        # check normalization
        samples = (
            ("大飯原発4号機、18日夜起動へ", "大飯原発4号機18日夜起動へ"),
            (
                "\N{LATIN SMALL LETTER C}\N{COMBINING CEDILLA}",
                "\N{LATIN SMALL LETTER C WITH CEDILLA}",
            ),
        )
        for value, expected in samples:
            self.assertEqual(
                utils.slugify(value, regex_subs=subs, use_unicode=True), expected
            )

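    # Site-specific substitutions can be prepended to the default
    # SLUG_REGEX_SUBSTITUTIONS, e.g. rewriting "C++" to "cpp" and "C#" to
    # "c-sharp" before the generic slug rules are applied.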
    def test_slugify_substitute(self):
        samples = (
            ("C++ is based on C", "cpp-is-based-on-c"),
            ("C+++ test C+ test", "cpp-test-c-test"),
            ("c++, c#, C#, C++", "cpp-c-sharp-c-sharp-cpp"),
            ("c++-streams", "cpp-streams"),
        )

        settings = read_settings()
        subs = [
            (r"C\+\+", "CPP"),
            (r"C#", "C-SHARP"),
        ] + settings["SLUG_REGEX_SUBSTITUTIONS"]
        for value, expected in samples:
            self.assertEqual(utils.slugify(value, regex_subs=subs), expected)

    def test_slugify_substitute_and_keeping_non_alphanum(self):
        samples = (
            ("Fedora QA", "fedora.qa"),
            ("C++ is used by Fedora QA", "cpp is used by fedora.qa"),
            ("C++ is based on C", "cpp is based on c"),
            ("C+++ test C+ test", "cpp+ test c+ test"),
        )

        subs = [
            (r"Fedora QA", "fedora.qa"),
            (r"c\+\+", "cpp"),
        ]
        for value, expected in samples:
            self.assertEqual(utils.slugify(value, regex_subs=subs), expected)

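    # get_relative_path() returns the path from a page's location back up to
    # the site root, expressed with os.pardir/os.curdir components.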
    def test_get_relative_path(self):
        samples = (
            (os.path.join("test", "test.html"), os.pardir),
            (
                os.path.join("test", "test", "test.html"),
                os.path.join(os.pardir, os.pardir),
            ),
            ("test.html", os.curdir),
            (os.path.join("/test", "test.html"), os.pardir),
            (
                os.path.join("/test", "test", "test.html"),
                os.path.join(os.pardir, os.pardir),
            ),
            ("/test.html", os.curdir),
        )

        for value, expected in samples:
            self.assertEqual(utils.get_relative_path(value), expected)

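    # truncate_html_words() keeps the first N words of an HTML fragment,
    # appending "…" (or the custom end marker given as the third argument)
    # while leaving tags, comments and entity references intact.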
    def test_truncate_html_words(self):
        # Plain text.
        self.assertEqual(utils.truncate_html_words("short string", 20), "short string")
        self.assertEqual(
            utils.truncate_html_words("word " * 100, 20), "word " * 20 + "…"
        )

        # Plain text with Unicode content.
        self.assertEqual(
            utils.truncate_html_words(
                "我愿意这样,朋友——我独自远行,不但没有你,\
并且再没有别的影在黑暗里。",
                12,
            ),
            "我愿意这样,朋友——我独自远行" + " …",
        )
        self.assertEqual(
            utils.truncate_html_words(
                "Ты мелькнула, ты предстала, Снова сердце задрожало,", 3
            ),
            "Ты мелькнула, ты" + " …",
        )
        self.assertEqual(
            utils.truncate_html_words("Trong đầm gì đẹp bằng sen", 4),
            "Trong đầm gì đẹp" + " …",
        )

        # Words enclosed in or interleaved with HTML tags.
        self.assertEqual(
            utils.truncate_html_words("<p>" + "word " * 100 + "</p>", 20),
            "<p>" + "word " * 20 + "…</p>",
        )
        self.assertEqual(
            utils.truncate_html_words(
                '<span\nstyle="\n…\n">' + "word " * 100 + "</span>", 20
            ),
            '<span\nstyle="\n…\n">' + "word " * 20 + "…</span>",
        )
        self.assertEqual(
            utils.truncate_html_words("<br>" + "word " * 100, 20),
            "<br>" + "word " * 20 + "…",
        )
        self.assertEqual(
            utils.truncate_html_words("<!-- comment -->" + "word " * 100, 20),
            "<!-- comment -->" + "word " * 20 + "…",
        )

        # Words enclosed in or interleaved with HTML tags, with a custom end
        # marker containing HTML tags.
        self.assertEqual(
            utils.truncate_html_words(
                "<p>" + "word " * 100 + "</p>", 20, "<span>marker</span>"
            ),
            "<p>" + "word " * 20 + "<span>marker</span></p>",
        )
        self.assertEqual(
            utils.truncate_html_words(
                '<span\nstyle="\n…\n">' + "word " * 100 + "</span>",
                20,
                "<span>marker</span>",
            ),
            '<span\nstyle="\n…\n">' + "word " * 20 + "<span>marker</span></span>",
        )
        self.assertEqual(
            utils.truncate_html_words(
                "<br>" + "word " * 100, 20, "<span>marker</span>"
            ),
            "<br>" + "word " * 20 + "<span>marker</span>",
        )
        self.assertEqual(
            utils.truncate_html_words(
                "<!-- comment -->" + "word " * 100, 20, "<span>marker</span>"
            ),
            "<!-- comment -->" + "word " * 20 + "<span>marker</span>",
        )

        # Words with hyphens and apostrophes.
        self.assertEqual(utils.truncate_html_words("a-b " * 100, 20), "a-b " * 20 + "…")
        self.assertEqual(
            utils.truncate_html_words("it's " * 100, 20), "it's " * 20 + "…"
        )

        # Words with HTML entity references.
        self.assertEqual(
            utils.truncate_html_words("&eacute; " * 100, 20), "&eacute; " * 20 + "…"
        )
        self.assertEqual(
            utils.truncate_html_words("caf&eacute; " * 100, 20),
            "caf&eacute; " * 20 + "…",
        )
        self.assertEqual(
            utils.truncate_html_words("&egrave;lite " * 100, 20),
            "&egrave;lite " * 20 + "…",
        )
        self.assertEqual(
            utils.truncate_html_words("cafeti&eacute;re " * 100, 20),
            "cafeti&eacute;re " * 20 + "…",
        )
        self.assertEqual(
            utils.truncate_html_words("&int;dx " * 100, 20), "&int;dx " * 20 + "…"
        )

        # Words with HTML character references inside and outside
        # the ASCII range.
        self.assertEqual(
            utils.truncate_html_words("&#xe9; " * 100, 20), "&#xe9; " * 20 + "…"
        )
        self.assertEqual(
            utils.truncate_html_words("&#x222b;dx " * 100, 20), "&#x222b;dx " * 20 + "…"
        )

        # Words with invalid or broken HTML references.
        self.assertEqual(utils.truncate_html_words("&invalid;", 20), "&invalid;")
        self.assertEqual(
            utils.truncate_html_words("&#9999999999;", 20), "&#9999999999;"
        )
        self.assertEqual(
            utils.truncate_html_words("&#xfffffffff;", 20), "&#xfffffffff;"
        )
        self.assertEqual(utils.truncate_html_words("&mdash text", 20), "&mdash text")
        self.assertEqual(utils.truncate_html_words("&#1234 text", 20), "&#1234 text")
        self.assertEqual(utils.truncate_html_words("&#xabc text", 20), "&#xabc text")

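    # process_translations() splits articles into an index (one entry per
    # translation group, preferring the default language) and the remaining
    # translations; translation_id selects which metadata identifies a group.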
    def test_process_translations(self):
        fr_articles = []
        en_articles = []

        # create a bunch of articles
        # 0: no translation metadata
        fr_articles.append(
            get_article(lang="fr", slug="yay0", title="Titre", content="en français")
        )
        en_articles.append(
            get_article(lang="en", slug="yay0", title="Title", content="in english")
        )
        # 1: translation metadata on default lang
        fr_articles.append(
            get_article(lang="fr", slug="yay1", title="Titre", content="en français")
        )
        en_articles.append(
            get_article(
                lang="en",
                slug="yay1",
                title="Title",
                content="in english",
                translation="true",
            )
        )
        # 2: translation metadata not on default lang
        fr_articles.append(
            get_article(
                lang="fr",
                slug="yay2",
                title="Titre",
                content="en français",
                translation="true",
            )
        )
        en_articles.append(
            get_article(lang="en", slug="yay2", title="Title", content="in english")
        )
        # 3: back to default language detection if all items have the
        # translation metadata
        fr_articles.append(
            get_article(
                lang="fr",
                slug="yay3",
                title="Titre",
                content="en français",
                translation="yep",
            )
        )
        en_articles.append(
            get_article(
                lang="en",
                slug="yay3",
                title="Title",
                content="in english",
                translation="yes",
            )
        )
        # 4-5: translation pairs with the same slug but different category
        fr_articles.append(
            get_article(
                lang="fr",
                slug="yay4",
                title="Titre",
                content="en français",
                category="foo",
            )
        )
        en_articles.append(
            get_article(
                lang="en",
                slug="yay4",
                title="Title",
                content="in english",
                category="foo",
            )
        )
        fr_articles.append(
            get_article(
                lang="fr",
                slug="yay4",
                title="Titre",
                content="en français",
                category="bar",
            )
        )
        en_articles.append(
            get_article(
                lang="en",
                slug="yay4",
                title="Title",
                content="in english",
                category="bar",
            )
        )

        # try adding articles in both orders
        for lang0_articles, lang1_articles in (
            (fr_articles, en_articles),
            (en_articles, fr_articles),
        ):
            articles = lang0_articles + lang1_articles

            # test process_translations with falsy translation_id
            index, trans = utils.process_translations(articles, translation_id=None)
            for i in range(6):
                for lang_articles in [en_articles, fr_articles]:
                    self.assertIn(lang_articles[i], index)
                    self.assertNotIn(lang_articles[i], trans)

            # test process_translations with simple and complex translation_id
            for translation_id in ["slug", {"slug", "category"}]:
                index, trans = utils.process_translations(
                    articles, translation_id=translation_id
                )

                for a in [
                    en_articles[0],
                    fr_articles[1],
                    en_articles[2],
                    en_articles[3],
                    en_articles[4],
                    en_articles[5],
                ]:
                    self.assertIn(a, index)
                    self.assertNotIn(a, trans)

                for a in [
                    fr_articles[0],
                    en_articles[1],
                    fr_articles[2],
                    fr_articles[3],
                    fr_articles[4],
                    fr_articles[5],
                ]:
                    self.assertIn(a, trans)
                    self.assertNotIn(a, index)

                for i in range(6):
                    self.assertIn(en_articles[i], fr_articles[i].translations)
                    self.assertIn(fr_articles[i], en_articles[i].translations)

                for a_arts in [en_articles, fr_articles]:
                    for b_arts in [en_articles, fr_articles]:
                        if translation_id == "slug":
                            self.assertIn(a_arts[4], b_arts[5].translations)
                            self.assertIn(a_arts[5], b_arts[4].translations)
                        elif translation_id == {"slug", "category"}:
                            self.assertNotIn(a_arts[4], b_arts[5].translations)
                            self.assertNotIn(a_arts[5], b_arts[4].translations)

    def test_clean_output_dir(self):
        retention = ()
        test_directory = os.path.join(self.temp_output, "clean_output")
        content = os.path.join(os.path.dirname(__file__), "content")
        shutil.copytree(content, test_directory)
        utils.clean_output_dir(test_directory, retention)
        self.assertTrue(os.path.isdir(test_directory))
        self.assertListEqual([], os.listdir(test_directory))
        shutil.rmtree(test_directory)

    def test_clean_output_dir_not_there(self):
        retention = ()
        test_directory = os.path.join(self.temp_output, "does_not_exist")
        utils.clean_output_dir(test_directory, retention)
        self.assertFalse(os.path.exists(test_directory))

    def test_clean_output_dir_is_file(self):
        retention = ()
        test_directory = os.path.join(self.temp_output, "this_is_a_file")
        with open(test_directory, "w") as f:
            f.write("")
        utils.clean_output_dir(test_directory, retention)
        self.assertFalse(os.path.exists(test_directory))

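    # utils.strftime() formats like datetime.strftime() but passes literal
    # "%" text, invalid format codes and non-ascii characters through
    # unchanged.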
    def test_strftime(self):
        d = utils.SafeDatetime(2012, 8, 29)

        # simple formatting
        self.assertEqual(utils.strftime(d, "%d/%m/%y"), "29/08/12")
        self.assertEqual(utils.strftime(d, "%d/%m/%Y"), "29/08/2012")

        # RFC 3339
        self.assertEqual(
            utils.strftime(d, "%Y-%m-%dT%H:%M:%SZ"), "2012-08-29T00:00:00Z"
        )

        # % escaped
        self.assertEqual(utils.strftime(d, "%d%%%m%%%y"), "29%08%12")
        self.assertEqual(utils.strftime(d, "%d %% %m %% %y"), "29 % 08 % 12")
        # not valid % formatter
        self.assertEqual(
            utils.strftime(d, "10% reduction in %Y"), "10% reduction in 2012"
        )
        self.assertEqual(
            utils.strftime(d, "%10 reduction in %Y"), "%10 reduction in 2012"
        )

        # with text
        self.assertEqual(
            utils.strftime(d, "Published in %d-%m-%Y"), "Published in 29-08-2012"
        )

        # with non-ascii text
        self.assertEqual(
            utils.strftime(d, "%d/%m/%Y Øl trinken beim Besäufnis"),
            "29/08/2012 Øl trinken beim Besäufnis",
        )

        # alternative formatting options
        self.assertEqual(utils.strftime(d, "%-d/%-m/%y"), "29/8/12")
        self.assertEqual(utils.strftime(d, "%-H:%-M:%-S"), "0:0:0")

        d = utils.SafeDatetime(2012, 8, 9)
        self.assertEqual(utils.strftime(d, "%-d/%-m/%y"), "9/8/12")

        d = utils.SafeDatetime(2021, 1, 8)
        self.assertEqual(utils.strftime(d, "%G - %-V - %u"), "2021 - 1 - 5")

    # test the output of utils.strftime in a different locale
    # Turkish locale
    @unittest.skipUnless(
        locale_available("tr_TR.UTF-8") or locale_available("Turkish"),
        "Turkish locale needed",
    )
    def test_strftime_locale_dependent_turkish(self):
        temp_locale = "Turkish" if platform == "win32" else "tr_TR.UTF-8"

        with utils.temporary_locale(temp_locale):
            d = utils.SafeDatetime(2012, 8, 29)

            # simple
            self.assertEqual(utils.strftime(d, "%d %B %Y"), "29 Ağustos 2012")
            self.assertEqual(
                utils.strftime(d, "%A, %d %B %Y"), "Çarşamba, 29 Ağustos 2012"
            )

            # with text
            self.assertEqual(
                utils.strftime(d, "Yayınlanma tarihi: %A, %d %B %Y"),
                "Yayınlanma tarihi: Çarşamba, 29 Ağustos 2012",
            )

            # non-ascii format candidate (someone might pass it… for some reason)
            self.assertEqual(
                utils.strftime(d, "%Y yılında %üretim artışı"),
                "2012 yılında %üretim artışı",
            )

    # test the output of utils.strftime in a different locale
    # French locale
    @unittest.skipUnless(
        locale_available("fr_FR.UTF-8") or locale_available("French"),
        "French locale needed",
    )
    def test_strftime_locale_dependent_french(self):
        temp_locale = "French" if platform == "win32" else "fr_FR.UTF-8"

        with utils.temporary_locale(temp_locale):
            d = utils.SafeDatetime(2012, 8, 29)

            # simple
            self.assertEqual(utils.strftime(d, "%d %B %Y"), "29 août 2012")

            # depending on OS, the first letter is m or M
            self.assertTrue(utils.strftime(d, "%A") in ("mercredi", "Mercredi"))

            # with text
            self.assertEqual(
                utils.strftime(d, "Écrit le %d %B %Y"), "Écrit le 29 août 2012"
            )

            # non-ascii format candidate (someone might pass it… for some reason)
            self.assertEqual(utils.strftime(d, "%écrits en %Y"), "%écrits en 2012")

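    # maybe_pluralize() prefixes the count and picks the singular form only
    # when the count is exactly 1.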
    def test_maybe_pluralize(self):
        self.assertEqual(utils.maybe_pluralize(0, "Article", "Articles"), "0 Articles")
        self.assertEqual(utils.maybe_pluralize(1, "Article", "Articles"), "1 Article")
        self.assertEqual(utils.maybe_pluralize(2, "Article", "Articles"), "2 Articles")

    def test_temporary_locale(self):
        # test with default LC category
        orig_locale = locale.setlocale(locale.LC_ALL)

        with utils.temporary_locale("C"):
            self.assertEqual(locale.setlocale(locale.LC_ALL), "C")

        self.assertEqual(locale.setlocale(locale.LC_ALL), orig_locale)

        # test with custom LC category
        orig_locale = locale.setlocale(locale.LC_TIME)

        with utils.temporary_locale("C", locale.LC_TIME):
            self.assertEqual(locale.setlocale(locale.LC_TIME), "C")

        self.assertEqual(locale.setlocale(locale.LC_TIME), orig_locale)


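# utils.copy() accepts files as well as directories as sources and creates any
# missing intermediate directories at the destination, as the cases below
# demonstrate.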
class TestCopy(unittest.TestCase):
    """Tests the copy utility"""

    def setUp(self):
        self.root_dir = mkdtemp(prefix="pelicantests.")
        self.old_locale = locale.setlocale(locale.LC_ALL)
        locale.setlocale(locale.LC_ALL, "C")

    def tearDown(self):
        shutil.rmtree(self.root_dir)
        locale.setlocale(locale.LC_ALL, self.old_locale)

    def _create_file(self, *path):
        with open(os.path.join(self.root_dir, *path), "w") as f:
            f.write("42\n")

    def _create_dir(self, *path):
        os.makedirs(os.path.join(self.root_dir, *path))

    def _exist_file(self, *path):
        path = os.path.join(self.root_dir, *path)
        self.assertTrue(os.path.isfile(path), "File does not exist: %s" % path)

    def _exist_dir(self, *path):
        path = os.path.join(self.root_dir, *path)
        self.assertTrue(os.path.exists(path), "Directory does not exist: %s" % path)

    def test_copy_file_same_path(self):
        self._create_file("a.txt")
        utils.copy(
            os.path.join(self.root_dir, "a.txt"), os.path.join(self.root_dir, "b.txt")
        )
        self._exist_file("b.txt")

    def test_copy_file_different_path(self):
        self._create_dir("a")
        self._create_dir("b")
        self._create_file("a", "a.txt")
        utils.copy(
            os.path.join(self.root_dir, "a", "a.txt"),
            os.path.join(self.root_dir, "b", "b.txt"),
        )
        self._exist_dir("b")
        self._exist_file("b", "b.txt")

    def test_copy_file_create_dirs(self):
        self._create_file("a.txt")
        utils.copy(
            os.path.join(self.root_dir, "a.txt"),
            os.path.join(self.root_dir, "b0", "b1", "b2", "b3", "b.txt"),
        )
        self._exist_dir("b0")
        self._exist_dir("b0", "b1")
        self._exist_dir("b0", "b1", "b2")
        self._exist_dir("b0", "b1", "b2", "b3")
        self._exist_file("b0", "b1", "b2", "b3", "b.txt")

    def test_copy_dir_same_path(self):
        self._create_dir("a")
        self._create_file("a", "a.txt")
        utils.copy(os.path.join(self.root_dir, "a"), os.path.join(self.root_dir, "b"))
        self._exist_dir("b")
        self._exist_file("b", "a.txt")

    def test_copy_dir_different_path(self):
        self._create_dir("a0")
        self._create_dir("a0", "a1")
        self._create_file("a0", "a1", "a.txt")
        self._create_dir("b0")
        utils.copy(
            os.path.join(self.root_dir, "a0", "a1"),
            os.path.join(self.root_dir, "b0", "b1"),
        )
        self._exist_dir("b0", "b1")
        self._exist_file("b0", "b1", "a.txt")

    def test_copy_dir_create_dirs(self):
        self._create_dir("a")
        self._create_file("a", "a.txt")
        utils.copy(
            os.path.join(self.root_dir, "a"),
            os.path.join(self.root_dir, "b0", "b1", "b2", "b3", "b"),
        )
        self._exist_dir("b0")
        self._exist_dir("b0", "b1")
        self._exist_dir("b0", "b1", "b2")
        self._exist_dir("b0", "b1", "b2", "b3")
        self._exist_dir("b0", "b1", "b2", "b3", "b")
        self._exist_file("b0", "b1", "b2", "b3", "b", "a.txt")


class TestDateFormatter(unittest.TestCase):
    """Tests that the output of the DateFormatter jinja filter is the same as
    utils.strftime"""

    def setUp(self):
        # prepare a temp content and output folder
        self.temp_content = mkdtemp(prefix="pelicantests.")
        self.temp_output = mkdtemp(prefix="pelicantests.")

        # prepare a template file
        template_dir = os.path.join(self.temp_content, "template")
        template_path = os.path.join(template_dir, "source.html")
        os.makedirs(template_dir)
        with open(template_path, "w") as template_file:
            template_file.write('date = {{ date|strftime("%A, %d %B %Y") }}')
        self.date = utils.SafeDatetime(2012, 8, 29)

    def tearDown(self):
        shutil.rmtree(self.temp_content)
        shutil.rmtree(self.temp_output)
        # reset locale to default
        locale.setlocale(locale.LC_ALL, "")

    @unittest.skipUnless(
        locale_available("fr_FR.UTF-8") or locale_available("French"),
        "French locale needed",
    )
    def test_french_strftime(self):
        # This test tries to reproduce an issue that
        # occurred with python3.3 under macos10 only
        temp_locale = "French" if platform == "win32" else "fr_FR.UTF-8"

        with utils.temporary_locale(temp_locale):
            date = utils.SafeDatetime(2014, 8, 14)
            # we compare the lower() dates since macos10 returns
            # "Jeudi" for %A whereas linux reports "jeudi"
            self.assertEqual(
                "jeudi, 14 août 2014",
                utils.strftime(date, date_format="%A, %d %B %Y").lower(),
            )
            df = utils.DateFormatter()
            self.assertEqual(
                "jeudi, 14 août 2014", df(date, date_format="%A, %d %B %Y").lower()
            )

        # Let us now set the global locale to C:
        with utils.temporary_locale("C"):
            # DateFormatter should still work as expected
            # since it is the whole point of DateFormatter
            # (This is where pre-2014/4/15 code fails on macos10)
            df_date = df(date, date_format="%A, %d %B %Y").lower()
            self.assertEqual("jeudi, 14 août 2014", df_date)

    @unittest.skipUnless(
        locale_available("fr_FR.UTF-8") or locale_available("French"),
        "French locale needed",
    )
    def test_french_locale(self):
        if platform == "win32":
            locale_string = "French"
        else:
            locale_string = "fr_FR.UTF-8"
        settings = read_settings(
            override={
                "LOCALE": locale_string,
                "TEMPLATE_PAGES": {"template/source.html": "generated/file.html"},
            }
        )

        generator = TemplatePagesGenerator(
            {"date": self.date}, settings, self.temp_content, "", self.temp_output
        )
        generator.env.filters.update({"strftime": utils.DateFormatter()})

        writer = Writer(self.temp_output, settings=settings)
        generator.generate_output(writer)

        output_path = os.path.join(self.temp_output, "generated", "file.html")

        # output file has been generated
        self.assertTrue(os.path.exists(output_path))

        # output content is correct
        with utils.pelican_open(output_path) as output_file:
            self.assertEqual(
                output_file, utils.strftime(self.date, "date = %A, %d %B %Y")
            )

    @unittest.skipUnless(
        locale_available("tr_TR.UTF-8") or locale_available("Turkish"),
        "Turkish locale needed",
    )
    def test_turkish_locale(self):
        if platform == "win32":
            locale_string = "Turkish"
        else:
            locale_string = "tr_TR.UTF-8"
        settings = read_settings(
            override={
                "LOCALE": locale_string,
                "TEMPLATE_PAGES": {"template/source.html": "generated/file.html"},
            }
        )

        generator = TemplatePagesGenerator(
            {"date": self.date}, settings, self.temp_content, "", self.temp_output
        )
        generator.env.filters.update({"strftime": utils.DateFormatter()})

        writer = Writer(self.temp_output, settings=settings)
        generator.generate_output(writer)

        output_path = os.path.join(self.temp_output, "generated", "file.html")

        # output file has been generated
        self.assertTrue(os.path.exists(output_path))

        # output content is correct
        with utils.pelican_open(output_path) as output_file:
            self.assertEqual(
                output_file, utils.strftime(self.date, "date = %A, %d %B %Y")
            )


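# sanitised_join() joins path fragments under the output directory and raises
# RuntimeError when the result would escape it (via ".." or an absolute path).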
class TestSanitisedJoin(unittest.TestCase):
    def test_detect_parent_breakout(self):
        with self.assertRaisesRegex(
            RuntimeError,
            "Attempted to break out of output directory to (.*?:)?/foo/test",
        ):  # (.*?:)? accounts for Windows root
            utils.sanitised_join("/foo/bar", "../test")

    def test_detect_root_breakout(self):
        with self.assertRaisesRegex(
            RuntimeError,
            "Attempted to break out of output directory to (.*?:)?/test",
        ):  # (.*?:)? accounts for Windows root
            utils.sanitised_join("/foo/bar", "/test")

    def test_pass_deep_subpaths(self):
        self.assertEqual(
            utils.sanitised_join("/foo/bar", "test"),
            utils.posixize_path(os.path.abspath(os.path.join("/foo/bar", "test"))),
        )


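# utils.memoized caches a method's return value per argument; the cache is
# exposed as a `cache` attribute on the decorated function and can be cleared
# explicitly.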
class TestMemoized(unittest.TestCase):
    def test_memoized(self):
        class Container:
            def _get(self, key):
                pass

            @utils.memoized
            def get(self, key):
                return self._get(key)

        container = Container()

        with unittest.mock.patch.object(
            container, "_get", side_effect=lambda x: x
        ) as get_mock:
            self.assertEqual("foo", container.get("foo"))
            get_mock.assert_called_once_with("foo")

            get_mock.reset_mock()
            self.assertEqual("foo", container.get("foo"))
            get_mock.assert_not_called()

            self.assertEqual("bar", container.get("bar"))
            get_mock.assert_called_once_with("bar")

            get_mock.reset_mock()
            container.get.cache.clear()
            self.assertEqual("bar", container.get("bar"))
            get_mock.assert_called_once_with("bar")