Mirror of https://github.com/getpelican/pelican.git (synced 2025-10-15 20:28:56 +02:00)
Merge pull request #3311 from boxydog/fix_3216_two_commits
Commit 8d8feb6341
26 changed files with 148 additions and 80 deletions

@@ -17,7 +17,7 @@ repos:
     rev: v0.1.15
     hooks:
       - id: ruff
+        args: [--fix, --exit-non-zero-on-fix]
       - id: ruff-format
-        args: ["--check"]

         exclude: ^pelican/tests/output/
@@ -19,10 +19,10 @@ __path__ = extend_path(__path__, __name__)

 # pelican.log has to be the first pelican module to be loaded
 # because logging.setLoggerClass has to be called before logging.getLogger
-from pelican.log import console
+from pelican.log import console  # noqa: I001
 from pelican.log import init as init_logging
 from pelican.generators import (
-    ArticlesGenerator,  # noqa: I100
+    ArticlesGenerator,
     PagesGenerator,
     SourceFileGenerator,
     StaticGenerator,
@@ -354,8 +354,8 @@ def parse_arguments(argv=None):
         "--settings",
         dest="settings",
         help="The settings of the application, this is "
-        "automatically set to {} if a file exists with this "
-        "name.".format(DEFAULT_CONFIG_NAME),
+        f"automatically set to {DEFAULT_CONFIG_NAME} if a file exists with this "
+        "name.",
     )

     parser.add_argument(
@@ -4,6 +4,5 @@ python -m pelican module entry point to run via python -m

 from . import main

-
 if __name__ == "__main__":
     main()
@@ -17,6 +17,9 @@ except ModuleNotFoundError:

 from pelican.plugins import signals
 from pelican.settings import DEFAULT_CONFIG, Settings
+
+# Import these so that they're available when you import from pelican.contents.
+from pelican.urlwrappers import Author, Category, Tag, URLWrapper  # NOQA
 from pelican.utils import (
     deprecated_attribute,
     memoized,
@@ -28,9 +31,6 @@ from pelican.utils import (
     truncate_html_words,
 )

-# Import these so that they're available when you import from pelican.contents.
-from pelican.urlwrappers import Author, Category, Tag, URLWrapper  # NOQA
-
 logger = logging.getLogger(__name__)


@@ -370,13 +370,13 @@ class Content:

     def _get_intrasite_link_regex(self) -> re.Pattern:
         intrasite_link_regex = self.settings["INTRASITE_LINK_REGEX"]
-        regex = r"""
+        regex = rf"""
            (?P<markup><[^\>]+  # match tag with all url-value attributes
                (?:href|src|poster|data|cite|formaction|action|content)\s*=\s*)

            (?P<quote>["\'])  # require value to be quoted
-           (?P<path>{}(?P<value>.*?))  # the url value
-           (?P=quote)""".format(intrasite_link_regex)
+           (?P<path>{intrasite_link_regex}(?P<value>.*?))  # the url value
+           (?P=quote)"""
         return re.compile(regex, re.X)

     def _update_content(self, content: str, siteurl: str) -> str:
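
Note: the hunk above folds the old str.format() call into the regex literal itself. As a standalone sketch (not Pelican code; `prefix` is an invented stand-in for the INTRASITE_LINK_REGEX setting value), an rf-string keeps backslashes raw for the regex engine while still interpolating the Python variable, so no separate formatting step is needed:

    import re

    prefix = r"\{(?:filename|static)\}"  # hypothetical stand-in for the setting value
    pattern = rf"""
        (?P<quote>["'])                    # require the value to be quoted
        (?P<path>{prefix}(?P<value>.*?))   # interpolated prefix plus captured value
        (?P=quote)                         # matching closing quote
    """
    link = re.compile(pattern, re.X)
    assert link.search('href="{filename}/about.md"') is not None
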
@@ -465,7 +465,6 @@ class Content:
     @summary.setter
     def summary(self, value: str):
         """Dummy function"""
-        pass

     @property
     def status(self) -> str:
@@ -6,7 +6,6 @@ import logging
 import pkgutil
 import sys

-
 logger = logging.getLogger(__name__)


@@ -1,4 +1,4 @@
-from blinker import signal, Signal
+from blinker import Signal, signal
 from ordered_set import OrderedSet

 # Signals will call functions in the order of connection, i.e. plugin order
@@ -22,7 +22,7 @@ from pelican.utils import get_date, pelican_open, posixize_path
 try:
     from markdown import Markdown
 except ImportError:
-    Markdown = False  # NOQA
+    Markdown = False

 # Metadata processors have no way to discard an unwanted value, so we have
 # them return this value instead to signal that it should be discarded later.
@@ -607,8 +607,8 @@ class Readers(FileStampDataCacher):

         # eventually filter the content with typogrify if asked so
         if self.settings["TYPOGRIFY"]:
-            from typogrify.filters import typogrify
             import smartypants
+            from typogrify.filters import typogrify

             typogrify_dashes = self.settings["TYPOGRIFY_DASHES"]
             if typogrify_dashes == "oldschool":
@@ -2,7 +2,6 @@ import re

 from docutils import nodes, utils
 from docutils.parsers.rst import Directive, directives, roles
-
 from pygments import highlight
 from pygments.formatters import HtmlFormatter
 from pygments.lexers import TextLexer, get_lexer_by_name
@@ -267,9 +267,7 @@ def _printf_s_to_format_field(printf_string: str, format_field: str) -> str:
     TEST_STRING = "PELICAN_PRINTF_S_DEPRECATION"
     expected = printf_string % TEST_STRING

-    result = printf_string.replace("{", "{{").replace("}", "}}") % "{{{}}}".format(
-        format_field
-    )
+    result = printf_string.replace("{", "{{").replace("}", "}}") % f"{{{format_field}}}"
     if result.format(**{format_field: TEST_STRING}) != expected:
         raise ValueError(f"Failed to safely replace %s with {{{format_field}}}")

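
Note: the brace-heavy expression rewritten above builds a str.format placeholder such as "{slug}". A quick standalone check with a made-up field name shows the two spellings are equivalent:

    field = "slug"  # hypothetical format_field value
    old_style = "{{{}}}".format(field)  # "{{" -> "{", "{}" -> field, "}}" -> "}"
    new_style = f"{{{field}}}"          # the same three pieces in one f-string
    assert old_style == new_style == "{slug}"
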
@@ -412,7 +410,7 @@ def handle_deprecated_settings(settings: Settings) -> Settings:
         )
         logger.warning(message)
     if old_values.get("SLUG"):
-        for f in {"CATEGORY", "TAG"}:
+        for f in ("CATEGORY", "TAG"):
             if old_values.get(f):
                 old_values[f] = old_values["SLUG"] + old_values[f]
     old_values["AUTHOR"] = old_values.get("AUTHOR", [])
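
Note: swapping the set literal for a tuple, as above, keeps the behaviour while iterating in the written order and without building a throwaway hash table (the pattern the pylint-family rules warn about). A standalone sketch with invented setting values, mirroring only the loop shape of the diff:

    old_values = {"SLUG": "{date}-", "CATEGORY": "{slug}", "TAG": "{slug}"}  # made-up values
    for f in ("CATEGORY", "TAG"):  # tuple: deterministic order, no set construction
        if old_values.get(f):
            old_values[f] = old_values["SLUG"] + old_values[f]
    assert old_values["CATEGORY"] == "{date}-{slug}"
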
@@ -1,6 +1,6 @@
-from re import match
 import tarfile
 from pathlib import Path
+from re import match
 from zipfile import ZipFile

 import pytest
@@ -261,9 +261,7 @@ class LoggedTestCase(unittest.TestCase):
         self.assertEqual(
             actual,
             count,
-            msg="expected {} occurrences of {!r}, but found {}".format(
-                count, msg, actual
-            ),
+            msg=f"expected {count} occurrences of {msg!r}, but found {actual}",
         )


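
Note: the !r conversion in the rewritten assertion message applies repr() inside the f-string, so the output matches the old .format() call exactly; a standalone check with made-up values:

    count, msg, actual = 1, "WARNING: missing file", 0  # hypothetical values
    assert (
        f"expected {count} occurrences of {msg!r}, but found {actual}"
        == "expected {} occurrences of {!r}, but found {}".format(count, msg, actual)
    )
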
@@ -6,7 +6,6 @@ from unittest.mock import MagicMock
 from pelican.generators import ArticlesGenerator, PagesGenerator
 from pelican.tests.support import get_context, get_settings, unittest

-
 CUR_DIR = os.path.dirname(__file__)
 CONTENT_DIR = os.path.join(CUR_DIR, "content")

@@ -13,7 +13,6 @@ from pelican.settings import DEFAULT_CONFIG
 from pelican.tests.support import LoggedTestCase, get_context, get_settings, unittest
 from pelican.utils import path_to_url, posixize_path, truncate_html_words

-
 # generate one paragraph, enclosed with <p>
 TEST_CONTENT = str(generate_lorem_ipsum(n=1))
 TEST_SUMMARY = generate_lorem_ipsum(n=1, html=False)
@@ -297,7 +296,6 @@ class TestPage(TestBase):
     def test_signal(self):
         def receiver_test_function(sender):
             receiver_test_function.has_been_called = True
-            pass

         receiver_test_function.has_been_called = False

@@ -13,11 +13,11 @@ from pelican.generators import (
     TemplatePagesGenerator,
 )
 from pelican.tests.support import (
+    TestCaseWithCLocale,
     can_symlink,
     get_context,
     get_settings,
     unittest,
-    TestCaseWithCLocale,
 )
 from pelican.writers import Writer

@@ -5,11 +5,11 @@ from unittest.mock import patch

 from pelican.settings import DEFAULT_CONFIG
 from pelican.tests.support import (
+    TestCaseWithCLocale,
     mute,
     skipIfNoExecutable,
     temporary_folder,
     unittest,
-    TestCaseWithCLocale,
 )
 from pelican.tools.pelican_import import (
     blogger2fields,
@@ -19,12 +19,12 @@ from pelican.tools.pelican_import import (
     download_attachments,
     fields2pelican,
     get_attachments,
-    tumblr2fields,
-    wp2fields,
+    medium_slug,
     mediumpost2fields,
     mediumposts2fields,
     strip_medium_post_content,
-    medium_slug,
+    tumblr2fields,
+    wp2fields,
 )
 from pelican.utils import path_to_file_url, slugify

@@ -41,7 +41,7 @@ WORDPRESS_DECODED_CONTENT_SAMPLE = os.path.join(
 try:
     from bs4 import BeautifulSoup
 except ImportError:
-    BeautifulSoup = False  # NOQA
+    BeautifulSoup = False

 try:
     import bs4.builder._lxml as LXML
@@ -532,9 +532,7 @@ class TestWordpressXMLAttachements(TestCaseWithCLocale):
                 self.assertEqual(self.attachments[post], {expected_invalid})
             else:
                 self.fail(
-                    "all attachments should match to a " "filename or None, {}".format(
-                        post
-                    )
+                    "all attachments should match to a " f"filename or None, {post}"
                 )

     def test_download_attachments(self):
@@ -7,7 +7,6 @@ from pelican.paginator import Paginator
 from pelican.settings import DEFAULT_CONFIG
 from pelican.tests.support import get_settings, unittest

-
 # generate one paragraph, enclosed with <p>
 TEST_CONTENT = str(generate_lorem_ipsum(n=1))
 TEST_SUMMARY = generate_lorem_ipsum(n=1, html=False)
@@ -1,7 +1,6 @@
 import os
 from contextlib import contextmanager

-import pelican.tests.dummy_plugins.normal_plugin.normal_plugin as normal_plugin
 from pelican.plugins._utils import (
     get_namespace_plugins,
     get_plugin_name,
@@ -9,6 +8,7 @@ from pelican.plugins._utils import (
     plugin_enabled,
 )
 from pelican.plugins.signals import signal
+from pelican.tests.dummy_plugins.normal_plugin import normal_plugin
 from pelican.tests.support import unittest


@@ -5,7 +5,6 @@ from pelican import readers
 from pelican.tests.support import get_settings, unittest
 from pelican.utils import SafeDatetime

-
 CUR_DIR = os.path.dirname(__file__)
 CONTENT_PATH = os.path.join(CUR_DIR, "content")

@@ -3,7 +3,6 @@ import locale
 import os
 from os.path import abspath, dirname, join

-
 from pelican.settings import (
     DEFAULT_CONFIG,
     DEFAULT_THEME,
@@ -22,7 +22,6 @@ from pelican.log import init
 from pelican.settings import DEFAULT_CONFIG
 from pelican.utils import SafeDatetime, slugify

-
 logger = logging.getLogger(__name__)


@@ -169,7 +169,7 @@ def ask_timezone(question, default, tzurl):
             r = tz_dict[r]
             break
         else:
-            print("Please enter a valid time zone:\n" " (check [{}])".format(tzurl))
+            print("Please enter a valid time zone:\n" f" (check [{tzurl}])")
     return r


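
Note: only the second literal in the rewritten print() call needs the f prefix, because adjacent string literals are concatenated at compile time and each piece can independently be an f-string. A standalone sketch with a made-up URL:

    tzurl = "https://example.org/timezones"  # hypothetical value
    line = "Please enter a valid time zone:\n" f" (check [{tzurl}])"
    assert line == "Please enter a valid time zone:\n (check [https://example.org/timezones])"
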
@@ -205,14 +205,14 @@ def main():
     args = parser.parse_args()

     print(
-        """Welcome to pelican-quickstart v{v}.
+        f"""Welcome to pelican-quickstart v{__version__}.

 This script will help you create a new Pelican-based website.

 Please answer the following questions so this script can generate the files
 needed by Pelican.

-    """.format(v=__version__)
+    """
     )

     project = os.path.join(os.environ.get("VIRTUAL_ENV", os.curdir), ".project")
@@ -240,15 +240,11 @@ def install(path, v=False, u=False):
         except OSError as e:
             err(
                 "Cannot change permissions of files "
-                "or directory in `{r}':\n{e}".format(r=theme_path, e=str(e)),
+                f"or directory in `{theme_path}':\n{e!s}",
                 die=False,
             )
     except Exception as e:
-        err(
-            "Cannot copy `{p}' to `{t}':\n{e}".format(
-                p=path, t=theme_path, e=str(e)
-            )
-        )
+        err(f"Cannot copy `{path}' to `{theme_path}':\n{e!s}")


 def symlink(path, v=False):
@@ -268,11 +264,7 @@ def symlink(path, v=False):
     try:
         os.symlink(path, theme_path)
     except Exception as e:
-        err(
-            "Cannot link `{p}' to `{t}':\n{e}".format(
-                p=path, t=theme_path, e=str(e)
-            )
-        )
+        err(f"Cannot link `{path}' to `{theme_path}':\n{e!s}")


 def is_broken_link(path):
@@ -98,7 +98,7 @@ class URLWrapper:
         return self.name

     def __repr__(self):
-        return f"<{type(self).__name__} {repr(self._name)}>"
+        return f"<{type(self).__name__} {self._name!r}>"

     def _from_settings(self, key, get_page_name=False):
         """Returns URL information as defined in settings.
@@ -23,14 +23,10 @@ from typing import (
     Any,
     Callable,
     Collection,
-    Dict,
     Generator,
     Iterable,
-    List,
     Optional,
     Sequence,
-    Tuple,
-    Type,
     Union,
 )

@@ -40,9 +36,8 @@ try:
     from zoneinfo import ZoneInfo
 except ModuleNotFoundError:
     from backports.zoneinfo import ZoneInfo
-from markupsafe import Markup
-
 import watchfiles
+from markupsafe import Markup

 if TYPE_CHECKING:
     from pelican.contents import Content
@@ -158,7 +153,7 @@ class memoized:

     def __init__(self, func: Callable) -> None:
         self.func = func
-        self.cache: Dict[Any, Any] = {}
+        self.cache: dict[Any, Any] = {}

     def __call__(self, *args) -> Any:
         if not isinstance(args, Hashable):
@@ -185,8 +180,8 @@ class memoized:
 def deprecated_attribute(
     old: str,
     new: str,
-    since: Tuple[int, ...],
-    remove: Optional[Tuple[int, ...]] = None,
+    since: tuple[int, ...],
+    remove: Optional[tuple[int, ...]] = None,
     doc: Optional[str] = None,
 ):
     """Attribute deprecation decorator for gentle upgrades
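
Note: this and the surrounding annotation hunks replace typing.Dict/List/Tuple/Type with the built-in generics, which are subscriptable on Python 3.9 and later (older interpreters need `from __future__ import annotations`). A minimal sketch, not Pelican code:

    def first_pair(cache: dict[str, int]) -> tuple[str, int]:
        """Return an arbitrary (key, value) pair from the cache."""
        key = next(iter(cache))
        return key, cache[key]

    assert first_pair({"a": 1}) == ("a", 1)
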
@@ -256,7 +251,7 @@ def pelican_open(

 def slugify(
     value: str,
-    regex_subs: Iterable[Tuple[str, str]] = (),
+    regex_subs: Iterable[tuple[str, str]] = (),
     preserve_case: bool = False,
     use_unicode: bool = False,
 ) -> str:
@@ -642,9 +637,9 @@ def truncate_html_words(s: str, num: int, end_text: str = "…") -> str:


 def process_translations(
-    content_list: List[Content],
+    content_list: list[Content],
     translation_id: Optional[Union[str, Collection[str]]] = None,
-) -> Tuple[List[Content], List[Content]]:
+) -> tuple[list[Content], list[Content]]:
     """Finds translations and returns them.

     For each content_list item, populates the 'translations' attribute, and
@@ -674,14 +669,14 @@ def process_translations(
             content_list.sort(key=attrgetter(*translation_id))
         except TypeError:
             raise TypeError(
-                "Cannot unpack {}, 'translation_id' must be falsy, a"
-                " string or a collection of strings".format(translation_id)
+                f"Cannot unpack {translation_id}, 'translation_id' must be falsy, a"
+                " string or a collection of strings"
             )
         except AttributeError:
             raise AttributeError(
-                "Cannot use {} as 'translation_id', there "
+                f"Cannot use {translation_id} as 'translation_id', there "
                 "appear to be items without these metadata "
-                "attributes".format(translation_id)
+                "attributes"
             )

     for id_vals, items in groupby(content_list, attrgetter(*translation_id)):
@@ -702,7 +697,7 @@ def process_translations(
     return index, translations


-def get_original_items(items: List[Content], with_str: str) -> List[Content]:
+def get_original_items(items: list[Content], with_str: str) -> list[Content]:
     def _warn_source_paths(msg, items, *extra):
         args = [len(items)]
         args.extend(extra)
@@ -743,9 +738,9 @@ def get_original_items(items: List[Content], with_str: str) -> List[Content]:


 def order_content(
-    content_list: List[Content],
+    content_list: list[Content],
     order_by: Union[str, Callable[[Content], Any], None] = "slug",
-) -> List[Content]:
+) -> list[Content]:
     """Sorts content.

     order_by can be a string of an attribute or sorting function. If order_by
@@ -807,8 +802,8 @@ def order_content(

 def wait_for_changes(
     settings_file: str,
-    reader_class: Type["Readers"],
-    settings: "Settings",
+    reader_class: type[Readers],
+    settings: Settings,
 ):
     content_path = settings.get("PATH", "")
     theme_path = settings.get("THEME", "")
@@ -4,7 +4,6 @@ from posixpath import join as posix_join
 from urllib.parse import urljoin

 from feedgenerator import Atom1Feed, Rss201rev2Feed, get_tag_uri
-
 from markupsafe import Markup

 from pelican.paginator import Paginator
@@ -111,3 +111,102 @@ source-includes = [
 [build-system]
 requires = ["pdm-backend"]
 build-backend = "pdm.backend"
+
+
+[tool.ruff.lint]
+# see https://docs.astral.sh/ruff/configuration/#using-pyprojecttoml
+# "F" contains autoflake, see https://github.com/astral-sh/ruff/issues/1647
+# add more rules
+select = [
+    # default Ruff checkers as of ruff 0.1.3: E4, E7, E9, F
+    "E4",
+    "E7",
+    "E9",
+    "F",  # pyflakes
+
+    # the rest in alphabetical order:
+    # TODO: "A",  # flake8-builtins
+    # TODO: "ARG",  # flake8-unused-arguments
+    "B",  # flake8-bugbear
+    # TODO: "BLE",  # flake8-blind-except
+    # TODO: Do I want "COM",  # flake8-commas
+    "C4",  # flake8-comprehensions
+    # TODO: "DJ",  # flake8-django
+    # TODO: "DTZ",  # flake8-datetimez
+    # TODO: "EM",  # flake8-errmsg
+    "EXE",  # flake8-executable
+    # TODO: "FURB",  # refurb
+    # TODO: "FBT",  # flake8-boolean-trap
+    # TODO: "G",  # flake8-logging-format
+    "I",  # isort
+    "ICN",  # flake8-import-conventions
+    "INP",  # flake8-no-pep420
+    # TODO: "INT",  # flake8-gettext
+    "ISC",  # flake8-implicit-str-concat
+    # TODO: "LOG",  # flake8-logging
+    "PERF",  # perflint
+    "PIE",  # flake8-pie
+    "PL",  # pylint
+    "PYI",  # flake8-pyi
+    # TODO: "RET",  # flake8-return
+    "RSE",  # flake8-raise
+    "RUF",
+    # TODO: "SIM",  # flake8-simplify
+    "SLF",  # flake8-self
+    "SLOT",  # flake8-slots
+    "TID",  # flake8-tidy-imports
+    "UP",  # pyupgrade
+    "Q",  # flake8-quotes
+    "TCH",  # flake8-type-checking
+    "T10",  # flake8-debugger
+    "T20",  # flake8-print
+    # TODO: "S",  # flake8-bandit
+    "YTT",  # flake8-2020
+    # TODO: add more flake8 rules
+]
+
+ignore = [
+    # suppression in order of # of violations in Dec 2023:
+    "B007",  # unused-loop-control-variable
+    "T201",  # print
+    "PLW2901",  # redefined-loop-name
+    "SLF001",  # private-member-access
+    "RUF001",  # ambiguous-unicode-character-string
+    "PLR2004",  # magic-value-comparison
+    "PLR0912",  # too-many-branches
+    "PLR0913",  # too-many-arguments
+    "RUF005",  # collection-literal-concatenation
+    "RUF012",  # mutable-class-default
+    "PLR0915",  # too-many-statements
+    "INP001",  # implicit-namespace-package
+    "RUF015",  # unnecessary-iterable-allocation-for-first-element
+    "PLR1722",  # sys-exit-alias
+    "ISC001",  # single-line-implicit-string-concatenation
+    "C408",  # unnecessary-collection-call
+    "B904",  # raise-without-from-inside-except
+    "UP007",  # use `|` operator for union type annotations (PEP 604)
+    "UP031",  # printf-string-formatting
+    "PLR5501",  # collapsible-else-if
+    "PERF203",  # try-except-in-loop
+    "B006",  # mutable-argument-default
+    "PLR1714",  # repeated-equality-comparison
+    "PERF401",  # manual-list-comprehension
+    # TODO: these only have one violation each in Dec 2023:
+    "SLOT000",  # no-slots-in-str-subclass
+    "PYI024",  # collections-named-tuple
+    "PLW0603",  # global-statement
+    "PIE800",  # unnecessary-spread
+    "ISC003",  # explicit-string-concatenation
+    "EXE002",  # shebang-missing-executable-file
+    "C401",  # unnecessary-generator-set
+    "C416",  # unnecessary `list` comprehension
+    "B028",  # no-explicit-stacklevel
+    "B008",  # function-call-in-default-argument
+]
+
+[tool.ruff.lint.extend-per-file-ignores]
+
+"pelican/__init__.py" = [
+    # allow imports after a call to a function, see the file
+    "E402"
+]
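
Note: several of the newly selected families correspond directly to changes elsewhere in this commit: "I" (isort) drives the import reordering, "UP" (pyupgrade) the f-string and built-in-generic rewrites, and "PL" (pylint) the set-literal iteration fix. As a rough, invented illustration (rule codes as documented by ruff, not output from this repository):

    from typing import List  # UP035: deprecated typing import; use builtin list
    import os                # I001: import block is not sorted (os belongs first)


    def list_entries(path: str) -> List[str]:  # UP006: prefer list[str]
        entries = []
        for entry in {".", ".."}:  # PLC0208: iteration over a set literal
            entries.append(os.path.join(path, entry))
        return entries
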