mirror of https://github.com/getpelican/pelican.git (synced 2025-10-15 20:28:56 +02:00)
Move ruff to 0.12.2 and fix new complaints
parent 9e3e1325c0
commit 4dedf17958
21 changed files with 64 additions and 102 deletions
|
|
@@ -16,14 +16,14 @@ repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # ruff version should match the one in pyproject.toml
-    rev: v0.7.2
+    rev: v0.12.2
     hooks:
-      - id: ruff
+      - id: ruff-check
         args: [--fix, --exit-non-zero-on-fix]
       - id: ruff-format
 
   - repo: https://github.com/rtts/djhtml
-    rev: '3.0.7'
+    rev: '3.0.8'
     hooks:
       - id: djhtml
       - id: djcss
 
@@ -41,7 +41,7 @@ version = ".".join(release.split(".")[:1])
 last_stable = project_data.get("version")
 rst_prolog = f"""
 .. |last_stable| replace:: :pelican-doc:`{last_stable}`
-.. |min_python| replace:: {project_data.get('requires-python').split(",")[0]}
+.. |min_python| replace:: {project_data.get("requires-python").split(",")[0]}
 """
 
 extlinks = {"pelican-doc": ("https://docs.getpelican.com/en/latest/%s.html", "%s")}
@@ -402,8 +402,7 @@ def parse_arguments(argv=None):
         "--autoreload",
         dest="autoreload",
         action="store_true",
-        help="Relaunch pelican each time a modification occurs"
-        " on the content files.",
+        help="Relaunch pelican each time a modification occurs on the content files.",
     )
 
     parser.add_argument(
@@ -446,8 +445,7 @@ def parse_arguments(argv=None):
         choices=("errors", "warnings"),
         default="",
         help=(
-            "Exit the program with non-zero status if any "
-            "errors/warnings encountered."
+            "Exit the program with non-zero status if any errors/warnings encountered."
         ),
     )
 
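Note: this hunk and several later ones collapse a message that was split across two adjacent string literals into a single literal. Python concatenates adjacent literals at compile time, so the help text is unchanged; the rewrite only removes the implicit concatenation that newer ruff tends to complain about (its ISC checks; exactly which rule fired is not shown in the diff). A minimal sketch of the equivalence:

    # Adjacent string literals are joined at compile time, so both
    # spellings produce exactly the same help text.
    split = (
        "Exit the program with non-zero status if any "
        "errors/warnings encountered."
    )
    joined = "Exit the program with non-zero status if any errors/warnings encountered."
    assert split == joined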
@@ -1,3 +1,4 @@
+import gzip
 import hashlib
 import logging
 import os
@@ -22,8 +23,6 @@ class FileDataCacher:
         self._cache_path = os.path.join(self.settings["CACHE_PATH"], cache_name)
         self._cache_data_policy = caching_policy
         if self.settings["GZIP_CACHE"]:
-            import gzip
-
             self._cache_open = gzip.open
         else:
             self._cache_open = open
@@ -342,8 +342,7 @@ class Content:
                 value.geturl(),
                 extra={
                     "limit_msg": (
-                        "Other resources were not found "
-                        "and their urls not replaced"
+                        "Other resources were not found and their urls not replaced"
                     )
                 },
             )
@@ -1,4 +1,5 @@
 import logging
+import warnings
 from collections import defaultdict
 
 from rich.console import Console
@@ -156,8 +157,6 @@ def init(
 
 
 def log_warnings():
-    import warnings
-
     logging.captureWarnings(True)
     warnings.simplefilter("default", DeprecationWarning)
     init(logging.DEBUG, name="py.warnings")
@@ -53,7 +53,7 @@ class Paginator:
         "Returns the total number of pages."
         if self._num_pages is None:
             hits = max(1, self.count - self.orphans)
-            self._num_pages = int(ceil(hits / (float(self.per_page) or 1)))
+            self._num_pages = ceil(hits / (float(self.per_page) or 1))
         return self._num_pages
 
     num_pages = property(_get_num_pages)
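Note: the dropped int() wrapper was a no-op, since math.ceil() already returns an int in Python 3 even for float arguments. A quick check:

    from math import ceil

    hits, per_page = 7, 2
    pages = ceil(hits / (float(per_page) or 1))
    print(pages, type(pages))  # 4 <class 'int'>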
@@ -19,7 +19,7 @@ def iter_namespace(ns_pkg):
 
 def get_namespace_plugins(ns_pkg=None):
     if ns_pkg is None:
-        import pelican.plugins as ns_pkg
+        import pelican.plugins as ns_pkg  # noqa: PLC0415
 
     return {
         name: importlib.import_module(name)
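Note: PLC0415 is ruff's import-outside-top-level check (inherited from pylint's C0415); it flags import statements placed inside functions or methods. The commit handles it in two ways: hoisting imports to module level where that is harmless (gzip and warnings in the earlier hunks), and adding # noqa: PLC0415 where the local import is deliberate, for example to avoid an import cycle or keep an optional dependency optional. A small sketch of the pattern, with hypothetical names:

    import gzip  # module-level import: the form PLC0415 asks for


    def open_cache(path, compressed=False):
        if compressed:
            return gzip.open(path, "rb")
        # An import written here instead would be flagged as PLC0415;
        # it can still be kept deliberately with "# noqa: PLC0415".
        return open(path, "rb")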
@@ -29,7 +29,7 @@ def get_namespace_plugins(ns_pkg=None):
 
 
 def list_plugins(ns_pkg=None):
-    from pelican.log import init as init_logging
+    from pelican.log import init as init_logging  # noqa: PLC0415
 
     init_logging(logging.INFO)
     ns_plugins = get_namespace_plugins(ns_pkg)
@@ -630,8 +630,9 @@ class Readers(FileStampDataCacher):
 
         # eventually filter the content with typogrify if asked so
         if self.settings["TYPOGRIFY"]:
-            import smartypants
-            from typogrify.filters import typogrify
+            # typogrify is an optional feature, user may not have it installed
+            import smartypants  # noqa: PLC0415
+            from typogrify.filters import typogrify  # noqa: PLC0415
 
             typogrify_dashes = self.settings["TYPOGRIFY_DASHES"]
             if typogrify_dashes == "oldschool":
@@ -657,7 +658,7 @@ class Readers(FileStampDataCacher):
                 return typogrify(
                     text,
                     self.settings["TYPOGRIFY_IGNORE_TAGS"],
-                    **{f: False for f in self.settings["TYPOGRIFY_OMIT_FILTERS"]},
+                    **dict.fromkeys(self.settings["TYPOGRIFY_OMIT_FILTERS"], False),
                 )
             except TypeError:
                 try:
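Note: the keyword expansion passed to typogrify is behaviourally identical; a dict comprehension that maps every key to the same constant builds the same mapping as dict.fromkeys(iterable, value), and newer ruff prefers the latter. Because False is immutable, sharing one value object between keys is safe. For example:

    omit_filters = ["amp", "caps"]
    assert {f: False for f in omit_filters} == dict.fromkeys(omit_filters, False)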
@@ -12,6 +12,7 @@ from types import ModuleType
 from typing import Any, Optional
 
 from pelican.log import LimitFilter
+from pelican.paginator import PaginationRule
 
 
 def load_source(name: str, path: str) -> ModuleType:
@@ -320,8 +321,7 @@ def handle_deprecated_settings(settings: Settings) -> Settings:
     # EXTRA_TEMPLATES_PATHS -> THEME_TEMPLATES_OVERRIDES
     if "EXTRA_TEMPLATES_PATHS" in settings:
         logger.warning(
-            "EXTRA_TEMPLATES_PATHS is deprecated use "
-            "THEME_TEMPLATES_OVERRIDES instead."
+            "EXTRA_TEMPLATES_PATHS is deprecated use THEME_TEMPLATES_OVERRIDES instead."
         )
         if settings.get("THEME_TEMPLATES_OVERRIDES"):
             raise Exception(
@@ -453,8 +453,7 @@ def handle_deprecated_settings(settings: Settings) -> Settings:
                 settings[key] = _printf_s_to_format_field(settings[key], "lang")
             except ValueError:
                 logger.warning(
-                    "Failed to convert %%s to {lang} for %s. "
-                    "Falling back to default.",
+                    "Failed to convert %%s to {lang} for %s. Falling back to default.",
                     key,
                 )
                 settings[key] = DEFAULT_CONFIG[key]
@@ -476,8 +475,7 @@ def handle_deprecated_settings(settings: Settings) -> Settings:
                 settings[key] = _printf_s_to_format_field(settings[key], "slug")
             except ValueError:
                 logger.warning(
-                    "Failed to convert %%s to {slug} for %s. "
-                    "Falling back to default.",
+                    "Failed to convert %%s to {slug} for %s. Falling back to default.",
                     key,
                 )
                 settings[key] = DEFAULT_CONFIG[key]
@@ -689,8 +687,6 @@ def configure_settings(settings: Settings) -> Settings:
         )
 
     # fix up pagination rules
-    from pelican.paginator import PaginationRule
-
     pagination_rules = [
         PaginationRule(*r)
         for r in settings.get(
@@ -7,7 +7,7 @@ from sys import platform
 
 from jinja2.utils import generate_lorem_ipsum
 
-from pelican.contents import Article, Author, Category, Page, Static
+from pelican.contents import Article, Author, Category, Page, Static, logger
 from pelican.plugins.signals import content_object_init
 from pelican.settings import DEFAULT_CONFIG
 from pelican.tests.support import LoggedTestCase, get_context, get_settings, unittest
@@ -49,24 +49,18 @@ class TestBase(LoggedTestCase):
         self._enable_limit_filter()
 
     def _disable_limit_filter(self):
-        from pelican.contents import logger
-
         logger.disable_filter()
 
     def _enable_limit_filter(self):
-        from pelican.contents import logger
-
         logger.enable_filter()
 
     def _copy_page_kwargs(self):
-        # make a deep copy of page_kwargs
-        page_kwargs = {key: self.page_kwargs[key] for key in self.page_kwargs}
-        for key in page_kwargs:
-            if not isinstance(page_kwargs[key], dict):
+        # copy page_kwargs
+        page_kwargs = dict(self.page_kwargs)
+        for key, val in page_kwargs.items():
+            if not isinstance(val, dict):
                 break
-            page_kwargs[key] = {
-                subkey: page_kwargs[key][subkey] for subkey in page_kwargs[key]
-            }
+            page_kwargs[key] = {subkey: val[subkey] for subkey in val}
 
         return page_kwargs
 
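Note: the rewritten _copy_page_kwargs keeps the same semantics with less noise. dict(d) produces the same shallow copy as {key: d[key] for key in d}, and iterating with .items() avoids repeating the page_kwargs[key] lookup inside the loop. Roughly:

    page_kwargs = {"metadata": {"lang": "en"}, "source_path": "page.md"}

    copy_a = {key: page_kwargs[key] for key in page_kwargs}  # old spelling
    copy_b = dict(page_kwargs)  # new spelling, same shallow copy
    assert copy_a == copy_b
    # Both are shallow: the nested dict is still shared, which is why the
    # helper goes on to copy dict-valued entries key by key.
    assert copy_b["metadata"] is page_kwargs["metadata"]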
@@ -310,18 +304,16 @@ class TestPage(TestBase):
 
         # I doubt this can work on all platforms ...
         if platform == "win32":
-            locale = "jpn"
+            the_locale = "jpn"
         else:
-            locale = "ja_JP.utf8"
-        page_kwargs["settings"]["DATE_FORMATS"] = {"jp": (locale, "%Y-%m-%d(%a)")}
+            the_locale = "ja_JP.utf8"
+        page_kwargs["settings"]["DATE_FORMATS"] = {"jp": (the_locale, "%Y-%m-%d(%a)")}
         page_kwargs["metadata"]["lang"] = "jp"
 
-        import locale as locale_module
-
         try:
             page = Page(**page_kwargs)
             self.assertEqual(page.locale_date, "2015-09-13(\u65e5)")
-        except locale_module.Error:
+        except locale.Error:
             # The constructor of ``Page`` will try to set the locale to
             # ``ja_JP.utf8``. But this attempt will failed when there is no
             # such locale in the system. You can see which locales there are
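Note: renaming the local variable from locale to the_locale stops it from shadowing the locale module, so the except clause can refer to locale.Error directly and the workaround import locale as locale_module becomes unnecessary. A sketch of the issue with a hypothetical helper:

    import locale


    def set_japanese_locale():
        # Before the rename, assigning to "locale" here shadowed the module,
        # so "locale.Error" below would have been an attribute lookup on a str.
        the_locale = "ja_JP.utf8"
        try:
            locale.setlocale(locale.LC_TIME, the_locale)
        except locale.Error:
            return None
        return the_locale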
@@ -329,7 +321,7 @@ class TestPage(TestBase):
             #
             # Until we find some other method to test this functionality, we
             # will simply skip this test.
-            unittest.skip(f"There is no locale {locale} in this system.")
+            unittest.skip(f"There is no locale {the_locale} in this system.")
 
     def test_template(self):
         # Pages default to page, metadata overwrites
@@ -406,8 +398,7 @@ class TestPage(TestBase):
 
         # fragment
         args["content"] = (
-            "A simple test, with a "
-            '<a href="|filename|article.rst#section-2">link</a>'
+            'A simple test, with a <a href="|filename|article.rst#section-2">link</a>'
         )
         content = Page(**args).get_content("http://notmyidea.org")
         self.assertEqual(
@@ -687,8 +678,7 @@ class TestPage(TestBase):
         }
 
         args["content"] = (
-            "A simple test, with a link to a"
-            '<a href="{filename}poster.jpg">poster</a>'
+            'A simple test, with a link to a<a href="{filename}poster.jpg">poster</a>'
         )
         content = Page(**args).get_content("http://notmyidea.org")
         self.assertEqual(
@@ -916,10 +916,7 @@ class TestArticlesGenerator(unittest.TestCase):
             "This is a super article !",
             "This is a super article !",
             "This is an article with category !",
-            (
-                "This is an article with multiple authors in lastname, "
-                "firstname format!"
-            ),
+            ("This is an article with multiple authors in lastname, firstname format!"),
             "This is an article with multiple authors in list format!",
             "This is an article with multiple authors!",
             "This is an article with multiple authors!",
@@ -3,7 +3,7 @@ import locale
 from jinja2.utils import generate_lorem_ipsum
 
 from pelican.contents import Article, Author
-from pelican.paginator import Paginator
+from pelican.paginator import PaginationRule, Paginator
 from pelican.settings import DEFAULT_CONFIG
 from pelican.tests.support import get_settings, unittest
 
@@ -35,8 +35,6 @@ class TestPage(unittest.TestCase):
     def test_save_as_preservation(self):
         settings = get_settings()
         # fix up pagination rules
-        from pelican.paginator import PaginationRule
-
         pagination_rules = [
             PaginationRule(*r)
             for r in settings.get(
@@ -56,8 +54,6 @@ class TestPage(unittest.TestCase):
         self.assertEqual(page.save_as, "foobar.foo")
 
     def test_custom_pagination_pattern(self):
-        from pelican.paginator import PaginationRule
-
         settings = get_settings()
         settings["PAGINATION_PATTERNS"] = [
             PaginationRule(*r)
@@ -81,8 +77,6 @@ class TestPage(unittest.TestCase):
         self.assertEqual(page2.url, "//blog.my.site/2/")
 
     def test_custom_pagination_pattern_last_page(self):
-        from pelican.paginator import PaginationRule
-
         settings = get_settings()
         settings["PAGINATION_PATTERNS"] = [
             PaginationRule(*r)
@@ -23,7 +23,7 @@ def tmp_namespace_path(path):
     """
     # This avoids calls to internal `pelican.plugins.__path__._recalculate()`
     # as it should not be necessary
-    import pelican
+    import pelican  # noqa: PLC0415
 
     old_path = pelican.__path__[:]
     try:
@@ -41,8 +41,8 @@ class PluginTest(unittest.TestCase):
     _NORMAL_PLUGIN_FOLDER = os.path.join(_PLUGIN_FOLDER, "normal_plugin")
 
     def test_namespace_path_modification(self):
-        import pelican
-        import pelican.plugins
+        import pelican  # noqa: PLC0415
+        import pelican.plugins  # noqa: PLC0415
 
         old_path = pelican.__path__[:]
 
@@ -1,12 +1,11 @@
 from unittest.mock import Mock
 
+from pelican.rstdirectives import abbr_role
 from pelican.tests.support import unittest
 
 
 class Test_abbr_role(unittest.TestCase):
     def call_it(self, text):
-        from pelican.rstdirectives import abbr_role
-
         rawtext = text
         lineno = 42
         inliner = Mock(name="inliner")
@@ -2,6 +2,7 @@
 
 import argparse
 import datetime
+import json
 import logging
 import os
 import re
@@ -9,6 +10,7 @@ import subprocess
 import sys
 import tempfile
 import time
+import urllib.request as urllib_request
 from collections import defaultdict
 from html import unescape
 from urllib.error import URLError
@@ -16,6 +18,7 @@ from urllib.parse import quote, urlparse, urlsplit, urlunsplit
 from urllib.request import urlretrieve
 
 import dateutil.parser
+from docutils.utils import column_width
 
 # because logging.setLoggerClass has to be called before logging.getLogger
 from pelican.log import init
@@ -118,7 +121,7 @@ def decode_wp_content(content, br=True):
 def _import_bs4():
     """Import and return bs4, otherwise sys.exit."""
     try:
-        import bs4
+        import bs4  # noqa: PLC0415
     except ImportError:
         error = (
             'Missing dependency "BeautifulSoup4" and "lxml" required to '
@@ -272,7 +275,7 @@ def blogger2fields(xml):
 def dc2fields(file):
     """Opens a Dotclear export file, and yield pelican fields"""
     try:
-        from bs4 import BeautifulSoup
+        from bs4 import BeautifulSoup  # noqa: PLC0415
     except ImportError:
         error = (
             "Missing dependency "
@@ -311,7 +314,7 @@ def dc2fields(file):
         else:
             posts.append(line)
 
-    print("%i posts read." % len(posts))
+    print(f"{len(posts)} posts read.")
 
     subs = DEFAULT_CONFIG["SLUG_REGEX_SUBSTITUTIONS"]
     for post in posts:
@@ -367,7 +370,7 @@ def dc2fields(file):
                     .replace("a:0:", "")
                 )
                 if len(tag) > 1:
-                    if int(len(tag[:1])) == 1:
+                    if len(tag[:1]) == 1:
                         newtag = tag.split('"')[1]
                         tags.append(
                             BeautifulSoup(newtag, "xml")
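Note: dropping int() around len() changes nothing, since len() always returns an int; ruff flags such redundant casts. For an arbitrary example string:

    tag = 'a:1:{i:0;s:3:"foo";}'  # hypothetical serialized tag value
    assert int(len(tag[:1])) == len(tag[:1]) == 1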
@@ -418,13 +421,10 @@ def dc2fields(file):
 
 
 def _get_tumblr_posts(api_key, blogname, offset=0):
-    import json
-    import urllib.request as urllib_request
-
     url = (
-        "https://api.tumblr.com/v2/blog/%s.tumblr.com/"
-        "posts?api_key=%s&offset=%d&filter=raw"
-    ) % (blogname, api_key, offset)
+        f"https://api.tumblr.com/v2/blog/{blogname}.tumblr.com/"
+        f"posts?api_key={api_key}&offset={offset}&filter=raw"
+    )
     request = urllib_request.Request(url)
     handle = urllib_request.urlopen(request)
     posts = json.loads(handle.read().decode("utf-8"))
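Note: the Tumblr URL is now assembled with f-strings instead of %-formatting; the resulting string is identical, the interpolated values are just easier to follow (the usual pyupgrade-style modernisation that ruff's UP rules push). With placeholder values:

    api_key, blogname, offset = "KEY", "example", 20  # placeholder values

    old_url = (
        "https://api.tumblr.com/v2/blog/%s.tumblr.com/"
        "posts?api_key=%s&offset=%d&filter=raw"
    ) % (blogname, api_key, offset)
    new_url = (
        f"https://api.tumblr.com/v2/blog/{blogname}.tumblr.com/"
        f"posts?api_key={api_key}&offset={offset}&filter=raw"
    )
    assert old_url == new_url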
@@ -673,7 +673,7 @@ def mediumposts2fields(medium_export_dir: str):
 
 def feed2fields(file):
     """Read a feed and yield pelican fields"""
-    import feedparser
+    import feedparser  # noqa: PLC0415
 
     d = feedparser.parse(file)
     subs = DEFAULT_CONFIG["SLUG_REGEX_SUBSTITUTIONS"]
@@ -707,8 +707,6 @@ def build_header(
 ):
     """Build a header from a list of fields"""
 
-    from docutils.utils import column_width
-
     header = "{}\n{}\n".format(title, "#" * column_width(title))
     if date:
         header += f":date: {date}\n"
@@ -971,10 +969,10 @@ def fields2pelican(
         if is_pandoc_needed(in_markup) and not pandoc_version:
             posts_require_pandoc.append(filename)
 
-        slug = not disable_slugs and filename or None
-        assert slug is None or filename == os.path.basename(
-            filename
-        ), f"filename is not a basename: {filename}"
+        slug = (not disable_slugs and filename) or None
+        assert slug is None or filename == os.path.basename(filename), (
+            f"filename is not a basename: {filename}"
+        )
 
         if wp_attach and attachments:
             try:
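Note: two mechanical fixes here. The added parentheses in (not disable_slugs and filename) or None only make the existing precedence explicit, since "and" binds tighter than "or"; and the assert now keeps os.path.basename(filename) on one line with the failure message wrapped in its own parentheses, so the message can no longer be misread as an argument of basename(). Behaviour is unchanged:

    def pick_slug(disable_slugs, filename):
        # "and" binds tighter than "or", so both spellings are the same expression.
        assert ((not disable_slugs and filename) or None) == (
            not disable_slugs and filename or None
        )
        return (not disable_slugs and filename) or None


    print(pick_slug(False, "post.md"))  # post.md
    print(pick_slug(True, "post.md"))  # None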
@@ -1047,8 +1045,7 @@ def fields2pelican(
             "--wrap=none" if pandoc_version >= (1, 16) else "--no-wrap"
         )
         cmd = (
-            "pandoc --normalize {0} --from=html"
-            ' --to={1} {2} -o "{3}" "{4}"'
+            'pandoc --normalize {0} --from=html --to={1} {2} -o "{3}" "{4}"'
         )
         cmd = cmd.format(
             parse_raw,
@@ -1070,7 +1067,7 @@ def fields2pelican(
         try:
             rc = subprocess.call(cmd, shell=True)
             if rc < 0:
-                error = "Child was terminated by signal %d" % -rc
+                error = f"Child was terminated by signal {-rc}"
                 sys.exit(error)
 
             elif rc > 0:
@@ -17,8 +17,7 @@ try:
     import pelican
 except ImportError:
     err(
-        "Cannot import pelican.\nYou must "
-        "install Pelican in order to run this script.",
+        "Cannot import pelican.\nYou must install Pelican in order to run this script.",
         -1,
     )
 
@@ -10,6 +10,7 @@ import re
 import shutil
 import sys
 import traceback
+import unicodedata
 import urllib
 from collections.abc import Collection, Generator, Hashable, Iterable, Sequence
 from contextlib import contextmanager
@@ -25,6 +26,7 @@ from typing import (
 )
 
 import dateutil.parser
+import unidecode
 from watchfiles import Change
 
 try:
@@ -260,10 +262,6 @@ def slugify(
     look into pelican.settings.DEFAULT_CONFIG['SLUG_REGEX_SUBSTITUTIONS'].
     """
 
-    import unicodedata
-
-    import unidecode
-
     def normalize_unicode(text: str) -> str:
         # normalize text by compatibility composition
         # see: https://en.wikipedia.org/wiki/Unicode_equivalence
@@ -796,8 +794,7 @@ def order_content(
                 content.get_relative_source_path(),
                 extra={
                     "limit_msg": (
-                        "More files are missing "
-                        "the needed attribute."
+                        "More files are missing the needed attribute."
                     )
                 },
             )
@@ -261,8 +261,7 @@ class Writer:
         # generated pages, and write
         for page_num in range(next(iter(paginators.values())).num_pages):
             paginated_kwargs = kwargs.copy()
-            for key in paginators.keys():
-                paginator = paginators[key]
+            for key, paginator in paginators.items():
                 previous_page = paginator.page(page_num) if page_num > 0 else None
                 page = paginator.page(page_num + 1)
                 next_page = (
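Note: iterating over paginators.items() yields each key together with its paginator, removing the paginators[key] lookup that the old loop re-did on every iteration; newer ruff nudges code toward this form. Roughly:

    paginators = {"articles": ["a1", "a2"], "pages": ["p1"]}

    # old: iterate keys, then index back into the dict
    for key in paginators.keys():
        paginator = paginators[key]

    # new: unpack key and value together
    for key, paginator in paginators.items():
        print(key, paginator)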
@@ -96,7 +96,7 @@ dev = [
     "tox>=4.11.3",
     "invoke>=2.2.0",
     # ruff version should match the one in .pre-commit-config.yaml
-    "ruff==0.7.2",
+    "ruff==0.12.2",
     "tomli>=2.0.1; python_version < \"3.11\"",
 ]
 
@@ -112,7 +112,6 @@ source-includes = [
 requires = ["pdm-backend"]
 build-backend = "pdm.backend"
 
-
 [tool.ruff.lint]
 # see https://docs.astral.sh/ruff/configuration/#using-pyprojecttoml
 # "F" contains autoflake, see https://github.com/astral-sh/ruff/issues/1647
tasks.py
@@ -3,10 +3,11 @@ from pathlib import Path
 from shutil import which
 
 from invoke import task
+from livereload import Server
 
 PKG_NAME = "pelican"
 PKG_PATH = Path(PKG_NAME)
-DOCS_PORT = os.environ.get("DOCS_PORT", 8000)
+DOCS_PORT = int(os.environ.get("DOCS_PORT", "8000"))
 BIN_DIR = "bin" if os.name != "nt" else "Scripts"
 PTY = os.name != "nt"
 ACTIVE_VENV = os.environ.get("VIRTUAL_ENV", None)
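Note: os.environ.get() always returns a string, so the old DOCS_PORT was the int 8000 when the variable was unset but a str like "8001" when it was set; wrapping the lookup in int() and making the default a string yields an int in both cases. For instance:

    import os

    os.environ["DOCS_PORT"] = "8001"
    port = int(os.environ.get("DOCS_PORT", "8000"))
    assert isinstance(port, int) and port == 8001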
@@ -29,8 +30,6 @@ def docbuild(c):
 @task(docbuild)
 def docserve(c):
     """Serve docs at http://localhost:$DOCS_PORT/ (default port is 8000)"""
-    from livereload import Server
-
     server = Server()
     server.watch("docs/conf.py", lambda: docbuild(c))
     server.watch("CONTRIBUTING.rst", lambda: docbuild(c))