Mirror of https://github.com/getpelican/pelican.git
Manual pass on sources for better standards.
commit 519dcdbcb3, parent 20662c2a43
9 changed files with 136 additions and 105 deletions

@@ -293,8 +293,15 @@ def main():
                 # restriction; all files are recursively checked if they
                 # have changed, no matter what extension the filenames
                 # have.
-                if files_changed(pelican.path, pelican.markup, pelican.ignore_files) or \
-                        files_changed(pelican.theme, [''], pelican.ignore_files):
+                if (files_changed(
+                        pelican.path,
+                        pelican.markup,
+                        pelican.ignore_files)
+                        or files_changed(
+                            pelican.theme,
+                            [''],
+                            pelican.ignore_files
+                        )):
                     if not files_found_error:
                         files_found_error = True
                     pelican.run()

@@ -318,8 +325,7 @@ def main():
                 time.sleep(1)  # sleep to avoid cpu load
             except Exception as e:
                 logger.warning(
-                    "Caught exception \"{}\". Reloading.".format(e)
-                )
+                    'Caught exception "{0}". Reloading.'.format(e))
                 continue
         else:
             pelican.run()
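
Note: the recurring pattern in this commit replaces backslash line continuations with implicit continuation inside parentheses, which PEP 8 prefers. A minimal standalone sketch of the before/after styles (the names are illustrative, not taken from Pelican):

    def files_changed(path, extensions, ignore_files):
        # stand-in for the real check; always reports a change here
        return True

    path, markup, theme, ignore_files = 'content', ['rst', 'md'], 'theme', ['.#*']

    # backslash continuation (old style):
    if files_changed(path, markup, ignore_files) or \
            files_changed(theme, [''], ignore_files):
        print('regenerating (old style)')

    # implicit continuation inside parentheses (new style):
    if (files_changed(path, markup, ignore_files)
            or files_changed(theme, [''], ignore_files)):
        print('regenerating (new style)')
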
@@ -11,7 +11,6 @@ import re
 import sys

 from datetime import datetime
 from sys import platform, stdin

-
 from pelican.settings import _DEFAULT_CONFIG

@@ -92,7 +91,8 @@ class Page(object):

         if isinstance(self.date_format, tuple):
             locale_string = self.date_format[0]
-            if sys.version_info < (3, ) and isinstance(locale_string, six.text_type):
+            if sys.version_info < (3, ) and isinstance(locale_string,
+                                                       six.text_type):
                 locale_string = locale_string.encode('ascii')
             locale.setlocale(locale.LC_ALL, locale_string)
             self.date_format = self.date_format[1]

@@ -288,10 +288,13 @@ class URLWrapper(object):
         return self.name

     def _from_settings(self, key, get_page_name=False):
-        """Returns URL information as defined in settings.
-        When get_page_name=True returns URL without anything after {slug}
-        e.g. if in settings: CATEGORY_URL="cat/{slug}.html" this returns "cat/{slug}"
-        Useful for pagination."""
+        """Returns URL information as defined in settings.
+
+        When get_page_name=True returns URL without anything after {slug} e.g.
+        if in settings: CATEGORY_URL="cat/{slug}.html" this returns
+        "cat/{slug}" Useful for pagination.
+
+        """
         setting = "%s_%s" % (self.__class__.__name__.upper(), key)
         value = self.settings[setting]
         if not isinstance(value, six.string_types):

@@ -303,7 +306,8 @@ class URLWrapper(object):
         else:
             return value.format(**self.as_dict())

-    page_name = property(functools.partial(_from_settings, key='URL', get_page_name=True))
+    page_name = property(functools.partial(_from_settings, key='URL',
+                                           get_page_name=True))
     url = property(functools.partial(_from_settings, key='URL'))
     save_as = property(functools.partial(_from_settings, key='SAVE_AS'))
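
Note: the URLWrapper lines rely on building several properties from one helper by pre-binding keyword arguments with functools.partial; property() then calls the partial with the instance as its only positional argument. A self-contained sketch of the same idea (the Wrapper class and its settings dict are made up for illustration):

    import functools


    class Wrapper(object):
        settings = {'CATEGORY_URL': 'cat/{slug}.html',
                    'CATEGORY_SAVE_AS': 'cat/{slug}.html'}

        def __init__(self, slug):
            self.slug = slug

        def _from_settings(self, key):
            # look up e.g. CATEGORY_URL and substitute attributes of self
            setting = '%s_%s' % ('CATEGORY', key)
            return self.settings[setting].format(slug=self.slug)

        # property() calls the partial with the instance, so key is pre-bound
        url = property(functools.partial(_from_settings, key='URL'))
        save_as = property(functools.partial(_from_settings, key='SAVE_AS'))


    print(Wrapper('python').url)      # -> cat/python.html
    print(Wrapper('python').save_as)  # -> cat/python.html
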
@@ -14,11 +14,14 @@ from functools import partial
 from itertools import chain
 from operator import attrgetter, itemgetter

-from jinja2 import (Environment, FileSystemLoader, PrefixLoader, ChoiceLoader,
-                    BaseLoader, TemplateNotFound)
+from jinja2 import (
+    Environment, FileSystemLoader, PrefixLoader, ChoiceLoader, BaseLoader,
+    TemplateNotFound
+)

-from pelican.contents import Article, Page, Category, StaticContent, \
-    is_valid_content
+from pelican.contents import (
+    Article, Page, Category, StaticContent, is_valid_content
+)
 from pelican.readers import read_file
 from pelican.utils import copy, process_translations, mkdir_p
 from pelican import signals

@@ -76,8 +79,9 @@ class Generator(object):
         try:
             self._templates[name] = self.env.get_template(name + '.html')
         except TemplateNotFound:
-            raise Exception('[templates] unable to load %s.html from %s' \
-                            % (name, self._templates_path))
+            raise Exception(
+                ('[templates] unable to load %s.html from %s'
+                 % (name, self._templates_path)))
         return self._templates[name]

     def get_files(self, path, exclude=[], extensions=None):

@@ -165,8 +169,8 @@ class ArticlesGenerator(Generator):
         self.categories = defaultdict(list)
         self.related_posts = []
         self.authors = defaultdict(list)
-        super(ArticlesGenerator, self).__init__(*args, **kwargs)
         self.drafts = []
+        super(ArticlesGenerator, self).__init__(*args, **kwargs)
         signals.article_generator_init.send(self)

     def generate_feeds(self, writer):

@@ -180,8 +184,8 @@ class ArticlesGenerator(Generator):
             writer.write_feed(self.articles, self.context,
                               self.settings['FEED_RSS'], feed_type='rss')

-        if self.settings.get('FEED_ALL_ATOM') or \
-                self.settings.get('FEED_ALL_RSS'):
+        if (self.settings.get('FEED_ALL_ATOM')
+                or self.settings.get('FEED_ALL_RSS')):
             all_articles = list(self.articles)
             for article in self.articles:
                 all_articles.extend(article.translations)

@@ -193,7 +197,8 @@ class ArticlesGenerator(Generator):

             if self.settings.get('FEED_ALL_RSS'):
                 writer.write_feed(all_articles, self.context,
-                                  self.settings['FEED_ALL_RSS'], feed_type='rss')
+                                  self.settings['FEED_ALL_RSS'],
+                                  feed_type='rss')

         for cat, arts in self.categories:
             arts.sort(key=attrgetter('date'), reverse=True)

@@ -206,8 +211,8 @@ class ArticlesGenerator(Generator):
                                   self.settings['CATEGORY_FEED_RSS'] % cat,
                                   feed_type='rss')

-        if self.settings.get('TAG_FEED_ATOM') \
-                or self.settings.get('TAG_FEED_RSS'):
+        if (self.settings.get('TAG_FEED_ATOM')
+                or self.settings.get('TAG_FEED_RSS')):
             for tag, arts in self.tags.items():
                 arts.sort(key=attrgetter('date'), reverse=True)
                 if self.settings.get('TAG_FEED_ATOM'):

@@ -219,8 +224,8 @@ class ArticlesGenerator(Generator):
                                       self.settings['TAG_FEED_RSS'] % tag,
                                       feed_type='rss')

-        if self.settings.get('TRANSLATION_FEED_ATOM') or \
-                self.settings.get('TRANSLATION_FEED_RSS'):
+        if (self.settings.get('TRANSLATION_FEED_ATOM')
+                or self.settings.get('TRANSLATION_FEED_RSS')):
             translations_feeds = defaultdict(list)
             for article in chain(self.articles, self.translations):
                 translations_feeds[article.lang].append(article)

@@ -376,7 +381,7 @@ class ArticlesGenerator(Generator):
             # only main articles are listed in categories, not translations
             self.categories[article.category].append(article)
             # ignore blank authors as well as undefined
-            if hasattr(article,'author') and article.author.name != '':
+            if hasattr(article, 'author') and article.author.name != '':
                 self.authors[article.author].append(article)

         # sort the articles by date

@@ -470,7 +475,8 @@ class PagesGenerator(Generator):
                               repr(f)))

         self.pages, self.translations = process_translations(all_pages)
-        self.hidden_pages, self.hidden_translations = process_translations(hidden_pages)
+        self.hidden_pages, self.hidden_translations = (
+            process_translations(hidden_pages))

         self._update_context(('pages', ))
         self.context['PAGES'] = self.pages

@@ -574,6 +580,7 @@ class PdfGenerator(Generator):
         for page in self.context['pages']:
             self._create_pdf(page, pdf_path)

+
 class SourceFileGenerator(Generator):
     def generate_context(self):
         self.output_extension = self.settings['OUTPUT_SOURCES_EXTENSION']
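
Note: Generator._get_template above caches compiled templates and turns Jinja2's TemplateNotFound into a clearer error message. A reduced sketch of that lookup, assuming the jinja2 package is installed (a DictLoader stands in for the theme's FileSystemLoader):

    from jinja2 import DictLoader, Environment, TemplateNotFound

    env = Environment(loader=DictLoader({'article.html': '<h1>{{ title }}</h1>'}))
    _templates = {}


    def get_template(name):
        if name not in _templates:
            try:
                _templates[name] = env.get_template(name + '.html')
            except TemplateNotFound:
                raise Exception('[templates] unable to load %s.html' % name)
        return _templates[name]


    print(get_template('article').render(title='Hello'))  # -> <h1>Hello</h1>
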
@@ -31,11 +31,10 @@ def ansi(color, text):


 class ANSIFormatter(Formatter):
-    """
-    Convert a `logging.LogRecord' object into colored text, using ANSI escape sequences.
-    """
-    ## colors:
+    """Convert a `logging.LogRecord' object into colored text, using ANSI
+    escape sequences.
+
+    """
     def format(self, record):
         msg = str(record.msg)
         if record.levelname == 'INFO':

@@ -67,8 +66,8 @@ class TextFormatter(Formatter):
 def init(level=None, logger=getLogger(), handler=StreamHandler()):
     logger = logging.getLogger()

-    if os.isatty(sys.stdout.fileno()) \
-            and not sys.platform.startswith('win'):
+    if (os.isatty(sys.stdout.fileno())
+            and not sys.platform.startswith('win')):
         fmt = ANSIFormatter()
     else:
         fmt = TextFormatter()
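
Note: the ANSI coloring that ANSIFormatter relies on boils down to wrapping text in escape sequences, and the isatty() check avoids emitting them when output is redirected or on Windows consoles. A stdlib-only sketch (the escape codes are standard; the color table and helper body here are illustrative, not copied from the module):

    import os
    import sys

    ANSI_CODES = {'red': 31, 'green': 32, 'yellow': 33, 'cyan': 36, 'white': 37}


    def ansi(color, text):
        # wrap text in an ANSI SGR escape sequence, e.g. \033[31mERROR\033[0m
        return '\033[{0}m{1}\033[0m'.format(ANSI_CODES[color], text)


    if os.isatty(sys.stdout.fileno()) and not sys.platform.startswith('win'):
        print(ansi('green', 'INFO') + ': colored because stdout is a terminal')
    else:
        print('INFO: plain because stdout is redirected or on Windows')
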
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 from __future__ import unicode_literals, print_function
 import six
-
 import os
 import re

@@ -23,7 +22,6 @@ try:
     asciidoc = True
 except ImportError:
     asciidoc = False
-import re

 import cgi
 try:

@@ -168,6 +166,7 @@ class MarkdownReader(Reader):
             metadata = self._parse_metadata(md.Meta)
         return content, metadata

+
 class HTMLReader(Reader):
     """Parses HTML files as input, looking for meta, title, and body tags"""
     file_extensions = ['htm', 'html']

@@ -237,10 +236,10 @@ class HTMLReader(Reader):

     def handle_charref(self, data):
         self._data_buffer += '&#{};'.format(data)

     def build_tag(self, tag, attrs, close_tag):
         result = '<{}'.format(cgi.escape(tag))
-        for k,v in attrs:
+        for k, v in attrs:
             result += ' ' + cgi.escape(k)
             if v is not None:
                 result += '="{}"'.format(cgi.escape(v))

@@ -272,6 +271,7 @@ class HTMLReader(Reader):
                 metadata[k] = self.process_metadata(k, parser.metadata[k])
         return parser.body, metadata

+
 class AsciiDocReader(Reader):
     enabled = bool(asciidoc)
     file_extensions = ['asc']
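
Note: the try/except ImportError block near the top of the readers module is the usual pattern for optional dependencies: the import is attempted once and a module-level flag records whether the feature is available, so a reader can be disabled instead of crashing at import time. A minimal sketch of that idiom (docutils is used here only as a stand-in for the optional package):

    try:
        import docutils  # NOQA -- imported only to see whether it is available
        rst_enabled = True
    except ImportError:
        rst_enabled = False


    class RstReader(object):
        # readers can then be switched on or off without crashing at import time
        enabled = rst_enabled


    print('reST support available:', RstReader.enabled)
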
@@ -2,12 +2,12 @@ from __future__ import print_function
 try:
     import SimpleHTTPServer as srvmod
 except ImportError:
-    import http.server as srvmod
+    import http.server as srvmod  # NOQA

 try:
     import SocketServer as socketserver
 except ImportError:
-    import socketserver
+    import socketserver  # NOQA

 PORT = 8000


@@ -17,4 +17,3 @@ httpd = socketserver.TCPServer(("", PORT), Handler)

 print("serving at port", PORT)
 httpd.serve_forever()
-
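
Note: the dual import above keeps the development server working on both Python 2 and Python 3. For comparison, a Python 3-only sketch of the same behaviour, serving the current directory on port 8000 (the context-manager form of TCPServer needs Python 3.6+):

    import http.server
    import socketserver

    PORT = 8000
    Handler = http.server.SimpleHTTPRequestHandler

    with socketserver.TCPServer(("", PORT), Handler) as httpd:
        print("serving at port", PORT)
        httpd.serve_forever()
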
@@ -62,7 +62,8 @@ _DEFAULT_CONFIG = {'PATH': '.',
                    'DEFAULT_LANG': 'en',
                    'TAG_CLOUD_STEPS': 4,
                    'TAG_CLOUD_MAX_ITEMS': 100,
-                   'DIRECT_TEMPLATES': ('index', 'tags', 'categories', 'archives'),
+                   'DIRECT_TEMPLATES': ('index', 'tags', 'categories',
+                                        'archives'),
                    'EXTRA_TEMPLATES_PATHS': [],
                    'PAGINATED_DIRECT_TEMPLATES': ('index', ),
                    'PELICAN_CLASS': 'pelican.Pelican',

@@ -171,7 +172,8 @@ def configure_settings(settings):
         if (siteurl.endswith('/')):
             settings['SITEURL'] = siteurl[:-1]
             logger.warn("Removed extraneous trailing slash from SITEURL.")
-    # If SITEURL is defined but FEED_DOMAIN isn't, set FEED_DOMAIN = SITEURL
+    # If SITEURL is defined but FEED_DOMAIN isn't,
+    # set FEED_DOMAIN to SITEURL
     if not 'FEED_DOMAIN' in settings:
         settings['FEED_DOMAIN'] = settings['SITEURL']


@@ -185,36 +187,45 @@ def configure_settings(settings):

     if any(settings.get(k) for k in feed_keys):
         if not settings.get('FEED_DOMAIN'):
-            logger.warn("Since feed URLs should always be absolute, you should specify "
-                        "FEED_DOMAIN in your settings. (e.g., 'FEED_DOMAIN = "
-                        "http://www.example.com')")
+            logger.warn(
+                "Since feed URLs should always be absolute, you should specify"
+                " FEED_DOMAIN in your settings. (e.g., 'FEED_DOMAIN = "
+                "http://www.example.com')")

         if not settings.get('SITEURL'):
-            logger.warn("Feeds generated without SITEURL set properly may not be valid")
+            logger.warn('Feeds generated without SITEURL set properly may not'
+                        ' be valid')

     if not 'TIMEZONE' in settings:
-        logger.warn("No timezone information specified in the settings. Assuming"
-                    " your timezone is UTC for feed generation. Check "
-                    "http://docs.notmyidea.org/alexis/pelican/settings.html#timezone "
-                    "for more information")
+        logger.warn(
+            'No timezone information specified in the settings. Assuming'
+            ' your timezone is UTC for feed generation. Check '
+            'http://docs.notmyidea.org/alexis/pelican/settings.html#timezone '
+            'for more information')

     if 'LESS_GENERATOR' in settings:
-        logger.warn("The LESS_GENERATOR setting has been removed in favor "
-                    "of the Webassets plugin")
+        logger.warn(
+            'The LESS_GENERATOR setting has been removed in favor '
+            'of the Webassets plugin')

     if 'OUTPUT_SOURCES_EXTENSION' in settings:
-        if not isinstance(settings['OUTPUT_SOURCES_EXTENSION'], six.string_types):
-            settings['OUTPUT_SOURCES_EXTENSION'] = _DEFAULT_CONFIG['OUTPUT_SOURCES_EXTENSION']
-            logger.warn("Detected misconfiguration with OUTPUT_SOURCES_EXTENSION."
-                        " falling back to the default extension " +
-                        _DEFAULT_CONFIG['OUTPUT_SOURCES_EXTENSION'])
+        if not isinstance(settings['OUTPUT_SOURCES_EXTENSION'],
+                          six.string_types):
+            settings['OUTPUT_SOURCES_EXTENSION'] = (
+                _DEFAULT_CONFIG['OUTPUT_SOURCES_EXTENSION'])
+            logger.warn(
+                'Detected misconfiguration with OUTPUT_SOURCES_EXTENSION, '
+                'falling back to the default extension ' +
+                _DEFAULT_CONFIG['OUTPUT_SOURCES_EXTENSION'])

     filename_metadata = settings.get('FILENAME_METADATA')
-    if filename_metadata and not isinstance(filename_metadata, six.string_types):
-        logger.error("Detected misconfiguration with FILENAME_METADATA"
-                     " setting (must be string or compiled pattern), falling"
-                     "back to the default")
-        settings['FILENAME_METADATA'] = \
-            _DEFAULT_CONFIG['FILENAME_METADATA']
+    if filename_metadata and not isinstance(filename_metadata,
+                                            six.string_types):
+        logger.error(
+            'Detected misconfiguration with FILENAME_METADATA '
+            'setting (must be string or compiled pattern), falling '
+            'back to the default')
+        settings['FILENAME_METADATA'] = (
+            _DEFAULT_CONFIG['FILENAME_METADATA'])

     return settings
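
Note: configure_settings follows a defensive pattern throughout: validate each user-supplied value and, on a type mismatch, log a warning and fall back to the library default rather than aborting the build. A compact standalone sketch of that approach (the setting name is taken from the diff, the default value and helper are illustrative):

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    DEFAULTS = {'OUTPUT_SOURCES_EXTENSION': '.text'}


    def configure(settings):
        value = settings.get('OUTPUT_SOURCES_EXTENSION',
                             DEFAULTS['OUTPUT_SOURCES_EXTENSION'])
        if not isinstance(value, str):
            # misconfigured: warn and recover instead of raising
            logger.warning('OUTPUT_SOURCES_EXTENSION must be a string, '
                           'falling back to %r',
                           DEFAULTS['OUTPUT_SOURCES_EXTENSION'])
            value = DEFAULTS['OUTPUT_SOURCES_EXTENSION']
        settings['OUTPUT_SOURCES_EXTENSION'] = value
        return settings


    print(configure({'OUTPUT_SOURCES_EXTENSION': 123}))
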
@@ -29,12 +29,13 @@ def strftime(date, date_format):

     This :func:`strftime()` is compatible to Python 2 and 3. In both cases,
     input and output is always unicode.

     Still, Python 3's :func:`strftime()` seems to somehow "normalize" unicode
     chars in the format string. So if e.g. your format string contains 'ø' or
     'ä', the result will be 'o' and 'a'.

-    See here for an `extensive testcase <https://github.com/dmdm/test_strftime>`_.
+    See here for an `extensive testcase
+    <https://github.com/dmdm/test_strftime>`_.

     :param date: Any object that sports a :meth:`strftime()` method.
     :param date_format: Format string, can always be unicode.

@@ -67,18 +68,12 @@ def strftime(date, date_format):
         result = unicode(result)
     # Convert XML character references back to unicode characters.
     if "&#" in result:
-        result = re.sub(r'&#(?P<num>\d+);'
-                        , lambda m: unichr(int(m.group('num')))
-                        , result
-                        )
+        result = re.sub(r'&#(?P<num>\d+);',
+                        lambda m: unichr(int(m.group('num'))),
+                        result)
     return result

-
-
 #----------------------------------------------------------------------------
 # Stolen from Django: django.utils.encoding
 #

 def python_2_unicode_compatible(klass):
     """
     A decorator that defines __unicode__ and __str__ methods under Python 2.
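
Note: the re.sub call above converts numeric XML character references back into the characters they stand for. The same one-liner in Python 3 form (chr replaces the Python 2-only unichr used in the source):

    import re

    s = 'Montag, 6. M&#228;rz'          # '&#228;' is the reference for 'ä'
    s = re.sub(r'&#(?P<num>\d+);',
               lambda m: chr(int(m.group('num'))),
               s)
    print(s)                            # -> Montag, 6. März
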
@@ -86,43 +81,48 @@ def python_2_unicode_compatible(klass):

     To support Python 2 and 3 with a single code base, define a __str__ method
     returning text and apply this decorator to the class.

     From django.utils.encoding.
     """
     if not six.PY3:
         klass.__unicode__ = klass.__str__
         klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
     return klass

-#----------------------------------------------------------------------------

 class NoFilesError(Exception):
     pass


 class memoized(object):
-    '''Decorator. Caches a function's return value each time it is called.
-    If called later with the same arguments, the cached value is returned
-    (not reevaluated).
-    '''
-    def __init__(self, func):
-        self.func = func
-        self.cache = {}
-    def __call__(self, *args):
-        if not isinstance(args, Hashable):
-            # uncacheable. a list, for instance.
-            # better to not cache than blow up.
-            return self.func(*args)
-        if args in self.cache:
-            return self.cache[args]
-        else:
-            value = self.func(*args)
-            self.cache[args] = value
-            return value
-    def __repr__(self):
-        '''Return the function's docstring.'''
-        return self.func.__doc__
-    def __get__(self, obj, objtype):
-        '''Support instance methods.'''
-        return partial(self.__call__, obj)
+    """Function decorator to cache return values.
+
+    If called later with the same arguments, the cached value is returned
+    (not reevaluated).
+
+    """
+    def __init__(self, func):
+        self.func = func
+        self.cache = {}
+
+    def __call__(self, *args):
+        if not isinstance(args, Hashable):
+            # uncacheable. a list, for instance.
+            # better to not cache than blow up.
+            return self.func(*args)
+        if args in self.cache:
+            return self.cache[args]
+        else:
+            value = self.func(*args)
+            self.cache[args] = value
+            return value
+
+    def __repr__(self):
+        return self.func.__doc__
+
+    def __get__(self, obj, objtype):
+        '''Support instance methods.'''
+        return partial(self.__call__, obj)


 def deprecated_attribute(old, new, since=None, remove=None, doc=None):
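
Note: as a usage sketch of the caching decorator above, here is a trimmed-down standalone version (without the Hashable guard) applied to a deliberately noisy function; the second call with the same argument returns the cached value without re-running the body:

    from functools import partial


    class memoized(object):
        """Cache a function's return value per argument tuple."""
        def __init__(self, func):
            self.func = func
            self.cache = {}

        def __call__(self, *args):
            if args not in self.cache:
                self.cache[args] = self.func(*args)
            return self.cache[args]

        def __get__(self, obj, objtype):
            # support instance methods by pre-binding the instance
            return partial(self.__call__, obj)


    @memoized
    def slow_square(n):
        print('computing %d**2' % n)
        return n * n


    print(slow_square(4))   # prints "computing 4**2", then 16
    print(slow_square(4))   # prints only 16 (served from the cache)
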
@@ -166,6 +166,7 @@ def deprecated_attribute(old, new, since=None, remove=None, doc=None):

     return decorator

+
 def get_date(string):
     """Return a datetime object from a string.


@@ -196,6 +197,7 @@ class pelican_open(object):
     def __exit__(self, exc_type, exc_value, traceback):
         pass

+
 def slugify(value):
     """
     Normalizes string, converts to lowercase, removes non-alpha characters,

@@ -262,6 +264,7 @@ def copy(path, source, destination, destination_path=None, overwrite=False):
     else:
         logger.warning('skipped copy %s to %s' % (source_, destination_))

+
 def clean_output_dir(path):
     """Remove all the files from the output directory"""


@@ -414,6 +417,7 @@ def process_translations(content_list):

 LAST_MTIME = 0

+
 def files_changed(path, extensions, ignores=[]):
     """Return True if the files have changed since the last check"""


@@ -423,7 +427,8 @@ def files_changed(path, extensions, ignores=[]):
         dirs[:] = [x for x in dirs if x[0] != '.']
         for f in files:
             if any(f.endswith(ext) for ext in extensions) \
-                    and not any(fnmatch.fnmatch(f, ignore) for ignore in ignores):
+                    and not any(fnmatch.fnmatch(f, ignore)
+                                for ignore in ignores):
                 yield os.stat(os.path.join(root, f)).st_mtime

     global LAST_MTIME
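
Note: files_changed drives the autoreload loop touched at the top of this commit: walk the source tree, collect modification times for files with a watched extension, and report a change when the newest mtime moves past the last one seen. A standalone sketch of that polling idea (LAST_MTIME kept as a module global, as in the source; the body is simplified, not a copy):

    import fnmatch
    import os

    LAST_MTIME = 0


    def files_changed(path, extensions, ignores=()):
        """Return True if any watched file under path changed since last call."""
        global LAST_MTIME
        newest = LAST_MTIME
        for root, dirs, files in os.walk(path):
            dirs[:] = [d for d in dirs if not d.startswith('.')]
            for f in files:
                if (any(f.endswith(ext) for ext in extensions)
                        and not any(fnmatch.fnmatch(f, ignore)
                                    for ignore in ignores)):
                    newest = max(newest,
                                 os.stat(os.path.join(root, f)).st_mtime)
        if newest > LAST_MTIME:
            LAST_MTIME = newest
            return True
        return False


    print(files_changed('.', ['.py'], ignores=['.#*']))  # True on first call
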
@@ -78,11 +78,11 @@ class Writer(object):
                 os.makedirs(os.path.dirname(complete_path))
             except Exception:
                 pass
-            fp = open(complete_path, 'w', encoding='utf-8' if six.PY3 else None)
-            feed.write(fp, 'utf-8')
-            logger.info('writing %s' % complete_path)
+            encoding = 'utf-8' if six.PY3 else None
+            with open(complete_path, 'w', encoding=encoding) as fp:
+                feed.write(fp, 'utf-8')
+                logger.info('writing %s' % complete_path)

-            fp.close()
             return feed
         finally:
             locale.setlocale(locale.LC_ALL, old_locale)
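
Note: replacing the manual open()/close() pair with a with block means the file handle is closed even if the write raises. A minimal illustration of the pattern (plain text instead of a feed object, file name is arbitrary):

    # old style: the handle leaks if write() raises before close()
    fp = open('feed.xml', 'w')
    fp.write('<rss/>')
    fp.close()

    # new style: the context manager closes the file on success and on error
    with open('feed.xml', 'w') as fp:
        fp.write('<rss/>')
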