Mirror of https://github.com/getpelican/pelican.git
Added tag cloud generation. The result is stored in the context as tag_cloud, a list of (tag, weight) tuples. Weight runs from 1 to TAG_CLOUD_STEPS; a lower weight corresponds to a bigger font size.
parent 7873370583
commit b3256f0ecd
3 changed files with 34 additions and 2 deletions
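The commit message describes only the data contract, not the template side. As a rough sketch (not part of this commit), a theme could consume tag_cloud along these lines, assuming plain string tags and hypothetical CSS class names tag-1 … tag-4:

# Minimal, hypothetical rendering sketch; only the (tag, weight) shape comes
# from the commit, everything else (tag names, CSS classes) is illustrative.
from jinja2 import Template

tag_cloud = [('python', 1), ('jinja2', 3), ('ssg', 4)]  # weight 1 = biggest font

template = Template(
    '<ul class="tagcloud">\n'
    '{% for tag, weight in tag_cloud %}'
    '  <li class="tag-{{ weight }}">{{ tag }}</li>\n'
    '{% endfor %}'
    '</ul>'
)
print(template.render(tag_cloud=tag_cloud))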
@@ -60,6 +60,8 @@ Setting name what it does ?
 `STATIC_THEME_PATHS`   Static theme paths you want to copy. Default values
                        is `static`, but if your theme have others static paths,
                        you can put them here.
+`TAG_CLOUD_STEPS`      Count of different font sizes in the tag cloud.
+`TAG_CLOUD_MAX_ITEMS`  Maximum tags count in the cloud.
 `THEME`                theme to use to product the output. can be the
                        complete static path to a theme folder, or chosen
                        between the list of default themes (see below)
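The two settings documented above can also be overridden from a user's settings file; a hedged example (names from the table above, values purely illustrative):

# Hypothetical excerpt from a user's Pelican settings file.
TAG_CLOUD_STEPS = 6        # six font-size buckets instead of the default 4
TAG_CLOUD_MAX_ITEMS = 50   # keep only the 50 most-used tags in the cloud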
@@ -1,9 +1,11 @@
-from operator import attrgetter
+from operator import attrgetter, itemgetter
 from itertools import chain
 from functools import partial
 from datetime import datetime
 from collections import defaultdict
 import os
+import math
+import random

 from jinja2 import Environment, FileSystemLoader
 from jinja2.exceptions import TemplateNotFound
@@ -193,8 +195,34 @@ class ArticlesGenerator(Generator):
         self.dates = list(self.articles)
         self.dates.sort(key=attrgetter('date'),
                         reverse=self.context['REVERSE_ARCHIVE_ORDER'])

+        # create tag cloud
+        tag_cloud = defaultdict(int)
+        for article in self.articles:
+            for tag in article.tags:
+                tag_cloud[tag] += 1
+
+        tag_cloud = sorted(tag_cloud.items(), key = itemgetter(1), reverse = True)
+        tag_cloud = tag_cloud[:self.settings.get('TAG_CLOUD_MAX_ITEMS')]
+
+        max_count = max(map(itemgetter(1), tag_cloud))
+        steps = self.settings.get('TAG_CLOUD_STEPS')
+
+        # calculate word sizes
+        self.tag_cloud = [
+            (
+                tag,
+                int(
+                    math.floor(steps - (steps - 1) * math.log(count) / math.log(max_count))
+                )
+            )
+            for tag, count in tag_cloud
+        ]
+        # put words in chaos
+        random.shuffle(self.tag_cloud)
+
         # and generate the output :)
-        self._update_context(('articles', 'dates', 'tags', 'categories'))
+        self._update_context(('articles', 'dates', 'tags', 'categories', 'tag_cloud'))

     def generate_output(self, writer):
         self.generate_feeds(writer)
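The weight computation above is a log-scaled bucketing: the most frequent tag (count equal to max_count) gets weight 1, and a tag used only once gets weight equal to TAG_CLOUD_STEPS, matching the "lesser weight, bigger font" convention from the commit message. A standalone sketch of the same formula, with a guard added here (not present in the diff) for the case where max_count is 1 and math.log(max_count) would be zero:

import math

def tag_weight(count, max_count, steps=4):
    # Reproduces the generator's bucketing; the guard below is an addition
    # for the degenerate case where every tag appears exactly once.
    if max_count <= 1:
        return steps
    return int(math.floor(steps - (steps - 1) * math.log(count) / math.log(max_count)))

# With steps=4 and the most-used tag seen 40 times, this prints 1, 1, 3, 4:
for count in (40, 12, 3, 1):
    print(tag_weight(count, max_count=40))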
@@ -22,6 +22,8 @@ _DEFAULT_CONFIG = {'PATH': None,
     'CLEAN_URLS': False, # use /blah/ instead /blah.html in urls
     'RELATIVE_URLS': True,
     'DEFAULT_LANG': 'en',
+    'TAG_CLOUD_STEPS': 4,
+    'TAG_CLOUD_MAX_ITEMS': 100,
     }

 def read_settings(filename):
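These defaults matter because the generator reads both keys with settings.get() and no fallback value. A quick illustration of what would happen without them (plain Python behaviour, nothing Pelican-specific):

# A missing TAG_CLOUD_MAX_ITEMS would make .get() return None, and slicing
# with None silently keeps every tag instead of capping the cloud:
tags = [('python', 30), ('web', 12), ('ssg', 3)]
print(tags[:None])    # [('python', 30), ('web', 12), ('ssg', 3)]

# A missing TAG_CLOUD_STEPS would surface later as a TypeError in the
# weight arithmetic:
try:
    None - 1
except TypeError as exc:
    print(exc)        # unsupported operand type(s) for -: 'NoneType' and 'int'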