forked from github/pelican
Compare commits: poetry-bui ... main (563 commits)
544 changed files with 5124 additions and 41876 deletions
@@ -1,3 +0,0 @@
-[report]
-omit = pelican/tests/*
-
@@ -3,13 +3,13 @@ root = true
 [*]
 charset = utf-8
 end_of_line = lf
-indent_size = 4
+indent_size = 2
 indent_style = space
 insert_final_newline = true
 trim_trailing_whitespace = true

 [*.py]
-max_line_length = 79
+max_line_length = 88

-[*.yml]
+[*.{yml,yaml}]
 indent_size = 2
.gitattributes (7 lines changed, vendored)
@@ -1,7 +0,0 @@
-# Auto detect text files and perform LF normalization
-* text=auto
-
-# Improve accuracy of GitHub's Linguist-powered language statistics
-pelican/tests/content/* linguist-vendored
-pelican/tests/output/* linguist-vendored
-samples/* linguist-vendored
.github/FUNDING.yml (2 lines changed, vendored)
@@ -1,2 +0,0 @@
-custom: https://donate.getpelican.com
-liberapay: pelican
.github/ISSUE_TEMPLATE/---bug-report.md (40 lines changed, vendored)
@@ -1,40 +0,0 @@
----
-name: "\U0001F41E Bug Report"
-about: Did you find a bug?
-title: ''
-labels: bug
-assignees: ''
-
----
-
-<!--
-Hi there! Thank you for discovering and submitting an issue.
-
-Before you submit this, let’s make sure of a few things.
-Please make sure the following boxes are ticked if they are correct.
-If not, please try and fulfill them first.
--->
-
-<!-- Checked checkbox should look like this: [x] -->
-- [ ] I have read the [Filing Issues](https://docs.getpelican.com/en/latest/contribute.html#filing-issues) and subsequent “How to Get Help” sections of the documentation.
-- [ ] I have searched the [issues](https://github.com/getpelican/pelican/issues?q=is%3Aissue) (including closed ones) and believe that this is not a duplicate.
-
-<!--
-Once the above boxes are checked, if you are able to fill in the following list
-with your information, it would be very helpful for maintainers.
--->
-
-- **OS version and name**: <!-- Replace with version + name -->
-- **Python version**: <!-- Replace with version -->
-- **Pelican version**: <!-- Replace with version -->
-- **Link to theme**: <!-- Replace with link to the theme you are using -->
-- **Links to plugins**: <!-- Replace with list of links to plugins you are using -->
-- **Link to your site**: <!-- If available, replace with link to your site -->
-- **Link to your source**: <!-- If available, replace with link to relevant source repository -->
-- **Link to a [Gist](https://gist.github.com/) with the contents of your settings file**: <!-- If your source is not accessible, put Gist link here -->
-
-## Issue
-<!--
-Now feel free to write your issue. Please avoid vague phrases like “[…] doesn’t work”.
-Be descriptive! Thanks again 🙌 ❤️
--->
.github/ISSUE_TEMPLATE/---documentation.md (22 lines changed, vendored)
@@ -1,22 +0,0 @@
----
-name: "\U0001F4DA Documentation"
-about: Did you find errors, problems, or anything unclear in the docs (https://docs.getpelican.com/)?
-title: ''
-labels: docs
-assignees: ''
-
----
-
-<!--
-Hi there! Thank you for discovering and submitting an issue with our documentation.
-
-Before you submit this, let’s make sure of a few things.
-Please make sure the following boxes are ticked if they are correct.
-If not, please try and fulfill them first.
--->
-
-<!-- Checked checkbox should look like this: [x] -->
-- [ ] I have searched the [issues](https://github.com/getpelican/pelican/issues?q=is%3Aissue) (including closed ones) and believe that this is not a duplicate.
-
-## Issue
-<!-- Now feel free to write your issue, but please be descriptive! Thanks again 🙌 ❤️ -->
.github/ISSUE_TEMPLATE/---enhancement-request.md (24 lines changed, vendored)
@@ -1,24 +0,0 @@
----
-name: "\U0001F381 Feature Request"
-about: Do you have ideas for new features and improvements?
-title: ''
-labels: enhancement
-assignees: ''
-
----
-
-<!--
-Hi there! Thank you for wanting to make Pelican better.
-
-Before you submit this, let’s make sure of a few things.
-Please make sure the following boxes are ticked if they are correct.
-If not, please try and fulfill them first. The last one is optional but encouraged.
--->
-
-<!-- Checked checkbox should look like this: [x] -->
-- [ ] I have searched the [issues](https://github.com/getpelican/pelican/issues?q=is%3Aissue) (including closed ones) and believe that this is not a duplicate.
-- [ ] I have searched the [documentation](https://docs.getpelican.com/) and believe that my question is not covered.
-- [ ] I am willing to lend a hand to help implement this feature. <!-- optional but encouraged -->
-
-## Feature Request
-<!-- Now feel free to write your idea for improvement. Thanks again 🙌 ❤️ -->
.github/ISSUE_TEMPLATE/---everything-else.md (23 lines changed, vendored)
@@ -1,23 +0,0 @@
----
-name: "\U0001F5C3 Everything Else"
-about: Do you have a question/issue that does not fall into any of the other categories?
-title: ''
-labels: question
-assignees: ''
-
----
-
-<!--
-Describe your question/issue here. This space is meant to be used for general questions
-that are not bugs, feature requests, or documentation issues.
-Before you submit this, let’s make sure of a few things.
-Please make sure the following boxes are ticked if they are correct.
-If not, please try and fulfill them first.
--->
-
-<!-- Checked checkbox should look like this: [x] -->
-- [ ] I have searched the [issues](https://github.com/getpelican/pelican/issues?q=is%3Aissue) (including closed ones) and believe that this is not a duplicate.
-- [ ] I have searched the [documentation](https://docs.getpelican.com/) and believe that my question is not covered.
-
-## Issue
-<!-- Now feel free to write your issue, but please be descriptive! Thanks again 🙌 ❤️ -->
.github/ISSUE_TEMPLATE/config.yml (7 lines changed, vendored)
@@ -1,7 +0,0 @@
-# Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser
-blank_issues_enabled: true
-contact_links:
-- name: '💬 Pelican IRC Channel on Freenode'
-url: https://kiwiirc.com/client/irc.freenode.net/?#pelican
-about: |
-Chat with the community, ask questions, and learn about best practices.
.github/pull_request_template.md (12 lines changed, vendored)
@@ -1,12 +0,0 @@
-# Pull Request Checklist
-
-Resolves: #issue-number-here <!-- Only if related issue *already* exists — otherwise remove this line -->
-
-<!-- This is just a reminder about the most common mistakes. Please make sure that you tick all *appropriate* boxes. Also, please read our [contribution guide](https://docs.getpelican.com/en/latest/contribute.html#contributing-code) at least once — it will save you unnecessary review cycles! -->
-
-- [ ] Ensured **tests pass** and (if applicable) updated functional test output
-- [ ] Conformed to **code style guidelines** by running appropriate linting tools
-- [ ] Added **tests** for changed code
-- [ ] Updated **documentation** for changed code
-
-<!-- If you have *any* questions to *any* of the points above, just **submit and ask**! This checklist is here to *help* you, not to deter you from contributing! -->
.github/stale.yml (31 lines changed, vendored)
@@ -1,31 +0,0 @@
-# Configuration for probot-stale - https://github.com/probot/stale
-
-# Number of days of inactivity before an Issue or Pull Request becomes stale
-daysUntilStale: 60
-
-# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
-daysUntilClose: 30
-
-# Set to true to ignore issues in a project (defaults to false)
-exemptProjects: true
-
-# Set to true to ignore issues in a milestone (defaults to false)
-exemptMilestones: true
-
-# Set to true to ignore issues with an assignee (defaults to false)
-exemptAssignees: true
-
-# Label to use when marking as stale
-staleLabel: stale
-
-# Comment to post when marking as stale. Set to `false` to disable
-markComment: >
-This issue has been automatically marked as stale because it has not had
-recent activity. It will be closed if no further activity occurs. Thank you
-for your participation and understanding.
-
-# Limit the number of actions per hour, from 1-30. Default is 30
-limitPerRun: 1
-
-# Limit to only `issues` or `pulls`
-only: issues
.github/workflows/main.yml (159 lines changed, vendored)
@@ -1,159 +0,0 @@
-name: build
-
-on: [push, pull_request]
-
-env:
-# color output for pytest and tox
-PYTEST_ADDOPTS: "--color=yes"
-PY_COLORS: 1
-
-jobs:
-test:
-name: Test - ${{ matrix.config.python }} - ${{ matrix.config.os }}
-runs-on: ${{ matrix.config.os }}-latest
-
-strategy:
-matrix:
-config:
-- os: ubuntu
-python: 3.5
-- os: ubuntu
-python: 3.6
-- os: ubuntu
-python: 3.7
-- os: ubuntu
-python: 3.8
-- os: macos
-python: 3.7
-- os: windows
-python: 3.7
-
-steps:
-- uses: actions/checkout@v2
-- name: Setup Python ${{ matrix.config.python }}
-uses: actions/setup-python@v1.1.1
-with:
-python-version: ${{ matrix.config.python }}
-- name: Set pip cache (Linux)
-uses: actions/cache@v1
-if: startsWith(runner.os, 'Linux')
-with:
-path: ~/.cache/pip
-key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements/*') }}
-restore-keys: |
-${{ runner.os }}-pip-
-- name: Setup pip cache (macOS)
-uses: actions/cache@v1
-if: startsWith(runner.os, 'macOS')
-with:
-path: ~/Library/Caches/pip
-key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements/*') }}
-restore-keys: |
-${{ runner.os }}-pip-
-- name: Setup pip cache (Windows)
-uses: actions/cache@v1
-if: startsWith(runner.os, 'Windows')
-with:
-path: ~\AppData\Local\pip\Cache
-key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements/*') }}
-restore-keys: |
-${{ runner.os }}-pip-
-- name: Install locale (Linux)
-if: startsWith(runner.os, 'Linux')
-run: sudo locale-gen fr_FR.UTF-8 tr_TR.UTF-8
-- name: Install pandoc
-uses: r-lib/actions/setup-pandoc@v1
-with:
-pandoc-version: "2.9.2"
-- name: Install tox
-run: python -m pip install -U pip tox
-- name: Info
-run: |
-echo "===== PYTHON ====="
-python --version
-echo "===== PANDOC ====="
-pandoc --version | head -2
-- name: Run tests
-run: tox -e py${{ matrix.config.python }}
-
-
-lint:
-name: Lint
-runs-on: ubuntu-latest
-
-steps:
-- uses: actions/checkout@v2
-- name: Setup Python
-uses: actions/setup-python@v1.1.1
-with:
-python-version: 3.6
-- name: Set pip cache (Linux)
-uses: actions/cache@v1
-if: startsWith(runner.os, 'Linux')
-with:
-path: ~/.cache/pip
-key: ${{ runner.os }}-pip-${{ hashFiles('requirements/*') }}
-restore-keys: |
-${{ runner.os }}-pip-
-- name: Install tox
-run: python -m pip install -U pip tox
-- name: Check
-run: tox -e flake8
-
-
-docs:
-name: Build docs
-runs-on: ubuntu-latest
-
-steps:
-- uses: actions/checkout@v2
-- name: Setup Python
-uses: actions/setup-python@v1.1.1
-with:
-python-version: 3.6
-- name: Set pip cache (Linux)
-uses: actions/cache@v1
-if: startsWith(runner.os, 'Linux')
-with:
-path: ~/.cache/pip
-key: ${{ runner.os }}-pip-${{ hashFiles('requirements/*') }}
-restore-keys: |
-${{ runner.os }}-pip-
-- name: Install tox
-run: python -m pip install -U pip tox
-- name: Check
-run: tox -e docs
-
-
-deploy:
-name: Deploy
-needs: [test, lint, docs]
-runs-on: ubuntu-latest
-if: ${{ github.ref=='refs/heads/master' && github.event_name!='pull_request' }}
-
-steps:
-- uses: actions/checkout@v2
-- name: Setup Python
-uses: actions/setup-python@v1.1.1
-with:
-python-version: 3.7
-- name: Check release
-id: check_release
-run: |
-python -m pip install pip --upgrade
-pip install poetry
-pip install githubrelease
-pip install --pre autopub
-echo "##[set-output name=release;]$(autopub check)"
-- name: Publish
-if: ${{ steps.check_release.outputs.release=='' }}
-env:
-GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}
-PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
-run: |
-git remote set-url origin https://$GITHUB_TOKEN@github.com/${{ github.repository }}
-autopub prepare
-poetry build
-autopub commit
-autopub githubrelease
-poetry publish -u __token__ -p $PYPI_PASSWORD
.gitignore (19 lines changed, vendored)
@@ -1,18 +1 @@
-*.egg-info
-.*.swp
-.*.swo
-*.pyc
-.DS_Store
-docs/_build
-docs/fr/_build
-build
-dist
-tags
-.tox
-.coverage
-htmlcov
-*.orig
-venv
-samples/output
-*.pem
-poetry.lock
+node_modules/
.mailmap (24 lines changed)
@@ -1,24 +0,0 @@
-Alexis Métaireau <alexis@notmyidea.org>
-Alexis Métaireau <alexis@notmyidea.org> <alexis, notmyidea, org>
-Alexis Métaireau <alexis@notmyidea.org> <ametaireau@gmail.com>
-Axel Haustant <noirbizarre@gmail.com> <axel.haustant.ext@mappy.com>
-Axel Haustant <noirbizarre@gmail.com> <axel.haustant@valtech.fr>
-Dave Mankoff <mankyd@gmail.com>
-Feth Arezki <feth@tuttu.info>
-Guillaume <guillaume@lame.homelinux.com>
-Guillaume <guillaume@lame.homelinux.com> <guillaume@mint.(none)>
-Guillaume B <guitreize@gmail.com>
-Guillermo López <guilan70@hotmail.com>
-Guillermo López <guilan70@hotmail.com> <guillermo.lopez@outlook.com>
-Jomel Imperio <jimperio@gmail.com>
-Justin Mayer <entrop@gmail.com>
-Justin Mayer <entrop@gmail.com> <entroP@gmail.com>
-Marco Milanesi <kpanic@gnufunk.org> <marcom@openquake.org>
-Massimo Santini <santini@dsi.unimi.it> <santini@spillane.docenti.dsi.unimi.it>
-Rémy HUBSCHER <hubscher.remy@gmail.com> <remy.hubscher@ionyse.com>
-Simon Conseil <contact@saimon.org>
-Simon Liedtke <liedtke.simon@googlemail.com>
-Skami18 <skami@skami-laptop.dyndns.org>
-Stuart Colville <muffinresearchlabs@gmail.com> <muffinresearch@gmail.com>
-Stéphane Bunel <stephane@lutetium.(none)>
-tBunnyMan <WagThatTail@Me.com>
@@ -1,20 +0,0 @@
-# See https://pre-commit.com/hooks.html for info on hooks
-repos:
-- repo: https://github.com/pre-commit/pre-commit-hooks
-rev: v2.5.0
-hooks:
-- id: check-added-large-files
-- id: check-ast
-- id: check-toml
-- id: check-yaml
-- id: debug-statements
-- id: detect-private-key
-- id: end-of-file-fixer
-- id: trailing-whitespace
-- id: flake8
-name: Flake8 on commit diff
-description: This hook limits Flake8 checks to changed lines of code.
-entry: bash
-args: [-c, 'git diff HEAD | flake8 --diff --max-line-length=88']
-
-exclude: ^pelican/tests/output/
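The removed hook configuration above was consumed by the pre-commit tool. As a point of reference only (generic pre-commit usage, not part of this diff), hooks defined in such a file are typically activated and exercised with:

    pip install pre-commit        # install the hook runner
    pre-commit install            # register the hooks as a Git pre-commit hook
    pre-commit run --all-files    # run every configured hook against the whole tree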
.travis.yml (53 lines changed)
@@ -1,53 +0,0 @@
-language: python
-python:
-- "3.6"
-env:
-global:
-- PYPI_USERNAME=autopub
-- secure: "h5V/+YL+CrqvfAesNkSb824Ngk5x+f0eFzj/LBbmnzjvArKAmc6R6WGyx8SDD7WF/PlaTf0M1fH3a7pjIS8Ee+TS1Rb0Lt1HPqUs1yntg1+Js2ZQp3p20wfsDc+bZ4/2g8xLsSMv1EJ4np7/GJ5fXqpSxjr/Xs5LYA7ZLwNNwDw="
-- secure: "GiDFfmjH7uzYNnkjQMV/mIkbRdmgkGmtbFPeaj9taBNA5tPp3IBt3GOOS6UL/zm9xiwu9Xo6sxZWkGzY19Hsdv28YPH34N3abo0QSnz4IGiHs152Hi7Qi6Tb0QkT5D3OxuSIm8LmFL7+su89Q7vBFowrT6HL1Mn8CDDWSj3eqbo="
-- TWINE_USERNAME=$PYPI_USERNAME
-- TWINE_PASSWORD=$PYPI_PASSWORD
-matrix:
-- TOX_ENV=docs
-- TOX_ENV=flake8
-- TOX_ENV=py3.5
-- TOX_ENV=py3.6
-matrix:
-include:
-- python: 3.7
-sudo: true
-dist: xenial
-env:
-- TOX_ENV=py3.7
-addons:
-apt_packages:
-- pandoc
-before_install:
-- sudo apt-get update -qq
-- sudo locale-gen fr_FR.UTF-8 tr_TR.UTF-8
-install:
-- pip install tox==2.5.0
-script: tox -e $TOX_ENV
-before_deploy:
-- 'if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then travis_terminate 0; fi'
-- pip install githubrelease
-- pip install --pre autopub
-- autopub check || travis_terminate 0
-- pip install poetry
-- pip install twine
-- git checkout ${TRAVIS_BRANCH}
-- git remote set-url origin https://$GITHUB_TOKEN@github.com/$TRAVIS_REPO_SLUG
-deploy:
-provider: script
-script: autopub deploy
-skip_cleanup: true
-on:
-branch: master
-python: "3.7"
-# The channel name "irc.freenode.org#pelican" is encrypted against getpelican/pelican to prevent IRC spam of forks
-notifications:
-irc:
-channels:
-- secure: "JP57f61QovrhmLoAF6oPOzIK2aXGfSO06FHg7yiuKBOWMiaxQejZUGJX919muCLhWJXDugsviIqCMoAWwNV3o1WQbqIr+G5TR+N9MrtCs4Zi6vpGj09bR8giKUKx+PPKEoe1Ew56E4y2LxzGO4Lj9hZx8M2YVdwPNWrWZgp6WXE="
-on_success: change
CONTRIBUTING.rst (158 lines changed)
@@ -1,158 +0,0 @@
-Filing issues
-=============
-
-* Before you file an issue, try `asking for help`_ first.
-* If determined to file an issue, first check for `existing issues`_, including
-closed issues.
-
-.. _`asking for help`: `How to get help`_
-.. _`existing issues`: https://github.com/getpelican/pelican/issues
-
-How to get help
-===============
-
-Before you ask for help, please make sure you do the following:
-
-1. Read the documentation_ thoroughly. If in a hurry, at least use the search
-field that is provided at top-left on the documentation_ pages. Make sure
-you read the docs for the Pelican version you are using.
-2. Use a search engine (e.g., DuckDuckGo, Google) to search for a solution to
-your problem. Someone may have already found a solution, perhaps in the
-form of a plugin_ or a specific combination of settings.
-
-3. Try reproducing the issue in a clean environment, ensuring you are using:
-
-* latest Pelican release (or an up-to-date Git clone of Pelican master)
-* latest releases of libraries used by Pelican
-* no plugins or only those related to the issue
-
-**NOTE:** The most common sources of problems are anomalies in (1) themes,
-(2) settings files, and (3) ``make``/``invoke`` automation wrappers. If you can't
-reproduce your problem when using the following steps to generate your site,
-then the problem is almost certainly with your chosen theme and/or settings
-file (and not Pelican itself)::
-
-cd ~/projects/your-site
-git clone https://github.com/getpelican/pelican ~/projects/pelican
-pelican content -s ~/projects/pelican/samples/pelican.conf.py -t ~/projects/pelican/pelican/themes/notmyidea
-
-If despite the above efforts you still cannot resolve your problem, be sure to
-include in your inquiry the following information, preferably in the form of
-links to content uploaded to a `paste service`_, GitHub repository, or other
-publicly-accessible location:
-
-* Describe what version of Pelican you are running (output of ``pelican --version``
-or the HEAD commit hash if you cloned the repo) and how exactly you installed
-it (the full command you used, e.g. ``pip install pelican``).
-* If you are looking for a way to get some end result, prepare a detailed
-description of what the end result should look like (preferably in the form of
-an image or a mock-up page) and explain in detail what you have done so far to
-achieve it.
-* If you are trying to solve some issue, prepare a detailed description of how
-to reproduce the problem. If the issue cannot be easily reproduced, it cannot
-be debugged by developers or volunteers. Describe only the **minimum steps**
-necessary to reproduce it (no extra plugins, etc.).
-* Upload your settings file or any other custom code that would enable people to
-reproduce the problem or to see what you have already tried to achieve the
-desired end result.
-* Upload detailed and **complete** output logs and backtraces (remember to add
-the ``--debug`` flag: ``pelican --debug content [...]``)
-
-.. _documentation: https://docs.getpelican.com/
-.. _`paste service`: https://dpaste.de/
-
-Once the above preparation is ready, you can contact people willing to help via
-(preferably) the ``#pelican`` IRC channel or send a message to ``authors at getpelican dot com``.
-Remember to include all the information you prepared.
-
-The #pelican IRC channel
-------------------------
-
-* Because of differing time zones, you may not get an immediate response to your
-question, but please be patient and stay logged into IRC — someone will almost
-always respond if you wait long enough (it may take a few hours).
-* If you don't have an IRC client handy, use the webchat_.
-* You can direct your IRC client to the channel using this `IRC link`_ or you
-can manually join the ``#pelican`` IRC channel on the `freenode IRC network`_.
-
-.. _webchat: https://kiwiirc.com/client/irc.freenode.net/?#pelican
-.. _`IRC link`: irc://irc.freenode.net/pelican
-.. _`freenode IRC network`: https://freenode.net/
-
-
-Contributing code
-=================
-
-Before you submit a contribution, please ask whether it is desired so that you
-don't spend a lot of time working on something that would be rejected for a
-known reason. Consider also whether your new feature might be better suited as
-a plugin_ — you can `ask for help`_ to make that determination.
-
-Using Git and GitHub
---------------------
-
-* `Create a new git branch`_ specific to your change (as opposed to making
-your commits in the master branch).
-* **Don't put multiple unrelated fixes/features in the same branch / pull request.**
-For example, if you're working on a new feature and find a bugfix that
-doesn't *require* your new feature, **make a new distinct branch and pull
-request** for the bugfix.
-* Add a ``RELEASE.md`` file in the root of the project that contains the
-release type (major, minor, patch) and a summary of the changes that will be
-used as the release changelog entry. For example::
-
-Release type: minor
-
-Reload browser window upon changes to content, settings, or theme
-
-* Check for unnecessary whitespace via ``git diff --check`` before committing.
-* First line of your commit message should start with present-tense verb, be 50
-characters or less, and include the relevant issue number(s) if applicable.
-*Example:* ``Ensure proper PLUGIN_PATH behavior. Refs #428.`` If the commit
-*completely fixes* an existing bug report, please use ``Fixes #585`` or ``Fix
-#585`` syntax (so the relevant issue is automatically closed upon PR merge).
-* After the first line of the commit message, add a blank line and then a more
-detailed explanation (when relevant).
-* `Squash your commits`_ to eliminate merge commits and ensure a clean and
-readable commit history.
-* If you have previously filed a GitHub issue and want to contribute code that
-addresses that issue, **please use** ``hub pull-request`` instead of using
-GitHub's web UI to submit the pull request. This isn't an absolute
-requirement, but makes the maintainers' lives much easier! Specifically:
-`install hub <https://github.com/github/hub/#installation>`_ and then run
-`hub pull-request -i [ISSUE] <https://hub.github.com/hub-pull-request.1.html>`_
-to turn your GitHub issue into a pull request containing your code.
-* After you have issued a pull request, the continuous integration (CI) system
-will run the test suite for all supported Python versions and check for PEP8
-compliance. If any of these checks fail, you should fix them. (If tests fail
-on the CI system but seem to pass locally, ensure that local test runs aren't
-skipping any tests.)
-
-Contribution quality standards
-------------------------------
-
-* Adhere to `PEP8 coding standards`_. This can be eased via the `pycodestyle
-<https://pypi.org/project/pycodestyle>`_ or `flake8
-<https://pypi.org/project/flake8/>`_ tools, the latter of which in
-particular will give you some useful hints about ways in which the
-code/formatting can be improved. We try to keep line length within the
-79-character maximum specified by PEP8. Because that can sometimes compromise
-readability, the hard/enforced maximum is 88 characters.
-* Ensure your code is compatible with the `officially-supported Python releases`_.
-* Add docs and tests for your changes. Undocumented and untested features will
-not be accepted.
-* `Run all the tests`_ **on all versions of Python supported by Pelican** to
-ensure nothing was accidentally broken.
-
-Check out our `Git Tips`_ page or `ask for help`_ if you
-need assistance or have any questions about these guidelines.
-
-.. _`plugin`: https://docs.getpelican.com/en/latest/plugins.html
-.. _`#pelican IRC channel`: https://webchat.freenode.net/?channels=pelican&uio=d4
-.. _`Create a new git branch`: https://github.com/getpelican/pelican/wiki/Git-Tips#making-your-changes
-.. _`Squash your commits`: https://github.com/getpelican/pelican/wiki/Git-Tips#squashing-commits
-.. _`Run all the tests`: https://docs.getpelican.com/en/latest/contribute.html#running-the-test-suite
-.. _`Git Tips`: https://github.com/getpelican/pelican/wiki/Git-Tips
-.. _`PEP8 coding standards`: https://www.python.org/dev/peps/pep-0008/
-.. _`ask for help`: `How to get help`_
-.. _`officially-supported Python releases`: https://devguide.python.org/#status-of-python-branches
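The removed CONTRIBUTING.rst above spells out the contributor workflow: topic branch, a RELEASE.md note, a whitespace check, a concise commit message, and hub pull-request. A rough shell sketch of that workflow, using placeholder branch and issue numbers taken from the document's own examples, might look like:

    git checkout -b fix-plugin-path                    # placeholder branch name
    # ... edit code ...
    printf 'Release type: patch\n\nEnsure proper PLUGIN_PATH behavior\n' > RELEASE.md
    git add RELEASE.md
    git diff --check                                   # catch unnecessary whitespace before committing
    git commit -am "Ensure proper PLUGIN_PATH behavior. Refs #428."
    hub pull-request -i 428                            # turn existing issue #428 into the pull request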
LICENSE (681 lines changed)
@@ -1,661 +1,20 @@
-GNU AFFERO GENERAL PUBLIC LICENSE
+MIT License
-Version 3, 19 November 2007
+
-
+Copyright (c) 2024 Oliver Ladner
-Copyright (C) 2007 Free Software Foundation, Inc. <https://www.fsf.org/>
+
-Everyone is permitted to copy and distribute verbatim copies
+Permission is hereby granted, free of charge, to any person obtaining a copy of
-of this license document, but changing it is not allowed.
+this software and associated documentation files (the "Software"), to deal in
-
+the Software without restriction, including without limitation the rights to
-Preamble
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-
+the Software, and to permit persons to whom the Software is furnished to do so,
-The GNU Affero General Public License is a free, copyleft license for
+subject to the following conditions:
-software and other kinds of works, specifically designed to ensure
+
-cooperation with the community in the case of network server software.
+The above copyright notice and this permission notice shall be included in all
-
+copies or substantial portions of the Software.
-The licenses for most software and other practical works are designed
+
-to take away your freedom to share and change the works. By contrast,
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-our General Public Licenses are intended to guarantee your freedom to
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-share and change all versions of a program--to make sure it remains free
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-software for all its users.
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-When we speak of free software, we are referring to freedom, not
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
-
-Developers that use our General Public Licenses protect your rights
-with two steps: (1) assert copyright on the software, and (2) offer
-you this License which gives you legal permission to copy, distribute
-and/or modify the software.
-
-A secondary benefit of defending all users' freedom is that
-improvements made in alternate versions of the program, if they
-receive widespread use, become available for other developers to
-incorporate. Many developers of free software are heartened and
-encouraged by the resulting cooperation. However, in the case of
-software used on network servers, this result may fail to come about.
-The GNU General Public License permits making a modified version and
-letting the public access it on a server without ever releasing its
-source code to the public.
-
-The GNU Affero General Public License is designed specifically to
-ensure that, in such cases, the modified source code becomes available
-to the community. It requires the operator of a network server to
-provide the source code of the modified version running there to the
-users of that server. Therefore, public use of a modified version, on
-a publicly accessible server, gives the public access to the source
-code of the modified version.
-
-An older license, called the Affero General Public License and
-published by Affero, was designed to accomplish similar goals. This is
-a different license, not a version of the Affero GPL, but Affero has
-released a new version of the Affero GPL which permits relicensing under
-this license.
-
-The precise terms and conditions for copying, distribution and
-modification follow.
-
-TERMS AND CONDITIONS
-
-0. Definitions.
-
-"This License" refers to version 3 of the GNU Affero General Public License.
-
-"Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
-"The Program" refers to any copyrightable work licensed under this
-License. Each licensee is addressed as "you". "Licensees" and
-"recipients" may be individuals or organizations.
-
-To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy. The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
-A "covered work" means either the unmodified Program or a work based
-on the Program.
-
-To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy. Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
-To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies. Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
-An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License. If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
-1. Source Code.
-
-The "source code" for a work means the preferred form of the work
-for making modifications to it. "Object code" means any non-source
-form of a work.
-
-A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
-The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form. A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
-The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities. However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work. For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
-The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
-The Corresponding Source for a work in source code form is that
-same work.
-
-2. Basic Permissions.
-
-All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met. This License explicitly affirms your unlimited
-permission to run the unmodified Program. The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work. This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
-You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force. You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright. Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
-Conveying under any other circumstances is permitted solely under
-the conditions stated below. Sublicensing is not allowed; section 10
-makes it unnecessary.
-
-3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
-No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
-When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
-4. Conveying Verbatim Copies.
-
-You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
-You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
-5. Conveying Modified Source Versions.
-
-You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
-a) The work must carry prominent notices stating that you modified
-it, and giving a relevant date.
-
-b) The work must carry prominent notices stating that it is
-released under this License and any conditions added under section
-7. This requirement modifies the requirement in section 4 to
-"keep intact all notices".
-
-c) You must license the entire work, as a whole, under this
-License to anyone who comes into possession of a copy. This
-License will therefore apply, along with any applicable section 7
-additional terms, to the whole of the work, and all its parts,
-regardless of how they are packaged. This License gives no
-permission to license the work in any other way, but it does not
-invalidate such permission if you have separately received it.
-
-d) If the work has interactive user interfaces, each must display
-Appropriate Legal Notices; however, if the Program has interactive
-interfaces that do not display Appropriate Legal Notices, your
-work need not make them do so.
-
-A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
-6. Conveying Non-Source Forms.
-
-You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
-a) Convey the object code in, or embodied in, a physical product
-(including a physical distribution medium), accompanied by the
-Corresponding Source fixed on a durable physical medium
-customarily used for software interchange.
-
-b) Convey the object code in, or embodied in, a physical product
-(including a physical distribution medium), accompanied by a
-written offer, valid for at least three years and valid for as
-long as you offer spare parts or customer support for that product
-model, to give anyone who possesses the object code either (1) a
-copy of the Corresponding Source for all the software in the
-product that is covered by this License, on a durable physical
-medium customarily used for software interchange, for a price no
-more than your reasonable cost of physically performing this
-conveying of source, or (2) access to copy the
-Corresponding Source from a network server at no charge.
-
-c) Convey individual copies of the object code with a copy of the
-written offer to provide the Corresponding Source. This
-alternative is allowed only occasionally and noncommercially, and
-only if you received the object code with such an offer, in accord
-with subsection 6b.
-
-d) Convey the object code by offering access from a designated
-place (gratis or for a charge), and offer equivalent access to the
-Corresponding Source in the same way through the same place at no
-further charge. You need not require recipients to copy the
-Corresponding Source along with the object code. If the place to
-copy the object code is a network server, the Corresponding Source
-may be on a different server (operated by you or a third party)
-that supports equivalent copying facilities, provided you maintain
-clear directions next to the object code saying where to find the
-Corresponding Source. Regardless of what server hosts the
-Corresponding Source, you remain obligated to ensure that it is
-available for as long as needed to satisfy these requirements.
-
-e) Convey the object code using peer-to-peer transmission, provided
-you inform other peers where the object code and Corresponding
-Source of the work are being offered to the general public at no
-charge under subsection 6d.
-
-A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
-A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling. In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage. For a particular
|
|
||||||
product received by a particular user, "normally used" refers to a
|
|
||||||
typical or common use of that class of product, regardless of the status
|
|
||||||
of the particular user or of the way in which the particular user
|
|
||||||
actually uses, or expects or is expected to use, the product. A product
|
|
||||||
is a consumer product regardless of whether the product has substantial
|
|
||||||
commercial, industrial or non-consumer uses, unless such uses represent
|
|
||||||
the only significant mode of use of the product.
|
|
||||||
|
|
||||||
"Installation Information" for a User Product means any methods,
|
|
||||||
procedures, authorization keys, or other information required to install
|
|
||||||
and execute modified versions of a covered work in that User Product from
|
|
||||||
a modified version of its Corresponding Source. The information must
|
|
||||||
suffice to ensure that the continued functioning of the modified object
|
|
||||||
code is in no case prevented or interfered with solely because
|
|
||||||
modification has been made.
|
|
||||||
|
|
||||||
If you convey an object code work under this section in, or with, or
|
|
||||||
specifically for use in, a User Product, and the conveying occurs as
|
|
||||||
part of a transaction in which the right of possession and use of the
|
|
||||||
User Product is transferred to the recipient in perpetuity or for a
|
|
||||||
fixed term (regardless of how the transaction is characterized), the
|
|
||||||
Corresponding Source conveyed under this section must be accompanied
|
|
||||||
by the Installation Information. But this requirement does not apply
|
|
||||||
if neither you nor any third party retains the ability to install
|
|
||||||
modified object code on the User Product (for example, the work has
|
|
||||||
been installed in ROM).
|
|
||||||
|
|
||||||
The requirement to provide Installation Information does not include a
|
|
||||||
requirement to continue to provide support service, warranty, or updates
|
|
||||||
for a work that has been modified or installed by the recipient, or for
|
|
||||||
the User Product in which it has been modified or installed. Access to a
|
|
||||||
network may be denied when the modification itself materially and
|
|
||||||
adversely affects the operation of the network or violates the rules and
|
|
||||||
protocols for communication across the network.
|
|
||||||
|
|
||||||
Corresponding Source conveyed, and Installation Information provided,
|
|
||||||
in accord with this section must be in a format that is publicly
|
|
||||||
documented (and with an implementation available to the public in
|
|
||||||
source code form), and must require no special password or key for
|
|
||||||
unpacking, reading or copying.
|
|
||||||
|
|
||||||
7. Additional Terms.
|
|
||||||
|
|
||||||
"Additional permissions" are terms that supplement the terms of this
|
|
||||||
License by making exceptions from one or more of its conditions.
|
|
||||||
Additional permissions that are applicable to the entire Program shall
|
|
||||||
be treated as though they were included in this License, to the extent
|
|
||||||
that they are valid under applicable law. If additional permissions
|
|
||||||
apply only to part of the Program, that part may be used separately
|
|
||||||
under those permissions, but the entire Program remains governed by
|
|
||||||
this License without regard to the additional permissions.
|
|
||||||
|
|
||||||
When you convey a copy of a covered work, you may at your option
|
|
||||||
remove any additional permissions from that copy, or from any part of
|
|
||||||
it. (Additional permissions may be written to require their own
|
|
||||||
removal in certain cases when you modify the work.) You may place
|
|
||||||
additional permissions on material, added by you to a covered work,
|
|
||||||
for which you have or can give appropriate copyright permission.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, for material you
|
|
||||||
add to a covered work, you may (if authorized by the copyright holders of
|
|
||||||
that material) supplement the terms of this License with terms:
|
|
||||||
|
|
||||||
a) Disclaiming warranty or limiting liability differently from the
|
|
||||||
terms of sections 15 and 16 of this License; or
|
|
||||||
|
|
||||||
b) Requiring preservation of specified reasonable legal notices or
|
|
||||||
author attributions in that material or in the Appropriate Legal
|
|
||||||
Notices displayed by works containing it; or
|
|
||||||
|
|
||||||
c) Prohibiting misrepresentation of the origin of that material, or
|
|
||||||
requiring that modified versions of such material be marked in
|
|
||||||
reasonable ways as different from the original version; or
|
|
||||||
|
|
||||||
d) Limiting the use for publicity purposes of names of licensors or
|
|
||||||
authors of the material; or
|
|
||||||
|
|
||||||
e) Declining to grant rights under trademark law for use of some
|
|
||||||
trade names, trademarks, or service marks; or
|
|
||||||
|
|
||||||
f) Requiring indemnification of licensors and authors of that
|
|
||||||
material by anyone who conveys the material (or modified versions of
|
|
||||||
it) with contractual assumptions of liability to the recipient, for
|
|
||||||
any liability that these contractual assumptions directly impose on
|
|
||||||
those licensors and authors.
|
|
||||||
|
|
||||||
All other non-permissive additional terms are considered "further
|
|
||||||
restrictions" within the meaning of section 10. If the Program as you
|
|
||||||
received it, or any part of it, contains a notice stating that it is
|
|
||||||
governed by this License along with a term that is a further
|
|
||||||
restriction, you may remove that term. If a license document contains
|
|
||||||
a further restriction but permits relicensing or conveying under this
|
|
||||||
License, you may add to a covered work material governed by the terms
|
|
||||||
of that license document, provided that the further restriction does
|
|
||||||
not survive such relicensing or conveying.
|
|
||||||
|
|
||||||
If you add terms to a covered work in accord with this section, you
|
|
||||||
must place, in the relevant source files, a statement of the
|
|
||||||
additional terms that apply to those files, or a notice indicating
|
|
||||||
where to find the applicable terms.
|
|
||||||
|
|
||||||
Additional terms, permissive or non-permissive, may be stated in the
|
|
||||||
form of a separately written license, or stated as exceptions;
|
|
||||||
the above requirements apply either way.
|
|
||||||
|
|
||||||
8. Termination.
|
|
||||||
|
|
||||||
You may not propagate or modify a covered work except as expressly
|
|
||||||
provided under this License. Any attempt otherwise to propagate or
|
|
||||||
modify it is void, and will automatically terminate your rights under
|
|
||||||
this License (including any patent licenses granted under the third
|
|
||||||
paragraph of section 11).
|
|
||||||
|
|
||||||
However, if you cease all violation of this License, then your
|
|
||||||
license from a particular copyright holder is reinstated (a)
|
|
||||||
provisionally, unless and until the copyright holder explicitly and
|
|
||||||
finally terminates your license, and (b) permanently, if the copyright
|
|
||||||
holder fails to notify you of the violation by some reasonable means
|
|
||||||
prior to 60 days after the cessation.
|
|
||||||
|
|
||||||
Moreover, your license from a particular copyright holder is
|
|
||||||
reinstated permanently if the copyright holder notifies you of the
|
|
||||||
violation by some reasonable means, this is the first time you have
|
|
||||||
received notice of violation of this License (for any work) from that
|
|
||||||
copyright holder, and you cure the violation prior to 30 days after
|
|
||||||
your receipt of the notice.
|
|
||||||
|
|
||||||
Termination of your rights under this section does not terminate the
|
|
||||||
licenses of parties who have received copies or rights from you under
|
|
||||||
this License. If your rights have been terminated and not permanently
|
|
||||||
reinstated, you do not qualify to receive new licenses for the same
|
|
||||||
material under section 10.
|
|
||||||
|
|
||||||
9. Acceptance Not Required for Having Copies.
|
|
||||||
|
|
||||||
You are not required to accept this License in order to receive or
|
|
||||||
run a copy of the Program. Ancillary propagation of a covered work
|
|
||||||
occurring solely as a consequence of using peer-to-peer transmission
|
|
||||||
to receive a copy likewise does not require acceptance. However,
|
|
||||||
nothing other than this License grants you permission to propagate or
|
|
||||||
modify any covered work. These actions infringe copyright if you do
|
|
||||||
not accept this License. Therefore, by modifying or propagating a
|
|
||||||
covered work, you indicate your acceptance of this License to do so.
|
|
||||||
|
|
||||||
10. Automatic Licensing of Downstream Recipients.
|
|
||||||
|
|
||||||
Each time you convey a covered work, the recipient automatically
|
|
||||||
receives a license from the original licensors, to run, modify and
|
|
||||||
propagate that work, subject to this License. You are not responsible
|
|
||||||
for enforcing compliance by third parties with this License.
|
|
||||||
|
|
||||||
An "entity transaction" is a transaction transferring control of an
|
|
||||||
organization, or substantially all assets of one, or subdividing an
|
|
||||||
organization, or merging organizations. If propagation of a covered
|
|
||||||
work results from an entity transaction, each party to that
|
|
||||||
transaction who receives a copy of the work also receives whatever
|
|
||||||
licenses to the work the party's predecessor in interest had or could
|
|
||||||
give under the previous paragraph, plus a right to possession of the
|
|
||||||
Corresponding Source of the work from the predecessor in interest, if
|
|
||||||
the predecessor has it or can get it with reasonable efforts.
|
|
||||||
|
|
||||||
You may not impose any further restrictions on the exercise of the
|
|
||||||
rights granted or affirmed under this License. For example, you may
|
|
||||||
not impose a license fee, royalty, or other charge for exercise of
|
|
||||||
rights granted under this License, and you may not initiate litigation
|
|
||||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
|
||||||
any patent claim is infringed by making, using, selling, offering for
|
|
||||||
sale, or importing the Program or any portion of it.
|
|
||||||
|
|
||||||
11. Patents.
|
|
||||||
|
|
||||||
A "contributor" is a copyright holder who authorizes use under this
|
|
||||||
License of the Program or a work on which the Program is based. The
|
|
||||||
work thus licensed is called the contributor's "contributor version".
|
|
||||||
|
|
||||||
A contributor's "essential patent claims" are all patent claims
|
|
||||||
owned or controlled by the contributor, whether already acquired or
|
|
||||||
hereafter acquired, that would be infringed by some manner, permitted
|
|
||||||
by this License, of making, using, or selling its contributor version,
|
|
||||||
but do not include claims that would be infringed only as a
|
|
||||||
consequence of further modification of the contributor version. For
|
|
||||||
purposes of this definition, "control" includes the right to grant
|
|
||||||
patent sublicenses in a manner consistent with the requirements of
|
|
||||||
this License.
|
|
||||||
|
|
||||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
|
||||||
patent license under the contributor's essential patent claims, to
|
|
||||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
|
||||||
propagate the contents of its contributor version.
|
|
||||||
|
|
||||||
In the following three paragraphs, a "patent license" is any express
|
|
||||||
agreement or commitment, however denominated, not to enforce a patent
|
|
||||||
(such as an express permission to practice a patent or covenant not to
|
|
||||||
sue for patent infringement). To "grant" such a patent license to a
|
|
||||||
party means to make such an agreement or commitment not to enforce a
|
|
||||||
patent against the party.
|
|
||||||
|
|
||||||
If you convey a covered work, knowingly relying on a patent license,
|
|
||||||
and the Corresponding Source of the work is not available for anyone
|
|
||||||
to copy, free of charge and under the terms of this License, through a
|
|
||||||
publicly available network server or other readily accessible means,
|
|
||||||
then you must either (1) cause the Corresponding Source to be so
|
|
||||||
available, or (2) arrange to deprive yourself of the benefit of the
|
|
||||||
patent license for this particular work, or (3) arrange, in a manner
|
|
||||||
consistent with the requirements of this License, to extend the patent
|
|
||||||
license to downstream recipients. "Knowingly relying" means you have
|
|
||||||
actual knowledge that, but for the patent license, your conveying the
|
|
||||||
covered work in a country, or your recipient's use of the covered work
|
|
||||||
in a country, would infringe one or more identifiable patents in that
|
|
||||||
country that you have reason to believe are valid.
|
|
||||||
|
|
||||||
If, pursuant to or in connection with a single transaction or
|
|
||||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
|
||||||
covered work, and grant a patent license to some of the parties
|
|
||||||
receiving the covered work authorizing them to use, propagate, modify
|
|
||||||
or convey a specific copy of the covered work, then the patent license
|
|
||||||
you grant is automatically extended to all recipients of the covered
|
|
||||||
work and works based on it.
|
|
||||||
|
|
||||||
A patent license is "discriminatory" if it does not include within
|
|
||||||
the scope of its coverage, prohibits the exercise of, or is
|
|
||||||
conditioned on the non-exercise of one or more of the rights that are
|
|
||||||
specifically granted under this License. You may not convey a covered
|
|
||||||
work if you are a party to an arrangement with a third party that is
|
|
||||||
in the business of distributing software, under which you make payment
|
|
||||||
to the third party based on the extent of your activity of conveying
|
|
||||||
the work, and under which the third party grants, to any of the
|
|
||||||
parties who would receive the covered work from you, a discriminatory
|
|
||||||
patent license (a) in connection with copies of the covered work
|
|
||||||
conveyed by you (or copies made from those copies), or (b) primarily
|
|
||||||
for and in connection with specific products or compilations that
|
|
||||||
contain the covered work, unless you entered into that arrangement,
|
|
||||||
or that patent license was granted, prior to 28 March 2007.
|
|
||||||
|
|
||||||
Nothing in this License shall be construed as excluding or limiting
|
|
||||||
any implied license or other defenses to infringement that may
|
|
||||||
otherwise be available to you under applicable patent law.
|
|
||||||
|
|
||||||
12. No Surrender of Others' Freedom.
|
|
||||||
|
|
||||||
If conditions are imposed on you (whether by court order, agreement or
|
|
||||||
otherwise) that contradict the conditions of this License, they do not
|
|
||||||
excuse you from the conditions of this License. If you cannot convey a
|
|
||||||
covered work so as to satisfy simultaneously your obligations under this
|
|
||||||
License and any other pertinent obligations, then as a consequence you may
|
|
||||||
not convey it at all. For example, if you agree to terms that obligate you
|
|
||||||
to collect a royalty for further conveying from those to whom you convey
|
|
||||||
the Program, the only way you could satisfy both those terms and this
|
|
||||||
License would be to refrain entirely from conveying the Program.
|
|
||||||
|
|
||||||
13. Remote Network Interaction; Use with the GNU General Public License.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, if you modify the
|
|
||||||
Program, your modified version must prominently offer all users
|
|
||||||
interacting with it remotely through a computer network (if your version
|
|
||||||
supports such interaction) an opportunity to receive the Corresponding
|
|
||||||
Source of your version by providing access to the Corresponding Source
|
|
||||||
from a network server at no charge, through some standard or customary
|
|
||||||
means of facilitating copying of software. This Corresponding Source
|
|
||||||
shall include the Corresponding Source for any work covered by version 3
|
|
||||||
of the GNU General Public License that is incorporated pursuant to the
|
|
||||||
following paragraph.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, you have
|
|
||||||
permission to link or combine any covered work with a work licensed
|
|
||||||
under version 3 of the GNU General Public License into a single
|
|
||||||
combined work, and to convey the resulting work. The terms of this
|
|
||||||
License will continue to apply to the part which is the covered work,
|
|
||||||
but the work with which it is combined will remain governed by version
|
|
||||||
3 of the GNU General Public License.
|
|
||||||
|
|
||||||
14. Revised Versions of this License.
|
|
||||||
|
|
||||||
The Free Software Foundation may publish revised and/or new versions of
|
|
||||||
the GNU Affero General Public License from time to time. Such new versions
|
|
||||||
will be similar in spirit to the present version, but may differ in detail to
|
|
||||||
address new problems or concerns.
|
|
||||||
|
|
||||||
Each version is given a distinguishing version number. If the
|
|
||||||
Program specifies that a certain numbered version of the GNU Affero General
|
|
||||||
Public License "or any later version" applies to it, you have the
|
|
||||||
option of following the terms and conditions either of that numbered
|
|
||||||
version or of any later version published by the Free Software
|
|
||||||
Foundation. If the Program does not specify a version number of the
|
|
||||||
GNU Affero General Public License, you may choose any version ever published
|
|
||||||
by the Free Software Foundation.
|
|
||||||
|
|
||||||
If the Program specifies that a proxy can decide which future
|
|
||||||
versions of the GNU Affero General Public License can be used, that proxy's
|
|
||||||
public statement of acceptance of a version permanently authorizes you
|
|
||||||
to choose that version for the Program.
|
|
||||||
|
|
||||||
Later license versions may give you additional or different
|
|
||||||
permissions. However, no additional obligations are imposed on any
|
|
||||||
author or copyright holder as a result of your choosing to follow a
|
|
||||||
later version.
|
|
||||||
|
|
||||||
15. Disclaimer of Warranty.
|
|
||||||
|
|
||||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
|
||||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
|
||||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
|
||||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
|
||||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
||||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
|
||||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
|
||||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
|
||||||
|
|
||||||
16. Limitation of Liability.
|
|
||||||
|
|
||||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
|
||||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
|
||||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
|
||||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
|
||||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
|
||||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
|
||||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
|
||||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
|
||||||
SUCH DAMAGES.
|
|
||||||
|
|
||||||
17. Interpretation of Sections 15 and 16.
|
|
||||||
|
|
||||||
If the disclaimer of warranty and limitation of liability provided
|
|
||||||
above cannot be given local legal effect according to their terms,
|
|
||||||
reviewing courts shall apply local law that most closely approximates
|
|
||||||
an absolute waiver of all civil liability in connection with the
|
|
||||||
Program, unless a warranty or assumption of liability accompanies a
|
|
||||||
copy of the Program in return for a fee.
|
|
||||||
|
|
||||||
END OF TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
How to Apply These Terms to Your New Programs
|
|
||||||
|
|
||||||
If you develop a new program, and you want it to be of the greatest
|
|
||||||
possible use to the public, the best way to achieve this is to make it
|
|
||||||
free software which everyone can redistribute and change under these terms.
|
|
||||||
|
|
||||||
To do so, attach the following notices to the program. It is safest
|
|
||||||
to attach them to the start of each source file to most effectively
|
|
||||||
state the exclusion of warranty; and each file should have at least
|
|
||||||
the "copyright" line and a pointer to where the full notice is found.
|
|
||||||
|
|
||||||
<one line to give the program's name and a brief idea of what it does.>
|
|
||||||
Copyright (C) <year> <name of author>
|
|
||||||
|
|
||||||
This program is free software: you can redistribute it and/or modify
|
|
||||||
it under the terms of the GNU Affero General Public License as published by
|
|
||||||
the Free Software Foundation, either version 3 of the License, or
|
|
||||||
(at your option) any later version.
|
|
||||||
|
|
||||||
This program is distributed in the hope that it will be useful,
|
|
||||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
You should have received a copy of the GNU Affero General Public License
|
|
||||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
Also add information on how to contact you by electronic and paper mail.
|
|
||||||
|
|
||||||
If your software can interact with users remotely through a computer
|
|
||||||
network, you should also make sure that it provides a way for users to
|
|
||||||
get its source. For example, if your program is a web application, its
|
|
||||||
interface could display a "Source" link that leads users to an archive
|
|
||||||
of the code. There are many ways you could offer source, and different
|
|
||||||
solutions will be better for different programs; see section 13 for the
|
|
||||||
specific requirements.
|
|
||||||
|
|
||||||
You should also get your employer (if you work as a programmer) or school,
|
|
||||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
|
||||||
For more information on this, and how to apply and follow the GNU AGPL, see
|
|
||||||
<https://www.gnu.org/licenses/>.
|
|
||||||
|
|
|
||||||
|
|
@ -1,3 +0,0 @@
|
||||||
include *.rst
|
|
||||||
recursive-include pelican *.html *.css *png *.rst *.markdown *.md *.mkd *.xml *.py
|
|
||||||
include LICENSE THANKS docs/changelog.rst pyproject.toml
|
|
||||||
43
README.md
Normal file
43
README.md
Normal file
|
|
@ -0,0 +1,43 @@
|
||||||
|
# lugh Pelican theme
|
||||||
|
|
||||||
|
This theme is based on the [simple theme](https://github.com/getpelican/pelican/tree/main/pelican/themes/simple/templates).
|
||||||
|
It's heavily customized to what I need here, so no efforts have been made to
|
||||||
|
keep it useful for others. Amongst other things, I:
|
||||||
|
|
||||||
|
- removed translations
|
||||||
|
- changed the structure (HTML `<footer>` etc.)
|
||||||
|
|
||||||
|
## Docs
|
||||||
|
|
||||||
|
- [Pelican: how to create your own theme](https://docs.getpelican.com/en/stable/themes.html)
|
||||||
|
- [Tailwind CSS quick start](https://tailwindcss.com/docs/installation)
|
||||||
|
|
||||||
|
## Doing
|
||||||
|
|
||||||
|
### Prepare Pelican development server config
|
||||||
|
|
||||||
|
Adapt Pelican's `publishconf.py` for local development.
|
||||||
|
E.g. `RELATIVE_URLS = False`
|
||||||
|
|
||||||
|
### Install Tailwind CSS Typography plugin
|
||||||
|
|
||||||
|
Typography enables sane defaults for longer texts. In this case, we use it for
|
||||||
|
the body content only, which is always Markdown. Typography is a bit of a beast
|
||||||
|
to configure/align to standard Tailwind.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
npm install -D @tailwindcss/typography
|
||||||
|
```
|
||||||
|
|
||||||
|
### Run the Tailwind build process
|
||||||
|
|
||||||
|
```shell
|
||||||
|
npx tailwindcss -i static/css/in.css -o static/css/out.css --watch
|
||||||
|
```
|
||||||
|
|
||||||
|
### Run Pelican dev server
|
||||||
|
|
||||||
|
```shell
|
||||||
|
conda activate pelican
|
||||||
|
./devserver.sh
|
||||||
|
```
|
||||||
67
README.rst
67
README.rst
|
|
@ -1,67 +0,0 @@
|
||||||
Pelican |build-status| |pypi-version| |repology|
|
|
||||||
=====================================
|
|
||||||
|
|
||||||
Pelican is a static site generator, written in Python_.
|
|
||||||
|
|
||||||
* Write content in reStructuredText_ or Markdown_ using your editor of choice
|
|
||||||
* Includes a simple command line tool to (re)generate site files
|
|
||||||
* Easy to interface with version control systems and web hooks
|
|
||||||
* Completely static output is simple to host anywhere
|
|
||||||
|
|
||||||
|
|
||||||
Features
|
|
||||||
--------
|
|
||||||
|
|
||||||
Pelican currently supports:
|
|
||||||
|
|
||||||
* Chronological content (e.g., articles, blog posts) as well as static pages
|
|
||||||
* Integration with external services (e.g., Google Analytics and Disqus)
|
|
||||||
* Site themes (created using Jinja2_ templates)
|
|
||||||
* Publication of articles in multiple languages
|
|
||||||
* Generation of Atom and RSS feeds
|
|
||||||
* Syntax highlighting via Pygments_
|
|
||||||
* Importing existing content from WordPress, Dotclear, and other services
|
|
||||||
* Fast rebuild times due to content caching and selective output writing
|
|
||||||
|
|
||||||
Check out `Pelican's documentation`_ for further information.
|
|
||||||
|
|
||||||
|
|
||||||
How to get help, contribute, or provide feedback
|
|
||||||
------------------------------------------------
|
|
||||||
|
|
||||||
See our `contribution submission and feedback guidelines <CONTRIBUTING.rst>`_.
|
|
||||||
|
|
||||||
|
|
||||||
Source code
|
|
||||||
-----------
|
|
||||||
|
|
||||||
Pelican's source code is `hosted on GitHub`_. If you feel like hacking,
|
|
||||||
take a look at `Pelican's internals`_.
|
|
||||||
|
|
||||||
|
|
||||||
Why the name "Pelican"?
|
|
||||||
-----------------------
|
|
||||||
|
|
||||||
"Pelican" is an anagram of *calepin*, which means "notebook" in French.
|
|
||||||
|
|
||||||
|
|
||||||
.. Links
|
|
||||||
|
|
||||||
.. _Python: https://www.python.org/
|
|
||||||
.. _reStructuredText: http://docutils.sourceforge.net/rst.html
|
|
||||||
.. _Markdown: https://daringfireball.net/projects/markdown/
|
|
||||||
.. _Jinja2: https://palletsprojects.com/p/jinja/
|
|
||||||
.. _Pygments: https://pygments.org/
|
|
||||||
.. _`Pelican's documentation`: https://docs.getpelican.com/
|
|
||||||
.. _`Pelican's internals`: https://docs.getpelican.com/en/latest/internals.html
|
|
||||||
.. _`hosted on GitHub`: https://github.com/getpelican/pelican
|
|
||||||
|
|
||||||
.. |build-status| image:: https://img.shields.io/github/workflow/status/getpelican/pelican/build
|
|
||||||
:target: https://github.com/getpelican/pelican/actions
|
|
||||||
:alt: GitHub Actions CI: continuous integration status
|
|
||||||
.. |pypi-version| image:: https://img.shields.io/pypi/v/pelican.svg
|
|
||||||
:target: https://pypi.python.org/pypi/pelican
|
|
||||||
:alt: PyPI: the Python Package Index
|
|
||||||
.. |repology| image:: https://repology.org/badge/tiny-repos/pelican.svg
|
|
||||||
:target: https://repology.org/project/pelican/versions
|
|
||||||
:alt: Repology: the packaging hub
|
|
||||||
165
THANKS
165
THANKS
|
|
@ -1,165 +0,0 @@
|
||||||
Pelican is a project originally created by Alexis Métaireau
|
|
||||||
<https://blog.notmyidea.org/> and subsequently maintained by Justin Mayer
|
|
||||||
<https://justinmayer.com/>, but there are a large number of people that have
|
|
||||||
contributed or implemented key features over time. We do our best to keep this
|
|
||||||
list up-to-date, but you can also have a look at the nice contributor graphs
|
|
||||||
produced by GitHub: https://github.com/getpelican/pelican/graphs/contributors
|
|
||||||
|
|
||||||
If you want to contribute, check the documentation section about how to do so:
|
|
||||||
<https://docs.getpelican.com/en/latest/contribute.html>
|
|
||||||
|
|
||||||
Aaron Kavlie
|
|
||||||
Abhishek L
|
|
||||||
Albrecht Mühlenschulte
|
|
||||||
Aldiantoro Nugroho
|
|
||||||
Alen Mujezinovic
|
|
||||||
Alessandro Martin
|
|
||||||
Alexander Artemenko
|
|
||||||
Alexandre RODIERE
|
|
||||||
Alexis Daboville
|
|
||||||
Alexis Métaireau
|
|
||||||
Allan Whatmough
|
|
||||||
Andrea Crotti
|
|
||||||
Andrew Laski
|
|
||||||
Andrew Spiers
|
|
||||||
Arnaud BOS
|
|
||||||
asselinpaul
|
|
||||||
Axel Haustant
|
|
||||||
Ben Rosser (TC01)
|
|
||||||
Benoît HERVIER
|
|
||||||
Bernhard Scheirle
|
|
||||||
Borgar
|
|
||||||
Brandon W Maister
|
|
||||||
Brendan Wholihan
|
|
||||||
Brian C. Lane
|
|
||||||
Brian Hsu
|
|
||||||
Brian St. Pierre
|
|
||||||
Bruno Binet
|
|
||||||
BunnyMan
|
|
||||||
Chenguang Wang
|
|
||||||
Chris Elston
|
|
||||||
Chris McDonald (Wraithan)
|
|
||||||
Chris Streeter
|
|
||||||
Christophe Chauvet
|
|
||||||
Clint Howarth
|
|
||||||
Colin Dunklau
|
|
||||||
Dafydd Crosby
|
|
||||||
Dana Woodman
|
|
||||||
Dave King
|
|
||||||
Dave Mankoff
|
|
||||||
David Beitey
|
|
||||||
David Marble
|
|
||||||
Deniz Turgut (Avaris)
|
|
||||||
derdon
|
|
||||||
Dirkjan Ochtman
|
|
||||||
Dirk Makowski
|
|
||||||
draftcode
|
|
||||||
Edward Delaporte
|
|
||||||
Emily Strickland
|
|
||||||
epatters
|
|
||||||
Eric Case
|
|
||||||
Erik Hetzner
|
|
||||||
FELD Boris
|
|
||||||
Feth Arezki
|
|
||||||
Florian Jacob
|
|
||||||
Florian Preinstorfer
|
|
||||||
Félix Delval
|
|
||||||
Freeculture
|
|
||||||
George V. Reilly
|
|
||||||
Guillaume
|
|
||||||
Guillaume B
|
|
||||||
Guillermo López
|
|
||||||
guillermooo
|
|
||||||
Ian Cordasco
|
|
||||||
Igor Kalnitsky
|
|
||||||
Irfan Ahmad
|
|
||||||
Iuri de Silvio
|
|
||||||
Ivan Dyedov
|
|
||||||
James King
|
|
||||||
James Rowe
|
|
||||||
jawher
|
|
||||||
Jered Boxman
|
|
||||||
Jerome
|
|
||||||
Jiachen Yang
|
|
||||||
Jochen Breuer
|
|
||||||
joe di castro
|
|
||||||
John Kristensen
|
|
||||||
John Mastro
|
|
||||||
Jökull Sólberg Auðunsson
|
|
||||||
Jomel Imperio
|
|
||||||
Joseph Reagle
|
|
||||||
Joshua Adelman
|
|
||||||
Julian Berman
|
|
||||||
Justin Mayer
|
|
||||||
Kevin Deldycke
|
|
||||||
Kevin Yap
|
|
||||||
Kyle Fuller
|
|
||||||
Laureline Guerin
|
|
||||||
Leonard Huang
|
|
||||||
Leroy Jiang
|
|
||||||
Lucas Cimon
|
|
||||||
Marcel Hellkamp
|
|
||||||
Marco Milanesi
|
|
||||||
Marcus Fredriksson
|
|
||||||
Mario Rodas
|
|
||||||
Mark Caudill
|
|
||||||
Martin Brochhaus
|
|
||||||
Massimo Santini
|
|
||||||
Matt Bowcock
|
|
||||||
Matt Layman
|
|
||||||
Meir Kriheli
|
|
||||||
Michael Guntsche
|
|
||||||
Michael Reneer
|
|
||||||
Michael Yanovich
|
|
||||||
Mike Yumatov
|
|
||||||
Mikhail Korobov
|
|
||||||
m-r-r
|
|
||||||
mviera
|
|
||||||
Nico Di Rocco
|
|
||||||
Nicolas Duhamel
|
|
||||||
Nicolas Perriault
|
|
||||||
Nicolas Steinmetz
|
|
||||||
Paolo Melchiorre
|
|
||||||
Paul Asselin
|
|
||||||
Pavel Puchkin
|
|
||||||
Perry Roper
|
|
||||||
Peter Desmet
|
|
||||||
Philippe Pepiot
|
|
||||||
Rachid Belaid
|
|
||||||
Randall Degges
|
|
||||||
Ranjhith Kalisamy
|
|
||||||
Remi Rampin
|
|
||||||
Rémy HUBSCHER
|
|
||||||
renhbo
|
|
||||||
Richard Duivenvoorde
|
|
||||||
Rogdham
|
|
||||||
Roman Skvazh
|
|
||||||
Ronny Pfannschmidt
|
|
||||||
Rory McCann
|
|
||||||
Rıdvan Örsvuran
|
|
||||||
saghul
|
|
||||||
sam
|
|
||||||
Samrat Man Singh
|
|
||||||
Simon Conseil
|
|
||||||
Simon Liedtke
|
|
||||||
Skami18
|
|
||||||
solsTiCe d'Hiver
|
|
||||||
Steve Schwarz
|
|
||||||
Stéphane Bunel
|
|
||||||
Stéphane Raimbault
|
|
||||||
Stuart Colville
|
|
||||||
Talha Mansoor
|
|
||||||
Tarek Ziade
|
|
||||||
Thanos Lefteris
|
|
||||||
Thomas Thurman
|
|
||||||
Tobias
|
|
||||||
Tomi Pieviläinen
|
|
||||||
Trae Blain
|
|
||||||
Tshepang Lekhonkhobe
|
|
||||||
Valentin-Costel Hăloiu
|
|
||||||
Vlad Niculae
|
|
||||||
William Light
|
|
||||||
William Minchin
|
|
||||||
Wladislaw Merezhko
|
|
||||||
W. Trevor King
|
|
||||||
Zoresvit
|
|
||||||
130
docs/Makefile
130
docs/Makefile
|
|
@ -1,130 +0,0 @@
|
||||||
# Makefile for Sphinx documentation
|
|
||||||
#
|
|
||||||
|
|
||||||
# You can set these variables from the command line.
|
|
||||||
SPHINXOPTS =
|
|
||||||
SPHINXBUILD = sphinx-build
|
|
||||||
PAPER =
|
|
||||||
BUILDDIR = _build
|
|
||||||
|
|
||||||
# Internal variables.
|
|
||||||
PAPEROPT_a4 = -D latex_paper_size=a4
|
|
||||||
PAPEROPT_letter = -D latex_paper_size=letter
|
|
||||||
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
|
||||||
|
|
||||||
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
|
|
||||||
|
|
||||||
help:
|
|
||||||
@echo "Please use \`make <target>' where <target> is one of"
|
|
||||||
@echo " html to make standalone HTML files"
|
|
||||||
@echo " dirhtml to make HTML files named index.html in directories"
|
|
||||||
@echo " singlehtml to make a single large HTML file"
|
|
||||||
@echo " pickle to make pickle files"
|
|
||||||
@echo " json to make JSON files"
|
|
||||||
@echo " htmlhelp to make HTML files and a HTML help project"
|
|
||||||
@echo " qthelp to make HTML files and a qthelp project"
|
|
||||||
@echo " devhelp to make HTML files and a Devhelp project"
|
|
||||||
@echo " epub to make an epub"
|
|
||||||
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
|
||||||
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
|
||||||
@echo " text to make text files"
|
|
||||||
@echo " man to make manual pages"
|
|
||||||
@echo " changes to make an overview of all changed/added/deprecated items"
|
|
||||||
@echo " linkcheck to check all external links for integrity"
|
|
||||||
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
|
||||||
|
|
||||||
clean:
|
|
||||||
-rm -rf $(BUILDDIR)/*
|
|
||||||
|
|
||||||
html:
|
|
||||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
|
||||||
@echo
|
|
||||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
|
||||||
|
|
||||||
dirhtml:
|
|
||||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
|
||||||
@echo
|
|
||||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
|
||||||
|
|
||||||
singlehtml:
|
|
||||||
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
|
||||||
@echo
|
|
||||||
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
|
||||||
|
|
||||||
pickle:
|
|
||||||
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
|
||||||
@echo
|
|
||||||
@echo "Build finished; now you can process the pickle files."
|
|
||||||
|
|
||||||
json:
|
|
||||||
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
|
||||||
@echo
|
|
||||||
@echo "Build finished; now you can process the JSON files."
|
|
||||||
|
|
||||||
htmlhelp:
|
|
||||||
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
|
||||||
@echo
|
|
||||||
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
|
||||||
".hhp project file in $(BUILDDIR)/htmlhelp."
|
|
||||||
|
|
||||||
qthelp:
|
|
||||||
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
|
||||||
@echo
|
|
||||||
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
|
||||||
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
|
||||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Raclette.qhcp"
|
|
||||||
@echo "To view the help file:"
|
|
||||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Raclette.qhc"
|
|
||||||
|
|
||||||
devhelp:
|
|
||||||
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
|
||||||
@echo
|
|
||||||
@echo "Build finished."
|
|
||||||
@echo "To view the help file:"
|
|
||||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/Raclette"
|
|
||||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Raclette"
|
|
||||||
@echo "# devhelp"
|
|
||||||
|
|
||||||
epub:
|
|
||||||
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
|
||||||
@echo
|
|
||||||
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
|
||||||
|
|
||||||
latex:
|
|
||||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
|
||||||
@echo
|
|
||||||
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
|
||||||
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
|
||||||
"(use \`make latexpdf' here to do that automatically)."
|
|
||||||
|
|
||||||
latexpdf:
|
|
||||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
|
||||||
@echo "Running LaTeX files through pdflatex..."
|
|
||||||
make -C $(BUILDDIR)/latex all-pdf
|
|
||||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
|
||||||
|
|
||||||
text:
|
|
||||||
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
|
||||||
@echo
|
|
||||||
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
|
||||||
|
|
||||||
man:
|
|
||||||
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
|
||||||
@echo
|
|
||||||
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
|
||||||
|
|
||||||
changes:
|
|
||||||
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
|
||||||
@echo
|
|
||||||
@echo "The overview file is in $(BUILDDIR)/changes."
|
|
||||||
|
|
||||||
linkcheck:
|
|
||||||
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
|
||||||
@echo
|
|
||||||
@echo "Link check complete; look for any errors in the above output " \
|
|
||||||
"or in $(BUILDDIR)/linkcheck/output.txt."
|
|
||||||
|
|
||||||
doctest:
|
|
||||||
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
|
||||||
@echo "Testing of doctests in the sources finished, look at the " \
|
|
||||||
"results in $(BUILDDIR)/doctest/output.txt."
|
|
||||||
BIN
docs/_static/overall.png
vendored
BIN
docs/_static/overall.png
vendored
Binary file not shown.
|
Before Width: | Height: | Size: 4.9 KiB |
BIN
docs/_static/theme-basic.zip
vendored
BIN
docs/_static/theme-basic.zip
vendored
Binary file not shown.
12
docs/_static/theme_overrides.css
vendored
12
docs/_static/theme_overrides.css
vendored
|
|
@ -1,12 +0,0 @@
|
||||||
|
|
||||||
/* override table width restrictions */
|
|
||||||
.wy-table-responsive table td, .wy-table-responsive table th {
|
|
||||||
/* !important prevents the common CSS stylesheets from
|
|
||||||
overriding this as on RTD they are loaded after this stylesheet */
|
|
||||||
white-space: normal !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.wy-table-responsive {
|
|
||||||
overflow: visible !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
BIN
docs/_static/uml.jpg
vendored
BIN
docs/_static/uml.jpg
vendored
Binary file not shown.
|
Before Width: | Height: | Size: 65 KiB |
|
|
@ -1,431 +0,0 @@
|
||||||
Release history
|
|
||||||
###############
|
|
||||||
|
|
||||||
4.2.0 - 2019-10-17
|
|
||||||
==================
|
|
||||||
|
|
||||||
* Support inline SVGs; don't treat titles in SVGs as HTML titles
|
|
||||||
* Add category to feeds (in addition to tags)
|
|
||||||
* Improve content metadata field docs
|
|
||||||
* Add docs for including other Markdown/reST files in content
|
|
||||||
|
|
||||||
4.1.3 - 2019-10-09
|
|
||||||
==================
|
|
||||||
|
|
||||||
* Fix quick-start docs regarding `pelican --listen`
|
|
||||||
* Set default listen address to 127.0.0.1
|
|
||||||
* Add extra/optional Markdown dependency to setup.py
|
|
||||||
* Use correct SSH port syntax for rsync in tasks.py
|
|
||||||
* Place all deprecated settings handling together
|
|
||||||
* Add related project URLs for display on PyPI
|
|
||||||
* Skip some tests on Windows that can't pass due to filesystem differences
|
|
||||||
|
|
||||||
4.1.2 - 2019-09-23
|
|
||||||
==================
|
|
||||||
|
|
||||||
Fix pelican.settings.load_source to avoid caching issues - PR #2621
|
|
||||||
|
|
||||||
4.1.1 - 2019-08-23
|
|
||||||
==================
|
|
||||||
|
|
||||||
* Add AutoPub to auto-publish releases on PR merge
|
|
||||||
* Add CSS classes for reStructuredText figures
|
|
||||||
* Pass `argv` to Pelican `main` entrypoint
|
|
||||||
* Set default content status to a blank string rather than `None`
|
|
||||||
|
|
||||||
4.1.0 - 2019-07-14
|
|
||||||
==================
|
|
||||||
|
|
||||||
* Live browser reload upon changed files (provided via Invoke task)
|
|
||||||
* Add ``pyproject.toml``, managed by Poetry
|
|
||||||
* Support for invoking ``python -m pelican``
|
|
||||||
* Add relative source path attribute to content
|
|
||||||
* Allow directories in ``EXTRA_PATH_METADATA``
|
|
||||||
* Add ``all_articles`` variable to period pages (for recent posts functionality)
|
|
||||||
* Improve debug mode output
|
|
||||||
* Remove blank or duplicate summaries from Atom feed
|
|
||||||
* Fix bugs in pagination, pelican-import, pelican-quickstart, and feed importer
|
|
||||||
|
|
||||||
4.0.1 (2018-11-30)
|
|
||||||
==================
|
|
||||||
|
|
||||||
* Refactor ``pelican.server`` logging
|
|
||||||
* Fix bug in which all static files were processed as "draft"
|
|
||||||
* Bug fixes for Invoke/Makefile automation, Importer, and other miscellanea
|
|
||||||
|
|
||||||
If upgrading from 3.7.x or earlier, please note that slug-related settings in
|
|
||||||
4.0+ use ``{slug}`` and/or ``{lang}`` rather than ``%s``. If ``%s``-style
|
|
||||||
settings are encountered, Pelican will emit a warning and fall back to the
|
|
||||||
default setting. Some user-submitted themes might try to format setting values
|
|
||||||
but fail upon site build with a ``TypeError``. In such cases, the theme needs
|
|
||||||
to be updated. For example, instead of ``TAG_FEED_ATOM|format(tag.slug)``, use
|
|
||||||
``TAG_FEED_ATOM.format(slug=tag.slug)``
|
|
||||||
|
|
||||||
4.0.0 (2018-11-13)
|
|
||||||
==================
|
|
||||||
|
|
||||||
* Replace ``develop_server.sh`` script with ``pelican --listen``
|
|
||||||
* Improved copy/link behavior for large static files (e.g., videos)
|
|
||||||
* New ``{static}`` syntax to link to static content; content linked to by
|
|
||||||
``{static}`` and ``{attach}`` is automatically copied over even if not in
|
|
||||||
``STATIC_PATHS``
|
|
||||||
* Pages can now have ``draft`` status
|
|
||||||
* Show current settings via new ``--print-settings`` flag
|
|
||||||
* All settings for slugs now use ``{slug}`` and/or ``{lang}`` rather than
|
|
||||||
``%s``. If ``%s``-style settings are encountered, Pelican will emit a warning
|
|
||||||
and fallback to the default setting.
|
|
||||||
* New signals: ``feed_generated`` and ``page_generated_write_page``
|
|
||||||
* Replace Fabric with Invoke and ``fabfile.py`` template with ``tasks.py``
|
|
||||||
* Replace ``PAGINATED_DIRECT_TEMPLATES`` by ``PAGINATED_TEMPLATES``, extending
|
|
||||||
control over pagination to all templates and making page size variable
|
|
||||||
* Replace ``SLUG_SUBSTITUTIONS`` (and friends) by ``SLUG_REGEX_SUBSTITUTIONS``
|
|
||||||
for more finegrained control
|
|
||||||
* ``'{base_name}'`` value in ``PAGINATION_PATTERNS`` setting no longer strips
|
|
||||||
``'bar'`` from ``'foo/bar.html'`` (unless ``'bar' == 'index'``).
|
|
||||||
* ``ARTICLE_ORDER_BY`` and ``PAGE_ORDER_BY`` now also affect 1) category, tag
|
|
||||||
and author pages 2) feeds 3) draft and hidden articles and pages
|
|
||||||
* New ``ARTICLE_TRANSLATION_ID`` and ``PAGE_TRANSLATION_ID`` settings to
|
|
||||||
specify metadata attributes used to identify/disable translations
|
|
||||||
* Make the HTML reader parse multiple occurrences of metadata tags as a list
|
|
||||||
* New Blogger XML backup importer
|
|
||||||
* Wordpress importer now updates file links to point to local copies if the
|
|
||||||
files were downloaded with ``--wp-attach``.
|
|
||||||
* Importer no longer inserts extra newlines, to prevent breaking of HTML
|
|
||||||
attributes.
|
|
||||||
* Pelican server now prioritises ``foo.html`` and ``foo/index.html`` over
|
|
||||||
``foo/`` when resolving ``foo``.
|
|
||||||
|
|
||||||
3.7.1 (2017-01-10)
|
|
||||||
==================
|
|
||||||
|
|
||||||
* Fix locale issues in Quickstart script
|
|
||||||
* Specify encoding for README and CHANGELOG in setup.py
|
|
||||||
|
|
||||||
3.7.0 (2016-12-12)
|
|
||||||
==================
|
|
||||||
|
|
||||||
* Atom feeds output ``<content>`` in addition to ``<summary>``
|
|
||||||
* Atom feeds use ``<published>`` for the original publication date and
|
|
||||||
``<updated>`` for modifications
|
|
||||||
* Simplify Atom feed ID generation and support URL fragments
|
|
||||||
* Produce category feeds with category-specific titles
|
|
||||||
* RSS feeds now default to summary instead of full content;
|
|
||||||
set ``RSS_FEED_SUMMARY_ONLY = False`` to revert to previous behavior
|
|
||||||
* Replace ``MD_EXTENSIONS`` with ``MARKDOWN`` setting
|
|
||||||
* Replace ``JINJA_EXTENSIONS`` with more-robust ``JINJA_ENVIRONMENT`` setting
|
|
||||||
* Improve summary truncation logic to handle special characters and tags that
|
|
||||||
span multiple lines, using HTML parser instead of regular expressions
|
|
||||||
* Include summary when looking for intra-site link substitutions
|
|
||||||
* Link to authors and index via ``{author}name`` and ``{index}`` syntax
|
|
||||||
* Override widget names via ``LINKS_WIDGET_NAME`` and ``SOCIAL_WIDGET_NAME``
|
|
||||||
* Add ``INDEX_SAVE_AS`` option to override default ``index.html`` value
|
|
||||||
* Remove ``PAGES`` context variable for themes in favor of ``pages``
|
|
||||||
* ``SLUG_SUBSTITUTIONS`` now accepts 3-tuple elements, allowing URL slugs to
|
|
||||||
contain non-alphanumeric characters
|
|
||||||
* Tag and category slugs can be controlled with greater precision using the
|
|
||||||
``TAG_SUBSTITUTIONS`` and ``CATEGORY_SUBSTITUTIONS`` settings
|
|
||||||
* Author slugs can be controlled with greater precision using the
|
|
||||||
``AUTHOR_SUBSTITUTIONS`` setting
|
|
||||||
* ``DEFAULT_DATE`` can be defined as a string
|
|
||||||
* Use ``mtime`` instead of ``ctime`` when ``DEFAULT_DATE = 'fs'``
|
|
||||||
* Add ``--fatal=errors|warnings`` option for use with continuous integration
|
|
||||||
* When using generator-level caching, ensure previously-cached files are
|
|
||||||
processed instead of just new files.
|
|
||||||
* Add Python and Pelican version information to debug output
|
|
||||||
* Improve compatibility with Python 3.5
|
|
||||||
* Comply with and enforce PEP8 guidelines
|
|
||||||
* Replace tables in settings documentation with ``data::`` directives
|
|
||||||
|
|
||||||
3.6.3 (2015-08-14)
|
|
||||||
==================
|
|
||||||
|
|
||||||
* Fix permissions issue in release tarball
|
|
||||||
|
|
||||||
3.6.2 (2015-08-01)
|
|
||||||
==================
|
|
||||||
|
|
||||||
* Fix installation errors related to Unicode in tests
|
|
||||||
* Don't show pagination in ``notmyidea`` theme if there's only one page
|
|
||||||
* Make hidden pages available in context
|
|
||||||
* Improve URLWrapper comparison
|
|
||||||
|
|
||||||
3.6.0 (2015-06-15)
|
|
||||||
==================
|
|
||||||
|
|
||||||
* Disable caching by default in order to prevent potential confusion
|
|
||||||
* Improve caching behavior, replacing ``pickle`` with ``cpickle``
|
|
||||||
* Allow Markdown or reST content in metadata fields other than ``summary``
|
|
||||||
* Support semicolon-separated author/tag lists
|
|
||||||
* Improve flexibility of article sorting
|
|
||||||
* Add ``--relative-urls`` argument
|
|
||||||
* Support devserver listening on addresses other than localhost
|
|
||||||
* Unify HTTP server handlers to ``pelican.server`` throughout
|
|
||||||
* Handle intra-site links to draft posts
|
|
||||||
* Move ``tag_cloud`` from core to plugin
|
|
||||||
* Load default theme's external resources via HTTPS
|
|
||||||
* Import drafts from WordPress XML
|
|
||||||
* Improve support for Windows users
|
|
||||||
* Enhance logging and test suite
|
|
||||||
* Clean up and refactor codebase
|
|
||||||
* New signals: ``all_generators_finalized`` and ``page_writer_finalized``
|
|
||||||
|
|
||||||
3.5.0 (2014-11-04)
|
|
||||||
==================
|
|
||||||
|
|
||||||
* Introduce ``ARTICLE_ORDER_BY`` and ``PAGE_ORDER_BY`` settings to control the
|
|
||||||
order of articles and pages.
|
|
||||||
* Include time zone information in dates rendered in templates.
|
|
||||||
* Expose the reader name in the metadata for articles and pages.
* Add the ability to store static files along with content in the same
  directory as articles and pages using ``{attach}`` in the path.
* Prevent Pelican from raising an exception when there are duplicate pieces of
  metadata in a Markdown file.
* Introduce the ``TYPOGRIFY_IGNORE_TAGS`` setting to add HTML tags to be
  ignored by Typogrify.
* Add the ability to use ``-`` in date formats to strip leading zeros. For
  example, ``%-d/%-m/%y`` will now result in the date ``9/8/12``.
* Ensure feed generation is correctly disabled during quickstart configuration.
* Prevent ``PAGE_EXCLUDES`` and ``ARTICLE_EXCLUDES`` from incorrectly matching
  sub-directories.
* Introduce ``STATIC_EXCLUDE`` setting to add static file excludes.
* Fix an issue when using ``PAGINATION_PATTERNS`` while ``RELATIVE_URLS``
  is enabled.
* Fix feed generation causing links to use the wrong language for month
  names when using other locales.
* Fix an issue where the authors list in the simple template wasn't correctly
  formatted.
* Fix an issue when parsing non-string URLs from settings.
* Improve consistency of debug and warning messages.

3.4.0 (2014-07-01)
==================

* Speed up content generation via new caching mechanism
* Add selective post generation (instead of always building entire site)
* Many documentation improvements, including switching to prettier RtD theme
* Add support for multiple content and plugin paths
* Add ``:modified:`` metadata field to complement ``:date:``.
  Used to specify the last date and time an article was updated independently
  from the date and time it was published.
* Add support for multiple authors via new ``:authors:`` metadata field
* Watch for changes in static directories when in auto-regeneration mode
* Add filters to limit log output when desired
* Add language support to drafts
* Add ``SLUGIFY_SOURCE`` setting to control how post slugs are generated
* Fix many issues relating to locale and encoding
* Apply Typogrify filter to post summary
* Preserve file metadata (e.g. time stamps) when copying static files to output
* Move AsciiDoc support from Pelican core into separate plugin
* Produce inline links instead of reference-style links when importing content
* Improve handling of ``IGNORE_FILES`` setting behavior
* Properly escape symbol characters in tag names (e.g., ``C++``)
* Minor tweaks for Python 3.4 compatibility
* Add several new signals

3.3.0 (2013-09-24)
==================

* Drop Python 3.2 support in favor of Python 3.3
* Add ``Fabfile`` so Fabric can be used for workflow automation instead of Make
* The ``OUTPUT_RETENTION`` setting can be used to prevent metadata (e.g., VCS
  data such as ``.hg`` and ``.git``) from being removed from the output directory
* Tumblr import
* Improve logic and consistency when cleaning output folder
* Improve documentation versioning and release automation
* Improve pagination flexibility
* Rename signals for better consistency (some plugins may need to be updated)
* Move metadata extraction from generators to readers; metadata extraction no
  longer article-specific
* Deprecate ``FILES_TO_COPY`` in favor of ``STATIC_PATHS`` and
  ``EXTRA_PATH_METADATA``
* Summaries in Markdown posts no longer include footnotes
* Remove unnecessary whitespace in output via ``lstrip_blocks`` Jinja parameter
* Move PDF generation from core to plugin
* Replace ``MARKUP`` setting with ``READERS``
* Add warning if img tag is missing ``alt`` attribute
* Add support for ``{}`` in relative links syntax, besides ``||``
* Add support for ``{tag}`` and ``{category}`` relative links
* Add a ``content_written`` signal

3.2.1 and 3.2.2
===============

* Facilitate inclusion in FreeBSD Ports Collection

3.2 (2013-04-24)
================

* Support for Python 3!
* Override page save-to location from meta-data (enables using a static page as
  the site's home page, for example)
* Time period archives (per-year, per-month, and per-day archives of posts)
* Posterous blog import
* Improve WordPress blog import
* Migrate plugins to separate repository
* Improve HTML parser
* Provide ability to show or hide categories from menu using
  ``DISPLAY_CATEGORIES_ON_MENU`` option
* Auto-regeneration can be told to ignore files via ``IGNORE_FILES`` setting
* Improve post-generation feedback to user
* For multilingual posts, use meta-data to designate which is the original
  and which is the translation
* Add ``.mdown`` to list of supported Markdown file extensions
* Document-relative URL generation (``RELATIVE_URLS``) is now off by default

3.1 (2012-12-04)
================

* Importer now stores slugs within files by default. This can be disabled with
  the ``--disable-slugs`` option.
* Improve handling of links to intra-site resources
* Ensure WordPress import adds paragraphs for all types of line endings
  in post content
* Decode HTML entities within WordPress post titles on import
* Improve appearance of LinkedIn icon in default theme
* Add GitHub and Google+ social icons support in default theme
* Optimize social icons
* Add ``FEED_ALL_ATOM`` and ``FEED_ALL_RSS`` to generate feeds containing all
  posts regardless of their language
* Split ``TRANSLATION_FEED`` into ``TRANSLATION_FEED_ATOM`` and
  ``TRANSLATION_FEED_RSS``
* Different feeds can now be enabled/disabled individually
* Allow for blank author: if ``AUTHOR`` setting is not set, author won't
  default to ``${USER}`` anymore, and a post won't contain any author
  information if the post author is empty
* Move LESS and Webassets support from Pelican core to plugin
* The ``DEFAULT_DATE`` setting now defaults to ``None``, which means that
  articles won't be generated unless date metadata is specified
* Add ``FILENAME_METADATA`` setting to support metadata extraction from
  filename
* Add ``gzip_cache`` plugin to compress common text files into a ``.gz``
  file within the same directory as the original file, preventing the server
  (e.g. Nginx) from having to compress files during an HTTP call
* Add support for AsciiDoc-formatted content
* Add ``USE_FOLDER_AS_CATEGORY`` setting so that feature can be toggled on/off
* Support arbitrary Jinja template files
* Restore basic functional tests
* New signals: ``generator_init``, ``get_generators``, and
  ``article_generate_preread``

3.0 (2012-08-08)
================

* Refactored the way URLs are handled
* Improved the English documentation
* Fixed packaging using ``setuptools`` entrypoints
* Added ``typogrify`` support
* Added a way to disable feed generation
* Added support for ``DIRECT_TEMPLATES``
* Allow multiple extensions for content files
* Added LESS support
* Improved the import script
* Added functional tests
* Rsync support in the generated Makefile
* Improved feed support (easily pluggable with Feedburner for instance)
* Added support for ``abbr`` in reST
* Fixed a bunch of bugs :-)

2.8 (2012-02-28)
================

* Dotclear importer
* Allow the usage of Markdown extensions
* Themes are now easily extensible
* Don't output pagination information if there is only one page
* Add a page per author, with all their articles
* Improved the test suite
* Made the themes easier to extend
* Removed Skribit support
* Added a ``pelican-quickstart`` script
* Fixed timezone-related issues
* Added some scripts for Windows support
* Date can be specified in seconds
* Never fail when generating posts (skip and continue)
* Allow the use of future dates
* Support having different timezones per language
* Enhanced the documentation

2.7 (2011-06-11)
================

* Use ``logging`` rather than echoing to stdout
* Support custom Jinja filters
* Compatibility with Python 2.5
* Added a theme manager
* Packaged for Debian
* Added draft support

2.6 (2011-03-08)
================

* Changes in the output directory structure
* Makes templates easier to work with / create
* Added RSS support (was Atom-only)
* Added tag support for the feeds
* Enhance the documentation
* Added another theme (brownstone)
* Added translations
* Added a way to use cleaner URLs with a rewrite url module (or equivalent)
* Added a tag cloud
* Added an autoreloading feature: the blog is automatically regenerated each
  time a modification is detected
* Translate the documentation into French
* Import a blog from an RSS feed
* Pagination support
* Added Skribit support

2.5 (2010-11-20)
================

* Import from WordPress
* Added some new themes (martyalchin / wide-notmyidea)
* First bug report!
* LinkedIn support
* Added a FAQ
* Google Analytics support
* Twitter support
* Use relative URLs, not static ones

2.4 (2010-11-06)
================

* Minor theme changes
* Add Disqus support (so we have comments)
* Another code refactoring
* Added config settings for pages
* Blog entries can also be generated in PDF

2.3 (2010-10-31)
================

* Markdown support

2.2 (2010-10-30)
================

* Prettify output
* Manage static pages as well

2.1 (2010-10-30)
================

* Make notmyidea the default theme

2.0 (2010-10-30)
================

* Refactoring to be more extensible
* Changes to the settings variables

1.2 (2010-09-28)
================

* Added a debug option
* Added per-category feeds
* Use filesystem to get dates if no metadata is provided
* Add Pygments support

1.1 (2010-08-19)
================

* First working version

docs/conf.py
@@ -1,81 +0,0 @@
import os
import sys

from pelican import __version__

on_rtd = os.environ.get('READTHEDOCS', None) == 'True'

sys.path.append(os.path.abspath(os.pardir))

# -- General configuration ----------------------------------------------------
templates_path = ['_templates']
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.ifconfig',
              'sphinx.ext.extlinks']
source_suffix = '.rst'
master_doc = 'index'
project = 'Pelican'
copyright = '2010 – present, Justin Mayer, Alexis Metaireau, and contributors'
exclude_patterns = ['_build']
release = __version__
version = '.'.join(release.split('.')[:1])
last_stable = __version__
rst_prolog = '''
.. |last_stable| replace:: :pelican-doc:`{}`
'''.format(last_stable)

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

extlinks = {
    'pelican-doc': ('https://docs.getpelican.com/%s/', '')
}

# -- Options for HTML output --------------------------------------------------

html_theme = 'default'
if not on_rtd:
    try:
        import sphinx_rtd_theme
        html_theme = 'sphinx_rtd_theme'
        html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
    except ImportError:
        pass

html_static_path = ['_static']

# Output file base name for HTML help builder.
htmlhelp_basename = 'Pelicandoc'

html_use_smartypants = True

# If false, no module index is generated.
html_use_modindex = False

# If false, no index is generated.
html_use_index = False

# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False


def setup(app):
    # overrides for wide tables in RTD theme
    app.add_css_file('theme_overrides.css')  # path relative to _static


# -- Options for LaTeX output -------------------------------------------------
latex_documents = [
    ('index', 'Pelican.tex', 'Pelican Documentation', 'Justin Mayer',
     'manual'),
]

# -- Options for manual page output -------------------------------------------
man_pages = [
    ('index', 'pelican', 'pelican documentation',
     ['Justin Mayer'], 1),
    ('pelican-themes', 'pelican-themes', 'A theme manager for Pelican',
     ['Mickaël Raybaud'], 1),
    ('themes', 'pelican-theming', 'How to create themes for Pelican',
     ['The Pelican contributors'], 1)
]

docs/content.rst
@@ -1,625 +0,0 @@
Writing content
###############

Articles and pages
==================

Pelican considers "articles" to be chronological content, such as posts on a
blog, and thus associated with a date.

The idea behind "pages" is that they are usually not temporal in nature and are
used for content that does not change very often (e.g., "About" or "Contact"
pages).

You can find sample content in the repository at ``samples/content/``.

.. _internal_metadata:

File metadata
=============

Pelican tries to be smart enough to get the information it needs from the
file system (for instance, about the category of your articles), but some
information you need to provide in the form of metadata inside your files.

If you are writing your content in reStructuredText format, you can provide
this metadata in text files via the following syntax (give your file the
``.rst`` extension)::

    My super title
    ##############

    :date: 2010-10-03 10:20
    :modified: 2010-10-04 18:40
    :tags: thats, awesome
    :category: yeah
    :slug: my-super-post
    :authors: Alexis Metaireau, Conan Doyle
    :summary: Short version for index and feeds

Author and tag lists may be semicolon-separated instead, which allows
you to write authors and tags containing commas::

    :tags: pelican, publishing tool; pelican, bird
    :authors: Metaireau, Alexis; Doyle, Conan

Pelican implements an extension to reStructuredText to enable support for the
``abbr`` HTML tag. To use it, write something like this in your post::

    This will be turned into :abbr:`HTML (HyperText Markup Language)`.

You can also use Markdown syntax (with a file ending in ``.md``, ``.markdown``,
``.mkd``, or ``.mdown``). Markdown generation requires that you first
explicitly install the Python-Markdown_ package, which can be done via ``pip
install Markdown``.

Pelican also supports `Markdown Extensions`_, which might have to be installed
separately if they are not included in the default ``Markdown`` package and can
be configured and loaded via the ``MARKDOWN`` setting.

Metadata syntax for Markdown posts should follow this pattern::

    Title: My super title
    Date: 2010-12-03 10:20
    Modified: 2010-12-05 19:30
    Category: Python
    Tags: pelican, publishing
    Slug: my-super-post
    Authors: Alexis Metaireau, Conan Doyle
    Summary: Short version for index and feeds

    This is the content of my super blog post.

You can also have your own metadata keys (so long as they don't conflict with
reserved metadata keywords) for use in your templates. The following table
contains a list of reserved metadata keywords:

=============== ===============================================================
Metadata        Description
=============== ===============================================================
``title``       Title of the article or page
``date``        Publication date (e.g., ``YYYY-MM-DD HH:SS``)
``modified``    Modification date (e.g., ``YYYY-MM-DD HH:SS``)
``tags``        Content tags, separated by commas
``keywords``    Content keywords, separated by commas (HTML content only)
``category``    Content category (one only — not multiple)
``slug``        Identifier used in URLs and translations
``author``      Content author, when there is only one
``authors``     Content authors, when there are multiple
``summary``     Brief description of content for index pages
``lang``        Content language ID (``en``, ``fr``, etc.)
``translation`` If content is a translation of another (``true`` or ``false``)
``status``      Content status: ``draft``, ``hidden``, or ``published``
``template``    Name of template to use to generate content (without extension)
``save_as``     Save content to this relative file path
``url``         URL to use for this article/page
=============== ===============================================================

Readers for additional formats (such as AsciiDoc_) are available via plugins.
Refer to the `pelican-plugins`_ repository for those.

Pelican can also process HTML files ending in ``.html`` and ``.htm``. Pelican
interprets the HTML in a very straightforward manner, reading metadata from
``meta`` tags, the title from the ``title`` tag, and the body from the
``body`` tag::

    <html>
        <head>
            <title>My super title</title>
            <meta name="tags" content="thats, awesome" />
            <meta name="date" content="2012-07-09 22:28" />
            <meta name="modified" content="2012-07-10 20:14" />
            <meta name="category" content="yeah" />
            <meta name="authors" content="Alexis Métaireau, Conan Doyle" />
            <meta name="summary" content="Short version for index and feeds" />
        </head>
        <body>
            This is the content of my super blog post.
        </body>
    </html>

With HTML, there is one simple exception to the standard metadata: tags can be
specified either via the ``tags`` metadata, as is standard in Pelican, or via
the ``keywords`` metadata, as is standard in HTML. The two can be used
interchangeably.

Note that, aside from the title, none of this content metadata is mandatory:
if the date is not specified and ``DEFAULT_DATE`` is set to ``'fs'``, Pelican
will rely on the file's "mtime" timestamp, and the category can be determined
by the directory in which the file resides. For example, a file located at
``python/foobar/myfoobar.rst`` will have a category of ``foobar``. If you would
like to organize your files in other ways where the name of the subfolder would
not be a good category name, you can set ``USE_FOLDER_AS_CATEGORY`` to
``False``. When parsing dates given in the page metadata, Pelican supports the
W3C's `suggested subset of ISO 8601`__.

So the title is the only required metadata. If that bothers you, worry not.
Instead of manually specifying a title in your metadata each time, you can use
the source content file name as the title. For example, a Markdown source file
named ``Publishing via Pelican.md`` would automatically be assigned a title of
*Publishing via Pelican*. If you would prefer this behavior, add the following
line to your settings file::

    FILENAME_METADATA = '(?P<title>.*)'

.. note::

   When experimenting with different settings (especially the metadata
   ones), caching may interfere and the changes may not be visible. In
   such cases, disable caching with ``LOAD_CONTENT_CACHE = False`` or
   use the ``--ignore-cache`` command-line switch.

__ `W3C ISO 8601`_

``modified`` should be the last time you updated the article, and defaults to
``date`` if not specified. Besides being available for display in templates,
``modified`` also lets feed readers update feed entries automatically when you
set it to the current date after modifying your article.

``authors`` is a comma-separated list of article authors. If there is only one
author, you can use the ``author`` field.

If you do not explicitly specify summary metadata for a given post, the
``SUMMARY_MAX_LENGTH`` setting can be used to specify how many words from the
beginning of an article are used as the summary.
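
For instance, a minimal settings sketch (the word count shown here is only
illustrative; check the settings documentation for the actual default)::

    SUMMARY_MAX_LENGTH = 50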

You can also extract any metadata from the filename through a regular
expression to be set in the ``FILENAME_METADATA`` setting. All named groups
that are matched will be set in the metadata object. The default value for the
``FILENAME_METADATA`` setting will only extract the date from the filename. For
example, if you would like to extract both the date and the slug, you could set
something like: ``'(?P<date>\d{4}-\d{2}-\d{2})_(?P<slug>.*)'``

Please note that the metadata available inside your files takes precedence over
the metadata extracted from the filename.
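
If you want to check what a particular pattern will extract before running
Pelican, a standalone sketch like the following (not Pelican's internal code)
can help::

    import re

    FILENAME_METADATA = r'(?P<date>\d{4}-\d{2}-\d{2})_(?P<slug>.*)'

    # Match against a filename stem; each named group becomes a metadata key.
    match = re.match(FILENAME_METADATA, '2012-12-01_my-super-post')
    if match:
        print(match.groupdict())  # {'date': '2012-12-01', 'slug': 'my-super-post'}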

Pages
=====

If you create a folder named ``pages`` inside the content folder, all the
files in it will be used to generate static pages, such as **About** or
**Contact** pages. (See example filesystem layout below.)

You can use the ``DISPLAY_PAGES_ON_MENU`` setting to control whether all those
pages are displayed in the primary navigation menu. (Default is ``True``.)

If you want to exclude a page from being linked to or listed in the menu,
add a ``status: hidden`` attribute to its metadata. This is useful for
things like making error pages that fit the generated theme of your site.

Static content
==============

Static files are files other than articles and pages that are copied to the
output folder as-is, without processing. You can control which static files
are copied over with the ``STATIC_PATHS`` setting of the project's
``pelicanconf.py`` file. Pelican's default configuration includes the
``images`` directory for this, but others must be added manually. In addition,
static files that are explicitly linked to are included (see below).
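
For example, a settings sketch that copies two additional directories besides
the default ``images`` directory might look like this (the extra directory
names are only illustrative)::

    STATIC_PATHS = ['images', 'pdfs', 'downloads']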

Mixed content in the same directory
-----------------------------------

Starting with Pelican 3.5, static files can safely share a source directory
with page source files, without exposing the page sources in the generated
site. Any such directory must be added to both ``STATIC_PATHS`` and
``PAGE_PATHS`` (or ``STATIC_PATHS`` and ``ARTICLE_PATHS``). Pelican will
identify and process the page source files normally, and copy the remaining
files as if they lived in a separate directory reserved for static files.

Note: Placing static and content source files together in the same source
directory does not guarantee that they will end up in the same place in the
generated site. The easiest way to do this is by using the ``{attach}`` link
syntax (described below). Alternatively, the ``STATIC_SAVE_AS``,
``PAGE_SAVE_AS``, and ``ARTICLE_SAVE_AS`` settings (and the corresponding
``*_URL`` settings) can be configured to place files of different types
together, just as they could in earlier versions of Pelican.
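
As a sketch of the kind of configuration described above, articles and the
static files that sit next to them could share a single source directory like
this (the ``blog`` directory name is only an example)::

    ARTICLE_PATHS = ['blog']
    STATIC_PATHS = ['blog']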

.. _ref-linking-to-internal-content:

Linking to internal content
===========================

From Pelican 3.1 onwards, it is possible to specify intra-site links to
files in the *source content* hierarchy instead of files in the *generated*
hierarchy. This makes it easier to link from the current post to other content
that may be sitting alongside that post (instead of having to determine where
the other content will be placed after site generation).

To link to internal content (files in the ``content`` directory), use the
following syntax for the link target: ``{filename}path/to/file``. Note that
forward slashes, ``/``, are the required path separator in the ``{filename}``
directive on all operating systems, including Windows.

For example, a Pelican project might be structured like this::

    website/
    ├── content
    │   ├── category/
    │   │   └── article1.rst
    │   ├── article2.md
    │   └── pages
    │       └── about.md
    └── pelican.conf.py

In this example, ``article1.rst`` could look like this::

    The first article
    #################

    :date: 2012-12-01 10:02

    See below intra-site link examples in reStructuredText format.

    `a link relative to the current file <{filename}../article2.md>`_
    `a link relative to the content root <{filename}/article2.md>`_

and ``article2.md``::

    Title: The second article
    Date: 2012-12-01 10:02

    See below intra-site link examples in Markdown format.

    [a link relative to the current file]({filename}category/article1.rst)
    [a link relative to the content root]({filename}/category/article1.rst)

Linking to static files
-----------------------

You can link to static content using ``{static}path/to/file``. Files linked to
with this syntax will automatically be copied to the output directory, even if
the source directories containing them are not included in the ``STATIC_PATHS``
setting of the project's ``pelicanconf.py`` file.

For example, a project's content directory might be structured like this::

    content
    ├── images
    │   └── han.jpg
    ├── pdfs
    │   └── menu.pdf
    └── pages
        └── test.md

``test.md`` would include::

    ![Alt Text]({static}/images/han.jpg)
    [Our Menu]({static}/pdfs/menu.pdf)

Site generation would then copy ``han.jpg`` to ``output/images/han.jpg``,
``menu.pdf`` to ``output/pdfs/menu.pdf``, and write the appropriate links
in ``test.md``.

If you use ``{static}`` to link to an article or a page, this will be turned
into a link to its source code.

Attaching static files
----------------------

Starting with Pelican 3.5, static files can be "attached" to a page or article
using this syntax for the link target: ``{attach}path/to/file``. This works
like the ``{static}`` syntax, but also relocates the static file into the
linking document's output directory. If the static file originates from a
subdirectory beneath the linking document's source, that relationship will be
preserved on output. Otherwise, it will become a sibling of the linking
document.

This only works for linking to static files.

For example, a project's content directory might be structured like this::

    content
    ├── blog
    │   ├── icons
    │   │   └── icon.png
    │   ├── photo.jpg
    │   └── testpost.md
    └── downloads
        └── archive.zip

``pelicanconf.py`` would include::

    PATH = 'content'
    ARTICLE_PATHS = ['blog']
    ARTICLE_SAVE_AS = '{date:%Y}/{slug}.html'
    ARTICLE_URL = '{date:%Y}/{slug}.html'

``testpost.md`` would include::

    Title: Test Post
    Category: test
    Date: 2014-10-31

    ![Icon]({attach}icons/icon.png)
    ![Photo]({attach}photo.jpg)
    [Downloadable File]({attach}/downloads/archive.zip)

Site generation would then produce an output directory structured like this::

    output
    └── 2014
        ├── archive.zip
        ├── icons
        │   └── icon.png
        ├── photo.jpg
        └── test-post.html

Notice that all the files linked using ``{attach}`` ended up in or beneath
the article's output directory.

If a static file is linked multiple times, the relocating feature of
``{attach}`` will only work in the first of those links to be processed.
After the first link, Pelican will treat ``{attach}`` like ``{static}``.
This avoids breaking the already-processed links.

**Be careful when linking to a file from multiple documents:**
Since the first link to a file finalizes its location and Pelican does
not define the order in which documents are processed, using ``{attach}`` on a
file linked by multiple documents can cause its location to change from one
site build to the next. (Whether this happens in practice will depend on the
operating system, file system, version of Pelican, and documents being added,
modified, or removed from the project.) Any external sites linking to the
file's old location might then find their links broken. **It is therefore
advisable to use {attach} only if you use it in all links to a file, and only
if the linking documents share a single directory.** Under these conditions,
the file's output location will not change in future builds. In cases where
these precautions are not possible, consider using ``{static}`` links instead
of ``{attach}``, and letting the file's location be determined by the project's
``STATIC_SAVE_AS`` and ``STATIC_URL`` settings. (Per-file ``save_as`` and
``url`` overrides can still be set in ``EXTRA_PATH_METADATA``.)
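
For instance, a per-file override of the kind mentioned above might be
sketched in the settings file like this (the paths are purely illustrative)::

    EXTRA_PATH_METADATA = {
        'downloads/archive.zip': {
            'save_as': 'files/archive.zip',
            'url': 'files/archive.zip',
        },
    }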

.. note::
   When using ``{attach}``, any parent directory in ``*_URL`` / ``*_SAVE_AS``
   settings should match each other. See also: :ref:`url-settings`

Linking to authors, categories, index and tags
----------------------------------------------

You can link to authors, categories, index and tags using the ``{author}name``,
``{category}foobar``, ``{index}`` and ``{tag}tagname`` syntax.

Deprecated internal link syntax
-------------------------------

To remain compatible with earlier versions, Pelican still supports vertical
bars (``||``) in addition to curly braces (``{}``) for internal links. For
example: ``|filename|an_article.rst``, ``|tag|tagname``, ``|category|foobar``.
The syntax was changed from ``||`` to ``{}`` to avoid collision with Markdown
extensions or reST directives. Similarly, Pelican also still supports linking
to static content with ``{filename}``. The syntax was changed to ``{static}``
to allow linking to both generated articles and pages and their static sources.

Support for the old syntax may eventually be removed.

Including other files
---------------------

Both Markdown and reStructuredText syntaxes provide mechanisms for this.

Below are some examples for **reStructuredText** using `the include directive`_:

.. code-block:: rst

    .. include:: file.rst

Include a fragment of a file delimited by two identifiers, highlighted as C++
(slicing based on line numbers is also possible):

.. code-block:: rst

    .. include:: main.cpp
       :code: c++
       :start-after: // begin
       :end-before: // end

Include a raw HTML file (or an inline SVG) and put it directly into the output
without any processing:

.. code-block:: rst

    .. raw:: html
       :file: table.html

For **Markdown**, one must rely on an extension. For example, using the
`mdx_include plugin`_:

.. code-block:: none

    ```html
    {! template.html !}
    ```


Importing an existing site
==========================

It is possible to import your site from WordPress, Tumblr, Dotclear, and RSS
feeds using a simple script. See :ref:`import`.

Translations
============

It is possible to translate articles. To do so, you need to add a ``lang`` meta
attribute to your articles/pages and set a ``DEFAULT_LANG`` setting (which is
English [en] by default). With those settings in place, only articles with the
default language will be listed, and each article will be accompanied by a list
of available translations for that article.

.. note::

   This core Pelican functionality does not create sub-sites
   (e.g. ``example.com/de``) with translated templates for each
   language. For such advanced functionality the `i18n_subsites
   plugin`_ can be used.

By default, Pelican uses the article's URL "slug" to determine if two or more
articles are translations of one another. (This can be changed with the
``ARTICLE_TRANSLATION_ID`` setting.) The slug can be set manually in the file's
metadata; if not set explicitly, Pelican will auto-generate the slug from the
title of the article.
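
For reference, the setting can be sketched in the settings file like this
(``'slug'`` simply spells out the default behavior; another metadata attribute
could be named instead, as described in the settings documentation)::

    ARTICLE_TRANSLATION_ID = 'slug'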

Here is an example of two articles, one in English and the other in French.

The English article::

    Foobar is not dead
    ##################

    :slug: foobar-is-not-dead
    :lang: en

    That's true, foobar is still alive!

And the French version::

    Foobar n'est pas mort !
    #######################

    :slug: foobar-is-not-dead
    :lang: fr

    Oui oui, foobar est toujours vivant !

Post content quality notwithstanding, you can see that the only item in common
between the two articles is the slug, which is functioning here as an
identifier. If you'd rather not explicitly define the slug this way, you must
then instead ensure that the translated article titles are identical, since the
slug will be auto-generated from the article title.

If you do not want the original version of one specific article to be detected
by the ``DEFAULT_LANG`` setting, use the ``translation`` metadata to specify
which posts are translations::

    Foobar is not dead
    ##################

    :slug: foobar-is-not-dead
    :lang: en
    :translation: true

    That's true, foobar is still alive!


.. _internal_pygments_options:

Syntax highlighting
===================

Pelican can provide colorized syntax highlighting for your code blocks.
To do so, you must use the following conventions inside your content files.

For reStructuredText, use the ``code-block`` directive to specify the type
of code to be highlighted (in these examples, we'll use ``python``)::

    .. code-block:: python

        print("Pelican is a static site generator.")

For Markdown, which utilizes the `CodeHilite extension`_ to provide syntax
highlighting, include the language identifier just above the code block,
indenting both the identifier and the code::

    There are two ways to specify the identifier:

        :::python
        print("The triple-colon syntax will *not* show line numbers.")

    To display line numbers, use a path-less shebang instead of colons:

        #!python
        print("The path-less shebang syntax *will* show line numbers.")

The specified identifier (e.g. ``python``, ``ruby``) should be one that
appears on the `list of available lexers <https://pygments.org/docs/lexers/>`_.

When using reStructuredText, the following options are available in the
``code-block`` directive:

============= ============ =============================================
Option        Valid values Description
============= ============ =============================================
anchorlinenos N/A          If present, wrap line numbers in <a> tags.
classprefix   string       String to prepend to token class names.
hl_lines      numbers      List of lines to be highlighted, where
                           line numbers to highlight are separated
                           by a space. This is similar to
                           ``emphasize-lines`` in Sphinx, but it
                           does not support a range of line numbers
                           separated by a hyphen, or comma-separated
                           line numbers.
lineanchors   string       Wrap each line in an anchor using this
                           string and -linenumber.
linenos       string       If present or set to "table", output line
                           numbers in a table; if set to "inline",
                           output them inline. "none" means do not
                           output the line numbers for this table.
linenospecial number       If set, every nth line will be given the
                           'special' css class.
linenostart   number       Line number for the first line.
linenostep    number       Print every nth line number.
lineseparator string       String to print between lines of code,
                           '\n' by default.
linespans     string       Wrap each line in a span using this and
                           -linenumber.
nobackground  N/A          If set, do not output background color for
                           the wrapping element.
nowrap        N/A          If set, do not wrap the tokens at all.
tagsfile      string       ctags file to use for name definitions.
tagurlformat  string       Format for the ctag links.
============= ============ =============================================

Note that, depending on the version, your Pygments module might not have
all of these options available. Refer to the *HtmlFormatter* section of the
`Pygments documentation <https://pygments.org/docs/formatters/>`_ for more
details on each of the options.

For example, the following code block enables line numbers, starting at 153,
and prefixes the Pygments CSS classes with *pgcss* to make the names
more unique and avoid possible CSS conflicts::

    .. code-block:: identifier
       :classprefix: pgcss
       :linenos: table
       :linenostart: 153

       <indented code block goes here>

It is also possible to specify the ``PYGMENTS_RST_OPTIONS`` variable in your
Pelican settings file to include options that will be automatically applied to
every code block.

For example, if you want to have line numbers displayed for every code block
and a CSS prefix, you would set this variable to::

    PYGMENTS_RST_OPTIONS = {'classprefix': 'pgcss', 'linenos': 'table'}

If specified, settings for individual code blocks will override the defaults in
your settings file.

Publishing drafts
=================

If you want to publish an article or a page as a draft (for friends to review
before publishing, for example), you can add a ``Status: draft`` attribute to
its metadata. That article will then be output to the ``drafts`` folder and not
listed on the index page nor on any category or tag page.

If your articles should be automatically published as a draft (to not
accidentally publish an article before it is finished), include the status in
the ``DEFAULT_METADATA``::

    DEFAULT_METADATA = {
        'status': 'draft',
    }

To publish a post when the default status is ``draft``, update the post's
metadata to include ``Status: published``.

.. _W3C ISO 8601: https://www.w3.org/TR/NOTE-datetime
.. _AsciiDoc: https://www.methods.co.nz/asciidoc/
.. _pelican-plugins: https://github.com/getpelican/pelican-plugins
.. _Python-Markdown: https://github.com/Python-Markdown/markdown
.. _Markdown Extensions: https://python-markdown.github.io/extensions/
.. _CodeHilite extension: https://python-markdown.github.io/extensions/code_hilite/#syntax
.. _i18n_subsites plugin: https://github.com/getpelican/pelican-plugins/tree/master/i18n_subsites
.. _the include directive: http://docutils.sourceforge.net/docs/ref/rst/directives.html#include
.. _mdx_include plugin: https://github.com/neurobin/mdx_include

@@ -1,242 +0,0 @@
Contributing and feedback guidelines
####################################

There are many ways to contribute to Pelican. You can improve the
documentation, add missing features, and fix bugs (or just report them). You
can also help out by reviewing and commenting on
`existing issues <https://github.com/getpelican/pelican/issues>`_.

Don't hesitate to fork Pelican and submit an issue or pull request on GitHub.
When doing so, please consider the following guidelines.

.. include:: ../CONTRIBUTING.rst

Setting up the development environment
======================================

While there are many ways to set up one's development environment, the following
instructions will utilize Pip_ and Poetry_. These tools facilitate managing
virtual environments for separate Python projects that are isolated from one
another, so you can use different packages (and package versions) for each.

Please note that Python 3.6+ is required for Pelican development.

*(Optional)* If you prefer to install Poetry once for use with multiple projects,
you can install it via::

    curl -sSL https://raw.githubusercontent.com/sdispater/poetry/master/get-poetry.py | python

Point your web browser to the `Pelican repository`_ and tap the **Fork** button
at top-right. Then clone the source for your fork and add the upstream project
as a Git remote::

    mkdir ~/projects
    git clone https://github.com/YOUR_USERNAME/pelican.git ~/projects/pelican
    cd ~/projects/pelican
    git remote add upstream https://github.com/getpelican/pelican.git

While Poetry can dynamically create and manage virtual environments, we're going
to manually create and activate a virtual environment::

    mkdir ~/virtualenvs
    python3 -m venv ~/virtualenvs/pelican
    source ~/virtualenvs/pelican/bin/activate

Install the needed dependencies and set up the project::

    pip install invoke
    invoke setup
    pip install -e ~/projects/pelican

Your local environment should now be ready to go!

.. _Pip: https://pip.pypa.io/
.. _Poetry: https://poetry.eustace.io/docs/#installation
.. _Pelican repository: https://github.com/getpelican/pelican

Development
===========

Once Pelican has been set up for local development, create a topic branch for
your bug fix or feature::

    git checkout -b name-of-your-bugfix-or-feature

Now you can make changes to Pelican, its documentation, and/or other aspects of
the project.

Running the test suite
----------------------

Each time you make changes to Pelican, there are two things to do regarding
tests: check that the existing tests pass, and add tests for any new features
or bug fixes. The tests are located in ``pelican/tests``, and you can run them
via::

    invoke tests

In addition to running the test suite, the above invocation will also check code
style and let you know whether non-conforming patterns were found. In some cases
these linters will make the needed changes directly, while in other cases you
may need to make additional changes until ``invoke tests`` no longer reports any
code style violations.

After making your changes and running the tests, you may see a test failure
mentioning that "some generated files differ from the expected functional tests
output." If you have made changes that affect the HTML output generated by
Pelican, and the changes to that output are expected and deemed correct given
the nature of your changes, then you should update the output used by the
functional tests. To do so, **make sure you have both** ``en_EN.utf8`` **and**
``fr_FR.utf8`` **locales installed**, and then run the following command::

    invoke update-functional-tests

You may also find that some tests are skipped because some dependency (e.g.,
Pandoc) is not installed. This does not automatically mean that these tests
have passed; you should at least verify that any skipped tests are not affected
by your changes.

You should run the test suite under each of the supported versions of Python.
This is best done by creating a separate Python environment for each version.
Tox_ is a useful tool to automate running tests inside ``virtualenv``
environments.

.. _Tox: https://tox.readthedocs.io/en/latest/

Building the docs
-----------------

If you make changes to the documentation, you should build and inspect your
changes before committing them::

    invoke docserve

Open http://localhost:8000 in your browser to review the documentation. While
the above task is running, any changes you make and save to the documentation
should automatically appear in the browser, as it live-reloads when it detects
changes to the documentation source files.

Plugin development
------------------

To create a *new* Pelican plugin, please refer to the `plugin template`_
repository for detailed instructions.

If you want to contribute to an *existing* Pelican plugin, follow the steps
above to set up Pelican for local development, and then create a directory to
store cloned plugin repositories::

    mkdir -p ~/projects/pelican-plugins

Assuming you wanted to contribute to the Simple Footnotes plugin, you would
first browse to the `Simple Footnotes`_ repository on GitHub and tap the **Fork**
button at top-right. Then clone the source for your fork and add the upstream
project as a Git remote::

    git clone https://github.com/YOUR_USERNAME/simple-footnotes.git ~/projects/pelican-plugins/simple-footnotes
    cd ~/projects/pelican-plugins/simple-footnotes
    git remote add upstream https://github.com/pelican-plugins/simple-footnotes.git

Install the needed dependencies and set up the project::

    invoke setup

Create a topic branch for your plugin bug fix or feature::

    git checkout -b name-of-your-bugfix-or-feature

After writing new tests for your plugin changes, run the plugin test suite::

    invoke tests

.. _plugin template: https://github.com/getpelican/cookiecutter-pelican-plugin
.. _Simple Footnotes: https://github.com/pelican-plugins/simple-footnotes

Submitting your changes
-----------------------

Assuming linting validation and tests pass, add a ``RELEASE.md`` file in the root
of the project that contains the release type (major, minor, patch) and a
summary of the changes that will be used as the release changelog entry.
For example::

    Release type: patch

    Fix browser reloading upon changes to content, settings, or theme

Commit your changes and push your topic branch::

    git add .
    git commit -m "Your detailed description of your changes"
    git push origin name-of-your-bugfix-or-feature

Finally, browse to your repository fork on GitHub and submit a pull request.


Logging tips
============

Try to use logging with appropriate levels.

For logging messages that are not repeated, use the usual Python way::

    # at top of file
    import logging
    logger = logging.getLogger(__name__)

    # when needed
    logger.warning("A warning with %s formatting", arg_to_be_formatted)

Do not format log messages yourself. Use ``%s`` formatting in messages and pass
the arguments to the logger. This is important, because the Pelican logger will
preprocess some arguments, such as exceptions.

Limiting extraneous log messages
--------------------------------

If the log message can occur several times, you may want to limit the log to
prevent flooding. In order to do that, use the ``extra`` keyword argument for
the logging message in the following format::

    logger.warning("A warning with %s formatting", arg_to_be_formatted,
                   extra={'limit_msg': 'A generic message for too many warnings'})

Optionally, you can also set ``'limit_args'`` as a tuple of arguments in the
``extra`` dict if your generic message needs formatting.

The limit is set to ``5``, i.e., the first four logs with the same
``'limit_msg'`` are output normally, but the fifth one will be logged using
``'limit_msg'`` (and ``'limit_args'`` if present). After the fifth,
corresponding log messages will be ignored.

For example, if you want to log missing resources, use the following code::

    for resource in resources:
        if resource.is_missing:
            logger.warning(
                'The resource %s is missing', resource.name,
                extra={'limit_msg': 'Other resources were missing'})

The log messages will be displayed as follows::

    WARNING: The resource prettiest_cat.jpg is missing
    WARNING: The resource best_cat_ever.jpg is missing
    WARNING: The resource cutest_cat.jpg is missing
    WARNING: The resource lolcat.jpg is missing
    WARNING: Other resources were missing


Outputting traceback in the logs
--------------------------------

If you're logging inside an ``except`` block, you may want to provide the
traceback information as well. You can do that by setting the ``exc_info``
keyword argument to ``True`` during logging. However, doing so by default can
be undesirable because tracebacks are long and can be confusing to regular
users. Try to limit them to ``--debug`` mode like the following::

    try:
        some_action()
    except Exception as e:
        logger.error('Exception occurred: %s', e,
                     exc_info=settings.get('DEBUG', False))

docs/faq.rst
@@ -1,291 +0,0 @@
Frequently Asked Questions (FAQ)
|
|
||||||
################################
|
|
||||||
|
|
||||||
Here are some frequently asked questions about Pelican.
|
|
||||||
|
|
||||||
What's the best way to communicate a problem, question, or suggestion?
|
|
||||||
======================================================================
|
|
||||||
|
|
||||||
Please read our :doc:`feedback guidelines <contribute>`.
|
|
||||||
|
|
||||||
How can I help?
|
|
||||||
===============
|
|
||||||
|
|
||||||
There are several ways to help out. First, you can report any Pelican
|
|
||||||
suggestions or problems you might have via IRC (preferred) or the `issue
|
|
||||||
tracker <https://github.com/getpelican/pelican/issues>`_. If submitting an
|
|
||||||
issue report, please first check the existing issue list (both open and closed)
|
|
||||||
in order to avoid submitting a duplicate issue.
|
|
||||||
|
|
||||||
If you want to contribute, please fork `the git repository
|
|
||||||
<https://github.com/getpelican/pelican/>`_, create a new feature branch, make
|
|
||||||
your changes, and issue a pull request. Someone will review your changes as
|
|
||||||
soon as possible. Please refer to the :doc:`How to Contribute <contribute>`
|
|
||||||
section for more details.
|
|
||||||
|
|
||||||
You can also contribute by creating themes and improving the documentation.
|
|
||||||
|
|
||||||
Is the Pelican settings file mandatory?
|
|
||||||
=======================================
|
|
||||||
|
|
||||||
Configuration files are optional and are just an easy way to configure Pelican.
|
|
||||||
For basic operations, it's possible to specify options while invoking Pelican
|
|
||||||
via the command line. See ``pelican --help`` for more information.
|
|
||||||
|
|
||||||
Changes to the settings file take no effect
|
|
||||||
===========================================
|
|
||||||
|
|
||||||
When experimenting with different settings (especially the metadata ones)
|
|
||||||
caching may interfere and the changes may not be visible. In such cases, ensure
|
|
||||||
that caching is disabled via ``LOAD_CONTENT_CACHE = False`` or use the
|
|
||||||
``--ignore-cache`` command-line switch.
|
|
||||||
|
|
||||||
I'm creating my own theme. How do I use Pygments for syntax highlighting?
|
|
||||||
=========================================================================
|
|
||||||
|
|
||||||
Pygments adds some classes to the generated content. These classes are used by
|
|
||||||
themes to style code syntax highlighting via CSS. Specifically, you can
|
|
||||||
customize the appearance of your syntax highlighting via the ``.highlight pre``
|
|
||||||
class in your theme's CSS file. To see how various styles can be used to render
|
|
||||||
Django code, for example, use the style selector drop-down at top-right on the
|
|
||||||
`Pygments project demo site <https://pygments.org/demo/>`_.
|
|
||||||
|
|
||||||
You can use the following example commands to generate a starting CSS file from
|
|
||||||
a Pygments built-in style (in this case, "monokai") and then copy the generated
|
|
||||||
CSS file to your new theme::
|
|
||||||
|
|
||||||
pygmentize -S monokai -f html -a .highlight > pygment.css
|
|
||||||
cp pygment.css path/to/theme/static/css/
|
|
||||||
|
|
||||||
Don't forget to import your ``pygment.css`` file from your main CSS file.
|
|
||||||
|
|
||||||
How do I create my own theme?
|
|
||||||
=============================
|
|
||||||
|
|
||||||
Please refer to :ref:`theming-pelican`.
|
|
||||||
|
|
||||||
I want to use Markdown, but I got an error.
|
|
||||||
===========================================
|
|
||||||
|
|
||||||
If you try to generate Markdown content without first installing the Markdown
|
|
||||||
library, you may see a message that says ``No valid files found in content``.
|
|
||||||
Markdown is not a hard dependency for Pelican, so if you have content in
|
|
||||||
Markdown format, you will need to explicitly install the Markdown library. You
|
|
||||||
can do so by typing the following command, prepending ``sudo`` if permissions
|
|
||||||
require it::
|
|
||||||
|
|
||||||
pip install markdown
|
|
||||||
|
|
||||||
Can I use arbitrary metadata in my templates?
|
|
||||||
=============================================
|
|
||||||
|
|
||||||
Yes. For example, to include a modified date in a Markdown post, one could
|
|
||||||
include the following at the top of the article::
|
|
||||||
|
|
||||||
Modified: 2012-08-08
|
|
||||||
|
|
||||||
For reStructuredText, this metadata should of course be prefixed with a colon::
|
|
||||||
|
|
||||||
:Modified: 2012-08-08
|
|
||||||
|
|
||||||
This metadata can then be accessed in templates such as ``article.html`` via::
|
|
||||||
|
|
||||||
{% if article.modified %}
|
|
||||||
Last modified: {{ article.modified }}
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
If you want to include metadata in templates outside the article context (e.g.,
|
|
||||||
``base.html``), the ``if`` statement should instead be::
|
|
||||||
|
|
||||||
{% if article and article.modified %}
|
|
||||||
|
|
||||||
How do I assign custom templates on a per-page basis?
|
|
||||||
=====================================================
|
|
||||||
|
|
||||||
It's as simple as adding an extra line of metadata to any page or article that
|
|
||||||
you want to have its own template. For example, this is how it would be handled
|
|
||||||
for content in reST format::
|
|
||||||
|
|
||||||
:template: template_name
|
|
||||||
|
|
||||||
For content in Markdown format::
|
|
||||||
|
|
||||||
Template: template_name
|
|
||||||
|
|
||||||
Then just make sure your theme contains the relevant template file (e.g.
|
|
||||||
``template_name.html``).
|
|
||||||
|
|
||||||
How can I override the generated URL of a specific page or article?
|
|
||||||
===================================================================
|
|
||||||
|
|
||||||
Include ``url`` and ``save_as`` metadata in any pages or articles whose
generated URL you want to override. Here is an example page in reST format::
|
|
||||||
|
|
||||||
Override url/save_as page
|
|
||||||
#########################
|
|
||||||
|
|
||||||
:url: override/url/
|
|
||||||
:save_as: override/url/index.html
|
|
||||||
|
|
||||||
With this metadata, the page will be written to ``override/url/index.html``
|
|
||||||
and Pelican will use the URL ``override/url/`` to link to this page.
|
|
||||||
|
|
||||||
How can I use a static page as my home page?
|
|
||||||
============================================
|
|
||||||
|
|
||||||
The override feature mentioned above can be used to specify a static page as
|
|
||||||
your home page. The following Markdown example could be stored in
|
|
||||||
``content/pages/home.md``::
|
|
||||||
|
|
||||||
Title: Welcome to My Site
|
|
||||||
URL:
|
|
||||||
save_as: index.html
|
|
||||||
|
|
||||||
Thank you for visiting. Welcome!
|
|
||||||
|
|
||||||
If you still want the original blog index, you can save it to a different
location by setting ``INDEX_SAVE_AS = 'blog_index.html'`` for the ``'index'``
direct template.
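
As a sketch, the settings-file side of this setup could look as follows (the
file name ``blog_index.html`` is just the example value used above)::

    # Keep generating the blog index, but write it somewhere other than
    # index.html so the static page above can occupy the site root.
    INDEX_SAVE_AS = 'blog_index.html'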
|
|
||||||
|
|
||||||
What if I want to disable feed generation?
|
|
||||||
==========================================
|
|
||||||
|
|
||||||
To disable feed generation, all feed settings should be set to ``None``. Most
feed settings already default to ``None``, so if you want to disable all feed
generation, you only need to specify the following settings::
|
|
||||||
|
|
||||||
FEED_ALL_ATOM = None
|
|
||||||
CATEGORY_FEED_ATOM = None
|
|
||||||
TRANSLATION_FEED_ATOM = None
|
|
||||||
AUTHOR_FEED_ATOM = None
|
|
||||||
AUTHOR_FEED_RSS = None
|
|
||||||
|
|
||||||
The word ``None`` should not be surrounded by quotes. Please note that ``None``
|
|
||||||
and ``''`` are not the same thing.
|
|
||||||
|
|
||||||
I'm getting a warning about feeds generated without SITEURL being set properly
|
|
||||||
==============================================================================
|
|
||||||
|
|
||||||
`RSS and Atom feeds require all URL links to be absolute
|
|
||||||
<https://validator.w3.org/feed/docs/rss2.html#comments>`_. In order to properly
|
|
||||||
generate links in Pelican, you will need to set ``SITEURL`` to the full URL of
your site.
|
|
||||||
|
|
||||||
Feeds are still generated when this warning is displayed, but links within may
|
|
||||||
be malformed and thus the feed may not validate.
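
For example, a minimal sketch of the relevant setting, assuming your site is
served at ``https://example.com`` (substitute your own domain)::

    # Feeds need absolute links, so give Pelican the full URL of the site.
    # Note: SITEURL should not end with a trailing slash.
    SITEURL = 'https://example.com'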
|
|
||||||
|
|
||||||
My feeds are broken since I upgraded to Pelican 3.x
|
|
||||||
===================================================
|
|
||||||
|
|
||||||
Starting in 3.0, some of the FEED setting names were changed to more explicitly
|
|
||||||
refer to the Atom feeds they inherently represent (much like the FEED_RSS
|
|
||||||
setting names). Here is an exact list of the renamed settings::
|
|
||||||
|
|
||||||
FEED -> FEED_ATOM
|
|
||||||
TAG_FEED -> TAG_FEED_ATOM
|
|
||||||
CATEGORY_FEED -> CATEGORY_FEED_ATOM
|
|
||||||
|
|
||||||
Starting in 3.1, the new feed ``FEED_ALL_ATOM`` has been introduced: this feed
|
|
||||||
will aggregate all posts regardless of their language. This setting generates
|
|
||||||
``'feeds/all.atom.xml'`` by default and ``FEED_ATOM`` now defaults to ``None``.
|
|
||||||
The following feed setting has also been renamed::
|
|
||||||
|
|
||||||
TRANSLATION_FEED -> TRANSLATION_FEED_ATOM
|
|
||||||
|
|
||||||
Older themes that referenced the old setting names may not link properly. To
rectify this, please update your theme for compatibility by changing the
relevant values in your template files. For an example of complete feed
headers and usage, please check out the ``simple`` theme.
|
|
||||||
|
|
||||||
Is Pelican only suitable for blogs?
|
|
||||||
===================================
|
|
||||||
|
|
||||||
No. Pelican can be easily configured to create and maintain any type of static
|
|
||||||
site. This may require a little customization of your theme and Pelican
|
|
||||||
configuration. For example, if you are building a launch site for your product
|
|
||||||
and do not need tags on your site, you could remove the relevant HTML code from
|
|
||||||
your theme. You can also disable generation of tag-related pages via::
|
|
||||||
|
|
||||||
TAGS_SAVE_AS = ''
|
|
||||||
TAG_SAVE_AS = ''
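
Assuming your Pelican version exposes the corresponding ``*_SAVE_AS`` settings
(check the settings documentation for your release), other listing pages can be
disabled the same way; for example::

    # Illustrative sketch: skip author, category, and archive listing pages
    AUTHOR_SAVE_AS = ''
    AUTHORS_SAVE_AS = ''
    CATEGORY_SAVE_AS = ''
    CATEGORIES_SAVE_AS = ''
    ARCHIVES_SAVE_AS = ''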
|
|
||||||
|
|
||||||
Why does Pelican always write all HTML files even with content caching enabled?
|
|
||||||
===============================================================================
|
|
||||||
|
|
||||||
In order to reliably determine whether the HTML output is different before
writing it, a large part of the generation environment (including the template
contexts, imported plugins, etc.) would have to be saved and compared, at least
in the form of a hash (which would require special handling of unhashable
types), because all the possible combinations of plugins, pagination, etc. may
change in many different ways. This would require far more processing time,
memory, and storage space. Simply writing the files each time is much faster
and more reliable.
|
|
||||||
|
|
||||||
However, this means that the modification time of the files changes every time,
|
|
||||||
so an ``rsync``-based upload will transfer them even if their content hasn't
|
|
||||||
changed. A simple solution is to make ``rsync`` use the ``--checksum`` option,
|
|
||||||
which will make it compare the file checksums in a much faster way than Pelican
|
|
||||||
would.
|
|
||||||
|
|
||||||
When only a few specific output files are of interest (e.g. when working on a
specific page or on the theme templates), the ``WRITE_SELECTED`` option may
help; see :ref:`writing_only_selected_content`.
|
|
||||||
|
|
||||||
How to process only a subset of all articles?
|
|
||||||
=============================================
|
|
||||||
|
|
||||||
It is often useful to process only a handful of articles (e.g., 10) for
debugging purposes. This can be achieved by explicitly listing only the
filenames of those articles in ``ARTICLE_PATHS``. A list of such filenames can
be generated with a command similar to
``cd content; find . -name '*.md' | head -n 10``.
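
A corresponding sketch of the setting, with purely illustrative file names
(paths are relative to your content directory)::

    # Only these articles are processed; everything else is skipped
    # for this debugging run.
    ARTICLE_PATHS = [
        'posts/2023-01-hello-world.md',
        'posts/2023-02-second-post.md',
    ]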
|
|
||||||
|
|
||||||
My tag-cloud is missing/broken since I upgraded Pelican
|
|
||||||
=======================================================
|
|
||||||
|
|
||||||
In an ongoing effort to streamline Pelican, ``tag_cloud`` generation has been
moved out of the Pelican core and into a separate `plugin
<https://github.com/getpelican/pelican-plugins/tree/master/tag_cloud>`_. See
the :ref:`plugins` documentation for further information about the Pelican
plugin system.
|
|
||||||
|
|
||||||
Since I upgraded Pelican my pages are no longer rendered
|
|
||||||
========================================================
|
|
||||||
|
|
||||||
Pages were available to themes as lowercase ``pages`` and uppercase ``PAGES``.
|
|
||||||
To bring this in line with the :ref:`templates-variables` section, ``PAGES`` has
|
|
||||||
been removed. This is quickly resolved by updating your theme to iterate over
|
|
||||||
``pages`` instead of ``PAGES``. Just replace::
|
|
||||||
|
|
||||||
{% for pg in PAGES %}
|
|
||||||
|
|
||||||
with something like::
|
|
||||||
|
|
||||||
{% for pg in pages %}
|
|
||||||
|
|
||||||
How can I stop Pelican from trying to parse my static files as content?
|
|
||||||
=======================================================================
|
|
||||||
|
|
||||||
Pelican's article and page generators run before its static generator. That
|
|
||||||
means if you use a setup similar to the default configuration, where a static
|
|
||||||
source directory is defined inside a ``*_PATHS`` setting, all files that have a
|
|
||||||
valid content file ending (``.html``, ``.rst``, ``.md``, ...) will be treated
|
|
||||||
as articles or pages before they get treated as static files.
|
|
||||||
|
|
||||||
To circumvent this issue, either use the appropriate ``*_EXCLUDES`` setting or
|
|
||||||
disable the offending reader via ``READERS`` if you don't need it.
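
For instance, a sketch of both approaches in a settings file (the ``'static'``
directory name and the ``html`` key are only examples)::

    # Keep the article and page generators away from the static source dir...
    ARTICLE_EXCLUDES = ['static']
    PAGE_EXCLUDES = ['static']

    # ...or disable a reader you don't need, e.g. the HTML reader.
    READERS = {'html': None}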
|
|
||||||
|
|
||||||
Why is [arbitrary Markdown syntax] not supported?
|
|
||||||
=================================================
|
|
||||||
|
|
||||||
Pelican does not directly handle Markdown processing and instead delegates that
|
|
||||||
task to the Python-Markdown_ project, the core of which purposefully follows
|
|
||||||
the original Markdown syntax rules and not the myriad Markdown "flavors" that
|
|
||||||
have subsequently propagated. That said, Python-Markdown_ is quite modular, and
|
|
||||||
the syntax you are looking for may be provided by one of the many available
|
|
||||||
`Markdown Extensions`_. Alternatively, some folks have created Pelican plugins
|
|
||||||
that support Markdown variants, so that may be your best choice if there is a
|
|
||||||
particular variant you want to use when writing your content.
|
|
||||||
|
|
||||||
|
|
||||||
.. _Python-Markdown: https://github.com/Python-Markdown/markdown
|
|
||||||
.. _Markdown Extensions: https://python-markdown.github.io/extensions/
|
|
||||||
|
|
|
||||||
.. _import:
|
|
||||||
|
|
||||||
Importing an existing site
|
|
||||||
##########################
|
|
||||||
|
|
||||||
Description
|
|
||||||
===========
|
|
||||||
|
|
||||||
``pelican-import`` is a command-line tool for converting articles from other
|
|
||||||
software to reStructuredText or Markdown. The supported import formats are:
|
|
||||||
|
|
||||||
- Blogger XML export
|
|
||||||
- Dotclear export
|
|
||||||
- Posterous API
|
|
||||||
- Tumblr API
|
|
||||||
- WordPress XML export
|
|
||||||
- RSS/Atom feed
|
|
||||||
|
|
||||||
The conversion from HTML to reStructuredText or Markdown relies on `Pandoc`_.
|
|
||||||
For Dotclear, if the source posts are written with Markdown syntax, they will
|
|
||||||
not be converted (as Pelican also supports Markdown).
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
Unlike Pelican, WordPress supports multiple categories per article. These
|
|
||||||
are imported as a comma-separated string. You have to resolve these
|
|
||||||
manually, or use a plugin that enables multiple categories per article
|
|
||||||
(like `more_categories`_).
|
|
||||||
|
|
||||||
Dependencies
|
|
||||||
============
|
|
||||||
|
|
||||||
``pelican-import`` has some dependencies not required by the rest of Pelican:
|
|
||||||
|
|
||||||
- *BeautifulSoup4* and *lxml*, for WordPress and Dotclear import. Can be
|
|
||||||
installed like any other Python package (``pip install BeautifulSoup4
|
|
||||||
lxml``).
|
|
||||||
- *Feedparser*, for feed import (``pip install feedparser``).
|
|
||||||
- *Pandoc*, see the `Pandoc site`_ for installation instructions on your
|
|
||||||
operating system.
|
|
||||||
|
|
||||||
.. _Pandoc: https://pandoc.org/
|
|
||||||
.. _Pandoc site: https://pandoc.org/installing.html
|
|
||||||
|
|
||||||
|
|
||||||
Usage
|
|
||||||
=====
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
pelican-import [-h] [--blogger] [--dotclear] [--posterous] [--tumblr] [--wpfile] [--feed]
|
|
||||||
[-o OUTPUT] [-m MARKUP] [--dir-cat] [--dir-page] [--strip-raw] [--wp-custpost]
|
|
||||||
[--wp-attach] [--disable-slugs] [-e EMAIL] [-p PASSWORD] [-b BLOGNAME]
|
|
||||||
input|api_token|api_key
|
|
||||||
|
|
||||||
Positional arguments
|
|
||||||
--------------------
|
|
||||||
============= ============================================================================
|
|
||||||
``input`` The input file to read
|
|
||||||
``api_token`` (Posterous only) api_token can be obtained from http://posterous.com/api/
|
|
||||||
``api_key`` (Tumblr only) api_key can be obtained from https://www.tumblr.com/oauth/apps
|
|
||||||
============= ============================================================================
|
|
||||||
|
|
||||||
Optional arguments
|
|
||||||
------------------
|
|
||||||
|
|
||||||
-h, --help Show this help message and exit
|
|
||||||
--blogger Blogger XML export (default: False)
|
|
||||||
--dotclear Dotclear export (default: False)
|
|
||||||
--posterous Posterous API (default: False)
|
|
||||||
--tumblr Tumblr API (default: False)
|
|
||||||
--wpfile WordPress XML export (default: False)
|
|
||||||
--feed Feed to parse (default: False)
|
|
||||||
-o OUTPUT, --output OUTPUT
|
|
||||||
Output path (default: content)
|
|
||||||
-m MARKUP, --markup MARKUP
|
|
||||||
Output markup format: ``rst``, ``markdown``, or ``asciidoc``
|
|
||||||
(default: ``rst``)
|
|
||||||
--dir-cat Put files in directories with categories name
|
|
||||||
(default: False)
|
|
||||||
--dir-page Put files recognised as pages in "pages/" sub-
|
|
||||||
directory (blogger and wordpress import only)
|
|
||||||
(default: False)
|
|
||||||
--filter-author       Import only posts from the specified author
|
|
||||||
--strip-raw Strip raw HTML code that can't be converted to markup
|
|
||||||
such as flash embeds or iframes (wordpress import
|
|
||||||
only) (default: False)
|
|
||||||
--wp-custpost Put wordpress custom post types in directories. If
|
|
||||||
used with --dir-cat option directories will be created
|
|
||||||
as "/post_type/category/" (wordpress import only)
|
|
||||||
--wp-attach Download files uploaded to wordpress as attachments.
|
|
||||||
Files will be added to posts as a list in the post
|
|
||||||
header and links to the files within the post will be
|
|
||||||
updated. All files will be downloaded, even if they
|
|
||||||
aren't associated with a post. Files will be downloaded
|
|
||||||
with their original path inside the output directory,
|
|
||||||
e.g. "output/wp-uploads/date/postname/file.jpg".
|
|
||||||
(wordpress import only) (requires an internet
|
|
||||||
connection)
|
|
||||||
--disable-slugs Disable storing slugs from imported posts within
|
|
||||||
output. With this disabled, your Pelican URLs may not
|
|
||||||
be consistent with your original posts. (default:
|
|
||||||
False)
|
|
||||||
-e EMAIL, --email=EMAIL
|
|
||||||
Email used to authenticate Posterous API
|
|
||||||
-p PASSWORD, --password=PASSWORD
|
|
||||||
Password used to authenticate Posterous API
|
|
||||||
-b BLOGNAME, --blogname=BLOGNAME
|
|
||||||
Blog name used in Tumblr API
|
|
||||||
|
|
||||||
|
|
||||||
Examples
|
|
||||||
========
|
|
||||||
|
|
||||||
For Blogger::
|
|
||||||
|
|
||||||
$ pelican-import --blogger -o ~/output ~/posts.xml
|
|
||||||
|
|
||||||
For Dotclear::
|
|
||||||
|
|
||||||
$ pelican-import --dotclear -o ~/output ~/backup.txt
|
|
||||||
|
|
||||||
For Posterous::
|
|
||||||
|
|
||||||
$ pelican-import --posterous -o ~/output --email=<email_address> --password=<password> <api_token>
|
|
||||||
|
|
||||||
For Tumblr::
|
|
||||||
|
|
||||||
$ pelican-import --tumblr -o ~/output --blogname=<blogname> <api_token>
|
|
||||||
|
|
||||||
For WordPress::
|
|
||||||
|
|
||||||
$ pelican-import --wpfile -o ~/output ~/posts.xml
|
|
||||||
|
|
||||||
Tests
|
|
||||||
=====
|
|
||||||
|
|
||||||
To test the module, one can use sample files:
|
|
||||||
|
|
||||||
- for WordPress: https://www.wpbeginner.com/wp-themes/how-to-add-dummy-content-for-theme-development-in-wordpress/
|
|
||||||
- for Dotclear: http://media.dotaddict.org/tda/downloads/lorem-backup.txt
|
|
||||||
|
|
||||||
.. _more_categories: https://github.com/getpelican/pelican-plugins/tree/master/more_categories
|
|
||||||
|
|
|
||||||
Pelican |release|
|
|
||||||
=================
|
|
||||||
|
|
||||||
|
|
||||||
.. ifconfig:: release.endswith('.dev')
|
|
||||||
|
|
||||||
.. warning::
|
|
||||||
|
|
||||||
This documentation is for the version of Pelican currently under
|
|
||||||
development. Were you looking for version |last_stable| documentation?
|
|
||||||
|
|
||||||
|
|
||||||
Pelican is a static site generator, written in Python_. Highlights include:
|
|
||||||
|
|
||||||
* Write your content directly with your editor of choice in reStructuredText_
|
|
||||||
or Markdown_ formats
|
|
||||||
* Includes a simple CLI tool to (re)generate your site
|
|
||||||
* Easy to interface with distributed version control systems and web hooks
|
|
||||||
* Completely static output is easy to host anywhere
|
|
||||||
|
|
||||||
Ready to get started? Check out the :doc:`Quickstart<quickstart>` guide.
|
|
||||||
|
|
||||||
Features
|
|
||||||
--------
|
|
||||||
|
|
||||||
Pelican |version| currently supports:
|
|
||||||
|
|
||||||
* Articles (e.g., blog posts) and pages (e.g., "About", "Projects", "Contact")
|
|
||||||
* Comments, via an external service (Disqus). If you prefer to have more
|
|
||||||
control over your comment data, self-hosted comments are another option.
|
|
||||||
Check out the `Pelican Plugins`_ repository for more details.
|
|
||||||
* Theming support (themes are created using Jinja2_ templates)
|
|
||||||
* Publication of articles in multiple languages
|
|
||||||
* Atom/RSS feeds
|
|
||||||
* Code syntax highlighting
|
|
||||||
* Import from WordPress, Dotclear, or RSS feeds
|
|
||||||
* Integration with external tools: Twitter, Google Analytics, etc. (optional)
|
|
||||||
* Fast rebuild times thanks to content caching and selective output writing
|
|
||||||
|
|
||||||
Why the name "Pelican"?
|
|
||||||
-----------------------
|
|
||||||
|
|
||||||
"Pelican" is an anagram for *calepin*, which means "notebook" in French. ;)
|
|
||||||
|
|
||||||
Source code
|
|
||||||
-----------
|
|
||||||
|
|
||||||
You can access the source code at: https://github.com/getpelican/pelican
|
|
||||||
|
|
||||||
How to get help, contribute, or provide feedback
|
|
||||||
------------------------------------------------
|
|
||||||
|
|
||||||
See our :doc:`feedback and contribution submission guidelines <contribute>`.
|
|
||||||
|
|
||||||
Documentation
|
|
||||||
-------------
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 2
|
|
||||||
|
|
||||||
quickstart
|
|
||||||
install
|
|
||||||
content
|
|
||||||
publish
|
|
||||||
settings
|
|
||||||
themes
|
|
||||||
plugins
|
|
||||||
pelican-themes
|
|
||||||
importer
|
|
||||||
faq
|
|
||||||
tips
|
|
||||||
contribute
|
|
||||||
internals
|
|
||||||
report
|
|
||||||
changelog
|
|
||||||
|
|
||||||
.. Links
|
|
||||||
|
|
||||||
.. _Python: https://www.python.org/
|
|
||||||
.. _reStructuredText: http://docutils.sourceforge.net/rst.html
|
|
||||||
.. _Markdown: https://daringfireball.net/projects/markdown/
|
|
||||||
.. _Jinja2: https://palletsprojects.com/p/jinja/
|
|
||||||
.. _`Pelican documentation`: https://docs.getpelican.com/latest/
|
|
||||||
.. _`Pelican's internals`: https://docs.getpelican.com/en/latest/internals.html
|
|
||||||
.. _`Pelican plugins`: https://github.com/getpelican/pelican-plugins
|
|
||||||
|
|
|
||||||
Installing Pelican
|
|
||||||
##################
|
|
||||||
|
|
||||||
Pelican currently runs best on Python 2.7.x and 3.5+; earlier versions of
|
|
||||||
Python are not supported.
|
|
||||||
|
|
||||||
You can install Pelican via several different methods. The simplest is via
|
|
||||||
`pip <https://pip.pypa.io/en/stable/>`_::
|
|
||||||
|
|
||||||
pip install pelican
|
|
||||||
|
|
||||||
Or, if you plan on using Markdown::
|
|
||||||
|
|
||||||
pip install pelican[Markdown]
|
|
||||||
|
|
||||||
(Keep in mind that operating systems will often require you to prefix the above
|
|
||||||
command with ``sudo`` in order to install Pelican system-wide.)
|
|
||||||
|
|
||||||
While the above is the simplest method, the recommended approach is to create a
|
|
||||||
virtual environment for Pelican via virtualenv_ before installing Pelican.
|
|
||||||
Assuming you have virtualenv_ installed, you can then open a new terminal
|
|
||||||
session and create a new virtual environment for Pelican::
|
|
||||||
|
|
||||||
virtualenv ~/virtualenvs/pelican
|
|
||||||
cd ~/virtualenvs/pelican
|
|
||||||
source bin/activate
|
|
||||||
|
|
||||||
Once the virtual environment has been created and activated, Pelican can be
|
|
||||||
installed via ``pip install pelican`` as noted above. Alternatively, if you
|
|
||||||
have the project source, you can install Pelican using the distutils method::
|
|
||||||
|
|
||||||
cd path-to-Pelican-source
|
|
||||||
python setup.py install
|
|
||||||
|
|
||||||
If you have Git installed and prefer to install the latest bleeding-edge
|
|
||||||
version of Pelican rather than a stable release, use the following command::
|
|
||||||
|
|
||||||
pip install -e "git+https://github.com/getpelican/pelican.git#egg=pelican"
|
|
||||||
|
|
||||||
Once Pelican is installed, you can run ``pelican --help`` to see basic usage
|
|
||||||
options. For more detail, refer to the :doc:`Publish<publish>` section.
|
|
||||||
|
|
||||||
Optional packages
|
|
||||||
-----------------
|
|
||||||
|
|
||||||
If you plan on using `Markdown <https://pypi.org/project/Markdown/>`_ as a
|
|
||||||
markup format, you can install Pelican with Markdown support::
|
|
||||||
|
|
||||||
pip install pelican[Markdown]
|
|
||||||
|
|
||||||
Or you might need to install it afterward::
|
|
||||||
|
|
||||||
pip install Markdown
|
|
||||||
|
|
||||||
Typographical enhancements can be enabled in your settings file, but first the
|
|
||||||
requisite `Typogrify <https://pypi.org/project/typogrify/>`_ library must be
|
|
||||||
installed::
|
|
||||||
|
|
||||||
pip install typogrify
|
|
||||||
|
|
||||||
Dependencies
|
|
||||||
------------
|
|
||||||
|
|
||||||
When Pelican is installed, the following dependent Python packages should be
|
|
||||||
automatically installed without any action on your part:
|
|
||||||
|
|
||||||
* `feedgenerator <https://pypi.org/project/feedgenerator/>`_, to generate the
|
|
||||||
Atom feeds
|
|
||||||
* `jinja2 <https://pypi.org/project/Jinja2/>`_, for templating support
|
|
||||||
* `pygments <https://pypi.org/project/Pygments/>`_, for syntax highlighting
|
|
||||||
* `docutils <https://pypi.org/project/docutils/>`_, for supporting
|
|
||||||
reStructuredText as an input format
|
|
||||||
* `pytz <https://pypi.org/project/pytz/>`_, for timezone definitions
|
|
||||||
* `blinker <https://pypi.org/project/blinker/>`_, an object-to-object and
|
|
||||||
broadcast signaling system
|
|
||||||
* `unidecode <https://pypi.org/project/Unidecode/>`_, for ASCII
|
|
||||||
transliterations of Unicode text
|
|
||||||
* `six <https://pypi.org/project/six/>`_, for Python 2 and 3 compatibility
|
|
||||||
utilities
|
|
||||||
* `MarkupSafe <https://pypi.org/project/MarkupSafe/>`_, for a markup safe
|
|
||||||
string implementation
|
|
||||||
* `python-dateutil <https://pypi.org/project/python-dateutil/>`_, to read
|
|
||||||
the date metadata
|
|
||||||
|
|
||||||
Upgrading
|
|
||||||
---------
|
|
||||||
|
|
||||||
If you installed a stable Pelican release via ``pip`` and wish to upgrade to
|
|
||||||
the latest stable release, you can do so by adding ``--upgrade``::
|
|
||||||
|
|
||||||
pip install --upgrade pelican
|
|
||||||
|
|
||||||
If you installed Pelican via distutils or the bleeding-edge method, simply
|
|
||||||
perform the same step to install the most recent version.
|
|
||||||
|
|
||||||
Kickstart your site
|
|
||||||
-------------------
|
|
||||||
|
|
||||||
Once Pelican has been installed, you can create a skeleton project via the
|
|
||||||
``pelican-quickstart`` command, which begins by asking some questions about
|
|
||||||
your site::
|
|
||||||
|
|
||||||
pelican-quickstart
|
|
||||||
|
|
||||||
If run inside an activated virtual environment, ``pelican-quickstart`` will
|
|
||||||
look for an associated project path inside ``$VIRTUAL_ENV/.project``. If that
|
|
||||||
file exists and contains a valid directory path, the new Pelican project will
|
|
||||||
be saved at that location. Otherwise, the default is the current working
|
|
||||||
directory. To set the new project path on initial invocation, use:
|
|
||||||
``pelican-quickstart --path /your/desired/directory``
|
|
||||||
|
|
||||||
Once you finish answering all the questions, your project will consist of the
|
|
||||||
following hierarchy (except for *pages* — shown in parentheses below — which
|
|
||||||
you can optionally add yourself if you plan to create non-chronological
|
|
||||||
content)::
|
|
||||||
|
|
||||||
yourproject/
|
|
||||||
├── content
|
|
||||||
│ └── (pages)
|
|
||||||
├── output
|
|
||||||
├── tasks.py
|
|
||||||
├── Makefile
|
|
||||||
├── pelicanconf.py # Main settings file
|
|
||||||
└── publishconf.py # Settings to use when ready to publish
|
|
||||||
|
|
||||||
The next step is to begin adding content to the *content* folder that has
|
|
||||||
been created for you.
|
|
||||||
|
|
||||||
.. _virtualenv: https://virtualenv.pypa.io/en/latest/
|
|
||||||
|
|
|
||||||
Pelican internals
|
|
||||||
#################
|
|
||||||
|
|
||||||
This section describes how Pelican works internally. As you'll see, it's quite
|
|
||||||
simple, but a bit of documentation doesn't hurt. :)
|
|
||||||
|
|
||||||
You can also find in the :doc:`report` section an excerpt of a report the
|
|
||||||
original author wrote with some software design information.
|
|
||||||
|
|
||||||
.. _report: :doc:`report`
|
|
||||||
|
|
||||||
Overall structure
|
|
||||||
=================
|
|
||||||
|
|
||||||
What Pelican does is take a list of files and process them into some sort of
|
|
||||||
output. Usually, the input files are reStructuredText and Markdown files, and
|
|
||||||
the output is a blog, but both input and output can be anything you want.
|
|
||||||
|
|
||||||
The logic is separated into different classes and concepts:
|
|
||||||
|
|
||||||
* **Writers** are responsible for writing files: .html files, RSS feeds, and so
|
|
||||||
on. Since those operations are commonly used, the object is created once and
|
|
||||||
then passed to the generators.
|
|
||||||
|
|
||||||
* **Readers** are used to read from various formats (HTML, Markdown and
|
|
||||||
reStructuredText for now, but the system is extensible). Given a file, they
|
|
||||||
return metadata (author, tags, category, etc.) and content (HTML-formatted).
|
|
||||||
|
|
||||||
* **Generators** generate the different outputs. For instance, Pelican comes
|
|
||||||
with ``ArticlesGenerator`` and ``PageGenerator``. Given a configuration, they
|
|
||||||
can do whatever they want. Most of the time, it's generating files from
|
|
||||||
inputs.
|
|
||||||
|
|
||||||
* Pelican also uses templates, so it's easy to write your own theme. The
|
|
||||||
syntax is `Jinja2 <https://palletsprojects.com/p/jinja/>`_ and is very easy to learn, so
|
|
||||||
don't hesitate to jump in and build your own theme.
|
|
||||||
|
|
||||||
How to implement a new reader?
|
|
||||||
==============================
|
|
||||||
|
|
||||||
Is there an awesome markup language you want to add to Pelican? Well, the only
|
|
||||||
thing you have to do is to create a class with a ``read`` method that returns
|
|
||||||
HTML content and some metadata.
|
|
||||||
|
|
||||||
Take a look at the Markdown reader::
|
|
||||||
|
|
||||||
class MarkdownReader(BaseReader):
|
|
||||||
enabled = bool(Markdown)
|
|
||||||
|
|
||||||
def read(self, source_path):
|
|
||||||
"""Parse content and metadata of markdown files"""
|
|
||||||
text = pelican_open(source_path)
|
|
||||||
md_extensions = {'markdown.extensions.meta': {},
|
|
||||||
'markdown.extensions.codehilite': {}}
|
|
||||||
md = Markdown(extensions=md_extensions.keys(),
|
|
||||||
extension_configs=md_extensions)
|
|
||||||
content = md.convert(text)
|
|
||||||
|
|
||||||
metadata = {}
|
|
||||||
for name, value in md.Meta.items():
|
|
||||||
name = name.lower()
|
|
||||||
meta = self.process_metadata(name, value[0])
|
|
||||||
metadata[name] = meta
|
|
||||||
return content, metadata
|
|
||||||
|
|
||||||
Simple, isn't it?
|
|
||||||
|
|
||||||
If your new reader requires additional Python dependencies, then you should
|
|
||||||
wrap their ``import`` statements in a ``try...except`` block. Then inside the
|
|
||||||
reader's class, set the ``enabled`` class attribute to mark import success or
|
|
||||||
failure. This makes it possible for users to continue using their favourite
|
|
||||||
markup method without needing to install modules for formats they don't use.
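
A small sketch of that pattern, using an invented optional dependency named
``fancy_markup`` (a real reader would import its actual parser instead)::

    from pelican.readers import BaseReader

    try:
        import fancy_markup  # hypothetical optional dependency
    except ImportError:
        fancy_markup = False

    class FancyReader(BaseReader):
        # The reader is simply disabled if the dependency is missing.
        enabled = bool(fancy_markup)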
|
|
||||||
|
|
||||||
How to implement a new generator?
|
|
||||||
=================================
|
|
||||||
|
|
||||||
Generators have two important methods. You're not forced to create both; only
|
|
||||||
the existing ones will be called.
|
|
||||||
|
|
||||||
* ``generate_context``, that is called first, for all the generators.
|
|
||||||
Do whatever you have to do, and update the global context if needed. This
|
|
||||||
context is shared between all generators, and will be passed to the
|
|
||||||
templates. For instance, the ``PageGenerator`` ``generate_context`` method
|
|
||||||
finds all the pages, transforms them into objects, and populates the context
|
|
||||||
with them. Be careful *not* to output anything using this context at this
stage, as it is likely to be changed by other generators.
|
|
||||||
|
|
||||||
* ``generate_output`` is then called. And guess what it is made for? Oh,
|
|
||||||
generating the output. :) It's here that you may want to look at the context
|
|
||||||
and call the methods of the ``writer`` object that is passed as the first
|
|
||||||
argument of this function. In the ``PageGenerator`` example, this method will
|
|
||||||
look at all the pages recorded in the global context and output a file on the
|
|
||||||
disk (using the writer method ``write_file``) for each page encountered.
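
To make those two methods concrete, here is a minimal, illustrative sketch of a
custom generator (the class, template, and file names are invented, and the
exact generator/writer signatures can vary between Pelican versions, so treat
this as a starting point rather than a reference)::

    from pelican.generators import Generator

    class GreetingGenerator(Generator):
        """Toy generator: add something to the context, then write one file."""

        def generate_context(self):
            # Called first for every generator: only update the shared context.
            self.context['greeting'] = 'Hello from a custom generator'

        def generate_output(self, writer):
            # Called second: read the (now complete) context and emit output
            # through the writer object that Pelican passes in.
            template = self.get_template('page')  # any template from the theme
            writer.write_file('greeting.html', template, self.context,
                              relative_urls=self.settings['RELATIVE_URLS'])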
|
|
||||||
|
|
|
||||||
pelican-themes
|
|
||||||
##############
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Description
|
|
||||||
===========
|
|
||||||
|
|
||||||
``pelican-themes`` is a command-line tool for managing themes for Pelican. See
|
|
||||||
:ref:`settings/themes` for settings related to themes.
|
|
||||||
|
|
||||||
|
|
||||||
Usage
|
|
||||||
"""""
|
|
||||||
|
|
||||||
| pelican-themes [-h] [-l] [-i theme path [theme path ...]]
|
|
||||||
| [-r theme name [theme name ...]]
|
|
||||||
| [-s theme path [theme path ...]] [-v] [--version]
|
|
||||||
|
|
||||||
Optional arguments:
|
|
||||||
"""""""""""""""""""
|
|
||||||
|
|
||||||
|
|
||||||
-h, --help            Show the help and exit
|
|
||||||
|
|
||||||
-l, --list Show the themes already installed
|
|
||||||
|
|
||||||
-i theme_path, --install theme_path One or more themes to install
|
|
||||||
|
|
||||||
-r theme_name, --remove theme_name One or more themes to remove
|
|
||||||
|
|
||||||
-s theme_path, --symlink theme_path Same as "--install", but create a symbolic link instead of copying the theme.
|
|
||||||
Useful for theme development
|
|
||||||
|
|
||||||
-v, --verbose Verbose output
|
|
||||||
|
|
||||||
--version Print the version of this script
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Examples
|
|
||||||
========
|
|
||||||
|
|
||||||
|
|
||||||
Listing the installed themes
|
|
||||||
""""""""""""""""""""""""""""
|
|
||||||
|
|
||||||
With ``pelican-themes``, you can see the available themes by using the ``-l``
|
|
||||||
or ``--list`` option:
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
$ pelican-themes -l
|
|
||||||
notmyidea
|
|
||||||
two-column@
|
|
||||||
simple
|
|
||||||
$ pelican-themes --list
|
|
||||||
notmyidea
|
|
||||||
two-column@
|
|
||||||
simple
|
|
||||||
|
|
||||||
In this example, we can see there are three themes available: ``notmyidea``,
|
|
||||||
``simple``, and ``two-column``.
|
|
||||||
|
|
||||||
``two-column`` is prefixed with an ``@`` because this theme is not copied to
|
|
||||||
the Pelican theme path, but is instead just linked to it (see `Creating
|
|
||||||
symbolic links`_ for details about creating symbolic links).
|
|
||||||
|
|
||||||
Note that you can combine the ``--list`` option with the ``-v`` or
|
|
||||||
``--verbose`` option to get more verbose output, like this:
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
$ pelican-themes -v -l
|
|
||||||
/usr/local/lib/python2.6/dist-packages/pelican-2.6.0-py2.6.egg/pelican/themes/notmyidea
|
|
||||||
/usr/local/lib/python2.6/dist-packages/pelican-2.6.0-py2.6.egg/pelican/themes/two-column (symbolic link to `/home/skami/Dev/Python/pelican-themes/two-column')
|
|
||||||
/usr/local/lib/python2.6/dist-packages/pelican-2.6.0-py2.6.egg/pelican/themes/simple
|
|
||||||
|
|
||||||
|
|
||||||
Installing themes
|
|
||||||
"""""""""""""""""
|
|
||||||
|
|
||||||
You can install one or more themes using the ``-i`` or ``--install`` option.
|
|
||||||
This option takes as argument the path(s) of the theme(s) you want to install,
|
|
||||||
and can be combined with the verbose option:
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
# pelican-themes --install ~/Dev/Python/pelican-themes/notmyidea-cms --verbose
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
# pelican-themes --install ~/Dev/Python/pelican-themes/notmyidea-cms\
|
|
||||||
~/Dev/Python/pelican-themes/martyalchin \
|
|
||||||
--verbose
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
# pelican-themes -vi ~/Dev/Python/pelican-themes/two-column
|
|
||||||
|
|
||||||
|
|
||||||
Removing themes
|
|
||||||
"""""""""""""""
|
|
||||||
|
|
||||||
The ``pelican-themes`` command can also remove themes from the Pelican themes
|
|
||||||
path. The ``-r`` or ``--remove`` option takes as argument the name(s) of the
|
|
||||||
theme(s) you want to remove, and can be combined with the ``--verbose`` option.
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
# pelican-themes --remove two-column
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
    # pelican-themes -r martyalchin notmyidea-cms -v
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Creating symbolic links
|
|
||||||
"""""""""""""""""""""""
|
|
||||||
|
|
||||||
``pelican-themes`` can also install themes by creating symbolic links instead
|
|
||||||
of copying entire themes into the Pelican themes path.
|
|
||||||
|
|
||||||
To symbolically link a theme, you can use the ``-s`` or ``--symlink`` option,
which works exactly like the ``--install`` option:
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
# pelican-themes --symlink ~/Dev/Python/pelican-themes/two-column
|
|
||||||
|
|
||||||
In this example, the ``two-column`` theme is now symbolically linked to the
|
|
||||||
Pelican themes path, so we can use it, but we can also modify it without having
|
|
||||||
to reinstall it after each modification.
|
|
||||||
|
|
||||||
This is useful for theme development:
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
$ sudo pelican-themes -s ~/Dev/Python/pelican-themes/two-column
|
|
||||||
$ pelican ~/Blog/content -o /tmp/out -t two-column
|
|
||||||
$ firefox /tmp/out/index.html
|
|
||||||
$ vim ~/Dev/Pelican/pelican-themes/two-column/static/css/main.css
|
|
||||||
$ pelican ~/Blog/content -o /tmp/out -t two-column
|
|
||||||
$ cp /tmp/bg.png ~/Dev/Pelican/pelican-themes/two-column/static/img/bg.png
|
|
||||||
$ pelican ~/Blog/content -o /tmp/out -t two-column
|
|
||||||
$ vim ~/Dev/Pelican/pelican-themes/two-column/templates/index.html
|
|
||||||
$ pelican ~/Blog/content -o /tmp/out -t two-column
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Doing several things at once
|
|
||||||
""""""""""""""""""""""""""""
|
|
||||||
|
|
||||||
The ``--install``, ``--remove`` and ``--symlink`` options are not mutually
exclusive, so you can combine them in the same command line to do more than one
operation at a time, like this:
|
|
||||||
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
# pelican-themes --remove notmyidea-cms two-column \
|
|
||||||
--install ~/Dev/Python/pelican-themes/notmyidea-cms-fr \
|
|
||||||
--symlink ~/Dev/Python/pelican-themes/two-column \
|
|
||||||
--verbose
|
|
||||||
|
|
||||||
In this example, the theme ``notmyidea-cms`` is replaced by the theme
|
|
||||||
``notmyidea-cms-fr``, and the ``two-column`` theme is removed and then
re-installed as a symbolic link.
|
|
||||||
|
|
|
||||||
.. _plugins:
|
|
||||||
|
|
||||||
Plugins
|
|
||||||
#######
|
|
||||||
|
|
||||||
Beginning with version 3.0, Pelican supports plugins. Plugins are a way to add
|
|
||||||
features to Pelican without having to directly modify the Pelican core.
|
|
||||||
|
|
||||||
How to use plugins
|
|
||||||
==================
|
|
||||||
|
|
||||||
Starting with version 4.5, Pelican moved to a new plugin structure utilizing
|
|
||||||
namespace packages that can be easily installed via Pip_. Plugins supporting
|
|
||||||
this structure will install under the namespace package ``pelican.plugins`` and
|
|
||||||
can be automatically discovered by Pelican. To see a list of plugins that are
|
|
||||||
active in your environment, run::
|
|
||||||
|
|
||||||
pelican-plugins
|
|
||||||
|
|
||||||
If you leave the ``PLUGINS`` setting as default (``None``), Pelican will
|
|
||||||
automatically discover namespace plugins and register them. If, on the other
|
|
||||||
hand, you specify a ``PLUGINS`` setting as a list of plugins, this
|
|
||||||
auto-discovery will be disabled. At that point, only the plugins you specify
|
|
||||||
will be registered, and you must explicitly list any namespace plugins as well.
|
|
||||||
|
|
||||||
If you are using the ``PLUGINS`` setting, you can specify plugins in two ways.
|
|
||||||
The first method specifies plugins as a list of strings. Namespace plugins can
|
|
||||||
be specified either by their full names (``pelican.plugins.myplugin``) or by
|
|
||||||
their short names (``myplugin``)::
|
|
||||||
|
|
||||||
PLUGINS = ['package.myplugin',
|
|
||||||
'namespace_plugin1',
|
|
||||||
'pelican.plugins.namespace_plugin2']
|
|
||||||
|
|
||||||
Alternatively, you can import them in your settings file and pass the modules::
|
|
||||||
|
|
||||||
from package import myplugin
|
|
||||||
from pelican.plugins import namespace_plugin1, namespace_plugin2
|
|
||||||
PLUGINS = [myplugin, namespace_plugin1, namespace_plugin2]
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
When experimenting with different plugins (especially the ones that deal
|
|
||||||
with metadata and content) caching may interfere and the changes may not be
|
|
||||||
visible. In such cases disable caching with ``LOAD_CONTENT_CACHE = False``
|
|
||||||
or use the ``--ignore-cache`` command-line switch.
|
|
||||||
|
|
||||||
If your plugins are not in an importable path, you can specify a list of paths
|
|
||||||
via the ``PLUGIN_PATHS`` setting. As shown in the following example, paths in
|
|
||||||
the ``PLUGIN_PATHS`` list can be absolute or relative to the settings file::
|
|
||||||
|
|
||||||
PLUGIN_PATHS = ["plugins", "/srv/pelican/plugins"]
|
|
||||||
PLUGINS = ["assets", "liquid_tags", "sitemap"]
|
|
||||||
|
|
||||||
Where to find plugins
|
|
||||||
=====================
|
|
||||||
Namespace plugins can be found in the `pelican-plugins organization`_ as
|
|
||||||
individual repositories. Legacy plugins are located in the `pelican-plugins
|
|
||||||
repository`_ and will be gradually phased out in favor of the namespace
|
|
||||||
versions.
|
|
||||||
|
|
||||||
.. _pelican-plugins organization: https://github.com/pelican-plugins
|
|
||||||
.. _pelican-plugins repository: https://github.com/getpelican/pelican-plugins
|
|
||||||
|
|
||||||
Please note that while we do our best to review and maintain these plugins,
|
|
||||||
they are submitted by the Pelican community and thus may have varying levels of
|
|
||||||
support and interoperability.
|
|
||||||
|
|
||||||
How to create plugins
|
|
||||||
=====================
|
|
||||||
|
|
||||||
Plugins are based on the concept of signals. Pelican sends signals, and plugins
|
|
||||||
subscribe to those signals. The list of available signals is documented in a
|
|
||||||
subsequent section.
|
|
||||||
|
|
||||||
The only rule to follow for plugins is to define a ``register`` callable, in
|
|
||||||
which you map the signals to your plugin logic. Let's take a simple example::
|
|
||||||
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from pelican import signals
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
def test(sender):
|
|
||||||
log.debug("%s initialized !!", sender)
|
|
||||||
|
|
||||||
def register():
|
|
||||||
signals.initialized.connect(test)
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
Signal receivers are weakly-referenced and thus must not be defined within
|
|
||||||
your ``register`` callable or they will be garbage-collected before the
|
|
||||||
signal is emitted.
|
|
||||||
|
|
||||||
If multiple plugins connect to the same signal, there is no way to guarantee or
|
|
||||||
control in which order the plugins will be executed. This is a limitation
|
|
||||||
inherited from Blinker_, the dependency Pelican uses to implement signals.
|
|
||||||
|
|
||||||
Namespace plugin structure
|
|
||||||
--------------------------
|
|
||||||
|
|
||||||
Namespace plugins must adhere to a certain structure in order to function
|
|
||||||
properly. They need to be installable (i.e. contain ``setup.py`` or equivalent)
|
|
||||||
and have a folder structure as follows::
|
|
||||||
|
|
||||||
myplugin
|
|
||||||
├── pelican
|
|
||||||
│ └── plugins
|
|
||||||
│ └── myplugin
|
|
||||||
│ ├── __init__.py
|
|
||||||
│ └── ...
|
|
||||||
├── ...
|
|
||||||
└── setup.py
|
|
||||||
|
|
||||||
It is crucial that the ``pelican`` and ``pelican/plugins`` folders do **not**
contain an ``__init__.py`` file. In fact, it is best to keep those folders
empty apart from the folders listed in the structure above, and to keep your
plugin-related files contained solely in the ``pelican/plugins/myplugin``
folder, to avoid any issues.
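
For orientation, a minimal, illustrative ``setup.py`` for such a plugin might
look like the following (the package name and version are placeholders; the
cookiecutter template mentioned below generates a more complete project)::

    # setup.py -- minimal sketch for a namespace plugin
    from setuptools import find_namespace_packages, setup

    setup(
        name='pelican-myplugin',
        version='0.1.0',
        # Pick up pelican/plugins/myplugin without requiring __init__.py files
        # in the pelican/ or pelican/plugins/ folders.
        packages=find_namespace_packages(include=['pelican.plugins.*']),
    )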
|
|
||||||
|
|
||||||
To easily set up the proper structure, a `cookiecutter template for plugins`_
|
|
||||||
is provided. Refer to that project's README for instructions on how to use it.
|
|
||||||
|
|
||||||
.. _cookiecutter template for plugins: https://github.com/getpelican/cookiecutter-pelican-plugin
|
|
||||||
|
|
||||||
List of signals
|
|
||||||
===============
|
|
||||||
|
|
||||||
Here is the list of currently implemented signals:
|
|
||||||
|
|
||||||
================================= ============================ ===========================================================================
|
|
||||||
Signal Arguments Description
|
|
||||||
================================= ============================ ===========================================================================
|
|
||||||
initialized pelican object
|
|
||||||
finalized pelican object invoked after all the generators are executed and just before pelican exits
|
|
||||||
useful for custom post processing actions, such as:
|
|
||||||
- minifying js/css assets.
|
|
||||||
- notify/ping search engines with an updated sitemap.
|
|
||||||
generator_init generator invoked in the Generator.__init__
|
|
||||||
all_generators_finalized generators invoked after all the generators are executed and before writing output
|
|
||||||
readers_init readers invoked in the Readers.__init__
|
|
||||||
article_generator_context article_generator, metadata
|
|
||||||
article_generator_preread article_generator invoked before a article is read in ArticlesGenerator.generate_context;
|
|
||||||
use if code needs to do something before every article is parsed
|
|
||||||
article_generator_init article_generator invoked in the ArticlesGenerator.__init__
|
|
||||||
article_generator_pretaxonomy article_generator invoked before categories and tags lists are created
|
|
||||||
useful when e.g. modifying the list of articles to be generated
|
|
||||||
so that removed articles are not leaked in categories or tags
|
|
||||||
article_generator_finalized article_generator invoked at the end of ArticlesGenerator.generate_context
|
|
||||||
article_generator_write_article article_generator, content invoked before writing each article, the article is passed as content
|
|
||||||
article_writer_finalized article_generator, writer invoked after all articles and related pages have been written, but before
|
|
||||||
the article generator is closed.
|
|
||||||
get_generators pelican object invoked in Pelican.get_generator_classes,
|
|
||||||
can return a Generator, or several
|
|
||||||
generators in a tuple or in a list.
|
|
||||||
get_writer pelican object invoked in Pelican.get_writer,
|
|
||||||
can return a custom Writer.
|
|
||||||
page_generator_context page_generator, metadata
|
|
||||||
page_generator_preread page_generator invoked before a page is read in PageGenerator.generate_context;
|
|
||||||
use if code needs to do something before every page is parsed.
|
|
||||||
page_generator_init page_generator invoked in the PagesGenerator.__init__
|
|
||||||
page_generator_finalized page_generator invoked at the end of PagesGenerator.generate_context
|
|
||||||
page_generator_write_page page_generator, content invoked before writing each page, the page is passed as content
|
|
||||||
page_writer_finalized page_generator, writer invoked after all pages have been written, but before the page generator
|
|
||||||
is closed.
|
|
||||||
static_generator_context static_generator, metadata
|
|
||||||
static_generator_preread static_generator invoked before a static file is read in StaticGenerator.generate_context;
|
|
||||||
use if code needs to do something before every static file is added to the
|
|
||||||
staticfiles list.
|
|
||||||
static_generator_init static_generator invoked in the StaticGenerator.__init__
|
|
||||||
static_generator_finalized static_generator invoked at the end of StaticGenerator.generate_context
|
|
||||||
content_object_init content_object invoked at the end of Content.__init__
|
|
||||||
content_written path, context invoked each time a content file is written.
|
|
||||||
feed_generated context, feed invoked each time a feed gets generated. Can be used to modify a feed
|
|
||||||
object before it gets written.
|
|
||||||
feed_written path, context, feed invoked each time a feed file is written.
|
|
||||||
================================= ============================ ===========================================================================
|
|
||||||
|
|
||||||
.. warning::
|
|
||||||
|
|
||||||
Avoid ``content_object_init`` signal if you intend to read ``summary`` or
|
|
||||||
``content`` properties of the content object. That combination can result in
|
|
||||||
unresolved links when :ref:`ref-linking-to-internal-content` (see
|
|
||||||
`pelican-plugins bug #314`_). Use ``_summary`` and ``_content`` properties
|
|
||||||
instead, or, alternatively, run your plugin at a later stage (e.g.
|
|
||||||
``all_generators_finalized``).
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
After Pelican 3.2, signal names were standardized. Older plugins may need
|
|
||||||
to be updated to use the new names:
|
|
||||||
|
|
||||||
========================== ===========================
|
|
||||||
Old name New name
|
|
||||||
========================== ===========================
|
|
||||||
article_generate_context article_generator_context
|
|
||||||
article_generate_finalized article_generator_finalized
|
|
||||||
article_generate_preread article_generator_preread
|
|
||||||
pages_generate_context page_generator_context
|
|
||||||
pages_generate_preread page_generator_preread
|
|
||||||
pages_generator_finalized page_generator_finalized
|
|
||||||
pages_generator_init page_generator_init
|
|
||||||
static_generate_context static_generator_context
|
|
||||||
static_generate_preread static_generator_preread
|
|
||||||
========================== ===========================
|
|
||||||
|
|
||||||
Recipes
|
|
||||||
=======
|
|
||||||
|
|
||||||
We eventually realised some of the recipes to create plugins would be best
|
|
||||||
shared in the documentation somewhere, so here they are!
|
|
||||||
|
|
||||||
How to create a new reader
|
|
||||||
--------------------------
|
|
||||||
|
|
||||||
One thing you might want is to add support for your very own input format.
|
|
||||||
While it might make sense to add this feature in Pelican core, we wisely chose
|
|
||||||
to avoid this situation and instead have the different readers defined via
|
|
||||||
plugins.
|
|
||||||
|
|
||||||
The rationale behind this choice is mainly that plugins are really easy to
|
|
||||||
write and don't slow down Pelican itself when they're not active.
|
|
||||||
|
|
||||||
No more talking — here is an example::
|
|
||||||
|
|
||||||
from pelican import signals
|
|
||||||
from pelican.readers import BaseReader
|
|
||||||
|
|
||||||
# Create a new reader class, inheriting from the pelican.reader.BaseReader
|
|
||||||
class NewReader(BaseReader):
|
|
||||||
enabled = True # Yeah, you probably want that :-)
|
|
||||||
|
|
||||||
# The list of file extensions you want this reader to match with.
|
|
||||||
# If multiple readers were to use the same extension, the latest will
|
|
||||||
# win (so the one you're defining here, most probably).
|
|
||||||
file_extensions = ['yeah']
|
|
||||||
|
|
||||||
# You need to have a read method, which takes a filename and returns
|
|
||||||
# some content and the associated metadata.
|
|
||||||
def read(self, filename):
|
|
||||||
metadata = {'title': 'Oh yeah',
|
|
||||||
'category': 'Foo',
|
|
||||||
'date': '2012-12-01'}
|
|
||||||
|
|
||||||
parsed = {}
|
|
||||||
for key, value in metadata.items():
|
|
||||||
parsed[key] = self.process_metadata(key, value)
|
|
||||||
|
|
||||||
return "Some content", parsed
|
|
||||||
|
|
||||||
def add_reader(readers):
|
|
||||||
readers.reader_classes['yeah'] = NewReader
|
|
||||||
|
|
||||||
# This is how pelican works.
|
|
||||||
def register():
|
|
||||||
signals.readers_init.connect(add_reader)
|
|
||||||
|
|
||||||
|
|
||||||
Adding a new generator
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
Adding a new generator is also really easy. You might want to have a look at
|
|
||||||
:doc:`internals` for more information on how to create your own generator.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
def get_generators(pelican_object):
|
|
||||||
# define a new generator here if you need to
|
|
||||||
return MyGenerator
|
|
||||||
|
|
||||||
def register():
|
|
||||||
signals.get_generators.connect(get_generators)
|
|
||||||
|
|
||||||
|
|
||||||
.. _Pip: https://pip.pypa.io/
|
|
||||||
.. _pelican-plugins bug #314: https://github.com/getpelican/pelican-plugins/issues/314
|
|
||||||
.. _Blinker: https://pythonhosted.org/blinker/
|
|
||||||
|
|
|
||||||
Publish your site
|
|
||||||
#################
|
|
||||||
|
|
||||||
.. _site_generation:
|
|
||||||
|
|
||||||
Site generation
|
|
||||||
===============
|
|
||||||
|
|
||||||
Once Pelican is installed and you have some content (e.g., in Markdown or reST
|
|
||||||
format), you can convert your content into HTML via the ``pelican`` command,
|
|
||||||
specifying the path to your content and (optionally) the path to your
|
|
||||||
:doc:`settings<settings>` file::
|
|
||||||
|
|
||||||
pelican /path/to/your/content/ [-s path/to/your/settings.py]
|
|
||||||
|
|
||||||
The above command will generate your site and save it in the ``output/``
|
|
||||||
folder, using the default theme to produce a simple site. The default theme
|
|
||||||
consists of very simple HTML without styling and is provided so folks may use
|
|
||||||
it as a basis for creating their own themes.
|
|
||||||
|
|
||||||
When working on a single article or page, it is possible to generate only the
|
|
||||||
file that corresponds to that content. To do this, use the ``--write-selected``
|
|
||||||
argument, like so::
|
|
||||||
|
|
||||||
pelican --write-selected output/posts/my-post-title.html
|
|
||||||
|
|
||||||
Note that you must specify the path to the generated *output* file — not the
|
|
||||||
source content. To determine the output file name and location, use the
|
|
||||||
``--debug`` flag. If desired, ``--write-selected`` can take a comma-separated
|
|
||||||
list of paths or can be configured as a setting. (See:
|
|
||||||
:ref:`writing_only_selected_content`)
|
|
||||||
|
|
||||||
You can also tell Pelican to watch for your modifications, instead of manually
|
|
||||||
re-running it every time you want to see your changes. To enable this, run the
|
|
||||||
``pelican`` command with the ``-r`` or ``--autoreload`` option. On non-Windows
|
|
||||||
environments, this option can also be combined with the ``-l`` or ``--listen``
|
|
||||||
option to simultaneously both auto-regenerate *and* serve the output at
|
|
||||||
http://localhost:8000::
|
|
||||||
|
|
||||||
pelican --autoreload --listen
|
|
||||||
|
|
||||||
Pelican has other command-line switches available. Have a look at the help to
|
|
||||||
see all the options you can use::
|
|
||||||
|
|
||||||
pelican --help
|
|
||||||
|
|
||||||
Viewing the generated files
|
|
||||||
---------------------------
|
|
||||||
|
|
||||||
The files generated by Pelican are static files, so you don't actually need
|
|
||||||
anything special to view them. You can use your browser to open the generated
|
|
||||||
HTML files directly::
|
|
||||||
|
|
||||||
firefox output/index.html
|
|
||||||
|
|
||||||
Because the above method may have trouble locating your CSS and other linked
|
|
||||||
assets, running Pelican's simple built-in web server will often provide a more
|
|
||||||
reliable previewing experience::
|
|
||||||
|
|
||||||
pelican --listen
|
|
||||||
|
|
||||||
Once the web server has been started, you can preview your site at:
|
|
||||||
http://localhost:8000/
|
|
||||||
|
|
||||||
Deployment
|
|
||||||
==========
|
|
||||||
|
|
||||||
After you have generated your site, previewed it in your local development
|
|
||||||
environment, and are ready to deploy it to production, you might first
|
|
||||||
re-generate your site with any production-specific settings (e.g., analytics,
|
|
||||||
feeds, etc.) that you may have defined::
|
|
||||||
|
|
||||||
pelican content -s publishconf.py
|
|
||||||
|
|
||||||
To base your publish configuration on top of your ``pelicanconf.py``, you can
|
|
||||||
import your ``pelicanconf`` settings by including the following line in your
|
|
||||||
``publishconf.py``::
|
|
||||||
|
|
||||||
from pelicanconf import *
|
|
||||||
|
|
||||||
If you have generated a ``publishconf.py`` using ``pelican-quickstart``, this
|
|
||||||
line is included by default.
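
For orientation, a minimal ``publishconf.py`` might look like the following
sketch; the specific overrides shown are common examples, not requirements:

.. code-block:: python

    # publishconf.py: illustrative sketch; adjust the values for your own site
    from pelicanconf import *  # start from the development settings

    # Production-specific overrides (example values)
    SITEURL = 'https://example.com'
    RELATIVE_URLS = False
    FEED_ALL_ATOM = 'feeds/all.atom.xml'
    DELETE_OUTPUT_DIRECTORY = True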

The steps for deploying your site will depend on where it will be hosted. If
you have SSH access to a server running Nginx or Apache, you might use the
``rsync`` tool to transmit your site files::

    rsync -avc --delete output/ host.example.com:/var/www/your-site/

There are many other deployment options, some of which can be configured when
first setting up your site via the ``pelican-quickstart`` command. See the
:doc:`Tips<tips>` page for detail on publishing via GitHub Pages.

Automation
==========

While the ``pelican`` command is the canonical way to generate your site,
automation tools can be used to streamline the generation and publication flow.
One of the questions asked during the ``pelican-quickstart`` process pertains
to whether you want to automate site generation and publication. If you
answered "yes" to that question, a ``tasks.py`` and ``Makefile`` will be
generated in the root of your project. These files, pre-populated with certain
information gleaned from other answers provided during the
``pelican-quickstart`` process, are meant as a starting point and should be
customized to fit your particular needs and usage patterns. If you find one or
both of these automation tools to be of limited utility, these files can be
deleted at any time and will not affect usage of the canonical ``pelican``
command.

Following are automation tools that "wrap" the ``pelican`` command and can
simplify the process of generating, previewing, and uploading your site.

Invoke
------

The advantage of Invoke_ is that it is written in Python and thus can be used
in a wide range of environments. The downside is that it must be installed
separately. Use the following command to install Invoke, prefixing with
``sudo`` if your environment requires it::

    pip install invoke

Take a moment to open the ``tasks.py`` file that was generated in your project
root. You will see a number of commands, any one of which can be renamed,
removed, and/or customized to your liking. Using the out-of-the-box
configuration, you can generate your site via::

    invoke build

If you'd prefer to have Pelican automatically regenerate your site every time a
change is detected (which is handy when testing locally), use the following
command instead::

    invoke regenerate

To serve the generated site so it can be previewed in your browser at
http://localhost:8000/::

    invoke serve

To serve the generated site with automatic browser reloading every time a
change is detected, first ``pip install livereload``, then use the
following command::

    invoke livereload

If during the ``pelican-quickstart`` process you answered "yes" when asked
whether you want to upload your site via SSH, you can use the following command
to publish your site via rsync over SSH::

    invoke publish

These are just a few of the commands available by default, so feel free to
explore ``tasks.py`` and see what other commands are available. More
importantly, don't hesitate to customize ``tasks.py`` to suit your specific
needs and preferences.
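
For orientation, here is a hedged sketch of what a small custom task added to
``tasks.py`` could look like; the task bodies below are illustrative and not a
copy of the generated file:

.. code-block:: python

    # tasks.py: illustrative sketch of adding or customizing Invoke tasks
    import shutil

    from invoke import task

    @task
    def clean(c):
        """Remove the generated output directory."""
        shutil.rmtree("output", ignore_errors=True)

    @task
    def build(c):
        """Generate the site using the development settings."""
        c.run("pelican content -s pelicanconf.py")

With tasks like these in place, running ``invoke clean build`` would wipe the
previous output and regenerate the site in one step.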

Make
----

A ``Makefile`` is also automatically created for you when you say "yes" to the
relevant question during the ``pelican-quickstart`` process. The advantage of
this method is that the ``make`` command is built into most POSIX systems and
thus doesn't require installing anything else in order to use it. The downside
is that non-POSIX systems (e.g., Windows) do not include ``make``, and
installing it on those systems can be a non-trivial task.

If you want to use ``make`` to generate your site using the settings in
``pelicanconf.py``, run::

    make html

To generate the site for production, using the settings in ``publishconf.py``,
run::

    make publish

If you'd prefer to have Pelican automatically regenerate your site every time a
change is detected (which is handy when testing locally), use the following
command instead::

    make regenerate

To serve the generated site so it can be previewed in your browser at
http://localhost:8000/::

    make serve

Normally you would need to run ``make regenerate`` and ``make serve`` in two
separate terminal sessions, but you can run both at once via::

    make devserver

The above command will simultaneously run Pelican in regeneration mode as well
as serve the output at http://localhost:8000.

When you're ready to publish your site, you can upload it via the method(s) you
chose during the ``pelican-quickstart`` questionnaire. For this example, we'll
use rsync over ssh::

    make rsync_upload

That's it! Your site should now be live.

(The default ``Makefile`` and ``devserver.sh`` scripts use the ``python`` and
``pelican`` executables to complete their tasks. If you want to use different
executables, such as ``python3``, you can set the ``PY`` and ``PELICAN``
environment variables, respectively, to override the default executable names.)

.. _Invoke: https://www.pyinvoke.org/

@@ -1,82 +0,0 @@

Quickstart
##########

Reading through all the documentation is highly recommended, but for the truly
impatient, following are some quick steps to get started.

Installation
------------

Install Pelican (and optionally Markdown if you intend to use it) on Python
2.7.x or Python 3.5+ by running the following command in your preferred
terminal, prefixing with ``sudo`` if permissions warrant::

    pip install pelican[Markdown]

Create a project
----------------

First, choose a name for your project, create an appropriately-named directory
for your site, and switch to that directory::

    mkdir -p ~/projects/yoursite
    cd ~/projects/yoursite

Create a skeleton project via the ``pelican-quickstart`` command, which begins
by asking some questions about your site::

    pelican-quickstart

For questions that have default values denoted in brackets, feel free to use
the Return key to accept those default values [#tzlocal_fn]_. When asked for
your URL prefix, enter your domain name as indicated (e.g.,
``https://example.com``).

Create an article
-----------------

You cannot run Pelican until you have created some content. Use your preferred
text editor to create your first article with the following content::

    Title: My First Review
    Date: 2010-12-03 10:20
    Category: Review

    Following is a review of my favorite mechanical keyboard.

Given that this example article is in Markdown format, save it as
``~/projects/yoursite/content/keyboard-review.md``.

Generate your site
------------------

From your project root directory, run the ``pelican`` command to generate your site::

    pelican content

Your site has now been generated inside the ``output/`` directory. (You may see
a warning related to feeds, but that is normal when developing locally and can
be ignored for now.)

Preview your site
-----------------

Open a new terminal session, navigate to your project root directory, and
run the following command to launch Pelican's web server::

    pelican --listen

Preview your site by navigating to http://localhost:8000/ in your browser.

Continue reading the other documentation sections for more detail, and check
out the Pelican wiki's Tutorials_ page for links to community-published
tutorials.

.. _Tutorials: https://github.com/getpelican/pelican/wiki/Tutorials

Footnotes
---------

.. [#tzlocal_fn] You can help localize default fields by installing the
   optional `tzlocal <https://pypi.org/project/tzlocal/>`_ module.

114 docs/report.rst
@@ -1,114 +0,0 @@

Some history about Pelican
##########################

.. warning::

    This page comes from a report the original author (Alexis Métaireau) wrote
    right after writing Pelican, in December 2010. The information may not be
    up-to-date.

Pelican is a simple static blog generator. It parses markup files (Markdown or
reStructuredText for now) and generates an HTML folder with all the files in
it. I chose Python to implement Pelican because it seemed simple and fit my
needs. I did not want to define a class for each thing, but I still wanted to
keep things loosely coupled. It turns out that this was exactly what I wanted.
From time to time, thanks to the feedback of some users, providing fixes has
taken very little time. So far, I have refactored the Pelican code twice; each
time took less than 30 minutes.

Use case
========

I was previously using WordPress, a solution you can host on a web server to
manage your blog. Most of the time, I prefer using markup languages such as
Markdown or reStructuredText to write my articles. To do so, I use vim. I think
it is important to let people choose the tool they want to write their
articles. In my opinion, a blog manager should simply take any kind of input
and transform it into a weblog. That is what Pelican does: you can write your
articles using the tool and the markup language you want, and then generate a
static HTML weblog.

.. image:: _static/overall.png

To be flexible enough, Pelican has template support, so you can easily write
your own themes if you want to.

Design process
==============

Pelican came from a need I had. I started by creating a single-file
application, and I have made it grow to support what it does now. To start,
I wrote a piece of documentation about what I wanted to do. Then I created the
content I wanted to parse (the reStructuredText files) and started
experimenting with the code. Pelican was 200 lines long and contained almost
ten functions and one class when it was first usable.

I kept facing different problems over time and wanted to add features to
Pelican while using it. The first change I made was to add support for a
settings file. It is possible to pass the options on the command line, but
that can be tedious if there are a lot of them. In the same way, I added
support for different things over time: Atom feeds, multiple themes, multiple
markup formats, etc. At some point, it became clear that the "only one file"
mantra was not good enough for Pelican, so I decided to rework it a bit and
split it into multiple files.

I’ve separated the logic into different classes and concepts:

* *writers* are responsible for the whole file-writing process: .html files,
  RSS feeds, and so on. Since those operations are commonly needed, the object
  is created once and then passed to the generators.

* *readers* are used to read from various formats (Markdown and
  reStructuredText for now, but the system is extensible). Given a file, they
  return metadata (author, tags, category, etc.) and content (HTML formatted).

* *generators* generate the different outputs. For instance, Pelican
  comes with an ArticlesGenerator and a PagesGenerator, among others. Given a
  configuration, they can do whatever you want them to do. Most of the time
  that means generating files from inputs (user input and files).

I also deal with content objects. They can be ``Articles``, ``Pages``,
``Quotes``, or whatever you want. They are defined in the ``contents.py``
module and represent some content to be used by the program.

In more detail
==============

Here is an overview of the classes involved in Pelican.

.. image:: _static/uml.jpg

The interface does not really exist; I have added it only to clarify the
whole picture. I use duck typing, not interfaces.

Internally, the following process is followed:

* First of all, the command line is parsed, and some content from the user is
  used to initialize the different generator objects.

* A ``context`` is created. It contains the settings from the command line and
  a settings file if provided.
* The ``generate_context`` method of each generator is called, updating
  the context.
* The writer is created and given to the ``generate_output`` method of each
  generator.

I make two calls because it is important that the context does not change
while the generators produce their output. In other words, the first method,
``generate_context``, may modify the context, whereas the second method,
``generate_output``, should not.

Then it is up to the generators to do what they want, in the
``generate_context`` and ``generate_output`` methods. Looking at the
``ArticlesGenerator`` class helps to understand some other concepts. Here
is what happens when calling its ``generate_context`` method:

* Read the “path” folder, looking for reStructuredText files, load each of
  them, and construct a content object (``Article``) with it. To do so, use
  ``Reader`` objects.
* Update the ``context`` with all those articles.

Then, the ``generate_output`` method uses the ``context`` and the ``writer``
to generate the wanted output.
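
To make the two-phase flow concrete, here is a minimal sketch of a custom
generator, assuming the constructor keywords used when Pelican instantiates
generators (``context``, ``settings``, ``path``, ``theme``, ``output_path``);
the class name and the value it stores are invented for illustration:

.. code-block:: python

    # Illustrative sketch only; not a generator that ships with Pelican.
    class GreetingGenerator:
        def __init__(self, context, settings, path, theme, output_path):
            self.context = context
            self.settings = settings
            self.output_path = output_path

        def generate_context(self):
            # Phase 1: enrich the shared context; nothing is written yet.
            self.context["GREETING"] = "Hello from a custom generator"

        def generate_output(self, writer):
            # Phase 2: the context is treated as read-only here; use the
            # writer (or plain file I/O) to emit this generator's output.
            ...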

1395 docs/settings.rst
File diff suppressed because it is too large

570 docs/themes.rst
@@ -1,570 +0,0 @@

.. _theming-pelican:

Creating themes
###############

To generate its HTML output, Pelican uses the `Jinja
<https://palletsprojects.com/p/jinja/>`_ templating engine due to its
flexibility and straightforward syntax. If you want to create your own theme,
feel free to take inspiration from the `"simple" theme
<https://github.com/getpelican/pelican/tree/master/pelican/themes/simple/templates>`_.

To generate your site using a theme you have created (or downloaded manually
and then modified), you can specify that theme via the ``-t`` flag::

    pelican content -s pelicanconf.py -t /projects/your-site/themes/your-theme

If you'd rather not specify the theme on every invocation, you can define
``THEME`` in your settings to point to the location of your preferred theme.


Structure
=========

To make your own theme, you must use the following structure::

    ├── static
    │   ├── css
    │   └── images
    └── templates
        ├── archives.html         // to display archives
        ├── period_archives.html  // to display time-period archives
        ├── article.html          // processed for each article
        ├── author.html           // processed for each author
        ├── authors.html          // must list all the authors
        ├── categories.html       // must list all the categories
        ├── category.html         // processed for each category
        ├── index.html            // the index (list all the articles)
        ├── page.html             // processed for each page
        ├── tag.html              // processed for each tag
        └── tags.html             // must list all the tags. Can be a tag cloud.

* `static` contains all the static assets, which will be copied to the output
  `theme` folder. The above filesystem layout includes CSS and image folders,
  but those are just examples. Put what you need here.

* `templates` contains all the templates that will be used to generate the
  content. The template files listed above are mandatory; you can add your own
  templates if it helps you keep things organized while creating your theme.


.. _templates-variables:

Templates and variables
=======================

The idea is to use a simple syntax that you can embed into your HTML pages.
This document describes which templates should exist in a theme, and which
variables will be passed to each template at generation time.

All templates will receive the variables defined in your settings file, as long
as they are in all-caps. You can access them directly.
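
For example, a custom all-caps setting defined in your configuration becomes
available to every template; the setting name below is an invented example:

.. code-block:: python

    # pelicanconf.py: illustrative sketch; any all-caps name works the same way
    SITE_TAGLINE = "Notes on static site generation"

A template could then reference it directly as ``{{ SITE_TAGLINE }}``.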


Common variables
----------------

All of these settings will be available to all templates.

============= ===================================================
Variable      Description
============= ===================================================
output_file   The name of the file currently being generated. For
              instance, when Pelican is rendering the home page,
              output_file will be "index.html".
articles      The list of articles, ordered descending by date.
              All the elements are `Article` objects, so you can
              access their attributes (e.g. title, summary, author
              etc.). Sometimes this is shadowed (for instance, in
              the tags page). You will then find info about it
              in the `all_articles` variable.
dates         The same list of articles, but ordered by date,
              ascending.
drafts        The list of draft articles
authors       A list of (author, articles) tuples, containing all
              the authors and corresponding articles (values)
categories    A list of (category, articles) tuples, containing
              all the categories and corresponding articles (values)
tags          A list of (tag, articles) tuples, containing all
              the tags and corresponding articles (values)
pages         The list of pages
hidden_pages  The list of hidden pages
draft_pages   The list of draft pages
============= ===================================================


Sorting
-------

URL wrappers (currently categories, tags, and authors) have comparison methods
that allow them to be easily sorted by name::

    {% for tag, articles in tags|sort %}

If you want to sort based on different criteria, `Jinja's sort command`__ has a
number of options.

__ https://jinja.palletsprojects.com/en/master/templates/#sort


Date Formatting
---------------

Pelican formats the date according to your settings and locale
(``DATE_FORMATS``/``DEFAULT_DATE_FORMAT``) and provides a ``locale_date``
attribute. On the other hand, the ``date`` attribute will be a `datetime`_
object. If you need custom formatting for a date different from your settings,
use the Jinja filter ``strftime`` that comes with Pelican. Usage is the same
as the Python `strftime`_ format, but the filter will do the right thing and
format your date according to the locale given in your settings::

    {{ article.date|strftime('%d %B %Y') }}

.. _datetime: https://docs.python.org/3/library/datetime.html#datetime-objects
.. _strftime: https://docs.python.org/3/library/datetime.html#strftime-strptime-behavior


index.html
----------

This is the home page or index of your blog, generated at ``index.html``.

If pagination is active, subsequent pages will reside in
``index{number}.html``.

====================== ===================================================
Variable               Description
====================== ===================================================
articles_paginator     A paginator object for the list of articles
articles_page          The current page of articles
articles_previous_page The previous page of articles (``None`` if page does
                       not exist)
articles_next_page     The next page of articles (``None`` if page does
                       not exist)
dates_paginator        A paginator object for the article list, ordered by
                       date, ascending.
dates_page             The current page of articles, ordered by date,
                       ascending.
dates_previous_page    The previous page of articles, ordered by date,
                       ascending (``None`` if page does not exist)
dates_next_page        The next page of articles, ordered by date,
                       ascending (``None`` if page does not exist)
page_name              'index' -- useful for pagination links
====================== ===================================================


author.html
-----------

This template will be processed for each of the existing authors, with output
generated according to the ``AUTHOR_SAVE_AS`` setting (`Default:`
``author/{slug}.html``). If pagination is active, subsequent pages will by
default reside at ``author/{slug}{number}.html``.

====================== ===================================================
Variable               Description
====================== ===================================================
author                 The name of the author being processed
articles               Articles by this author
dates                  Articles by this author, but ordered by date,
                       ascending
articles_paginator     A paginator object for the list of articles
articles_page          The current page of articles
articles_previous_page The previous page of articles (``None`` if page does
                       not exist)
articles_next_page     The next page of articles (``None`` if page does
                       not exist)
dates_paginator        A paginator object for the article list, ordered by
                       date, ascending.
dates_page             The current page of articles, ordered by date,
                       ascending.
dates_previous_page    The previous page of articles, ordered by date,
                       ascending (``None`` if page does not exist)
dates_next_page        The next page of articles, ordered by date,
                       ascending (``None`` if page does not exist)
page_name              AUTHOR_URL where everything after `{slug}` is
                       removed -- useful for pagination links
====================== ===================================================


category.html
-------------

This template will be processed for each of the existing categories, with
output generated according to the ``CATEGORY_SAVE_AS`` setting (`Default:`
``category/{slug}.html``). If pagination is active, subsequent pages will by
default reside at ``category/{slug}{number}.html``.

====================== ===================================================
Variable               Description
====================== ===================================================
category               The name of the category being processed
articles               Articles for this category
dates                  Articles for this category, but ordered by date,
                       ascending
articles_paginator     A paginator object for the list of articles
articles_page          The current page of articles
articles_previous_page The previous page of articles (``None`` if page does
                       not exist)
articles_next_page     The next page of articles (``None`` if page does
                       not exist)
dates_paginator        A paginator object for the list of articles,
                       ordered by date, ascending
dates_page             The current page of articles, ordered by date,
                       ascending
dates_previous_page    The previous page of articles, ordered by date,
                       ascending (``None`` if page does not exist)
dates_next_page        The next page of articles, ordered by date,
                       ascending (``None`` if page does not exist)
page_name              CATEGORY_URL where everything after `{slug}` is
                       removed -- useful for pagination links
====================== ===================================================


article.html
------------

This template will be processed for each article, with output generated
according to the ``ARTICLE_SAVE_AS`` setting (`Default:` ``{slug}.html``). The
following variables are available when rendering.

============= ===================================================
Variable      Description
============= ===================================================
article       The article object to be displayed
category      The name of the category for the current article
============= ===================================================

Any metadata that you put in the header of the article source file will be
available as fields on the ``article`` object. The field name will be the same
as the name of the metadata field, except in all-lowercase characters.

For example, you could add a field called `FacebookImage` to your article
metadata, as shown below:

.. code-block:: md

    Title: I love Python more than music
    Date: 2013-11-06 10:06
    Tags: personal, python
    Category: Tech
    Slug: python-je-l-aime-a-mourir
    Author: Francis Cabrel
    FacebookImage: http://franciscabrel.com/images/pythonlove.png

This new metadata will be made available as `article.facebookimage` in your
`article.html` template. This would allow you, for example, to specify an image
for the Facebook open graph tags that will change for each article:

.. code-block:: html+jinja

    <meta property="og:image" content="{{ article.facebookimage }}"/>


page.html
---------

This template will be processed for each page, with output generated according
to the ``PAGE_SAVE_AS`` setting (`Default:` ``pages/{slug}.html``). The
following variables are available when rendering.

============= ===================================================
Variable      Description
============= ===================================================
page          The page object to be displayed. You can access its
              title, slug, and content.
============= ===================================================


tag.html
--------

This template will be processed for each tag, with output generated according
to the ``TAG_SAVE_AS`` setting (`Default:` ``tag/{slug}.html``). If pagination
is active, subsequent pages will by default reside at
``tag/{slug}{number}.html``.

====================== ===================================================
Variable               Description
====================== ===================================================
tag                    The name of the tag being processed
articles               Articles related to this tag
dates                  Articles related to this tag, but ordered by date,
                       ascending
articles_paginator     A paginator object for the list of articles
articles_page          The current page of articles
articles_previous_page The previous page of articles (``None`` if page does
                       not exist)
articles_next_page     The next page of articles (``None`` if page does
                       not exist)
dates_paginator        A paginator object for the list of articles,
                       ordered by date, ascending
dates_page             The current page of articles, ordered by date,
                       ascending
dates_previous_page    The previous page of articles, ordered by date,
                       ascending (``None`` if page does not exist)
dates_next_page        The next page of articles, ordered by date,
                       ascending (``None`` if page does not exist)
page_name              TAG_URL where everything after `{slug}` is removed
                       -- useful for pagination links
====================== ===================================================


period_archives.html
--------------------

This template will be processed for each year of your posts if a path for
``YEAR_ARCHIVE_SAVE_AS`` is defined, each month if ``MONTH_ARCHIVE_SAVE_AS`` is
defined, and each day if ``DAY_ARCHIVE_SAVE_AS`` is defined.

=================== ===================================================
Variable            Description
=================== ===================================================
period              A tuple of the form (`year`, `month`, `day`) that
                    indicates the current time period. `year` and `day`
                    are numbers while `month` is a string. This tuple
                    only contains `year` if the time period is a
                    given year. It contains both `year` and `month`
                    if the time period is over years and months and
                    so on.
=================== ===================================================

You can see an example of how to use `period` in the `"simple" theme
period_archives.html template
<https://github.com/getpelican/pelican/blob/master/pelican/themes/simple/templates/period_archives.html>`_.


Objects
=======

The following object attributes are available and useful in templates. Not all
attributes are listed here; this is a selection of attributes considered useful
in a template.

.. _object-article:

Article
-------

The string representation of an Article is the `source_path` attribute.

====================== ===================================================
Attribute              Description
====================== ===================================================
author                 The :ref:`Author <object-author_cat_tag>` of
                       this article.
authors                A list of :ref:`Authors <object-author_cat_tag>`
                       of this article.
category               The :ref:`Category <object-author_cat_tag>`
                       of this article.
content                The rendered content of the article.
date                   Datetime object representing the article date.
date_format            Either default date format or locale date format.
default_template       Default template name.
in_default_lang        Boolean representing if the article is written
                       in the default language.
lang                   Language of the article.
locale_date            Date formatted by the `date_format`.
metadata               Article header metadata `dict`.
save_as                Location to save the article page.
slug                   Page slug.
source_path            Full system path of the article source file.
relative_source_path   Relative path from PATH_ to the article source file.
status                 The article status, can be any of 'published' or
                       'draft'.
summary                Rendered summary content.
tags                   List of :ref:`Tag <object-author_cat_tag>`
                       objects.
template               Template name to use for rendering.
title                  Title of the article.
translations           List of translations
                       :ref:`Article <object-article>` objects.
url                    URL to the article page.
====================== ===================================================

.. _PATH: settings.html#PATH


.. _object-author_cat_tag:

Author / Category / Tag
-----------------------

The string representation of those objects is the `name` attribute.

=================== ===================================================
Attribute           Description
=================== ===================================================
name                Name of this object [1]_.
page_name           Author page name.
save_as             Location to save the author page.
slug                Page slug.
url                 URL to the author page.
=================== ===================================================

.. [1] for Author object, coming from `:authors:` or `AUTHOR`.

.. _object-page:

Page
----

The string representation of a Page is the `source_path` attribute.

===================== ===================================================
Attribute             Description
===================== ===================================================
author                The :ref:`Author <object-author_cat_tag>` of
                      this page.
content               The rendered content of the page.
date                  Datetime object representing the page date.
date_format           Either default date format or locale date format.
default_template      Default template name.
in_default_lang       Boolean representing if the page is written
                      in the default language.
lang                  Language of the page.
locale_date           Date formatted by the `date_format`.
metadata              Page header metadata `dict`.
save_as               Location to save the page.
slug                  Page slug.
source_path           Full system path of the page source file.
relative_source_path  Relative path from PATH_ to the page source file.
status                The page status, can be any of 'published', 'hidden'
                      or 'draft'.
summary               Rendered summary content.
tags                  List of :ref:`Tag <object-author_cat_tag>`
                      objects.
template              Template name to use for rendering.
title                 Title of the page.
translations          List of translations
                      :ref:`Page <object-page>` objects.
url                   URL to the page.
===================== ===================================================

.. _PATH: settings.html#PATH


Feeds
=====

The feed variables changed in 3.0. Each variable now explicitly lists ATOM or
RSS in the name. ATOM is still the default. Old themes will need to be updated.
Here is a complete list of the feed variables::

    FEED_ATOM
    FEED_RSS
    FEED_ALL_ATOM
    FEED_ALL_RSS
    CATEGORY_FEED_ATOM
    CATEGORY_FEED_RSS
    AUTHOR_FEED_ATOM
    AUTHOR_FEED_RSS
    TAG_FEED_ATOM
    TAG_FEED_RSS
    TRANSLATION_FEED_ATOM
    TRANSLATION_FEED_RSS


Inheritance
===========

Since version 3.0, Pelican supports inheritance from the ``simple`` theme, so
you can re-use the ``simple`` theme templates in your own themes.

If one of the mandatory files in the ``templates/`` directory of your theme is
missing, it will be replaced by the matching template from the ``simple``
theme. So if the HTML structure of a template in the ``simple`` theme is right
for you, you don't have to write a new template from scratch.

You can also extend templates from the ``simple`` theme in your own themes by
using the ``{% extends %}`` directive as in the following example:

.. code-block:: html+jinja

    {% extends "!simple/index.html" %} <!-- extends the ``index.html`` template from the ``simple`` theme -->

    {% extends "index.html" %} <!-- "regular" extending -->


Example
-------

With this system, it is possible to create a theme with just two files.

base.html
"""""""""

The first file is the ``templates/base.html`` template:

.. code-block:: html+jinja

    {% extends "!simple/base.html" %}

    {% block head %}
    {{ super() }}
    <link rel="stylesheet" type="text/css" href="{{ SITEURL }}/theme/css/style.css" />
    {% endblock %}

1. On the first line, we extend the ``base.html`` template from the ``simple``
   theme, so we don't have to rewrite the entire file.
2. On the third line, we open the ``head`` block which has already been defined
   in the ``simple`` theme.
3. On the fourth line, the function ``super()`` keeps the content previously
   inserted in the ``head`` block.
4. On the fifth line, we append a stylesheet to the page.
5. On the last line, we close the ``head`` block.

This file will be extended by all the other templates, so the stylesheet will
be linked from all pages.

style.css
"""""""""

The second file is the ``static/css/style.css`` CSS stylesheet:

.. code-block:: css

    body {
        font-family: monospace;
        font-size: 100%;
        background-color: white;
        color: #111;
        width: 80%;
        min-width: 400px;
        min-height: 200px;
        padding: 1em;
        margin: 5% 10%;
        border: thin solid gray;
        border-radius: 5px;
        display: block;
    }

    a:link    { color: blue; text-decoration: none; }
    a:hover   { color: blue; text-decoration: underline; }
    a:visited { color: blue; }

    h1 a { color: inherit !important }
    h2 a { color: inherit !important }
    h3 a { color: inherit !important }
    h4 a { color: inherit !important }
    h5 a { color: inherit !important }
    h6 a { color: inherit !important }

    pre {
        margin: 2em 1em 2em 4em;
    }

    #menu li {
        display: inline;
    }

    #post-list {
        margin-bottom: 1em;
        margin-top: 1em;
    }

Download
""""""""

You can download this example theme :download:`here <_static/theme-basic.zip>`.

182 docs/tips.rst
@@ -1,182 +0,0 @@

Tips
####

Here are some tips about Pelican that you might find useful.

Custom 404 Pages
================

When a browser requests a resource that the web server cannot find, the web
server usually displays a generic "File not found" (404) error page that can be
stark and unsightly. One way to provide an error page that matches the theme of
your site is to create a custom 404 page (*not* an article), such as this
Markdown-formatted example stored in ``content/pages/404.md``::

    Title: Not Found
    Status: hidden
    Save_as: 404.html

    The requested item could not be located. Perhaps you might want to check
    the [Archives](/archives.html)?

The next step is to configure your web server to display this custom page
instead of its default 404 page. For Nginx, add the following to your
configuration file's ``location`` block::

    error_page 404 /404.html;

For Apache::

    ErrorDocument 404 /404.html

For Amazon S3, first navigate to the ``Static Site Hosting`` menu in the bucket
settings on your AWS console. From there::

    Error Document: 404.html

Publishing to GitHub
====================

`GitHub Pages <https://help.github.com/categories/20/articles>`_ offer an easy
and convenient way to publish Pelican sites. There are `two types of GitHub
Pages <https://help.github.com/articles/user-organization-and-project-pages>`_:
*Project Pages* and *User Pages*. Pelican sites can be published as both
Project Pages and User Pages.

Project Pages
-------------

To publish a Pelican site as a Project Page you need to *push* the content of
the ``output`` dir generated by Pelican to a repository's ``gh-pages`` branch
on GitHub.

The excellent `ghp-import <https://github.com/davisp/ghp-import>`_, which can
be installed with ``pip``, makes this process really easy.

For example, if the source of your Pelican site is contained in a GitHub
repository, and if you want to publish that Pelican site in the form of Project
Pages to this repository, you can then use the following::

    $ pelican content -o output -s pelicanconf.py
    $ ghp-import output -b gh-pages
    $ git push origin gh-pages

The ``ghp-import output`` command updates the local ``gh-pages`` branch with
the content of the ``output`` directory (creating the branch if it doesn't
already exist). The ``git push origin gh-pages`` command updates the remote
``gh-pages`` branch, effectively publishing the Pelican site.

.. note::

    The ``github`` target of the Makefile (and the ``gh_pages`` task of
    ``tasks.py``) created by the ``pelican-quickstart`` command publishes the
    Pelican site as Project Pages, as described above.

User Pages
----------

To publish a Pelican site in the form of User Pages, you need to *push* the
content of the ``output`` dir generated by Pelican to the ``master`` branch of
your ``<username>.github.io`` repository on GitHub.

Again, you can take advantage of ``ghp-import``::

    $ pelican content -o output -s pelicanconf.py
    $ ghp-import output -b gh-pages
    $ git push git@github.com:elemoine/elemoine.github.io.git gh-pages:master

The ``git push`` command pushes the local ``gh-pages`` branch (freshly updated
by the ``ghp-import`` command) to the ``elemoine.github.io`` repository's
``master`` branch on GitHub.

.. note::

    To publish your Pelican site as User Pages, feel free to adjust the
    ``github`` target of the Makefile.

Another option for publishing to User Pages is to generate the output files in
the root directory of the project.

For example, if your main project folder is ``<username>.github.io``, you can
create the Pelican project in a subdirectory called ``Pelican``. Then from
inside the ``Pelican`` folder you can run::

    $ pelican content -o .. -s pelicanconf.py

Now you can push the whole project ``<username>.github.io`` to the master
branch of your GitHub repository::

    $ git push origin master

(assuming origin is set to your remote repository).

Custom 404 Pages
----------------

GitHub Pages will display the custom 404 page described above, as noted in the
relevant `GitHub docs <https://help.github.com/articles/custom-404-pages/>`_.

Update your site on each commit
-------------------------------

To automatically update your Pelican site on each commit, you can create a
post-commit hook. For example, you can add the following to
``.git/hooks/post-commit``::

    pelican content -o output -s pelicanconf.py && ghp-import output && git push origin gh-pages

Copy static files to the root of your site
------------------------------------------

To use a `custom domain
<https://help.github.com/articles/setting-up-a-custom-domain-with-pages>`_ with
GitHub Pages, you need to put the domain of your site (e.g.,
``blog.example.com``) inside a ``CNAME`` file at the root of your site. To do
this, create the ``content/extra/`` directory and add a ``CNAME`` file to it.
Then use the ``STATIC_PATHS`` setting to tell Pelican to copy this file to your
output directory. For example::

    STATIC_PATHS = ['images', 'extra/CNAME']
    EXTRA_PATH_METADATA = {'extra/CNAME': {'path': 'CNAME'},}

Note: use forward slashes, ``/``, even on Windows.

You can also use the ``EXTRA_PATH_METADATA`` mechanism to place a
``favicon.ico`` or ``robots.txt`` at the root of any site.
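
As an illustration, here is a hedged sketch of extending the settings above to
also copy a favicon and a ``robots.txt``, assuming those files exist under
``content/extra/``::

    STATIC_PATHS = ['images', 'extra/CNAME', 'extra/favicon.ico', 'extra/robots.txt']
    EXTRA_PATH_METADATA = {
        'extra/CNAME': {'path': 'CNAME'},
        'extra/favicon.ico': {'path': 'favicon.ico'},
        'extra/robots.txt': {'path': 'robots.txt'},
    }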

How to add YouTube or Vimeo Videos
==================================

The easiest way is to paste the embed code of the video from these sites
directly into your source content.

Alternatively, you can also use Pelican plugins like ``liquid_tags``,
``pelican_youtube``, or ``pelican_vimeo`` to embed videos in your content.

Moreover, markup languages like reST and Markdown have plugins that let you
embed videos in the markup. You can use the `reST video directive
<https://gist.github.com/dbrgn/2922648>`_ for reST or the `mdx_video plugin
<https://github.com/italomaia/mdx-video>`_ for Markdown.


Develop Locally Using SSL
=========================

Here's how you can set up your local Pelican server to support SSL.

First, create a self-signed certificate and key using ``openssl`` (this creates
``cert.pem`` and ``key.pem``)::

    $ openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -days 365 -nodes

And use this command to launch the server (the server starts within your
``output`` directory)::

    python -m pelican.server 8443 --key=../key.pem --cert=../cert.pem

If you are using ``develop-server.sh``, add this to the top::

    CERT="$BASEDIR/cert.pem"
    KEY="$BASEDIR/key.pem"

and modify the ``pelican.server`` line as follows::

    $PY -m pelican.server $port --ssl --cert="$CERT" --key="$KEY" &

1556 package-lock.json (generated)
File diff suppressed because it is too large

6 package.json
@@ -0,0 +1,6 @@

{
  "devDependencies": {
    "@tailwindcss/typography": "^0.5.15",
    "tailwindcss": "^3.4.17"
  }
}

@@ -1,498 +0,0 @@

import argparse
import logging
import multiprocessing
import os
import pprint
import sys
import time
import traceback
from collections.abc import Iterable
# Combines all paths to `pelican` package accessible from `sys.path`
# Makes it possible to install `pelican` and namespace plugins into different
# locations in the file system (e.g. pip with `-e` or `--user`)
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)

# pelican.log has to be the first pelican module to be loaded
# because logging.setLoggerClass has to be called before logging.getLogger
from pelican.log import init as init_logging
from pelican.generators import (ArticlesGenerator,  # noqa: I100
                                PagesGenerator, SourceFileGenerator,
                                StaticGenerator, TemplatePagesGenerator)
from pelican.plugins import signals
from pelican.plugins._utils import load_plugins
from pelican.readers import Readers
from pelican.server import ComplexHTTPRequestHandler, RootedHTTPServer
from pelican.settings import read_settings
from pelican.utils import (FileSystemWatcher, clean_output_dir, maybe_pluralize)
from pelican.writers import Writer

try:
    __version__ = __import__('pkg_resources') \
        .get_distribution('pelican').version
except Exception:
    __version__ = "unknown"

DEFAULT_CONFIG_NAME = 'pelicanconf.py'
logger = logging.getLogger(__name__)


class Pelican:

    def __init__(self, settings):
        """Pelican initialisation

        Performs some checks on the environment before doing anything else.
        """

        # define the default settings
        self.settings = settings

        self.path = settings['PATH']
        self.theme = settings['THEME']
        self.output_path = settings['OUTPUT_PATH']
        self.ignore_files = settings['IGNORE_FILES']
        self.delete_outputdir = settings['DELETE_OUTPUT_DIRECTORY']
        self.output_retention = settings['OUTPUT_RETENTION']

        self.init_path()
        self.init_plugins()
        signals.initialized.send(self)

    def init_path(self):
        if not any(p in sys.path for p in ['', os.curdir]):
            logger.debug("Adding current directory to system path")
            sys.path.insert(0, '')

    def init_plugins(self):
        self.plugins = load_plugins(self.settings)
        for plugin in self.plugins:
            logger.debug('Registering plugin `%s`', plugin.__name__)
            try:
                plugin.register()
            except Exception as e:
                logger.error('Cannot register plugin `%s`\n%s',
                             plugin.__name__, e)

    def run(self):
        """Run the generators and return"""
        start_time = time.time()

        context = self.settings.copy()
        # Share these among all the generators and content objects
        # They map source paths to Content objects or None
        context['generated_content'] = {}
        context['static_links'] = set()
        context['static_content'] = {}
        context['localsiteurl'] = self.settings['SITEURL']

        generators = [
            cls(
                context=context,
                settings=self.settings,
                path=self.path,
                theme=self.theme,
                output_path=self.output_path,
            ) for cls in self.get_generator_classes()
        ]

        # Delete the output directory if (1) the appropriate setting is True
        # and (2) that directory is not the parent of the source directory
        if (self.delete_outputdir
                and os.path.commonpath([os.path.realpath(self.output_path)]) !=
                os.path.commonpath([os.path.realpath(self.output_path),
                                    os.path.realpath(self.path)])):
            clean_output_dir(self.output_path, self.output_retention)

        for p in generators:
            if hasattr(p, 'generate_context'):
                p.generate_context()

        for p in generators:
            if hasattr(p, 'refresh_metadata_intersite_links'):
                p.refresh_metadata_intersite_links()

        signals.all_generators_finalized.send(generators)

        writer = self.get_writer()

        for p in generators:
            if hasattr(p, 'generate_output'):
                p.generate_output(writer)

        signals.finalized.send(self)

        articles_generator = next(g for g in generators
                                  if isinstance(g, ArticlesGenerator))
        pages_generator = next(g for g in generators
                               if isinstance(g, PagesGenerator))

        pluralized_articles = maybe_pluralize(
            (len(articles_generator.articles) +
             len(articles_generator.translations)),
            'article',
            'articles')
        pluralized_drafts = maybe_pluralize(
            (len(articles_generator.drafts) +
             len(articles_generator.drafts_translations)),
            'draft',
            'drafts')
        pluralized_pages = maybe_pluralize(
            (len(pages_generator.pages) +
             len(pages_generator.translations)),
            'page',
            'pages')
        pluralized_hidden_pages = maybe_pluralize(
            (len(pages_generator.hidden_pages) +
             len(pages_generator.hidden_translations)),
            'hidden page',
            'hidden pages')
        pluralized_draft_pages = maybe_pluralize(
            (len(pages_generator.draft_pages) +
             len(pages_generator.draft_translations)),
            'draft page',
            'draft pages')

        print('Done: Processed {}, {}, {}, {} and {} in {:.2f} seconds.'
              .format(
                  pluralized_articles,
                  pluralized_drafts,
                  pluralized_pages,
                  pluralized_hidden_pages,
                  pluralized_draft_pages,
                  time.time() - start_time))

    def get_generator_classes(self):
        generators = [ArticlesGenerator, PagesGenerator]

        if self.settings['TEMPLATE_PAGES']:
            generators.append(TemplatePagesGenerator)
        if self.settings['OUTPUT_SOURCES']:
            generators.append(SourceFileGenerator)

        for pair in signals.get_generators.send(self):
            (funct, value) = pair

            if not isinstance(value, Iterable):
                value = (value, )

            for v in value:
                if isinstance(v, type):
                    logger.debug('Found generator: %s', v)
                    generators.append(v)

        # StaticGenerator must run last, so it can identify files that
        # were skipped by the other generators, and so static files can
        # have their output paths overridden by the {attach} link syntax.
        generators.append(StaticGenerator)
        return generators

    def get_writer(self):
        writers = [w for (_, w) in signals.get_writer.send(self)
                   if isinstance(w, type)]
        writers_found = len(writers)
        if writers_found == 0:
            return Writer(self.output_path, settings=self.settings)
        else:
            writer = writers[0]
            if writers_found == 1:
                logger.debug('Found writer: %s', writer)
            else:
                logger.warning(
                    '%s writers found, using only first one: %s',
                    writers_found, writer)
            return writer(self.output_path, settings=self.settings)


class PrintSettings(argparse.Action):
    def __call__(self, parser, namespace, values, option_string):
        instance, settings = get_instance(namespace)

        if values:
            # One or more arguments provided, so only print those settings
            for setting in values:
                if setting in settings:
                    # Only add newline between setting name and value if dict
                    if isinstance(settings[setting], dict):
                        setting_format = '\n{}:\n{}'
                    else:
                        setting_format = '\n{}: {}'
                    print(setting_format.format(
                        setting,
                        pprint.pformat(settings[setting])))
                else:
                    print('\n{} is not a recognized setting.'.format(setting))
                    break
        else:
            # No argument was given to --print-settings, so print all settings
            pprint.pprint(settings)
|
|
||||||
|
|
||||||
parser.exit()
|
|
||||||
|
|
||||||
|
|
||||||
def parse_arguments(argv=None):
|
|
||||||
parser = argparse.ArgumentParser(
|
|
||||||
description='A tool to generate a static blog, '
|
|
||||||
' with restructured text input files.',
|
|
||||||
formatter_class=argparse.ArgumentDefaultsHelpFormatter
|
|
||||||
)
|
|
||||||
|
|
||||||
parser.add_argument(dest='path', nargs='?',
|
|
||||||
help='Path where to find the content files.',
|
|
||||||
default=None)
|
|
||||||
|
|
||||||
parser.add_argument('-t', '--theme-path', dest='theme',
|
|
||||||
help='Path where to find the theme templates. If not '
|
|
||||||
'specified, it will use the default one included with '
|
|
||||||
'pelican.')
|
|
||||||
|
|
||||||
parser.add_argument('-o', '--output', dest='output',
|
|
||||||
help='Where to output the generated files. If not '
|
|
||||||
'specified, a directory will be created, named '
|
|
||||||
'"output" in the current path.')
|
|
||||||
|
|
||||||
parser.add_argument('-s', '--settings', dest='settings',
|
|
||||||
help='The settings of the application, this is '
|
|
||||||
'automatically set to {} if a file exists with this '
|
|
||||||
'name.'.format(DEFAULT_CONFIG_NAME))
|
|
||||||
|
|
||||||
parser.add_argument('-d', '--delete-output-directory',
|
|
||||||
dest='delete_outputdir', action='store_true',
|
|
||||||
default=None, help='Delete the output directory.')
|
|
||||||
|
|
||||||
parser.add_argument('-v', '--verbose', action='store_const',
|
|
||||||
const=logging.INFO, dest='verbosity',
|
|
||||||
help='Show all messages.')
|
|
||||||
|
|
||||||
parser.add_argument('-q', '--quiet', action='store_const',
|
|
||||||
const=logging.CRITICAL, dest='verbosity',
|
|
||||||
help='Show only critical errors.')
|
|
||||||
|
|
||||||
parser.add_argument('-D', '--debug', action='store_const',
|
|
||||||
const=logging.DEBUG, dest='verbosity',
|
|
||||||
help='Show all messages, including debug messages.')
|
|
||||||
|
|
||||||
parser.add_argument('--version', action='version', version=__version__,
|
|
||||||
help='Print the pelican version and exit.')
|
|
||||||
|
|
||||||
parser.add_argument('-r', '--autoreload', dest='autoreload',
|
|
||||||
action='store_true',
|
|
||||||
help='Relaunch pelican each time a modification occurs'
|
|
||||||
' on the content files.')
|
|
||||||
|
|
||||||
parser.add_argument('--print-settings', dest='print_settings', nargs='*',
|
|
||||||
action=PrintSettings, metavar='SETTING_NAME',
|
|
||||||
help='Print current configuration settings and exit. '
|
|
||||||
'Append one or more setting name arguments to see the '
|
|
||||||
'values for specific settings only.')
|
|
||||||
|
|
||||||
parser.add_argument('--relative-urls', dest='relative_paths',
|
|
||||||
action='store_true',
|
|
||||||
help='Use relative urls in output, '
|
|
||||||
'useful for site development')
|
|
||||||
|
|
||||||
parser.add_argument('--cache-path', dest='cache_path',
|
|
||||||
help=('Directory in which to store cache files. '
|
|
||||||
'If not specified, defaults to "cache".'))
|
|
||||||
|
|
||||||
parser.add_argument('--ignore-cache', action='store_true',
|
|
||||||
dest='ignore_cache', help='Ignore content cache '
|
|
||||||
'from previous runs by not loading cache files.')
|
|
||||||
|
|
||||||
parser.add_argument('-w', '--write-selected', type=str,
|
|
||||||
dest='selected_paths', default=None,
|
|
||||||
help='Comma separated list of selected paths to write')
|
|
||||||
|
|
||||||
parser.add_argument('--fatal', metavar='errors|warnings',
|
|
||||||
choices=('errors', 'warnings'), default='',
|
|
||||||
help=('Exit the program with non-zero status if any '
|
|
||||||
'errors/warnings encountered.'))
|
|
||||||
|
|
||||||
parser.add_argument('--logs-dedup-min-level', default='WARNING',
|
|
||||||
choices=('DEBUG', 'INFO', 'WARNING', 'ERROR'),
|
|
||||||
help=('Only enable log de-duplication for levels equal'
|
|
||||||
' to or above the specified value'))
|
|
||||||
|
|
||||||
parser.add_argument('-l', '--listen', dest='listen', action='store_true',
|
|
||||||
help='Serve content files via HTTP and port 8000.')
|
|
||||||
|
|
||||||
parser.add_argument('-p', '--port', dest='port', type=int,
|
|
||||||
help='Port to serve HTTP files at. (default: 8000)')
|
|
||||||
|
|
||||||
parser.add_argument('-b', '--bind', dest='bind',
|
|
||||||
help='IP to bind to when serving files via HTTP '
|
|
||||||
'(default: 127.0.0.1)')
|
|
||||||
|
|
||||||
args = parser.parse_args(argv)
|
|
||||||
|
|
||||||
if args.port is not None and not args.listen:
|
|
||||||
logger.warning('--port without --listen has no effect')
|
|
||||||
if args.bind is not None and not args.listen:
|
|
||||||
logger.warning('--bind without --listen has no effect')
|
|
||||||
|
|
||||||
return args
|
|
||||||
|
|
||||||
|
|
||||||
def get_config(args):
|
|
||||||
config = {}
|
|
||||||
if args.path:
|
|
||||||
config['PATH'] = os.path.abspath(os.path.expanduser(args.path))
|
|
||||||
if args.output:
|
|
||||||
config['OUTPUT_PATH'] = \
|
|
||||||
os.path.abspath(os.path.expanduser(args.output))
|
|
||||||
if args.theme:
|
|
||||||
abstheme = os.path.abspath(os.path.expanduser(args.theme))
|
|
||||||
config['THEME'] = abstheme if os.path.exists(abstheme) else args.theme
|
|
||||||
if args.delete_outputdir is not None:
|
|
||||||
config['DELETE_OUTPUT_DIRECTORY'] = args.delete_outputdir
|
|
||||||
if args.ignore_cache:
|
|
||||||
config['LOAD_CONTENT_CACHE'] = False
|
|
||||||
if args.cache_path:
|
|
||||||
config['CACHE_PATH'] = args.cache_path
|
|
||||||
if args.selected_paths:
|
|
||||||
config['WRITE_SELECTED'] = args.selected_paths.split(',')
|
|
||||||
if args.relative_paths:
|
|
||||||
config['RELATIVE_URLS'] = args.relative_paths
|
|
||||||
if args.port is not None:
|
|
||||||
config['PORT'] = args.port
|
|
||||||
if args.bind is not None:
|
|
||||||
config['BIND'] = args.bind
|
|
||||||
config['DEBUG'] = args.verbosity == logging.DEBUG
|
|
||||||
|
|
||||||
return config
|
|
||||||
|
|
||||||
|
|
||||||
def get_instance(args):
|
|
||||||
|
|
||||||
config_file = args.settings
|
|
||||||
if config_file is None and os.path.isfile(DEFAULT_CONFIG_NAME):
|
|
||||||
config_file = DEFAULT_CONFIG_NAME
|
|
||||||
args.settings = DEFAULT_CONFIG_NAME
|
|
||||||
|
|
||||||
settings = read_settings(config_file, override=get_config(args))
|
|
||||||
|
|
||||||
cls = settings['PELICAN_CLASS']
|
|
||||||
if isinstance(cls, str):
|
|
||||||
module, cls_name = cls.rsplit('.', 1)
|
|
||||||
module = __import__(module)
|
|
||||||
cls = getattr(module, cls_name)
|
|
||||||
|
|
||||||
return cls(settings), settings
|
|
||||||
|
|
||||||
|
|
||||||
def autoreload(args, excqueue=None):
|
|
||||||
print(' --- AutoReload Mode: Monitoring `content`, `theme` and'
|
|
||||||
' `settings` for changes. ---')
|
|
||||||
pelican, settings = get_instance(args)
|
|
||||||
watcher = FileSystemWatcher(args.settings, Readers, settings)
|
|
||||||
sleep = False
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
# Don't sleep first time, but sleep afterwards to reduce cpu load
|
|
||||||
if sleep:
|
|
||||||
time.sleep(0.5)
|
|
||||||
else:
|
|
||||||
sleep = True
|
|
||||||
|
|
||||||
modified = watcher.check()
|
|
||||||
|
|
||||||
if modified['settings']:
|
|
||||||
pelican, settings = get_instance(args)
|
|
||||||
watcher.update_watchers(settings)
|
|
||||||
|
|
||||||
if any(modified.values()):
|
|
||||||
print('\n-> Modified: {}. re-generating...'.format(
|
|
||||||
', '.join(k for k, v in modified.items() if v)))
|
|
||||||
pelican.run()
|
|
||||||
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
if excqueue is not None:
|
|
||||||
excqueue.put(None)
|
|
||||||
return
|
|
||||||
raise
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
if (args.verbosity == logging.DEBUG):
|
|
||||||
if excqueue is not None:
|
|
||||||
excqueue.put(
|
|
||||||
traceback.format_exception_only(type(e), e)[-1])
|
|
||||||
else:
|
|
||||||
raise
|
|
||||||
logger.warning(
|
|
||||||
'Caught exception:\n"%s".', e,
|
|
||||||
exc_info=settings.get('DEBUG', False))
|
|
||||||
|
|
||||||
|
|
||||||
def listen(server, port, output, excqueue=None):
|
|
||||||
RootedHTTPServer.allow_reuse_address = True
|
|
||||||
try:
|
|
||||||
httpd = RootedHTTPServer(
|
|
||||||
output, (server, port), ComplexHTTPRequestHandler)
|
|
||||||
except OSError as e:
|
|
||||||
logging.error("Could not listen on port %s, server %s.", port, server)
|
|
||||||
if excqueue is not None:
|
|
||||||
excqueue.put(traceback.format_exception_only(type(e), e)[-1])
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
print("\nServing site at: {}:{} - Tap CTRL-C to stop".format(
|
|
||||||
server, port))
|
|
||||||
httpd.serve_forever()
|
|
||||||
except Exception as e:
|
|
||||||
if excqueue is not None:
|
|
||||||
excqueue.put(traceback.format_exception_only(type(e), e)[-1])
|
|
||||||
return
|
|
||||||
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
httpd.socket.close()
|
|
||||||
if excqueue is not None:
|
|
||||||
return
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
def main(argv=None):
|
|
||||||
args = parse_arguments(argv)
|
|
||||||
logs_dedup_min_level = getattr(logging, args.logs_dedup_min_level)
|
|
||||||
init_logging(args.verbosity, args.fatal,
|
|
||||||
logs_dedup_min_level=logs_dedup_min_level)
|
|
||||||
|
|
||||||
logger.debug('Pelican version: %s', __version__)
|
|
||||||
logger.debug('Python version: %s', sys.version.split()[0])
|
|
||||||
|
|
||||||
try:
|
|
||||||
pelican, settings = get_instance(args)
|
|
||||||
|
|
||||||
if args.autoreload and args.listen:
|
|
||||||
excqueue = multiprocessing.Queue()
|
|
||||||
p1 = multiprocessing.Process(
|
|
||||||
target=autoreload,
|
|
||||||
args=(args, excqueue))
|
|
||||||
p2 = multiprocessing.Process(
|
|
||||||
target=listen,
|
|
||||||
args=(settings.get('BIND'), settings.get('PORT'),
|
|
||||||
settings.get("OUTPUT_PATH"), excqueue))
|
|
||||||
p1.start()
|
|
||||||
p2.start()
|
|
||||||
exc = excqueue.get()
|
|
||||||
p1.terminate()
|
|
||||||
p2.terminate()
|
|
||||||
if exc is not None:
|
|
||||||
logger.critical(exc)
|
|
||||||
elif args.autoreload:
|
|
||||||
autoreload(args)
|
|
||||||
elif args.listen:
|
|
||||||
listen(settings.get('BIND'), settings.get('PORT'),
|
|
||||||
settings.get("OUTPUT_PATH"))
|
|
||||||
else:
|
|
||||||
watcher = FileSystemWatcher(args.settings, Readers, settings)
|
|
||||||
watcher.check()
|
|
||||||
pelican.run()
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
logger.warning('Keyboard interrupt received. Exiting.')
|
|
||||||
except Exception as e:
|
|
||||||
logger.critical('%s', e)
|
|
||||||
|
|
||||||
if args.verbosity == logging.DEBUG:
|
|
||||||
raise
|
|
||||||
else:
|
|
||||||
sys.exit(getattr(e, 'exitcode', 1))
|
|
||||||
|
|
@ -1,9 +0,0 @@
|
||||||
"""
|
|
||||||
python -m pelican module entry point to run via python -m
|
|
||||||
"""
|
|
||||||
|
|
||||||
from . import main
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
main()
|
|
||||||
134
pelican/cache.py
134
pelican/cache.py
|
|
@ -1,134 +0,0 @@
|
||||||
import hashlib
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import pickle
|
|
||||||
|
|
||||||
from pelican.utils import mkdir_p
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class FileDataCacher:
|
|
||||||
"""Class that can cache data contained in files"""
|
|
||||||
|
|
||||||
def __init__(self, settings, cache_name, caching_policy, load_policy):
|
|
||||||
"""Load the specified cache within CACHE_PATH in settings
|
|
||||||
|
|
||||||
only if *load_policy* is True,
|
|
||||||
May use gzip if GZIP_CACHE ins settings is True.
|
|
||||||
Sets caching policy according to *caching_policy*.
|
|
||||||
"""
|
|
||||||
self.settings = settings
|
|
||||||
self._cache_path = os.path.join(self.settings['CACHE_PATH'],
|
|
||||||
cache_name)
|
|
||||||
self._cache_data_policy = caching_policy
|
|
||||||
if self.settings['GZIP_CACHE']:
|
|
||||||
import gzip
|
|
||||||
self._cache_open = gzip.open
|
|
||||||
else:
|
|
||||||
self._cache_open = open
|
|
||||||
if load_policy:
|
|
||||||
try:
|
|
||||||
with self._cache_open(self._cache_path, 'rb') as fhandle:
|
|
||||||
self._cache = pickle.load(fhandle)
|
|
||||||
except (OSError, UnicodeDecodeError) as err:
|
|
||||||
logger.debug('Cannot load cache %s (this is normal on first '
|
|
||||||
'run). Proceeding with empty cache.\n%s',
|
|
||||||
self._cache_path, err)
|
|
||||||
self._cache = {}
|
|
||||||
except pickle.PickleError as err:
|
|
||||||
logger.warning('Cannot unpickle cache %s, cache may be using '
|
|
||||||
'an incompatible protocol (see pelican '
|
|
||||||
'caching docs). '
|
|
||||||
'Proceeding with empty cache.\n%s',
|
|
||||||
self._cache_path, err)
|
|
||||||
self._cache = {}
|
|
||||||
else:
|
|
||||||
self._cache = {}
|
|
||||||
|
|
||||||
def cache_data(self, filename, data):
|
|
||||||
"""Cache data for given file"""
|
|
||||||
if self._cache_data_policy:
|
|
||||||
self._cache[filename] = data
|
|
||||||
|
|
||||||
def get_cached_data(self, filename, default=None):
|
|
||||||
"""Get cached data for the given file
|
|
||||||
|
|
||||||
if no data is cached, return the default object
|
|
||||||
"""
|
|
||||||
return self._cache.get(filename, default)
|
|
||||||
|
|
||||||
def save_cache(self):
|
|
||||||
"""Save the updated cache"""
|
|
||||||
if self._cache_data_policy:
|
|
||||||
try:
|
|
||||||
mkdir_p(self.settings['CACHE_PATH'])
|
|
||||||
with self._cache_open(self._cache_path, 'wb') as fhandle:
|
|
||||||
pickle.dump(self._cache, fhandle)
|
|
||||||
except (OSError, pickle.PicklingError) as err:
|
|
||||||
logger.warning('Could not save cache %s\n ... %s',
|
|
||||||
self._cache_path, err)
|
|
||||||
|
|
||||||
|
|
||||||
class FileStampDataCacher(FileDataCacher):
|
|
||||||
"""Subclass that also caches the stamp of the file"""
|
|
||||||
|
|
||||||
def __init__(self, settings, cache_name, caching_policy, load_policy):
|
|
||||||
"""This sublcass additionally sets filestamp function
|
|
||||||
and base path for filestamping operations
|
|
||||||
"""
|
|
||||||
|
|
||||||
super().__init__(settings, cache_name, caching_policy, load_policy)
|
|
||||||
|
|
||||||
method = self.settings['CHECK_MODIFIED_METHOD']
|
|
||||||
if method == 'mtime':
|
|
||||||
self._filestamp_func = os.path.getmtime
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
hash_func = getattr(hashlib, method)
|
|
||||||
|
|
||||||
def filestamp_func(filename):
|
|
||||||
"""return hash of file contents"""
|
|
||||||
with open(filename, 'rb') as fhandle:
|
|
||||||
return hash_func(fhandle.read()).digest()
|
|
||||||
|
|
||||||
self._filestamp_func = filestamp_func
|
|
||||||
except AttributeError as err:
|
|
||||||
logger.warning('Could not get hashing function\n\t%s', err)
|
|
||||||
self._filestamp_func = None
|
|
||||||
|
|
||||||
def cache_data(self, filename, data):
|
|
||||||
"""Cache stamp and data for the given file"""
|
|
||||||
stamp = self._get_file_stamp(filename)
|
|
||||||
super().cache_data(filename, (stamp, data))
|
|
||||||
|
|
||||||
def _get_file_stamp(self, filename):
|
|
||||||
"""Check if the given file has been modified
|
|
||||||
since the previous build.
|
|
||||||
|
|
||||||
depending on CHECK_MODIFIED_METHOD
|
|
||||||
a float may be returned for 'mtime',
|
|
||||||
a hash for a function name in the hashlib module
|
|
||||||
or an empty bytes string otherwise
|
|
||||||
"""
|
|
||||||
|
|
||||||
try:
|
|
||||||
return self._filestamp_func(filename)
|
|
||||||
except (OSError, TypeError) as err:
|
|
||||||
logger.warning('Cannot get modification stamp for %s\n\t%s',
|
|
||||||
filename, err)
|
|
||||||
return ''
|
|
||||||
|
|
||||||
def get_cached_data(self, filename, default=None):
|
|
||||||
"""Get the cached data for the given filename
|
|
||||||
if the file has not been modified.
|
|
||||||
|
|
||||||
If no record exists or file has been modified, return default.
|
|
||||||
Modification is checked by comparing the cached
|
|
||||||
and current file stamp.
|
|
||||||
"""
|
|
||||||
|
|
||||||
stamp, data = super().get_cached_data(filename, (None, default))
|
|
||||||
if stamp != self._get_file_stamp(filename):
|
|
||||||
return default
|
|
||||||
return data
|
|
||||||
|
|
@ -1,607 +0,0 @@
|
||||||
import copy
|
|
||||||
import datetime
|
|
||||||
import locale
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
from urllib.parse import urljoin, urlparse, urlunparse
|
|
||||||
|
|
||||||
import pytz
|
|
||||||
|
|
||||||
from pelican.plugins import signals
|
|
||||||
from pelican.settings import DEFAULT_CONFIG
|
|
||||||
from pelican.utils import (deprecated_attribute, memoized, path_to_url,
|
|
||||||
posixize_path, sanitised_join, set_date_tzinfo,
|
|
||||||
slugify, truncate_html_words)
|
|
||||||
|
|
||||||
# Import these so that they're avalaible when you import from pelican.contents.
|
|
||||||
from pelican.urlwrappers import (Author, Category, Tag, URLWrapper) # NOQA
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Content:
|
|
||||||
"""Represents a content.
|
|
||||||
|
|
||||||
:param content: the string to parse, containing the original content.
|
|
||||||
:param metadata: the metadata associated to this page (optional).
|
|
||||||
:param settings: the settings dictionary (optional).
|
|
||||||
:param source_path: The location of the source of this content (if any).
|
|
||||||
:param context: The shared context between generators.
|
|
||||||
|
|
||||||
"""
|
|
||||||
@deprecated_attribute(old='filename', new='source_path', since=(3, 2, 0))
|
|
||||||
def filename():
|
|
||||||
return None
|
|
||||||
|
|
||||||
def __init__(self, content, metadata=None, settings=None,
|
|
||||||
source_path=None, context=None):
|
|
||||||
if metadata is None:
|
|
||||||
metadata = {}
|
|
||||||
if settings is None:
|
|
||||||
settings = copy.deepcopy(DEFAULT_CONFIG)
|
|
||||||
|
|
||||||
self.settings = settings
|
|
||||||
self._content = content
|
|
||||||
if context is None:
|
|
||||||
context = {}
|
|
||||||
self._context = context
|
|
||||||
self.translations = []
|
|
||||||
|
|
||||||
local_metadata = dict()
|
|
||||||
local_metadata.update(metadata)
|
|
||||||
|
|
||||||
# set metadata as attributes
|
|
||||||
for key, value in local_metadata.items():
|
|
||||||
if key in ('save_as', 'url'):
|
|
||||||
key = 'override_' + key
|
|
||||||
setattr(self, key.lower(), value)
|
|
||||||
|
|
||||||
# also keep track of the metadata attributes available
|
|
||||||
self.metadata = local_metadata
|
|
||||||
|
|
||||||
# default template if it's not defined in page
|
|
||||||
self.template = self._get_template()
|
|
||||||
|
|
||||||
# First, read the authors from "authors", if not, fallback to "author"
|
|
||||||
# and if not use the settings defined one, if any.
|
|
||||||
if not hasattr(self, 'author'):
|
|
||||||
if hasattr(self, 'authors'):
|
|
||||||
self.author = self.authors[0]
|
|
||||||
elif 'AUTHOR' in settings:
|
|
||||||
self.author = Author(settings['AUTHOR'], settings)
|
|
||||||
|
|
||||||
if not hasattr(self, 'authors') and hasattr(self, 'author'):
|
|
||||||
self.authors = [self.author]
|
|
||||||
|
|
||||||
# XXX Split all the following code into pieces, there is too much here.
|
|
||||||
|
|
||||||
# manage languages
|
|
||||||
self.in_default_lang = True
|
|
||||||
if 'DEFAULT_LANG' in settings:
|
|
||||||
default_lang = settings['DEFAULT_LANG'].lower()
|
|
||||||
if not hasattr(self, 'lang'):
|
|
||||||
self.lang = default_lang
|
|
||||||
|
|
||||||
self.in_default_lang = (self.lang == default_lang)
|
|
||||||
|
|
||||||
# create the slug if not existing, generate slug according to
|
|
||||||
# setting of SLUG_ATTRIBUTE
|
|
||||||
if not hasattr(self, 'slug'):
|
|
||||||
if (settings['SLUGIFY_SOURCE'] == 'title' and
|
|
||||||
hasattr(self, 'title')):
|
|
||||||
value = self.title
|
|
||||||
elif (settings['SLUGIFY_SOURCE'] == 'basename' and
|
|
||||||
source_path is not None):
|
|
||||||
value = os.path.basename(os.path.splitext(source_path)[0])
|
|
||||||
else:
|
|
||||||
value = None
|
|
||||||
if value is not None:
|
|
||||||
self.slug = slugify(
|
|
||||||
value,
|
|
||||||
regex_subs=settings.get('SLUG_REGEX_SUBSTITUTIONS', []),
|
|
||||||
preserve_case=settings.get('SLUGIFY_PRESERVE_CASE', False),
|
|
||||||
use_unicode=settings.get('SLUGIFY_USE_UNICODE', False))
|
|
||||||
|
|
||||||
self.source_path = source_path
|
|
||||||
self.relative_source_path = self.get_relative_source_path()
|
|
||||||
|
|
||||||
# manage the date format
|
|
||||||
if not hasattr(self, 'date_format'):
|
|
||||||
if hasattr(self, 'lang') and self.lang in settings['DATE_FORMATS']:
|
|
||||||
self.date_format = settings['DATE_FORMATS'][self.lang]
|
|
||||||
else:
|
|
||||||
self.date_format = settings['DEFAULT_DATE_FORMAT']
|
|
||||||
|
|
||||||
if isinstance(self.date_format, tuple):
|
|
||||||
locale_string = self.date_format[0]
|
|
||||||
locale.setlocale(locale.LC_ALL, locale_string)
|
|
||||||
self.date_format = self.date_format[1]
|
|
||||||
|
|
||||||
# manage timezone
|
|
||||||
default_timezone = settings.get('TIMEZONE', 'UTC')
|
|
||||||
timezone = getattr(self, 'timezone', default_timezone)
|
|
||||||
self.timezone = pytz.timezone(timezone)
|
|
||||||
|
|
||||||
if hasattr(self, 'date'):
|
|
||||||
self.date = set_date_tzinfo(self.date, timezone)
|
|
||||||
self.locale_date = self.date.strftime(self.date_format)
|
|
||||||
|
|
||||||
if hasattr(self, 'modified'):
|
|
||||||
self.modified = set_date_tzinfo(self.modified, timezone)
|
|
||||||
self.locale_modified = self.modified.strftime(self.date_format)
|
|
||||||
|
|
||||||
# manage status
|
|
||||||
if not hasattr(self, 'status'):
|
|
||||||
# Previous default of None broke comment plugins and perhaps others
|
|
||||||
self.status = getattr(self, 'default_status', '')
|
|
||||||
|
|
||||||
# store the summary metadata if it is set
|
|
||||||
if 'summary' in metadata:
|
|
||||||
self._summary = metadata['summary']
|
|
||||||
|
|
||||||
signals.content_object_init.send(self)
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return self.source_path or repr(self)
|
|
||||||
|
|
||||||
def _has_valid_mandatory_properties(self):
|
|
||||||
"""Test mandatory properties are set."""
|
|
||||||
for prop in self.mandatory_properties:
|
|
||||||
if not hasattr(self, prop):
|
|
||||||
logger.error(
|
|
||||||
"Skipping %s: could not find information about '%s'",
|
|
||||||
self, prop)
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
def _has_valid_save_as(self):
|
|
||||||
"""Return true if save_as doesn't write outside output path, false
|
|
||||||
otherwise."""
|
|
||||||
try:
|
|
||||||
output_path = self.settings["OUTPUT_PATH"]
|
|
||||||
except KeyError:
|
|
||||||
# we cannot check
|
|
||||||
return True
|
|
||||||
|
|
||||||
try:
|
|
||||||
sanitised_join(output_path, self.save_as)
|
|
||||||
except RuntimeError: # outside output_dir
|
|
||||||
logger.error(
|
|
||||||
"Skipping %s: file %r would be written outside output path",
|
|
||||||
self,
|
|
||||||
self.save_as,
|
|
||||||
)
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
def _has_valid_status(self):
|
|
||||||
if hasattr(self, 'allowed_statuses'):
|
|
||||||
if self.status not in self.allowed_statuses:
|
|
||||||
logger.error(
|
|
||||||
"Unknown status '%s' for file %s, skipping it.",
|
|
||||||
self.status,
|
|
||||||
self
|
|
||||||
)
|
|
||||||
return False
|
|
||||||
|
|
||||||
# if undefined we allow all
|
|
||||||
return True
|
|
||||||
|
|
||||||
def is_valid(self):
|
|
||||||
"""Validate Content"""
|
|
||||||
# Use all() to not short circuit and get results of all validations
|
|
||||||
return all([self._has_valid_mandatory_properties(),
|
|
||||||
self._has_valid_save_as(),
|
|
||||||
self._has_valid_status()])
|
|
||||||
|
|
||||||
@property
|
|
||||||
def url_format(self):
|
|
||||||
"""Returns the URL, formatted with the proper values"""
|
|
||||||
metadata = copy.copy(self.metadata)
|
|
||||||
path = self.metadata.get('path', self.get_relative_source_path())
|
|
||||||
metadata.update({
|
|
||||||
'path': path_to_url(path),
|
|
||||||
'slug': getattr(self, 'slug', ''),
|
|
||||||
'lang': getattr(self, 'lang', 'en'),
|
|
||||||
'date': getattr(self, 'date', datetime.datetime.now()),
|
|
||||||
'author': self.author.slug if hasattr(self, 'author') else '',
|
|
||||||
'category': self.category.slug if hasattr(self, 'category') else ''
|
|
||||||
})
|
|
||||||
return metadata
|
|
||||||
|
|
||||||
def _expand_settings(self, key, klass=None):
|
|
||||||
if not klass:
|
|
||||||
klass = self.__class__.__name__
|
|
||||||
fq_key = ('{}_{}'.format(klass, key)).upper()
|
|
||||||
return self.settings[fq_key].format(**self.url_format)
|
|
||||||
|
|
||||||
def get_url_setting(self, key):
|
|
||||||
if hasattr(self, 'override_' + key):
|
|
||||||
return getattr(self, 'override_' + key)
|
|
||||||
key = key if self.in_default_lang else 'lang_%s' % key
|
|
||||||
return self._expand_settings(key)
|
|
||||||
|
|
||||||
def _link_replacer(self, siteurl, m):
|
|
||||||
what = m.group('what')
|
|
||||||
value = urlparse(m.group('value'))
|
|
||||||
path = value.path
|
|
||||||
origin = m.group('path')
|
|
||||||
|
|
||||||
# urllib.parse.urljoin() produces `a.html` for urljoin("..", "a.html")
|
|
||||||
# so if RELATIVE_URLS are enabled, we fall back to os.path.join() to
|
|
||||||
# properly get `../a.html`. However, os.path.join() produces
|
|
||||||
# `baz/http://foo/bar.html` for join("baz", "http://foo/bar.html")
|
|
||||||
# instead of correct "http://foo/bar.html", so one has to pick a side
|
|
||||||
# as there is no silver bullet.
|
|
||||||
if self.settings['RELATIVE_URLS']:
|
|
||||||
joiner = os.path.join
|
|
||||||
else:
|
|
||||||
joiner = urljoin
|
|
||||||
|
|
||||||
# However, it's not *that* simple: urljoin("blog", "index.html")
|
|
||||||
# produces just `index.html` instead of `blog/index.html` (unlike
|
|
||||||
# os.path.join()), so in order to get a correct answer one needs to
|
|
||||||
# append a trailing slash to siteurl in that case. This also makes
|
|
||||||
# the new behavior fully compatible with Pelican 3.7.1.
|
|
||||||
if not siteurl.endswith('/'):
|
|
||||||
siteurl += '/'
|
|
||||||
|
|
||||||
# XXX Put this in a different location.
|
|
||||||
if what in {'filename', 'static', 'attach'}:
|
|
||||||
if path.startswith('/'):
|
|
||||||
path = path[1:]
|
|
||||||
else:
|
|
||||||
# relative to the source path of this content
|
|
||||||
path = self.get_relative_source_path(
|
|
||||||
os.path.join(self.relative_dir, path)
|
|
||||||
)
|
|
||||||
|
|
||||||
key = 'static_content' if what in ('static', 'attach')\
|
|
||||||
else 'generated_content'
|
|
||||||
|
|
||||||
def _get_linked_content(key, path):
|
|
||||||
try:
|
|
||||||
return self._context[key][path]
|
|
||||||
except KeyError:
|
|
||||||
try:
|
|
||||||
# Markdown escapes spaces, try unescaping
|
|
||||||
return self._context[key][path.replace('%20', ' ')]
|
|
||||||
except KeyError:
|
|
||||||
if what == 'filename' and key == 'generated_content':
|
|
||||||
key = 'static_content'
|
|
||||||
linked_content = _get_linked_content(key, path)
|
|
||||||
if linked_content:
|
|
||||||
logger.warning(
|
|
||||||
'{filename} used for linking to static'
|
|
||||||
' content %s in %s. Use {static} instead',
|
|
||||||
path,
|
|
||||||
self.get_relative_source_path())
|
|
||||||
return linked_content
|
|
||||||
return None
|
|
||||||
|
|
||||||
linked_content = _get_linked_content(key, path)
|
|
||||||
if linked_content:
|
|
||||||
if what == 'attach':
|
|
||||||
linked_content.attach_to(self)
|
|
||||||
origin = joiner(siteurl, linked_content.url)
|
|
||||||
origin = origin.replace('\\', '/') # for Windows paths.
|
|
||||||
else:
|
|
||||||
logger.warning(
|
|
||||||
"Unable to find '%s', skipping url replacement.",
|
|
||||||
value.geturl(), extra={
|
|
||||||
'limit_msg': ("Other resources were not found "
|
|
||||||
"and their urls not replaced")})
|
|
||||||
elif what == 'category':
|
|
||||||
origin = joiner(siteurl, Category(path, self.settings).url)
|
|
||||||
elif what == 'tag':
|
|
||||||
origin = joiner(siteurl, Tag(path, self.settings).url)
|
|
||||||
elif what == 'index':
|
|
||||||
origin = joiner(siteurl, self.settings['INDEX_SAVE_AS'])
|
|
||||||
elif what == 'author':
|
|
||||||
origin = joiner(siteurl, Author(path, self.settings).url)
|
|
||||||
else:
|
|
||||||
logger.warning(
|
|
||||||
"Replacement Indicator '%s' not recognized, "
|
|
||||||
"skipping replacement",
|
|
||||||
what)
|
|
||||||
|
|
||||||
# keep all other parts, such as query, fragment, etc.
|
|
||||||
parts = list(value)
|
|
||||||
parts[2] = origin
|
|
||||||
origin = urlunparse(parts)
|
|
||||||
|
|
||||||
return ''.join((m.group('markup'), m.group('quote'), origin,
|
|
||||||
m.group('quote')))
|
|
||||||
|
|
||||||
def _get_intrasite_link_regex(self):
|
|
||||||
intrasite_link_regex = self.settings['INTRASITE_LINK_REGEX']
|
|
||||||
regex = r"""
|
|
||||||
(?P<markup><[^\>]+ # match tag with all url-value attributes
|
|
||||||
(?:href|src|poster|data|cite|formaction|action)\s*=\s*)
|
|
||||||
|
|
||||||
(?P<quote>["\']) # require value to be quoted
|
|
||||||
(?P<path>{}(?P<value>.*?)) # the url value
|
|
||||||
\2""".format(intrasite_link_regex)
|
|
||||||
return re.compile(regex, re.X)
|
|
||||||
|
|
||||||
def _update_content(self, content, siteurl):
|
|
||||||
"""Update the content attribute.
|
|
||||||
|
|
||||||
Change all the relative paths of the content to relative paths
|
|
||||||
suitable for the output content.
|
|
||||||
|
|
||||||
:param content: content resource that will be passed to the templates.
|
|
||||||
:param siteurl: siteurl which is locally generated by the writer in
|
|
||||||
case of RELATIVE_URLS.
|
|
||||||
"""
|
|
||||||
if not content:
|
|
||||||
return content
|
|
||||||
|
|
||||||
hrefs = self._get_intrasite_link_regex()
|
|
||||||
return hrefs.sub(lambda m: self._link_replacer(siteurl, m), content)
|
|
||||||
|
|
||||||
def get_static_links(self):
|
|
||||||
static_links = set()
|
|
||||||
hrefs = self._get_intrasite_link_regex()
|
|
||||||
for m in hrefs.finditer(self._content):
|
|
||||||
what = m.group('what')
|
|
||||||
value = urlparse(m.group('value'))
|
|
||||||
path = value.path
|
|
||||||
if what not in {'static', 'attach'}:
|
|
||||||
continue
|
|
||||||
if path.startswith('/'):
|
|
||||||
path = path[1:]
|
|
||||||
else:
|
|
||||||
# relative to the source path of this content
|
|
||||||
path = self.get_relative_source_path(
|
|
||||||
os.path.join(self.relative_dir, path)
|
|
||||||
)
|
|
||||||
path = path.replace('%20', ' ')
|
|
||||||
static_links.add(path)
|
|
||||||
return static_links
|
|
||||||
|
|
||||||
def get_siteurl(self):
|
|
||||||
return self._context.get('localsiteurl', '')
|
|
||||||
|
|
||||||
@memoized
|
|
||||||
def get_content(self, siteurl):
|
|
||||||
if hasattr(self, '_get_content'):
|
|
||||||
content = self._get_content()
|
|
||||||
else:
|
|
||||||
content = self._content
|
|
||||||
return self._update_content(content, siteurl)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def content(self):
|
|
||||||
return self.get_content(self.get_siteurl())
|
|
||||||
|
|
||||||
@memoized
|
|
||||||
def get_summary(self, siteurl):
|
|
||||||
"""Returns the summary of an article.
|
|
||||||
|
|
||||||
This is based on the summary metadata if set, otherwise truncate the
|
|
||||||
content.
|
|
||||||
"""
|
|
||||||
if 'summary' in self.metadata:
|
|
||||||
return self.metadata['summary']
|
|
||||||
|
|
||||||
if self.settings['SUMMARY_MAX_LENGTH'] is None:
|
|
||||||
return self.content
|
|
||||||
|
|
||||||
return truncate_html_words(self.content,
|
|
||||||
self.settings['SUMMARY_MAX_LENGTH'],
|
|
||||||
self.settings['SUMMARY_END_MARKER'])
|
|
||||||
|
|
||||||
@property
|
|
||||||
def summary(self):
|
|
||||||
return self.get_summary(self.get_siteurl())
|
|
||||||
|
|
||||||
def _get_summary(self):
|
|
||||||
"""deprecated function to access summary"""
|
|
||||||
|
|
||||||
logger.warning('_get_summary() has been deprecated since 3.6.4. '
|
|
||||||
'Use the summary decorator instead')
|
|
||||||
return self.summary
|
|
||||||
|
|
||||||
@summary.setter
|
|
||||||
def summary(self, value):
|
|
||||||
"""Dummy function"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
@property
|
|
||||||
def status(self):
|
|
||||||
return self._status
|
|
||||||
|
|
||||||
@status.setter
|
|
||||||
def status(self, value):
|
|
||||||
# TODO maybe typecheck
|
|
||||||
self._status = value.lower()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def url(self):
|
|
||||||
return self.get_url_setting('url')
|
|
||||||
|
|
||||||
@property
|
|
||||||
def save_as(self):
|
|
||||||
return self.get_url_setting('save_as')
|
|
||||||
|
|
||||||
def _get_template(self):
|
|
||||||
if hasattr(self, 'template') and self.template is not None:
|
|
||||||
return self.template
|
|
||||||
else:
|
|
||||||
return self.default_template
|
|
||||||
|
|
||||||
def get_relative_source_path(self, source_path=None):
|
|
||||||
"""Return the relative path (from the content path) to the given
|
|
||||||
source_path.
|
|
||||||
|
|
||||||
If no source path is specified, use the source path of this
|
|
||||||
content object.
|
|
||||||
"""
|
|
||||||
if not source_path:
|
|
||||||
source_path = self.source_path
|
|
||||||
if source_path is None:
|
|
||||||
return None
|
|
||||||
|
|
||||||
return posixize_path(
|
|
||||||
os.path.relpath(
|
|
||||||
os.path.abspath(os.path.join(
|
|
||||||
self.settings['PATH'],
|
|
||||||
source_path)),
|
|
||||||
os.path.abspath(self.settings['PATH'])
|
|
||||||
))
|
|
||||||
|
|
||||||
@property
|
|
||||||
def relative_dir(self):
|
|
||||||
return posixize_path(
|
|
||||||
os.path.dirname(
|
|
||||||
os.path.relpath(
|
|
||||||
os.path.abspath(self.source_path),
|
|
||||||
os.path.abspath(self.settings['PATH']))))
|
|
||||||
|
|
||||||
def refresh_metadata_intersite_links(self):
|
|
||||||
for key in self.settings['FORMATTED_FIELDS']:
|
|
||||||
if key in self.metadata and key != 'summary':
|
|
||||||
value = self._update_content(
|
|
||||||
self.metadata[key],
|
|
||||||
self.get_siteurl()
|
|
||||||
)
|
|
||||||
self.metadata[key] = value
|
|
||||||
setattr(self, key.lower(), value)
|
|
||||||
|
|
||||||
# _summary is an internal variable that some plugins may be writing to,
|
|
||||||
# so ensure changes to it are picked up
|
|
||||||
if ('summary' in self.settings['FORMATTED_FIELDS'] and
|
|
||||||
'summary' in self.metadata):
|
|
||||||
self._summary = self._update_content(
|
|
||||||
self._summary,
|
|
||||||
self.get_siteurl()
|
|
||||||
)
|
|
||||||
self.metadata['summary'] = self._summary
|
|
||||||
|
|
||||||
|
|
||||||
class Page(Content):
|
|
||||||
mandatory_properties = ('title',)
|
|
||||||
allowed_statuses = ('published', 'hidden', 'draft')
|
|
||||||
default_status = 'published'
|
|
||||||
default_template = 'page'
|
|
||||||
|
|
||||||
def _expand_settings(self, key):
|
|
||||||
klass = 'draft_page' if self.status == 'draft' else None
|
|
||||||
return super()._expand_settings(key, klass)
|
|
||||||
|
|
||||||
|
|
||||||
class Article(Content):
|
|
||||||
mandatory_properties = ('title', 'date', 'category')
|
|
||||||
allowed_statuses = ('published', 'draft')
|
|
||||||
default_status = 'published'
|
|
||||||
default_template = 'article'
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
|
|
||||||
# handle WITH_FUTURE_DATES (designate article to draft based on date)
|
|
||||||
if not self.settings['WITH_FUTURE_DATES'] and hasattr(self, 'date'):
|
|
||||||
if self.date.tzinfo is None:
|
|
||||||
now = datetime.datetime.now()
|
|
||||||
else:
|
|
||||||
now = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
|
|
||||||
if self.date > now:
|
|
||||||
self.status = 'draft'
|
|
||||||
|
|
||||||
# if we are a draft and there is no date provided, set max datetime
|
|
||||||
if not hasattr(self, 'date') and self.status == 'draft':
|
|
||||||
self.date = datetime.datetime.max.replace(tzinfo=self.timezone)
|
|
||||||
|
|
||||||
def _expand_settings(self, key):
|
|
||||||
klass = 'draft' if self.status == 'draft' else 'article'
|
|
||||||
return super()._expand_settings(key, klass)
|
|
||||||
|
|
||||||
|
|
||||||
class Static(Content):
|
|
||||||
mandatory_properties = ('title',)
|
|
||||||
default_status = 'published'
|
|
||||||
default_template = None
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
self._output_location_referenced = False
|
|
||||||
|
|
||||||
@deprecated_attribute(old='filepath', new='source_path', since=(3, 2, 0))
|
|
||||||
def filepath():
|
|
||||||
return None
|
|
||||||
|
|
||||||
@deprecated_attribute(old='src', new='source_path', since=(3, 2, 0))
|
|
||||||
def src():
|
|
||||||
return None
|
|
||||||
|
|
||||||
@deprecated_attribute(old='dst', new='save_as', since=(3, 2, 0))
|
|
||||||
def dst():
|
|
||||||
return None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def url(self):
|
|
||||||
# Note when url has been referenced, so we can avoid overriding it.
|
|
||||||
self._output_location_referenced = True
|
|
||||||
return super().url
|
|
||||||
|
|
||||||
@property
|
|
||||||
def save_as(self):
|
|
||||||
# Note when save_as has been referenced, so we can avoid overriding it.
|
|
||||||
self._output_location_referenced = True
|
|
||||||
return super().save_as
|
|
||||||
|
|
||||||
def attach_to(self, content):
|
|
||||||
"""Override our output directory with that of the given content object.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Determine our file's new output path relative to the linking
|
|
||||||
# document. If it currently lives beneath the linking
|
|
||||||
# document's source directory, preserve that relationship on output.
|
|
||||||
# Otherwise, make it a sibling.
|
|
||||||
|
|
||||||
linking_source_dir = os.path.dirname(content.source_path)
|
|
||||||
tail_path = os.path.relpath(self.source_path, linking_source_dir)
|
|
||||||
if tail_path.startswith(os.pardir + os.sep):
|
|
||||||
tail_path = os.path.basename(tail_path)
|
|
||||||
new_save_as = os.path.join(
|
|
||||||
os.path.dirname(content.save_as), tail_path)
|
|
||||||
|
|
||||||
# We do not build our new url by joining tail_path with the linking
|
|
||||||
# document's url, because we cannot know just by looking at the latter
|
|
||||||
# whether it points to the document itself or to its parent directory.
|
|
||||||
# (An url like 'some/content' might mean a directory named 'some'
|
|
||||||
# with a file named 'content', or it might mean a directory named
|
|
||||||
# 'some/content' with a file named 'index.html'.) Rather than trying
|
|
||||||
# to figure it out by comparing the linking document's url and save_as
|
|
||||||
# path, we simply build our new url from our new save_as path.
|
|
||||||
|
|
||||||
new_url = path_to_url(new_save_as)
|
|
||||||
|
|
||||||
def _log_reason(reason):
|
|
||||||
logger.warning(
|
|
||||||
"The {attach} link in %s cannot relocate "
|
|
||||||
"%s because %s. Falling back to "
|
|
||||||
"{filename} link behavior instead.",
|
|
||||||
content.get_relative_source_path(),
|
|
||||||
self.get_relative_source_path(), reason,
|
|
||||||
extra={'limit_msg': "More {attach} warnings silenced."})
|
|
||||||
|
|
||||||
# We never override an override, because we don't want to interfere
|
|
||||||
# with user-defined overrides that might be in EXTRA_PATH_METADATA.
|
|
||||||
if hasattr(self, 'override_save_as') or hasattr(self, 'override_url'):
|
|
||||||
if new_save_as != self.save_as or new_url != self.url:
|
|
||||||
_log_reason("its output location was already overridden")
|
|
||||||
return
|
|
||||||
|
|
||||||
# We never change an output path that has already been referenced,
|
|
||||||
# because we don't want to break links that depend on that path.
|
|
||||||
if self._output_location_referenced:
|
|
||||||
if new_save_as != self.save_as or new_url != self.url:
|
|
||||||
_log_reason("another link already referenced its location")
|
|
||||||
return
|
|
||||||
|
|
||||||
self.override_save_as = new_save_as
|
|
||||||
self.override_url = new_url
|
|
||||||
|
|
@ -1,934 +0,0 @@
|
||||||
import calendar
|
|
||||||
import errno
|
|
||||||
import fnmatch
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
from collections import defaultdict
|
|
||||||
from functools import partial
|
|
||||||
from itertools import chain, groupby
|
|
||||||
from operator import attrgetter
|
|
||||||
|
|
||||||
from jinja2 import (BaseLoader, ChoiceLoader, Environment, FileSystemLoader,
|
|
||||||
PrefixLoader, TemplateNotFound)
|
|
||||||
|
|
||||||
from pelican.cache import FileStampDataCacher
|
|
||||||
from pelican.contents import Article, Page, Static
|
|
||||||
from pelican.plugins import signals
|
|
||||||
from pelican.readers import Readers
|
|
||||||
from pelican.utils import (DateFormatter, copy, mkdir_p, order_content,
|
|
||||||
posixize_path, process_translations)
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class PelicanTemplateNotFound(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class Generator:
|
|
||||||
"""Baseclass generator"""
|
|
||||||
|
|
||||||
def __init__(self, context, settings, path, theme, output_path,
|
|
||||||
readers_cache_name='', **kwargs):
|
|
||||||
self.context = context
|
|
||||||
self.settings = settings
|
|
||||||
self.path = path
|
|
||||||
self.theme = theme
|
|
||||||
self.output_path = output_path
|
|
||||||
|
|
||||||
for arg, value in kwargs.items():
|
|
||||||
setattr(self, arg, value)
|
|
||||||
|
|
||||||
self.readers = Readers(self.settings, readers_cache_name)
|
|
||||||
|
|
||||||
# templates cache
|
|
||||||
self._templates = {}
|
|
||||||
self._templates_path = list(self.settings['THEME_TEMPLATES_OVERRIDES'])
|
|
||||||
|
|
||||||
theme_templates_path = os.path.expanduser(
|
|
||||||
os.path.join(self.theme, 'templates'))
|
|
||||||
self._templates_path.append(theme_templates_path)
|
|
||||||
theme_loader = FileSystemLoader(theme_templates_path)
|
|
||||||
|
|
||||||
simple_theme_path = os.path.dirname(os.path.abspath(__file__))
|
|
||||||
simple_loader = FileSystemLoader(
|
|
||||||
os.path.join(simple_theme_path, "themes", "simple", "templates"))
|
|
||||||
|
|
||||||
self.env = Environment(
|
|
||||||
loader=ChoiceLoader([
|
|
||||||
FileSystemLoader(self._templates_path),
|
|
||||||
simple_loader, # implicit inheritance
|
|
||||||
PrefixLoader({
|
|
||||||
'!simple': simple_loader,
|
|
||||||
'!theme': theme_loader
|
|
||||||
}) # explicit ones
|
|
||||||
]),
|
|
||||||
**self.settings['JINJA_ENVIRONMENT']
|
|
||||||
)
|
|
||||||
|
|
||||||
logger.debug('Template list: %s', self.env.list_templates())
|
|
||||||
|
|
||||||
# provide utils.strftime as a jinja filter
|
|
||||||
self.env.filters.update({'strftime': DateFormatter()})
|
|
||||||
|
|
||||||
# get custom Jinja filters from user settings
|
|
||||||
custom_filters = self.settings['JINJA_FILTERS']
|
|
||||||
self.env.filters.update(custom_filters)
|
|
||||||
|
|
||||||
# get custom Jinja globals from user settings
|
|
||||||
custom_globals = self.settings['JINJA_GLOBALS']
|
|
||||||
self.env.globals.update(custom_globals)
|
|
||||||
|
|
||||||
# get custom Jinja tests from user settings
|
|
||||||
custom_tests = self.settings['JINJA_TESTS']
|
|
||||||
self.env.tests.update(custom_tests)
|
|
||||||
|
|
||||||
signals.generator_init.send(self)
|
|
||||||
|
|
||||||
def get_template(self, name):
|
|
||||||
"""Return the template by name.
|
|
||||||
Use self.theme to get the templates to use, and return a list of
|
|
||||||
templates ready to use with Jinja2.
|
|
||||||
"""
|
|
||||||
if name not in self._templates:
|
|
||||||
for ext in self.settings['TEMPLATE_EXTENSIONS']:
|
|
||||||
try:
|
|
||||||
self._templates[name] = self.env.get_template(name + ext)
|
|
||||||
break
|
|
||||||
except TemplateNotFound:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if name not in self._templates:
|
|
||||||
raise PelicanTemplateNotFound(
|
|
||||||
'[templates] unable to load {}[{}] from {}'.format(
|
|
||||||
name, ', '.join(self.settings['TEMPLATE_EXTENSIONS']),
|
|
||||||
self._templates_path))
|
|
||||||
|
|
||||||
return self._templates[name]
|
|
||||||
|
|
||||||
def _include_path(self, path, extensions=None):
|
|
||||||
"""Inclusion logic for .get_files(), returns True/False
|
|
||||||
|
|
||||||
:param path: the path which might be including
|
|
||||||
:param extensions: the list of allowed extensions, or False if all
|
|
||||||
extensions are allowed
|
|
||||||
"""
|
|
||||||
if extensions is None:
|
|
||||||
extensions = tuple(self.readers.extensions)
|
|
||||||
basename = os.path.basename(path)
|
|
||||||
|
|
||||||
# check IGNORE_FILES
|
|
||||||
ignores = self.settings['IGNORE_FILES']
|
|
||||||
if any(fnmatch.fnmatch(basename, ignore) for ignore in ignores):
|
|
||||||
return False
|
|
||||||
|
|
||||||
ext = os.path.splitext(basename)[1][1:]
|
|
||||||
if extensions is False or ext in extensions:
|
|
||||||
return True
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
def get_files(self, paths, exclude=[], extensions=None):
|
|
||||||
"""Return a list of files to use, based on rules
|
|
||||||
|
|
||||||
:param paths: the list pf paths to search (relative to self.path)
|
|
||||||
:param exclude: the list of path to exclude
|
|
||||||
:param extensions: the list of allowed extensions (if False, all
|
|
||||||
extensions are allowed)
|
|
||||||
"""
|
|
||||||
# backward compatibility for older generators
|
|
||||||
if isinstance(paths, str):
|
|
||||||
paths = [paths]
|
|
||||||
|
|
||||||
# group the exclude dir names by parent path, for use with os.walk()
|
|
||||||
exclusions_by_dirpath = {}
|
|
||||||
for e in exclude:
|
|
||||||
parent_path, subdir = os.path.split(os.path.join(self.path, e))
|
|
||||||
exclusions_by_dirpath.setdefault(parent_path, set()).add(subdir)
|
|
||||||
|
|
||||||
files = set()
|
|
||||||
ignores = self.settings['IGNORE_FILES']
|
|
||||||
for path in paths:
|
|
||||||
# careful: os.path.join() will add a slash when path == ''.
|
|
||||||
root = os.path.join(self.path, path) if path else self.path
|
|
||||||
|
|
||||||
if os.path.isdir(root):
|
|
||||||
for dirpath, dirs, temp_files in os.walk(
|
|
||||||
root, topdown=True, followlinks=True):
|
|
||||||
excl = exclusions_by_dirpath.get(dirpath, ())
|
|
||||||
# We copy the `dirs` list as we will modify it in the loop:
|
|
||||||
for d in list(dirs):
|
|
||||||
if (d in excl or
|
|
||||||
any(fnmatch.fnmatch(d, ignore)
|
|
||||||
for ignore in ignores)):
|
|
||||||
if d in dirs:
|
|
||||||
dirs.remove(d)
|
|
||||||
|
|
||||||
reldir = os.path.relpath(dirpath, self.path)
|
|
||||||
for f in temp_files:
|
|
||||||
fp = os.path.join(reldir, f)
|
|
||||||
if self._include_path(fp, extensions):
|
|
||||||
files.add(fp)
|
|
||||||
elif os.path.exists(root) and self._include_path(path, extensions):
|
|
||||||
files.add(path) # can't walk non-directories
|
|
||||||
return files
|
|
||||||
|
|
||||||
def add_source_path(self, content, static=False):
|
|
||||||
"""Record a source file path that a Generator found and processed.
|
|
||||||
Store a reference to its Content object, for url lookups later.
|
|
||||||
"""
|
|
||||||
location = content.get_relative_source_path()
|
|
||||||
key = 'static_content' if static else 'generated_content'
|
|
||||||
self.context[key][location] = content
|
|
||||||
|
|
||||||
def _add_failed_source_path(self, path, static=False):
|
|
||||||
"""Record a source file path that a Generator failed to process.
|
|
||||||
(For example, one that was missing mandatory metadata.)
|
|
||||||
The path argument is expected to be relative to self.path.
|
|
||||||
"""
|
|
||||||
key = 'static_content' if static else 'generated_content'
|
|
||||||
self.context[key][posixize_path(os.path.normpath(path))] = None
|
|
||||||
|
|
||||||
def _is_potential_source_path(self, path, static=False):
|
|
||||||
"""Return True if path was supposed to be used as a source file.
|
|
||||||
(This includes all source files that have been found by generators
|
|
||||||
before this method is called, even if they failed to process.)
|
|
||||||
The path argument is expected to be relative to self.path.
|
|
||||||
"""
|
|
||||||
key = 'static_content' if static else 'generated_content'
|
|
||||||
return (posixize_path(os.path.normpath(path)) in self.context[key])
|
|
||||||
|
|
||||||
def add_static_links(self, content):
|
|
||||||
"""Add file links in content to context to be processed as Static
|
|
||||||
content.
|
|
||||||
"""
|
|
||||||
self.context['static_links'] |= content.get_static_links()
|
|
||||||
|
|
||||||
def _update_context(self, items):
|
|
||||||
"""Update the context with the given items from the currrent
|
|
||||||
processor.
|
|
||||||
"""
|
|
||||||
for item in items:
|
|
||||||
value = getattr(self, item)
|
|
||||||
if hasattr(value, 'items'):
|
|
||||||
value = list(value.items()) # py3k safeguard for iterators
|
|
||||||
self.context[item] = value
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
# return the name of the class for logging purposes
|
|
||||||
return self.__class__.__name__
|
|
||||||
|
|
||||||
|
|
||||||
class CachingGenerator(Generator, FileStampDataCacher):
|
|
||||||
'''Subclass of Generator and FileStampDataCacher classes
|
|
||||||
|
|
||||||
enables content caching, either at the generator or reader level
|
|
||||||
'''
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
'''Initialize the generator, then set up caching
|
|
||||||
|
|
||||||
note the multiple inheritance structure
|
|
||||||
'''
|
|
||||||
cls_name = self.__class__.__name__
|
|
||||||
Generator.__init__(self, *args,
|
|
||||||
readers_cache_name=(cls_name + '-Readers'),
|
|
||||||
**kwargs)
|
|
||||||
|
|
||||||
cache_this_level = \
|
|
||||||
self.settings['CONTENT_CACHING_LAYER'] == 'generator'
|
|
||||||
caching_policy = cache_this_level and self.settings['CACHE_CONTENT']
|
|
||||||
load_policy = cache_this_level and self.settings['LOAD_CONTENT_CACHE']
|
|
||||||
FileStampDataCacher.__init__(self, self.settings, cls_name,
|
|
||||||
caching_policy, load_policy
|
|
||||||
)
|
|
||||||
|
|
||||||
def _get_file_stamp(self, filename):
|
|
||||||
'''Get filestamp for path relative to generator.path'''
|
|
||||||
filename = os.path.join(self.path, filename)
|
|
||||||
return super()._get_file_stamp(filename)
|
|
||||||
|
|
||||||
|
|
||||||
class _FileLoader(BaseLoader):
|
|
||||||
|
|
||||||
def __init__(self, path, basedir):
|
|
||||||
self.path = path
|
|
||||||
self.fullpath = os.path.join(basedir, path)
|
|
||||||
|
|
||||||
def get_source(self, environment, template):
|
|
||||||
if template != self.path or not os.path.exists(self.fullpath):
|
|
||||||
raise TemplateNotFound(template)
|
|
||||||
mtime = os.path.getmtime(self.fullpath)
|
|
||||||
with open(self.fullpath, encoding='utf-8') as f:
|
|
||||||
source = f.read()
|
|
||||||
return (source, self.fullpath,
|
|
||||||
lambda: mtime == os.path.getmtime(self.fullpath))
|
|
||||||
|
|
||||||
|
|
||||||
class TemplatePagesGenerator(Generator):
|
|
||||||
|
|
||||||
def generate_output(self, writer):
|
|
||||||
for source, dest in self.settings['TEMPLATE_PAGES'].items():
|
|
||||||
self.env.loader.loaders.insert(0, _FileLoader(source, self.path))
|
|
||||||
try:
|
|
||||||
template = self.env.get_template(source)
|
|
||||||
rurls = self.settings['RELATIVE_URLS']
|
|
||||||
writer.write_file(dest, template, self.context, rurls,
|
|
||||||
override_output=True, url='')
|
|
||||||
finally:
|
|
||||||
del self.env.loader.loaders[0]
|
|
||||||
|
|
||||||
|
|
||||||
class ArticlesGenerator(CachingGenerator):
|
|
||||||
"""Generate blog articles"""
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
"""initialize properties"""
|
|
||||||
self.articles = [] # only articles in default language
|
|
||||||
self.translations = []
|
|
||||||
self.dates = {}
|
|
||||||
self.tags = defaultdict(list)
|
|
||||||
self.categories = defaultdict(list)
|
|
||||||
self.related_posts = []
|
|
||||||
self.authors = defaultdict(list)
|
|
||||||
self.drafts = [] # only drafts in default language
|
|
||||||
self.drafts_translations = []
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
signals.article_generator_init.send(self)
|
|
||||||
|
|
||||||
def generate_feeds(self, writer):
|
|
||||||
"""Generate the feeds from the current context, and output files."""
|
|
||||||
|
|
||||||
if self.settings.get('FEED_ATOM'):
|
|
||||||
writer.write_feed(
|
|
||||||
self.articles,
|
|
||||||
self.context,
|
|
||||||
self.settings['FEED_ATOM'],
|
|
||||||
self.settings.get('FEED_ATOM_URL', self.settings['FEED_ATOM'])
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.settings.get('FEED_RSS'):
|
|
||||||
writer.write_feed(
|
|
||||||
self.articles,
|
|
||||||
self.context,
|
|
||||||
self.settings['FEED_RSS'],
|
|
||||||
self.settings.get('FEED_RSS_URL', self.settings['FEED_RSS']),
|
|
||||||
feed_type='rss'
|
|
||||||
)
|
|
||||||
|
|
||||||
if (self.settings.get('FEED_ALL_ATOM') or
|
|
||||||
self.settings.get('FEED_ALL_RSS')):
|
|
||||||
all_articles = list(self.articles)
|
|
||||||
for article in self.articles:
|
|
||||||
all_articles.extend(article.translations)
|
|
||||||
order_content(all_articles,
|
|
||||||
order_by=self.settings['ARTICLE_ORDER_BY'])
|
|
||||||
|
|
||||||
if self.settings.get('FEED_ALL_ATOM'):
|
|
||||||
writer.write_feed(
|
|
||||||
all_articles,
|
|
||||||
self.context,
|
|
||||||
self.settings['FEED_ALL_ATOM'],
|
|
||||||
self.settings.get('FEED_ALL_ATOM_URL',
|
|
||||||
self.settings['FEED_ALL_ATOM'])
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.settings.get('FEED_ALL_RSS'):
|
|
||||||
writer.write_feed(
|
|
||||||
all_articles,
|
|
||||||
self.context,
|
|
||||||
self.settings['FEED_ALL_RSS'],
|
|
||||||
self.settings.get('FEED_ALL_RSS_URL',
|
|
||||||
self.settings['FEED_ALL_RSS']),
|
|
||||||
feed_type='rss'
|
|
||||||
)
|
|
||||||
|
|
||||||
for cat, arts in self.categories:
|
|
||||||
if self.settings.get('CATEGORY_FEED_ATOM'):
|
|
||||||
writer.write_feed(
|
|
||||||
arts,
|
|
||||||
self.context,
|
|
||||||
self.settings['CATEGORY_FEED_ATOM'].format(slug=cat.slug),
|
|
||||||
self.settings.get(
|
|
||||||
'CATEGORY_FEED_ATOM_URL',
|
|
||||||
self.settings['CATEGORY_FEED_ATOM']).format(
|
|
||||||
slug=cat.slug
|
|
||||||
),
|
|
||||||
feed_title=cat.name
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.settings.get('CATEGORY_FEED_RSS'):
|
|
||||||
writer.write_feed(
|
|
||||||
arts,
|
|
||||||
self.context,
|
|
||||||
self.settings['CATEGORY_FEED_RSS'].format(slug=cat.slug),
|
|
||||||
self.settings.get(
|
|
||||||
'CATEGORY_FEED_RSS_URL',
|
|
||||||
self.settings['CATEGORY_FEED_RSS']).format(
|
|
||||||
slug=cat.slug
|
|
||||||
),
|
|
||||||
feed_title=cat.name,
|
|
||||||
feed_type='rss'
|
|
||||||
)
|
|
||||||
|
|
||||||
for auth, arts in self.authors:
|
|
||||||
if self.settings.get('AUTHOR_FEED_ATOM'):
|
|
||||||
writer.write_feed(
|
|
||||||
arts,
|
|
||||||
self.context,
|
|
||||||
self.settings['AUTHOR_FEED_ATOM'].format(slug=auth.slug),
|
|
||||||
self.settings.get(
|
|
||||||
'AUTHOR_FEED_ATOM_URL',
|
|
||||||
self.settings['AUTHOR_FEED_ATOM']
|
|
||||||
).format(slug=auth.slug),
|
|
||||||
feed_title=auth.name
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.settings.get('AUTHOR_FEED_RSS'):
|
|
||||||
writer.write_feed(
|
|
||||||
arts,
|
|
||||||
self.context,
|
|
||||||
self.settings['AUTHOR_FEED_RSS'].format(slug=auth.slug),
|
|
||||||
self.settings.get(
|
|
||||||
'AUTHOR_FEED_RSS_URL',
|
|
||||||
self.settings['AUTHOR_FEED_RSS']
|
|
||||||
).format(slug=auth.slug),
|
|
||||||
feed_title=auth.name,
|
|
||||||
feed_type='rss'
|
|
||||||
)
|
|
||||||
|
|
||||||
if (self.settings.get('TAG_FEED_ATOM') or
|
|
||||||
self.settings.get('TAG_FEED_RSS')):
|
|
||||||
for tag, arts in self.tags.items():
|
|
||||||
if self.settings.get('TAG_FEED_ATOM'):
|
|
||||||
writer.write_feed(
|
|
||||||
arts,
|
|
||||||
self.context,
|
|
||||||
self.settings['TAG_FEED_ATOM'].format(slug=tag.slug),
|
|
||||||
self.settings.get(
|
|
||||||
'TAG_FEED_ATOM_URL',
|
|
||||||
self.settings['TAG_FEED_ATOM']
|
|
||||||
).format(slug=tag.slug),
|
|
||||||
feed_title=tag.name
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.settings.get('TAG_FEED_RSS'):
|
|
||||||
writer.write_feed(
|
|
||||||
arts,
|
|
||||||
self.context,
|
|
||||||
self.settings['TAG_FEED_RSS'].format(slug=tag.slug),
|
|
||||||
self.settings.get(
|
|
||||||
'TAG_FEED_RSS_URL',
|
|
||||||
self.settings['TAG_FEED_RSS']
|
|
||||||
).format(slug=tag.slug),
|
|
||||||
feed_title=tag.name,
|
|
||||||
feed_type='rss'
|
|
||||||
)
|
|
||||||
|
|
||||||
if (self.settings.get('TRANSLATION_FEED_ATOM') or
|
|
||||||
self.settings.get('TRANSLATION_FEED_RSS')):
|
|
||||||
translations_feeds = defaultdict(list)
|
|
||||||
for article in chain(self.articles, self.translations):
|
|
||||||
translations_feeds[article.lang].append(article)
|
|
||||||
|
|
||||||
for lang, items in translations_feeds.items():
|
|
||||||
items = order_content(
|
|
||||||
items, order_by=self.settings['ARTICLE_ORDER_BY'])
|
|
||||||
if self.settings.get('TRANSLATION_FEED_ATOM'):
|
|
||||||
writer.write_feed(
|
|
||||||
items,
|
|
||||||
self.context,
|
|
||||||
self.settings['TRANSLATION_FEED_ATOM']
|
|
||||||
.format(lang=lang),
|
|
||||||
self.settings.get(
|
|
||||||
'TRANSLATION_FEED_ATOM_URL',
|
|
||||||
self.settings['TRANSLATION_FEED_ATOM']
|
|
||||||
).format(lang=lang),
|
|
||||||
)
|
|
||||||
if self.settings.get('TRANSLATION_FEED_RSS'):
|
|
||||||
writer.write_feed(
|
|
||||||
items,
|
|
||||||
self.context,
|
|
||||||
self.settings['TRANSLATION_FEED_RSS']
|
|
||||||
.format(lang=lang),
|
|
||||||
self.settings.get(
|
|
||||||
'TRANSLATION_FEED_RSS_URL',
|
|
||||||
self.settings['TRANSLATION_FEED_RSS']
|
|
||||||
).format(lang=lang),
|
|
||||||
feed_type='rss'
|
|
||||||
)
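
Each feed setting above doubles as the output path and, unless a matching `*_URL` setting overrides it, the feed's own URL; the per-category, per-author, per-tag and per-language variants are run through `str.format` with `slug` or `lang`. A sketch of the corresponding configuration with illustrative paths (leaving a setting empty or `None` disables that feed):

```python
# Illustrative feed configuration.
FEED_ALL_ATOM = 'feeds/all.atom.xml'
FEED_ALL_RSS = 'feeds/all.rss.xml'
CATEGORY_FEED_ATOM = 'feeds/{slug}.atom.xml'         # formatted with the category slug
AUTHOR_FEED_RSS = 'feeds/{slug}.rss.xml'             # formatted with the author slug
TAG_FEED_ATOM = 'feeds/{slug}.tag.atom.xml'          # formatted with the tag slug
TRANSLATION_FEED_ATOM = 'feeds/all-{lang}.atom.xml'  # formatted with the language code
```
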
|
|
||||||
|
|
||||||
def generate_articles(self, write):
|
|
||||||
"""Generate the articles."""
|
|
||||||
for article in chain(self.translations, self.articles):
|
|
||||||
signals.article_generator_write_article.send(self, content=article)
|
|
||||||
write(article.save_as, self.get_template(article.template),
|
|
||||||
self.context, article=article, category=article.category,
|
|
||||||
override_output=hasattr(article, 'override_save_as'),
|
|
||||||
url=article.url, blog=True)
|
|
||||||
|
|
||||||
def generate_period_archives(self, write):
|
|
||||||
"""Generate per-year, per-month, and per-day archives."""
|
|
||||||
try:
|
|
||||||
template = self.get_template('period_archives')
|
|
||||||
except PelicanTemplateNotFound:
|
|
||||||
template = self.get_template('archives')
|
|
||||||
|
|
||||||
period_save_as = {
|
|
||||||
'year': self.settings['YEAR_ARCHIVE_SAVE_AS'],
|
|
||||||
'month': self.settings['MONTH_ARCHIVE_SAVE_AS'],
|
|
||||||
'day': self.settings['DAY_ARCHIVE_SAVE_AS'],
|
|
||||||
}
|
|
||||||
|
|
||||||
period_url = {
|
|
||||||
'year': self.settings['YEAR_ARCHIVE_URL'],
|
|
||||||
'month': self.settings['MONTH_ARCHIVE_URL'],
|
|
||||||
'day': self.settings['DAY_ARCHIVE_URL'],
|
|
||||||
}
|
|
||||||
|
|
||||||
period_date_key = {
|
|
||||||
'year': attrgetter('date.year'),
|
|
||||||
'month': attrgetter('date.year', 'date.month'),
|
|
||||||
'day': attrgetter('date.year', 'date.month', 'date.day')
|
|
||||||
}
|
|
||||||
|
|
||||||
def _generate_period_archives(dates, key, save_as_fmt, url_fmt):
|
|
||||||
"""Generate period archives from `dates`, grouped by
|
|
||||||
`key` and written to `save_as`.
|
|
||||||
"""
|
|
||||||
# `dates` is already sorted by date
|
|
||||||
for _period, group in groupby(dates, key=key):
|
|
||||||
archive = list(group)
|
|
||||||
articles = [a for a in self.articles if a in archive]
|
|
||||||
# arbitrarily grab the first date so that the usual
|
|
||||||
# format string syntax can be used for specifying the
|
|
||||||
# period archive dates
|
|
||||||
date = archive[0].date
|
|
||||||
save_as = save_as_fmt.format(date=date)
|
|
||||||
url = url_fmt.format(date=date)
|
|
||||||
context = self.context.copy()
|
|
||||||
|
|
||||||
if key == period_date_key['year']:
|
|
||||||
context["period"] = (_period,)
|
|
||||||
else:
|
|
||||||
month_name = calendar.month_name[_period[1]]
|
|
||||||
if key == period_date_key['month']:
|
|
||||||
context["period"] = (_period[0],
|
|
||||||
month_name)
|
|
||||||
else:
|
|
||||||
context["period"] = (_period[0],
|
|
||||||
month_name,
|
|
||||||
_period[2])
|
|
||||||
|
|
||||||
write(save_as, template, context, articles=articles,
|
|
||||||
dates=archive, template_name='period_archives',
|
|
||||||
blog=True, url=url, all_articles=self.articles)
|
|
||||||
|
|
||||||
for period in 'year', 'month', 'day':
|
|
||||||
save_as = period_save_as[period]
|
|
||||||
url = period_url[period]
|
|
||||||
if save_as:
|
|
||||||
key = period_date_key[period]
|
|
||||||
_generate_period_archives(self.dates, key, save_as, url)
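
Since `save_as_fmt.format(date=date)` receives a real date object, the archive settings can embed `strftime`-style specs inside the placeholder. A hypothetical configuration enabling year and month archives (they stay disabled while the `*_SAVE_AS` settings are empty):

```python
# Illustrative period-archive settings.
YEAR_ARCHIVE_SAVE_AS = 'posts/{date:%Y}/index.html'
YEAR_ARCHIVE_URL = 'posts/{date:%Y}/'
MONTH_ARCHIVE_SAVE_AS = 'posts/{date:%Y}/{date:%b}/index.html'
MONTH_ARCHIVE_URL = 'posts/{date:%Y}/{date:%b}/'
```
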
|
|
||||||
|
|
||||||
def generate_direct_templates(self, write):
|
|
||||||
"""Generate direct templates pages"""
|
|
||||||
for template in self.settings['DIRECT_TEMPLATES']:
|
|
||||||
save_as = self.settings.get("%s_SAVE_AS" % template.upper(),
|
|
||||||
'%s.html' % template)
|
|
||||||
url = self.settings.get("%s_URL" % template.upper(),
|
|
||||||
'%s.html' % template)
|
|
||||||
if not save_as:
|
|
||||||
continue
|
|
||||||
|
|
||||||
write(save_as, self.get_template(template), self.context,
|
|
||||||
articles=self.articles, dates=self.dates, blog=True,
|
|
||||||
template_name=template,
|
|
||||||
page_name=os.path.splitext(save_as)[0], url=url)
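
For every name in `DIRECT_TEMPLATES`, the destination defaults to `<name>.html` and can be overridden, or disabled with an empty value, through `<NAME>_SAVE_AS` and `<NAME>_URL`, exactly as the `settings.get` calls above compute. A hypothetical example:

```python
# Illustrative direct-template settings.
DIRECT_TEMPLATES = ['index', 'categories', 'authors', 'archives']
ARCHIVES_SAVE_AS = 'blog/archives/index.html'
ARCHIVES_URL = 'blog/archives/'
AUTHORS_SAVE_AS = ''   # falsy: the 'authors' template is skipped entirely
```
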
|
|
||||||
|
|
||||||
def generate_tags(self, write):
|
|
||||||
"""Generate Tags pages."""
|
|
||||||
tag_template = self.get_template('tag')
|
|
||||||
for tag, articles in self.tags.items():
|
|
||||||
dates = [article for article in self.dates if article in articles]
|
|
||||||
write(tag.save_as, tag_template, self.context, tag=tag,
|
|
||||||
url=tag.url, articles=articles, dates=dates,
|
|
||||||
template_name='tag', blog=True, page_name=tag.page_name,
|
|
||||||
all_articles=self.articles)
|
|
||||||
|
|
||||||
def generate_categories(self, write):
|
|
||||||
"""Generate category pages."""
|
|
||||||
category_template = self.get_template('category')
|
|
||||||
for cat, articles in self.categories:
|
|
||||||
dates = [article for article in self.dates if article in articles]
|
|
||||||
write(cat.save_as, category_template, self.context, url=cat.url,
|
|
||||||
category=cat, articles=articles, dates=dates,
|
|
||||||
template_name='category', blog=True, page_name=cat.page_name,
|
|
||||||
all_articles=self.articles)
|
|
||||||
|
|
||||||
def generate_authors(self, write):
|
|
||||||
"""Generate Author pages."""
|
|
||||||
author_template = self.get_template('author')
|
|
||||||
for aut, articles in self.authors:
|
|
||||||
dates = [article for article in self.dates if article in articles]
|
|
||||||
write(aut.save_as, author_template, self.context,
|
|
||||||
url=aut.url, author=aut, articles=articles, dates=dates,
|
|
||||||
template_name='author', blog=True,
|
|
||||||
page_name=aut.page_name, all_articles=self.articles)
|
|
||||||
|
|
||||||
def generate_drafts(self, write):
|
|
||||||
"""Generate drafts pages."""
|
|
||||||
for draft in chain(self.drafts_translations, self.drafts):
|
|
||||||
write(draft.save_as, self.get_template(draft.template),
|
|
||||||
self.context, article=draft, category=draft.category,
|
|
||||||
override_output=hasattr(draft, 'override_save_as'),
|
|
||||||
blog=True, all_articles=self.articles, url=draft.url)
|
|
||||||
|
|
||||||
def generate_pages(self, writer):
|
|
||||||
"""Generate the pages on the disk"""
|
|
||||||
write = partial(writer.write_file,
|
|
||||||
relative_urls=self.settings['RELATIVE_URLS'])
|
|
||||||
|
|
||||||
# to minimize the amount of relative-path adjustment in the writer,
# articles are written first
|
|
||||||
self.generate_articles(write)
|
|
||||||
self.generate_period_archives(write)
|
|
||||||
self.generate_direct_templates(write)
|
|
||||||
|
|
||||||
# and subfolders after that
|
|
||||||
self.generate_tags(write)
|
|
||||||
self.generate_categories(write)
|
|
||||||
self.generate_authors(write)
|
|
||||||
self.generate_drafts(write)
|
|
||||||
|
|
||||||
def generate_context(self):
|
|
||||||
"""Add the articles into the shared context"""
|
|
||||||
|
|
||||||
all_articles = []
|
|
||||||
all_drafts = []
|
|
||||||
for f in self.get_files(
|
|
||||||
self.settings['ARTICLE_PATHS'],
|
|
||||||
exclude=self.settings['ARTICLE_EXCLUDES']):
|
|
||||||
article = self.get_cached_data(f, None)
|
|
||||||
if article is None:
|
|
||||||
try:
|
|
||||||
article = self.readers.read_file(
|
|
||||||
base_path=self.path, path=f, content_class=Article,
|
|
||||||
context=self.context,
|
|
||||||
preread_signal=signals.article_generator_preread,
|
|
||||||
preread_sender=self,
|
|
||||||
context_signal=signals.article_generator_context,
|
|
||||||
context_sender=self)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(
|
|
||||||
'Could not process %s\n%s', f, e,
|
|
||||||
exc_info=self.settings.get('DEBUG', False))
|
|
||||||
self._add_failed_source_path(f)
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not article.is_valid():
|
|
||||||
self._add_failed_source_path(f)
|
|
||||||
continue
|
|
||||||
|
|
||||||
self.cache_data(f, article)
|
|
||||||
|
|
||||||
if article.status == "published":
|
|
||||||
all_articles.append(article)
|
|
||||||
elif article.status == "draft":
|
|
||||||
all_drafts.append(article)
|
|
||||||
self.add_source_path(article)
|
|
||||||
self.add_static_links(article)
|
|
||||||
|
|
||||||
def _process(arts):
|
|
||||||
origs, translations = process_translations(
|
|
||||||
arts, translation_id=self.settings['ARTICLE_TRANSLATION_ID'])
|
|
||||||
origs = order_content(origs, self.settings['ARTICLE_ORDER_BY'])
|
|
||||||
return origs, translations
|
|
||||||
|
|
||||||
self.articles, self.translations = _process(all_articles)
|
|
||||||
self.drafts, self.drafts_translations = _process(all_drafts)
|
|
||||||
|
|
||||||
signals.article_generator_pretaxonomy.send(self)
|
|
||||||
|
|
||||||
for article in self.articles:
|
|
||||||
# only main articles are listed in categories and tags
|
|
||||||
# not translations
|
|
||||||
self.categories[article.category].append(article)
|
|
||||||
if hasattr(article, 'tags'):
|
|
||||||
for tag in article.tags:
|
|
||||||
self.tags[tag].append(article)
|
|
||||||
for author in getattr(article, 'authors', []):
|
|
||||||
self.authors[author].append(article)
|
|
||||||
|
|
||||||
self.dates = list(self.articles)
|
|
||||||
self.dates.sort(key=attrgetter('date'),
|
|
||||||
reverse=self.context['NEWEST_FIRST_ARCHIVES'])
|
|
||||||
|
|
||||||
# and generate the output :)
|
|
||||||
|
|
||||||
# order the categories per name
|
|
||||||
self.categories = list(self.categories.items())
|
|
||||||
self.categories.sort(
|
|
||||||
reverse=self.settings['REVERSE_CATEGORY_ORDER'])
|
|
||||||
|
|
||||||
self.authors = list(self.authors.items())
|
|
||||||
self.authors.sort()
|
|
||||||
|
|
||||||
self._update_context(('articles', 'dates', 'tags', 'categories',
|
|
||||||
'authors', 'related_posts', 'drafts'))
|
|
||||||
self.save_cache()
|
|
||||||
self.readers.save_cache()
|
|
||||||
signals.article_generator_finalized.send(self)
|
|
||||||
|
|
||||||
def generate_output(self, writer):
|
|
||||||
self.generate_feeds(writer)
|
|
||||||
self.generate_pages(writer)
|
|
||||||
signals.article_writer_finalized.send(self, writer=writer)
|
|
||||||
|
|
||||||
def refresh_metadata_intersite_links(self):
|
|
||||||
for e in chain(self.articles,
|
|
||||||
self.translations,
|
|
||||||
self.drafts,
|
|
||||||
self.drafts_translations):
|
|
||||||
if hasattr(e, 'refresh_metadata_intersite_links'):
|
|
||||||
e.refresh_metadata_intersite_links()
|
|
||||||
|
|
||||||
|
|
||||||
class PagesGenerator(CachingGenerator):
|
|
||||||
"""Generate pages"""
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
self.pages = []
|
|
||||||
self.translations = []
|
|
||||||
self.hidden_pages = []
|
|
||||||
self.hidden_translations = []
|
|
||||||
self.draft_pages = []
|
|
||||||
self.draft_translations = []
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
signals.page_generator_init.send(self)
|
|
||||||
|
|
||||||
def generate_context(self):
|
|
||||||
all_pages = []
|
|
||||||
hidden_pages = []
|
|
||||||
draft_pages = []
|
|
||||||
for f in self.get_files(
|
|
||||||
self.settings['PAGE_PATHS'],
|
|
||||||
exclude=self.settings['PAGE_EXCLUDES']):
|
|
||||||
page = self.get_cached_data(f, None)
|
|
||||||
if page is None:
|
|
||||||
try:
|
|
||||||
page = self.readers.read_file(
|
|
||||||
base_path=self.path, path=f, content_class=Page,
|
|
||||||
context=self.context,
|
|
||||||
preread_signal=signals.page_generator_preread,
|
|
||||||
preread_sender=self,
|
|
||||||
context_signal=signals.page_generator_context,
|
|
||||||
context_sender=self)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(
|
|
||||||
'Could not process %s\n%s', f, e,
|
|
||||||
exc_info=self.settings.get('DEBUG', False))
|
|
||||||
self._add_failed_source_path(f)
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not page.is_valid():
|
|
||||||
self._add_failed_source_path(f)
|
|
||||||
continue
|
|
||||||
|
|
||||||
self.cache_data(f, page)
|
|
||||||
|
|
||||||
if page.status == "published":
|
|
||||||
all_pages.append(page)
|
|
||||||
elif page.status == "hidden":
|
|
||||||
hidden_pages.append(page)
|
|
||||||
elif page.status == "draft":
|
|
||||||
draft_pages.append(page)
|
|
||||||
self.add_source_path(page)
|
|
||||||
self.add_static_links(page)
|
|
||||||
|
|
||||||
def _process(pages):
|
|
||||||
origs, translations = process_translations(
|
|
||||||
pages, translation_id=self.settings['PAGE_TRANSLATION_ID'])
|
|
||||||
origs = order_content(origs, self.settings['PAGE_ORDER_BY'])
|
|
||||||
return origs, translations
|
|
||||||
|
|
||||||
self.pages, self.translations = _process(all_pages)
|
|
||||||
self.hidden_pages, self.hidden_translations = _process(hidden_pages)
|
|
||||||
self.draft_pages, self.draft_translations = _process(draft_pages)
|
|
||||||
|
|
||||||
self._update_context(('pages', 'hidden_pages', 'draft_pages'))
|
|
||||||
|
|
||||||
self.save_cache()
|
|
||||||
self.readers.save_cache()
|
|
||||||
signals.page_generator_finalized.send(self)
|
|
||||||
|
|
||||||
def generate_output(self, writer):
|
|
||||||
for page in chain(self.translations, self.pages,
|
|
||||||
self.hidden_translations, self.hidden_pages,
|
|
||||||
self.draft_translations, self.draft_pages):
|
|
||||||
signals.page_generator_write_page.send(self, content=page)
|
|
||||||
writer.write_file(
|
|
||||||
page.save_as, self.get_template(page.template),
|
|
||||||
self.context, page=page,
|
|
||||||
relative_urls=self.settings['RELATIVE_URLS'],
|
|
||||||
override_output=hasattr(page, 'override_save_as'),
|
|
||||||
url=page.url)
|
|
||||||
signals.page_writer_finalized.send(self, writer=writer)
|
|
||||||
|
|
||||||
def refresh_metadata_intersite_links(self):
|
|
||||||
for e in chain(self.pages,
|
|
||||||
self.hidden_pages,
|
|
||||||
self.hidden_translations,
|
|
||||||
self.draft_pages,
|
|
||||||
self.draft_translations):
|
|
||||||
if hasattr(e, 'refresh_metadata_intersite_links'):
|
|
||||||
e.refresh_metadata_intersite_links()
|
|
||||||
|
|
||||||
|
|
||||||
class StaticGenerator(Generator):
|
|
||||||
"""copy static paths (what you want to copy, like images, medias etc.
|
|
||||||
to output"""
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
self.fallback_to_symlinks = False
|
|
||||||
signals.static_generator_init.send(self)
|
|
||||||
|
|
||||||
def generate_context(self):
|
|
||||||
self.staticfiles = []
|
|
||||||
linked_files = set(self.context['static_links'])
|
|
||||||
found_files = self.get_files(self.settings['STATIC_PATHS'],
|
|
||||||
exclude=self.settings['STATIC_EXCLUDES'],
|
|
||||||
extensions=False)
|
|
||||||
for f in linked_files | found_files:
|
|
||||||
|
|
||||||
# skip content source files unless the user explicitly wants them
|
|
||||||
if self.settings['STATIC_EXCLUDE_SOURCES']:
|
|
||||||
if self._is_potential_source_path(f):
|
|
||||||
continue
|
|
||||||
|
|
||||||
static = self.readers.read_file(
|
|
||||||
base_path=self.path, path=f, content_class=Static,
|
|
||||||
fmt='static', context=self.context,
|
|
||||||
preread_signal=signals.static_generator_preread,
|
|
||||||
preread_sender=self,
|
|
||||||
context_signal=signals.static_generator_context,
|
|
||||||
context_sender=self)
|
|
||||||
self.staticfiles.append(static)
|
|
||||||
self.add_source_path(static, static=True)
|
|
||||||
self._update_context(('staticfiles',))
|
|
||||||
signals.static_generator_finalized.send(self)
|
|
||||||
|
|
||||||
def generate_output(self, writer):
|
|
||||||
self._copy_paths(self.settings['THEME_STATIC_PATHS'], self.theme,
|
|
||||||
self.settings['THEME_STATIC_DIR'], self.output_path,
|
|
||||||
os.curdir)
|
|
||||||
for sc in self.context['staticfiles']:
|
|
||||||
if self._file_update_required(sc):
|
|
||||||
self._link_or_copy_staticfile(sc)
|
|
||||||
else:
|
|
||||||
logger.debug('%s is up to date, not copying', sc.source_path)
|
|
||||||
|
|
||||||
def _copy_paths(self, paths, source, destination, output_path,
|
|
||||||
final_path=None):
|
|
||||||
"""Copy all the paths from source to destination"""
|
|
||||||
for path in paths:
|
|
||||||
source_path = os.path.join(source, path)
|
|
||||||
|
|
||||||
if final_path:
|
|
||||||
if os.path.isfile(source_path):
|
|
||||||
destination_path = os.path.join(output_path, destination,
|
|
||||||
final_path,
|
|
||||||
os.path.basename(path))
|
|
||||||
else:
|
|
||||||
destination_path = os.path.join(output_path, destination,
|
|
||||||
final_path)
|
|
||||||
else:
|
|
||||||
destination_path = os.path.join(output_path, destination, path)
|
|
||||||
|
|
||||||
copy(source_path, destination_path,
|
|
||||||
self.settings['IGNORE_FILES'])
|
|
||||||
|
|
||||||
def _file_update_required(self, staticfile):
|
|
||||||
source_path = os.path.join(self.path, staticfile.source_path)
|
|
||||||
save_as = os.path.join(self.output_path, staticfile.save_as)
|
|
||||||
if not os.path.exists(save_as):
|
|
||||||
return True
|
|
||||||
elif (self.settings['STATIC_CREATE_LINKS'] and
|
|
||||||
os.path.samefile(source_path, save_as)):
|
|
||||||
return False
|
|
||||||
elif (self.settings['STATIC_CREATE_LINKS'] and
|
|
||||||
os.path.realpath(save_as) == source_path):
|
|
||||||
return False
|
|
||||||
elif not self.settings['STATIC_CHECK_IF_MODIFIED']:
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
return self._source_is_newer(staticfile)
|
|
||||||
|
|
||||||
def _source_is_newer(self, staticfile):
|
|
||||||
source_path = os.path.join(self.path, staticfile.source_path)
|
|
||||||
save_as = os.path.join(self.output_path, staticfile.save_as)
|
|
||||||
s_mtime = os.path.getmtime(source_path)
|
|
||||||
d_mtime = os.path.getmtime(save_as)
|
|
||||||
return s_mtime - d_mtime > 0.000001
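
`_file_update_required` checks, in order: a missing destination always needs work, an existing hard link or symlink back to the source is left alone when links are enabled, and the mtime comparison in `_source_is_newer` only matters once `STATIC_CHECK_IF_MODIFIED` is set. A sketch of the settings involved (values illustrative):

```python
# Illustrative static-file handling settings.
STATIC_CREATE_LINKS = True        # hard-link (falling back to symlinks) instead of copying
STATIC_CHECK_IF_MODIFIED = True   # skip files whose output copy is at least as new
STATIC_EXCLUDE_SOURCES = True     # don't also publish article/page source files
```
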
|
|
||||||
|
|
||||||
def _link_or_copy_staticfile(self, sc):
|
|
||||||
if self.settings['STATIC_CREATE_LINKS']:
|
|
||||||
self._link_staticfile(sc)
|
|
||||||
else:
|
|
||||||
self._copy_staticfile(sc)
|
|
||||||
|
|
||||||
def _copy_staticfile(self, sc):
|
|
||||||
source_path = os.path.join(self.path, sc.source_path)
|
|
||||||
save_as = os.path.join(self.output_path, sc.save_as)
|
|
||||||
self._mkdir(os.path.dirname(save_as))
|
|
||||||
copy(source_path, save_as)
|
|
||||||
logger.info('Copying %s to %s', sc.source_path, sc.save_as)
|
|
||||||
|
|
||||||
def _link_staticfile(self, sc):
|
|
||||||
source_path = os.path.join(self.path, sc.source_path)
|
|
||||||
save_as = os.path.join(self.output_path, sc.save_as)
|
|
||||||
self._mkdir(os.path.dirname(save_as))
|
|
||||||
try:
|
|
||||||
if os.path.lexists(save_as):
|
|
||||||
os.unlink(save_as)
|
|
||||||
logger.info('Linking %s and %s', sc.source_path, sc.save_as)
|
|
||||||
if self.fallback_to_symlinks:
|
|
||||||
os.symlink(source_path, save_as)
|
|
||||||
else:
|
|
||||||
os.link(source_path, save_as)
|
|
||||||
except OSError as err:
|
|
||||||
if err.errno == errno.EXDEV: # 18: Invalid cross-device link
|
|
||||||
logger.debug(
|
|
||||||
"Cross-device links not valid. "
|
|
||||||
"Creating symbolic links instead."
|
|
||||||
)
|
|
||||||
self.fallback_to_symlinks = True
|
|
||||||
self._link_staticfile(sc)
|
|
||||||
else:
|
|
||||||
raise err
|
|
||||||
|
|
||||||
def _mkdir(self, path):
|
|
||||||
if os.path.lexists(path) and not os.path.isdir(path):
|
|
||||||
os.unlink(path)
|
|
||||||
mkdir_p(path)
|
|
||||||
|
|
||||||
|
|
||||||
class SourceFileGenerator(Generator):
|
|
||||||
|
|
||||||
def generate_context(self):
|
|
||||||
self.output_extension = self.settings['OUTPUT_SOURCES_EXTENSION']
|
|
||||||
|
|
||||||
def _create_source(self, obj):
|
|
||||||
output_path, _ = os.path.splitext(obj.save_as)
|
|
||||||
dest = os.path.join(self.output_path,
|
|
||||||
output_path + self.output_extension)
|
|
||||||
copy(obj.source_path, dest)
|
|
||||||
|
|
||||||
def generate_output(self, writer=None):
|
|
||||||
logger.info('Generating source files...')
|
|
||||||
for obj in chain(self.context['articles'], self.context['pages']):
|
|
||||||
self._create_source(obj)
|
|
||||||
for obj_trans in obj.translations:
|
|
||||||
self._create_source(obj_trans)
|
|
||||||
pelican/log.py
@ -1,228 +0,0 @@
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from collections import defaultdict
|
|
||||||
from collections.abc import Mapping
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
'init'
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class BaseFormatter(logging.Formatter):
|
|
||||||
def __init__(self, fmt=None, datefmt=None):
|
|
||||||
FORMAT = '%(customlevelname)s %(message)s'
|
|
||||||
super().__init__(fmt=FORMAT, datefmt=datefmt)
|
|
||||||
|
|
||||||
def format(self, record):
|
|
||||||
customlevel = self._get_levelname(record.levelname)
|
|
||||||
record.__dict__['customlevelname'] = customlevel
|
|
||||||
# format multiline messages 'nicely' to make it clear they are together
|
|
||||||
record.msg = record.msg.replace('\n', '\n | ')
|
|
||||||
if not isinstance(record.args, Mapping):
|
|
||||||
record.args = tuple(arg.replace('\n', '\n | ') if
|
|
||||||
isinstance(arg, str) else
|
|
||||||
arg for arg in record.args)
|
|
||||||
return super().format(record)
|
|
||||||
|
|
||||||
def formatException(self, ei):
|
|
||||||
''' prefix traceback info for better representation '''
|
|
||||||
s = super().formatException(ei)
|
|
||||||
# fancy format traceback
|
|
||||||
s = '\n'.join(' | ' + line for line in s.splitlines())
|
|
||||||
# separate the traceback from the preceding lines
|
|
||||||
s = ' |___\n{}'.format(s)
|
|
||||||
return s
|
|
||||||
|
|
||||||
def _get_levelname(self, name):
|
|
||||||
''' NOOP: overridden by subclasses '''
|
|
||||||
return name
|
|
||||||
|
|
||||||
|
|
||||||
class ANSIFormatter(BaseFormatter):
|
|
||||||
ANSI_CODES = {
|
|
||||||
'red': '\033[1;31m',
|
|
||||||
'yellow': '\033[1;33m',
|
|
||||||
'cyan': '\033[1;36m',
|
|
||||||
'white': '\033[1;37m',
|
|
||||||
'bgred': '\033[1;41m',
|
|
||||||
'bggrey': '\033[1;100m',
|
|
||||||
'reset': '\033[0;m'}
|
|
||||||
|
|
||||||
LEVEL_COLORS = {
|
|
||||||
'INFO': 'cyan',
|
|
||||||
'WARNING': 'yellow',
|
|
||||||
'ERROR': 'red',
|
|
||||||
'CRITICAL': 'bgred',
|
|
||||||
'DEBUG': 'bggrey'}
|
|
||||||
|
|
||||||
def _get_levelname(self, name):
|
|
||||||
color = self.ANSI_CODES[self.LEVEL_COLORS.get(name, 'white')]
|
|
||||||
if name == 'INFO':
|
|
||||||
fmt = '{0}->{2}'
|
|
||||||
else:
|
|
||||||
fmt = '{0}{1}{2}:'
|
|
||||||
return fmt.format(color, name, self.ANSI_CODES['reset'])
|
|
||||||
|
|
||||||
|
|
||||||
class TextFormatter(BaseFormatter):
|
|
||||||
"""
|
|
||||||
Convert a `logging.LogRecord` object into text.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def _get_levelname(self, name):
|
|
||||||
if name == 'INFO':
|
|
||||||
return '->'
|
|
||||||
else:
|
|
||||||
return name + ':'
|
|
||||||
|
|
||||||
|
|
||||||
class LimitFilter(logging.Filter):
|
|
||||||
"""
|
|
||||||
Remove duplicate records, and limit the number of records in the same
|
|
||||||
group.
|
|
||||||
|
|
||||||
Groups are specified by the message to use when the number of records in
|
|
||||||
the same group hits the limit.
|
|
||||||
E.g.: log.warning(('43 is not the answer', 'More erroneous answers'))
|
|
||||||
"""
|
|
||||||
|
|
||||||
LOGS_DEDUP_MIN_LEVEL = logging.WARNING
|
|
||||||
|
|
||||||
_ignore = set()
|
|
||||||
_raised_messages = set()
|
|
||||||
_threshold = 5
|
|
||||||
_group_count = defaultdict(int)
|
|
||||||
|
|
||||||
def filter(self, record):
|
|
||||||
# don't limit log messages for anything above "warning"
|
|
||||||
if record.levelno > self.LOGS_DEDUP_MIN_LEVEL:
|
|
||||||
return True
|
|
||||||
|
|
||||||
# extract group
|
|
||||||
group = record.__dict__.get('limit_msg', None)
|
|
||||||
group_args = record.__dict__.get('limit_args', ())
|
|
||||||
|
|
||||||
# ignore record if it was already raised
|
|
||||||
message_key = (record.levelno, record.getMessage())
|
|
||||||
if message_key in self._raised_messages:
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
self._raised_messages.add(message_key)
|
|
||||||
|
|
||||||
# ignore LOG_FILTER records by templates or messages
|
|
||||||
# when "debug" isn't enabled
|
|
||||||
logger_level = logging.getLogger().getEffectiveLevel()
|
|
||||||
if logger_level > logging.DEBUG:
|
|
||||||
template_key = (record.levelno, record.msg)
|
|
||||||
message_key = (record.levelno, record.getMessage())
|
|
||||||
if (template_key in self._ignore or message_key in self._ignore):
|
|
||||||
return False
|
|
||||||
|
|
||||||
# check if we went over threshold
|
|
||||||
if group:
|
|
||||||
key = (record.levelno, group)
|
|
||||||
self._group_count[key] += 1
|
|
||||||
if self._group_count[key] == self._threshold:
|
|
||||||
record.msg = group
|
|
||||||
record.args = group_args
|
|
||||||
elif self._group_count[key] > self._threshold:
|
|
||||||
return False
|
|
||||||
return True
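
The grouping mechanism relies on callers attaching `limit_msg` (and optionally `limit_args`) to a record through logging's standard `extra=` argument; once a group has been seen `_threshold` times, the group message is emitted in place of the individual one and later records in that group are dropped. A minimal sketch of a caller (message text and variable names are made up):

```python
# Hypothetical caller; any logger with LimitFilter attached behaves this way.
for path in missing_files:
    logger.warning(
        'Could not find %s', path,
        extra={'limit_msg': 'Additional files could not be found',
               'limit_args': ()})
```
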
|
|
||||||
|
|
||||||
|
|
||||||
class LimitLogger(logging.Logger):
|
|
||||||
"""
|
|
||||||
A logger which adds LimitFilter automatically
|
|
||||||
"""
|
|
||||||
|
|
||||||
limit_filter = LimitFilter()
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
self.enable_filter()
|
|
||||||
|
|
||||||
def disable_filter(self):
|
|
||||||
self.removeFilter(LimitLogger.limit_filter)
|
|
||||||
|
|
||||||
def enable_filter(self):
|
|
||||||
self.addFilter(LimitLogger.limit_filter)
|
|
||||||
|
|
||||||
|
|
||||||
class FatalLogger(LimitLogger):
|
|
||||||
warnings_fatal = False
|
|
||||||
errors_fatal = False
|
|
||||||
|
|
||||||
def warning(self, *args, **kwargs):
|
|
||||||
super().warning(*args, **kwargs)
|
|
||||||
if FatalLogger.warnings_fatal:
|
|
||||||
raise RuntimeError('Warning encountered')
|
|
||||||
|
|
||||||
def error(self, *args, **kwargs):
|
|
||||||
super().error(*args, **kwargs)
|
|
||||||
if FatalLogger.errors_fatal:
|
|
||||||
raise RuntimeError('Error encountered')
|
|
||||||
|
|
||||||
|
|
||||||
logging.setLoggerClass(FatalLogger)
|
|
||||||
# force root logger to be of our preferred class
|
|
||||||
logging.getLogger().__class__ = FatalLogger
|
|
||||||
|
|
||||||
|
|
||||||
def supports_color():
|
|
||||||
"""
|
|
||||||
Returns True if the running system's terminal supports color,
|
|
||||||
and False otherwise.
|
|
||||||
|
|
||||||
from django.core.management.color
|
|
||||||
"""
|
|
||||||
plat = sys.platform
|
|
||||||
supported_platform = plat != 'Pocket PC' and \
|
|
||||||
(plat != 'win32' or 'ANSICON' in os.environ)
|
|
||||||
|
|
||||||
# isatty is not always implemented, #6223.
|
|
||||||
is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
|
|
||||||
if not supported_platform or not is_a_tty:
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def get_formatter():
|
|
||||||
if supports_color():
|
|
||||||
return ANSIFormatter()
|
|
||||||
else:
|
|
||||||
return TextFormatter()
|
|
||||||
|
|
||||||
|
|
||||||
def init(level=None, fatal='', handler=logging.StreamHandler(), name=None,
|
|
||||||
logs_dedup_min_level=None):
|
|
||||||
FatalLogger.warnings_fatal = fatal.startswith('warning')
|
|
||||||
FatalLogger.errors_fatal = bool(fatal)
|
|
||||||
|
|
||||||
logger = logging.getLogger(name)
|
|
||||||
|
|
||||||
handler.setFormatter(get_formatter())
|
|
||||||
logger.addHandler(handler)
|
|
||||||
|
|
||||||
if level:
|
|
||||||
logger.setLevel(level)
|
|
||||||
if logs_dedup_min_level:
|
|
||||||
LimitFilter.LOGS_DEDUP_MIN_LEVEL = logs_dedup_min_level
|
|
||||||
|
|
||||||
|
|
||||||
def log_warnings():
|
|
||||||
import warnings
|
|
||||||
logging.captureWarnings(True)
|
|
||||||
warnings.simplefilter("default", DeprecationWarning)
|
|
||||||
init(logging.DEBUG, name='py.warnings')
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
init(level=logging.DEBUG)
|
|
||||||
|
|
||||||
root_logger = logging.getLogger()
|
|
||||||
root_logger.debug('debug')
|
|
||||||
root_logger.info('info')
|
|
||||||
root_logger.warning('warning')
|
|
||||||
root_logger.error('error')
|
|
||||||
root_logger.critical('critical')
|
|
||||||
|
|
@ -1,158 +0,0 @@
|
||||||
import functools
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
from collections import namedtuple
|
|
||||||
from math import ceil
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
PaginationRule = namedtuple(
|
|
||||||
'PaginationRule',
|
|
||||||
'min_page URL SAVE_AS',
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class Paginator:
|
|
||||||
def __init__(self, name, url, object_list, settings, per_page=None):
|
|
||||||
self.name = name
|
|
||||||
self.url = url
|
|
||||||
self.object_list = object_list
|
|
||||||
self.settings = settings
|
|
||||||
if per_page:
|
|
||||||
self.per_page = per_page
|
|
||||||
self.orphans = settings['DEFAULT_ORPHANS']
|
|
||||||
else:
|
|
||||||
self.per_page = len(object_list)
|
|
||||||
self.orphans = 0
|
|
||||||
|
|
||||||
self._num_pages = self._count = None
|
|
||||||
|
|
||||||
def page(self, number):
|
|
||||||
"Returns a Page object for the given 1-based page number."
|
|
||||||
bottom = (number - 1) * self.per_page
|
|
||||||
top = bottom + self.per_page
|
|
||||||
if top + self.orphans >= self.count:
|
|
||||||
top = self.count
|
|
||||||
return Page(self.name, self.url, self.object_list[bottom:top], number,
|
|
||||||
self, self.settings)
|
|
||||||
|
|
||||||
def _get_count(self):
|
|
||||||
"Returns the total number of objects, across all pages."
|
|
||||||
if self._count is None:
|
|
||||||
self._count = len(self.object_list)
|
|
||||||
return self._count
|
|
||||||
count = property(_get_count)
|
|
||||||
|
|
||||||
def _get_num_pages(self):
|
|
||||||
"Returns the total number of pages."
|
|
||||||
if self._num_pages is None:
|
|
||||||
hits = max(1, self.count - self.orphans)
|
|
||||||
self._num_pages = int(ceil(hits / (float(self.per_page) or 1)))
|
|
||||||
return self._num_pages
|
|
||||||
num_pages = property(_get_num_pages)
|
|
||||||
|
|
||||||
def _get_page_range(self):
|
|
||||||
"""
|
|
||||||
Returns a 1-based range of pages for iterating through within
|
|
||||||
a template for loop.
|
|
||||||
"""
|
|
||||||
return list(range(1, self.num_pages + 1))
|
|
||||||
page_range = property(_get_page_range)
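
A quick sketch of the paginator in isolation, assuming a settings dict that supplies `DEFAULT_ORPHANS` (the only key read when `per_page` is given):

```python
# Illustrative use; 'articles' stands in for any list of objects.
articles = list(range(23))
paginator = Paginator('index.html', 'index.html', articles,
                      settings={'DEFAULT_ORPHANS': 0}, per_page=10)
paginator.num_pages    # 3  (ceil(23 / 10))
paginator.page_range   # [1, 2, 3]
paginator.page(1)      # Page holding objects 0..9
```
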
|
|
||||||
|
|
||||||
|
|
||||||
class Page:
|
|
||||||
def __init__(self, name, url, object_list, number, paginator, settings):
|
|
||||||
self.full_name = name
|
|
||||||
self.name, self.extension = os.path.splitext(name)
|
|
||||||
dn, fn = os.path.split(name)
|
|
||||||
self.base_name = dn if fn in ('index.htm', 'index.html') else self.name
|
|
||||||
self.base_url = url
|
|
||||||
self.object_list = object_list
|
|
||||||
self.number = number
|
|
||||||
self.paginator = paginator
|
|
||||||
self.settings = settings
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return '<Page {} of {}>'.format(self.number, self.paginator.num_pages)
|
|
||||||
|
|
||||||
def has_next(self):
|
|
||||||
return self.number < self.paginator.num_pages
|
|
||||||
|
|
||||||
def has_previous(self):
|
|
||||||
return self.number > 1
|
|
||||||
|
|
||||||
def has_other_pages(self):
|
|
||||||
return self.has_previous() or self.has_next()
|
|
||||||
|
|
||||||
def next_page_number(self):
|
|
||||||
return self.number + 1
|
|
||||||
|
|
||||||
def previous_page_number(self):
|
|
||||||
return self.number - 1
|
|
||||||
|
|
||||||
def start_index(self):
|
|
||||||
"""
|
|
||||||
Returns the 1-based index of the first object on this page,
|
|
||||||
relative to total objects in the paginator.
|
|
||||||
"""
|
|
||||||
# Special case, return zero if no items.
|
|
||||||
if self.paginator.count == 0:
|
|
||||||
return 0
|
|
||||||
return (self.paginator.per_page * (self.number - 1)) + 1
|
|
||||||
|
|
||||||
def end_index(self):
|
|
||||||
"""
|
|
||||||
Returns the 1-based index of the last object on this page,
|
|
||||||
relative to total objects found (hits).
|
|
||||||
"""
|
|
||||||
# Special case for the last page because there can be orphans.
|
|
||||||
if self.number == self.paginator.num_pages:
|
|
||||||
return self.paginator.count
|
|
||||||
return self.number * self.paginator.per_page
|
|
||||||
|
|
||||||
def _from_settings(self, key):
|
|
||||||
"""Returns URL information as defined in settings. Similar to
|
|
||||||
URLWrapper._from_settings, but specialized to deal with pagination
|
|
||||||
logic."""
|
|
||||||
|
|
||||||
rule = None
|
|
||||||
|
|
||||||
# find the last matching pagination rule
|
|
||||||
for p in self.settings['PAGINATION_PATTERNS']:
|
|
||||||
if p.min_page <= self.number:
|
|
||||||
rule = p
|
|
||||||
|
|
||||||
if not rule:
|
|
||||||
return ''
|
|
||||||
|
|
||||||
prop_value = getattr(rule, key)
|
|
||||||
|
|
||||||
if not isinstance(prop_value, str):
|
|
||||||
logger.warning('%s is set to %s', key, prop_value)
|
|
||||||
return prop_value
|
|
||||||
|
|
||||||
# URL or SAVE_AS is a string, format it with a controlled context
|
|
||||||
context = {
|
|
||||||
'save_as': self.full_name,
|
|
||||||
'url': self.base_url,
|
|
||||||
'name': self.name,
|
|
||||||
'base_name': self.base_name,
|
|
||||||
'extension': self.extension,
|
|
||||||
'number': self.number,
|
|
||||||
}
|
|
||||||
|
|
||||||
ret = prop_value.format(**context)
|
|
||||||
# Remove a single leading slash, if any. This is done for backwards
|
|
||||||
# compatibility reasons. If a leading slash is needed (for URLs
|
|
||||||
# relative to server root or absolute URLs without the scheme such as
|
|
||||||
# //blog.my.site/), it can be worked around by prefixing the pagination
|
|
||||||
# pattern by an additional slash (which then gets removed, preserving
|
|
||||||
# the other slashes). This also means the following code *can't* be
|
|
||||||
# changed to lstrip() because that would remove all leading slashes and
|
|
||||||
# thus make the workaround impossible. See
|
|
||||||
# test_custom_pagination_pattern() for a verification of this.
|
|
||||||
if ret[0] == '/':
|
|
||||||
ret = ret[1:]
|
|
||||||
return ret
|
|
||||||
|
|
||||||
url = property(functools.partial(_from_settings, key='URL'))
|
|
||||||
save_as = property(functools.partial(_from_settings, key='SAVE_AS'))
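
`_from_settings` picks the last `PaginationRule` whose `min_page` does not exceed the current page number and formats its `URL`/`SAVE_AS` pattern with the context keys built above (`number`, `base_name`, `extension`, and so on). A hypothetical configuration that keeps page 1 at its original location and moves later pages under `.../page/N/`, assuming (as in Pelican's settings handling) that the tuples are normalized into `PaginationRule` instances before they reach this code:

```python
# Illustrative PAGINATION_PATTERNS; each entry is (min_page, URL, SAVE_AS).
PAGINATION_PATTERNS = (
    (1, '{url}', '{save_as}'),
    (2, '{base_name}/page/{number}/', '{base_name}/page/{number}/index.html'),
)
```
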
|
|
||||||
|
|
@ -1,101 +0,0 @@
|
||||||
import importlib
|
|
||||||
import importlib.machinery
|
|
||||||
import importlib.util
|
|
||||||
import logging
|
|
||||||
import pkgutil
|
|
||||||
import sys
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def iter_namespace(ns_pkg):
|
|
||||||
# Specifying the second argument (prefix) to iter_modules makes the
|
|
||||||
# returned name an absolute name instead of a relative one. This allows
|
|
||||||
# import_module to work without having to do additional modification to
|
|
||||||
# the name.
|
|
||||||
return pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + ".")
|
|
||||||
|
|
||||||
|
|
||||||
def get_namespace_plugins(ns_pkg=None):
|
|
||||||
if ns_pkg is None:
|
|
||||||
import pelican.plugins as ns_pkg
|
|
||||||
|
|
||||||
return {
|
|
||||||
name: importlib.import_module(name)
|
|
||||||
for finder, name, ispkg
|
|
||||||
in iter_namespace(ns_pkg)
|
|
||||||
if ispkg
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def list_plugins(ns_pkg=None):
|
|
||||||
from pelican.log import init as init_logging
|
|
||||||
init_logging(logging.INFO)
|
|
||||||
ns_plugins = get_namespace_plugins(ns_pkg)
|
|
||||||
if ns_plugins:
|
|
||||||
logger.info('Plugins found:\n' + '\n'.join(ns_plugins))
|
|
||||||
else:
|
|
||||||
logger.info('No plugins are installed')
|
|
||||||
|
|
||||||
|
|
||||||
def load_legacy_plugin(plugin, plugin_paths):
|
|
||||||
# Try to find plugin in PLUGIN_PATHS
|
|
||||||
spec = importlib.machinery.PathFinder.find_spec(plugin, plugin_paths)
|
|
||||||
if spec is None:
|
|
||||||
# If failed, try to find it in normal importable locations
|
|
||||||
spec = importlib.util.find_spec(plugin)
|
|
||||||
if spec is None:
|
|
||||||
raise ImportError('Cannot import plugin `{}`'.format(plugin))
|
|
||||||
else:
|
|
||||||
# create module object from spec
|
|
||||||
mod = importlib.util.module_from_spec(spec)
|
|
||||||
# place it into sys.modules cache
|
|
||||||
# necessary if module imports itself at some point (e.g. packages)
|
|
||||||
sys.modules[spec.name] = mod
|
|
||||||
try:
|
|
||||||
# try to execute it inside module object
|
|
||||||
spec.loader.exec_module(mod)
|
|
||||||
except Exception: # problem with import
|
|
||||||
try:
|
|
||||||
# remove module from sys.modules since it can't be loaded
|
|
||||||
del sys.modules[spec.name]
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
raise
|
|
||||||
|
|
||||||
# if all went well, we have the plugin module
|
|
||||||
return mod
|
|
||||||
|
|
||||||
|
|
||||||
def load_plugins(settings):
|
|
||||||
logger.debug('Finding namespace plugins')
|
|
||||||
namespace_plugins = get_namespace_plugins()
|
|
||||||
if namespace_plugins:
|
|
||||||
logger.debug('Namespace plugins found:\n' +
|
|
||||||
'\n'.join(namespace_plugins))
|
|
||||||
plugins = []
|
|
||||||
if settings.get('PLUGINS') is not None:
|
|
||||||
for plugin in settings['PLUGINS']:
|
|
||||||
if isinstance(plugin, str):
|
|
||||||
logger.debug('Loading plugin `%s`', plugin)
|
|
||||||
# try to find in namespace plugins
|
|
||||||
if plugin in namespace_plugins:
|
|
||||||
plugin = namespace_plugins[plugin]
|
|
||||||
elif 'pelican.plugins.{}'.format(plugin) in namespace_plugins:
|
|
||||||
plugin = namespace_plugins['pelican.plugins.{}'.format(
|
|
||||||
plugin)]
|
|
||||||
# try to import it
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
plugin = load_legacy_plugin(
|
|
||||||
plugin,
|
|
||||||
settings.get('PLUGIN_PATHS', []))
|
|
||||||
except ImportError as e:
|
|
||||||
logger.error('Cannot load plugin `%s`\n%s', plugin, e)
|
|
||||||
continue
|
|
||||||
plugins.append(plugin)
|
|
||||||
else:
|
|
||||||
plugins = list(namespace_plugins.values())
|
|
||||||
|
|
||||||
return plugins
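
The resolution order above is: a string entry in `PLUGINS` is first looked up as an installed namespace plugin (with or without the `pelican.plugins.` prefix) and only then treated as a legacy plugin to be found via `PLUGIN_PATHS` or the normal import machinery; when `PLUGINS` is left unset (`None`), every installed namespace plugin is loaded. A hypothetical configuration:

```python
# Illustrative plugin settings; the plugin names are made up.
PLUGIN_PATHS = ['plugins']      # extra directories searched for legacy plugins
PLUGINS = [
    'sitemap',                  # matched as 'pelican.plugins.sitemap' if installed
    'my_local_plugin',          # otherwise looked up in PLUGIN_PATHS, then sys.path
]
```
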
|
|
||||||
|
|
@ -1,49 +0,0 @@
|
||||||
from blinker import signal
|
|
||||||
|
|
||||||
# Run-level signals:
|
|
||||||
|
|
||||||
initialized = signal('pelican_initialized')
|
|
||||||
get_generators = signal('get_generators')
|
|
||||||
all_generators_finalized = signal('all_generators_finalized')
|
|
||||||
get_writer = signal('get_writer')
|
|
||||||
finalized = signal('pelican_finalized')
|
|
||||||
|
|
||||||
# Reader-level signals
|
|
||||||
|
|
||||||
readers_init = signal('readers_init')
|
|
||||||
|
|
||||||
# Generator-level signals
|
|
||||||
|
|
||||||
generator_init = signal('generator_init')
|
|
||||||
|
|
||||||
article_generator_init = signal('article_generator_init')
|
|
||||||
article_generator_pretaxonomy = signal('article_generator_pretaxonomy')
|
|
||||||
article_generator_finalized = signal('article_generator_finalized')
|
|
||||||
article_generator_write_article = signal('article_generator_write_article')
|
|
||||||
article_writer_finalized = signal('article_writer_finalized')
|
|
||||||
|
|
||||||
page_generator_init = signal('page_generator_init')
|
|
||||||
page_generator_finalized = signal('page_generator_finalized')
|
|
||||||
page_generator_write_page = signal('page_generator_write_page')
|
|
||||||
page_writer_finalized = signal('page_writer_finalized')
|
|
||||||
|
|
||||||
static_generator_init = signal('static_generator_init')
|
|
||||||
static_generator_finalized = signal('static_generator_finalized')
|
|
||||||
|
|
||||||
# Page-level signals
|
|
||||||
|
|
||||||
article_generator_preread = signal('article_generator_preread')
|
|
||||||
article_generator_context = signal('article_generator_context')
|
|
||||||
|
|
||||||
page_generator_preread = signal('page_generator_preread')
|
|
||||||
page_generator_context = signal('page_generator_context')
|
|
||||||
|
|
||||||
static_generator_preread = signal('static_generator_preread')
|
|
||||||
static_generator_context = signal('static_generator_context')
|
|
||||||
|
|
||||||
content_object_init = signal('content_object_init')
|
|
||||||
|
|
||||||
# Writers signals
|
|
||||||
content_written = signal('content_written')
|
|
||||||
feed_generated = signal('feed_generated')
|
|
||||||
feed_written = signal('feed_written')
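
Plugins hook into these blinker signals by connecting receiver functions, conventionally from a `register()` function in the plugin module. A minimal, hypothetical plugin (this tree exposes the signals as `pelican.plugins.signals`; older plugins import `pelican.signals`):

```python
# Hypothetical plugin module, e.g. my_plugin.py
from pelican.plugins import signals


def add_article_count(generator):
    # Runs after ArticlesGenerator.generate_context(), when articles,
    # tags, categories and authors are fully populated.
    generator.context['article_count'] = len(generator.articles)


def register():
    signals.article_generator_finalized.connect(add_article_count)
```
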
|
|
||||||
|
|
@ -1,747 +0,0 @@
|
||||||
import datetime
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
from collections import OrderedDict
|
|
||||||
from html import escape
|
|
||||||
from html.parser import HTMLParser
|
|
||||||
from io import StringIO
|
|
||||||
|
|
||||||
import docutils
|
|
||||||
import docutils.core
|
|
||||||
import docutils.io
|
|
||||||
from docutils.parsers.rst.languages import get_language as get_docutils_lang
|
|
||||||
from docutils.writers.html4css1 import HTMLTranslator, Writer
|
|
||||||
|
|
||||||
from pelican import rstdirectives # NOQA
|
|
||||||
from pelican.cache import FileStampDataCacher
|
|
||||||
from pelican.contents import Author, Category, Page, Tag
|
|
||||||
from pelican.plugins import signals
|
|
||||||
from pelican.utils import get_date, pelican_open, posixize_path
|
|
||||||
|
|
||||||
try:
|
|
||||||
from markdown import Markdown
|
|
||||||
except ImportError:
|
|
||||||
Markdown = False # NOQA
|
|
||||||
|
|
||||||
# Metadata processors have no way to discard an unwanted value, so we have
|
|
||||||
# them return this value instead to signal that it should be discarded later.
|
|
||||||
# This means that _filter_discardable_metadata() must be called on processed
|
|
||||||
# metadata dicts before use, to remove the items with the special value.
|
|
||||||
_DISCARD = object()
|
|
||||||
|
|
||||||
DUPLICATES_DEFINITIONS_ALLOWED = {
|
|
||||||
'tags': False,
|
|
||||||
'date': False,
|
|
||||||
'modified': False,
|
|
||||||
'status': False,
|
|
||||||
'category': False,
|
|
||||||
'author': False,
|
|
||||||
'save_as': False,
|
|
||||||
'url': False,
|
|
||||||
'authors': False,
|
|
||||||
'slug': False
|
|
||||||
}
|
|
||||||
|
|
||||||
METADATA_PROCESSORS = {
|
|
||||||
'tags': lambda x, y: ([
|
|
||||||
Tag(tag, y)
|
|
||||||
for tag in ensure_metadata_list(x)
|
|
||||||
] or _DISCARD),
|
|
||||||
'date': lambda x, y: get_date(x.replace('_', ' ')),
|
|
||||||
'modified': lambda x, y: get_date(x),
|
|
||||||
'status': lambda x, y: x.strip() or _DISCARD,
|
|
||||||
'category': lambda x, y: _process_if_nonempty(Category, x, y),
|
|
||||||
'author': lambda x, y: _process_if_nonempty(Author, x, y),
|
|
||||||
'authors': lambda x, y: ([
|
|
||||||
Author(author, y)
|
|
||||||
for author in ensure_metadata_list(x)
|
|
||||||
] or _DISCARD),
|
|
||||||
'slug': lambda x, y: x.strip() or _DISCARD,
|
|
||||||
}
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def ensure_metadata_list(text):
|
|
||||||
"""Canonicalize the format of a list of authors or tags. This works
|
|
||||||
the same way as Docutils' "authors" field: if it's already a list,
|
|
||||||
those boundaries are preserved; otherwise, it must be a string;
|
|
||||||
if the string contains semicolons, it is split on semicolons;
|
|
||||||
otherwise, it is split on commas. This allows you to write
|
|
||||||
author lists in either "Jane Doe, John Doe" or "Doe, Jane; Doe, John"
|
|
||||||
format.
|
|
||||||
|
|
||||||
Regardless, all list items undergo .strip() before returning, and
|
|
||||||
empty items are discarded.
|
|
||||||
"""
|
|
||||||
if isinstance(text, str):
|
|
||||||
if ';' in text:
|
|
||||||
text = text.split(';')
|
|
||||||
else:
|
|
||||||
text = text.split(',')
|
|
||||||
|
|
||||||
return list(OrderedDict.fromkeys(
|
|
||||||
[v for v in (w.strip() for w in text) if v]
|
|
||||||
))
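
A quick illustration of the splitting rules described in the docstring:

```python
ensure_metadata_list('Jane Doe, John Doe')         # ['Jane Doe', 'John Doe']
ensure_metadata_list('Doe, Jane; Doe, John')       # ['Doe, Jane', 'Doe, John']
ensure_metadata_list([' pelican ', '', 'python'])  # ['pelican', 'python']
```
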
|
|
||||||
|
|
||||||
|
|
||||||
def _process_if_nonempty(processor, name, settings):
|
|
||||||
"""Removes extra whitespace from name and applies a metadata processor.
|
|
||||||
If name is empty or all whitespace, returns _DISCARD instead.
|
|
||||||
"""
|
|
||||||
name = name.strip()
|
|
||||||
return processor(name, settings) if name else _DISCARD
|
|
||||||
|
|
||||||
|
|
||||||
def _filter_discardable_metadata(metadata):
|
|
||||||
"""Return a copy of a dict, minus any items marked as discardable."""
|
|
||||||
return {name: val for name, val in metadata.items() if val is not _DISCARD}
|
|
||||||
|
|
||||||
|
|
||||||
class BaseReader:
|
|
||||||
"""Base class to read files.
|
|
||||||
|
|
||||||
This class is used to process static files, and it can be inherited for
|
|
||||||
other types of files. A Reader class must have the following attributes:
|
|
||||||
|
|
||||||
- enabled: (boolean) tells whether the Reader class is enabled. It
|
|
||||||
generally depends on the import of some dependency.
|
|
||||||
- file_extensions: a list of file extensions that the Reader will process.
|
|
||||||
- extensions: a list of extensions to use in the reader (typical use is
|
|
||||||
Markdown).
|
|
||||||
|
|
||||||
"""
|
|
||||||
enabled = True
|
|
||||||
file_extensions = ['static']
|
|
||||||
extensions = None
|
|
||||||
|
|
||||||
def __init__(self, settings):
|
|
||||||
self.settings = settings
|
|
||||||
|
|
||||||
def process_metadata(self, name, value):
|
|
||||||
if name in METADATA_PROCESSORS:
|
|
||||||
return METADATA_PROCESSORS[name](value, self.settings)
|
|
||||||
return value
|
|
||||||
|
|
||||||
def read(self, source_path):
|
|
||||||
"No-op parser"
|
|
||||||
content = None
|
|
||||||
metadata = {}
|
|
||||||
return content, metadata
|
|
||||||
|
|
||||||
|
|
||||||
class _FieldBodyTranslator(HTMLTranslator):
|
|
||||||
|
|
||||||
def __init__(self, document):
|
|
||||||
super().__init__(document)
|
|
||||||
self.compact_p = None
|
|
||||||
|
|
||||||
def astext(self):
|
|
||||||
return ''.join(self.body)
|
|
||||||
|
|
||||||
def visit_field_body(self, node):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def depart_field_body(self, node):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def render_node_to_html(document, node, field_body_translator_class):
|
|
||||||
visitor = field_body_translator_class(document)
|
|
||||||
node.walkabout(visitor)
|
|
||||||
return visitor.astext()
|
|
||||||
|
|
||||||
|
|
||||||
class PelicanHTMLWriter(Writer):
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
super().__init__()
|
|
||||||
self.translator_class = PelicanHTMLTranslator
|
|
||||||
|
|
||||||
|
|
||||||
class PelicanHTMLTranslator(HTMLTranslator):
|
|
||||||
|
|
||||||
def visit_abbreviation(self, node):
|
|
||||||
attrs = {}
|
|
||||||
if node.hasattr('explanation'):
|
|
||||||
attrs['title'] = node['explanation']
|
|
||||||
self.body.append(self.starttag(node, 'abbr', '', **attrs))
|
|
||||||
|
|
||||||
def depart_abbreviation(self, node):
|
|
||||||
self.body.append('</abbr>')
|
|
||||||
|
|
||||||
def visit_image(self, node):
|
|
||||||
# set an empty alt if alt is not specified
|
|
||||||
# prevents alt from being taken from src
|
|
||||||
node['alt'] = node.get('alt', '')
|
|
||||||
return HTMLTranslator.visit_image(self, node)
|
|
||||||
|
|
||||||
|
|
||||||
class RstReader(BaseReader):
|
|
||||||
"""Reader for reStructuredText files
|
|
||||||
|
|
||||||
By default the output HTML is written using
|
|
||||||
docutils.writers.html4css1.Writer and translated using a subclass of
|
|
||||||
docutils.writers.html4css1.HTMLTranslator. If you want to override it with
|
|
||||||
your own writer/translator (e.g. an HTML5-based one), pass your classes to
|
|
||||||
these two attributes. Look in the source code for details.
|
|
||||||
|
|
||||||
writer_class Used for writing contents
|
|
||||||
field_body_translator_class Used for translating metadata such
|
|
||||||
as article summary
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
enabled = bool(docutils)
|
|
||||||
file_extensions = ['rst']
|
|
||||||
|
|
||||||
writer_class = PelicanHTMLWriter
|
|
||||||
field_body_translator_class = _FieldBodyTranslator
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
|
|
||||||
lang_code = self.settings.get('DEFAULT_LANG', 'en')
|
|
||||||
if get_docutils_lang(lang_code):
|
|
||||||
self._language_code = lang_code
|
|
||||||
else:
|
|
||||||
logger.warning("Docutils has no localization for '%s'."
|
|
||||||
" Using 'en' instead.", lang_code)
|
|
||||||
self._language_code = 'en'
|
|
||||||
|
|
||||||
def _parse_metadata(self, document, source_path):
|
|
||||||
"""Return the dict containing document metadata"""
|
|
||||||
formatted_fields = self.settings['FORMATTED_FIELDS']
|
|
||||||
|
|
||||||
output = {}
|
|
||||||
|
|
||||||
if document.first_child_matching_class(docutils.nodes.title) is None:
|
|
||||||
logger.warning(
|
|
||||||
'Document title missing in file %s: '
|
|
||||||
'Ensure exactly one top level section',
|
|
||||||
source_path)
|
|
||||||
|
|
||||||
for docinfo in document.traverse(docutils.nodes.docinfo):
|
|
||||||
for element in docinfo.children:
|
|
||||||
if element.tagname == 'field': # custom fields (e.g. summary)
|
|
||||||
name_elem, body_elem = element.children
|
|
||||||
name = name_elem.astext()
|
|
||||||
if name in formatted_fields:
|
|
||||||
value = render_node_to_html(
|
|
||||||
document, body_elem,
|
|
||||||
self.field_body_translator_class)
|
|
||||||
else:
|
|
||||||
value = body_elem.astext()
|
|
||||||
elif element.tagname == 'authors': # author list
|
|
||||||
name = element.tagname
|
|
||||||
value = [element.astext() for element in element.children]
|
|
||||||
else: # standard fields (e.g. address)
|
|
||||||
name = element.tagname
|
|
||||||
value = element.astext()
|
|
||||||
name = name.lower()
|
|
||||||
|
|
||||||
output[name] = self.process_metadata(name, value)
|
|
||||||
return output
|
|
||||||
|
|
||||||
def _get_publisher(self, source_path):
|
|
||||||
extra_params = {'initial_header_level': '2',
|
|
||||||
'syntax_highlight': 'short',
|
|
||||||
'input_encoding': 'utf-8',
|
|
||||||
'language_code': self._language_code,
|
|
||||||
'halt_level': 2,
|
|
||||||
'traceback': True,
|
|
||||||
'warning_stream': StringIO(),
|
|
||||||
'embed_stylesheet': False}
|
|
||||||
user_params = self.settings.get('DOCUTILS_SETTINGS')
|
|
||||||
if user_params:
|
|
||||||
extra_params.update(user_params)
|
|
||||||
|
|
||||||
pub = docutils.core.Publisher(
|
|
||||||
writer=self.writer_class(),
|
|
||||||
destination_class=docutils.io.StringOutput)
|
|
||||||
pub.set_components('standalone', 'restructuredtext', 'html')
|
|
||||||
pub.process_programmatic_settings(None, extra_params, None)
|
|
||||||
pub.set_source(source_path=source_path)
|
|
||||||
pub.publish()
|
|
||||||
return pub
|
|
||||||
|
|
||||||
def read(self, source_path):
|
|
||||||
"""Parses restructured text"""
|
|
||||||
pub = self._get_publisher(source_path)
|
|
||||||
parts = pub.writer.parts
|
|
||||||
content = parts.get('body')
|
|
||||||
|
|
||||||
metadata = self._parse_metadata(pub.document, source_path)
|
|
||||||
metadata.setdefault('title', parts.get('title'))
|
|
||||||
|
|
||||||
return content, metadata
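
Any key supplied through `DOCUTILS_SETTINGS` overrides the defaults assembled in `_get_publisher` before publishing. A hypothetical override using standard docutils settings:

```python
# Illustrative pelicanconf.py fragment.
DOCUTILS_SETTINGS = {
    'initial_header_level': '3',  # demote section titles one more level
    'smart_quotes': True,         # enable docutils' smart-quotes transform
}
```
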
|
|
||||||
|
|
||||||
|
|
||||||
class MarkdownReader(BaseReader):
|
|
||||||
"""Reader for Markdown files"""
|
|
||||||
|
|
||||||
enabled = bool(Markdown)
|
|
||||||
file_extensions = ['md', 'markdown', 'mkd', 'mdown']
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
settings = self.settings['MARKDOWN']
|
|
||||||
settings.setdefault('extension_configs', {})
|
|
||||||
settings.setdefault('extensions', [])
|
|
||||||
for extension in settings['extension_configs'].keys():
|
|
||||||
if extension not in settings['extensions']:
|
|
||||||
settings['extensions'].append(extension)
|
|
||||||
if 'markdown.extensions.meta' not in settings['extensions']:
|
|
||||||
settings['extensions'].append('markdown.extensions.meta')
|
|
||||||
self._source_path = None
|
|
||||||
|
|
||||||
def _parse_metadata(self, meta):
|
|
||||||
"""Return the dict containing document metadata"""
|
|
||||||
formatted_fields = self.settings['FORMATTED_FIELDS']
|
|
||||||
|
|
||||||
# prevent metadata extraction in fields
|
|
||||||
self._md.preprocessors.deregister('meta')
|
|
||||||
|
|
||||||
output = {}
|
|
||||||
for name, value in meta.items():
|
|
||||||
name = name.lower()
|
|
||||||
if name in formatted_fields:
|
|
||||||
# formatted metadata is a special case: join all list values
|
|
||||||
formatted_values = "\n".join(value)
|
|
||||||
# reset the markdown instance to clear any state
|
|
||||||
self._md.reset()
|
|
||||||
formatted = self._md.convert(formatted_values)
|
|
||||||
output[name] = self.process_metadata(name, formatted)
|
|
||||||
elif not DUPLICATES_DEFINITIONS_ALLOWED.get(name, True):
|
|
||||||
if len(value) > 1:
|
|
||||||
logger.warning(
|
|
||||||
'Duplicate definition of `%s` '
|
|
||||||
'for %s. Using first one.',
|
|
||||||
name, self._source_path)
|
|
||||||
output[name] = self.process_metadata(name, value[0])
|
|
||||||
elif len(value) > 1:
|
|
||||||
# handle list metadata as list of string
|
|
||||||
output[name] = self.process_metadata(name, value)
|
|
||||||
else:
|
|
||||||
# otherwise, handle metadata as single string
|
|
||||||
output[name] = self.process_metadata(name, value[0])
|
|
||||||
return output
|
|
||||||
|
|
||||||
    def read(self, source_path):
        """Parse content and metadata of markdown files"""

        self._source_path = source_path
        self._md = Markdown(**self.settings['MARKDOWN'])
        with pelican_open(source_path) as text:
            content = self._md.convert(text)

        if hasattr(self._md, 'Meta'):
            metadata = self._parse_metadata(self._md.Meta)
        else:
            metadata = {}
        return content, metadata


class HTMLReader(BaseReader):
    """Parses HTML files as input, looking for meta, title, and body tags"""

    file_extensions = ['htm', 'html']
    enabled = True

    class _HTMLParser(HTMLParser):
        def __init__(self, settings, filename):
            super().__init__(convert_charrefs=False)
            self.body = ''
            self.metadata = {}
            self.settings = settings

            self._data_buffer = ''

            self._filename = filename

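            # Parser state: flags tracking which part of the document the
            # parser is currently inside.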
            self._in_top_level = True
            self._in_head = False
            self._in_title = False
            self._in_body = False
            self._in_tags = False

        def handle_starttag(self, tag, attrs):
            if tag == 'head' and self._in_top_level:
                self._in_top_level = False
                self._in_head = True
            elif tag == 'title' and self._in_head:
                self._in_title = True
                self._data_buffer = ''
            elif tag == 'body' and self._in_top_level:
                self._in_top_level = False
                self._in_body = True
                self._data_buffer = ''
            elif tag == 'meta' and self._in_head:
                self._handle_meta_tag(attrs)

            elif self._in_body:
                self._data_buffer += self.build_tag(tag, attrs, False)

        def handle_endtag(self, tag):
            if tag == 'head':
                if self._in_head:
                    self._in_head = False
                    self._in_top_level = True
            elif self._in_head and tag == 'title':
                self._in_title = False
                self.metadata['title'] = self._data_buffer
            elif tag == 'body':
                self.body = self._data_buffer
                self._in_body = False
                self._in_top_level = True
            elif self._in_body:
                self._data_buffer += '</{}>'.format(escape(tag))

        def handle_startendtag(self, tag, attrs):
            if tag == 'meta' and self._in_head:
                self._handle_meta_tag(attrs)
            if self._in_body:
                self._data_buffer += self.build_tag(tag, attrs, True)

        def handle_comment(self, data):
            self._data_buffer += '<!--{}-->'.format(data)

        def handle_data(self, data):
            self._data_buffer += data

        def handle_entityref(self, data):
            self._data_buffer += '&{};'.format(data)

        def handle_charref(self, data):
            self._data_buffer += '&#{};'.format(data)

        def build_tag(self, tag, attrs, close_tag):
            result = '<{}'.format(escape(tag))
            for k, v in attrs:
                result += ' ' + escape(k)
                if v is not None:
                    # If the attribute value contains a double quote, surround
                    # with single quotes, otherwise use double quotes.
                    if '"' in v:
                        result += "='{}'".format(escape(v, quote=False))
                    else:
                        result += '="{}"'.format(escape(v, quote=False))
            if close_tag:
                return result + ' />'
            return result + '>'

        def _handle_meta_tag(self, attrs):
            name = self._attr_value(attrs, 'name')
            if name is None:
                attr_list = ['{}="{}"'.format(k, v) for k, v in attrs]
                attr_serialized = ', '.join(attr_list)
                logger.warning("Meta tag in file %s does not have a 'name' "
                               "attribute, skipping. Attributes: %s",
                               self._filename, attr_serialized)
                return
            name = name.lower()
            contents = self._attr_value(attrs, 'content', '')
            if not contents:
                contents = self._attr_value(attrs, 'contents', '')
                if contents:
                    logger.warning(
                        "Meta tag attribute 'contents' used in file %s, should"
                        " be changed to 'content'",
                        self._filename,
                        extra={'limit_msg': "Other files have meta tag "
                                            "attribute 'contents' that should "
                                            "be changed to 'content'"})

            if name == 'keywords':
                name = 'tags'

            if name in self.metadata:
                # if this metadata already exists (i.e. a previous tag with
                # the same name has already been specified), either convert
                # it to a list or append to the existing list
                if isinstance(self.metadata[name], list):
                    self.metadata[name].append(contents)
                else:
                    self.metadata[name] = [self.metadata[name], contents]
            else:
                self.metadata[name] = contents

        @classmethod
        def _attr_value(cls, attrs, name, default=None):
            return next((x[1] for x in attrs if x[0] == name), default)

    def read(self, filename):
        """Parse content and metadata of HTML files"""
        with pelican_open(filename) as content:
            parser = self._HTMLParser(self.settings, filename)
            parser.feed(content)
            parser.close()

        metadata = {}
        for k in parser.metadata:
            metadata[k] = self.process_metadata(k, parser.metadata[k])
        return parser.body, metadata


class Readers(FileStampDataCacher):
    """Interface for all readers.

    This class contains a mapping of file extensions / Reader classes, to know
    which Reader class must be used to read a file (based on its extension).
    This is customizable both with the 'READERS' setting, and with the
    'readers_init' signal for plugins.

    """

    def __init__(self, settings=None, cache_name=''):
        self.settings = settings or {}
        self.readers = {}
        self.reader_classes = {}

        for cls in [BaseReader] + BaseReader.__subclasses__():
            if not cls.enabled:
                logger.debug('Missing dependencies for %s',
                             ', '.join(cls.file_extensions))
                continue

            for ext in cls.file_extensions:
                self.reader_classes[ext] = cls

        if self.settings['READERS']:
            self.reader_classes.update(self.settings['READERS'])

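        # The readers_init signal lets plugins adjust reader_classes before
        # the reader instances are created below.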
        signals.readers_init.send(self)

        for fmt, reader_class in self.reader_classes.items():
            if not reader_class:
                continue

            self.readers[fmt] = reader_class(self.settings)

        # set up caching
        cache_this_level = (cache_name != '' and
                            self.settings['CONTENT_CACHING_LAYER'] == 'reader')
        caching_policy = cache_this_level and self.settings['CACHE_CONTENT']
        load_policy = cache_this_level and self.settings['LOAD_CONTENT_CACHE']
        super().__init__(settings, cache_name, caching_policy, load_policy)

    @property
    def extensions(self):
        return self.readers.keys()

    def read_file(self, base_path, path, content_class=Page, fmt=None,
                  context=None, preread_signal=None, preread_sender=None,
                  context_signal=None, context_sender=None):
        """Return a content object parsed with the given format."""

        path = os.path.abspath(os.path.join(base_path, path))
        source_path = posixize_path(os.path.relpath(path, base_path))
        logger.debug(
            'Read file %s -> %s',
            source_path, content_class.__name__)

        if not fmt:
            _, ext = os.path.splitext(os.path.basename(path))
            fmt = ext[1:]

        if fmt not in self.readers:
            raise TypeError(
                'Pelican does not know how to parse %s', path)

        if preread_signal:
            logger.debug(
                'Signal %s.send(%s)',
                preread_signal.name, preread_sender)
            preread_signal.send(preread_sender)

        reader = self.readers[fmt]

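        # Build metadata in increasing order of precedence: site-wide
        # defaults, file-system dates and EXTRA_PATH_METADATA, the
        # FILENAME_METADATA/PATH_METADATA patterns, and finally whatever the
        # reader extracts from the file itself.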
        metadata = _filter_discardable_metadata(default_metadata(
            settings=self.settings, process=reader.process_metadata))
        metadata.update(path_metadata(
            full_path=path, source_path=source_path,
            settings=self.settings))
        metadata.update(_filter_discardable_metadata(parse_path_metadata(
            source_path=source_path, settings=self.settings,
            process=reader.process_metadata)))
        reader_name = reader.__class__.__name__
        metadata['reader'] = reader_name.replace('Reader', '').lower()

        content, reader_metadata = self.get_cached_data(path, (None, None))
        if content is None:
            content, reader_metadata = reader.read(path)
            self.cache_data(path, (content, reader_metadata))
        metadata.update(_filter_discardable_metadata(reader_metadata))

        if content:
            # find images with empty alt
            find_empty_alt(content, path)

        # optionally run the content through Typogrify if enabled
        if self.settings['TYPOGRIFY']:
            from typogrify.filters import typogrify
            import smartypants

            typogrify_dashes = self.settings['TYPOGRIFY_DASHES']
            if typogrify_dashes == 'oldschool':
                smartypants.Attr.default = smartypants.Attr.set2
            elif typogrify_dashes == 'oldschool_inverted':
                smartypants.Attr.default = smartypants.Attr.set3
            else:
                smartypants.Attr.default = smartypants.Attr.set1

            # Tell `smartypants` to also replace &quot; HTML entities with
            # smart quotes. This is necessary because Docutils has already
            # replaced double quotes with said entities by the time we run
            # this filter.
            smartypants.Attr.default |= smartypants.Attr.w

            def typogrify_wrapper(text):
                """Ensures ignore_tags feature is backward compatible"""
                try:
                    return typogrify(
                        text,
                        self.settings['TYPOGRIFY_IGNORE_TAGS'])
                except TypeError:
                    return typogrify(text)

            if content:
                content = typogrify_wrapper(content)

            if 'title' in metadata:
                metadata['title'] = typogrify_wrapper(metadata['title'])

            if 'summary' in metadata:
                metadata['summary'] = typogrify_wrapper(metadata['summary'])

        if context_signal:
            logger.debug(
                'Signal %s.send(%s, <metadata>)',
                context_signal.name,
                context_sender)
            context_signal.send(context_sender, metadata=metadata)

        return content_class(content=content, metadata=metadata,
                             settings=self.settings, source_path=path,
                             context=context)


def find_empty_alt(content, path):
    """Find images with empty alt

    Create warnings for all images with empty alt (up to a certain number),
    as they are really likely to be accessibility flaws.

    """
    imgs = re.compile(r"""
        (?:
            # src before alt
            <img
            [^\>]*
            src=(['"])(.*?)\1
            [^\>]*
            alt=(['"])\3
        )|(?:
            # alt before src
            <img
            [^\>]*
            alt=(['"])\4
            [^\>]*
            src=(['"])(.*?)\5
        )
        """, re.X)
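    # findall() returns one tuple of all groups per match; the src value is in
    # position 1 when src precedes alt and in position 5 when alt comes first.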
    for match in re.findall(imgs, content):
        logger.warning(
            'Empty alt attribute for image %s in %s',
            os.path.basename(match[1] + match[5]), path,
            extra={'limit_msg': 'Other images have empty alt attributes'})


def default_metadata(settings=None, process=None):
    metadata = {}
    if settings:
        for name, value in dict(settings.get('DEFAULT_METADATA', {})).items():
            if process:
                value = process(name, value)
            metadata[name] = value
        if 'DEFAULT_CATEGORY' in settings:
            value = settings['DEFAULT_CATEGORY']
            if process:
                value = process('category', value)
            metadata['category'] = value
        if settings.get('DEFAULT_DATE', None) and \
                settings['DEFAULT_DATE'] != 'fs':
            if isinstance(settings['DEFAULT_DATE'], str):
                metadata['date'] = get_date(settings['DEFAULT_DATE'])
            else:
                metadata['date'] = datetime.datetime(*settings['DEFAULT_DATE'])
    return metadata


def path_metadata(full_path, source_path, settings=None):
    metadata = {}
    if settings:
        if settings.get('DEFAULT_DATE', None) == 'fs':
            metadata['date'] = datetime.datetime.fromtimestamp(
                os.stat(full_path).st_mtime)
            metadata['modified'] = metadata['date']

        # Apply EXTRA_PATH_METADATA for the source path and the paths of any
        # parent directories. Sorting EPM first ensures that the most specific
        # path wins conflicts.

        epm = settings.get('EXTRA_PATH_METADATA', {})
        for path, meta in sorted(epm.items()):
            # Enforce a trailing slash when checking for parent directories.
            # This prevents false positives when one file or directory's name
            # is a prefix of another's.
            dirpath = posixize_path(os.path.join(path, ''))
            if source_path == path or source_path.startswith(dirpath):
                metadata.update(meta)

    return metadata


def parse_path_metadata(source_path, settings=None, process=None):
    r"""Extract a metadata dictionary from a file's path

    >>> import pprint
    >>> settings = {
    ...     'FILENAME_METADATA': r'(?P<slug>[^.]*).*',
    ...     'PATH_METADATA':
    ...         r'(?P<category>[^/]*)/(?P<date>\d{4}-\d{2}-\d{2})/.*',
    ...     }
    >>> reader = BaseReader(settings=settings)
    >>> metadata = parse_path_metadata(
    ...     source_path='my-cat/2013-01-01/my-slug.html',
    ...     settings=settings,
    ...     process=reader.process_metadata)
    >>> pprint.pprint(metadata)  # doctest: +ELLIPSIS
    {'category': <pelican.urlwrappers.Category object at ...>,
     'date': datetime.datetime(2013, 1, 1, 0, 0),
     'slug': 'my-slug'}
    """
    metadata = {}
    dirname, basename = os.path.split(source_path)
    base, ext = os.path.splitext(basename)
    subdir = os.path.basename(dirname)
    if settings:
        checks = []
        for key, data in [('FILENAME_METADATA', base),
                          ('PATH_METADATA', source_path)]:
            checks.append((settings.get(key, None), data))
        if settings.get('USE_FOLDER_AS_CATEGORY', None):
            checks.append(('(?P<category>.*)', subdir))
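        # First match wins: FILENAME_METADATA and PATH_METADATA take
        # precedence over the folder-as-category fallback, and a key is only
        # set if it has not already been captured.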
        for regexp, data in checks:
            if regexp and data:
                match = re.match(regexp, data)
                if match:
                    # .items() for py3k compat.
                    for k, v in match.groupdict().items():
                        k = k.lower()  # metadata must be lowercase
                        if v is not None and k not in metadata:
                            if process:
                                v = process(k, v)
                            metadata[k] = v
    return metadata

@ -1,90 +0,0 @@
import re

from docutils import nodes, utils
from docutils.parsers.rst import Directive, directives, roles

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import TextLexer, get_lexer_by_name

import pelican.settings as pys


class Pygments(Directive):
    """ Source code syntax highlighting.
    """
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = True
    option_spec = {
        'anchorlinenos': directives.flag,
        'classprefix': directives.unchanged,
        'hl_lines': directives.unchanged,
        'lineanchors': directives.unchanged,
        'linenos': directives.unchanged,
        'linenospecial': directives.nonnegative_int,
        'linenostart': directives.nonnegative_int,
        'linenostep': directives.nonnegative_int,
        'lineseparator': directives.unchanged,
        'linespans': directives.unchanged,
        'nobackground': directives.flag,
        'nowrap': directives.flag,
        'tagsfile': directives.unchanged,
        'tagurlformat': directives.unchanged,
    }
    has_content = True

    def run(self):
        self.assert_has_content()
        try:
            lexer = get_lexer_by_name(self.arguments[0])
        except ValueError:
            # no lexer found - use the text one instead of an exception
            lexer = TextLexer()

        # Fetch the defaults
        if pys.PYGMENTS_RST_OPTIONS is not None:
            for k, v in pys.PYGMENTS_RST_OPTIONS.items():
                # Locally set options override the defaults
                if k not in self.options:
                    self.options[k] = v

        if ('linenos' in self.options and
                self.options['linenos'] not in ('table', 'inline')):
            if self.options['linenos'] == 'none':
                self.options.pop('linenos')
            else:
                self.options['linenos'] = 'table'

        for flag in ('nowrap', 'nobackground', 'anchorlinenos'):
            if flag in self.options:
                self.options[flag] = True

        # noclasses should already default to False, but just in case...
        formatter = HtmlFormatter(noclasses=False, **self.options)
        parsed = highlight('\n'.join(self.content), lexer, formatter)
        return [nodes.raw('', parsed, format='html')]


directives.register_directive('code-block', Pygments)
directives.register_directive('sourcecode', Pygments)


_abbr_re = re.compile(r'\((.*)\)$', re.DOTALL)


class abbreviation(nodes.Inline, nodes.TextElement):
    pass


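# The abbr role turns "ABBR (full explanation)" text into an abbreviation
# node, keeping the parenthesised part as the explanation.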
def abbr_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
    text = utils.unescape(text)
    m = _abbr_re.search(text)
    if m is None:
        return [abbreviation(text, text)], []
    abbr = text[:m.start()].strip()
    expl = m.group(1)
    return [abbreviation(abbr, abbr, explanation=expl)], []


roles.register_local_role('abbr', abbr_role)

@ -1,138 +0,0 @@
|
||||||
import argparse
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import posixpath
|
|
||||||
import ssl
|
|
||||||
import sys
|
|
||||||
import urllib
|
|
||||||
from http import server
|
|
||||||
|
|
||||||
try:
|
|
||||||
from magic import from_file as magic_from_file
|
|
||||||
except ImportError:
|
|
||||||
magic_from_file = None
|
|
||||||
|
|
||||||
from pelican.log import init as init_logging
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def parse_arguments():
|
|
||||||
parser = argparse.ArgumentParser(
|
|
||||||
description='Pelican Development Server',
|
|
||||||
formatter_class=argparse.ArgumentDefaultsHelpFormatter
|
|
||||||
)
|
|
||||||
parser.add_argument("port", default=8000, type=int, nargs="?",
|
|
||||||
help="Port to Listen On")
|
|
||||||
parser.add_argument("server", default="", nargs="?",
|
|
||||||
help="Interface to Listen On")
|
|
||||||
parser.add_argument('--ssl', action="store_true",
|
|
||||||
help='Activate SSL listener')
|
|
||||||
parser.add_argument('--cert', default="./cert.pem", nargs="?",
|
|
||||||
help='Path to certificate file. ' +
|
|
||||||
'Relative to current directory')
|
|
||||||
parser.add_argument('--key', default="./key.pem", nargs="?",
|
|
||||||
help='Path to certificate key file. ' +
|
|
||||||
'Relative to current directory')
|
|
||||||
parser.add_argument('--path', default=".",
|
|
||||||
help='Path to pelican source directory to serve. ' +
|
|
||||||
'Relative to current directory')
|
|
||||||
return parser.parse_args()
|
|
||||||
|
|
||||||
|
|
||||||
class ComplexHTTPRequestHandler(server.SimpleHTTPRequestHandler):
|
|
||||||
SUFFIXES = ['.html', '/index.html', '/', '']
|
|
||||||
|
|
||||||
def translate_path(self, path):
|
|
||||||
# abandon query parameters
|
|
||||||
path = path.split('?', 1)[0]
|
|
||||||
path = path.split('#', 1)[0]
|
|
||||||
# Don't forget explicit trailing slash when normalizing. Issue17324
|
|
||||||
trailing_slash = path.rstrip().endswith('/')
|
|
||||||
path = urllib.parse.unquote(path)
|
|
||||||
path = posixpath.normpath(path)
|
|
||||||
words = path.split('/')
|
|
||||||
words = filter(None, words)
|
|
||||||
path = self.base_path
|
|
||||||
for word in words:
|
|
||||||
if os.path.dirname(word) or word in (os.curdir, os.pardir):
|
|
||||||
# Ignore components that are not a simple file/directory name
|
|
||||||
continue
|
|
||||||
path = os.path.join(path, word)
|
|
||||||
if trailing_slash:
|
|
||||||
path += '/'
|
|
||||||
return path
|
|
||||||
|
|
||||||
def do_GET(self):
|
|
||||||
# cut off a query string
|
|
||||||
original_path = self.path.split('?', 1)[0]
|
|
||||||
# try to find file
|
|
||||||
self.path = self.get_path_that_exists(original_path)
|
|
||||||
|
|
||||||
if not self.path:
|
|
||||||
return
|
|
||||||
|
|
||||||
server.SimpleHTTPRequestHandler.do_GET(self)
|
|
||||||
|
|
||||||
def get_path_that_exists(self, original_path):
|
|
||||||
# Try to strip trailing slash
|
|
||||||
original_path = original_path.rstrip('/')
|
|
||||||
# Try to detect file by applying various suffixes
|
|
||||||
tries = []
|
|
||||||
for suffix in self.SUFFIXES:
|
|
||||||
path = original_path + suffix
|
|
||||||
if os.path.exists(self.translate_path(path)):
|
|
||||||
return path
|
|
||||||
tries.append(path)
|
|
||||||
logger.warning("Unable to find `%s` or variations:\n%s",
|
|
||||||
original_path,
|
|
||||||
'\n'.join(tries))
|
|
||||||
return None
|
|
||||||
|
|
||||||
def guess_type(self, path):
|
|
||||||
"""Guess at the mime type for the specified file.
|
|
||||||
"""
|
|
||||||
mimetype = server.SimpleHTTPRequestHandler.guess_type(self, path)
|
|
||||||
|
|
||||||
# If the default guess is too generic, try the python-magic library
|
|
||||||
if mimetype == 'application/octet-stream' and magic_from_file:
|
|
||||||
mimetype = magic_from_file(path, mime=True)
|
|
||||||
|
|
||||||
return mimetype
|
|
||||||
|
|
||||||
|
|
||||||
class RootedHTTPServer(server.HTTPServer):
|
|
||||||
def __init__(self, base_path, *args, **kwargs):
|
|
||||||
server.HTTPServer.__init__(self, *args, **kwargs)
|
|
||||||
self.RequestHandlerClass.base_path = base_path
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
init_logging(level=logging.INFO)
|
|
||||||
logger.warning("'python -m pelican.server' is deprecated.\nThe "
|
|
||||||
"Pelican development server should be run via "
|
|
||||||
"'pelican --listen' or 'pelican -l'.\nThis can be combined "
|
|
||||||
"with regeneration as 'pelican -lr'.\nRerun 'pelican-"
|
|
||||||
"quickstart' to get new Makefile and tasks.py files.")
|
|
||||||
args = parse_arguments()
|
|
||||||
RootedHTTPServer.allow_reuse_address = True
|
|
||||||
try:
|
|
||||||
httpd = RootedHTTPServer(
|
|
||||||
args.path, (args.server, args.port), ComplexHTTPRequestHandler)
|
|
||||||
if args.ssl:
|
|
||||||
httpd.socket = ssl.wrap_socket(
|
|
||||||
httpd.socket, keyfile=args.key,
|
|
||||||
certfile=args.cert, server_side=True)
|
|
||||||
except ssl.SSLError as e:
|
|
||||||
logger.error("Couldn't open certificate file %s or key file %s",
|
|
||||||
args.cert, args.key)
|
|
||||||
logger.error("Could not listen on port %s, server %s.",
|
|
||||||
args.port, args.server)
|
|
||||||
sys.exit(getattr(e, 'exitcode', 1))
|
|
||||||
|
|
||||||
logger.info("Serving at port %s, server %s.",
|
|
||||||
args.port, args.server)
|
|
||||||
try:
|
|
||||||
httpd.serve_forever()
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
logger.info("Shutting down server.")
|
|
||||||
httpd.socket.close()
|
|
||||||
|
|
@ -1,660 +0,0 @@
|
||||||
import copy
|
|
||||||
import importlib.util
|
|
||||||
import inspect
|
|
||||||
import locale
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
from os.path import isabs
|
|
||||||
|
|
||||||
from pelican.log import LimitFilter
|
|
||||||
|
|
||||||
|
|
||||||
def load_source(name, path):
|
|
||||||
spec = importlib.util.spec_from_file_location(name, path)
|
|
||||||
mod = importlib.util.module_from_spec(spec)
|
|
||||||
spec.loader.exec_module(mod)
|
|
||||||
return mod
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
DEFAULT_THEME = os.path.join(os.path.dirname(os.path.abspath(__file__)),
|
|
||||||
'themes', 'notmyidea')
|
|
||||||
DEFAULT_CONFIG = {
|
|
||||||
'PATH': os.curdir,
|
|
||||||
'ARTICLE_PATHS': [''],
|
|
||||||
'ARTICLE_EXCLUDES': [],
|
|
||||||
'PAGE_PATHS': ['pages'],
|
|
||||||
'PAGE_EXCLUDES': [],
|
|
||||||
'THEME': DEFAULT_THEME,
|
|
||||||
'OUTPUT_PATH': 'output',
|
|
||||||
'READERS': {},
|
|
||||||
'STATIC_PATHS': ['images'],
|
|
||||||
'STATIC_EXCLUDES': [],
|
|
||||||
'STATIC_EXCLUDE_SOURCES': True,
|
|
||||||
'THEME_STATIC_DIR': 'theme',
|
|
||||||
'THEME_STATIC_PATHS': ['static', ],
|
|
||||||
'FEED_ALL_ATOM': 'feeds/all.atom.xml',
|
|
||||||
'CATEGORY_FEED_ATOM': 'feeds/{slug}.atom.xml',
|
|
||||||
'AUTHOR_FEED_ATOM': 'feeds/{slug}.atom.xml',
|
|
||||||
'AUTHOR_FEED_RSS': 'feeds/{slug}.rss.xml',
|
|
||||||
'TRANSLATION_FEED_ATOM': 'feeds/all-{lang}.atom.xml',
|
|
||||||
'FEED_MAX_ITEMS': '',
|
|
||||||
'RSS_FEED_SUMMARY_ONLY': True,
|
|
||||||
'SITEURL': '',
|
|
||||||
'SITENAME': 'A Pelican Blog',
|
|
||||||
'DISPLAY_PAGES_ON_MENU': True,
|
|
||||||
'DISPLAY_CATEGORIES_ON_MENU': True,
|
|
||||||
'DOCUTILS_SETTINGS': {},
|
|
||||||
'OUTPUT_SOURCES': False,
|
|
||||||
'OUTPUT_SOURCES_EXTENSION': '.text',
|
|
||||||
'USE_FOLDER_AS_CATEGORY': True,
|
|
||||||
'DEFAULT_CATEGORY': 'misc',
|
|
||||||
'WITH_FUTURE_DATES': True,
|
|
||||||
'CSS_FILE': 'main.css',
|
|
||||||
'NEWEST_FIRST_ARCHIVES': True,
|
|
||||||
'REVERSE_CATEGORY_ORDER': False,
|
|
||||||
'DELETE_OUTPUT_DIRECTORY': False,
|
|
||||||
'OUTPUT_RETENTION': [],
|
|
||||||
'INDEX_SAVE_AS': 'index.html',
|
|
||||||
'ARTICLE_URL': '{slug}.html',
|
|
||||||
'ARTICLE_SAVE_AS': '{slug}.html',
|
|
||||||
'ARTICLE_ORDER_BY': 'reversed-date',
|
|
||||||
'ARTICLE_LANG_URL': '{slug}-{lang}.html',
|
|
||||||
'ARTICLE_LANG_SAVE_AS': '{slug}-{lang}.html',
|
|
||||||
'DRAFT_URL': 'drafts/{slug}.html',
|
|
||||||
'DRAFT_SAVE_AS': 'drafts/{slug}.html',
|
|
||||||
'DRAFT_LANG_URL': 'drafts/{slug}-{lang}.html',
|
|
||||||
'DRAFT_LANG_SAVE_AS': 'drafts/{slug}-{lang}.html',
|
|
||||||
'PAGE_URL': 'pages/{slug}.html',
|
|
||||||
'PAGE_SAVE_AS': 'pages/{slug}.html',
|
|
||||||
'PAGE_ORDER_BY': 'basename',
|
|
||||||
'PAGE_LANG_URL': 'pages/{slug}-{lang}.html',
|
|
||||||
'PAGE_LANG_SAVE_AS': 'pages/{slug}-{lang}.html',
|
|
||||||
'DRAFT_PAGE_URL': 'drafts/pages/{slug}.html',
|
|
||||||
'DRAFT_PAGE_SAVE_AS': 'drafts/pages/{slug}.html',
|
|
||||||
'DRAFT_PAGE_LANG_URL': 'drafts/pages/{slug}-{lang}.html',
|
|
||||||
'DRAFT_PAGE_LANG_SAVE_AS': 'drafts/pages/{slug}-{lang}.html',
|
|
||||||
'STATIC_URL': '{path}',
|
|
||||||
'STATIC_SAVE_AS': '{path}',
|
|
||||||
'STATIC_CREATE_LINKS': False,
|
|
||||||
'STATIC_CHECK_IF_MODIFIED': False,
|
|
||||||
'CATEGORY_URL': 'category/{slug}.html',
|
|
||||||
'CATEGORY_SAVE_AS': 'category/{slug}.html',
|
|
||||||
'TAG_URL': 'tag/{slug}.html',
|
|
||||||
'TAG_SAVE_AS': 'tag/{slug}.html',
|
|
||||||
'AUTHOR_URL': 'author/{slug}.html',
|
|
||||||
'AUTHOR_SAVE_AS': 'author/{slug}.html',
|
|
||||||
'PAGINATION_PATTERNS': [
|
|
||||||
(1, '{name}{extension}', '{name}{extension}'),
|
|
||||||
(2, '{name}{number}{extension}', '{name}{number}{extension}'),
|
|
||||||
],
|
|
||||||
'YEAR_ARCHIVE_URL': '',
|
|
||||||
'YEAR_ARCHIVE_SAVE_AS': '',
|
|
||||||
'MONTH_ARCHIVE_URL': '',
|
|
||||||
'MONTH_ARCHIVE_SAVE_AS': '',
|
|
||||||
'DAY_ARCHIVE_URL': '',
|
|
||||||
'DAY_ARCHIVE_SAVE_AS': '',
|
|
||||||
'RELATIVE_URLS': False,
|
|
||||||
'DEFAULT_LANG': 'en',
|
|
||||||
'ARTICLE_TRANSLATION_ID': 'slug',
|
|
||||||
'PAGE_TRANSLATION_ID': 'slug',
|
|
||||||
'DIRECT_TEMPLATES': ['index', 'tags', 'categories', 'authors', 'archives'],
|
|
||||||
'THEME_TEMPLATES_OVERRIDES': [],
|
|
||||||
'PAGINATED_TEMPLATES': {'index': None, 'tag': None, 'category': None,
|
|
||||||
'author': None},
|
|
||||||
'PELICAN_CLASS': 'pelican.Pelican',
|
|
||||||
'DEFAULT_DATE_FORMAT': '%a %d %B %Y',
|
|
||||||
'DATE_FORMATS': {},
|
|
||||||
'MARKDOWN': {
|
|
||||||
'extension_configs': {
|
|
||||||
'markdown.extensions.codehilite': {'css_class': 'highlight'},
|
|
||||||
'markdown.extensions.extra': {},
|
|
||||||
'markdown.extensions.meta': {},
|
|
||||||
},
|
|
||||||
'output_format': 'html5',
|
|
||||||
},
|
|
||||||
'JINJA_FILTERS': {},
|
|
||||||
'JINJA_GLOBALS': {},
|
|
||||||
'JINJA_TESTS': {},
|
|
||||||
'JINJA_ENVIRONMENT': {
|
|
||||||
'trim_blocks': True,
|
|
||||||
'lstrip_blocks': True,
|
|
||||||
'extensions': [],
|
|
||||||
},
|
|
||||||
'LOG_FILTER': [],
|
|
||||||
'LOCALE': [''], # defaults to user locale
|
|
||||||
'DEFAULT_PAGINATION': False,
|
|
||||||
'DEFAULT_ORPHANS': 0,
|
|
||||||
'DEFAULT_METADATA': {},
|
|
||||||
'FILENAME_METADATA': r'(?P<date>\d{4}-\d{2}-\d{2}).*',
|
|
||||||
'PATH_METADATA': '',
|
|
||||||
'EXTRA_PATH_METADATA': {},
|
|
||||||
'ARTICLE_PERMALINK_STRUCTURE': '',
|
|
||||||
'TYPOGRIFY': False,
|
|
||||||
'TYPOGRIFY_IGNORE_TAGS': [],
|
|
||||||
'TYPOGRIFY_DASHES': 'default',
|
|
||||||
'SUMMARY_END_MARKER': '…',
|
|
||||||
'SUMMARY_MAX_LENGTH': 50,
|
|
||||||
'PLUGIN_PATHS': [],
|
|
||||||
'PLUGINS': None,
|
|
||||||
'PYGMENTS_RST_OPTIONS': {},
|
|
||||||
'TEMPLATE_PAGES': {},
|
|
||||||
'TEMPLATE_EXTENSIONS': ['.html'],
|
|
||||||
'IGNORE_FILES': ['.#*'],
|
|
||||||
'SLUG_REGEX_SUBSTITUTIONS': [
|
|
||||||
(r'[^\w\s-]', ''), # remove non-alphabetical/whitespace/'-' chars
|
|
||||||
(r'(?u)\A\s*', ''), # strip leading whitespace
|
|
||||||
(r'(?u)\s*\Z', ''), # strip trailing whitespace
|
|
||||||
(r'[-\s]+', '-'), # reduce multiple whitespace or '-' to single '-'
|
|
||||||
],
|
|
||||||
'INTRASITE_LINK_REGEX': '[{|](?P<what>.*?)[|}]',
|
|
||||||
'SLUGIFY_SOURCE': 'title',
|
|
||||||
'SLUGIFY_USE_UNICODE': False,
|
|
||||||
'SLUGIFY_PRESERVE_CASE': False,
|
|
||||||
'CACHE_CONTENT': False,
|
|
||||||
'CONTENT_CACHING_LAYER': 'reader',
|
|
||||||
'CACHE_PATH': 'cache',
|
|
||||||
'GZIP_CACHE': True,
|
|
||||||
'CHECK_MODIFIED_METHOD': 'mtime',
|
|
||||||
'LOAD_CONTENT_CACHE': False,
|
|
||||||
'WRITE_SELECTED': [],
|
|
||||||
'FORMATTED_FIELDS': ['summary'],
|
|
||||||
'PORT': 8000,
|
|
||||||
'BIND': '127.0.0.1',
|
|
||||||
}
|
|
||||||
|
|
||||||
PYGMENTS_RST_OPTIONS = None
|
|
||||||
|
|
||||||
|
|
||||||
def read_settings(path=None, override=None):
|
|
||||||
settings = override or {}
|
|
||||||
|
|
||||||
if path:
|
|
||||||
settings = dict(get_settings_from_file(path), **settings)
|
|
||||||
|
|
||||||
if settings:
|
|
||||||
settings = handle_deprecated_settings(settings)
|
|
||||||
|
|
||||||
if path:
|
|
||||||
# Make relative paths absolute
|
|
||||||
def getabs(maybe_relative, base_path=path):
|
|
||||||
if isabs(maybe_relative):
|
|
||||||
return maybe_relative
|
|
||||||
return os.path.abspath(os.path.normpath(os.path.join(
|
|
||||||
os.path.dirname(base_path), maybe_relative)))
|
|
||||||
|
|
||||||
for p in ['PATH', 'OUTPUT_PATH', 'THEME', 'CACHE_PATH']:
|
|
||||||
if settings.get(p) is not None:
|
|
||||||
absp = getabs(settings[p])
|
|
||||||
# THEME may be a name rather than a path
|
|
||||||
if p != 'THEME' or os.path.exists(absp):
|
|
||||||
settings[p] = absp
|
|
||||||
|
|
||||||
if settings.get('PLUGIN_PATHS') is not None:
|
|
||||||
settings['PLUGIN_PATHS'] = [getabs(pluginpath)
|
|
||||||
for pluginpath
|
|
||||||
in settings['PLUGIN_PATHS']]
|
|
||||||
|
|
||||||
settings = dict(copy.deepcopy(DEFAULT_CONFIG), **settings)
|
|
||||||
settings = configure_settings(settings)
|
|
||||||
|
|
||||||
# This is because there doesn't seem to be a way to pass extra
|
|
||||||
# parameters to docutils directive handlers, so we have to have a
|
|
||||||
# variable here that we'll import from within Pygments.run (see
|
|
||||||
# rstdirectives.py) to see what the user defaults were.
|
|
||||||
global PYGMENTS_RST_OPTIONS
|
|
||||||
PYGMENTS_RST_OPTIONS = settings.get('PYGMENTS_RST_OPTIONS', None)
|
|
||||||
return settings
|
|
||||||
|
|
||||||
|
|
||||||
def get_settings_from_module(module=None):
|
|
||||||
"""Loads settings from a module, returns a dictionary."""
|
|
||||||
|
|
||||||
context = {}
|
|
||||||
if module is not None:
|
|
||||||
context.update(
|
|
||||||
(k, v) for k, v in inspect.getmembers(module) if k.isupper())
|
|
||||||
return context
|
|
||||||
|
|
||||||
|
|
||||||
def get_settings_from_file(path):
|
|
||||||
"""Loads settings from a file path, returning a dict."""
|
|
||||||
|
|
||||||
name, ext = os.path.splitext(os.path.basename(path))
|
|
||||||
module = load_source(name, path)
|
|
||||||
return get_settings_from_module(module)
|
|
||||||
|
|
||||||
|
|
||||||
def get_jinja_environment(settings):
|
|
||||||
"""Sets the environment for Jinja"""
|
|
||||||
|
|
||||||
jinja_env = settings.setdefault('JINJA_ENVIRONMENT',
|
|
||||||
DEFAULT_CONFIG['JINJA_ENVIRONMENT'])
|
|
||||||
|
|
||||||
# Make sure we include the defaults if the user has set env variables
|
|
||||||
for key, value in DEFAULT_CONFIG['JINJA_ENVIRONMENT'].items():
|
|
||||||
if key not in jinja_env:
|
|
||||||
jinja_env[key] = value
|
|
||||||
|
|
||||||
return settings
|
|
||||||
|
|
||||||
|
|
||||||
def _printf_s_to_format_field(printf_string, format_field):
|
|
||||||
"""Tries to replace %s with {format_field} in the provided printf_string.
|
|
||||||
Raises ValueError in case of failure.
|
|
||||||
"""
|
|
||||||
TEST_STRING = 'PELICAN_PRINTF_S_DEPRECATION'
|
|
||||||
expected = printf_string % TEST_STRING
|
|
||||||
|
|
||||||
result = printf_string.replace('{', '{{').replace('}', '}}') \
|
|
||||||
% '{{{}}}'.format(format_field)
|
|
||||||
if result.format(**{format_field: TEST_STRING}) != expected:
|
|
||||||
raise ValueError('Failed to safely replace %s with {{{}}}'.format(
|
|
||||||
format_field))
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def handle_deprecated_settings(settings):
|
|
||||||
"""Converts deprecated settings and issues warnings. Issues an exception
|
|
||||||
if both old and new setting is specified.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# PLUGIN_PATH -> PLUGIN_PATHS
|
|
||||||
if 'PLUGIN_PATH' in settings:
|
|
||||||
logger.warning('PLUGIN_PATH setting has been replaced by '
|
|
||||||
'PLUGIN_PATHS, moving it to the new setting name.')
|
|
||||||
settings['PLUGIN_PATHS'] = settings['PLUGIN_PATH']
|
|
||||||
del settings['PLUGIN_PATH']
|
|
||||||
|
|
||||||
# PLUGIN_PATHS: str -> [str]
|
|
||||||
if isinstance(settings.get('PLUGIN_PATHS'), str):
|
|
||||||
logger.warning("Defining PLUGIN_PATHS setting as string "
|
|
||||||
"has been deprecated (should be a list)")
|
|
||||||
settings['PLUGIN_PATHS'] = [settings['PLUGIN_PATHS']]
|
|
||||||
|
|
||||||
# JINJA_EXTENSIONS -> JINJA_ENVIRONMENT > extensions
|
|
||||||
if 'JINJA_EXTENSIONS' in settings:
|
|
||||||
logger.warning('JINJA_EXTENSIONS setting has been deprecated, '
|
|
||||||
'moving it to JINJA_ENVIRONMENT setting.')
|
|
||||||
settings['JINJA_ENVIRONMENT']['extensions'] = \
|
|
||||||
settings['JINJA_EXTENSIONS']
|
|
||||||
del settings['JINJA_EXTENSIONS']
|
|
||||||
|
|
||||||
# {ARTICLE,PAGE}_DIR -> {ARTICLE,PAGE}_PATHS
|
|
||||||
for key in ['ARTICLE', 'PAGE']:
|
|
||||||
old_key = key + '_DIR'
|
|
||||||
new_key = key + '_PATHS'
|
|
||||||
if old_key in settings:
|
|
||||||
logger.warning(
|
|
||||||
'Deprecated setting %s, moving it to %s list',
|
|
||||||
old_key, new_key)
|
|
||||||
settings[new_key] = [settings[old_key]] # also make a list
|
|
||||||
del settings[old_key]
|
|
||||||
|
|
||||||
# EXTRA_TEMPLATES_PATHS -> THEME_TEMPLATES_OVERRIDES
|
|
||||||
if 'EXTRA_TEMPLATES_PATHS' in settings:
|
|
||||||
logger.warning('EXTRA_TEMPLATES_PATHS is deprecated use '
|
|
||||||
'THEME_TEMPLATES_OVERRIDES instead.')
|
|
||||||
if ('THEME_TEMPLATES_OVERRIDES' in settings and
|
|
||||||
settings['THEME_TEMPLATES_OVERRIDES']):
|
|
||||||
raise Exception(
|
|
||||||
'Setting both EXTRA_TEMPLATES_PATHS and '
|
|
||||||
'THEME_TEMPLATES_OVERRIDES is not permitted. Please move to '
|
|
||||||
'only setting THEME_TEMPLATES_OVERRIDES.')
|
|
||||||
settings['THEME_TEMPLATES_OVERRIDES'] = \
|
|
||||||
settings['EXTRA_TEMPLATES_PATHS']
|
|
||||||
del settings['EXTRA_TEMPLATES_PATHS']
|
|
||||||
|
|
||||||
# MD_EXTENSIONS -> MARKDOWN
|
|
||||||
if 'MD_EXTENSIONS' in settings:
|
|
||||||
logger.warning('MD_EXTENSIONS is deprecated use MARKDOWN '
|
|
||||||
'instead. Falling back to the default.')
|
|
||||||
settings['MARKDOWN'] = DEFAULT_CONFIG['MARKDOWN']
|
|
||||||
|
|
||||||
# LESS_GENERATOR -> Webassets plugin
|
|
||||||
# FILES_TO_COPY -> STATIC_PATHS, EXTRA_PATH_METADATA
|
|
||||||
for old, new, doc in [
|
|
||||||
('LESS_GENERATOR', 'the Webassets plugin', None),
|
|
||||||
('FILES_TO_COPY', 'STATIC_PATHS and EXTRA_PATH_METADATA',
|
|
||||||
'https://github.com/getpelican/pelican/'
|
|
||||||
'blob/master/docs/settings.rst#path-metadata'),
|
|
||||||
]:
|
|
||||||
if old in settings:
|
|
||||||
message = 'The {} setting has been removed in favor of {}'.format(
|
|
||||||
old, new)
|
|
||||||
if doc:
|
|
||||||
message += ', see {} for details'.format(doc)
|
|
||||||
logger.warning(message)
|
|
||||||
|
|
||||||
# PAGINATED_DIRECT_TEMPLATES -> PAGINATED_TEMPLATES
|
|
||||||
if 'PAGINATED_DIRECT_TEMPLATES' in settings:
|
|
||||||
message = 'The {} setting has been removed in favor of {}'.format(
|
|
||||||
'PAGINATED_DIRECT_TEMPLATES', 'PAGINATED_TEMPLATES')
|
|
||||||
logger.warning(message)
|
|
||||||
|
|
||||||
# set PAGINATED_TEMPLATES
|
|
||||||
if 'PAGINATED_TEMPLATES' not in settings:
|
|
||||||
settings['PAGINATED_TEMPLATES'] = {
|
|
||||||
'tag': None, 'category': None, 'author': None}
|
|
||||||
|
|
||||||
for t in settings['PAGINATED_DIRECT_TEMPLATES']:
|
|
||||||
if t not in settings['PAGINATED_TEMPLATES']:
|
|
||||||
settings['PAGINATED_TEMPLATES'][t] = None
|
|
||||||
del settings['PAGINATED_DIRECT_TEMPLATES']
|
|
||||||
|
|
||||||
# {SLUG,CATEGORY,TAG,AUTHOR}_SUBSTITUTIONS ->
|
|
||||||
# {SLUG,CATEGORY,TAG,AUTHOR}_REGEX_SUBSTITUTIONS
|
|
||||||
url_settings_url = \
|
|
||||||
'http://docs.getpelican.com/en/latest/settings.html#url-settings'
|
|
||||||
flavours = {'SLUG', 'CATEGORY', 'TAG', 'AUTHOR'}
|
|
||||||
old_values = {f: settings[f + '_SUBSTITUTIONS']
|
|
||||||
for f in flavours if f + '_SUBSTITUTIONS' in settings}
|
|
||||||
new_values = {f: settings[f + '_REGEX_SUBSTITUTIONS']
|
|
||||||
for f in flavours if f + '_REGEX_SUBSTITUTIONS' in settings}
|
|
||||||
if old_values and new_values:
|
|
||||||
raise Exception(
|
|
||||||
'Setting both {new_key} and {old_key} (or variants thereof) is '
|
|
||||||
'not permitted. Please move to only setting {new_key}.'
|
|
||||||
.format(old_key='SLUG_SUBSTITUTIONS',
|
|
||||||
new_key='SLUG_REGEX_SUBSTITUTIONS'))
|
|
||||||
if old_values:
|
|
||||||
message = ('{} and variants thereof are deprecated and will be '
|
|
||||||
'removed in the future. Please use {} and variants thereof '
|
|
||||||
'instead. Check {}.'
|
|
||||||
.format('SLUG_SUBSTITUTIONS', 'SLUG_REGEX_SUBSTITUTIONS',
|
|
||||||
url_settings_url))
|
|
||||||
logger.warning(message)
|
|
||||||
if old_values.get('SLUG'):
|
|
||||||
for f in {'CATEGORY', 'TAG'}:
|
|
||||||
if old_values.get(f):
|
|
||||||
old_values[f] = old_values['SLUG'] + old_values[f]
|
|
||||||
old_values['AUTHOR'] = old_values.get('AUTHOR', [])
|
|
||||||
for f in flavours:
|
|
||||||
if old_values.get(f) is not None:
|
|
||||||
regex_subs = []
|
|
||||||
# by default will replace non-alphanum characters
|
|
||||||
replace = True
|
|
||||||
for tpl in old_values[f]:
|
|
||||||
try:
|
|
||||||
src, dst, skip = tpl
|
|
||||||
if skip:
|
|
||||||
replace = False
|
|
||||||
except ValueError:
|
|
||||||
src, dst = tpl
|
|
||||||
regex_subs.append(
|
|
||||||
(re.escape(src), dst.replace('\\', r'\\')))
|
|
||||||
|
|
||||||
if replace:
|
|
||||||
regex_subs += [
|
|
||||||
(r'[^\w\s-]', ''),
|
|
||||||
(r'(?u)\A\s*', ''),
|
|
||||||
(r'(?u)\s*\Z', ''),
|
|
||||||
(r'[-\s]+', '-'),
|
|
||||||
]
|
|
||||||
else:
|
|
||||||
regex_subs += [
|
|
||||||
(r'(?u)\A\s*', ''),
|
|
||||||
(r'(?u)\s*\Z', ''),
|
|
||||||
]
|
|
||||||
settings[f + '_REGEX_SUBSTITUTIONS'] = regex_subs
|
|
||||||
settings.pop(f + '_SUBSTITUTIONS', None)
|
|
||||||
|
|
||||||
# `%s` -> '{slug}` or `{lang}` in FEED settings
|
|
||||||
for key in ['TRANSLATION_FEED_ATOM',
|
|
||||||
'TRANSLATION_FEED_RSS'
|
|
||||||
]:
|
|
||||||
if settings.get(key) and '%s' in settings[key]:
|
|
||||||
logger.warning('%%s usage in %s is deprecated, use {lang} '
|
|
||||||
'instead.', key)
|
|
||||||
try:
|
|
||||||
settings[key] = _printf_s_to_format_field(
|
|
||||||
settings[key], 'lang')
|
|
||||||
except ValueError:
|
|
||||||
logger.warning('Failed to convert %%s to {lang} for %s. '
|
|
||||||
'Falling back to default.', key)
|
|
||||||
settings[key] = DEFAULT_CONFIG[key]
|
|
||||||
for key in ['AUTHOR_FEED_ATOM',
|
|
||||||
'AUTHOR_FEED_RSS',
|
|
||||||
'CATEGORY_FEED_ATOM',
|
|
||||||
'CATEGORY_FEED_RSS',
|
|
||||||
'TAG_FEED_ATOM',
|
|
||||||
'TAG_FEED_RSS',
|
|
||||||
]:
|
|
||||||
if settings.get(key) and '%s' in settings[key]:
|
|
||||||
logger.warning('%%s usage in %s is deprecated, use {slug} '
|
|
||||||
'instead.', key)
|
|
||||||
try:
|
|
||||||
settings[key] = _printf_s_to_format_field(
|
|
||||||
settings[key], 'slug')
|
|
||||||
except ValueError:
|
|
||||||
logger.warning('Failed to convert %%s to {slug} for %s. '
|
|
||||||
'Falling back to default.', key)
|
|
||||||
settings[key] = DEFAULT_CONFIG[key]
|
|
||||||
|
|
||||||
# CLEAN_URLS
|
|
||||||
if settings.get('CLEAN_URLS', False):
|
|
||||||
logger.warning('Found deprecated `CLEAN_URLS` in settings.'
|
|
||||||
' Modifying the following settings for the'
|
|
||||||
' same behaviour.')
|
|
||||||
|
|
||||||
settings['ARTICLE_URL'] = '{slug}/'
|
|
||||||
settings['ARTICLE_LANG_URL'] = '{slug}-{lang}/'
|
|
||||||
settings['PAGE_URL'] = 'pages/{slug}/'
|
|
||||||
settings['PAGE_LANG_URL'] = 'pages/{slug}-{lang}/'
|
|
||||||
|
|
||||||
for setting in ('ARTICLE_URL', 'ARTICLE_LANG_URL', 'PAGE_URL',
|
|
||||||
'PAGE_LANG_URL'):
|
|
||||||
logger.warning("%s = '%s'", setting, settings[setting])
|
|
||||||
|
|
||||||
# AUTORELOAD_IGNORE_CACHE -> --ignore-cache
|
|
||||||
if settings.get('AUTORELOAD_IGNORE_CACHE'):
|
|
||||||
logger.warning('Found deprecated `AUTORELOAD_IGNORE_CACHE` in '
|
|
||||||
'settings. Use --ignore-cache instead.')
|
|
||||||
settings.pop('AUTORELOAD_IGNORE_CACHE')
|
|
||||||
|
|
||||||
# ARTICLE_PERMALINK_STRUCTURE
|
|
||||||
if settings.get('ARTICLE_PERMALINK_STRUCTURE', False):
|
|
||||||
logger.warning('Found deprecated `ARTICLE_PERMALINK_STRUCTURE` in'
|
|
||||||
' settings. Modifying the following settings for'
|
|
||||||
' the same behaviour.')
|
|
||||||
|
|
||||||
structure = settings['ARTICLE_PERMALINK_STRUCTURE']
|
|
||||||
|
|
||||||
# Convert %(variable) into {variable}.
|
|
||||||
structure = re.sub(r'%\((\w+)\)s', r'{\g<1>}', structure)
|
|
||||||
|
|
||||||
# Convert %x into {date:%x} for strftime
|
|
||||||
structure = re.sub(r'(%[A-z])', r'{date:\g<1>}', structure)
|
|
||||||
|
|
||||||
# Strip a / prefix
|
|
||||||
structure = re.sub('^/', '', structure)
|
|
||||||
|
|
||||||
for setting in ('ARTICLE_URL', 'ARTICLE_LANG_URL', 'PAGE_URL',
|
|
||||||
'PAGE_LANG_URL', 'DRAFT_URL', 'DRAFT_LANG_URL',
|
|
||||||
'ARTICLE_SAVE_AS', 'ARTICLE_LANG_SAVE_AS',
|
|
||||||
'DRAFT_SAVE_AS', 'DRAFT_LANG_SAVE_AS',
|
|
||||||
'PAGE_SAVE_AS', 'PAGE_LANG_SAVE_AS'):
|
|
||||||
settings[setting] = os.path.join(structure,
|
|
||||||
settings[setting])
|
|
||||||
logger.warning("%s = '%s'", setting, settings[setting])
|
|
||||||
|
|
||||||
# {,TAG,CATEGORY,TRANSLATION}_FEED -> {,TAG,CATEGORY,TRANSLATION}_FEED_ATOM
|
|
||||||
for new, old in [('FEED', 'FEED_ATOM'), ('TAG_FEED', 'TAG_FEED_ATOM'),
|
|
||||||
('CATEGORY_FEED', 'CATEGORY_FEED_ATOM'),
|
|
||||||
('TRANSLATION_FEED', 'TRANSLATION_FEED_ATOM')]:
|
|
||||||
if settings.get(new, False):
|
|
||||||
logger.warning(
|
|
||||||
'Found deprecated `%(new)s` in settings. Modify %(new)s '
|
|
||||||
'to %(old)s in your settings and theme for the same '
|
|
||||||
'behavior. Temporarily setting %(old)s for backwards '
|
|
||||||
'compatibility.',
|
|
||||||
{'new': new, 'old': old}
|
|
||||||
)
|
|
||||||
settings[old] = settings[new]
|
|
||||||
|
|
||||||
return settings
|
|
||||||
|
|
||||||
|
|
||||||
def configure_settings(settings):
|
|
||||||
"""Provide optimizations, error checking, and warnings for the given
|
|
||||||
settings.
|
|
||||||
Also, specify the log messages to be ignored.
|
|
||||||
"""
|
|
||||||
if 'PATH' not in settings or not os.path.isdir(settings['PATH']):
|
|
||||||
raise Exception('You need to specify a path containing the content'
|
|
||||||
' (see pelican --help for more information)')
|
|
||||||
|
|
||||||
# specify the log messages to be ignored
|
|
||||||
log_filter = settings.get('LOG_FILTER', DEFAULT_CONFIG['LOG_FILTER'])
|
|
||||||
LimitFilter._ignore.update(set(log_filter))
|
|
||||||
|
|
||||||
# lookup the theme in "pelican/themes" if the given one doesn't exist
|
|
||||||
if not os.path.isdir(settings['THEME']):
|
|
||||||
theme_path = os.path.join(
|
|
||||||
os.path.dirname(os.path.abspath(__file__)),
|
|
||||||
'themes',
|
|
||||||
settings['THEME'])
|
|
||||||
if os.path.exists(theme_path):
|
|
||||||
settings['THEME'] = theme_path
|
|
||||||
else:
|
|
||||||
raise Exception("Could not find the theme %s"
|
|
||||||
% settings['THEME'])
|
|
||||||
|
|
||||||
# make paths selected for writing absolute if necessary
|
|
||||||
settings['WRITE_SELECTED'] = [
|
|
||||||
os.path.abspath(path) for path in
|
|
||||||
settings.get('WRITE_SELECTED', DEFAULT_CONFIG['WRITE_SELECTED'])
|
|
||||||
]
|
|
||||||
|
|
||||||
# standardize strings to lowercase strings
|
|
||||||
for key in ['DEFAULT_LANG']:
|
|
||||||
if key in settings:
|
|
||||||
settings[key] = settings[key].lower()
|
|
||||||
|
|
||||||
# set defaults for Jinja environment
|
|
||||||
settings = get_jinja_environment(settings)
|
|
||||||
|
|
||||||
# standardize strings to lists
|
|
||||||
for key in ['LOCALE']:
|
|
||||||
if key in settings and isinstance(settings[key], str):
|
|
||||||
settings[key] = [settings[key]]
|
|
||||||
|
|
||||||
# check settings that must be a particular type
|
|
||||||
for key, types in [
|
|
||||||
('OUTPUT_SOURCES_EXTENSION', str),
|
|
||||||
('FILENAME_METADATA', str),
|
|
||||||
]:
|
|
||||||
if key in settings and not isinstance(settings[key], types):
|
|
||||||
value = settings.pop(key)
|
|
||||||
logger.warning(
|
|
||||||
'Detected misconfigured %s (%s), '
|
|
||||||
'falling back to the default (%s)',
|
|
||||||
key, value, DEFAULT_CONFIG[key])
|
|
||||||
|
|
||||||
# try to set the different locales, fallback on the default.
|
|
||||||
locales = settings.get('LOCALE', DEFAULT_CONFIG['LOCALE'])
|
|
||||||
|
|
||||||
for locale_ in locales:
|
|
||||||
try:
|
|
||||||
locale.setlocale(locale.LC_ALL, str(locale_))
|
|
||||||
break # break if it is successful
|
|
||||||
except locale.Error:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
logger.warning(
|
|
||||||
"Locale could not be set. Check the LOCALE setting, ensuring it "
|
|
||||||
"is valid and available on your system.")
|
|
||||||
|
|
||||||
if ('SITEURL' in settings):
|
|
||||||
# If SITEURL has a trailing slash, remove it and provide a warning
|
|
||||||
siteurl = settings['SITEURL']
|
|
||||||
if (siteurl.endswith('/')):
|
|
||||||
settings['SITEURL'] = siteurl[:-1]
|
|
||||||
logger.warning("Removed extraneous trailing slash from SITEURL.")
|
|
||||||
# If SITEURL is defined but FEED_DOMAIN isn't,
|
|
||||||
# set FEED_DOMAIN to SITEURL
|
|
||||||
if 'FEED_DOMAIN' not in settings:
|
|
||||||
settings['FEED_DOMAIN'] = settings['SITEURL']
|
|
||||||
|
|
||||||
# check content caching layer and warn of incompatibilities
|
|
||||||
if settings.get('CACHE_CONTENT', False) and \
|
|
||||||
settings.get('CONTENT_CACHING_LAYER', '') == 'generator' and \
|
|
||||||
settings.get('WITH_FUTURE_DATES', False):
|
|
||||||
logger.warning(
|
|
||||||
"WITH_FUTURE_DATES conflicts with CONTENT_CACHING_LAYER "
|
|
||||||
"set to 'generator', use 'reader' layer instead")
|
|
||||||
|
|
||||||
# Warn if feeds are generated with both SITEURL & FEED_DOMAIN undefined
|
|
||||||
feed_keys = [
|
|
||||||
'FEED_ATOM', 'FEED_RSS',
|
|
||||||
'FEED_ALL_ATOM', 'FEED_ALL_RSS',
|
|
||||||
'CATEGORY_FEED_ATOM', 'CATEGORY_FEED_RSS',
|
|
||||||
'AUTHOR_FEED_ATOM', 'AUTHOR_FEED_RSS',
|
|
||||||
'TAG_FEED_ATOM', 'TAG_FEED_RSS',
|
|
||||||
'TRANSLATION_FEED_ATOM', 'TRANSLATION_FEED_RSS',
|
|
||||||
]
|
|
||||||
|
|
||||||
if any(settings.get(k) for k in feed_keys):
|
|
||||||
if not settings.get('SITEURL'):
|
|
||||||
logger.warning('Feeds generated without SITEURL set properly may'
|
|
||||||
' not be valid')
|
|
||||||
|
|
||||||
if 'TIMEZONE' not in settings:
|
|
||||||
logger.warning(
|
|
||||||
'No timezone information specified in the settings. Assuming'
|
|
||||||
' your timezone is UTC for feed generation. Check '
|
|
||||||
'https://docs.getpelican.com/en/latest/settings.html#TIMEZONE '
|
|
||||||
'for more information')
|
|
||||||
|
|
||||||
# fix up pagination rules
|
|
||||||
from pelican.paginator import PaginationRule
|
|
||||||
pagination_rules = [
|
|
||||||
PaginationRule(*r) for r in settings.get(
|
|
||||||
'PAGINATION_PATTERNS',
|
|
||||||
DEFAULT_CONFIG['PAGINATION_PATTERNS'],
|
|
||||||
)
|
|
||||||
]
|
|
||||||
settings['PAGINATION_PATTERNS'] = sorted(
|
|
||||||
pagination_rules,
|
|
||||||
key=lambda r: r[0],
|
|
||||||
)
|
|
||||||
|
|
||||||
# Save people from accidentally setting a string rather than a list
|
|
||||||
path_keys = (
|
|
||||||
'ARTICLE_EXCLUDES',
|
|
||||||
'DEFAULT_METADATA',
|
|
||||||
'DIRECT_TEMPLATES',
|
|
||||||
'THEME_TEMPLATES_OVERRIDES',
|
|
||||||
'FILES_TO_COPY',
|
|
||||||
'IGNORE_FILES',
|
|
||||||
'PAGINATED_DIRECT_TEMPLATES',
|
|
||||||
'PLUGINS',
|
|
||||||
'STATIC_EXCLUDES',
|
|
||||||
'STATIC_PATHS',
|
|
||||||
'THEME_STATIC_PATHS',
|
|
||||||
'ARTICLE_PATHS',
|
|
||||||
'PAGE_PATHS',
|
|
||||||
)
|
|
||||||
for PATH_KEY in filter(lambda k: k in settings, path_keys):
|
|
||||||
if isinstance(settings[PATH_KEY], str):
|
|
||||||
logger.warning("Detected misconfiguration with %s setting "
|
|
||||||
"(must be a list), falling back to the default",
|
|
||||||
PATH_KEY)
|
|
||||||
settings[PATH_KEY] = DEFAULT_CONFIG[PATH_KEY]
|
|
||||||
|
|
||||||
# Add {PAGE,ARTICLE}_PATHS to {ARTICLE,PAGE}_EXCLUDES
|
|
||||||
mutually_exclusive = ('ARTICLE', 'PAGE')
|
|
||||||
for type_1, type_2 in [mutually_exclusive, mutually_exclusive[::-1]]:
|
|
||||||
try:
|
|
||||||
includes = settings[type_1 + '_PATHS']
|
|
||||||
excludes = settings[type_2 + '_EXCLUDES']
|
|
||||||
for path in includes:
|
|
||||||
if path not in excludes:
|
|
||||||
excludes.append(path)
|
|
||||||
except KeyError:
|
|
||||||
continue # setting not specified, nothing to do
|
|
||||||
|
|
||||||
return settings
|
|
||||||
|
|
@ -1,8 +0,0 @@
|
||||||
This is a test bad page
|
|
||||||
#######################
|
|
||||||
|
|
||||||
:status: invalid
|
|
||||||
|
|
||||||
The quick brown fox jumped over the lazy dog's back.
|
|
||||||
|
|
||||||
The status here is invalid, the page should not render.
|
|
||||||
|
|
@ -1,8 +0,0 @@
|
||||||
This is a test draft page
|
|
||||||
##########################
|
|
||||||
|
|
||||||
:status: draft
|
|
||||||
|
|
||||||
The quick brown fox .
|
|
||||||
|
|
||||||
This page is a draft.
|
|
||||||
|
|
@ -1,12 +0,0 @@
|
||||||
title: This is a markdown test draft page
|
|
||||||
status: draft
|
|
||||||
|
|
||||||
Test Markdown File Header
|
|
||||||
=========================
|
|
||||||
|
|
||||||
Used for pelican test
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
The quick brown fox .
|
|
||||||
|
|
||||||
This page is a draft
|
|
||||||
|
|
@ -1,11 +0,0 @@
|
||||||
This is a test draft page with a custom template
|
|
||||||
#################################################
|
|
||||||
|
|
||||||
:status: draft
|
|
||||||
:template: custom
|
|
||||||
|
|
||||||
The quick brown fox .
|
|
||||||
|
|
||||||
This page is a draft
|
|
||||||
|
|
||||||
This page has a custom template to be called when rendered
|
|
||||||
|
|
@ -1,8 +0,0 @@
|
||||||
This is a test hidden page
|
|
||||||
##########################
|
|
||||||
|
|
||||||
:status: hidden
|
|
||||||
|
|
||||||
The quick brown fox jumped over the lazy dog's back.
|
|
||||||
|
|
||||||
This page is hidden
|
|
||||||
|
|
@ -1,12 +0,0 @@
|
||||||
title: This is a markdown test hidden page
|
|
||||||
status: hidden
|
|
||||||
|
|
||||||
Test Markdown File Header
|
|
||||||
=========================
|
|
||||||
|
|
||||||
Used for pelican test
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
The quick brown fox jumped over the lazy dog's back.
|
|
||||||
|
|
||||||
This page is hidden
|
|
||||||
|
|
@ -1,11 +0,0 @@
|
||||||
This is a test hidden page with a custom template
|
|
||||||
#################################################
|
|
||||||
|
|
||||||
:status: hidden
|
|
||||||
:template: custom
|
|
||||||
|
|
||||||
The quick brown fox jumped over the lazy dog's back.
|
|
||||||
|
|
||||||
This page is hidden
|
|
||||||
|
|
||||||
This page has a custom template to be called when rendered
|
|
||||||
|
|
@ -1,4 +0,0 @@
|
||||||
This is a test page
|
|
||||||
###################
|
|
||||||
|
|
||||||
The quick brown fox jumped over the lazy dog's back.
|
|
||||||
|
|
@ -1,9 +0,0 @@
|
||||||
title: This is a markdown test page
|
|
||||||
|
|
||||||
Test Markdown File Header
|
|
||||||
=========================
|
|
||||||
|
|
||||||
Used for pelican test
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
The quick brown fox jumped over the lazy dog's back.
|
|
||||||
|
|
@ -1,6 +0,0 @@
|
||||||
A Page (Test) for sorting
|
|
||||||
#########################
|
|
||||||
|
|
||||||
:slug: zzzz
|
|
||||||
|
|
||||||
When using title, should be first. When using slug, should be last.
|
|
||||||
|
|
@ -1,7 +0,0 @@
|
||||||
Title: Page with a bunch of links
|
|
||||||
|
|
||||||
My links:
|
|
||||||
|
|
||||||
[Link 1]({tag}マック)
|
|
||||||
|
|
||||||
[Link 2]({category}Yeah)
|
|
||||||
|
|
@ -1,7 +0,0 @@
|
||||||
Title: Page with static links
|
|
||||||
|
|
||||||
My links:
|
|
||||||
|
|
||||||
[Link 0]({static}image0.jpg)
|
|
||||||
|
|
||||||
[Link 1]({attach}image1.jpg)
|
|
||||||
|
|
@@ -1,8 +0,0 @@
This is a test page with a preset template
##########################################

:template: custom

The quick brown fox jumped over the lazy dog's back.

This article has a custom template to be called when rendered

@@ -1,15 +0,0 @@
import logging
import warnings

from pelican.log import log_warnings

# redirect warnings module to use logging instead
log_warnings()

# set up warnings to log DeprecationWarnings and error on
# warnings in pelican's codebase
warnings.simplefilter("default", DeprecationWarning)
warnings.filterwarnings("error", ".*", Warning, "pelican")

# Add a NullHandler to silence warning about no available handlers
logging.getLogger().addHandler(logging.NullHandler())

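The deleted test bootstrap above routes the warnings module through pelican's logging setup via pelican.log.log_warnings() and then escalates any warning raised from pelican's own code into an error. A minimal stand-alone sketch of the same mechanism, assuming log_warnings() is roughly a wrapper around the standard library's logging.captureWarnings() (an assumption for illustration, not pelican's actual implementation):

# Sketch only: assumes log_warnings() is approximately logging.captureWarnings(),
# which reroutes warnings.warn() output to the "py.warnings" logger instead of stderr.
import logging
import warnings

logging.basicConfig(level=logging.WARNING)
logging.captureWarnings(True)

# DeprecationWarnings are hidden by default; "default" makes them visible once
# per location, so they surface as log records during a test run.
warnings.simplefilter("default", DeprecationWarning)
warnings.warn("old API, please migrate", DeprecationWarning)
# -> emitted via the "py.warnings" logger rather than printed to stderr

The filterwarnings("error", ".*", Warning, "pelican") call in the fixture then turns any warning originating from the pelican package into a raised exception, so new deprecations fail the test suite immediately.
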
@@ -1,6 +0,0 @@

Rst with filename metadata
##########################

:category: yeah
:author: Alexis Métaireau

@@ -1,6 +0,0 @@
category: yeah
author: Alexis Métaireau

Markdown with filename metadata
===============================

@@ -1,7 +0,0 @@
This is an article with category !
##################################

:category: yeah
:date: 1970-01-01

This article should be in 'yeah' category.

@@ -1,4 +0,0 @@
This is an article without category !
#####################################

This article should be in 'TestCategory' category.

@@ -1,6 +0,0 @@
Article title
#############

THIS is some content. With some stuff to "typogrify"...

Now with added support for :abbr:`TLA (three letter acronym)`.

@@ -1,11 +0,0 @@
<html>
<head>
</head>
<body>
Ensure that if an attribute value contains a double quote, it is
surrounded with single quotes, otherwise with double quotes.
<span data-test="'single quoted string'">Span content</span>
<span data-test='"double quoted string"'>Span content</span>
<span data-test="string without quotes">Span content</span>
</body>
</html>

@@ -1,15 +0,0 @@
An Article With Code Block To Test Typogrify Ignore
###################################################

An article with some code

.. code-block:: python

    x & y

A block quote:

    x & y

Normal:
x & y

@@ -1,8 +0,0 @@
<html>
<head>
</head>
<body>
Body content
<!-- This comment is included (including extra whitespace) -->
</body>
</html>

@@ -1,15 +0,0 @@
Title: Test metadata duplicates
Category: test
Tags: foo, bar, foobar, foo, bar
Authors: Author, First; Author, Second; Author, First
Date: 2010-12-02 10:14
Modified: 2010-12-02 10:20
Summary: I have a lot to test

Test Markdown File Header
=========================

Used for pelican test
---------------------

The quick brown fox jumped over the lazy dog's back.

@@ -1,17 +0,0 @@
<html>
<head>
<title>Article with an inline SVG</title>
</head>
<body>
Ensure that the title attribute in an inline svg is not handled as an HTML title.
<svg xmlns="http://www.w3.org/2000/svg" width="210mm" height="297mm" viewBox="0 0 210 297">
<path fill="#b2b2ff" stroke="#000" stroke-width="2.646" d="M88.698 89.869l-8.899 15.63a38.894 38.894 0 00-16.474 31.722 38.894 38.894 0 0038.894 38.894 38.894 38.894 0 0038.894-38.894 38.894 38.894 0 00-9-24.83l-2.38-16.886-14.828 4.994a38.894 38.894 0 00-12.13-2.144z">
<title>A different title inside the inline SVG</title>
</path>
<ellipse cx="100.806" cy="125.285" rx="3.704" ry="10.583"/>
<ellipse cx="82.021" cy="125.285" rx="3.704" ry="10.583"/>
<ellipse cx="-111.432" cy="146.563" rx="3.704" ry="10.583" transform="rotate(-64.822)"/>
<ellipse cx="-118.245" cy="91.308" rx="6.18" ry="8.62" transform="matrix(.063 -.99801 .96163 .27436 0 0)"/>
</svg>
</body>
</html>

@@ -1,6 +0,0 @@
<html>
<head>
<title>This is a super article !</title>
<meta name="keywords" content="foo, bar, foobar" />
</head>
</html>

@@ -1,15 +0,0 @@
Title: Article with markdown containing footnotes
Date: 2012-10-31
Modified: 2012-11-01
Summary: Summary with **inline** markup *should* be supported.
Multiline: Line Metadata should be handle properly.
    See syntax of Meta-Data extension of Python Markdown package:
    If a line is indented by 4 or more spaces,
    that line is assumed to be an additional line of the value
    for the previous keyword.
    A keyword may have as many lines as desired.

This is some content[^1] with some footnotes[^footnote]

[^1]: Numbered footnote
[^footnote]: Named footnote

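The Multiline field in the fixture above exercises the Meta-Data extension of Python-Markdown, where lines indented by four or more spaces continue the previous key's value. A small stand-alone sketch of that parsing behaviour (hypothetical direct use of the markdown package, not pelican's reader code):

# Sketch: how Python-Markdown's "meta" extension collects the indented
# continuation lines shown in the fixture above.
import markdown

SRC = """\
Title: Article with markdown containing footnotes
Summary: Summary with **inline** markup *should* be supported.
Multiline: Line Metadata should be handle properly.
    A keyword may have as many lines as desired.

This is some content[^1] with some footnotes[^footnote]

[^1]: Numbered footnote
[^footnote]: Named footnote
"""

md = markdown.Markdown(extensions=["meta", "footnotes"])
html = md.convert(SRC)

# Keys are lower-cased and each value is a list of lines, so the
# 4-space-indented line becomes a second entry under "multiline".
print(md.Meta["multiline"])
# ['Line Metadata should be handle properly.',
#  'A keyword may have as many lines as desired.']
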
@@ -1,5 +0,0 @@
Title: Article with markdown and nested summary metadata
Date: 2012-10-30
Summary: Test: This metadata value looks like metadata

This is some content.

@@ -1,19 +0,0 @@
Title: マックOS X 10.8でパイソンとVirtualenvをインストールと設定
Slug: python-virtualenv-on-mac-osx-mountain-lion-10.8
Date: 2012-12-20
Modified: 2012-12-22
Tags: パイソン, マック
Category: 指導書
Summary: パイソンとVirtualenvをまっくでインストールする方法について明確に説明します。

Writing unicode is certainly fun.

パイソンとVirtualenvをまっくでインストールする方法について明確に説明します。

And let's mix languages.

первый пост

Now another.

İlk yazı çok özel değil.

@@ -1,10 +0,0 @@
Title: Article with markdown and summary metadata multi
Date: 2012-10-31
Summary:
    A multi-line summary should be supported
    as well as **inline markup**.
custom_formatted_field:
    Multi-line metadata should also be supported
    as well as *inline markup* and stuff to "typogrify"...

This is some content.

@@ -1,5 +0,0 @@
Title: Article with markdown and summary metadata single
Date: 2012-10-30
Summary: A single-line summary should be supported as well as **inline markup**.

This is some content.

@@ -1,10 +0,0 @@
title: Test markdown File
category: test

Test Markdown File Header
=========================

Used for pelican test
---------------------

This is another markdown test file. Uses the markdown extension.

@@ -1,8 +0,0 @@
Title: Test Markdown extensions

[TOC]

## Level1

### Level2

@@ -1,14 +0,0 @@
Title: Test md File
Category: test
Tags: foo, bar, foobar
Date: 2010-12-02 10:14
Modified: 2010-12-02 10:20
Summary: I have a lot to test

Test Markdown File Header
=========================

Used for pelican test
---------------------

The quick brown fox jumped over the lazy dog's back.

@@ -1,10 +0,0 @@
title: Test mdown File
category: test

Test Markdown File Header
=========================

Used for pelican test
---------------------

This is another markdown test file. Uses the mdown extension.

@@ -1,15 +0,0 @@
<html>
<head>
<title>This is a super article !</title>
<meta name="tags" content="foo, bar, foobar" />
<meta name="date" content="2010-12-02 10:14" />
<meta name="category" content="yeah" />
<meta name="author" content="Alexis Métaireau" />
<meta name="summary" content="Summary and stuff" />
<meta name="custom_field" content="http://notmyidea.org" />
</head>
<body>
Multi-line metadata should be supported
as well as <strong>inline markup</strong>.
</body>
</html>

@@ -1,16 +0,0 @@

This is a super article !
#########################

:tags: foo, bar, foobar
:date: 2010-12-02 10:14
:modified: 2010-12-02 10:20
:category: yeah
:author: Alexis Métaireau
:summary:
    Multi-line metadata should be supported
    as well as **inline markup** and stuff to "typogrify"...
:custom_field: http://notmyidea.org
:custom_formatted_field:
    Multi-line metadata should also be supported
    as well as *inline markup* and stuff to "typogrify"...

@@ -1,12 +0,0 @@

This is a super article !
#########################

:tags: foo, bar, foobar
:date: 2010-12-02 10:14
:category: yeah
:author: Alexis Métaireau
:summary:
    Multi-line metadata should be supported
    as well as **inline markup**.
:custom_field: http://notmyidea.org

@@ -1,15 +0,0 @@
<html>
<head>
<title>This is a super article !</title>
<meta name="tags" contents="foo, bar, foobar" />
<meta name="date" contents="2010-12-02 10:14" />
<meta name="category" contents="yeah" />
<meta name="author" contents="Alexis Métaireau" />
<meta name="summary" contents="Summary and stuff" />
<meta name="custom_field" contents="http://notmyidea.org" />
</head>
<body>
Multi-line metadata should be supported
as well as <strong>inline markup</strong>.
</body>
</html>

@@ -1,15 +0,0 @@
<html>
<head>
<title>This is a super article !</title>
<meta name="tags" content="foo, bar, foobar" />
<meta name="date" content="2010-12-02 10:14" />
<meta name="category" content="yeah" />
<meta name="author" content="Alexis Métaireau" />
<meta name="summary" content="Summary and stuff" />
<meta name="custom_field" content="http://notmyidea.org" />
</head>
<body>
Multi-line metadata should be supported
as well as <strong>inline markup</strong>.
</body>
</html>

@@ -1,16 +0,0 @@
<html>
<head>
<title>This is a super article !</title>
<meta name="tags" content="foo, bar, foobar" />
<meta name="date" content="2010-12-02 10:14" />
<meta name="modified" content="2010-12-31 23:59" />
<meta name="category" content="yeah" />
<meta name="author" content="Alexis Métaireau" />
<meta name="summary" content="Summary and stuff" />
<meta name="custom_field" content="http://notmyidea.org" />
</head>
<body>
Multi-line metadata should be supported
as well as <strong>inline markup</strong>.
</body>
</html>

@@ -1,15 +0,0 @@
<html>
<head>
<title>This is a super article !</title>
<meta name="tags" content="foo, bar, foobar" />
<meta name="modified" content="2010-12-02 10:14" />
<meta name="category" content="yeah" />
<meta name="author" content="Alexis Métaireau" />
<meta name="summary" content="Summary and stuff" />
<meta name="custom_field" content="http://notmyidea.org" />
</head>
<body>
Multi-line metadata should be supported
as well as <strong>inline markup</strong>.
</body>
</html>

Some files were not shown because too many files have changed in this diff.