Add makesite.py
This commit is contained in:
parent
349b7f2503
commit
4ef8597db8
@ -8,3 +8,4 @@ pages of the polynom.me project, like CSS and fonts.
|
|||||||
- `assets/css/index.css`: GPLv3 (See `LICENSE`)
|
- `assets/css/index.css`: GPLv3 (See `LICENSE`)
|
||||||
- [Overpass](https://github.com/RedHatOfficial/Overpass): LGPL 2.1 (See `licenses/Overpass.md`)
|
- [Overpass](https://github.com/RedHatOfficial/Overpass): LGPL 2.1 (See `licenses/Overpass.md`)
|
||||||
- [Roboto](https://github.com/googlefonts/roboto): Apache License 2.0 (See `licenses/Roboto.md`)
|
- [Roboto](https://github.com/googlefonts/roboto): Apache License 2.0 (See `licenses/Roboto.md`)
|
||||||
|
- [makesite](https://github.com/sunainapai/makesite): MIT License (See `licenses/makesite.md`)
|
||||||
|
@ -29,6 +29,10 @@ a {
|
|||||||
color: white;
|
color: white;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.page-title {
|
||||||
|
color: #9b59b6;
|
||||||
|
}
|
||||||
|
|
||||||
ul {
|
ul {
|
||||||
margin-top: 2px;
|
margin-top: 2px;
|
||||||
}
|
}
|
22
licenses/makesite.md
Normal file
22
licenses/makesite.md
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
=====================
|
||||||
|
Copyright (c) 2018 Sunaina Pai
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining
|
||||||
|
a copy of this software and associated documentation files (the
|
||||||
|
"Software"), to deal in the Software without restriction, including
|
||||||
|
without limitation the rights to use, copy, modify, merge, publish,
|
||||||
|
distribute, sublicense, and/or sell copies of the Software, and to
|
||||||
|
permit persons to whom the Software is furnished to do so, subject to
|
||||||
|
the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be
|
||||||
|
included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||||
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||||
|
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||||
|
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||||
|
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||||
|
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||||
|
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
293
makesite.py
Normal file
293
makesite.py
Normal file
@ -0,0 +1,293 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
# The MIT License (MIT)
|
||||||
|
#
|
||||||
|
# Copyright (c) 2018 Sunaina Pai
|
||||||
|
#
|
||||||
|
# Permission is hereby granted, free of charge, to any person obtaining
|
||||||
|
# a copy of this software and associated documentation files (the
|
||||||
|
# "Software"), to deal in the Software without restriction, including
|
||||||
|
# without limitation the rights to use, copy, modify, merge, publish,
|
||||||
|
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||||
|
# permit persons to whom the Software is furnished to do so, subject to
|
||||||
|
# the following conditions:
|
||||||
|
#
|
||||||
|
# The above copyright notice and this permission notice shall be
|
||||||
|
# included in all copies or substantial portions of the Software.
|
||||||
|
#
|
||||||
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||||
|
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||||
|
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||||
|
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||||
|
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||||
|
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||||
|
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
|
"""Make static website/blog with Python."""
|
||||||
|
|
||||||
|
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import re
|
||||||
|
import glob
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
import datetime
|
||||||
|
import optparse
|
||||||
|
|
||||||
|
|
||||||
|
def fread(filename):
    """Return the entire contents of *filename* as text.

    The file is opened with an explicit UTF-8 encoding so reads do not
    depend on the platform's locale default (site sources — HTML,
    Markdown, JSON — are UTF-8).
    """
    with open(filename, 'r', encoding='utf-8') as f:
        return f.read()
||||||
|
|
||||||
|
|
||||||
|
def fwrite(filename, text):
    """Write *text* to *filename*, creating parent directories as needed.

    Fixes two defects in the original: ``os.makedirs('')`` raised when
    *filename* had no directory component, and the isdir-then-makedirs
    pair raced if the directory appeared in between. ``exist_ok=True``
    makes creation idempotent; the truthiness guard skips bare filenames.
    """
    basedir = os.path.dirname(filename)
    if basedir:
        os.makedirs(basedir, exist_ok=True)

    # UTF-8 for symmetry with how site sources are read.
    with open(filename, 'w', encoding='utf-8') as f:
        f.write(text)
||||||
|
|
||||||
|
|
||||||
|
def log(msg, *args):
    """Emit *msg*, formatted with *args*, as a single line on stderr."""
    print(msg.format(*args), file=sys.stderr)
|
||||||
|
|
||||||
|
|
||||||
|
def truncate(text, words=25):
    """Strip HTML tags from *text* and keep only the first *words* words."""
    # (?s) lets the tag pattern span newlines; every tag becomes a
    # space so adjacent words do not fuse together.
    plain = re.sub('(?s)<.*?>', ' ', text)
    tokens = plain.split()
    return ' '.join(tokens[:words])
|
||||||
|
|
||||||
|
|
||||||
|
def read_headers(text):
    """Yield (key, value, end-index) for each leading HTML-comment header.

    Headers have the form ``<!-- key: value -->`` and must appear at the
    top of *text*; the first non-header line terminates the scan.
    """
    pattern = re.compile(r'\s*<!--\s*(.+?)\s*:\s*(.+?)\s*-->\s*|.+')
    for m in pattern.finditer(text):
        key = m.group(1)
        # The `|.+` alternative matched a non-header line: headers end.
        if key is None:
            break
        yield key, m.group(2), m.end()
|
||||||
|
|
||||||
|
|
||||||
|
def rfc_2822_format(date_str):
    """Convert a ``yyyy-mm-dd`` string to an RFC 2822 date at midnight UTC."""
    # The +0000 offset is hard-coded: dates carry no time-of-day or zone.
    parsed = datetime.datetime.strptime(date_str, '%Y-%m-%d')
    return parsed.strftime('%a, %d %b %Y %H:%M:%S +0000')
|
||||||
|
|
||||||
|
|
||||||
|
def read_content(filename):
    """Read a source file into a dict of metadata plus its content.

    The result always carries ``date``, ``slug``, ``content`` and
    ``rfc_2822_date``; any ``<!-- key: value -->`` headers found at the
    top of the file are merged in as extra keys. Markdown sources are
    rendered to HTML when the ``commonmark`` package is importable.
    """
    text = fread(filename)

    # Derive date and slug from a "yyyy-mm-dd-slug" style basename; the
    # date prefix is optional and defaults to the Unix epoch.
    stem = os.path.basename(filename).split('.')[0]
    parts = re.search(r'^(?:(\d\d\d\d-\d\d-\d\d)-)?(.+)$', stem)
    content = {
        'date': parts.group(1) or '1970-01-01',
        'slug': parts.group(2),
    }

    # Absorb the header block; `end` tracks where the body begins.
    end = 0
    for key, val, end in read_headers(text):
        content[key] = val
    text = text[end:]

    # Render Markdown sources to HTML; missing renderer is non-fatal.
    markdown_exts = ('.md', '.mkd', '.mkdn', '.mdown', '.markdown')
    if filename.endswith(markdown_exts):
        try:
            # `_test` lets the unit tests force the ImportError path.
            if _test == 'ImportError':
                raise ImportError('Error forced by test')
            import commonmark
            text = commonmark.commonmark(text)
        except ImportError as e:
            log('WARNING: Cannot render Markdown in {}: {}', filename, str(e))

    # Attach the (possibly rendered) body and an RFC 2822 date.
    content['content'] = text
    content['rfc_2822_date'] = rfc_2822_format(content['date'])

    return content
|
||||||
|
|
||||||
|
|
||||||
|
def render(template, **params):
    """Substitute ``{{ key }}`` placeholders in *template* from *params*.

    Unknown placeholders are left untouched, so a later render pass can
    still fill them in.
    """
    def substitute(match):
        key = match.group(1)
        if key in params:
            return str(params[key])
        return match.group(0)

    return re.sub(r'{{\s*([^}\s]+)\s*}}', substitute, template)
|
||||||
|
|
||||||
|
|
||||||
|
def make_pages(src, dst, layout, **params):
    """Generate pages from the files matching glob pattern *src*.

    Each page's content dict is merged over *params*, rendered through
    *layout*, and written to the path produced by rendering *dst*.
    Returns the page content dicts sorted newest-first by date.
    """
    items = []

    for src_path in glob.glob(src):
        content = read_content(src_path)
        page_params = dict(params, **content)

        # A page may point at an extra JSON params file via a
        # <!-- params: path --> header. fread closes the handle
        # deterministically (the old open(...).read() leaked it).
        if 'params' in content:
            page_params.update(**json.loads(fread(content['params'])))

        # Populate placeholders in content if content-rendering is enabled.
        if page_params.get('render') == 'yes':
            rendered_content = render(page_params['content'], **page_params)
            page_params['content'] = rendered_content
            content['content'] = rendered_content

        items.append(content)

        dst_path = render(dst, **page_params)
        output = render(layout, **page_params)

        # For embedding, for example, a post into the page layout.
        if 'embed' in params:
            embed = fread(f'layout/{params["embed"]}.html')
            embed_params = dict(**page_params)
            embed_params.update(content=output)
            output = render(embed, **embed_params)

        log('Rendering {} => {} ...', src_path, dst_path)
        fwrite(dst_path, output)

    return sorted(items, key=lambda x: x['date'], reverse=True)
|
||||||
|
|
||||||
|
|
||||||
|
def make_list(posts, dst, list_layout, item_layout, truncate_posts=True, **params):
    """Generate a list page (blog index or feed) from *posts*.

    Each post is rendered through *item_layout*; the concatenation is
    exposed as ``{{ content }}`` to *list_layout* and written to the
    path produced by rendering *dst*.
    """
    rendered_items = []
    for post in posts:
        item_params = dict(params, **post)
        # Feeds want the full body; the index only a short summary.
        if truncate_posts:
            summary = truncate(post['content'])
        else:
            summary = post['content']
        item_params['summary'] = summary
        rendered_items.append(render(item_layout, **item_params))

    params['content'] = ''.join(rendered_items)
    dst_path = render(dst, **params)
    output = render(list_layout, **params)

    log('Rendering list => {} ...', dst_path)
    fwrite(dst_path, output)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Build the static site into ``_site/`` according to CLI options.

    Fixes over the original: ``-v``/``-i`` now default to ``[]`` instead
    of ``None`` (omitting them used to raise TypeError), the copytree
    ``ignore`` callback now returns a list of *names* as shutil requires
    (it used to return full paths mixed with raw generator objects),
    directory includes are copied to ``_site/<name>`` instead of onto
    the existing ``_site/`` (which raised FileExistsError), and
    ``--rss`` without ``--blog`` no longer hits a NameError.
    """
    parser = optparse.OptionParser()
    parser.add_option('-p', '--params', dest='params',
                      help='The params file to load')
    parser.add_option('-b', '--blog', action='store_true', dest='blog',
                      help='Generate a blog')
    parser.add_option('-r', '--rss', action='store_true', dest='rss',
                      help='Generate a RSS feed')
    parser.add_option('--copy-assets', action='store_true', dest='copy_assets',
                      help='Copy assets over')
    parser.add_option('-a', '--assets', action='append', dest='assets', default=['assets'],
                      help='The assets directory to copy')
    # append-options default to None; give them empty lists so the
    # loops below work when the option is never passed.
    parser.add_option('-v', '--variable', action='append', dest='variables', default=[],
                      help='Append a variable to the params')
    parser.add_option('-i', '--ignore', action='append', dest='asset_ignores', default=[],
                      help='Ignore a folder when copying the assets')
    parser.add_option('--include', action='append', dest='includes', default=[],
                      help='Include files and folders into the build')
    (options, args) = parser.parse_args()

    # Create a new _site directory from scratch.
    if os.path.isdir('_site'):
        shutil.rmtree('_site')

    if options.copy_assets:
        def ignore(dir_, items):
            # shutil.copytree calls this with a directory and the names
            # inside it, and expects back the subset of *names* to skip.
            skipped = []
            for item in items:
                full_path = os.path.join(dir_, item)
                if any(full_path == ignored or full_path.startswith(ignored)
                       for ignored in options.asset_ignores):
                    skipped.append(item)
            return skipped

        for asset in options.assets:
            shutil.copytree(asset, '_site/assets', ignore=ignore, dirs_exist_ok=True)

    for include in options.includes:
        print(f'Copying {include} => _site/{include} ...')
        if os.path.isfile(include):
            shutil.copy(include, '_site/')
        else:
            # Mirror the directory under _site; copying onto '_site/'
            # itself would raise FileExistsError.
            shutil.copytree(include, os.path.join('_site', include))

    # Default parameters.
    params = {
        'base_path': '',
        'subtitle': 'Lorem Ipsum',
        'author': 'Admin',
        'site_url': 'http://localhost:8000',
        'current_year': datetime.datetime.now().year
    }

    # If a params file exists, load it over the defaults.
    params_file = options.params if options.params else 'params.json'
    if os.path.isfile(params_file):
        params.update(**json.loads(fread(params_file)))

    # Command-line -v key=value pairs override everything else.
    for variable in options.variables:
        key, val = variable.split('=')
        params[key] = val

    # Load layouts.
    page_layout = fread('layout/page.html')

    # Create site pages from the top-level content files.
    for element in os.listdir('content/'):
        if not os.path.isfile('content/' + element):
            continue

        make_pages('content/' + element, '_site/' + element,
                   page_layout, **params)

    # Defined up-front so --rss without --blog yields an empty feed
    # instead of a NameError.
    blog_posts = []
    if options.blog:
        post_layout = fread('layout/post.html')
        list_layout = fread('layout/list.html')
        item_layout = fread('layout/item.html')

        # Create blogs.
        blog_posts = make_pages('content/blog/*.md',
                                '_site/{{ slug }}.html',
                                post_layout, blog='blog', embed='page', **params)

        # Create blog list pages.
        make_list(blog_posts, '_site/index.html',
                  page_layout, item_layout, blog='blog', embed='page', **params)

    if options.rss:
        feed_xml = fread('layout/feed.xml')
        item_xml = fread('layout/item.xml')

        # Create RSS feeds.
        make_list(blog_posts, '_site/atom.xml',
                  feed_xml, item_xml, truncate_posts=False, blog='blog', **params)
|
||||||
|
|
||||||
|
|
||||||
|
# Test parameter to be set temporarily by unit tests.
# read_content() checks this: when set to 'ImportError' it forces the
# Markdown-renderer fallback path so the warning branch can be tested.
_test = None


if __name__ == '__main__':
    main()
|
16
pgp-sign.py
Normal file
16
pgp-sign.py
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
import sys
import subprocess

# Sign each file given on the command line in place: ask gpg for an
# ASCII-armoured detached signature of the file and splice it over the
# %%%SIGNED_PAGES_PGP_SIGNATURE%%% placeholder inside that same file.

# Require at least one file argument.
if len(sys.argv) <= 1:
    print('Usage: pgp-sign.py [files]')
    exit(1)

for file_ in sys.argv[1:]:
    print(f'Signing {file_} ...')
    # --output - streams the armoured signature to stdout; check=True
    # aborts the whole run if gpg fails on any file.
    proc = subprocess.run(['gpg', '--armor', '--detach-sign', '--output', '-', '--local-user', 'papatutuwawa@polynom.me', file_],
                          capture_output=True, check=True)
    # [:-1] drops the trailing newline gpg appends to the armour block.
    signature = proc.stdout.decode('utf-8')[:-1]
    with open(file_, 'r') as fh:
        content = fh.read().replace('%%%SIGNED_PAGES_PGP_SIGNATURE%%%', signature)
    with open(file_, 'w') as fh:
        fh.write(content)
|
Loading…
Reference in New Issue
Block a user