Initial state

This commit is contained in:
Reid 'arrdem' McKenzie 2021-04-08 00:37:51 -06:00
commit c25e825a95
27 changed files with 1271 additions and 0 deletions

12
.gitignore vendored Normal file
View file

@ -0,0 +1,12 @@
*.log
*.pyc
*.pyo
.DS_Store
.cache
.idea
/**/__pycache__
/**/_build
/**/_public
bazel-*
tmp/
public/

7
LICENSE Normal file
View file

@ -0,0 +1,7 @@
Copyright 2017 Reid 'arrdem' McKenzie <me@arrdem.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

12
README.rst Normal file
View file

@ -0,0 +1,12 @@
Reid's monorepo
===============
I've found it inconvenient to develop lots of small Python modules.
And so I'm going the other way; Bazel in a monorepo with several subprojects so I'm able to reuse a maximum of scaffolding.
License
~~~~~~~
Copyright Reid 'arrdem' McKenzie, 4/8/2021.
Unless labeled otherwise, the contents of this repository are distributed under the terms of the MIT license.
See the included `LICENSE` file for more.

61
WORKSPACE Normal file
View file

@ -0,0 +1,61 @@
# WORKSPACE
#
# This file exists to configure the Bazel (https://bazel.build/) build tool to our needs.
# Particularly, it installs rule definitions and other capabilities which aren't in Bazel core.
# In the future we may have our own modifications to this config.

# Install the blessed Python and PyPi rule support
# From https://github.com/bazelbuild/rules_python

workspace(
    name = "arrdem_source",
)

load(
    "@bazel_tools//tools/build_defs/repo:http.bzl",
    "http_archive",
    "http_file",
)
load(
    "@bazel_tools//tools/build_defs/repo:git.bzl",
    "git_repository",
)

####################################################################################################
# Skylib
####################################################################################################
# Utility rules (copy_file etc.) used by //tools/build_rules.

git_repository(
    name = "bazel_skylib",
    remote = "https://github.com/bazelbuild/bazel-skylib.git",
    tag = "1.0.3",
)

load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace")

bazel_skylib_workspace()

####################################################################################################
# Python support
####################################################################################################

# Using rules_python at a more recent SHA than the last release like a baws
git_repository(
    name = "rules_python",
    remote = "https://github.com/bazelbuild/rules_python.git",
    tag = "0.2.0",
)

# pip package pinnings need to be initialized.
# this generates a bunch of bzl rules so that each pip dep is a bzl target
# (consumed via `requirement()` / `py_requirement()` in BUILD files).
load("@rules_python//python:pip.bzl", "pip_install")

pip_install(
    name = "arrdem_source_pypi",
    requirements = "//tools/python:requirements.txt",
    python_interpreter = "python3",
)

####################################################################################################
# Postscript
####################################################################################################
# Do toolchain nonsense to use py3
# (the toolchain target is defined in //tools/python/BUILD).
register_toolchains(
    "//tools/python:toolchain",
)

3
tools/build_rules/BUILD Normal file
View file

@ -0,0 +1,3 @@
# Make everything in tools/build_rules loadable from any BUILD file in the repo.
package(default_visibility = ["//visibility:public"])

licenses(["notice"])

47
tools/build_rules/cp.bzl Normal file
View file

@ -0,0 +1,47 @@
load("@bazel_skylib//rules:copy_file.bzl",
"copy_file",
)
def cp(name, src, **kwargs):
    """A slightly more convenient cp() rule. Name and out should always be the same.

    The output file keeps the requested `name`; the generated target gets a
    sanitized, path-safe variant of it, which is returned for chaining.
    """
    # Derive a legal target name from the (possibly path-like) output name.
    # Replacement order matters: "//" must be dropped before "/" is rewritten.
    sanitized = name
    for old, new in ((".", "_"), (":", "/"), ("//", ""), ("/", "_")):
        sanitized = sanitized.replace(old, new)

    copy_file(
        name = sanitized,
        src = src,
        out = name,
        **kwargs
    )
    return sanitized
def _copy_filegroup_impl(ctx):
    """Copy every file of every dep into the current package.

    Each dep's files are re-rooted by stripping the dep's package prefix from
    its short_path, so downstream rules can consume them as local files.
    """
    all_outputs = []
    for t in ctx.attr.deps:
        t_prefix = t.label.package
        for f in t.files.to_list():
            # Strip out the source prefix...
            path = f.short_path.replace(t_prefix + "/", "")
            out = ctx.actions.declare_file(path)
            # BUG FIX: removed a leftover debug print() which spammed the
            # analysis log for every copied file on every build.
            all_outputs += [out]
            ctx.actions.run_shell(
                outputs=[out],
                inputs=depset([f]),
                arguments=[f.path, out.path],
                command="cp $1 $2"
            )

    return [
        DefaultInfo(
            files=depset(all_outputs),
            runfiles=ctx.runfiles(files=all_outputs))
    ]

# Rule wrapper: `deps` is the list of filegroup-like targets to re-root here.
copy_filegroups = rule(
    implementation=_copy_filegroup_impl,
    attrs={
        "deps": attr.label_list(),
    },
)

View file

@ -0,0 +1,26 @@
# A global prelude for all BUILD[.bazel] files
load("//tools/python:defs.bzl",
"py_library",
"py_binary",
"py_unittest",
"py_pytest",
"py_resources",
)
load("@arrdem_source_pypi//:requirements.bzl",
py_requirement="requirement"
)
load("@bazel_skylib//rules:copy_file.bzl",
"copy_file",
)
load("//tools/build_rules:cp.bzl",
"cp",
"copy_filegroups"
)
load("//tools/build_rules:webp.bzl",
"webp_image",
)

View file

@ -0,0 +1,25 @@
"""
Webp image building.
"""
def webp_image(src, name = None, out = None, quality = 95, flags = None):
    """Use cwebp to convert the image to an output.

    NOTE(review): `quality` is only honored when `flags` is None — explicit
    flags replace the default "-q <quality>" entirely.
    """
    # Default the output to `<basename>.webp` and the target name to a
    # dot-free variant of the output.
    if not out:
        out = src.split(".", 1)[0] + ".webp"
    if not name:
        name = out.replace(".", "_")

    argv = [str(i) for i in (flags or ["-q", quality])]
    return native.genrule(
        name = name,
        srcs = [src],
        outs = [out],
        cmd = "cwebp {} $< -o $@".format(" ".join(argv))
    )
def auto_webps(srcs):
    """Generate webp targets automagically for a mess of files.

    Every source gets a webp_image() with all-default naming.
    """
    for asset in srcs:
        webp_image(src = asset)

13
tools/fmt.sh Executable file
View file

@ -0,0 +1,13 @@
#!/usr/bin/env bash
# Auto-format the whole monorepo in place.
# Counterpart to lint.sh, which runs the same tools in check-only mode.

set -euox pipefail

# Always operate from the repo root so the DIRS glob and tool paths resolve.
cd "$(git rev-parse --show-toplevel)"

# Build the formatter shims (see tools/python/BUILD) before invoking them.
bazel build //tools/python/...

# Every top-level directory in the repo.
DIRS=(*)

bazel-bin/tools/python/autoflake -ir "${DIRS[@]}"
bazel-bin/tools/python/black "${DIRS[@]}"
bazel-bin/tools/python/isort "${DIRS[@]}"
bazel-bin/tools/python/unify --quote '"' -ir "${DIRS[@]}"
bazel-bin/tools/python/reqsort --execute tools/python/requirements.txt

20
tools/lint.sh Executable file
View file

@ -0,0 +1,20 @@
#!/usr/bin/env bash
set -euox pipefail
cd "$(git rev-parse --show-toplevel)"
bazel build //tools/python/...
DIRS=(*)
bazel-bin/tools/python/autoflake -r "${DIRS[@]}"
bazel-bin/tools/python/black --check "${DIRS[@]}"
bazel-bin/tools/python/isort --check "${DIRS[@]}"
bazel-bin/tools/python/unify --quote '"' -cr "${DIRS[@]}"
bazel-bin/tools/python/reqsort --dryrun tools/python/requirements.txt
for f in $(find . -type f -name "openapi.yaml"); do
bazel-bin/tools/python/openapi "${f}" && echo "Schema $f OK"
done
for f in $(find . -type f -name "openapi.yaml"); do
bazel-bin/tools/python/yamllint -c tools/yamllint/yamllintrc "${f}"
done

136
tools/python/BUILD Normal file
View file

@ -0,0 +1,136 @@
load("@rules_python//python:defs.bzl",
    "py_runtime_pair"
)

package(default_visibility = ["//visibility:public"])

licenses(["notice"])

# These files are consumed by the py_pytest/py_unittest macros in defs.bzl.
exports_files([
    "defs.bzl",
    "bzl_pytest_shim.py",
    "bzl_unittest_shim.py",
])

####################################################################################################
# Python toolchain
####################################################################################################
# NOTE(review): pins the host's /usr/bin/python3 as the sole (PY3) runtime;
# registered from the WORKSPACE via register_toolchains().

py_runtime(
    name = "python_runtime",
    interpreter_path = "/usr/bin/python3",
    python_version = "PY3",
    visibility = ["//visibility:public"],
)

py_runtime_pair(
    name = "runtime",
    py2_runtime = None,
    py3_runtime = ":python_runtime",
)

toolchain(
    name = "toolchain",
    toolchain = ":runtime",
    toolchain_type = "@bazel_tools//tools/python:toolchain_type",
)

####################################################################################################
# Formatter / linter shim binaries, driven by tools/fmt.sh and tools/lint.sh.
# py_binary / py_pytest / py_requirement come from the build prelude
# (//tools/build_rules), which loads defs.bzl.
####################################################################################################

py_binary(
    name = "autoflake",
    main = "autoflake_shim.py",
    deps = [
        py_requirement("autoflake"),
    ]
)

py_binary(
    name = "black",
    main = "black_shim.py",
    deps = [
        py_requirement("black"),
    ]
)

py_binary(
    name = "isort",
    main = "isort_shim.py",
    deps = [
        py_requirement("isort"),
    ]
)

# Documentation generator wrapper (see sphinx_shim.py).
py_binary(
    name = "sphinx",
    main = "sphinx_shim.py",
    deps = [
        py_requirement("click"),
        py_requirement("recommonmark"),
        py_requirement("sphinx"),
        py_requirement("sphinxcontrib-openapi"),
        py_requirement("sphinxcontrib-programoutput"),
        py_requirement("livereload"),
    ]
)

# Audits requirements.txt pins against approved licenses (see test_licenses.py).
py_pytest(
    name = "test_licenses",
    srcs = [
        "test_licenses.py",
    ],
    data = [
        "requirements.txt",
    ],
    deps = [
        py_requirement("requests"),
    ]
)

py_binary(
    name = "unify",
    main = "unify_shim.py",
    deps = [
        py_requirement("unify"),
    ]
)

py_binary(
    name = "openapi",
    main = "openapi_shim.py",
    deps = [
        py_requirement("openapi-spec-validator"),
    ]
)

# WARNING: YAMLLINT is GPL3'd code. Do not extend, modify or depend on this as a lib.
py_binary(
    name = "yamllint",
    main = "yamllint_shim.py",
    deps = [
        py_requirement("yamllint"),
    ]
)

py_binary(
    name = "reqsort",
    main = "reqsort.py",
    deps = [
        py_requirement("click"),
    ]
)

py_binary(
    name = "templater",
    main = "templater.py",
    deps = [
        py_requirement("click"),
        py_requirement("jinja2"),
        py_requirement("PyYAML"),
    ]
)

py_binary(
    name = "xfmt",
    main = "xfmt.py",
    deps = [
        py_requirement("beautifulsoup4"),
        py_requirement("click"),
        py_requirement("lxml"),
    ]
)

View file

@ -0,0 +1,14 @@
#!/usr/bin/env python3
"""
Shim for executing autoflake.
"""
import re
import sys

from autoflake import main

if __name__ == "__main__":
    # Strip setuptools-style script suffixes so the tool reports a clean
    # program name in its own usage/error output.
    sys.argv[0] = re.sub(r"(-script\.pyw|\.exe)?$", "", sys.argv[0])
    sys.exit(main())

View file

@ -0,0 +1,14 @@
#!/usr/bin/env python3
"""
Shim for executing black.
"""
import re
import sys
from black import patched_main
if __name__ == "__main__":
sys.argv[0] = re.sub(r"(-script\.pyw|\.exe)?$", "", sys.argv[0])
sys.exit(patched_main())

View file

@ -0,0 +1,12 @@
import sys
import pytest
if __name__ == "__main__":
print(sys.version_info, file=sys.stderr)
print(sys.argv, file=sys.stderr)
cmdline = ["--ignore=external"] + sys.argv[1:]
print(cmdline, file=sys.stderr)
sys.exit(pytest.main(cmdline))

View file

@ -0,0 +1,66 @@
"""Universal launcher for unit tests"""
import argparse
import logging
import os
import sys
import unittest
def main():
    """Parse args, collect tests and run them.

    Returns True when the whole run succeeded, False otherwise (the __main__
    guard below inverts this into a process exit code).
    """
    # Disable *.pyc files
    sys.dont_write_bytecode = True

    # Add ".." to module search path
    cur_dir = os.path.dirname(os.path.realpath(__file__))
    top_dir = os.path.abspath(os.path.join(cur_dir, os.pardir))
    sys.path.append(top_dir)

    # Parse command line arguments
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "-v",
        "--verbose",
        action="count",
        default=0,
        help="verbosity level, use: [-v | -vv | -vvv]",
    )
    parser.add_argument(
        "-s", "--start-directory", default=None, help="directory to start discovery"
    )
    parser.add_argument(
        "-p",
        "--pattern",
        default="test*.py",
        help="pattern to match test files ('test*.py' default)",
    )
    parser.add_argument(
        "test", nargs="*", help="test specs (e.g. module.TestCase.test_func)"
    )
    args = parser.parse_args()

    if not args.start_directory:
        args.start_directory = cur_dir

    if args.verbose > 2:
        logging.basicConfig(level=logging.DEBUG, format="DEBUG: %(message)s")

    loader = unittest.TestLoader()
    if args.test:
        # Add particular tests.
        # BUG FIX: the suite must be created once, OUTSIDE the loop; it was
        # previously re-created per spec so only the last spec's tests ran.
        suite = unittest.TestSuite()
        for test in args.test:
            suite.addTests(loader.loadTestsFromName(test))
    else:
        # Find all tests
        suite = loader.discover(args.start_directory, args.pattern)

    runner = unittest.TextTestRunner(verbosity=args.verbose)
    result = runner.run(suite)
    return result.wasSuccessful()


if __name__ == "__main__":
    # NOTE: True(success) -> 0, False(fail) -> 1
    exit(not main())

146
tools/python/defs.bzl Normal file
View file

@ -0,0 +1,146 @@
load("@arrdem_source_pypi//:requirements.bzl",
_py_requirement = "requirement"
)
load("@rules_python//python:defs.bzl",
_py_binary = "py_binary",
_py_test = "py_test",
_py_library = "py_library",
)
def py_requirement(*args, **kwargs):
    """A re-export of requirement().

    Thin pass-through to rules_python's pip `requirement()` so BUILD files
    only depend on this module.
    """
    return _py_requirement(*args, **kwargs)
def py_test(python_version=None, **kwargs):
    """A re-export of py_test().

    Rejects any explicit python_version other than "PY3" and always pins PY3.
    """
    if python_version and python_version != "PY3":
        fail("py3k only!")

    return _py_test(
        python_version="PY3",
        **kwargs,
    )
def py_pytest(name, srcs, deps, main=None, python_version=None, args=None, **kwargs):
    """A py_test target which uses pytest.

    Injects the shared pytest shim as the entry point and adds the pytest
    tooling deps on top of the caller's deps. `main` is accepted but unused —
    the shim always drives the run.
    """
    if python_version and python_version != "PY3":
        fail("py3k only!")

    f = "//tools/python:bzl_pytest_shim.py"

    deps = [
        py_requirement("pytest"),
        py_requirement("jedi"),
        py_requirement("pytest-pudb"),
    ] + deps

    srcs = [f] + srcs

    t = py_test(
        name = name,
        srcs = srcs,
        main = f,
        args = args,
        python_version="PY3",
        deps = deps,
        **kwargs,
    )

    # FIXME (arrdem 2020-09-27):
    #   This really needs to be a py_image_test.
    #   Not clear how to achieve that.
    # py_image(
    #     name = name + ".containerized",
    #     main = f,
    #     args = args,
    #     srcs = srcs,
    #     deps = deps,
    #     **kwargs,
    # )

    return t
def py_unittest(srcs=[], **kwargs):
    """A helper for running unittest tests.

    Injects the shared unittest shim as the test entry point and first source.
    """
    shim = "//tools/python:bzl_unittest_shim.py"
    return py_test(
        main = shim,
        srcs = [shim] + srcs,
        **kwargs
    )
def py_binary(python_version=None, main=None, srcs=None, **kwargs):
    """A re-export of py_binary().

    Pins Python 3 and ensures `main` is present in `srcs`, prepending it when
    absent so callers may pass just `main`.
    """
    if python_version and python_version != "PY3":
        fail("py3k only!")

    srcs = srcs or []
    # BUG FIX: only inject `main` when one was actually provided; previously a
    # None main was prepended into srcs, producing an invalid srcs list.
    if main and main not in srcs:
        srcs = [main] + srcs

    return _py_binary(
        python_version = "PY3",
        main = main,
        srcs = srcs,
        **kwargs,
    )
def py_library(srcs_version=None, **kwargs):
    """A re-export of py_library().

    Rejects any explicit srcs_version other than "PY3" and always pins PY3.
    """
    if srcs_version and srcs_version != "PY3":
        fail("py3k only!")

    kwargs["srcs_version"] = "PY3"
    return _py_library(**kwargs)
# Provider carrying the raw source labels of a py_resources() target so that
# downstream rules can inspect them directly.
ResourceGroupInfo = provider(
    fields = {
        "srcs": "files to use from Python",
    },
)
def _resource_impl(ctx):
    """Expose srcs as both ResourceGroupInfo and PyInfo transitive sources."""
    collected = []
    for target in ctx.attr.srcs:
        collected.extend(target.files.to_list())

    return [
        ResourceGroupInfo(
            srcs = ctx.attr.srcs,
        ),
        PyInfo(
            has_py2_only_sources = False,
            has_py3_only_sources = True,
            uses_shared_libraries = False,
            transitive_sources = depset(direct = collected),
        ),
    ]
# Rule wrapper: packages arbitrary data files so Python targets can depend on
# them as if they were sources.
py_resources = rule(
    implementation = _resource_impl,
    attrs = {
        "srcs": attr.label_list(
            allow_empty = True,
            mandatory = True,
            allow_files = True,
            doc = "Files to hand through to Python",
        ),
    },
)

View file

@ -0,0 +1,14 @@
#!/usr/bin/env python3
"""
Shim for executing isort.
"""
import re
import sys
from isort.main import main
if __name__ == "__main__":
sys.argv[0] = re.sub(r"(-script\.pyw?|\.exe)?$", "", sys.argv[0])
sys.exit(main())

View file

@ -0,0 +1,12 @@
#!/usr/bin/env python3
"""Shim for executing the openapi spec validator."""
import re
import sys
from openapi_spec_validator.__main__ import main
if __name__ == "__main__":
sys.argv[0] = re.sub(r"(-script\.pyw|\.exe)?$", "", sys.argv[0])
sys.exit(main())

72
tools/python/reqsort.py Normal file
View file

@ -0,0 +1,72 @@
"""
Platform independent sorting/formatting for requirements.txt
"""
import re
import click
# Matches either a strict `name[features]==version` pin or a `...#egg=name` URL.
REQ_PATTERN = re.compile(
    r"(?P<pkgname>[a-zA-Z0-9_-]+)(?P<features>\[.*?\])?==(?P<version>[^\s;#]+)|(.*?#egg=(?P<eggname>[a-zA-Z0-9_-]+))"
)

# Packages which should never be pinned in a requirements.txt; they are
# provided by the environment itself.
SHITLIST = [
    "pip",
    "pkg-resources",
    "setuptools",
]


def sort_key(requirement: str) -> str:
    """Compute a case- and punctuation-insensitive sort key for a requirement.

    Returns the package (or #egg=) name lowercased with "-" and "_" removed.
    Unparseable lines (e.g. blank lines) yield "" so they sort first.
    """
    requirement = requirement.lower()
    match = re.match(REQ_PATTERN, requirement)
    if match is None:
        # BUG FIX: a blank or otherwise unparseable line used to raise
        # AttributeError on `match.group(...)` and crash the whole tool.
        return ""
    return (
        (match.group("pkgname") or match.group("eggname"))  # Get the match group
        .replace("-", "")  # We ignore -
        .replace("_", "")  # And _
    )
@click.command()
@click.option("--execute/--dryrun", "execute", default=False)
@click.argument("requirements")
def main(requirements, execute):
    """Given the path of a requirements.txt, format it.

    If running in --execute, rewrite the source file with sorted contents and exit 0.
    If running in --dryrun, exit 0 if --execute would produce no changes otherwise exit 1.
    """
    with open(requirements) as f:
        lines = f.readlines()
        # Rewind and re-read the whole file so we can compare the sorted
        # output against the exact original bytes.
        f.seek(0)
        # Preserve an initial "buffer" for equality testing
        initial_buff = f.read()

    # Trim whitespace
    lines = [l.strip() for l in lines]
    # Discard comments and shitlisted packages.
    # (`and` short-circuits, so sort_key is never called on comment lines.)
    lines = [l for l in lines if not l.startswith("#") and not sort_key(l) in SHITLIST]
    # And sort, ignoring case explicitly
    lines = sorted(lines, key=sort_key)
    # And generate a new "buffer"
    new_buff = "\n".join(lines) + "\n"

    if new_buff != initial_buff and not execute:
        # Dryrun with a diff: report failure without touching the file.
        exit(1)
    else:
        # NOTE(review): this branch also runs for a clean dryrun, rewriting
        # the file with identical content — harmless, but worth confirming.
        with open(requirements, "w") as f:
            f.write(new_buff)
        exit(0)


if __name__ == "__main__":
    main()

View file

@ -0,0 +1,67 @@
alabaster==0.7.12
appdirs==1.4.4
attrs==20.3.0
autoflake==1.4
Babel==2.9.0
beautifulsoup4==4.9.3
black==20.8b1
certifi==2020.12.5
chardet==4.0.0
click==7.1.2
commonmark==0.9.1
docutils==0.17
idna==2.10
imagesize==1.2.0
iniconfig==1.1.1
isodate==0.6.0
isort==5.8.0
jedi==0.18.0
Jinja2==2.11.3
jsonschema==3.2.0
livereload==2.6.3
lxml==4.6.3
m2r==0.2.1
MarkupSafe==1.1.1
mistune==0.8.4
mypy-extensions==0.4.3
openapi-schema-validator==0.1.5
openapi-spec-validator==0.3.0
packaging==20.9
parso==0.8.2
pathspec==0.8.1
pluggy==0.13.1
pudb==2020.1
py==1.10.0
pyflakes==2.3.1
Pygments==2.8.1
pyparsing==2.4.7
pyrsistent==0.17.3
pytest==6.2.3
pytest-pudb==0.7.0
pytz==2021.1
PyYAML==5.4.1
recommonmark==0.7.1
regex==2021.4.4
requests==2.25.1
six==1.15.0
snowballstemmer==2.1.0
soupsieve==2.2.1
Sphinx==3.5.3
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==1.0.3
sphinxcontrib-httpdomain==1.7.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-openapi==0.7.0
sphinxcontrib-programoutput==0.17
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.4
toml==0.10.2
tornado==6.1
typed-ast==1.4.2
typing-extensions==3.7.4.3
unify==0.5
untokenize==0.1.1
urllib3==1.26.4
urwid==2.1.2
yamllint==1.26.1

218
tools/python/sphinx_shim.py Normal file
View file

@ -0,0 +1,218 @@
#!/usr/bin/env python3
"""A documentation generator.
This is a shim tool which wraps up a whole bunch of Sphinx internals in a single "convenient"
entrypoint. Former tweeps may recognize some parallels to the `docbird` tool developed by Twitter's
techdocs team.
"""
import builtins
from functools import wraps
import io
import os
import sys
import click
import livereload
from sphinx.application import Sphinx
from sphinx.cmd.build import main as build
from sphinx.cmd.quickstart import main as new
from sphinx.ext.apidoc import main as apidoc
from sphinx.ext.autosummary.generate import main as autosummary
from sphinx.util.docutils import docutils_namespace, patch_docutils
@click.group()
def cli():
    """A documentation generator.

    Just a shim to a variety of upstream Sphinx commands typically distributed as separate binaries
    for some dang reason.

    Note that due to subcommand argument parsing '-- --help' is likely required.

    Subcommands have not been renamed (or customized, yet) from their Sphinx equivalents.
    """


@cli.group()
def generate():
    """Subcommands for doing RST header generation."""


# The apidoc/summary/new subcommands deliberately accept raw, unprocessed argv
# (ignore_unknown_options + UNPROCESSED) and forward it verbatim to the
# corresponding upstream Sphinx entry point.
@generate.command(
    "apidoc",
    context_settings=dict(
        ignore_unknown_options=True,
    ),
)
@click.argument("argv", nargs=-1, type=click.UNPROCESSED)
def do_apidoc(argv):
    """Use sphinx.ext.apidoc to generate API documentation."""
    return apidoc(argv)


@generate.command(
    "summary",
    context_settings=dict(
        ignore_unknown_options=True,
    ),
)
@click.argument("argv", nargs=-1, type=click.UNPROCESSED)
def do_summary(argv):
    """Use sphinx.ext.autosummary to generate module summaries."""
    return autosummary(argv)


@cli.command(
    "new",
    context_settings=dict(
        ignore_unknown_options=True,
    ),
)
@click.argument("argv", nargs=-1, type=click.UNPROCESSED)
def do_new(argv):
    """Create a new Sphinx in the current directory."""
    return new(argv)
@cli.command(
    "build",
    context_settings=dict(
        ignore_unknown_options=True,
    ),
)
@click.argument("sourcedir")
@click.argument("outputdir")
@click.option("-c", "--confdir")
@click.option("-d", "--doctreedir")
@click.option("-b", "--builder", default="html")
@click.option("--freshenv/--no-freshenv", default=False)
@click.option("-W", "--warning-is-error", "werror", is_flag=True, flag_value=True)
@click.option("-t", "--tag", "tags", multiple=True)
def do_build(
    sourcedir, outputdir, confdir, doctreedir, builder, freshenv, werror, tags
):
    """Build a single Sphinx project."""
    # Default the config dir to the source dir and the doctree cache into the
    # output dir.
    if not confdir:
        confdir = sourcedir

    if not doctreedir:
        doctreedir = os.path.join(outputdir, ".doctrees")

    status = sys.stdout
    warning = sys.stderr
    # NOTE(review): `error` is assigned but never passed to Sphinx below.
    error = sys.stderr

    confdir = confdir or sourcedir  # Redundant with the guard above; harmless.
    confoverrides = {}  # FIXME: support these

    with patch_docutils(confdir), docutils_namespace():
        # Positional Sphinx(...) construction; the trailing 1, 4, False are
        # presumably verbosity, parallel jobs and keep_going — TODO confirm
        # against the sphinx.application.Sphinx signature for this version.
        app = Sphinx(
            sourcedir,
            confdir,
            outputdir,
            doctreedir,
            builder,
            confoverrides,
            status,
            warning,
            freshenv,
            werror,
            tags,
            1,
            4,
            False,
        )
        app.build(True, [])
        return app.statuscode
@cli.command(
    "serve",
    context_settings=dict(
        ignore_unknown_options=True,
    ),
)
@click.option("-h", "--host", default="localhost")
@click.option("-p", "--port", type=int, default=8080)
@click.argument("sourcedir")
@click.argument("outputdir")
def do_serve(host, port, sourcedir, outputdir):
    """Build and then serve a Sphinx tree."""
    sourcedir = os.path.realpath(sourcedir)
    outputdir = os.path.realpath(outputdir)
    server = livereload.Server()

    # HACK (arrdem 2020-10-31):
    #   Okay. This is an elder hack, and I'm proud of it.
    #
    #   The naive implementation of the watching server is to watch the input files, which is
    #   obviously correct. However, Sphinx has a BUNCH of operators like include and mdinclude and
    #   soforth which can cause a Sphinx doctree to have file dependencies OUTSIDE of the "trivial"
    #   source path dependency set.
    #
    #   In order to make sure that rebuilding does what the user intends, we trace calls to the
    #   open() function and attempt to dynamically discover the dependency set of the site. This
    #   allows us to trigger strictly correct rebuilds unlike other Sphinx implementations which
    #   need to be restarted under some circumstances.
    def opener(old_open):
        # Wraps the real open() so every file read during a build registers
        # itself as a watched dependency.
        @wraps(old_open)
        def tracking_open(path, mode="r", *args, **kw):
            file = old_open(path, mode, *args, **kw)

            if isinstance(path, int):
                # If you're doing something weird with file pointers, ignore it.
                pass
            else:
                path = os.path.realpath(path)
                if "w" in mode:
                    # If we're writing a file, it's an output for sure. Ignore it.
                    ignorelist.add(path)
                elif (
                    not path.startswith(outputdir)
                    and path not in ignorelist
                    and not path in watchlist
                ):
                    # Watch any source file (file we open for reading)
                    server.watch(path, build)
                    watchlist.add(path)
            return file

        return tracking_open

    # Outputs seen so far / inputs already being watched.
    ignorelist = set()
    watchlist = set()

    def build():
        # Run one build with open() patched in both builtins and io so the
        # dependency set is (re)discovered; always restore the real open().
        try:
            old_open = open
            builtins.open = opener(old_open)
            io.open = opener(old_open)
            # NOTE(review): invokes the click `build` command object directly;
            # it exits via SystemExit, which is swallowed here on purpose.
            do_build([sourcedir, outputdir])
        except SystemExit:
            pass
        finally:
            builtins.open = old_open
            io.open = old_open

    # Initial build populates the watch list before serving.
    build()
    server.watch(
        "conf.py", build
    )  # Not sure why this isn't picked up, but it doesn't seem to be.
    server.serve(port=port, host=host, root=outputdir)
if __name__ == "__main__":
    # Hack in a -- delimiter to bypass click arg parsing, so e.g.
    # `sphinx build src out` works without the user typing `-- build src out`.
    if not (sys.argv + [""])[1].startswith("-"):
        sys.argv = [sys.argv[0], "--"] + sys.argv[1:]

    # Use click subcommands for everything else
    exit(cli())

75
tools/python/templater.py Normal file
View file

@ -0,0 +1,75 @@
"""A tiny template(s) tool.
Processes Jekyll/Hyde/Hugo/... style 'fontmatter' headers, applying Jinja2/Liquid templating from an
optional templates and includes directory.
"""
import os
import re
import click
import jinja2
import yaml
# Matches an optional leading "---\n<yaml>\n---\n" header ("fontmatter")
# followed by the document body ("content").
FONTMATTER_PATTERN = re.compile(
    r"^(---\n\r?(?P<fontmatter>.*?)\n\r?---\n\r?)?(?P<content>.+)$", re.DOTALL
)


@click.command()
@click.option("-i", "--include", "include_dir", multiple=True)
@click.option("-t", "--template", "template_dir", multiple=True)
@click.option("-c", "--config", "config_file")
@click.argument("infile")
@click.argument("outfile")
def main(include_dir, template_dir, config_file, infile, outfile):
    """Apply templating.

    Consume infile, processing it with templating and write the results to outfile.
    """
    # Include and template dirs are treated identically for lookup purposes.
    loaders = []
    for d in include_dir:
        loaders.append(jinja2.FileSystemLoader(os.path.realpath(d)))

    for d in template_dir:
        loaders.append(jinja2.FileSystemLoader(os.path.realpath(d)))

    # Build a j2 environment using the potentially various loaders..
    environment = jinja2.Environment(loader=jinja2.ChoiceLoader(loaders))

    # Load a site config
    if config_file:
        with open(config_file) as f:
            site = yaml.safe_load(f.read())
    else:
        site = {}

    # Figure out doing the fontmatter nonsense...
    with open(infile, "r") as f:
        buff = f.read()
        match = re.match(FONTMATTER_PATTERN, buff)
        if fontmatter := match.group("fontmatter"):
            fontmatter = yaml.safe_load(fontmatter)
        else:
            fontmatter = {}

    # Render the file contents; `site` and `page` are exposed to templates.
    template = environment.from_string(match.group("content"))
    content = template.render(site=site, page=fontmatter)

    # If there's a configured `layout:` stick the content in the layout.
    if "layout" in fontmatter:
        template = environment.get_template(fontmatter.get("layout"))
        content = template.render(content=content, site=site, page=fontmatter)

    # And dump the results
    with open(outfile, "w") as f:
        f.write(content)


if __name__ == "__main__":
    main()

View file

@ -0,0 +1,142 @@
"""
Validate 3rdparty library licenses as approved.
"""
import re
import pytest
import requests
# Licenses approved as representing non-copyleft and not precluding commercial usage.
# This is all easy, there's a good schema here.
APPROVED_LICENSES = [
    "License :: OSI Approved :: MIT License",
    "License :: OSI Approved :: Apache Software License",
    "License :: OSI Approved :: BSD License",
    "License :: OSI Approved :: Mozilla Public License 1.0 (MPL)",
    "License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)",
    "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
    "License :: OSI Approved :: Python Software Foundation License",
    "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
    "License :: OSI Approved :: ISC License (ISCL)",
]

# This data is GARBO: a hand-maintained mapping from free-form license strings
# seen in PyPi metadata to the canonical trove classifiers above.
LICENSES_BY_LOWERNAME = {
    "apache 2.0": "License :: OSI Approved :: Apache Software License",
    "apache": "License :: OSI Approved :: Apache Software License",
    "bsd 3 clause": "License :: OSI Approved :: BSD License",
    "bsd": "License :: OSI Approved :: BSD License",
    "gplv3": "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
    "http://www.apache.org/licenses/license-2.0": "License :: OSI Approved :: Apache Software License",
    "isc": "License :: OSI Approved :: ISC License (ISCL)",
    "mit": "License :: OSI Approved :: MIT License",
    "mpl 2.0": "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
    "mpl": "License :: OSI Approved :: Mozilla Public License 1.0 (MPL)",
    "psf": "License :: OSI Approved :: Python Software Foundation License",
}

# Mash in some cases: every approved classifier's short name maps to itself.
LICENSES_BY_LOWERNAME.update(
    {l.split(" :: ")[-1].lower(): l for l in APPROVED_LICENSES}
)

# As a workaround for packages which don't have correct metadata on PyPi, hand-verified packages
APPROVED_PACKAGES = [
    "yamllint",  # WARNING: yamllint is GPL3'd.
    "Flask_Log_Request_ID",  # MIT, currently depended on as a git dep.
]
# Matches either a strict `name[features]==version` pin or a `...#egg=name` URL.
REQ_PATTERN = re.compile(
    r"(?P<pkgname>[a-zA-Z0-9_-]+)(?P<features>\[.*?\])?==(?P<version>[^\s;#]+)|(.*?#egg=(?P<eggname>[a-zA-Z0-9_-]+))"
)


def parse_requirement(line):
    """Given a requirement return the requirement name and version as a tuple.

    Only the strict `==` version pinning subset is supported.
    Features are supported.

    Returns None for lines that aren't requirements (comments, blanks).
    """
    m = REQ_PATTERN.match(line)
    if m is None:
        return None
    name = m.group("pkgname") or m.group("eggname")
    return name, m.group("version")
@pytest.mark.parametrize(
    "line,t",
    [
        ("foo==1.2.3", ("foo", "1.2.3")),
        ("foo[bar]==1.2.3", ("foo", "1.2.3")),
        ("foo[bar, baz, qux]==1.2.3", ("foo", "1.2.3")),
        # Various stuff we should ignore
        ("# comment line", None),
        (" # garbage whitespace", None),
        (" \t", None),
    ],
)
def test_parse_requirement(line, t):
    """The irony of testing one's tests is not lost."""
    assert parse_requirement(line) == t
# Parse the pinned requirements once at import time so each (package, version)
# pair can parametrize test_approved_license below.
# NOTE(review): path is workspace-relative — assumes the test runs from the
# repo/runfiles root; confirm against the py_pytest data= setup.
with open("tools/python/requirements.txt") as f:
    PACKAGES = [parse_requirement(l) for l in f.readlines()]
def bash_license(ln):
    """Normalize a free-form license string towards a trove classifier.

    Lowercases, strips punctuation and the words "version"/"license", then
    maps through LICENSES_BY_LOWERNAME when possible. Falsy inputs (None, "")
    are passed through unchanged.
    """
    if not ln:
        return ln
    cleaned = re.sub("[(),]|( version)|( license)", "", ln.lower())
    return LICENSES_BY_LOWERNAME.get(cleaned, cleaned)
def licenses(package, version):
    """Get package metadata (the licenses list) from PyPi.

    pip and other tools use the local package metadata to introspect licenses which requires that
    packages be installed. Going to PyPi isn't strictly reproducible both because the PyPi database
    could be updated and we could see network failures but there really isn't a good way to solve
    this problem.

    Returns a (possibly empty) list of normalized license strings.
    """
    l = []

    # If we don't have a version (eg. forked git dep) assume we've got the same license constraints
    # as the latest upstream release. After all we can't re-license stuff.
    if not version:
        blob = requests.get(f"https://pypi.python.org/pypi/{package}/json").json()
        if ln := bash_license(blob.get("license")):
            l.append(ln)
        else:
            # No usable top-level license; fall back to the newest release so
            # the version branch below can inspect its classifiers.
            # NOTE(review): takes the LAST key of the releases dict — assumes
            # PyPi returns releases oldest-first; TODO confirm.
            try:
                version = list(blob.get("releases", {}).keys())[-1]
            except IndexError:
                pass

    # If we have a version, try to pull that release's metadata since it may have more/better.
    if version:
        blob = requests.get(
            f"https://pypi.python.org/pypi/{package}/{version}/json"
        ).json()
        # Prefer explicit License classifiers; this overwrites anything
        # gathered above (which is only non-empty when version was falsy).
        l = [
            c
            for c in blob.get("info", {}).get("classifiers", [])
            if c.startswith("License")
        ]
        ln = blob.get("info", {}).get("license")
        if ln and not l:
            l.append(bash_license(ln))

    return l
@pytest.mark.parametrize("package,version", PACKAGES)
def test_approved_license(package, version):
    """Ensure that a given package is either allowed by name or uses an approved license."""
    # Hits the live PyPi API once per pinned requirement.
    _licenses = licenses(package, version)
    assert package in APPROVED_PACKAGES or any(
        l in APPROVED_LICENSES for l in _licenses
    ), f"{package} was not approved and its license(s) were unknown {_licenses!r}"

View file

@ -0,0 +1,11 @@
#!/usr/bin/env python3
"""
Shim for executing isort.
"""
from unify import main
if __name__ == "__main__":
exit(main())

25
tools/python/xfmt.py Normal file
View file

@ -0,0 +1,25 @@
#!/usr/bin/env python3
"""
A quick and dirty XML formatter.
"""
from bs4 import BeautifulSoup
import click
@click.command()
@click.argument("filename")
def main(filename):
    """Pretty-print the given XML file in place using BeautifulSoup's lxml-xml backend."""
    with open(filename) as f:
        bs = BeautifulSoup(f, "xml")

    with open(filename, "w") as of:
        of.write(bs.prettify())
        of.write("\n")

    # BUG FIX: this f-string had no placeholder and printed the literal text
    # "(unknown)"; report the file that was actually formatted.
    print(f"Formatted {filename}!")


if __name__ == "__main__":
    main()

View file

@ -0,0 +1,14 @@
#!/usr/bin/python
# WARNING: Yamllint is GPL3'd code.
"""A shim for executing yamllint."""
import re
import sys
from yamllint.cli import run
if __name__ == "__main__":
sys.argv[0] = re.sub(r"(-script\.pyw?|\.exe)?$", "", sys.argv[0])
sys.exit(run())

View file

@ -0,0 +1,7 @@
---
# Repo-wide yamllint configuration, consumed by tools/lint.sh via
# `yamllint -c tools/yamllint/yamllintrc`.
extends: default

rules:
  # Allow lines up to 100 columns (yamllint defaults to 80) and fail on overruns.
  line-length:
    max: 100
    level: error