Extract cram from source@438df41

commit e8c3bbc1f0

47 changed files with 2180 additions and 0 deletions

.gitignore (24 lines, vendored, new file)
@@ -0,0 +1,24 @@
.DS_Store
.cache
.dev
.dev
.idea
/**/#*
/**/*.egg-info
/**/*.log
/**/*.pyc
/**/*.pyc
/**/*.pyo
/**/.#*
/**/.mypy*
/**/.hypothesis*
/**/__pycache__
/**/_build
/**/_public
/**/build
/**/dist
/**/node_modules
bazel-*
projects/public-dns/config.yml
public/
tmp/
BUILD (27 lines, new file)
@@ -0,0 +1,27 @@
py_library(
    name = "lib",
    srcs = glob(["src/python/**/*.py"]),
    deps = [
        py_requirement("click"),
        py_requirement("toposort"),
        py_requirement("toml"),
    ]
)

zapp_binary(
    name = "cram",
    main = "src/python/cram/__main__.py",
    shebang = "/usr/bin/env python3",
    imports = [
        "src/python"
    ],
    deps = [
        ":lib",
    ],
)

sh_test(
    name = "integration_test_cram",
    srcs = glob(["integration_test.sh"]),
    data = glob(["test/integration/**/*"]) + [":cram"],
)
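Given these targets, building the zapp and running the integration test would look roughly like this (a sketch, assuming a working Bazel setup with the WORKSPACE below):

```shell
$ bazel build //:cram
$ bazel test //:integration_test_cram
```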
README.md (121 lines, new file)
@@ -0,0 +1,121 @@
# Cram

> To force (people or things) into a place or container that is or appears to be too small to contain them.

An alternative to GNU Stow, with some notion of packages with dependencies and install scripts.

Think Ansible, Puppet, or even NixOS, but architecture-agnostic and light enough to check in with your dotfiles.

## Overview

Cram operates on a directory of packages called `packages.d/`, and two directories of metapackages called `profiles.d/` and `hosts.d/`.
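For orientation, a config directory might be laid out like this (a hypothetical example; only the three top-level directories are prescribed):

```shell
$ tree -L 2 ~/conf
~/conf
├── packages.d/
│   └── tmux/
├── profiles.d/
│   └── default/
└── hosts.d/
    └── myhost/
```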
### Packages

A Cram package consists of a directory containing a `pkg.toml` file with the following format -

```toml
[cram]
version = 1

[package]
# The package.require list names depended-upon artifacts.
[[package.require]]
name = "packages.d/some-other-package"

# (optional) The package.build list enumerates either
# inline scripts or script files. These are run as a
# package is 'built' before it is installed.
[[package.build]]
run = "some-build-command"

# (optional) Hook script(s) which occur before installation.
[[package.pre_install]]
run = "some-hook"

# (optional) Override installation script(s).
# By default, everything under the package directory
# (the `pkg.toml` excepted) is treated as a file to be
# installed, and stow is emulated using symlinks.
[[package.install]]
run = "some-install-command"

# (optional) Hook script(s) which occur after installation.
[[package.post_install]]
run = "some-other-hook"
```

To take a somewhat real example from my own dotfiles -

```shell
$ tree -a packages.d/tmux
packages.d/tmux
├── pkg.toml
└── .tmux.conf
```

This tmux package provides only my `.tmux.conf` file, and a stub `pkg.toml` that does nothing.
A fancier setup could use `pkg.toml` to install tmux itself, either as a `pre_install` task or by using a separate tmux package and providing the config in a profile.
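Such a stub `pkg.toml` is just the version stanza and an empty package table, exactly as in the `profiles.d/default` test fixture later in this commit:

```toml
[cram]
version = 1

[package]
```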
### Metapackages

Writing lots of packages gets cumbersome quickly, as does managing long lists of explicit dependencies.
To try and manage this, Cram provides metapackages - packages which contain no stowable files, but instead contain subpackages.

To take a somewhat real example from my own dotfiles -

```shell
$ tree -a -L 1 profiles.d/macos
profiles.d/macos
├── pkg.toml
├── emacs/
├── homebrew/
└── zsh/
```

The `profiles.d/macos` package automatically depends on the contents of the `profiles.d/macos/emacs`, `profiles.d/macos/homebrew` and `profiles.d/macos/zsh` packages, which are normal packages.
These sub-packages can have normal dependencies on other packages, both within and without the profile, and can install files or run scripts.

Profiles allow users to write groups of related packages, especially configs, which go together, and allow for scoped reuse of meaningful names.

Likewise the `hosts.d/` tree allows users to store host-specific packages.
## Usage

```
$ cram apply [--dry-run|--execute] [--optimize] [--require <package>] <configdir> <destdir>
```

The `apply` task applies a configuration to a destination directory.
The most common uses are `--dry-run` (the default), which functions as a `diff`, and `--execute ~/conf ~/`, which emulates Stow and installs dotfiles.

By default `cram` installs two packages - `profiles.d/default` and `hosts.d/$(hostname -s)`.
This default can be overridden by providing `--require <package>` one or more times to enumerate specific packages to install.

Cram always reads the `.cram.log` state file and diffs the current state against the configured state.
Files and directories no longer defined by the configured state are cleaned up automatically.
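For instance, a dry-run prints the pending operation log and touches nothing (paths here are hypothetical; the `- <op> ...` lines mirror the format `cram` prints for each log entry):

```shell
$ cram apply ~/conf ~/
- mkdir /home/me/.config
- link /home/me/conf/packages.d/tmux/.tmux.conf /home/me/.tmux.conf
```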
```
$ cram state <configdir>
```

The `state` task loads up and prints the `.cram.log` state file generated by any previous `cram apply --execute`, so you can read a manifest of what cram thinks it did.
This is useful because `cram` attempts to optimize repeated executions and implement change detection using the state file.

This cache can be busted if needed by using `apply --execute --no-optimize`, which will cause cram to take all actions it deems presently required.
This can result in dangling symlinks in the filesystem.
```
$ cram list <configdir> [package]
```

The `list` task lists out all available packages (e.g. packages, profiles, hosts, and subpackages) as a dependency graph.
When provided a specific package, the details of that package (its requirements and installation task log) will be printed.
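Illustrative output against this commit's test fixtures (truncated; the `name: (Type)` and `- dep` format mirrors `do_list` in `__main__.py`):

```shell
$ cram list test/integration/
hosts.d/test: (PackageV1)
- packages.d/p1
packages.d/p1: (PackageV0)
packages.d/p3: (PackageV0)
- packages.d/p1
- packages.d/p2
...
```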
## License

Copyright Reid 'arrdem' McKenzie, 15/02/2022.

Published under the terms of the Anticapitalist Software License (https://anticapitalist.software).

Unlimited commercial licensing is available at nominal pricing.
WORKSPACE (75 lines, new file)
@@ -0,0 +1,75 @@
# WORKSPACE
#
# This file exists to configure the Bazel (https://bazel.build/) build tool to our needs.
# Particularly, it installs rule definitions and other capabilities which aren't in Bazel core.
# In the future we may have our own modifications to this config.

# Install the blessed Python and PyPi rule support
# From https://github.com/bazelbuild/rules_python

workspace(
    name = "arrdem_cram",
)

load(
    "@bazel_tools//tools/build_defs/repo:http.bzl",
    "http_archive",
    "http_file",
)
load(
    "@bazel_tools//tools/build_defs/repo:git.bzl",
    "git_repository",
)

####################################################################################################
# Skylib
####################################################################################################
git_repository(
    name = "bazel_skylib",
    remote = "https://github.com/bazelbuild/bazel-skylib.git",
    tag = "1.0.3",
)
load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace")
bazel_skylib_workspace()

####################################################################################################
# Python support
####################################################################################################

# Using rules_python at a more recent SHA than the last release like a baws
git_repository(
    name = "rules_python",
    remote = "https://github.com/bazelbuild/rules_python.git",
    # tag = "0.4.0",
    commit = "888fa20176cdcaebb33f968dc7a8112fb678731d",
)

register_toolchains("//tools/python:python3_toolchain")

# pip package pinnings need to be initialized.
# This generates a bunch of bzl rules so that each pip dep is a bzl target.
load("@rules_python//python:pip.bzl", "pip_parse")

pip_parse(
    name = "arrdem_cram_pypi",
    requirements_lock = "//tools/python:requirements.txt",
    python_interpreter_target = "//tools/python:pythonshim",
)

# Load the starlark macro which will define your dependencies.
load("@arrdem_cram_pypi//:requirements.bzl", "install_deps")

# Call it to define repos for your requirements.
install_deps()

git_repository(
    name = "rules_zapp",
    remote = "https://github.com/arrdem/rules_zapp.git",
    commit = "d7a0382927fb8a68115b560f4fee7dca743068f8",
    # tag = "0.1.2",
)

# local_repository(
#     name = "rules_zapp",
#     path = "/home/arrdem/doc/hobby/programming/lang/python/rules_zapp",
# )
integration_test.sh (96 lines, new executable file)
@@ -0,0 +1,96 @@
#!/usr/bin/env bash

set -ex

dest=$(mktemp -d)

./cram --help

root="test/integration/"

# Should be able to list all packages
./cram list "$root" | grep "packages.d/p1"

# P3 depends on P1, should show up in the listing
./cram list "$root" packages.d/p3 | grep "packages.d/p1"

# P4 depends on P3, should show up in the listing
./cram list "$root" packages.d/p4 | grep "packages.d/p3"

# The default profile should depend on its subpackage
./cram list "$root" profiles.d/default | grep "profiles.d/default/subpackage"

# And the subpackage has a dep
./cram list "$root" profiles.d/default/subpackage | grep "packages.d/p3"

# Install one package
./cram apply --no-optimize --require packages.d/p1 --execute "$root" "${dest}"
[ -L "${dest}"/foo ]
./cram state "$root" | grep "${dest}/foo"
rm -r "${dest}"/*

# Install two transitively (legacy)
./cram apply --no-optimize --require packages.d/p3 --execute "$root" "${dest}"
[ -L "${dest}"/foo ]
[ -L "${dest}"/bar ]
./cram state "$root" | grep "${dest}/foo"
./cram state "$root" | grep "${dest}/bar"
rm -r "${dest}"/*

# Install two transitively (current)
./cram apply --no-optimize --require packages.d/p4 --execute "$root" "${dest}"
[ -L "${dest}"/foo ]
[ -L "${dest}"/bar ]
rm -r "${dest}"/*

# Install two transitively (current)
./cram apply --no-optimize --require packages.d/p4 --execute "$root" "${dest}"
[ -L "${dest}"/foo ]
[ -L "${dest}"/bar ]
rm -r "${dest}"/*

# Install two transitively via the host metapackage and default profile
./cram apply --no-optimize --require hosts.d/test --require profiles.d/default --execute "$root" "${dest}"
[ -L "${dest}"/foo ]
[ -L "${dest}"/bar ]
rm -r "${dest}"/*

# INSTALL scripts get run as-is
./cram list "$root" packages.d/p5 | grep "packages.d/p5/INSTALL"

# Inline scripts get pulled out repeatably
./cram list "$root" packages.d/p6 | grep "b5bea41b6c623f7c09f1bf24dcae58ebab3c0cdd90ad966bc43a45b44867e12b"

# Inline scripts get pulled out repeatably, even from the list format
./cram list "$root" packages.d/p7 | grep "b5bea41b6c623f7c09f1bf24dcae58ebab3c0cdd90ad966bc43a45b44867e12b"

# Test log-based optimization
./cram apply --no-optimize --require packages.d/p4 --execute "$root" "${dest}"
[ -L "${dest}"/foo ]
[ -L "${dest}"/bar ]
# These paths were already linked, they shouldn't be re-linked when optimizing.
! ./cram apply --require packages.d/p4 --optimize --execute "$root" "${dest}" | grep "${dest}/foo"
! ./cram apply --require packages.d/p4 --optimize --execute "$root" "${dest}" | grep "${dest}/bar"
rm -r "${dest}"/*

# Likewise, if we've exec'd this once we shouldn't do it again
./cram apply --no-optimize --require packages.d/p5 --execute "$root" "${dest}"
! ./cram apply --require packages.d/p5 --execute "$root" "${dest}" | grep "exec"

# ... unless the user tells us to
./cram apply --no-optimize --require packages.d/p5 --execute "$root" "${dest}"
./cram apply --exec-always --require packages.d/p5 --execute "$root" "${dest}" | grep "exec"

# If multiple packages provide the same _effective_ script, do it once
./cram apply --require packages.d/p6 --require packages.d/p7 --execute "$root" "${dest}" | sort | uniq -c | grep "/tmp/stow/b5bea41b6c623f7c09f1bf24dcae58ebab3c0cdd90ad966bc43a45b44867e12b.sh" | grep "1 - exec"

# Test log-based cleanup
./cram apply --require packages.d/p1 --require packages.d/p2 --execute "$root" "${dest}"
[ -L "${dest}"/foo ]
[ -L "${dest}"/bar ]
# Now bar shouldn't be installed...
./cram state test/
./cram apply --require packages.d/p1 --execute "$root" "${dest}"
./cram state test/
[ -L "${dest}"/foo ]
[ ! -L "${dest}"/bar ]
src/python/cram/__init__.py (4 lines, new file)
@@ -0,0 +1,4 @@
__version__ = "0.1.0"
__author__ = "Reid D. 'arrdem' McKenzie <me@arrdem.com>"
__copyright__ = "Copyright 2020"
__license__ = "https://anticapitalist.software/"
src/python/cram/__main__.py (339 lines, new file)
@@ -0,0 +1,339 @@
"""Cram's entry point."""

from itertools import chain
import logging
import os
from pathlib import Path
import pickle
from typing import List

from . import (
    __author__,
    __copyright__,
    __license__,
    __version__,
)
from .v0 import PackageV0, ProfileV0
from .v1 import PackageV1, ProfileV1

import click
import toml
from toposort import toposort_flatten
from vfs import Vfs


log = logging.getLogger(__name__)


def _exit(val):
    logging.shutdown()
    exit(val)


def load(root: Path, name: str, clss):
    # Try each package implementation in order; the first whose test() passes wins.
    for c in clss:
        i = c(root, name)
        if i.test():
            return i


def load_package(root, name):
    log.debug(f"Attempting to load package {name} from {root}")
    return load(root, name, [PackageV1, PackageV0])


def load_profile(root, name):
    log.debug(f"Attempting to load profile {name} from {root}")
    return load(root, name, [ProfileV1, ProfileV0])


def load_packages(root: Path) -> dict:
    """Load the configured packages."""

    packages = {}
    log.debug(f"Trying to load packages from {root}...")
    for p in (root / "packages.d").glob("*"):
        name = str(p.relative_to(root))
        packages[name] = load_package(p, name)

    # Add profiles, hosts which contain subpackages.
    for mp_root in chain((root / "profiles.d").glob("*"), (root / "hosts.d").glob("*")):

        # First find all subpackages
        for p in mp_root.glob("*"):
            if p.is_dir():
                name = str(p.relative_to(root))
                packages[name] = load_package(p, name)

        # Register the metapackages themselves using the profile type
        mp_name = str(mp_root.relative_to(root))
        packages[mp_name] = load_profile(mp_root, mp_name)

    return packages
def build_fs(root: Path, dest: Path, prelude: List[str]) -> Vfs:
    """Build a VFS by configuring dest from the given config root."""

    packages = load_packages(root)
    requirements = []
    requirements.extend(prelude)

    if packages:
        for p in packages:
            log.debug(f"Loaded package {p}")
    else:
        log.warning("Loaded no packages!")

    # Transitively close the requirements set.
    for r in requirements:
        try:
            for d in packages[r].requires():
                if d not in requirements:
                    requirements.append(d)
        except KeyError:
            log.fatal(f"Error: Unable to load package {r}")
            _exit(1)

    # Compute the topsort graph
    requirements = {r: packages[r].requires() for r in requirements}
    fs = Vfs()

    # Abstractly execute the current packages
    for r in toposort_flatten(requirements):
        r = packages[r]
        r.install(fs, dest)

    return fs
def load_state(statefile: Path) -> Vfs:
    """Load a persisted VFS state from disk. Sort of."""

    oldfs = Vfs([])

    if statefile.exists():
        log.debug("Loading statefile %s", statefile)
        with open(statefile, "rb") as fp:
            oldfs._log = pickle.load(fp)
    else:
        log.warning("No previous statefile %s", statefile)

    return oldfs
def simplify(old_fs: Vfs, new_fs: Vfs, /, exec_idempotent=True) -> Vfs:
    """Try to reduce a new VFS using diff from the original VFS."""

    old_fs = old_fs.copy()
    new_fs = new_fs.copy()

    # Scrub anything in the new log that's in the old log
    for txn in list(old_fs._log):
        # Except for execs, which are stateful
        if txn[0] == "exec" and not exec_idempotent:
            continue

        try:
            new_fs._log.remove(txn)
        except ValueError:
            pass

    # Dedupe the new log while preserving order.
    keys = set()
    deduped = []
    for op in new_fs._log:
        key = str(op)
        if key not in keys:
            keys.add(key)
            deduped.append(op)
    new_fs._log = deduped

    return new_fs
def scrub(old_fs: Vfs, new_fs: Vfs) -> Vfs:
    """Try to eliminate files which were previously installed but are no longer used."""

    old_fs = old_fs.copy()
    new_fs = new_fs.copy()
    cleanup_fs = Vfs([])

    # Look for files in the old log which are no longer present in the new log
    for txn in old_fs._log:
        if txn[0] == "link" and txn not in new_fs._log:
            cleanup_fs.unlink(txn[2])

        elif txn[0] == "mkdir" and txn not in new_fs._log:
            cleanup_fs.unlink(txn[1])

    # Do unlink operations before we do install operations.
    # This works around being unable to finely stratify uninstall operations over their source packages.
    cleanup_fs.merge(new_fs)

    return cleanup_fs
@click.group()
@click.version_option(version=1, message=f"""Cram {__version__}

Documentation
https://github.com/arrdem/source/tree/trunk/projects/cram/

Features
- 0.0.0 legacy config format
- 0.1.0 TOML config format
- 0.1.0 log based optimizer
- 0.1.0 idempotent default for scripts

About
{__copyright__}, {__author__}.
Published under the terms of the {__license__} license.
""")
def cli():
    pass


@cli.command("apply")
@click.option("--execute/--dry-run", default=False)
@click.option("--force/--no-force", default=False)
@click.option("--state-file", default=".cram.log", type=Path)
@click.option("--optimize/--no-optimize", default=True)
@click.option("--require", type=str, multiple=True, default=[f"hosts.d/{os.uname()[1].split('.')[0]}", "profiles.d/default"])
@click.option("--exec-idempotent/--exec-always", "exec_idempotent", default=True)
@click.argument("confdir", type=Path)
@click.argument("destdir", type=Path)
def do_apply(confdir, destdir, state_file, execute, optimize, force, require, exec_idempotent):
    """The entry point of cram."""

    # Resolve the two input paths to absolutes
    root = confdir.resolve()
    dest = destdir.resolve()

    if not root.is_dir():
        log.fatal(f"{confdir} does not exist!")
        _exit(1)

    if not state_file.is_absolute():
        state_file = root / state_file

    if not force:
        old_fs = load_state(state_file)
        log.debug(f"Loaded old state consisting of {len(old_fs._log)} steps")
    else:
        # Force an empty state
        old_fs = Vfs([])

    new_fs = build_fs(root, dest, require)
    log.debug(f"Built new state consisting of {len(new_fs._log)} steps")

    # Middleware processing of the resulting filesystem(s)
    executable_fs = scrub(old_fs, new_fs)
    if optimize:
        executable_fs = simplify(old_fs, executable_fs,
                                 exec_idempotent=exec_idempotent)

    # Dump the new state.
    # Note that we dump the UNOPTIMIZED state, because we want to simplify relative to complete states.
    def cb(e):
        print("-", *e)

    if execute:
        executable_fs.execute(callback=cb)

        with open(state_file, "wb") as fp:
            pickle.dump(new_fs._log, fp)

    else:
        for e in executable_fs._log:
            cb(e)


@cli.command("list")
@click.option("-1", "--oneline", is_flag=True, default=False, help="Only list names of resources")
@click.argument("confdir", type=Path)
@click.argument("requirements", nargs=-1)
def do_list(confdir, requirements, oneline):
    """List out packages, profiles, hosts and subpackages in the <confdir>."""
    root = confdir.resolve()

    if not root.is_dir():
        log.fatal(f"{confdir} does not exist!")
        _exit(1)

    packages = load_packages(root)

    if requirements:
        dest = Path("~/")
        for pname in requirements:
            fs = Vfs()
            p = packages[pname]
            p.install(fs, dest)
            print(f"{pname}: ({type(p).__name__})")
            print("requires:")
            for e in p.requires():
                print(" -", e)
            print("log:")
            for e in fs._log:
                print(" -", *e)

    elif oneline:
        for pname in sorted(packages.keys()):
            print(pname)
    else:
        for pname in sorted(packages.keys()):
            p = packages[pname]
            print(f"{pname}: ({type(p).__name__})")
            for d in p.requires():
                print(f"- {d}")


@cli.command("state")
@click.option("--state-file", default=".cram.log", type=Path)
@click.argument("confdir", type=Path)
def do_state(confdir, state_file):
    """List out the last `apply` state in the <confdir>/.cram.log or --state-file."""
    root = confdir.resolve()

    if not root.is_dir():
        log.fatal(f"{confdir} does not exist!")
        _exit(1)

    if not state_file.is_absolute():
        state_file = root / state_file

    fs = load_state(state_file)
    for e in fs._log:
        print("-", *e)


@cli.command("fmt")
@click.argument("confdir", type=Path)
@click.argument("requirement", type=str)
def do_fmt(confdir, requirement):
    """Format the specified requirement to a canonical-ish representation."""

    root = confdir.resolve()

    if not root.is_dir():
        log.fatal(f"{confdir} does not exist!")
        _exit(1)

    packages = load_packages(root)
    pkg = packages[requirement]
    json = pkg.json()

    # Remove the v0 special files; the package is reduced to a single pkg.toml.
    for suffix in pkg.SPECIAL_FILES:
        f = (root / requirement / suffix)
        if f.exists():
            f.unlink()

    with open(root / requirement / "pkg.toml", "w") as fp:
        toml.dump(json, fp)


if __name__ == "__main__" or 1:  # NOTE: `or 1` makes this run unconditionally, including on import
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    )

    cli()
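Stepping back to `build_fs`: the requirement graph is linearized with `toposort_flatten`, so dependencies install before their dependents. A minimal sketch of that behavior (package names borrowed from this commit's test fixtures; note the toposort library expects set-valued dependencies):

```python
from toposort import toposort_flatten

# p3 requires p1 and p2; p1 and p2 require nothing.
requirements = {
    "packages.d/p3": {"packages.d/p1", "packages.d/p2"},
    "packages.d/p1": set(),
    "packages.d/p2": set(),
}

# Dependencies sort before their dependents, so p3 comes last.
print(toposort_flatten(requirements))
# e.g. ['packages.d/p1', 'packages.d/p2', 'packages.d/p3']
```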
src/python/cram/common.py (86 lines, new file)
@@ -0,0 +1,86 @@
#!/usr/bin/env python3

import os
from pathlib import Path
from shlex import quote as sh_quote
import sys
from typing import List, Optional

from vfs import Vfs


# FIXME: This should be a config somewhere
SHELL = "/bin/sh"

# Light monkeypatching, because macOS ships a "stable" (old) Python
if sys.version_info <= (3, 9, 0):
    Path.readlink = lambda p: Path(os.readlink(str(p)))


def sh(cmd: List[str], /,
       env: Optional[dict] = None):

    prefix = []
    if env:
        prefix.append("/usr/bin/env")
        for k, v in env.items():
            v = sh_quote(str(v))
            prefix.append(f"{k}={v}")

    return tuple(prefix + [SHELL, *cmd])


def stow(fs: Vfs, src_dir: Path, dest_dir: Path, skip=[]):
    """Recursively 'stow' (link) the contents of the source into the destination."""

    dest_root = Path(dest_dir)
    src_root = Path(src_dir)
    skip = [src_root / n for n in skip]

    for src in src_root.glob("**/*"):
        if src in skip:
            continue

        elif src.name.endswith(".gitkeep"):
            continue

        dest = dest_root / src.relative_to(src_root)
        if src.is_symlink():
            fs.link(src.readlink().resolve(), dest)

        elif src.is_dir():
            fs.mkdir(dest)
            fs.chmod(dest, src.stat().st_mode)

        elif src.is_file():
            fs.link(src, dest)


class Package(object):
    def __init__(self, root: Path, name: str):
        self.root = root
        self.name = name

    def test(self):
        return True

    def requires(self):
        return []

    def install(self, fs: Vfs, dest: Path):
        self.do_build(fs, dest)
        self.pre_install(fs, dest)
        self.do_install(fs, dest)
        self.post_install(fs, dest)

    def do_build(self, fs: Vfs, dest: Path):
        pass

    def pre_install(self, fs: Vfs, dest: Path):
        pass

    def do_install(self, fs: Vfs, dest: Path):
        pass

    def post_install(self, fs: Vfs, dest: Path):
        pass
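A usage note on `sh` (a sketch, not part of the commit): it only builds an argv tuple for a later `Vfs.exec`; with an `env` it prefixes `/usr/bin/env` and shell-quoted bindings. The `./INSTALL` script name here is hypothetical:

```python
from cram.common import sh

# No env: just wrap the script in the configured shell.
print(sh(["./INSTALL"]))
# ('/bin/sh', './INSTALL')

# With env: prefix with /usr/bin/env and KEY=value bindings.
print(sh(["./INSTALL"], env={"DEST": "/home/me"}))
# ('/usr/bin/env', 'DEST=/home/me', '/bin/sh', './INSTALL')
```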
src/python/cram/v0.py (109 lines, new file)
@@ -0,0 +1,109 @@
"""Cram's original (v0) configs.

An ill-considered pseudo-format.
"""

from pathlib import Path
import re

from .common import Package, sh, stow

from vfs import Vfs


class PackageV0(Package):
    """The original package format from install.sh."""

    SPECIAL_FILES = ["BUILD", "PRE_INSTALL", "INSTALL", "POST_INSTALL", "REQUIRES"]

    def requires(self):
        """Get the dependencies of this package."""
        requiresf = self.root / "REQUIRES"
        requires = []

        # Listed dependencies, one per line, with comments stripped
        if requiresf.exists():
            with open(requiresf) as fp:
                for l in fp:
                    l = l.strip()
                    l = re.sub(r"\s*#.*$", "", l)
                    if l:
                        requires.append(l)

        return requires

    def install(self, fs: Vfs, dest: Path):
        """Install this package."""
        buildf = self.root / "BUILD"
        if buildf.exists():
            fs.exec(self.root, sh([str(buildf)]))

        pref = self.root / "PRE_INSTALL"
        if pref.exists():
            fs.exec(self.root, sh([str(pref)]))

        installf = self.root / "INSTALL"
        if installf.exists():
            fs.exec(self.root, sh([str(installf)]))
        else:
            stow(fs, self.root, dest, self.SPECIAL_FILES)

        postf = self.root / "POST_INSTALL"
        if postf.exists():
            fs.exec(self.root, sh([str(postf)]))

    def _read(self, p: Path):
        if p.exists():
            with open(p) as fp:
                return fp.read()
        else:
            return None

    def json(self):
        buildt = self._read(self.root / "BUILD")
        pret = self._read(self.root / "PRE_INSTALL")
        installt = self._read(self.root / "INSTALL")
        postt = self._read(self.root / "POST_INSTALL")

        o = {"cram": {"version": 1}, "package": {"require": []}}

        if buildt:
            o["package"]["build"] = [{"run": buildt}]
        if pret:
            o["package"]["pre_install"] = [{"run": pret}]
        if installt:
            o["package"]["install"] = [{"run": installt}]
        if postt:
            o["package"]["post_install"] = [{"run": postt}]

        o["package"]["require"] = [{"name": it} for it in sorted(self.requires())]

        return o


class ProfileV0(PackageV0):
    def requires(self):
        requires = super().requires()
        for p in self.root.glob("*"):
            if p.is_dir():
                requires.append(self.name + "/" + p.name)
        return requires

    def install(self, fs: Vfs, dest: Path):
        """Profiles differ from Packages in that they don't support literal files."""

        buildf = self.root / "BUILD"
        if buildf.exists():
            fs.exec(self.root, sh([str(buildf)]))

        pref = self.root / "PRE_INSTALL"
        if pref.exists():
            fs.exec(self.root, sh([str(pref)]))

        installf = self.root / "INSTALL"
        if installf.exists():
            fs.exec(self.root, sh([str(installf)]))

        postf = self.root / "POST_INSTALL"
        if postf.exists():
            fs.exec(self.root, sh([str(postf)]))
src/python/cram/v1.py (116 lines, new file)
@@ -0,0 +1,116 @@
"""Cram's v1 configs.

Based on well* defined TOML manifests, rather than many files.

*Okay. Better.
"""

from hashlib import sha256
from pathlib import Path
from typing import List, Optional, Union

from .common import Package, sh, stow

import toml
from vfs import Vfs


def tempf(name):
    root = Path("/tmp/stow")
    root.mkdir(exist_ok=True, parents=True)
    return root / name


class PackageV1(Package):
    """The v1 package format."""

    SPECIAL_FILES = ["pkg.toml"]
    _config = None

    def config(self):
        if not self._config:
            with open(self.root / self.SPECIAL_FILES[0], "r") as fp:
                self._config = toml.load(fp)
        return self._config

    def test(self):
        return (self.root / self.SPECIAL_FILES[0]).exists() and self.config().get("cram", {}).get("version") == 1

    def requires(self):
        """Get the dependencies of this package."""

        def _name(it):
            # Requirements may be bare strings or {name = "..."} tables.
            if isinstance(it, str):
                return it
            elif isinstance(it, dict):
                return it["name"]

        return [
            _name(it) for it in self.config().get("package", {}).get("require", [])
        ]

    def do_sh_or_script(self, content: Optional[Union[List[str], str]], fs: Vfs, dest: Path, cwd: Path = "/tmp"):
        """Schedule content as script(s); returns truthy if anything was scheduled."""
        if content is None:
            return False

        elif isinstance(content, list):
            done = False
            for c in content:
                done = self.do_sh_or_script(c, fs, dest) or done
            return done

        elif isinstance(content, dict):
            return self.do_sh_or_script(
                content["run"],
                fs,
                dest,
                # The package root becomes the cwd iff root = "cwd" is set; /tmp otherwise.
                {"cwd": self.root}.get(content.get("root"), "/tmp")
            )

        elif isinstance(content, str):
            sum = sha256()
            sum.update(content.encode("utf-8"))
            sum = sum.hexdigest()

            installf = self.root / content
            if installf.exists():
                with open(installf, "r") as fp:
                    return self.do_sh_or_script(fp.read(), fs, dest)

            elif content:
                # Inline scripts land in a content-addressed tempfile.
                f = tempf(f"{sum}.sh")
                with open(f, "w") as fp:
                    fp.write(content)
                fs.exec(cwd, sh([str(f)]))
                return True

    def do_build(self, fs: Vfs, dest: Path):
        self.do_sh_or_script(self.config().get("package", {}).get("build"), fs, dest)

    def pre_install(self, fs: Vfs, dest: Path):
        self.do_sh_or_script(self.config().get("package", {}).get("pre_install"), fs, dest)

    def do_install(self, fs: Vfs, dest: Path):
        if not self.do_sh_or_script(self.config().get("package", {}).get("install"), fs, dest):
            stow(fs, self.root, dest, self.SPECIAL_FILES)

    def post_install(self, fs: Vfs, dest: Path):
        self.do_sh_or_script(self.config().get("package", {}).get("post_install"), fs, dest)

    def json(self):
        return self.config()


class ProfileV1(PackageV1):
    """Unlike packages, profiles don't support recursive stow of contents."""

    def do_install(self, fs: Vfs, dest: Path):
        self.do_sh_or_script(self.config().get("package", {}).get("install"), fs, dest)

    def requires(self):
        requires = super().requires()

        # Implicitly depended subpackages
        for p in self.root.glob("*"):
            if p.is_dir():
                requires.append(self.name + "/" + p.name)

        return requires
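The content-addressed tempfile naming is exactly what the integration test greps for. A small illustration of the correspondence, with values taken from this commit's fixtures and test:

```python
from hashlib import sha256

# p6's pkg.toml declares install = "true"; the inline script body is "true".
digest = sha256("true".encode("utf-8")).hexdigest()
print(digest)
# b5bea41b6c623f7c09f1bf24dcae58ebab3c0cdd90ad966bc43a45b44867e12b
# ... which names the /tmp/stow/<digest>.sh file integration_test.sh asserts on.
```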
src/python/vfs/__init__.py (5 lines, new file)
@@ -0,0 +1,5 @@
"""
The published interface of the VFS package.
"""

from .impl import Vfs  # noqa
src/python/vfs/impl.py (111 lines, new file)
@@ -0,0 +1,111 @@
"""
The implementation.
"""

import logging
from shutil import rmtree
from subprocess import run


_log = logging.getLogger(__name__)


class Vfs(object):
    """An abstract filesystem device which can accumulate changes, and apply them in a batch."""

    def __init__(self, log=None):
        self._log = log or []

    def _execute_exec(self, e):
        _, dir, cmd = e
        run(cmd, cwd=str(dir))

    def _execute_link(self, e):
        _, src, dest = e
        if dest.is_file() or dest.is_symlink():
            if dest.is_symlink() and dest.readlink() == src:
                return
            else:
                _log.warning(f"Replacing {dest}")
                dest.unlink()
        elif dest.is_dir():
            _log.warning(f"Replacing {dest}")
            rmtree(dest)

        assert not dest.exists(), f"{dest} should not exist"
        dest.symlink_to(src)

    def _execute_chmod(self, e):
        _, dest, mode = e
        dest.chmod(mode)

    def _execute_mkdir(self, e):
        _, dest = e
        if dest.is_dir():
            return
        elif dest.exists() or dest.is_symlink():
            dest.unlink()

        dest.mkdir(exist_ok=True)

    def _execute_unlink(self, e):
        _, dest = e
        # Note that a path which is a dangling symlink will NOT exist but WILL be a symlink
        if not dest.exists() and not dest.is_symlink():
            return
        # Files and dirs just unlink
        if dest.is_symlink() or dest.is_file():
            dest.unlink()
        # Dirs require recursion
        elif dest.is_dir():
            rmtree(dest)
        # Don't succeed silently
        else:
            raise Exception(f"Couldn't unlink {dest}")

    def _execute_unimplemented(self, e):
        raise NotImplementedError()

    def _entry_to_command(self, e):
        return e

    def execute(self, /, callback=None):
        for e in self._log:
            cmd = self._entry_to_command(e)
            _log.debug("Executing %r as %r", e, cmd)

            if callback:
                callback(cmd)

            # Using self as a dispatch table lol
            getattr(self, f"_execute_{cmd[0]}", self._execute_unimplemented)(cmd)

    def _command_to_entry(self, cmd):
        return cmd

    def _append(self, cmd):
        self._log.append(self._command_to_entry(cmd))

    def link(self, src, dest):
        self._append(("link", src, dest))

    def chmod(self, dest, mode):
        self._append(("chmod", dest, mode))

    def mkdir(self, dest):
        self._append(("mkdir", dest))

    def exec(self, dest, cmd):
        self._append(("exec", dest, cmd))

    def unlink(self, dest):
        self._append(("unlink", dest))

    def copy(self):
        # Shallow-clone the device; the log list is copied, entries are shared.
        return Vfs(list(self._log))

    def merge(self, other: "Vfs"):
        self._log.extend(other._log)
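For a feel of the API (a usage sketch, not part of the commit): operations are only recorded at call time and touch disk when `execute()` runs the batch:

```python
from pathlib import Path

from vfs import Vfs

fs = Vfs()
fs.mkdir(Path("/tmp/demo"))                           # recorded, not yet performed
fs.link(Path("/tmp/src.txt"), Path("/tmp/demo/dst"))  # ditto

# Nothing has happened yet; the log is just op tuples.
print(fs._log)  # [('mkdir', ...), ('link', ...)]

# Apply the batch; the callback sees each entry as it runs.
fs.execute(callback=lambda e: print("-", *e))
```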
test/integration/hosts.d/test/pkg.toml (6 lines, new file)
@@ -0,0 +1,6 @@
[cram]
version = 1

[package]
[[package.require]]
name = "packages.d/p1"

test/integration/packages.d/p1/foo (1 line, new file)
@@ -0,0 +1 @@
bar

test/integration/packages.d/p2/bar (1 line, new file)
@@ -0,0 +1 @@
qux

test/integration/packages.d/p3/REQUIRES (2 lines, new file)
@@ -0,0 +1,2 @@
packages.d/p1
packages.d/p2

test/integration/packages.d/p4/pkg.toml (6 lines, new file)
@@ -0,0 +1,6 @@
[cram]
version = 1

[package]
[[package.require]]
name = "packages.d/p3"

test/integration/packages.d/p5/INSTALL (3 lines, new file)
@@ -0,0 +1,3 @@
#!/bin/bash
# A legacy custom install script
true

test/integration/packages.d/p6/pkg.toml (5 lines, new file)
@@ -0,0 +1,5 @@
[cram]
version = 1

[package]
install = "true"

test/integration/packages.d/p7/pkg.toml (6 lines, new file)
@@ -0,0 +1,6 @@
[cram]
version = 1

[package]
[[package.install]]
run = "true"

test/integration/profiles.d/default/pkg.toml (4 lines, new file)
@@ -0,0 +1,4 @@
[cram]
version = 1

[package]

test/integration/profiles.d/default/subpackage/pkg.toml (6 lines, new file)
@@ -0,0 +1,6 @@
[cram]
version = 1

[package]
[[package.require]]
name = "packages.d/p3"
tools/autoflake/BUILD (10 lines, new file)
@@ -0,0 +1,10 @@
zapp_binary(
    name = "autoflake",
    main = "__main__.py",
    deps = [
        py_requirement("autoflake"),
    ],
    visibility = [
        "//visibility:public"
    ],
)

tools/autoflake/__main__.py (15 lines, new file)
@@ -0,0 +1,15 @@
#!/usr/bin/env python3

"""
Shim for executing autoflake.
"""

import re
import sys

from autoflake import main


if __name__ == "__main__":
    sys.argv[0] = re.sub(r"(-script\.pyw|\.exe)?$", "", sys.argv[0])
    sys.exit(main())
tools/black/BUILD (10 lines, new file)
@@ -0,0 +1,10 @@
py_binary(
    name = "black",
    main = "__main__.py",
    deps = [
        py_requirement("black"),
    ],
    visibility = [
        "//visibility:public"
    ],
)

tools/black/__main__.py (51 lines, new file)
@@ -0,0 +1,51 @@
"""A shim to black which knows how to tee output for --output-file."""

import argparse
import sys

from black import nullcontext, patched_main


parser = argparse.ArgumentParser()
parser.add_argument("--output-file", default=None)


class Tee(object):
    """Something that looks like a File/Writeable but does teed writes."""

    def __init__(self, name, mode):
        self._file = open(name, mode)
        self._stdout = sys.stdout

    def __enter__(self):
        sys.stdout = self
        return self

    def __exit__(self, *args, **kwargs):
        sys.stdout = self._stdout
        self.close()

    def write(self, data):
        self._file.write(data)
        self._stdout.write(data)

    def flush(self):
        self._file.flush()
        self._stdout.flush()

    def close(self):
        self._file.close()


if __name__ == "__main__":
    opts, args = parser.parse_known_args()

    if opts.output_file:
        print("Teeing output...")
        ctx = Tee(opts.output_file, "w")
    else:
        ctx = nullcontext()

    with ctx:
        sys.argv = [sys.argv[0]] + args
        patched_main()
tools/black/black.bzl (71 lines, new file)
@@ -0,0 +1,71 @@
"""Linting for Python using Aspects."""

# Hacked up from https://github.com/bazelbuild/rules_rust/blob/main/rust/private/clippy.bzl
#
# Usage:
#   bazel build --aspects="//tools/black:black.bzl%black_aspect" --output_groups=black_checks <target|pattern>
#
# Note that the build directive can be inserted into .bazelrc to make it part of the default behavior

def _black_aspect_impl(target, ctx):
    if hasattr(ctx.rule.attr, 'srcs'):
        black = ctx.attr._black.files_to_run
        config = ctx.attr._config.files.to_list()[0]

        files = []
        for src in ctx.rule.attr.srcs:
            for f in src.files.to_list():
                if f.extension == "py":
                    files.append(f)

        if files:
            report = ctx.actions.declare_file(ctx.label.name + ".black.report")
        else:
            return []

        args = ["--check", "--output-file", report.path]
        for f in files:
            args.append(f.path)

        ctx.actions.run(
            executable = black,
            inputs = files,
            tools = ctx.attr._config.files.to_list() + ctx.attr._black.files.to_list(),
            arguments = args,
            outputs = [report],
            mnemonic = "Black",
        )

        return [
            OutputGroupInfo(black_checks = depset([report]))
        ]

    return []


black_aspect = aspect(
    implementation = _black_aspect_impl,
    attr_aspects = ['deps'],
    attrs = {
        '_black': attr.label(default=":black"),
        '_config': attr.label(
            default="//:setup.cfg",
            executable=False,
            allow_single_file=True
        ),
    }
)


def _black_rule_impl(ctx):
    ready_targets = [dep for dep in ctx.attr.deps if "black_checks" in dir(dep[OutputGroupInfo])]
    files = depset([], transitive = [dep[OutputGroupInfo].black_checks for dep in ready_targets])
    return [DefaultInfo(files = files)]


black = rule(
    implementation = _black_rule_impl,
    attrs = {
        'deps' : attr.label_list(aspects = [black_aspect]),
    },
)
tools/build_rules/BUILD (3 lines, new file)
@@ -0,0 +1,3 @@
package(default_visibility = ["//visibility:public"])

licenses(["notice"])
tools/build_rules/cp.bzl (47 lines, new file)
@@ -0,0 +1,47 @@
load("@bazel_skylib//rules:copy_file.bzl",
     "copy_file",
)

def cp(name, src, **kwargs):
    """A slightly more convenient cp() rule. Name and out should always be the same."""

    rule_name = name.replace(".", "_").replace(":", "/").replace("//", "").replace("/", "_")
    copy_file(
        name = rule_name,
        src = src,
        out = name,
        **kwargs
    )
    return rule_name


def _copy_filegroup_impl(ctx):
    all_outputs = []
    for t in ctx.attr.deps:
        t_prefix = t.label.package
        for f in t.files.to_list():
            # Strip out the source prefix...
            path = f.short_path.replace(t_prefix + "/", "")
            out = ctx.actions.declare_file(path)
            print(ctx.attr.name, t.label, f, " => ", path)
            all_outputs += [out]
            ctx.actions.run_shell(
                outputs=[out],
                inputs=depset([f]),
                arguments=[f.path, out.path],
                command="cp $1 $2"
            )

    return [
        DefaultInfo(
            files=depset(all_outputs),
            runfiles=ctx.runfiles(files=all_outputs))
    ]


copy_filegroups = rule(
    implementation=_copy_filegroup_impl,
    attrs={
        "deps": attr.label_list(),
    },
)
tools/build_rules/prelude_bazel (32 lines, new file)
@@ -0,0 +1,32 @@
# -*- mode: bazel -*-
# A global prelude for all BUILD[.bazel] files

load("//tools/python:defs.bzl",
     "py_library",
     "py_binary",
     "py_unittest",
     "py_pytest",
     "py_resources",
     "py_project",
)

load("@arrdem_cram_pypi//:requirements.bzl",
     py_requirement="requirement"
)

load("@bazel_skylib//rules:copy_file.bzl",
     "copy_file",
)

load("//tools/build_rules:cp.bzl",
     "cp",
     "copy_filegroups"
)

load("//tools/build_rules:webp.bzl",
     "webp_image",
)

load("@rules_zapp//zapp:zapp.bzl",
     "zapp_binary",
)
tools/build_rules/webp.bzl (25 lines, new file)
@@ -0,0 +1,25 @@
"""
Webp image building.
"""

def webp_image(src, name = None, out = None, quality = 95, flags = None):
    """Use cwebp to convert the image to an output."""

    out = out or src.split(".", 1)[0] + ".webp"
    name = name or out.replace(".", "_")
    return native.genrule(
        name = name,
        srcs = [src],
        outs = [out],
        cmd = "cwebp {} $< -o $@".format(
            " ".join([str(i) for i in (flags or ["-q", quality])])
        )
    )

def auto_webps(srcs):
    """Generate webp targets automagically for a mess of files."""

    for f in srcs:
        webp_image(
            src = f,
        )
tools/flake8/BUILD (12 lines, new file)
@@ -0,0 +1,12 @@
exports_files(["flake8.cfg"], visibility=["//visibility:public"])

py_binary(
    name = "flake8",
    main = "__main__.py",
    deps = [
        py_requirement("flake8"),
    ],
    visibility = [
        "//visibility:public"
    ],
)

tools/flake8/__main__.py (5 lines, new file)
@@ -0,0 +1,5 @@
from flake8.main import cli


if __name__ == "__main__":
    cli.main()
tools/flake8/flake8.bzl (71 lines, new file)
@@ -0,0 +1,71 @@
"""Linting for Python using Aspects."""

# Hacked up from https://github.com/bazelbuild/rules_rust/blob/main/rust/private/clippy.bzl
#
# Usage:
#   bazel build --aspects="//tools/flake8:flake8.bzl%flake8_aspect" --output_groups=flake8_checks <target|pattern>
#
# Note that the build directive can be inserted into .bazelrc to make it part of the default behavior

def _flake8_aspect_impl(target, ctx):
    if hasattr(ctx.rule.attr, 'srcs'):
        flake8 = ctx.attr._flake8.files_to_run
        config = ctx.attr._config.files.to_list()[0]

        files = []
        for src in ctx.rule.attr.srcs:
            for f in src.files.to_list():
                if f.extension == "py":
                    files.append(f)

        if files:
            report = ctx.actions.declare_file(ctx.label.name + ".flake.report")
        else:
            return []

        args = ["--config", config.path, "--tee", "--output-file", report.path]
        for f in files:
            args.append(f.path)

        ctx.actions.run(
            executable = flake8,
            inputs = files,
            tools = ctx.attr._config.files.to_list() + ctx.attr._flake8.files.to_list(),
            arguments = args,
            outputs = [report],
            mnemonic = "Flake8",
        )

        return [
            OutputGroupInfo(flake8_checks = depset([report]))
        ]

    return []


flake8_aspect = aspect(
    implementation = _flake8_aspect_impl,
    attr_aspects = ['deps'],
    attrs = {
        '_flake8': attr.label(default=":flake8"),
        '_config': attr.label(
            default="//:setup.cfg",
            executable=False,
            allow_single_file=True
        ),
    }
)


def _flake8_rule_impl(ctx):
    ready_targets = [dep for dep in ctx.attr.deps if "flake8_checks" in dir(dep[OutputGroupInfo])]
    files = depset([], transitive = [dep[OutputGroupInfo].flake8_checks for dep in ready_targets])
    return [DefaultInfo(files = files)]


flake8 = rule(
    implementation = _flake8_rule_impl,
    attrs = {
        'deps' : attr.label_list(aspects = [flake8_aspect]),
    },
)
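As the header comment suggests, the aspect can be made part of every build by adding its directive to `.bazelrc` (a sketch; this commit does not itself ship a `.bazelrc`):

```
build --aspects="//tools/flake8:flake8.bzl%flake8_aspect" --output_groups=flake8_checks
```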
tools/fmt.sh (23 lines, new executable file)
@@ -0,0 +1,23 @@
#!/usr/bin/env bash
set -euox pipefail
cd "$(git rev-parse --show-toplevel)"

bazel build //tools/...

DIRS=(tools src/python test/python)

function brl() {
    bin="$1"
    shift
    bazel build "//${bin}"
    "bazel-bin/${bin}/$(basename ${bin})" "$@"
    return "$?"
}

for d in "${DIRS[@]}"; do
    if [ -d "$d" ]; then
        brl tools/autoflake --remove-all-unused-imports -ir $(realpath "$d")
        brl tools/isort $(realpath "$d")
        brl tools/unify --quote '"' -ir $(realpath "$d")
    fi
done
tools/isort/BUILD (11 lines, new file)
@@ -0,0 +1,11 @@
py_binary(
    name = "isort",
    main = "__main__.py",
    deps = [
        py_requirement("isort"),
    ],
    visibility = [
        "//visibility:public"
    ],
)

tools/isort/__main__.py (15 lines, new file)
@@ -0,0 +1,15 @@
#!/usr/bin/env python3

"""
Shim for executing isort.
"""

import re
import sys

from isort.main import main


if __name__ == "__main__":
    sys.argv[0] = re.sub(r"(-script\.pyw?|\.exe)?$", "", sys.argv[0])
    sys.exit(main())
tools/lint.sh (25 lines, new executable file)
@@ -0,0 +1,25 @@
#!/usr/bin/env bash
set -euox pipefail
cd "$(git rev-parse --show-toplevel)"
bazel build //tools/python/...

DIRS=(tools projects)

function brl() {
    bin="$1"
    shift
    bazel build "//${bin}"
    "bazel-bin/${bin}/$(basename ${bin})" "$@"
    return "$?"
}

brl tools/flake8 "${DIRS[@]}"
brl tools/isort --check "${DIRS[@]}"
brl tools/unify --quote '"' -cr "${DIRS[@]}"
brl tools/reqman lint tools/python/requirements.txt

# OpenAPI specific junk
for f in $(find . -type f -name "openapi.yaml"); do
    brl tools/openapi "${f}" && echo "Schema $f OK"
    brl tools/yamllint -c tools/yamllint/yamllintrc "${f}"
done
tools/python/BUILD (47 lines, new file)
@@ -0,0 +1,47 @@
load("@rules_python//python:defs.bzl",
     "py_runtime_pair",
)

load("@arrdem_cram_pypi//:requirements.bzl", "all_requirements")

package(default_visibility = ["//visibility:public"])

licenses(["notice"])

exports_files([
    "defs.bzl",
    "bzl_pytest_shim.py",
    "bzl_unittest_shim.py",
    "pythonshim",
])

py_runtime(
    name = "python3_runtime",
    files = [],
    interpreter = ":pythonshim",
    python_version = "PY3",
    visibility = ["//visibility:public"],
)

py_runtime_pair(
    name = "python_runtime",
    py2_runtime = None,
    py3_runtime = ":python3_runtime",
)

toolchain(
    name = "python3_toolchain",
    toolchain = ":python_runtime",
    toolchain_type = "@bazel_tools//tools/python:toolchain_type",
)

py_pytest(
    name = "test_licenses",
    srcs = [
        "test_licenses.py",
    ],
    data = [
        "requirements.txt",
    ],
    deps = all_requirements,
)
tools/python/bzl_pytest_shim.py (11 lines, new file)
@@ -0,0 +1,11 @@
"""A shim for executing pytest."""

import sys

import pytest


if __name__ == "__main__":
    cmdline = ["--ignore=external"] + sys.argv[1:]
    print(cmdline, file=sys.stderr)
    sys.exit(pytest.main(cmdline))
tools/python/bzl_unittest_shim.py (66 lines, new file)
@@ -0,0 +1,66 @@
"""Universal launcher for unit tests"""

import argparse
import logging
import os
import sys
import unittest


def main():
    """Parse args, collect tests and run them"""
    # Disable *.pyc files
    sys.dont_write_bytecode = True

    # Add ".." to module search path
    cur_dir = os.path.dirname(os.path.realpath(__file__))
    top_dir = os.path.abspath(os.path.join(cur_dir, os.pardir))
    sys.path.append(top_dir)

    # Parse command line arguments
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "-v",
        "--verbose",
        action="count",
        default=0,
        help="verbosity level, use: [-v | -vv | -vvv]",
    )
    parser.add_argument(
        "-s", "--start-directory", default=None, help="directory to start discovery"
    )
    parser.add_argument(
        "-p",
        "--pattern",
        default="test*.py",
        help="pattern to match test files ('test*.py' default)",
    )
    parser.add_argument(
        "test", nargs="*", help="test specs (e.g. module.TestCase.test_func)"
    )
    args = parser.parse_args()

    if not args.start_directory:
        args.start_directory = cur_dir

    if args.verbose > 2:
        logging.basicConfig(level=logging.DEBUG, format="DEBUG: %(message)s")

    loader = unittest.TestLoader()
    if args.test:
        # Add particular tests; build the suite once so every named test is kept.
        suite = unittest.TestSuite()
        for test in args.test:
            suite.addTests(loader.loadTestsFromName(test))
    else:
        # Find all tests
        suite = loader.discover(args.start_directory, args.pattern)

    runner = unittest.TextTestRunner(verbosity=args.verbose)
    result = runner.run(suite)
    return result.wasSuccessful()


if __name__ == "__main__":
    # NOTE: True(success) -> 0, False(fail) -> 1
    exit(not main())
254
tools/python/defs.bzl
Normal file
@@ -0,0 +1,254 @@
load("@arrdem_cram_pypi//:requirements.bzl",
     _py_requirement = "requirement"
)

load("@rules_python//python:defs.bzl",
     "py_runtime",
     "py_runtime_pair",
     _py_binary = "py_binary",
     _py_test = "py_test",
     _py_library = "py_library",
)

load("@rules_zapp//zapp:zapp.bzl",
     "zapp_binary",
)

load("@bazel_skylib//lib:sets.bzl", "sets")


def py_requirement(*args, **kwargs):
    """A re-export of requirement()"""
    return _py_requirement(*args, **kwargs)


def py_test(python_version=None, **kwargs):
    """A re-export of py_test()"""

    if python_version and python_version != "PY3":
        fail("py3k only!")

    return _py_test(
        python_version="PY3",
        **kwargs,
    )


def py_pytest(name, srcs, deps, main=None, python_version=None, args=None, **kwargs):
    """A py_test target which uses pytest."""

    if python_version and python_version != "PY3":
        fail("py3k only!")

    f = "//tools/python:bzl_pytest_shim.py"

    deps = sets.to_list(sets.make([
        py_requirement("pytest"),
        py_requirement("pytest-pudb"),
        py_requirement("pytest-cov"),
        py_requirement("pytest-timeout"),
    ] + deps))

    srcs = [f] + srcs

    py_test(
        name = name,
        srcs = srcs,
        main = f,
        args = args,
        python_version="PY3",
        deps = deps,
        **kwargs,
    )

    # zapp_test(
    #     name = name + ".zapp",
    #     main = f,
    #     args = args,
    #     srcs = srcs,
    #     deps = deps,
    #     test = True,
    #     zip_safe = False,
    #     **kwargs,
    # )

# FIXME (arrdem 2020-09-27):
#   Generate a py_image_test.
#   Not clear how to achieve that.
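
# Example use (hypothetical target; the pytest/pudb/cov/timeout requirements
# above are merged into deps automatically):
#
#   py_pytest(
#       name = "test_common.py",
#       srcs = ["test/python/test_common.py"],
#       deps = [py_requirement("hypothesis")],
#   )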

def py_unittest(srcs=[], **kwargs):
    """A helper for running unittest tests"""

    f = "//tools/python:bzl_unittest_shim.py"
    return py_test(
        main = f,
        srcs = [f] + srcs,
        **kwargs
    )


def py_binary(python_version=None, main=None, srcs=None, **kwargs):
    """A re-export of py_binary()"""

    if python_version and python_version != "PY3":
        fail("py3k only!")

    srcs = srcs or []
    if main not in srcs:
        srcs = [main] + srcs

    return _py_binary(
        python_version = "PY3",
        main = main,
        srcs = srcs,
        **kwargs,
    )


def py_library(srcs_version=None, **kwargs):
    """A re-export of py_library()"""

    if srcs_version and srcs_version != "PY3":
        fail("py3k only!")

    return _py_library(
        srcs_version="PY3",
        **kwargs
    )


ResourceGroupInfo = provider(
    fields = {
        "srcs": "files to use from Python",
    },
)


def _resource_impl(ctx):
    srcs = []
    for target in ctx.attr.srcs:
        srcs.extend(target.files.to_list())
    transitive_srcs = depset(direct = srcs)

    return [
        ResourceGroupInfo(
            srcs = ctx.attr.srcs,
        ),
        PyInfo(
            has_py2_only_sources = False,
            has_py3_only_sources = True,
            uses_shared_libraries = False,
            transitive_sources = transitive_srcs,
        ),
    ]


py_resources = rule(
    implementation = _resource_impl,
    attrs = {
        "srcs": attr.label_list(
            allow_empty = True,
            mandatory = True,
            allow_files = True,
            doc = "Files to hand through to Python",
        ),
    },
)
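
# Example use (hypothetical target). Because the rule returns PyInfo, the
# result can sit directly in the deps of a py_library or py_binary:
#
#   py_resources(
#       name = "schemas",
#       srcs = glob(["src/resources/**/*.sql"]),
#   )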

def py_project(name=None,
               main=None,
               main_deps=None,
               lib_srcs=None,
               lib_deps=None,
               lib_data=None,
               test_srcs=None,
               test_deps=None,
               test_data=None):
    """
    A helper for defining a conventionally-formatted Python project.

    Assumes a {src,test}/{resources,python} layout, where src/ is a library
    and test/ contains local tests only.

    Each test_*.py source generates its own implicit test target, which allows
    for automatic test parallelism. Non-test_*.py files are implicitly srcs
    for the generated test targets; this is the same as making them implicitly
    a testonly lib.
    """

    lib_srcs = lib_srcs or native.glob(["src/python/**/*.py"],
                                       exclude=[
                                           "**/*.pyc",
                                       ])
    lib_data = lib_data or native.glob(["src/resources/**/*",
                                        "src/python/**/*"],
                                       exclude=[
                                           "**/*.py",
                                           "**/*.pyc",
                                       ])
    test_srcs = test_srcs or native.glob(["test/python/**/*.py"],
                                         exclude=[
                                             "**/*.pyc",
                                         ])
    test_data = test_data or native.glob(["test/resources/**/*",
                                          "test/python/**/*"],
                                         exclude=[
                                             "**/*.py",
                                             "**/*.pyc",
                                         ])

    lib_name = name if not main else "lib"

    py_library(
        name=lib_name,
        srcs=lib_srcs,
        deps=lib_deps,
        data=lib_data,
        imports=[
            "src/python",
            "src/resources",
        ],
        visibility = [
            "//visibility:public",
        ],
    )

    if main:
        py_binary(
            name=name,
            main=main,
            deps=(main_deps or []) + [lib_name],
            imports=[
                "src/python",
                "src/resources",
            ],
            visibility = [
                "//visibility:public",
            ],
        )

        zapp_binary(
            name=name + ".zapp",
            main=main,
            deps=(main_deps or []) + [lib_name],
            data=lib_data,
            imports=[
                "src/python",
                "src/resources",
            ],
            visibility = [
                "//visibility:public",
            ],
        )

    for src in test_srcs:
        if "test_" in src:
            py_pytest(
                name=src.split("/")[-1],
                srcs=[src] + [f for f in test_srcs if "test_" not in f],
                deps=[lib_name] + (test_deps or []),
                data=test_data,
                imports=[
                    "test/python",
                    "test/resources",
                ],
            )
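Tying the macros together, a downstream project's BUILD file can stay very small. A sketch of plausible usage (the project name and pins are hypothetical; `py_requirement` resolves against the `arrdem_cram_pypi` lock file):

```python
load("//tools/python:defs.bzl", "py_project", "py_requirement")

# One call yields :lib, :myproject, :myproject.zapp, and one py_pytest
# target per test/python/**/test_*.py file.
py_project(
    name = "myproject",
    main = "src/python/myproject/__main__.py",
    main_deps = [
        py_requirement("click"),
    ],
    test_deps = [
        py_requirement("pytest-cov"),
    ],
)
```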
21
tools/python/pythonshim
Executable file
@@ -0,0 +1,21 @@
#!/bin/sh

# Bazel STRONGLY disapproves of linking dynamically to a Python interpreter.
# But ... that's exactly what we want to do.
# So this script exists to find a 'compliant' Python install and use that.

PYTHONREV="3.10"
CMD="python${PYTHONREV}"

if [ -x "$(command -v "$CMD")" ]; then
  exec "$(which "$CMD")" "$@"
else
  case "$(uname)" in
    Darwin)
      # FIXME: What if it isn't there?
      exec /opt/homebrew/bin/"$CMD" "$@"
      ;;
  esac
  echo "Error: Unable to find a viable Python executable" >&2
  exit 1
fi
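`defs.bzl` already loads `py_runtime` and `py_runtime_pair`, which suggests this shim is meant to back the repo's Python toolchain. A sketch of that wiring, assuming a BUILD file next to the shim (the target names are made up):

```python
load("@rules_python//python:defs.bzl", "py_runtime", "py_runtime_pair")

py_runtime(
    name = "python3_runtime",
    interpreter = ":pythonshim",
    python_version = "PY3",
)

py_runtime_pair(
    name = "python_runtime_pair",
    py3_runtime = ":python3_runtime",
)

toolchain(
    name = "python_toolchain",
    toolchain = ":python_runtime_pair",
    toolchain_type = "@rules_python//python:toolchain_type",
)
```

The toolchain would then be activated with `register_toolchains("//tools/python:python_toolchain")` in the WORKSPACE.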
36
tools/python/requirements.txt
Normal file
@@ -0,0 +1,36 @@
attrs==22.1.0
autoflake==1.4
black==22.6.0
click==8.1.3
coverage==6.4.2
flake8==4.0.1
iniconfig==1.1.1
isort==5.10.1
jedi==0.18.1
mccabe==0.6.1
mypy-extensions==0.4.3
packaging==21.3
parso==0.8.3
pathspec==0.9.0
pip==22.2.1
platformdirs==2.5.2
pluggy==1.0.0
pudb==2022.1.2
py==1.11.0
pycodestyle==2.8.0
pyflakes==2.4.0
Pygments==2.12.0
pyparsing==3.0.9
pytest==7.1.2
pytest-cov==3.0.0
pytest-pudb==0.7.0
pytest-timeout==2.1.0
setuptools==62.6.0
toml==0.10.2
tomli==2.0.1
toposort==1.7
unify==0.5
untokenize==0.1.1
urwid==2.1.2
urwid-readline==0.13
wheel==0.37.1
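These pins presumably back the `@arrdem_cram_pypi` repository that `defs.bzl` loads `requirement` from. With rules_python of this vintage, the WORKSPACE wiring would look roughly like the sketch below (the repo name is taken from the `load()` in `defs.bzl`; the rest is assumption):

```python
load("@rules_python//python:pip.bzl", "pip_parse")

pip_parse(
    name = "arrdem_cram_pypi",
    requirements_lock = "//tools/python:requirements.txt",
)

load("@arrdem_cram_pypi//:requirements.bzl", "install_deps")

install_deps()
```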
144
tools/python/test_licenses.py
Normal file
@@ -0,0 +1,144 @@
"""
Validate 3rdparty library licenses as approved.
"""

import re

from pkg_resources import (
    DistInfoDistribution,
    working_set,
)
import pytest


# Licenses approved as representing non-copyleft and not precluding commercial usage.
# This is all easy; there's a good schema here.
APPROVED_LICENSES = [
    MIT := "License :: OSI Approved :: MIT License",
    APACHE := "License :: OSI Approved :: Apache Software License",
    BSD := "License :: OSI Approved :: BSD License",
    MPL10 := "License :: OSI Approved :: Mozilla Public License 1.0 (MPL)",
    MPL11 := "License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)",
    MPL20 := "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
    PSFL := "License :: OSI Approved :: Python Software Foundation License",
    LGPL := "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
    LGPL3 := "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)",
    ISCL := "License :: OSI Approved :: ISC License (ISCL)",
]

UNAPPROVED_LICENSES = [
    GPL1 := "License :: OSI Approved :: GNU General Public License",
    GPL2 := "License :: OSI Approved :: GNU General Public License v2 (GPLv2)",
    GPL3 := "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
]

# This data is GARBO.
LICENSES_BY_LOWERNAME = {
    "apache 2.0": APACHE,
    "apache": APACHE,
    "http://www.apache.org/licenses/license-2.0": APACHE,
    "bsd 3": BSD,
    "bsd": BSD,
    "gpl": GPL1,
    "gpl2": GPL2,
    "gpl3": GPL3,
    "lgpl": LGPL,
    "lgpl3": LGPL3,
    "isc": ISCL,
    "mit": MIT,
    "mpl": MPL10,
    "mpl 2.0": MPL20,
    "psf": PSFL,
}

# Mash in some cases.
LICENSES_BY_LOWERNAME.update(
    {lic.split(" :: ")[-1].lower(): lic for lic in APPROVED_LICENSES}
)

# As a workaround for packages which don't have correct metadata on PyPI, hand-verified packages
APPROVED_PACKAGES = [
    "yamllint",  # WARNING: YAMLLINT IS GPL3'd.
    "Flask_Log_Request_ID",  # MIT, currently depended on as a git dep.
    "anosql",  # BSD
]


def bash_license(ln):
    # Repeatedly strip noise words until the name reaches a fixed point, then
    # map it onto a trove classifier if one is known.
    while True:
        lnn = re.sub(
            r"[(),]|( version)|( license)|( ?v(?=\d))|([ -]clause)|(or later)",
            "",
            ln.lower(),
        )
        if ln != lnn:
            ln = lnn
        else:
            break

    ln = LICENSES_BY_LOWERNAME.get(ln, ln)
    return ln


@pytest.mark.parametrize(
    "a,b",
    [
        ("MIT", MIT),
        ("mit", MIT),
        ("BSD", BSD),
        ("BSD 3-clause", BSD),
        ("BSD 3 clause", BSD),
        ("GPL3", GPL3),
        ("GPL v3", GPL3),
        ("GPLv3", GPL3),
    ],
)
def test_bash_license(a, b):
    assert bash_license(a) == b


def licenses(dist: DistInfoDistribution):
    """Get the dist's license list from its local metadata.

    pip and other tools use the local dist metadata to introspect licenses, which requires that
    packages be installed. Going to PyPI isn't strictly reproducible, both because the PyPI
    database could be updated and because we could see network failures, but there really isn't a
    good way to solve this problem.
    """

    lics = []
    name = dist.project_name
    version = dist.version
    print(name, version, type(dist))

    meta = dist.get_metadata(dist.PKG_INFO).split("\n")
    classifiers = [
        l.replace("Classifier: ", "", 1) for l in meta if l.startswith("Classifier: ")
    ]
    license = bash_license(
        next((l for l in meta if l.startswith("License:")), "License: UNKNOWN").replace(
            "License: ", "", 1
        )
    )
    lics.extend(l for l in classifiers if l.startswith("License ::"))

    if not lics:
        lics.append(license)

    return lics


@pytest.mark.parametrize(
    "dist",
    (w for w in working_set if w.location.find("arrdem_cram_pypi") != -1),
    ids=lambda dist: dist.project_name,
)
def test_approved_license(dist: DistInfoDistribution):
    """Ensure that a given package is either allowed by name or uses an approved license."""

    _licenses = licenses(dist)
    print(dist.location)
    assert dist.project_name in APPROVED_PACKAGES or any(
        lic in APPROVED_LICENSES for lic in _licenses
    ), f"{dist.project_name} ({dist.location}) was not approved and its license(s) were unknown {_licenses!r}"
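A plausible way this test plugs into the build, using the `py_pytest` macro from `defs.bzl` (the target below is a sketch, not part of the commit):

```python
load("//tools/python:defs.bzl", "py_pytest")

py_pytest(
    name = "test_licenses.py",
    srcs = ["test_licenses.py"],
    # pkg_resources ships with setuptools; pytest itself is merged in by the macro.
    deps = [],
)
```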
10
tools/unify/BUILD
Normal file
@@ -0,0 +1,10 @@
py_binary(
    name = "unify",
    main = "__main__.py",
    deps = [
        py_requirement("unify"),
    ],
    visibility = [
        "//visibility:public"
    ],
)
12
tools/unify/__main__.py
Normal file
@@ -0,0 +1,12 @@
#!/usr/bin/env python3

"""
Shim for executing unify.
"""

from unify import main


if __name__ == "__main__":
    exit(main())
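With the BUILD target above, the formatter can be run over a tree in place, e.g. `bazel run //tools/unify -- --in-place --recursive src/python` (assuming unify 0.5's usual `--in-place`/`--recursive` flags).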