Compare commits


10 commits

4 changed files with 84 additions and 168 deletions

View file

@@ -1,138 +0,0 @@
"""
The Zapp compiler.
"""

import argparse
import io
import json
import os
import pathlib
import stat
import sys
import zipfile

parser = argparse.ArgumentParser(description="The (bootstrap) Zapp compiler")
parser.add_argument("-o", "--out", dest="output", help="Output target file")
parser.add_argument("-d", "--debug", dest="debug", action="store_true", default=False)
parser.add_argument("manifest", help="The (JSON) manifest")

MAIN_TEMPLATE = """\
# -*- coding: utf-8 -*-
\"\"\"Zapp-generated __main__\""\"

from importlib import import_module

# FIXME: This is absolutely implementation details.
# Execing would be somewhat nicer
from runpy import _run_module_as_main

for script in {scripts!r}:
    print(script)
    mod, sep, fn = script.partition(':')
    mod_ok = all(part.isidentifier() for part in mod.split('.'))
    fn_ok = all(part.isidentifier() for part in fn.split('.'))
    if not mod_ok:
        raise RuntimeError("Invalid module reference {{!r}}".format(mod))
    if fn and not fn_ok:
        raise RuntimeError("Invalid function reference {{!r}}".format(fn))

    if mod and fn and False:
        mod = import_module(mod)
        getattr(mod, fn)()
    else:
        _run_module_as_main(mod)
"""
def make_dunder_main(manifest):
    """Generate a __main__.py file for the given manifest."""
    prelude = manifest.get("prelude_points", [])
    main = manifest.get("entry_point")
    scripts = prelude + [main]
    return MAIN_TEMPLATE.format(**locals())


def dir_walk_prefixes(path):
    """Helper. Walk all slices of a path."""
    segments = []
    yield ""
    for segment in path.split("/"):
        segments.append(segment)
        yield os.path.join(*segments)


def generate_dunder_inits(manifest):
    """Hack the manifest to insert __init__ files as needed."""
    sources = manifest["sources"]

    for input_file in list(sources.keys()):
        for path in dir_walk_prefixes(os.path.dirname(input_file)):
            init_file = os.path.join(path, "__init__.py")
            if init_file not in sources:
                sources[init_file] = ""

    return manifest


def generate_manifest(opts, manifest):
    """Insert the manifest.json file."""
    manifest["sources"]["zapp/manifest.json"] = opts.manifest
    return manifest


def main():
    opts, args = parser.parse_known_args()

    with open(opts.manifest) as fp:
        manifest = json.load(fp)

    manifest = generate_manifest(opts, manifest)

    # Patch the manifest to insert needed __init__ files
    # NOTE: This has to be the LAST thing we do
    manifest = generate_dunder_inits(manifest)

    if opts.debug:
        from pprint import pprint
        pprint(
            {
                "opts": {
                    k: getattr(opts, k) for k in dir(opts) if not k.startswith("_")
                },
                "manifest": manifest,
            }
        )

    with open(opts.output, "w") as zapp:
        shebang = "#!" + manifest["shebang"] + "\n"
        zapp.write(shebang)

    # Now we're gonna build the zapp from the manifest
    with zipfile.ZipFile(opts.output, "a") as zapp:
        # Append the __main__.py generated record
        zapp.writestr("__main__.py", make_dunder_main(manifest))

        # Append user-specified sources
        for dest, src in manifest["sources"].items():
            if src == "":
                zapp.writestr(dest, "")
            else:
                zapp.write(src, dest)

        # Append user-specified libraries
        # FIXME

    zapp = pathlib.Path(opts.output)
    zapp.chmod(zapp.stat().st_mode | stat.S_IEXEC)


if __name__ == "__main__" or 1:
    main()
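For context: the generated __main__ above treats each entry point as a "module" or "module:function" string. A minimal standalone sketch of that convention, using the public runpy API rather than the private _run_module_as_main helper the template calls:

from importlib import import_module
import runpy

def run_entry_point(script):
    """Run a "module" or "module:function" entry point string."""
    mod, _, fn = script.partition(":")
    if fn:
        # "pkg.mod:fn" -> import pkg.mod and call pkg.mod.fn()
        getattr(import_module(mod), fn)()
    else:
        # "pkg.mod" -> roughly `python -m pkg.mod`
        runpy.run_module(mod, run_name="__main__")

run_entry_point("platform")  # prints the platform string, like `python -m platform`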

View file

@@ -3,17 +3,15 @@ The Zapp! compiler.
"""
import argparse
import io
import json
import os
import pathlib
import stat
import sys
import zipfile
from collections import defaultdict
from email.parser import Parser
from itertools import chain
from shutil import move
from pathlib import Path
from tempfile import TemporaryDirectory
from zapp.support.pep425 import compress_tags, decompress_tag
@@ -53,10 +51,10 @@ for script in {scripts!r}:
"""
def dsub(d1, d2):
def dsub(d1: dict, d2: dict) -> dict:
"""Dictionary subtraction. Remove k/vs from d1 if they occur in d2."""
return [(k, v) for k, v in d1 if not k in (_k for _k, _ in d2)]
return {k: v for k, v in d1.items() if k not in d2 or v != d2[k]}
def make_dunder_main(manifest):
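For reference, the behavioral change in dsub above: the old version walked a sequence of pairs, while the new one operates on dicts and only drops entries whose key and value both match. A quick illustration with made-up values:

def dsub(d1: dict, d2: dict) -> dict:
    """Dictionary subtraction. Remove k/vs from d1 if they occur in d2."""
    return {k: v for k, v in d1.items() if k not in d2 or v != d2[k]}

assert dsub({"a": 1, "b": 2}, {"b": 2}) == {"a": 1}
assert dsub({"a": 1, "b": 2}, {"b": 3}) == {"a": 1, "b": 2}  # value differs, kept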
@@ -93,10 +91,6 @@ def load_wheel(opts, manifest, path):
        return {k: _get(k) for k in msg.keys()}

    # RECORD seems to just record file reference checksums for validation
    # with open(os.path.join(path, "RECORD")) as recordf:
    #     record = recordf.read()

    with open(os.path.join(path, "METADATA")) as metaf:
        meta = _parse_email(metaf.read())
@@ -105,15 +99,33 @@ def load_wheel(opts, manifest, path):
    prefix = os.path.dirname(path)

    # Naive glob of sources; note that bazel may have inserted empty __init__.py trash
    sources = [
        (
            dest,
            spec,
        )
        for dest, spec in manifest["sources"]
        for dest, spec in manifest["sources"].items()
        if spec["source"].startswith(prefix)
    ]

    # Retain only manifest-listed sources (dealing with __init__.py trash, but maybe not all conflicts)
    with open(os.path.join(path, "RECORD")) as recordf:
        known_srcs = set()
        for line in recordf:
            srcname, *_ = line.split(",")
            known_srcs.add(srcname)

        sources = {
            dest: spec
            for dest, spec in sources
            if dest in known_srcs or not dest.endswith("__init__.py")
        }

    # FIXME: Check hashes & sizes of manifest-listed sources and abort on error/conflict.
    # FIXME: Check for .so files or other compiled artifacts, adjust tags accordingly.

    return {
        # "record": record,
        "meta": meta,
@@ -148,12 +160,25 @@ def wheel_name(wheel):
def zip_wheel(tmpdir, wheel):
    """Build a 'tempfile' containing the proper contents of the wheel."""
    wheel_file = os.path.join(tmpdir, wheel_name(wheel))
    wn = wheel_name(wheel)
    cached_path = cache_wheel_path(wn)
    wheel_file = os.path.join(tmpdir, wn)

    with zipfile.ZipFile(wheel_file, "w") as whl:
        for dest, src in wheel["sources"]:
        for dest, src in wheel["sources"].items():
            whl.write(src["source"], dest)

    try:
        # Attempt to enter the (re)built wheel into the cache. This could fail
        # due to cross-device rename problems, or due to something else having
        # concurrently built the same wheel and won the race.
        #
        # FIXME: This probably needs some guardrails to ensure that we only put
        # architecture-independent wheels into the cache this way to avoid the
        # plethora of "misbehaved wheels" problems that pip deals with.
        Path(wheel_file).rename(cached_path)
        return str(cached_path)
    except OSError:
        return wheel_file
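The try/except above is a publish-or-fall-back pattern: move the freshly built wheel into the cache, and keep using the temporary copy if the rename fails. A standalone sketch of the same idea (the helper name and cache_dir parameter are illustrative, not part of this diff):

import os
from pathlib import Path

def publish_to_cache(built_file: str, cache_dir: str) -> str:
    """Try to move a freshly built artifact into a shared cache.

    Falls back to the un-cached path if the rename fails, e.g. because the
    cache lives on another filesystem (EXDEV) or a concurrent build won.
    """
    target = Path(cache_dir) / os.path.basename(built_file)
    try:
        Path(built_file).rename(target)
        return str(target)
    except OSError:
        return built_file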
@@ -166,10 +191,12 @@ def rezip_wheels(opts, manifest):
    wheels = [
        load_wheel(opts, manifest, os.path.dirname(s["source"]))
        for _, s in manifest["sources"]
        for _, s in manifest["sources"].items()
        if s["source"].endswith("/WHEEL")
    ]

    manifest["requirements"] = {}

    # Zip up the wheels and insert wheel records to the manifest
    for w in wheels:
        # Try to cheat and hit in the local cache first rather than building wheels every time
@@ -177,12 +204,6 @@ def rezip_wheels(opts, manifest):
        # Expunge sources available in the wheel
        manifest["sources"] = dsub(manifest["sources"], w["sources"])

        if opts.debug:
            from pprint import pprint
            print("---")
            pprint({"$type": "whl", **w})

        # We may have a double-path dependency.
        # If we DON'T, we have to zip
        if wn not in manifest["wheels"]:
@@ -194,10 +215,17 @@ def rezip_wheels(opts, manifest):
                    pass
                wf = str(wf)
            else:
                if opts.debug and False:
                    print("\n---")
                    json.dump({"$type": "whl", **w}, sys.stdout, indent=2)

                wf = zip_wheel(opts.tmpdir, w)

            # Insert a new wheel source
            manifest["wheels"][wn] = {"hashes": [], "source": wf}
            manifest["wheels"][wn] = {"hashes": [], "source": wf, "manifest": w}

            # Insert the requirement
            manifest["requirements"][w["meta"]["Name"]] = w["meta"]["Version"]

    return manifest
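After rezip_wheels the manifest carries both the wheels table and the new requirements table. A hypothetical fragment of the resulting structure (package name, version, and paths are invented for illustration):

# Hypothetical post-rezip_wheels manifest fragment.
manifest_fragment = {
    "wheels": {
        "example_pkg-1.0-py3-none-any.whl": {
            "hashes": [],
            "source": "/tmp/zappc/example_pkg-1.0-py3-none-any.whl",
            "manifest": {},  # elided: the per-wheel dict returned by load_wheel
        },
    },
    "requirements": {
        "example_pkg": "1.0",
    },
}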
@@ -273,6 +301,13 @@ def enable_unzipping(opts, manifest):
    return manifest


def fix_sources(opts, manifest):
    manifest["sources"] = {f: m for f, m in manifest["sources"]}

    return manifest


def main():
    opts, args = parser.parse_known_args()
@@ -282,6 +317,7 @@ def main():
    with TemporaryDirectory() as d:
        setattr(opts, "tmpdir", d)

        manifest = fix_sources(opts, manifest)
        manifest = rezip_wheels(opts, manifest)
        manifest = ensure_srcs_map(opts, manifest)
        manifest = enable_unzipping(opts, manifest)
@@ -293,17 +329,17 @@ def main():
        manifest = insert_manifest_json(opts, manifest)

        if opts.debug:
            from pprint import pprint
            print("---")
            pprint(
            print("\n---")
            json.dump(
                {
                    "$type": "zapp",
                    "opts": {
                        k: getattr(opts, k) for k in dir(opts) if not k.startswith("_")
                    },
                    "manifest": manifest,
                }
                },
                sys.stdout,
                indent=2
            )

        with open(opts.output, "w") as zapp:

View file

@@ -1,5 +1,6 @@
"""The Zapp runtime manifest API."""
import argparse
import json
from copy import deepcopy
from importlib.resources import open_text
@@ -35,4 +36,20 @@ def manifest():
        return json.load(fp)


PARSER = argparse.ArgumentParser()
PARSER.add_argument("--json", action="store_const", const="json", dest="format", default="json")
PARSER.add_argument("--requirements", action="store_const", const="requirements", dest="format")

if __name__ == "__main__":
    opts, args = PARSER.parse_known_args()

    if opts.format == "json":
        print(json.dumps(manifest()))

    elif opts.format == "requirements":
        for req, rev in manifest()["requirements"].items():
            print("{}=={}".format(req, rev))
__all__ = ["manifest"]
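A sketch of how the manifest() helper and the new requirements data might be consumed from inside a running zapp (the import path is an assumption based on the zapp.support package seen elsewhere in this diff):

# Programmatic use of the runtime manifest API; import path is assumed.
from zapp.support.manifest import manifest

m = manifest()
for req, rev in m.get("requirements", {}).items():
    print("{}=={}".format(req, rev))

On the command line, --json (the default) dumps the full manifest as JSON, while --requirements prints pip-style name==version lines.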

View file

@@ -105,9 +105,6 @@ def _zapp_impl(ctx):
        stored_path = _store_path(input_file.short_path, ctx, import_roots)
        if stored_path:
            local_path = input_file.path

            conflicts = [e for e in sources_map if e[0] == stored_path]
            if conflicts:
                print("File %s conflicts with others, %s" % (input_file, conflicts))

            sources_map.append([stored_path, local_path])

    _check_script(main_py_ref, sources_map)
@@ -154,12 +151,14 @@ def _zapp_impl(ctx):
        progress_message = "Building zapp file %s" % ctx.label,
        executable = ctx.executable.compiler,
        arguments = [
            "--debug",
            "-o", ctx.outputs.executable.path,
            manifest_file.path
        ],
        mnemonic = "PythonCompile",
        use_default_shell_env = True,
        execution_requirements = {
            "no-sandbox": "",  # So zappc can use a filesystem cache of (re)built wheels
        },
    )

    # .zapp file itself has no runfiles and no providers
@@ -206,6 +205,7 @@ def zapp_binary(name,
                test=False,
                compiler=None,
                zip_safe=True,
                shebang=None,
                _rule=_zapp,
                **kwargs):
    """A self-contained, single-file Python program, with a .zapp file extension.
@@ -262,6 +262,7 @@ def zapp_binary(name,
        prelude_points = prelude_points,
        zip_safe = zip_safe,
        wheels = [name + ".whls"],
        shebang = shebang,
    )
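For context, the new shebang attribute flows into the manifest and ends up as the "#!" line the compiler writes. A hypothetical BUILD usage (the load label and srcs attribute are assumptions; only name, zip_safe, and shebang appear in this diff):

# Hypothetical BUILD file usage of the new shebang attribute.
load("//zapp:zapp.bzl", "zapp_binary")  # load label is an assumption

zapp_binary(
    name = "hello",
    srcs = ["hello.py"],               # assumed attribute, not shown in this diff
    shebang = "/usr/bin/env python3",  # becomes the "#!" line of the .zapp
    zip_safe = True,
)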