Compare commits
No commits in common. "72f82e0ace184fe862f1b19c4f71c3bc36cf335b" and "d75c1a2f953d7d6bf99453e5d3b32197805788cf" have entirely different histories.
72f82e0ace
...
d75c1a2f95
4 changed files with 168 additions and 84 deletions
138
zapp/__main__.py
Normal file
138
zapp/__main__.py
Normal file
|
@ -0,0 +1,138 @@
|
||||||
|
"""
|
||||||
|
The Zapp compiler.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import io
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import pathlib
|
||||||
|
import stat
|
||||||
|
import sys
|
||||||
|
import zipfile
|
||||||
|
|
||||||
|
# Command-line interface for the bootstrap compiler.
#   -o/--out   : path of the zapp file to produce
#   -d/--debug : dump the resolved opts/manifest before writing
#   manifest   : path to the (JSON) build manifest to compile
parser = argparse.ArgumentParser(description="The (bootstrap) Zapp compiler")
parser.add_argument("-o", "--out", dest="output", help="Output target file")
parser.add_argument("-d", "--debug", dest="debug", action="store_true", default=False)
parser.add_argument("manifest", help="The (JSON) manifest")
|
||||||
|
|
||||||
|
|
||||||
|
# Template for the generated __main__.py. The single format slot is
# {scripts!r}; literal braces inside the template are doubled ({{!r}}).
MAIN_TEMPLATE = """\
# -*- coding: utf-8 -*-

\"\"\"Zapp-generated __main__\""\"

from importlib import import_module

# FIXME: This is absolutely implementation details.
# Execing would be somewhat nicer
from runpy import _run_module_as_main

for script in {scripts!r}:
    print(script)
    mod, sep, fn = script.partition(':')
    mod_ok = all(part.isidentifier() for part in mod.split('.'))
    fn_ok = all(part.isidentifier() for part in fn.split('.'))

    if not mod_ok:
        raise RuntimeError("Invalid module reference {{!r}}".format(mod))
    if fn and not fn_ok:
        raise RuntimeError("Invalid function reference {{!r}}".format(fn))

    if mod and fn and False:
        mod = import_module(mod)
        getattr(mod, fn)()
    else:
        _run_module_as_main(mod)
"""


def make_dunder_main(manifest):
    """Generate a __main__.py file for the given manifest.

    The result is the text of a script that runs each prelude point and
    then the entry point, all of the form ``module[:function]``.

    :param manifest: dict with optional ``prelude_points`` (list of script
        refs) and ``entry_point`` (a single script ref, may be absent).
    :returns: the rendered __main__.py source as a str.
    """

    prelude = manifest.get("prelude_points", [])
    main = manifest.get("entry_point")
    # Skip a missing entry point rather than embedding None in the
    # generated script list (which would crash the zapp at runtime).
    scripts = prelude + ([main] if main else [])
    # Explicit keyword instead of the fragile .format(**locals()).
    return MAIN_TEMPLATE.format(scripts=scripts)
|
||||||
|
|
||||||
|
|
||||||
|
def dir_walk_prefixes(path):
    """Helper. Walk all slices of a path.

    Yields "" first, then each cumulative prefix of *path* split on "/".
    """

    yield ""
    parts = path.split("/")
    for end in range(1, len(parts) + 1):
        yield os.path.join(*parts[:end])
|
||||||
|
|
||||||
|
|
||||||
|
def generate_dunder_inits(manifest):
    """Hack the manifest to insert __init__ files as needed."""

    sources = manifest["sources"]

    # Snapshot the keys; we add entries to `sources` while walking.
    for input_file in list(sources):
        for prefix in dir_walk_prefixes(os.path.dirname(input_file)):
            # "" as the source marks a synthesized empty file.
            sources.setdefault(os.path.join(prefix, "__init__.py"), "")

    return manifest
|
||||||
|
|
||||||
|
|
||||||
|
def generate_manifest(opts, manifest):
    """Insert the manifest.json file."""

    # Record the path of the manifest itself so it gets packed into the
    # zapp under zapp/manifest.json.
    sources = manifest["sources"]
    sources["zapp/manifest.json"] = opts.manifest
    return manifest
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Compile a zapp.

    Reads the JSON manifest named by the CLI args, patches it (manifest
    self-reference, then __init__ insertion), writes a shebang line to
    the output file, appends a zip archive of __main__.py plus all
    manifest sources, and finally marks the output executable.
    """

    opts, args = parser.parse_known_args()

    with open(opts.manifest) as fp:
        manifest = json.load(fp)

    manifest = generate_manifest(opts, manifest)
    # Patch the manifest to insert needed __init__ files
    # NOTE: This has to be the LAST thing we do
    manifest = generate_dunder_inits(manifest)

    if opts.debug:
        from pprint import pprint

        pprint(
            {
                "opts": {
                    k: getattr(opts, k) for k in dir(opts) if not k.startswith("_")
                },
                "manifest": manifest,
            }
        )

    # Write the shebang first; the zip archive is appended after it so
    # the file is both directly executable and a valid zipapp.
    with open(opts.output, "w") as header:
        shebang = "#!" + manifest["shebang"] + "\n"
        header.write(shebang)

    # Now we're gonna build the zapp from the manifest
    with zipfile.ZipFile(opts.output, "a") as zapp:
        # Append the __main__.py generated record
        zapp.writestr("__main__.py", make_dunder_main(manifest))

        # Append user-specified sources; "" marks a synthesized empty
        # file (see generate_dunder_inits), anything else is a path.
        for dest, src in manifest["sources"].items():
            if src == "":
                zapp.writestr(dest, "")
            else:
                zapp.write(src, dest)

        # Append user-specified libraries
        # FIXME

    # Set the owner-execute bit so the zapp can be run directly.
    output_path = pathlib.Path(opts.output)
    output_path.chmod(output_path.stat().st_mode | stat.S_IEXEC)
||||||
|
# Standard script guard. The previous "or 1" debug leftover made main()
# run on *import* as well; it must only run when executed directly.
if __name__ == "__main__":
    main()
|
|
@ -3,15 +3,17 @@ The Zapp! compiler.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
|
import io
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import pathlib
|
import pathlib
|
||||||
import stat
|
import stat
|
||||||
import sys
|
import sys
|
||||||
import zipfile
|
import zipfile
|
||||||
|
from collections import defaultdict
|
||||||
from email.parser import Parser
|
from email.parser import Parser
|
||||||
from itertools import chain
|
from itertools import chain
|
||||||
from pathlib import Path
|
from shutil import move
|
||||||
from tempfile import TemporaryDirectory
|
from tempfile import TemporaryDirectory
|
||||||
|
|
||||||
from zapp.support.pep425 import compress_tags, decompress_tag
|
from zapp.support.pep425 import compress_tags, decompress_tag
|
||||||
|
@ -51,10 +53,10 @@ for script in {scripts!r}:
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
def dsub(d1: dict, d2: dict) -> dict:
|
def dsub(d1, d2):
|
||||||
"""Dictionary subtraction. Remove k/vs from d1 if they occur in d2."""
|
"""Dictionary subtraction. Remove k/vs from d1 if they occur in d2."""
|
||||||
|
|
||||||
return {k: v for k, v in d1.items() if k not in d2 or v != d2[k]}
|
return [(k, v) for k, v in d1 if not k in (_k for _k, _ in d2)]
|
||||||
|
|
||||||
|
|
||||||
def make_dunder_main(manifest):
|
def make_dunder_main(manifest):
|
||||||
|
@ -91,6 +93,10 @@ def load_wheel(opts, manifest, path):
|
||||||
|
|
||||||
return {k: _get(k) for k in msg.keys()}
|
return {k: _get(k) for k in msg.keys()}
|
||||||
|
|
||||||
|
# RECORD seems to just record file reference checksums for validation
|
||||||
|
# with open(os.path.join(path, "RECORD")) as recordf:
|
||||||
|
# record = recordf.read()
|
||||||
|
|
||||||
with open(os.path.join(path, "METADATA")) as metaf:
|
with open(os.path.join(path, "METADATA")) as metaf:
|
||||||
meta = _parse_email(metaf.read())
|
meta = _parse_email(metaf.read())
|
||||||
|
|
||||||
|
@ -99,33 +105,15 @@ def load_wheel(opts, manifest, path):
|
||||||
|
|
||||||
prefix = os.path.dirname(path)
|
prefix = os.path.dirname(path)
|
||||||
|
|
||||||
# Naive glob of sources; note that bazel may hvae inserted empty __init__.py trash
|
|
||||||
sources = [
|
sources = [
|
||||||
(
|
(
|
||||||
dest,
|
dest,
|
||||||
spec,
|
spec,
|
||||||
)
|
)
|
||||||
for dest, spec in manifest["sources"].items()
|
for dest, spec in manifest["sources"]
|
||||||
if spec["source"].startswith(prefix)
|
if spec["source"].startswith(prefix)
|
||||||
]
|
]
|
||||||
|
|
||||||
# Retain only manifest-listed sources (dealing with __init__.py trash, but maybe not all conflicts)
|
|
||||||
with open(os.path.join(path, "RECORD")) as recordf:
|
|
||||||
known_srcs = set()
|
|
||||||
for line in recordf:
|
|
||||||
srcname, *_ = line.split(",")
|
|
||||||
known_srcs.add(srcname)
|
|
||||||
|
|
||||||
sources = {
|
|
||||||
dest: spec
|
|
||||||
for dest, spec in sources
|
|
||||||
if dest in known_srcs or not dest.endswith("__init__.py")
|
|
||||||
}
|
|
||||||
|
|
||||||
# FIXME: Check hashes & sizes of manifest-listed sources and abort on error/conflict.
|
|
||||||
|
|
||||||
# FIXME: Check for .so files or other compiled artifacts, adjust tags accordingly.
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
# "record": record,
|
# "record": record,
|
||||||
"meta": meta,
|
"meta": meta,
|
||||||
|
@ -160,26 +148,13 @@ def wheel_name(wheel):
|
||||||
def zip_wheel(tmpdir, wheel):
|
def zip_wheel(tmpdir, wheel):
|
||||||
"""Build a 'tempfile' containing the proper contents of the wheel."""
|
"""Build a 'tempfile' containing the proper contents of the wheel."""
|
||||||
|
|
||||||
wn = wheel_name(wheel)
|
wheel_file = os.path.join(tmpdir, wheel_name(wheel))
|
||||||
cached_path = cache_wheel_path(wn)
|
|
||||||
wheel_file = os.path.join(tmpdir, wn)
|
|
||||||
|
|
||||||
with zipfile.ZipFile(wheel_file, "w") as whl:
|
with zipfile.ZipFile(wheel_file, "w") as whl:
|
||||||
for dest, src in wheel["sources"].items():
|
for dest, src in wheel["sources"]:
|
||||||
whl.write(src["source"], dest)
|
whl.write(src["source"], dest)
|
||||||
|
|
||||||
try:
|
return wheel_file
|
||||||
# Attempt to enter the (re)built wheel into the cache. This could fail
|
|
||||||
# due to coss-device rename problems, or due to something else having
|
|
||||||
# concurrently built the same wheel and won the race.
|
|
||||||
#
|
|
||||||
# FIXME: This probably needs some guardrails to ensure that we only put
|
|
||||||
# architecture-independent wheels into the cache this way to avoid the
|
|
||||||
# plethora of "missbehaved wheels" problems that pip deals with.
|
|
||||||
Path(wheel_file).rename(cached_path)
|
|
||||||
return str(cached_path)
|
|
||||||
except OSError:
|
|
||||||
return wheel_file
|
|
||||||
|
|
||||||
|
|
||||||
def rezip_wheels(opts, manifest):
|
def rezip_wheels(opts, manifest):
|
||||||
|
@ -191,12 +166,10 @@ def rezip_wheels(opts, manifest):
|
||||||
|
|
||||||
wheels = [
|
wheels = [
|
||||||
load_wheel(opts, manifest, os.path.dirname(s["source"]))
|
load_wheel(opts, manifest, os.path.dirname(s["source"]))
|
||||||
for _, s in manifest["sources"].items()
|
for _, s in manifest["sources"]
|
||||||
if s["source"].endswith("/WHEEL")
|
if s["source"].endswith("/WHEEL")
|
||||||
]
|
]
|
||||||
|
|
||||||
manifest["requirements"] = {}
|
|
||||||
|
|
||||||
# Zip up the wheels and insert wheel records to the manifest
|
# Zip up the wheels and insert wheel records to the manifest
|
||||||
for w in wheels:
|
for w in wheels:
|
||||||
# Try to cheat and hit in the local cache first rather than building wheels every time
|
# Try to cheat and hit in the local cache first rather than building wheels every time
|
||||||
|
@ -204,6 +177,12 @@ def rezip_wheels(opts, manifest):
|
||||||
# Expunge sources available in the wheel
|
# Expunge sources available in the wheel
|
||||||
manifest["sources"] = dsub(manifest["sources"], w["sources"])
|
manifest["sources"] = dsub(manifest["sources"], w["sources"])
|
||||||
|
|
||||||
|
if opts.debug:
|
||||||
|
from pprint import pprint
|
||||||
|
|
||||||
|
print("---")
|
||||||
|
pprint({"$type": "whl", **w})
|
||||||
|
|
||||||
# We may have a double-path dependency.
|
# We may have a double-path dependency.
|
||||||
# If we DON'T, we have to zip
|
# If we DON'T, we have to zip
|
||||||
if wn not in manifest["wheels"]:
|
if wn not in manifest["wheels"]:
|
||||||
|
@ -215,17 +194,10 @@ def rezip_wheels(opts, manifest):
|
||||||
pass
|
pass
|
||||||
wf = str(wf)
|
wf = str(wf)
|
||||||
else:
|
else:
|
||||||
if opts.debug and False:
|
|
||||||
print("\n---")
|
|
||||||
json.dump({"$type": "whl", **w}, sys.stdout, indent=2)
|
|
||||||
|
|
||||||
wf = zip_wheel(opts.tmpdir, w)
|
wf = zip_wheel(opts.tmpdir, w)
|
||||||
|
|
||||||
# Insert a new wheel source
|
# Insert a new wheel source
|
||||||
manifest["wheels"][wn] = {"hashes": [], "source": wf, "manifest": w}
|
manifest["wheels"][wn] = {"hashes": [], "source": wf}
|
||||||
|
|
||||||
# Insert the requirement
|
|
||||||
manifest["requirements"][w["meta"]["Name"]] = w["meta"]["Version"]
|
|
||||||
|
|
||||||
return manifest
|
return manifest
|
||||||
|
|
||||||
|
@ -301,13 +273,6 @@ def enable_unzipping(opts, manifest):
|
||||||
return manifest
|
return manifest
|
||||||
|
|
||||||
|
|
||||||
def fix_sources(opts, manifest):
|
|
||||||
|
|
||||||
manifest["sources"] = {f: m for f, m in manifest["sources"]}
|
|
||||||
|
|
||||||
return manifest
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
opts, args = parser.parse_known_args()
|
opts, args = parser.parse_known_args()
|
||||||
|
|
||||||
|
@ -317,7 +282,6 @@ def main():
|
||||||
with TemporaryDirectory() as d:
|
with TemporaryDirectory() as d:
|
||||||
setattr(opts, "tmpdir", d)
|
setattr(opts, "tmpdir", d)
|
||||||
|
|
||||||
manifest = fix_sources(opts, manifest)
|
|
||||||
manifest = rezip_wheels(opts, manifest)
|
manifest = rezip_wheels(opts, manifest)
|
||||||
manifest = ensure_srcs_map(opts, manifest)
|
manifest = ensure_srcs_map(opts, manifest)
|
||||||
manifest = enable_unzipping(opts, manifest)
|
manifest = enable_unzipping(opts, manifest)
|
||||||
|
@ -329,17 +293,17 @@ def main():
|
||||||
manifest = insert_manifest_json(opts, manifest)
|
manifest = insert_manifest_json(opts, manifest)
|
||||||
|
|
||||||
if opts.debug:
|
if opts.debug:
|
||||||
print("\n---")
|
from pprint import pprint
|
||||||
json.dump(
|
|
||||||
|
print("---")
|
||||||
|
pprint(
|
||||||
{
|
{
|
||||||
"$type": "zapp",
|
"$type": "zapp",
|
||||||
"opts": {
|
"opts": {
|
||||||
k: getattr(opts, k) for k in dir(opts) if not k.startswith("_")
|
k: getattr(opts, k) for k in dir(opts) if not k.startswith("_")
|
||||||
},
|
},
|
||||||
"manifest": manifest,
|
"manifest": manifest,
|
||||||
},
|
}
|
||||||
sys.stdout,
|
|
||||||
indent=2
|
|
||||||
)
|
)
|
||||||
|
|
||||||
with open(opts.output, "w") as zapp:
|
with open(opts.output, "w") as zapp:
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
"""The Zapp runtime manifest API."""
|
"""The Zapp runtime manifest API."""
|
||||||
|
|
||||||
import argparse
|
|
||||||
import json
|
import json
|
||||||
from copy import deepcopy
|
from copy import deepcopy
|
||||||
from importlib.resources import open_text
|
from importlib.resources import open_text
|
||||||
|
@ -36,20 +35,4 @@ def manifest():
|
||||||
return json.load(fp)
|
return json.load(fp)
|
||||||
|
|
||||||
|
|
||||||
PARSER = argparse.ArgumentParser()
|
|
||||||
PARSER.add_argument("--json", action="store_const", const="json", dest="format", default="json")
|
|
||||||
PARSER.add_argument("--requirements", action="store_const", const="requirements", dest="format")
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
opts, args = PARSER.parse_known_args()
|
|
||||||
|
|
||||||
if opts.format == "json":
|
|
||||||
print(json.dumps(manifest()))
|
|
||||||
|
|
||||||
elif opts.format == "requirements":
|
|
||||||
for req, rev in manifest()["requirements"].items():
|
|
||||||
print("{}=={}".format(req, rev))
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ["manifest"]
|
__all__ = ["manifest"]
|
||||||
|
|
|
@ -105,6 +105,9 @@ def _zapp_impl(ctx):
|
||||||
stored_path = _store_path(input_file.short_path, ctx, import_roots)
|
stored_path = _store_path(input_file.short_path, ctx, import_roots)
|
||||||
if stored_path:
|
if stored_path:
|
||||||
local_path = input_file.path
|
local_path = input_file.path
|
||||||
|
conflicts = [e for e in sources_map if e[0] == stored_path]
|
||||||
|
if conflicts:
|
||||||
|
print("File %s conflicts with others, %s" % (input_file, conflicts))
|
||||||
sources_map.append([stored_path, local_path])
|
sources_map.append([stored_path, local_path])
|
||||||
|
|
||||||
_check_script(main_py_ref, sources_map)
|
_check_script(main_py_ref, sources_map)
|
||||||
|
@ -151,14 +154,12 @@ def _zapp_impl(ctx):
|
||||||
progress_message = "Building zapp file %s" % ctx.label,
|
progress_message = "Building zapp file %s" % ctx.label,
|
||||||
executable = ctx.executable.compiler,
|
executable = ctx.executable.compiler,
|
||||||
arguments = [
|
arguments = [
|
||||||
|
"--debug",
|
||||||
"-o", ctx.outputs.executable.path,
|
"-o", ctx.outputs.executable.path,
|
||||||
manifest_file.path
|
manifest_file.path
|
||||||
],
|
],
|
||||||
mnemonic = "PythonCompile",
|
mnemonic = "PythonCompile",
|
||||||
use_default_shell_env = True,
|
use_default_shell_env = True,
|
||||||
execution_requirements = {
|
|
||||||
"no-sandbox": "", # So zappc can use a filesystem cache of (re)build wheels
|
|
||||||
},
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# .zapp file itself has no runfiles and no providers
|
# .zapp file itself has no runfiles and no providers
|
||||||
|
@ -205,7 +206,6 @@ def zapp_binary(name,
|
||||||
test=False,
|
test=False,
|
||||||
compiler=None,
|
compiler=None,
|
||||||
zip_safe=True,
|
zip_safe=True,
|
||||||
shebang=None,
|
|
||||||
_rule=_zapp,
|
_rule=_zapp,
|
||||||
**kwargs):
|
**kwargs):
|
||||||
"""A self-contained, single-file Python program, with a .zapp file extension.
|
"""A self-contained, single-file Python program, with a .zapp file extension.
|
||||||
|
@ -262,7 +262,6 @@ def zapp_binary(name,
|
||||||
prelude_points = prelude_points,
|
prelude_points = prelude_points,
|
||||||
zip_safe = zip_safe,
|
zip_safe = zip_safe,
|
||||||
wheels = [name + ".whls"],
|
wheels = [name + ".whls"],
|
||||||
shebang = shebang,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
Loading…
Reference in a new issue