Done with flake8

parent 29aaea1a45, commit 2b9d3ad927
18 changed files with 66 additions and 71 deletions
@@ -131,16 +131,16 @@ def available_migrations(queries: Queries, conn) -> t.Iterable[MigrationDescript
         # query_fn: t.Callable + {.__name__, .__doc__, .sql}
         query_fn = getattr(queries, query_name)
         yield MigrationDescriptor(
-            name = query_name,
-            committed_at = None,
-            sha256sum = sha256(query_fn.sql.encode("utf-8")).hexdigest())
+            name=query_name,
+            committed_at=None,
+            sha256sum=sha256(query_fn.sql.encode("utf-8")).hexdigest())


 def execute_migration(queries: Queries, conn, migration: MigrationDescriptor):
     """Execute a given migration singularly."""

     with conn:
         # Mark the migration as in flight
         queries.anosql_migrations_create(
             conn,
             # Args
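The hunk above is a pure E251 cleanup: PEP 8 allows spaces around "=" in an ordinary assignment, but not when it introduces a keyword argument or a default parameter value. A minimal illustration, hypothetical code rather than anything from this repo:

    def describe(name=None):    # default value: no spaces around "="
        return name

    describe(name = "x")        # E251: unexpected spaces around keyword equals
    describe(name="x")          # flake8-clean, same call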
@@ -13,6 +13,7 @@ _SQL = """\
 CREATE TABLE kv (`id` INT, `key` TEXT, `value` TEXT);
 """

+
 def table_exists(conn, table_name):
     return list(conn.execute(f"""\
 SELECT (
@@ -35,7 +36,7 @@ def conn() -> sqlite3.Connection:
 def test_connect(conn: sqlite3.Connection):
     """Assert that the connection works and we can execute against it."""

-    assert list(conn.execute("SELECT 1;")) == [(1,),]
+    assert list(conn.execute("SELECT 1;")) == [(1, ), ]


 @pytest.fixture
@@ -66,13 +66,7 @@ def test_insert_returning(sqlite3_conn, queries):
     )
     print(blogid, type(blogid))
     cur = sqlite3_conn.cursor()
-    cur.execute("""\
-select title
-from blogs
-where blogid = ?;
-""",
-        (blogid,),
-    )
+    cur.execute("SELECT `title` FROM `blogs` WHERE `blogid` = ?;", (blogid,))
     actual = cur.fetchone()
     cur.close()
     expected = ("My first blog",)
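Folding the lowercase SQL heredoc into a single execute() call removes the oddly indented continuation lines flake8 was flagging, and the query stays parameterized. A self-contained sketch of the same pattern, with the schema invented for illustration:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE blogs (blogid INTEGER PRIMARY KEY, title TEXT)")
    blogid = conn.execute(
        "INSERT INTO blogs (title) VALUES (?)", ("My first blog",)
    ).lastrowid
    cur = conn.cursor()
    cur.execute("SELECT `title` FROM `blogs` WHERE `blogid` = ?;", (blogid,))
    assert cur.fetchone() == ("My first blog",)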
@@ -46,8 +46,8 @@ def curse_repl(handle_buffer):
         for ex, buff, vals, err in reversed(examples):
             putstr(f"Example {ex}:", attr=curses.A_BOLD)

-            for l in buff.split("\n"):
-                putstr(f" | {l}")
+            for line in buff.split("\n"):
+                putstr(f" | {line}")

             putstr("")

@@ -55,8 +55,8 @@ def curse_repl(handle_buffer):
                 err = str(err)
                 err = err.split("\n")
                 putstr(" Error:")
-                for l in err:
-                    putstr(f" {l}", attr=curses.COLOR_YELLOW)
+                for line in err:
+                    putstr(f" {line}", attr=curses.COLOR_YELLOW)

             elif vals:
                 putstr(" Values:")
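Both curse_repl hunks fix E741, which flags the single-letter names "l", "O", and "I" because they are easily confused with the digits 1 and 0 in many fonts. The rename is behavior-preserving; a minimal sketch, with print standing in for the curses-backed putstr:

    buff = "first\nsecond"

    # Before (E741: ambiguous variable name 'l'):
    #     for l in buff.split("\n"):
    #         putstr(f" | {l}")

    # After, same behavior:
    for line in buff.split("\n"):
        print(f" | {line}")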
@@ -24,9 +24,9 @@ def mk_sqlist(contents, open=None, close=None):
     )


-def pairwise(l: list) -> iter:
+def pairwise(elems: list) -> iter:
     "s -> (s0,s1), (s2,s3), (s4, s5), ..."
-    return zip(l[::2], l[1::2])
+    return zip(elems[::2], elems[1::2])


 def mk_dict(contents, open=None, close=None):
@@ -1,6 +1,4 @@
-#!/usr/bin/env python3
-
-__doc__ = f"""
+"""
 Datalog (py)
 ============

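The old module opened with a shebang and then assigned __doc__ = f"""...""" by hand; since the text apparently interpolates nothing, the f-prefix trips pyflakes (F541), and the assignment is redundant anyway because a bare string literal as the first statement becomes the module docstring automatically. A sketch of the fixed shape:

    """
    Datalog (py)
    ============
    """

    # The literal above is already the module docstring; no assignment needed.
    print(__doc__)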
@@ -127,8 +125,7 @@ def main(args):
                 db = db.merge(read_dataset(f.read()))
                 print(f"Loaded {db_file} ...")
             except Exception as e:
-                print("Internal error - {e}")
-                print(f"Unable to load db {db_file}, skipping")
+                print(f"Internal error - {e}\nUnable to load db {db_file}, skipping")

     while True:
         try:
@@ -155,8 +152,8 @@ def main(args):
             else:
                 try:
                     op, val = read_command(line)
-                except Exception as e:
-                    print(f"Got an unknown command or syntax error, can't tell which")
+                except Exception:
+                    print("Got an unknown command or a syntax error, can't tell which")
                     continue

                 # Definition merges on the DB
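Two findings land in this one hunk: F841 (the exception was bound to "e" but never used) and F541 (an f-string with no placeholders). Both vanish by dropping the unused binding and the stray f-prefix; a minimal reproduction:

    try:
        raise ValueError("boom")
    except Exception:                       # no "as e": the name was never read (F841)
        print("could not parse the input")  # no {} placeholders, so no f-prefix (F541)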
@@ -215,7 +212,7 @@ def main(args):
                 qdb = db.merge(db_cls([], [val]))
                 val = val.pattern

-            with yaspin(SPINNER) as spinner:
+            with yaspin(SPINNER):
                 with Timing() as t:
                     try:
                         results = list(select(qdb, val))
@@ -19,7 +19,7 @@ def match(tuple, expr, bindings=None):
         # This may not work out long term.
         if isinstance(a, LVar) and isinstance(b, LVar):
             continue
-        elif isinstance(a, LVar) and not a in bindings and isinstance(b, Constant):
+        elif isinstance(a, LVar) and a not in bindings and isinstance(b, Constant):
             bindings[a] = b
         elif isinstance(a, LVar) and a in bindings and bindings[a] == b:
             continue
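The unification fix is E713. "not a in bindings" already parses as "not (a in bindings)", so the meaning was correct; PEP 8 simply prefers the dedicated "not in" operator. The two spellings are equivalent:

    bindings = {"x": 1}
    assert (not "y" in bindings) == ("y" not in bindings)  # identical result; flake8 flags only the first form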
@@ -4,7 +4,7 @@ A datalog reader.

 from collections import defaultdict

-from datalog.parser import FAILURE, Grammar
+from datalog.parser import FAILURE, Grammar, ParseError
 from datalog.types import Constant, Dataset, LVar, Rule


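The reader gains an explicit ParseError import, presumably because the name was already referenced somewhere below, which pyflakes reports as F821 (undefined name) until an import supplies it. The shape of the fix, with a hypothetical helper and assuming the repo's datalog.parser module is importable:

    from datalog.parser import ParseError  # without this import, the raise below is F821

    def read_or_die(text):
        raise ParseError(text)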
@@ -73,9 +73,6 @@ def test_lvar_unification(db_cls):
 def test_rule_join(db_cls):
     """Test a basic join query - the parent -> grandparent relation."""

-    child = Constant("child")
-    gc = Constant("grandchild")
-
     d = read(
         """
 child(a, b).
@@ -40,7 +40,7 @@ class TreeDumper(ast.NodeVisitor):
         self.visit(node)

     def visit(self, node):
-        nodetype = type(node)
+        # nodetype = type(node)
         nodename = node.__class__.__name__
         indent = " " * len(self._stack) * 2
         print(indent + nodename)
@@ -59,14 +59,14 @@ class YAMLTreeDumper(ast.NodeVisitor):

     def node2yml(self, node):
         try:
-            nodetype = type(node)
+            # nodetype = type(node)
             nodename = node.__class__.__name__
             return {
                 "op": nodename,
                 "props": {n: node.__dict__[n] for n in propnames(node)},
                 "children": [],
             }
-        except:
+        except Exception:
             print(repr(node), propnames(node), dir(node))

     def visit(self, node):
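The dumper's bare "except:" becomes "except Exception:" (E722), the same fix applied again further down in the vendored interpreter. The bare form also traps KeyboardInterrupt and SystemExit, which derive from BaseException; catching Exception lets those unwind normally. Sketch:

    def risky():
        raise ValueError("boom")

    try:
        risky()
    except Exception as err:   # unlike a bare except:, Ctrl-C (KeyboardInterrupt) still propagates
        print(f"handled: {err}")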
@@ -1,3 +1,5 @@
+# flake8: noqa: all
+
 # Python AST interpreter written in Python
 #
 # This module is part of the Pycopy https://github.com/pfalcon/pycopy
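Rather than restyle a vendored file, the commit mutes it wholesale. One caveat worth noting: flake8 documents the file-level switch as the bare comment "# flake8: noqa", and newer releases warn that the file-level form with codes appended (as with the ": all" suffix here) is unsupported and skip it, so the bare spelling is the reliable one:

    # flake8: noqa
    # Everything in this file is now exempt from flake8 checks.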
@@ -105,6 +107,7 @@ def arg_name(arg):
     else:
         return arg.arg

+
 def kwarg_defaults(args):
     if sys.version_info < (3, 0, 0):
         return args.defaults
@@ -117,7 +120,6 @@ class TargetNonlocalFlow(Exception):
     a target application."""


-
 class TargetBreak(TargetNonlocalFlow):
     pass

@@ -248,7 +250,7 @@ class ModuleInterpreter(StrictNodeVisitor):
         self.push_ns(ClassNS(node))
         try:
             self.stmt_list_visit(node.body)
-        except:
+        except Exception:
             self.pop_ns()
             raise
         ns = self.ns
@@ -563,7 +565,7 @@ class ModuleInterpreter(StrictNodeVisitor):
         it = iter(val)
         try:
             for elt_idx, t in enumerate(target.elts):
-                if getattr(ast, "Starred", None ) and isinstance(t, ast.Starred):
+                if getattr(ast, "Starred", None) and isinstance(t, ast.Starred):
                     t = t.value
                     all_elts = list(it)
                     break_i = len(all_elts) - (len(target.elts) - elt_idx - 1)
@@ -159,7 +159,7 @@ if __name__ == "__main__":
     test_poll(q, reps)
     test_append(q, reps)

-    print(f"Testing with :memory:")
+    print("Testing with :memory:")
     q = JobQueue(":memory:")
     test_insert(q, reps)
     test_poll(q, reps)
@@ -47,8 +47,8 @@ def test_poll(db):
     j1 = db.create("payload 1")
     j2 = db.create("payload 2")
     assert j1.modified == j2.modified, "Two within the second to force the `rowid` ASC"
     sleep(1)  # And a side-effect for the third one
-    j3 = db.create("payload 3")
+    db.create("payload 3")

     j = db.poll("true", ["assigned"])

@@ -60,7 +60,7 @@ def test_poll(db):
 def test_poll_not_found(db):
     """Test that poll can return nothing."""

-    j1 = db.create("payload 1")
+    db.create("payload 1")
     j = db.poll("false", ["assigned"])
     assert j is None

@@ -69,7 +69,7 @@ def test_append(db, payload):
     """Test that appending an event to the log does append and preserves invariants."""

     j = db.create(payload)
-    sleep(1)  ## side-effect so that sqlite3 gets a different commit timestamp
+    sleep(1)  # side-effect so that sqlite3 gets a different commit timestamp
     j_prime = db.append_event(j.id, "some user-defined event")

     assert isinstance(j_prime, Job)
@@ -85,7 +85,7 @@ def test_cas_ok(db):
     """Test that we can CAS a job from one state to the 'next'."""

     j = db.create("job2", ["state", 2])
-    sleep(1)  ## side-effect so that sqlite3 gets a different commit timestamp
+    sleep(1)  # side-effect so that sqlite3 gets a different commit timestamp
     j_prime = db.cas_state(j.id, ["state", 2], ["state", 3])

     assert isinstance(j_prime, Job), "\n".join(db._db.iterdump())
@@ -154,13 +154,13 @@ class YamlLinter(object):
         if maxl := schema.get("maxLength"):
             if len(node.value) > maxl:
                 yield LintRecord(
-                    LintLevel.MISSMATCH, node, schema, f"Expected a shorter string"
+                    LintLevel.MISSMATCH, node, schema, "Expected a shorter string"
                 )

         if minl := schema.get("minLength"):
             if len(node.value) < minl:
                 yield LintRecord(
-                    LintLevel.MISSMATCH, node, schema, f"Expected a longer string"
+                    LintLevel.MISSMATCH, node, schema, "Expected a longer string"
                 )

         if pat := schema.get("pattern"):
@@ -169,7 +169,7 @@ class YamlLinter(object):
                     LintLevel.MISSMATCH,
                     node,
                     schema,
-                    f"Expected a string matching the pattern",
+                    "Expected a string matching the pattern",
                 )

     def lint_integer(self, schema, node: Node) -> t.Iterable[LintRecord]:
@@ -259,11 +259,11 @@ class YamlLinter(object):

         # Special schemas
         # These are schemas that accept everything.
-        if schema == True or schema == {}:
+        if schema is True or schema == {}:
             yield from []

         # This is the schema that rejects everything.
-        elif schema == False:
+        elif schema is False:
             yield LintRecord(
                 LintLevel.UNEXPECTED, node, schema, "Received an unexpected value"
             )
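The schema checks move from equality to identity (E712). Here the distinction is load-bearing rather than stylistic: in Python, 1 == True and 0 == False, so an integer schema of 1 would have satisfied "schema == True", while "schema is True" accepts only the boolean itself:

    schema = 1
    print(schema == True)   # True:  ints compare equal to bools
    print(schema is True)   # False: only the bool singleton passes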
setup.cfg
@@ -6,7 +6,12 @@ multi_line_output = 3
 lines_after_imports = 2
 default_section = THIRDPARTY
 known_localfolder = datalog
-sections = FUTURE,STDLIB,LOCALFOLDER,THIRDPARTY
+sections = [
+    FUTURE,
+    STDLIB,
+    LOCALFOLDER,
+    THIRDPARTY,
+]
 force_sort_within_sections = 1
 force_alphabetical_sort_within_sections = 1
 combine_as_imports = 1
@@ -18,7 +23,16 @@ index-servers = pypi
 shitlist = pip,pkg_resources,setuptools

 [flake8]
-extend-ignore = E203,E501,F405,F403,E731,E306,E227
+extend-ignore = [
+    E203,
+    E227,
+    E306,
+    E501,
+    E731,
+    E741,
+    F403,
+    F405,
+]

 [pypi]
 repository = https://pypi.python.org/pypi
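Both setup.cfg rewrites lean on how INI is parsed: the format has no list syntax of its own, so configparser folds the indented continuation lines into a single string value and leaves the splitting to whichever tool consumes it (isort for sections, flake8 for extend-ignore). A quick check of what the parser actually hands over, using a trimmed-down stand-in for this file:

    import configparser
    import textwrap

    cfg = configparser.ConfigParser()
    cfg.read_string(textwrap.dedent("""\
        [flake8]
        extend-ignore = [
            E203,
            E501,
            ]
    """))
    print(repr(cfg["flake8"]["extend-ignore"]))
    # '[\nE203,\nE501,\n]': one opaque string, brackets included; the consumer must split it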
@@ -14,10 +14,6 @@ load("@rules_zapp//zapp:zapp.bzl",
     "zapp_binary",
 )

-load("//tools/flake8:flake8.bzl",
-    "flake8",
-)
-
 def py_requirement(*args, **kwargs):
     """A re-export of requirement()"""
     return _py_requirement(*args, **kwargs)
@@ -211,12 +207,6 @@ def py_project(name=None,
         ],
     )

-    # if lib_srcs:
-    #     flake8(
-    #         name = "flake8",
-    #         deps = [lib_name],
-    #     )
-
     if main:
         py_binary(
             name=name,
@@ -58,7 +58,7 @@ LICENSES_BY_LOWERNAME = {

 # Mash in some cases.
 LICENSES_BY_LOWERNAME.update(
-    {l.split(" :: ")[-1].lower(): l for l in APPROVED_LICENSES}
+    {lic.split(" :: ")[-1].lower(): lic for lic in APPROVED_LICENSES}
 )

 # As a workaround for packages which don"t have correct meadata on PyPi, hand-verified packages
|
@ -108,7 +108,7 @@ def licenses(package: Requirement):
|
||||||
this problem.
|
this problem.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
l = []
|
lics = []
|
||||||
version = next((v for op, v in package.specs if op == "=="), None)
|
version = next((v for op, v in package.specs if op == "=="), None)
|
||||||
print(package.name, version)
|
print(package.name, version)
|
||||||
|
|
||||||
|
@ -120,7 +120,7 @@ def licenses(package: Requirement):
|
||||||
headers={"Accept": "application/json"}
|
headers={"Accept": "application/json"}
|
||||||
).json()
|
).json()
|
||||||
if ln := bash_license(blob.get("license")):
|
if ln := bash_license(blob.get("license")):
|
||||||
l.append(ln)
|
lics.append(ln)
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
version = list(blob.get("releases", {}).keys())[-1]
|
version = list(blob.get("releases", {}).keys())[-1]
|
||||||
|
@@ -133,16 +133,16 @@ def licenses(package: Requirement):
                 f"https://pypi.org/pypi/{package.name}/{version}/json",
                 headers={"Accept": "application/json"}
             ).json()
-            l = [
+            lics.extend([
                 c
                 for c in blob.get("info", {}).get("classifiers", [])
                 if c.startswith("License")
-            ]
+            ])
             ln = blob.get("info", {}).get("license")
-            if ln and not l:
-                l.append(bash_license(ln))
+            if ln and not lics:
+                lics.append(bash_license(ln))

-    return l
+    return lics


 @pytest.mark.parametrize("package", PACKAGES)
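The license-gathering hunk is slightly more than the l-to-lics rename (E741) that motivates it: the old code rebound the list with "l = [...]", while the new code extends it. On this path the accumulator is still empty, so behavior is unchanged, but extend() stays correct if an earlier branch ever appends first:

    lics = ["MIT License"]
    lics = ["BSD License"]          # rebinding: earlier contents are lost
    assert lics == ["BSD License"]

    lics = ["MIT License"]
    lics.extend(["BSD License"])    # extending: contents accumulate
    assert lics == ["MIT License", "BSD License"]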
@@ -151,5 +151,5 @@ def test_approved_license(package):

     _licenses = licenses(package)
     assert package.name in APPROVED_PACKAGES or any(
-        l in APPROVED_LICENSES for l in _licenses
+        lic in APPROVED_LICENSES for lic in _licenses
     ), f"{package} was not approved and its license(s) were unknown {_licenses!r}"
@@ -105,7 +105,7 @@ def do_build(

     status = sys.stdout
     warning = sys.stderr
-    error = sys.stderr
+    # error = sys.stderr

     confdir = confdir or sourcedir
     confoverrides = {}  # FIXME: support these
@@ -176,7 +176,7 @@ def do_serve(host, port, sourcedir, outputdir):
         elif (
             not path.startswith(outputdir)
             and path not in ignorelist
-            and not path in watchlist
+            and path not in watchlist
         ):
             # Watch any source file (file we open for reading)
             server.watch(path, build)