Done with flake8

Reid 'arrdem' McKenzie 2021-08-30 01:06:21 -06:00
parent 4664b4353c
commit 4f2ee8e021
18 changed files with 66 additions and 71 deletions

View file

@@ -131,16 +131,16 @@ def available_migrations(queries: Queries, conn) -> t.Iterable[MigrationDescript
# query_fn: t.Callable + {.__name__, .__doc__, .sql}
query_fn = getattr(queries, query_name)
yield MigrationDescriptor(
name = query_name,
committed_at = None,
sha256sum = sha256(query_fn.sql.encode("utf-8")).hexdigest())
name=query_name,
committed_at=None,
sha256sum=sha256(query_fn.sql.encode("utf-8")).hexdigest())
def execute_migration(queries: Queries, conn, migration: MigrationDescriptor):
"""Execute a given migration singularly."""
with conn:
# Mark the migration as in flight
# Mark the migration as in flight
queries.anosql_migrations_create(
conn,
# Args
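The keyword-argument spacing change above is the flake8 / PEP 8 E251 rule: `=` takes no surrounding spaces when it introduces a keyword argument or a default value. A minimal sketch, using a hypothetical function rather than the project's own API:

```python
# E251: no spaces around "=" for defaults and keyword arguments.
def describe(name, committed_at=None):
    return f"{name} @ {committed_at}"

describe(name="m001_initial", committed_at=None)   # preferred
# describe(name = "m001_initial")                  # flagged as E251
```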

View file

@@ -13,6 +13,7 @@ _SQL = """\
CREATE TABLE kv (`id` INT, `key` TEXT, `value` TEXT);
"""
def table_exists(conn, table_name):
return list(conn.execute(f"""\
SELECT (
@@ -35,7 +36,7 @@ def conn() -> sqlite3.Connection:
def test_connect(conn: sqlite3.Connection):
"""Assert that the connection works and we can execute against it."""
assert list(conn.execute("SELECT 1;")) == [(1,),]
assert list(conn.execute("SELECT 1;")) == [(1, ), ]
@pytest.fixture

View file

@@ -66,13 +66,7 @@ def test_insert_returning(sqlite3_conn, queries):
)
print(blogid, type(blogid))
cur = sqlite3_conn.cursor()
cur.execute("""\
select title
from blogs
where blogid = ?;
""",
(blogid,),
)
cur.execute("SELECT `title` FROM `blogs` WHERE `blogid` = ?;", (blogid,))
actual = cur.fetchone()
cur.close()
expected = ("My first blog",)

View file

@@ -46,8 +46,8 @@ def curse_repl(handle_buffer):
for ex, buff, vals, err in reversed(examples):
putstr(f"Example {ex}:", attr=curses.A_BOLD)
for l in buff.split("\n"):
putstr(f" | {l}")
for line in buff.split("\n"):
putstr(f" | {line}")
putstr("")
@@ -55,8 +55,8 @@ def curse_repl(handle_buffer):
err = str(err)
err = err.split("\n")
putstr(" Error:")
for l in err:
putstr(f" {l}", attr=curses.COLOR_YELLOW)
for line in err:
putstr(f" {line}", attr=curses.COLOR_YELLOW)
elif vals:
putstr(" Values:")

View file

@@ -24,9 +24,9 @@ def mk_sqlist(contents, open=None, close=None):
)
def pairwise(l: list) -> iter:
def pairwise(elems: list) -> iter:
"s -> (s0,s1), (s2,s3), (s4, s5), ..."
return zip(l[::2], l[1::2])
return zip(elems[::2], elems[1::2])
def mk_dict(contents, open=None, close=None):
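For reference, a small usage sketch of the renamed `pairwise` helper, assuming the definition shown above; it pairs consecutive elements two at a time and drops a trailing odd element:

```python
def pairwise(elems: list) -> iter:
    "s -> (s0,s1), (s2,s3), (s4, s5), ..."
    return zip(elems[::2], elems[1::2])

assert list(pairwise([1, 2, 3, 4])) == [(1, 2), (3, 4)]
assert list(pairwise(["a", "b", "c"])) == [("a", "b")]  # odd tail is dropped
```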

View file

@@ -1,6 +1,4 @@
#!/usr/bin/env python3
__doc__ = f"""
"""
Datalog (py)
============
@@ -127,8 +125,7 @@ def main(args):
db = db.merge(read_dataset(f.read()))
print(f"Loaded {db_file} ...")
except Exception as e:
print("Internal error - {e}")
print(f"Unable to load db {db_file}, skipping")
print(f"Internal error - {e}\nUnable to load db {db_file}, skipping")
while True:
try:
@@ -155,8 +152,8 @@ def main(args):
else:
try:
op, val = read_command(line)
except Exception as e:
print(f"Got an unknown command or syntax error, can't tell which")
except Exception:
print("Got an unknown command or a syntax error, can't tell which")
continue
# Definition merges on the DB
@@ -215,7 +212,7 @@ def main(args):
qdb = db.merge(db_cls([], [val]))
val = val.pattern
with yaspin(SPINNER) as spinner:
with yaspin(SPINNER):
with Timing() as t:
try:
results = list(select(qdb, val))
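Two of the fixes in this file are easy to show in isolation: without the `f` prefix a string leaves `{e}` uninterpolated, and the `as e` binding is only needed when the handler actually reads the exception object. A minimal sketch with hypothetical values:

```python
e = ValueError("boom")
print("Internal error - {e}")    # prints the literal text "Internal error - {e}"
print(f"Internal error - {e}")   # prints "Internal error - boom"

try:
    raise SyntaxError("bad input")
except Exception:  # no "as e" needed; the handler never touches the exception
    print("Got an unknown command or a syntax error, can't tell which")
```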

View file

@@ -19,7 +19,7 @@ def match(tuple, expr, bindings=None):
# This may not work out long term.
if isinstance(a, LVar) and isinstance(b, LVar):
continue
elif isinstance(a, LVar) and not a in bindings and isinstance(b, Constant):
elif isinstance(a, LVar) and a not in bindings and isinstance(b, Constant):
bindings[a] = b
elif isinstance(a, LVar) and a in bindings and bindings[a] == b:
continue
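The `a not in bindings` spelling is the form flake8 (E713) prefers for this membership test; both spellings evaluate identically, but `not in` reads as a single operator. A small sketch with a throwaway dict:

```python
bindings = {"x": 1}
# Equivalent results; the left-hand form is the E713-preferred spelling.
assert ("y" not in bindings) == (not "y" in bindings)
```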

View file

@@ -4,7 +4,7 @@ A datalog reader.
from collections import defaultdict
from datalog.parser import FAILURE, Grammar
from datalog.parser import FAILURE, Grammar, ParseError
from datalog.types import Constant, Dataset, LVar, Rule

View file

@@ -73,9 +73,6 @@ def test_lvar_unification(db_cls):
def test_rule_join(db_cls):
"""Test a basic join query - the parent -> grandparent relation."""
child = Constant("child")
gc = Constant("grandchild")
d = read(
"""
child(a, b).

View file

@@ -40,7 +40,7 @@ class TreeDumper(ast.NodeVisitor):
self.visit(node)
def visit(self, node):
nodetype = type(node)
# nodetype = type(node)
nodename = node.__class__.__name__
indent = " " * len(self._stack) * 2
print(indent + nodename)
@@ -59,14 +59,14 @@ class YAMLTreeDumper(ast.NodeVisitor):
def node2yml(self, node):
try:
nodetype = type(node)
# nodetype = type(node)
nodename = node.__class__.__name__
return {
"op": nodename,
"props": {n: node.__dict__[n] for n in propnames(node)},
"children": [],
}
except:
except Exception:
print(repr(node), propnames(node), dir(node))
def visit(self, node):

View file

@@ -1,3 +1,5 @@
# flake8: noqa: all
# Python AST interpreter written in Python
#
# This module is part of the Pycopy https://github.com/pfalcon/pycopy
@@ -105,6 +107,7 @@ def arg_name(arg):
else:
return arg.arg
def kwarg_defaults(args):
if sys.version_info < (3, 0, 0):
return args.defaults
@@ -117,7 +120,6 @@ class TargetNonlocalFlow(Exception):
a target application."""
class TargetBreak(TargetNonlocalFlow):
pass
@@ -248,7 +250,7 @@ class ModuleInterpreter(StrictNodeVisitor):
self.push_ns(ClassNS(node))
try:
self.stmt_list_visit(node.body)
except:
except Exception:
self.pop_ns()
raise
ns = self.ns
@@ -563,7 +565,7 @@ class ModuleInterpreter(StrictNodeVisitor):
it = iter(val)
try:
for elt_idx, t in enumerate(target.elts):
if getattr(ast, "Starred", None ) and isinstance(t, ast.Starred):
if getattr(ast, "Starred", None) and isinstance(t, ast.Starred):
t = t.value
all_elts = list(it)
break_i = len(all_elts) - (len(target.elts) - elt_idx - 1)
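The `except:` to `except Exception:` changes in this file address flake8 E722: a bare `except:` also intercepts `SystemExit` and `KeyboardInterrupt`, which derive from `BaseException` rather than `Exception`. A minimal sketch of the difference:

```python
import sys

try:
    sys.exit(1)  # raises SystemExit
except Exception:
    print("not reached: SystemExit is not an Exception subclass")
except BaseException:
    print("reached; a bare except: would have swallowed it the same way")
```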

View file

@@ -159,7 +159,7 @@ if __name__ == "__main__":
test_poll(q, reps)
test_append(q, reps)
print(f"Testing with :memory:")
print("Testing with :memory:")
q = JobQueue(":memory:")
test_insert(q, reps)
test_poll(q, reps)

View file

@@ -47,8 +47,8 @@ def test_poll(db):
j1 = db.create("payload 1")
j2 = db.create("payload 2")
assert j1.modified == j2.modified, "Two within the second to force the `rowid` ASC"
sleep(1) # And a side-effect for the third one
j3 = db.create("payload 3")
sleep(1) # And a side-effect for the third one
db.create("payload 3")
j = db.poll("true", ["assigned"])
@@ -60,7 +60,7 @@ def test_poll(db):
def test_poll_not_found(db):
"""Test that poll can return nothing."""
j1 = db.create("payload 1")
db.create("payload 1")
j = db.poll("false", ["assigned"])
assert j is None
@@ -69,7 +69,7 @@ def test_append(db, payload):
"""Test that appending an event to the log does append and preserves invariants."""
j = db.create(payload)
sleep(1) ## side-effect so that sqlite3 gets a different commit timestamp
sleep(1) # side-effect so that sqlite3 gets a different commit timestamp
j_prime = db.append_event(j.id, "some user-defined event")
assert isinstance(j_prime, Job)
@@ -85,7 +85,7 @@ def test_cas_ok(db):
"""Test that we can CAS a job from one state to the 'next'."""
j = db.create("job2", ["state", 2])
sleep(1) # side-effect so that sqlite3 gets a different commit timestamp
sleep(1) # side-effect so that sqlite3 gets a different commit timestamp
j_prime = db.cas_state(j.id, ["state", 2], ["state", 3])
assert isinstance(j_prime, Job), "\n".join(db._db.iterdump())

View file

@@ -154,13 +154,13 @@ class YamlLinter(object):
if maxl := schema.get("maxLength"):
if len(node.value) > maxl:
yield LintRecord(
LintLevel.MISSMATCH, node, schema, f"Expected a shorter string"
LintLevel.MISSMATCH, node, schema, "Expected a shorter string"
)
if minl := schema.get("minLength"):
if len(node.value) < minl:
yield LintRecord(
LintLevel.MISSMATCH, node, schema, f"Expected a longer string"
LintLevel.MISSMATCH, node, schema, "Expected a longer string"
)
if pat := schema.get("pattern"):
@@ -169,7 +169,7 @@ class YamlLinter(object):
LintLevel.MISSMATCH,
node,
schema,
f"Expected a string matching the pattern",
"Expected a string matching the pattern",
)
def lint_integer(self, schema, node: Node) -> t.Iterable[LintRecord]:
@@ -259,11 +259,11 @@ class YamlLinter(object):
# Special schemas
# These are schemas that accept everything.
if schema == True or schema == {}:
if schema is True or schema == {}:
yield from []
# This is the schema that rejects everything.
elif schema == False:
elif schema is False:
yield LintRecord(
LintLevel.UNEXPECTED, node, schema, "Received an unexpected value"
)
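The `is True` / `is False` comparisons are the flake8 (E712) preferred form; they also keep a numeric value such as `1` from being conflated with the boolean schema `True`. A small sketch:

```python
schema = 1
print(schema == True)   # True: equality treats 1 and True as equal
print(schema is True)   # False: identity keeps the boolean schema distinct
```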

View file

@@ -6,7 +6,12 @@ multi_line_output = 3
lines_after_imports = 2
default_section = THIRDPARTY
known_localfolder = datalog
sections = FUTURE,STDLIB,LOCALFOLDER,THIRDPARTY
sections = [
FUTURE,
STDLIB,
LOCALFOLDER,
THIRDPARTY,
]
force_sort_within_sections = 1
force_alphabetical_sort_within_sections = 1
combine_as_imports = 1
@@ -18,7 +23,16 @@ index-servers = pypi
shitlist = pip,pkg_resources,setuptools
[flake8]
extend-ignore = E203,E501,F405,F403,E731,E306,E227
extend-ignore = [
E203,
E227,
E306,
E501,
E731,
E741,
F403,
F405,
]
[pypi]
repository = https://pypi.python.org/pypi

View file

@@ -14,10 +14,6 @@ load("@rules_zapp//zapp:zapp.bzl",
"zapp_binary",
)
load("//tools/flake8:flake8.bzl",
"flake8",
)
def py_requirement(*args, **kwargs):
"""A re-export of requirement()"""
return _py_requirement(*args, **kwargs)
@@ -211,12 +207,6 @@ def py_project(name=None,
],
)
# if lib_srcs:
# flake8(
# name = "flake8",
# deps = [lib_name],
# )
if main:
py_binary(
name=name,

View file

@@ -58,7 +58,7 @@ LICENSES_BY_LOWERNAME = {
# Mash in some cases.
LICENSES_BY_LOWERNAME.update(
{l.split(" :: ")[-1].lower(): l for l in APPROVED_LICENSES}
{lic.split(" :: ")[-1].lower(): lic for lic in APPROVED_LICENSES}
)
# As a workaround for packages which don't have correct metadata on PyPi, hand-verified packages
@@ -108,7 +108,7 @@ def licenses(package: Requirement):
this problem.
"""
l = []
lics = []
version = next((v for op, v in package.specs if op == "=="), None)
print(package.name, version)
@@ -120,7 +120,7 @@ def licenses(package: Requirement):
headers={"Accept": "application/json"}
).json()
if ln := bash_license(blob.get("license")):
l.append(ln)
lics.append(ln)
else:
try:
version = list(blob.get("releases", {}).keys())[-1]
@@ -133,16 +133,16 @@ def licenses(package: Requirement):
f"https://pypi.org/pypi/{package.name}/{version}/json",
headers={"Accept": "application/json"}
).json()
l = [
lics.extend([
c
for c in blob.get("info", {}).get("classifiers", [])
if c.startswith("License")
]
])
ln = blob.get("info", {}).get("license")
if ln and not l:
l.append(bash_license(ln))
if ln and not lics:
lics.append(bash_license(ln))
return l
return lics
@pytest.mark.parametrize("package", PACKAGES)
@@ -151,5 +151,5 @@ def test_approved_license(package):
_licenses = licenses(package)
assert package.name in APPROVED_PACKAGES or any(
l in APPROVED_LICENSES for l in _licenses
lic in APPROVED_LICENSES for lic in _licenses
), f"{package} was not approved and its license(s) were unknown {_licenses!r}"

View file

@@ -105,7 +105,7 @@ def do_build(
status = sys.stdout
warning = sys.stderr
error = sys.stderr
# error = sys.stderr
confdir = confdir or sourcedir
confoverrides = {} # FIXME: support these
@@ -176,7 +176,7 @@ def do_serve(host, port, sourcedir, outputdir):
elif (
not path.startswith(outputdir)
and path not in ignorelist
and not path in watchlist
and path not in watchlist
):
# Watch any source file (file we open for reading)
server.watch(path, build)