From 2494211ef289a42c160ee6a859f4040405bcd2a7 Mon Sep 17 00:00:00 2001
From: Reid 'arrdem' McKenzie
Date: Thu, 2 Sep 2021 22:10:35 -0600
Subject: [PATCH] Black all the things

---
 .../src/python/anosql_migrations.py           |  17 +-
 .../test/python/test_migrations.py            |  20 +-
 projects/anosql/doc/conf.py                   |  47 +++--
 projects/anosql/src/python/anosql/__init__.py |   8 +-
 .../src/python/anosql/adapters/sqlite3.py     |   4 +-
 projects/anosql/src/python/anosql/core.py     |  22 ++-
 projects/anosql/test/python/conftest.py       |   9 +-
 projects/anosql/test/python/test_psycopg2.py  |  15 +-
 projects/anosql/test/python/test_simple.py    | 182 ++++++++++--------
 projects/anosql/test/python/test_sqlite3.py   |  20 +-
 projects/damm/test/python/test_damm.py        |  15 +-
 projects/damm/test/python/test_hypothesis.py  |   2 +-
 .../src/python/datalog/shell/__main__.py      |  12 +-
 .../test/python/test_datalog_evaluator.py     |  14 +-
 projects/flowmetal/scratch/astdump.py         |   6 +-
 projects/flowmetal/scratch/astinterp.py       |  20 +-
 projects/flowmetal/scratch/test.py            |   1 +
 projects/jobq/benchmark.py                    |  30 ++-
 projects/jobq/src/python/jobq/__init__.py     |  19 +-
 projects/jobqd/src/python/jobqd/__main__.py   |   8 +-
 projects/jobqd/src/python/jobqd/rest/api.py   |  61 +++---
 projects/kook/src/kook/client.py              |   7 +-
 projects/lilith/setup.py                      |  10 +-
 .../proquint/src/python/proquint/__init__.py  |  20 +-
 .../proquint/src/python/proquint/__main__.py  |   4 +-
 .../proquint/test/python/test_examples.py     |   5 +-
 .../proquint/test/python/test_hypothesis.py   |  20 +-
 projects/reqman/src/python/reqman/__main__.py |  10 +-
 .../src/python/yamlschema/__init__.py         |   2 +-
 tools/python/requirements.txt                 |  12 +-
 tools/python/test_licenses.py                 |  67 +++----
 31 files changed, 395 insertions(+), 294 deletions(-)
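This is a mechanical reformat: no behavior changes are intended, only the
whitespace, quoting, line-wrapping, and trailing-comma normalization that
Black emits. Black itself lands below in tools/python/requirements.txt as
black==21.8b0. The exact command line is not recorded in this patch; as a
sketch (assuming that pinned version), the same rewrite shown in the
test_connect() hunk can be reproduced with Black's format_str API:

    # Sketch only, not the recorded invocation; assumes black==21.8b0 as
    # pinned in tools/python/requirements.txt.
    import black

    # The old line from test_migrations.py: the "magic trailing comma" in
    # "[(1, ), ]" makes Black explode the list one element per line.
    src = 'assert list(conn.execute("SELECT 1;")) == [(1, ), ]\n'
    print(black.format_str(src, mode=black.Mode()), end="")
    # assert list(conn.execute("SELECT 1;")) == [
    #     (1,),
    # ]
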
diff --git a/projects/anosql-migrations/src/python/anosql_migrations.py b/projects/anosql-migrations/src/python/anosql_migrations.py
index 425d130..4fd6175 100644
--- a/projects/anosql-migrations/src/python/anosql_migrations.py
+++ b/projects/anosql-migrations/src/python/anosql_migrations.py
@@ -100,10 +100,16 @@ def create_tables(queries: Queries, conn) -> None:
         log.info("Created migrations table")
 
     # Insert the bootstrap 'fixup' record
-    execute_migration(queries, conn,
-                      MigrationDescriptor(
-                          name="anosql_migrations_create_table",
-                          sha256sum=sha256(queries.anosql_migrations_create_table.sql.encode("utf-8")).hexdigest()))
+    execute_migration(
+        queries,
+        conn,
+        MigrationDescriptor(
+            name="anosql_migrations_create_table",
+            sha256sum=sha256(
+                queries.anosql_migrations_create_table.sql.encode("utf-8")
+            ).hexdigest(),
+        ),
+    )
 
 
 def committed_migrations(queries: Queries, conn) -> t.Iterable[MigrationDescriptor]:
@@ -133,7 +139,8 @@ def available_migrations(queries: Queries, conn) -> t.Iterable[MigrationDescript
         yield MigrationDescriptor(
             name=query_name,
             committed_at=None,
-            sha256sum=sha256(query_fn.sql.encode("utf-8")).hexdigest())
+            sha256sum=sha256(query_fn.sql.encode("utf-8")).hexdigest(),
+        )
 
 
 def execute_migration(queries: Queries, conn, migration: MigrationDescriptor):
diff --git a/projects/anosql-migrations/test/python/test_migrations.py b/projects/anosql-migrations/test/python/test_migrations.py
index afe83a8..98e732d 100644
--- a/projects/anosql-migrations/test/python/test_migrations.py
+++ b/projects/anosql-migrations/test/python/test_migrations.py
@@ -15,7 +15,9 @@ CREATE TABLE kv (`id` INT, `key` TEXT, `value` TEXT);
 
 
 def table_exists(conn, table_name):
-    return list(conn.execute(f"""\
+    return list(
+        conn.execute(
+            f"""\
 SELECT (
     `name`
 )
@@ -23,7 +25,9 @@ def table_exists(conn, table_name):
 WHERE
     `type` = 'table'
 AND `name` = '{table_name}'
-;"""))
+;"""
+        )
+    )
 
 
 @pytest.fixture
@@ -36,7 +40,9 @@ def conn() -> sqlite3.Connection:
 def test_connect(conn: sqlite3.Connection):
     """Assert that the connection works and we can execute against it."""
 
-    assert list(conn.execute("SELECT 1;")) == [(1, ), ]
+    assert list(conn.execute("SELECT 1;")) == [
+        (1,),
+    ]
 
 
 @pytest.fixture
@@ -66,7 +72,9 @@ def test_migrations_list(conn, queries):
     """Test that we can list out available migrations."""
 
     ms = list(anosql_migrations.available_migrations(queries, conn))
-    assert any(m.name == "migration_0000_create_kv" for m in ms), f"Didn't find in {ms!r}"
+    assert any(
+        m.name == "migration_0000_create_kv" for m in ms
+    ), f"Didn't find in {ms!r}"
 
 
 def test_committed_migrations(conn, queries):
@@ -96,4 +104,6 @@ def test_post_committed_migrations(migrated_conn, queries):
     """Assert that the create_kv migration has been committed."""
 
     ms = list(anosql_migrations.committed_migrations(queries, migrated_conn))
-    assert any(m.name == "migration_0000_create_kv" for m in ms), "\n".join(migrated_conn.iterdump())
+    assert any(m.name == "migration_0000_create_kv" for m in ms), "\n".join(
+        migrated_conn.iterdump()
+    )
diff --git a/projects/anosql/doc/conf.py b/projects/anosql/doc/conf.py
index 490f820..9305f4d 100644
--- a/projects/anosql/doc/conf.py
+++ b/projects/anosql/doc/conf.py
@@ -240,29 +240,25 @@ htmlhelp_basename = "anosqldoc"
 # -- Options for LaTeX output ---------------------------------------------
 
 latex_elements = {
-    # The paper size ('letterpaper' or 'a4paper').
-    #
-    # 'papersize': 'letterpaper',
-
-    # The font size ('10pt', '11pt' or '12pt').
-    #
-    # 'pointsize': '10pt',
-
-    # Additional stuff for the LaTeX preamble.
-    #
-    # 'preamble': '',
-
-    # Latex figure (float) alignment
-    #
-    # 'figure_align': 'htbp',
+    # The paper size ('letterpaper' or 'a4paper').
+    #
+    # 'papersize': 'letterpaper',
+    # The font size ('10pt', '11pt' or '12pt').
+    #
+    # 'pointsize': '10pt',
+    # Additional stuff for the LaTeX preamble.
+    #
+    # 'preamble': '',
+    # Latex figure (float) alignment
+    #
+    # 'figure_align': 'htbp',
 }
 
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, "anosql.tex", u"anosql Documentation",
-     u"Honza Pokorny", "manual"),
+    (master_doc, "anosql.tex", u"anosql Documentation", u"Honza Pokorny", "manual"),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
@@ -302,10 +298,7 @@
 
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [
-    (master_doc, "anosql", u"anosql Documentation",
-     [author], 1)
-]
+man_pages = [(master_doc, "anosql", u"anosql Documentation", [author], 1)]
 
 # If true, show URL addresses after external links.
 #
@@ -318,9 +311,15 @@
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, "anosql", u"anosql Documentation",
-     author, "anosql", "One line description of project.",
-     "Miscellaneous"),
+    (
+        master_doc,
+        "anosql",
+        u"anosql Documentation",
+        author,
+        "anosql",
+        "One line description of project.",
+        "Miscellaneous",
+    ),
 ]
 
 # Documents to append as an appendix to all manuals.
diff --git a/projects/anosql/src/python/anosql/__init__.py b/projects/anosql/src/python/anosql/__init__.py
index 2e73a7f..4faf5e4 100644
--- a/projects/anosql/src/python/anosql/__init__.py
+++ b/projects/anosql/src/python/anosql/__init__.py
@@ -2,4 +2,10 @@ from .core import from_path, from_str, SQLOperationType
 from .exceptions import SQLLoadException, SQLParseException
 
 
-__all__ = ["from_path", "from_str", "SQLOperationType", "SQLLoadException", "SQLParseException"]
+__all__ = [
+    "from_path",
+    "from_str",
+    "SQLOperationType",
+    "SQLLoadException",
+    "SQLParseException",
+]
diff --git a/projects/anosql/src/python/anosql/adapters/sqlite3.py b/projects/anosql/src/python/anosql/adapters/sqlite3.py
index ea8cb06..9b50204 100644
--- a/projects/anosql/src/python/anosql/adapters/sqlite3.py
+++ b/projects/anosql/src/python/anosql/adapters/sqlite3.py
@@ -53,7 +53,9 @@ class SQLite3DriverAdapter(object):
         conn.execute(sql, parameters)
 
     @staticmethod
-    def insert_update_delete_many(conn: sqlite3.Connection, _query_name, sql, parameters):
+    def insert_update_delete_many(
+        conn: sqlite3.Connection, _query_name, sql, parameters
+    ):
         log.debug({"sql": sql, "parameters": parameters})
         conn.executemany(sql, parameters)
 
diff --git a/projects/anosql/src/python/anosql/core.py b/projects/anosql/src/python/anosql/core.py
index ecb4a78..3eb63a8 100644
--- a/projects/anosql/src/python/anosql/core.py
+++ b/projects/anosql/src/python/anosql/core.py
@@ -4,10 +4,10 @@ from .adapters.psycopg2 import PsycoPG2Adapter
 from .adapters.sqlite3 import SQLite3DriverAdapter
 from .exceptions import SQLLoadException, SQLParseException
 from .patterns import (
-  doc_comment_pattern,
-  empty_pattern,
-  query_name_definition_pattern,
-  valid_query_name_pattern
+    doc_comment_pattern,
+    empty_pattern,
+    query_name_definition_pattern,
+    valid_query_name_pattern,
 )
 
 
@@ -89,8 +89,8 @@ def get_driver_adapter(driver_name):
 
 
 class SQLOperationType(object):
-    """Enumeration (kind of) of anosql operation types
-    """
+    """Enumeration (kind of) of anosql operation types"""
+
     INSERT_RETURNING = 0
     INSERT_UPDATE_DELETE = 1
     INSERT_UPDATE_DELETE_MANY = 2
@@ -168,9 +168,13 @@ def _create_fns(query_name, docs, op_type, sql, driver_adapter):
         if op_type == SQLOperationType.INSERT_RETURNING:
             return driver_adapter.insert_returning(conn, query_name, sql, parameters)
         elif op_type == SQLOperationType.INSERT_UPDATE_DELETE:
-            return driver_adapter.insert_update_delete(conn, query_name, sql, parameters)
+            return driver_adapter.insert_update_delete(
+                conn, query_name, sql, parameters
+            )
         elif op_type == SQLOperationType.INSERT_UPDATE_DELETE_MANY:
-            return driver_adapter.insert_update_delete_many(conn, query_name, sql, *parameters)
+            return driver_adapter.insert_update_delete_many(
+                conn, query_name, sql, *parameters
+            )
         elif op_type == SQLOperationType.SCRIPT:
             return driver_adapter.execute_script(conn, sql)
         elif op_type == SQLOperationType.SELECT_ONE_ROW:
@@ -351,5 +355,5 @@ def from_path(sql_path, driver_name):
     else:
         raise SQLLoadException(
             "The sql_path must be a directory or file, got {}".format(sql_path),
-            sql_path
+            sql_path,
         )
diff --git a/projects/anosql/test/python/conftest.py b/projects/anosql/test/python/conftest.py
index df31a25..f722d73 100644
--- a/projects/anosql/test/python/conftest.py
+++ b/projects/anosql/test/python/conftest.py
@@ -102,10 +102,15 @@ def pg_conn(postgresql):
 
     with postgresql.cursor() as cur:
         with open(USERS_DATA_PATH) as fp:
-            cur.copy_from(fp, "users", sep=",", columns=["username", "firstname", "lastname"])
+            cur.copy_from(
fp, "users", sep=",", columns=["username", "firstname", "lastname"] + ) with open(BLOGS_DATA_PATH) as fp: cur.copy_from( - fp, "blogs", sep=",", columns=["userid", "title", "content", "published"] + fp, + "blogs", + sep=",", + columns=["userid", "title", "content", "published"], ) return postgresql diff --git a/projects/anosql/test/python/test_psycopg2.py b/projects/anosql/test/python/test_psycopg2.py index 03f4ce3..57a81ab 100644 --- a/projects/anosql/test/python/test_psycopg2.py +++ b/projects/anosql/test/python/test_psycopg2.py @@ -29,17 +29,26 @@ def test_record_query(pg_conn, queries): def test_parameterized_query(pg_conn, queries): actual = queries.blogs.get_user_blogs(pg_conn, userid=1) - expected = [("How to make a pie.", date(2018, 11, 23)), ("What I did Today", date(2017, 7, 28))] + expected = [ + ("How to make a pie.", date(2018, 11, 23)), + ("What I did Today", date(2017, 7, 28)), + ] assert actual == expected def test_parameterized_record_query(pg_conn, queries): dsn = pg_conn.get_dsn_parameters() with psycopg2.connect(cursor_factory=psycopg2.extras.RealDictCursor, **dsn) as conn: - actual = queries.blogs.pg_get_blogs_published_after(conn, published=date(2018, 1, 1)) + actual = queries.blogs.pg_get_blogs_published_after( + conn, published=date(2018, 1, 1) + ) expected = [ - {"title": "How to make a pie.", "username": "bobsmith", "published": "2018-11-23 00:00"}, + { + "title": "How to make a pie.", + "username": "bobsmith", + "published": "2018-11-23 00:00", + }, {"title": "Testing", "username": "janedoe", "published": "2018-01-01 00:00"}, ] diff --git a/projects/anosql/test/python/test_simple.py b/projects/anosql/test/python/test_simple.py index d7afe37..3fc63bf 100644 --- a/projects/anosql/test/python/test_simple.py +++ b/projects/anosql/test/python/test_simple.py @@ -5,6 +5,7 @@ import pytest @pytest.fixture def sqlite(request): import sqlite3 + sqlconnection = sqlite3.connect(":memory:") def fin(): @@ -18,11 +19,13 @@ def sqlite(request): def test_simple_query(sqlite): - _test_create_insert = ("-- name: create-some-table#\n" - "-- testing insertion\n" - "CREATE TABLE foo (a, b, c);\n\n" - "-- name: insert-some-value!\n" - "INSERT INTO foo (a, b, c) VALUES (1, 2, 3);\n") + _test_create_insert = ( + "-- name: create-some-table#\n" + "-- testing insertion\n" + "CREATE TABLE foo (a, b, c);\n\n" + "-- name: insert-some-value!\n" + "INSERT INTO foo (a, b, c) VALUES (1, 2, 3);\n" + ) q = anosql.from_str(_test_create_insert, "sqlite3") q.create_some_table(sqlite) @@ -30,11 +33,13 @@ def test_simple_query(sqlite): def test_auto_insert_query(sqlite): - _test_create_insert = ("-- name: create-some-table#\n" - "-- testing insertion\n" - "CREATE TABLE foo (a, b, c);\n\n" - "-- name: insert-some-value 7 - "92", # [0, 9] => 2 - "1234", # Amusingly, this is a 0-product. -]) +@pytest.mark.parametrize( + "num", + [ + "0", # 0 itself is the start Damm state + "37", # [0, 3] => 7 + "92", # [0, 9] => 2 + "1234", # Amusingly, this is a 0-product. 
+    ],
+)
 def test_num_verifies(num):
     """Assert that known-good Damm checks pass."""
 
diff --git a/projects/damm/test/python/test_hypothesis.py b/projects/damm/test/python/test_hypothesis.py
index 1ad98dc..7273bad 100644
--- a/projects/damm/test/python/test_hypothesis.py
+++ b/projects/damm/test/python/test_hypothesis.py
@@ -3,7 +3,7 @@ from hypothesis import given
 from hypothesis.strategies import integers
 
 
-@given(integers(0, 1<<512))
+@given(integers(0, 1 << 512))
 def test_num_checks_verify(num):
     """Assert the generated Damm check for number verifies."""
 
diff --git a/projects/datalog-shell/src/python/datalog/shell/__main__.py b/projects/datalog-shell/src/python/datalog/shell/__main__.py
index b02d663..6b0b605 100755
--- a/projects/datalog-shell/src/python/datalog/shell/__main__.py
+++ b/projects/datalog-shell/src/python/datalog/shell/__main__.py
@@ -56,12 +56,12 @@ from datalog.debris import Timing
 from datalog.evaluator import select
 from datalog.reader import pr_str, read_command, read_dataset
 from datalog.types import (
-  CachedDataset,
-  Dataset,
-  LVar,
-  PartlyIndexedDataset,
-  Rule,
-  TableIndexedDataset
+    CachedDataset,
+    Dataset,
+    LVar,
+    PartlyIndexedDataset,
+    Rule,
+    TableIndexedDataset,
 )
 
 from prompt_toolkit import print_formatted_text, PromptSession
diff --git a/projects/datalog/test/python/test_datalog_evaluator.py b/projects/datalog/test/python/test_datalog_evaluator.py
index 2b328a3..9d6a490 100644
--- a/projects/datalog/test/python/test_datalog_evaluator.py
+++ b/projects/datalog/test/python/test_datalog_evaluator.py
@@ -2,11 +2,11 @@
 
 from datalog.easy import read, select
 from datalog.types import (
-  CachedDataset,
-  Constant,
-  Dataset,
-  PartlyIndexedDataset,
-  TableIndexedDataset
+    CachedDataset,
+    Constant,
+    Dataset,
+    PartlyIndexedDataset,
+    TableIndexedDataset,
 )
 
 import pytest
@@ -194,7 +194,9 @@ def test_alternate_rule_lrec(db_cls):
     """Testing that both recursion and alternation work."""
 
     if db_cls == Dataset:
-        pytest.xfail("left-recursive rules aren't supported with a trivial store and no planner")
+        pytest.xfail(
+            "left-recursive rules aren't supported with a trivial store and no planner"
+        )
 
     d = read(
         """
diff --git a/projects/flowmetal/scratch/astdump.py b/projects/flowmetal/scratch/astdump.py
index 4fba249..ed71dcc 100644
--- a/projects/flowmetal/scratch/astdump.py
+++ b/projects/flowmetal/scratch/astdump.py
@@ -91,4 +91,8 @@ if __name__ == "__main__":
         with open(filename) as f:
             root = ast.parse(f.read(), filename)
 
-    print(yaml.dump(YAMLTreeDumper().visit(root), default_flow_style=False, sort_keys=False))
+    print(
+        yaml.dump(
+            YAMLTreeDumper().visit(root), default_flow_style=False, sort_keys=False
+        )
+    )
diff --git a/projects/flowmetal/scratch/astinterp.py b/projects/flowmetal/scratch/astinterp.py
index 902bddd..ede73b1 100644
--- a/projects/flowmetal/scratch/astinterp.py
+++ b/projects/flowmetal/scratch/astinterp.py
@@ -314,7 +314,7 @@ class ModuleInterpreter(StrictNodeVisitor):
         #
         # Note that we have to do the .posonlyargs dance
         if argspec.vararg:
-            self.ns[argspec.vararg.arg] = args[len(argspec.args):]
+            self.ns[argspec.vararg.arg] = args[len(argspec.args) :]
         else:
             if len(args) > len(argspec.args or getattr(argspec, "posonlyargs", ())):
                 arg_num_mismatch()
@@ -791,8 +791,9 @@ class ModuleInterpreter(StrictNodeVisitor):
         for n in node.names:
             if n in self.ns and self.ns[n] is not GLOBAL:
                 raise SyntaxError(
-                    "SyntaxError: name '{}' is assigned to before global declaration"
-                    .format(n)
+                    "SyntaxError: name '{}' is assigned to before global declaration".format(
declaration".format( + n + ) ) # Don't store GLOBAL in the top-level namespace if self.ns.parent: @@ -895,6 +896,7 @@ class ModuleInterpreter(StrictNodeVisitor): def visit_Ellipsis(self, node): # In Py3k only from ast import Ellipsis + return Ellipsis def visit_Print(self, node): @@ -929,8 +931,8 @@ class InterpreterSystem(object): name = name.replace(".", os.path.sep) for e in self.path: for ext in [ - # ".flow", - ".py", + # ".flow", + ".py", ]: if os.path.isdir(e): f = os.path.join(e, name + ext) @@ -942,10 +944,14 @@ class InterpreterSystem(object): elif os.path.isfile(e): # FIXME (arrdem 2021-05-31) - raise RuntimeError("Import from .zip/.whl/.egg archives aren't supported yet") + raise RuntimeError( + "Import from .zip/.whl/.egg archives aren't supported yet" + ) else: - self.modules[name] = __import__(name, globals, locals, fromlist, level) + self.modules[name] = __import__( + name, globals, locals, fromlist, level + ) return self.modules[name] diff --git a/projects/flowmetal/scratch/test.py b/projects/flowmetal/scratch/test.py index 499c26b..567fe3d 100644 --- a/projects/flowmetal/scratch/test.py +++ b/projects/flowmetal/scratch/test.py @@ -27,6 +27,7 @@ for _ in range(10): def bar(a, b, **bs): pass + import requests diff --git a/projects/jobq/benchmark.py b/projects/jobq/benchmark.py index b07ab9a..949560f 100644 --- a/projects/jobq/benchmark.py +++ b/projects/jobq/benchmark.py @@ -43,10 +43,10 @@ def timer(val: float) -> str: """Given a time in NS, convert it to integral NS/MS/S such that the non-decimal part is integral.""" for factor, unit in [ - (1e9, "s"), - (1e6, "ms"), - (1e3, "us"), - (1, "ns"), + (1e9, "s"), + (1e6, "ms"), + (1e3, "us"), + (1, "ns"), ]: scaled_val = val / factor if 1e4 > scaled_val > 1.0: @@ -60,21 +60,20 @@ def bench(callable, reps): with timing() as t: callable() timings.append(t.duration) - print(f"""Ran {callable.__name__!r} {reps} times, total time {timer(run_t.duration)} + print( + f"""Ran {callable.__name__!r} {reps} times, total time {timer(run_t.duration)} mean: {timer(mean(timings))} median: {timer(median(timings))} stddev: {timer(stdev(timings))} test overhead: {timer((run_t.duration - sum(timings)) / reps)} -""") +""" + ) def test_reference_json(reps): """As a reference benchmark, test just appending to a file.""" - jobs = [ - {"user_id": randint(0, 1<<32), "msg": randstr(256)} - for _ in range(reps) - ] + jobs = [{"user_id": randint(0, 1 << 32), "msg": randstr(256)} for _ in range(reps)] jobs_i = iter(jobs) def naive_serialize(): @@ -86,15 +85,13 @@ def test_reference_json(reps): def test_reference_fsync(reps): """As a reference benchmark, test just appending to a file.""" - jobs = [ - {"user_id": randint(0, 1<<32), "msg": randstr(256)} - for _ in range(reps) - ] + jobs = [{"user_id": randint(0, 1 << 32), "msg": randstr(256)} for _ in range(reps)] jobs_i = iter(jobs) handle, path = tempfile.mkstemp() os.close(handle) with open(path, "w") as fd: + def naive_fsync(): fd.write(json.dumps([next(jobs_i), ["CREATED"]])) fd.flush() @@ -106,10 +103,7 @@ def test_reference_fsync(reps): def test_insert(q, reps): """Benchmark insertion time to a given SQLite DB.""" - jobs = [ - {"user_id": randint(0, 1<<32), "msg": randstr(256)} - for _ in range(reps) - ] + jobs = [{"user_id": randint(0, 1 << 32), "msg": randstr(256)} for _ in range(reps)] jobs_i = iter(jobs) def insert(): diff --git a/projects/jobq/src/python/jobq/__init__.py b/projects/jobq/src/python/jobq/__init__.py index 6bdf20c..1732502 100644 --- a/projects/jobq/src/python/jobq/__init__.py +++ 
@@ -160,7 +160,9 @@ def compile_query(query):
     elif isinstance(query, str):
         terms = [query]
 
-    assert not any(keyword in query.lower() for keyword in ["select", "update", "delete", ";"])
+    assert not any(
+        keyword in query.lower() for keyword in ["select", "update", "delete", ";"]
+    )
 
     return " AND ".join(terms)
 
@@ -173,7 +175,6 @@ class Job(NamedTuple):
 
 
 class JobQueue(object):
-
     def __init__(self, path):
         self._db = sqlite3.connect(path)
         self._queries = anosql.from_str(_SQL, "sqlite3")
@@ -196,7 +197,7 @@ class JobQueue(object):
             json.loads(payload),
             json.loads(events),
             json.loads(state),
-            datetime.fromtimestamp(int(modified))
+            datetime.fromtimestamp(int(modified)),
         )
 
     def _from_result(self, result) -> Job:
@@ -227,6 +228,7 @@ class JobQueue(object):
 
         if limit:
             limit = int(limit)
+
             def lf(iterable):
                 iterable = iter(iterable)
                 for i in range(limit):
@@ -234,6 +236,7 @@ class JobQueue(object):
                     yield next(iterable)
                 except StopIteration:
                     break
+
             jobs = lf(jobs)
 
         return self._from_results(jobs)
@@ -265,9 +268,7 @@ class JobQueue(object):
         """Fetch all available data about a given job by ID."""
 
         with self._db as db:
-            return self._from_result(
-                self._queries.job_get(db, id=job_id)
-            )
+            return self._from_result(self._queries.job_get(db, id=job_id))
 
     def cas_state(self, job_id, old_state, new_state):
         """CAS update a job's state, returning the updated job or indicating a conflict."""
@@ -287,11 +288,7 @@ class JobQueue(object):
 
         with self._db as db:
             return self._from_result(
-                self._queries.job_append_event(
-                    db,
-                    id=job_id,
-                    event=json.dumps(event)
-                )
+                self._queries.job_append_event(db, id=job_id, event=json.dumps(event))
             )
 
     def delete_job(self, job_id):
diff --git a/projects/jobqd/src/python/jobqd/__main__.py b/projects/jobqd/src/python/jobqd/__main__.py
index fbd99f8..f9089f9 100644
--- a/projects/jobqd/src/python/jobqd/__main__.py
+++ b/projects/jobqd/src/python/jobqd/__main__.py
@@ -52,9 +52,7 @@ def get_jobs():
 
     query = blob.get("query", "true")
 
-    return jsonify({
-        "jobs": [job_as_json(j) for j in request.q.query(query)]
-    }), 200
+    return jsonify({"jobs": [job_as_json(j) for j in request.q.query(query)]}), 200
 
 
 @app.route("/api/v0/job/create", methods=["POST"])
@@ -64,9 +62,7 @@ def create_job():
     blob = request.get_json(force=True)
     payload = blob["payload"]
     state = blob.get("state", None)
-    job = request.q.create(
-        payload, state
-    )
+    job = request.q.create(payload, state)
     return jsonify(job_as_json(job)), 200
 
diff --git a/projects/jobqd/src/python/jobqd/rest/api.py b/projects/jobqd/src/python/jobqd/rest/api.py
index c8c12a1..00e346e 100644
--- a/projects/jobqd/src/python/jobqd/rest/api.py
+++ b/projects/jobqd/src/python/jobqd/rest/api.py
@@ -20,7 +20,7 @@ class Job(t.NamedTuple):
             payload=obj["payload"],
             events=obj["events"],
             state=obj["state"],
-            modified=datetime.fromtimestamp(obj["modified"])
+            modified=datetime.fromtimestamp(obj["modified"]),
         )
 
 
@@ -32,63 +32,62 @@ class JobqClient(object):
     def jobs(self, query=None, limit=10) -> t.Iterable[Job]:
         """Enumerate jobs on the queue."""
-        for job in self._session.post(self._url + "/api/v0/job",
-                                      json={"query": query or [],
-                                            "limit": limit})\
-                .json()\
-                .get("jobs"):
+        for job in (
+            self._session.post(
+                self._url + "/api/v0/job", json={"query": query or [], "limit": limit}
+            )
+            .json()
+            .get("jobs")
+        ):
             yield Job.from_json(job)
 
     def poll(self, query, state) -> Job:
         """Poll the job queue for the first job matching the given query,
advanced Job.""" return Job.from_json( - self._session - .post(self._url + "/api/v0/job/poll", - json={"query": query, - "state": state}) - .json()) + self._session.post( + self._url + "/api/v0/job/poll", json={"query": query, "state": state} + ).json() + ) def create(self, payload: object, state=None) -> Job: """Create a new job in the system.""" return Job.from_json( - self._session - .post(self._url + "/api/v0/job/create", - json={"payload": payload, - "state": state}) - .json()) + self._session.post( + self._url + "/api/v0/job/create", + json={"payload": payload, "state": state}, + ).json() + ) def fetch(self, job: Job) -> Job: """Fetch the current state of a job.""" return Job.from_json( - self._session - .get(self._url + "/api/v0/job/" + job.id) - .json()) + self._session.get(self._url + "/api/v0/job/" + job.id).json() + ) def advance(self, job: Job, state: object) -> Job: """Attempt to advance a job to a subsequent state.""" return Job.from_json( - self._session - .post(job.url + "/state", - json={"old": job.state, - "new": state}) - .json()) + self._session.post( + job.url + "/state", json={"old": job.state, "new": state} + ).json() + ) def event(self, job: Job, event: object) -> Job: """Attempt to record an event against a job.""" return Job.from_json( - self._session - .post(self._url + f"/api/v0/job/{job.id}/event", - json=event) - .json()) + self._session.post( + self._url + f"/api/v0/job/{job.id}/event", json=event + ).json() + ) def delete(self, job: Job) -> None: """Delete a remote job.""" - return (self._session - .delete(self._url + f"/api/v0/job/{job.id}") - .raise_for_status()) + return self._session.delete( + self._url + f"/api/v0/job/{job.id}" + ).raise_for_status() diff --git a/projects/kook/src/kook/client.py b/projects/kook/src/kook/client.py index 4194cf1..bc74fee 100644 --- a/projects/kook/src/kook/client.py +++ b/projects/kook/src/kook/client.py @@ -12,7 +12,12 @@ from kazoo.exceptions import NodeExistsError from kazoo.protocol.states import ZnodeStat from kazoo.recipe.lock import Lock, ReadLock, WriteLock from kook.config import current_config, KookConfig -from toolz.dicttoolz import assoc as _assoc, dissoc as _dissoc, merge as _merge, update_in +from toolz.dicttoolz import ( + assoc as _assoc, + dissoc as _dissoc, + merge as _merge, + update_in, +) def assoc(m, k, v): diff --git a/projects/lilith/setup.py b/projects/lilith/setup.py index 4c5f494..56dd3a9 100644 --- a/projects/lilith/setup.py +++ b/projects/lilith/setup.py @@ -14,10 +14,12 @@ setup( packages=[ "lilith", ], - package_data={"": [ - "src/python/lilith/*.lark", - "src/python/lilith/*.lil", - ]}, + package_data={ + "": [ + "src/python/lilith/*.lark", + "src/python/lilith/*.lil", + ] + }, include_package_data=True, install_requires=requirements, entry_points={ diff --git a/projects/proquint/src/python/proquint/__init__.py b/projects/proquint/src/python/proquint/__init__.py index e4fed48..f034bef 100644 --- a/projects/proquint/src/python/proquint/__init__.py +++ b/projects/proquint/src/python/proquint/__init__.py @@ -89,16 +89,18 @@ class Proquint(object): val = n << 8 | m # This is slightly un-idiomatic, but it precisely captures the coding definition - yield "".join([ - dict[val >> shift & mask] - for dict, shift, mask in [ - (cls.CONSONANTS, 0xC, 0xf), - (cls.VOWELS, 0xA, 0x3), - (cls.CONSONANTS, 0x6, 0xf), - (cls.VOWELS, 0x4, 0x3), - (cls.CONSONANTS, 0x0, 0xf) + yield "".join( + [ + dict[val >> shift & mask] + for dict, shift, mask in [ + (cls.CONSONANTS, 0xC, 0xF), + (cls.VOWELS, 0xA, 0x3), + 
+                        (cls.CONSONANTS, 0x6, 0xF),
+                        (cls.VOWELS, 0x4, 0x3),
+                        (cls.CONSONANTS, 0x0, 0xF),
+                    ]
                 ]
-            ])
+            )
 
 
     # Core methods ################################################################################################
diff --git a/projects/proquint/src/python/proquint/__main__.py b/projects/proquint/src/python/proquint/__main__.py
index 1599502..0e28a7a 100644
--- a/projects/proquint/src/python/proquint/__main__.py
+++ b/projects/proquint/src/python/proquint/__main__.py
@@ -10,7 +10,9 @@ from proquint import Proquint
 parser = argparse.ArgumentParser()
 g = parser.add_mutually_exclusive_group()
 g.add_argument("-g", "--generate", dest="generate", default=False, action="store_true")
-g.add_argument("-p", "--predictable", dest="predictable", default=False, action="store_true")
+g.add_argument(
+    "-p", "--predictable", dest="predictable", default=False, action="store_true"
+)
 g.add_argument("-d", "--decode", dest="decode", default=False, action="store_true")
 g.add_argument("-e", "--encode", dest="encode", default=False, action="store_true")
 parser.add_argument("-w", "--width", dest="width", type=int, default=32)
diff --git a/projects/proquint/test/python/test_examples.py b/projects/proquint/test/python/test_examples.py
index 8133404..486c2d3 100644
--- a/projects/proquint/test/python/test_examples.py
+++ b/projects/proquint/test/python/test_examples.py
@@ -38,7 +38,6 @@ examples = [
     (536870912, 32, "fabab-babab"),
     (1073741824, 32, "habab-babab"),
     (2147483648, 32, "mabab-babab"),
-
     # A random value
     (3232235536, 32, "safom-babib"),
 ]
@@ -53,4 +52,6 @@ def test_decode_examples(val, width, qint):
 def test_encode_examples(val, width, qint):
     encoded_qint = proquint.Proquint.encode(val, width)
     decoded_val = proquint.Proquint.decode(encoded_qint)
-    assert encoded_qint == qint, f"did not encode {val} to {qint}; got {encoded_qint} ({decoded_val})"
+    assert (
+        encoded_qint == qint
+    ), f"did not encode {val} to {qint}; got {encoded_qint} ({decoded_val})"
diff --git a/projects/proquint/test/python/test_hypothesis.py b/projects/proquint/test/python/test_hypothesis.py
index c459e20..a7f0318 100644
--- a/projects/proquint/test/python/test_hypothesis.py
+++ b/projects/proquint/test/python/test_hypothesis.py
@@ -5,25 +5,21 @@ from hypothesis.strategies import integers
 import proquint
 
 
-@given(integers(min_value=0, max_value=1<<16))
+@given(integers(min_value=0, max_value=1 << 16))
 def test_round_trip_16(val):
-    assert proquint.Proquint.decode(
-        proquint.Proquint.encode(val, 16)) == val
+    assert proquint.Proquint.decode(proquint.Proquint.encode(val, 16)) == val
 
 
-@given(integers(min_value=0, max_value=1<<32))
+@given(integers(min_value=0, max_value=1 << 32))
 def test_round_trip_32(val):
-    assert proquint.Proquint.decode(
-        proquint.Proquint.encode(val, 32)) == val
+    assert proquint.Proquint.decode(proquint.Proquint.encode(val, 32)) == val
 
 
-@given(integers(min_value=0, max_value=1<<64))
+@given(integers(min_value=0, max_value=1 << 64))
 def test_round_trip_64(val):
-    assert proquint.Proquint.decode(
-        proquint.Proquint.encode(val, 64)) == val
+    assert proquint.Proquint.decode(proquint.Proquint.encode(val, 64)) == val
 
 
-@given(integers(min_value=0, max_value=1<<512))
+@given(integers(min_value=0, max_value=1 << 512))
 def test_round_trip_512(val):
-    assert proquint.Proquint.decode(
-        proquint.Proquint.encode(val, 512)) == val
+    assert proquint.Proquint.decode(proquint.Proquint.encode(val, 512)) == val
diff --git a/projects/reqman/src/python/reqman/__main__.py b/projects/reqman/src/python/reqman/__main__.py
index a18f93a..5faabc1 100644
--- a/projects/reqman/src/python/reqman/__main__.py
+++ b/projects/reqman/src/python/reqman/__main__.py
@@ -49,7 +49,7 @@ def req_name(requirement: str) -> str:
 def sort_key(requirement: str) -> str:
     return (
         req_name(requirement)  # Get the match group
-        .lower()               # We ignore case
+        .lower()  # We ignore case
         .replace("-", "")  # We ignore -
         .replace("_", "")  # And _
     )
@@ -58,7 +58,9 @@
 
 def _bq(query):
     """Enumerate the PyPi package names from a Bazel query."""
 
-    unused = subprocess.check_output(["bazel", "query", query, "--output=package"]).decode("utf-8")
+    unused = subprocess.check_output(
+        ["bazel", "query", query, "--output=package"]
+    ).decode("utf-8")
     for line in unused.split("\n"):
         if line:
             yield line.replace("@arrdem_source_pypi//pypi__", "")
@@ -67,7 +69,9 @@
 
 def _unused():
     """Find unused requirements."""
 
-    return set(_bq("@arrdem_source_pypi//...")) - set(_bq("filter('//pypi__', deps(//...))"))
+    return set(_bq("@arrdem_source_pypi//...")) - set(
+        _bq("filter('//pypi__', deps(//...))")
+    )
 
 
 def _load(fname):
diff --git a/projects/yamlschema/src/python/yamlschema/__init__.py b/projects/yamlschema/src/python/yamlschema/__init__.py
index 70aa196..1a75a3c 100644
--- a/projects/yamlschema/src/python/yamlschema/__init__.py
+++ b/projects/yamlschema/src/python/yamlschema/__init__.py
@@ -199,7 +199,7 @@ class YamlLinter(object):
             )
 
     def _lint_num_range(self, schema, node: Node, value) -> t.Iterable[LintRecord]:
-        """"FIXME."""
+        """ "FIXME."""
 
         if (base := schema.get("multipleOf")) is not None:
             if value % base != 0:
diff --git a/tools/python/requirements.txt b/tools/python/requirements.txt
index 59124ee..570daa0 100644
--- a/tools/python/requirements.txt
+++ b/tools/python/requirements.txt
@@ -5,6 +5,8 @@ attrs==20.3.0
 autoflake==1.4
 Babel==2.9.0
 beautifulsoup4==4.9.3
+black==21.8b0
+bleach==4.0.0
 certifi==2020.12.5
 chardet==4.0.0
 click==7.1.2
@@ -40,8 +42,10 @@ openapi-schema-validator==0.1.5
 openapi-spec-validator==0.3.0
 packaging==20.9
 parso==0.8.2
-pathspec==0.8.1
+pathspec==0.9.0
+pep517==0.11.0
 pip-tools==6.2.0
+platformdirs==2.3.0
 pluggy==0.13.1
 port-for==0.6.1
 prompt-toolkit==3.0.18
@@ -63,6 +67,7 @@ PyYAML==5.4.1
 readme-renderer==29.0
 recommonmark==0.7.1
 redis==3.5.3
+regex==2021.8.28
 requests==2.25.1
 requests-toolbelt==0.9.1
 requirements-parser==0.2.0
@@ -81,15 +86,18 @@ sphinxcontrib-programoutput==0.17
 sphinxcontrib-qthelp==1.0.3
 sphinxcontrib-serializinghtml==1.1.4
 toml==0.10.2
+tomli==1.2.1
 tornado==6.1
 typed-ast==1.4.2
-typing-extensions==3.7.4.3
+typing-extensions==3.10.0.2
 unify==0.5
 untokenize==0.1.1
 urllib3==1.26.4
 urwid==2.1.2
 wcwidth==0.2.5
+webencodings==0.5.1
 Werkzeug==2.0.1
 yamllint==1.26.1
 yarl==1.6.3
 yaspin==1.5.0
+zipp==3.5.0
diff --git a/tools/python/test_licenses.py b/tools/python/test_licenses.py
index 7aee264..804b8f0 100644
--- a/tools/python/test_licenses.py
+++ b/tools/python/test_licenses.py
@@ -13,16 +13,16 @@ from requirements.requirement import Requirement
 # Licenses approved as representing non-copyleft and not precluding commercial usage.
 # This is all easy, there's a good schema here.
 APPROVED_LICENSES = [
-    MIT    := "License :: OSI Approved :: MIT License",
     APACHE := "License :: OSI Approved :: Apache Software License",
-    BSD    := "License :: OSI Approved :: BSD License",
-    MPL10  := "License :: OSI Approved :: Mozilla Public License 1.0 (MPL)",
-    MPL11  := "License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)",
-    MPL20  := "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
-    PSFL   := "License :: OSI Approved :: Python Software Foundation License",
-    LGPL   := "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
-    LGPL3  := "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)",
-    ISCL   := "License :: OSI Approved :: ISC License (ISCL)",
+    MIT := "License :: OSI Approved :: MIT License",
+    BSD := "License :: OSI Approved :: BSD License",
+    MPL10 := "License :: OSI Approved :: Mozilla Public License 1.0 (MPL)",
+    MPL11 := "License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)",
+    MPL20 := "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
+    PSFL := "License :: OSI Approved :: Python Software Foundation License",
+    LGPL := "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
+    LGPL3 := "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)",
+    ISCL := "License :: OSI Approved :: ISC License (ISCL)",
 ]
 
 UNAPPROVED_LICENSES = [
@@ -36,23 +36,17 @@ LICENSES_BY_LOWERNAME = {
     "apache 2.0": APACHE,
     "apache": APACHE,
     "http://www.apache.org/licenses/license-2.0": APACHE,
-
     "bsd 3": BSD,
     "bsd": BSD,
-
     "gpl": GPL1,
     "gpl2": GPL2,
     "gpl3": GPL3,
     "lgpl": LGPL,
     "lgpl3": LGPL3,
-
     "isc": ISCL,
-
     "mit": MIT,
-
     "mpl": MPL10,
     "mpl 2.0": MPL20,
-
     "psf": PSFL,
 }
 
@@ -75,7 +69,9 @@ with open("tools/python/requirements.txt") as fd:
 
 def bash_license(ln):
     while True:
-        lnn = re.sub(r"[(),]|( version)|( license)|( ?v(?=\d))|([ -]clause)", "", ln.lower())
+        lnn = re.sub(
+            r"[(),]|( version)|( license)|( ?v(?=\d))|([ -]clause)", "", ln.lower()
+        )
         if ln != lnn:
             ln = lnn
         else:
@@ -85,16 +81,19 @@
     return ln
 
 
-@pytest.mark.parametrize("a,b", [
-    ("MIT", MIT),
-    ("mit", MIT),
-    ("BSD", BSD),
-    ("BSD 3-clause", BSD),
-    ("BSD 3 clause", BSD),
-    ("GPL3", GPL3),
-    ("GPL v3", GPL3),
-    ("GPLv3", GPL3),
-])
+@pytest.mark.parametrize(
+    "a,b",
+    [
+        ("MIT", MIT),
+        ("mit", MIT),
+        ("BSD", BSD),
+        ("BSD 3-clause", BSD),
+        ("BSD 3 clause", BSD),
+        ("GPL3", GPL3),
+        ("GPL v3", GPL3),
+        ("GPLv3", GPL3),
+    ],
+)
 def test_bash_license(a, b):
     assert bash_license(a) == b
 
@@ -117,7 +116,7 @@ def licenses(package: Requirement):
     if not version:
         blob = requests.get(
             f"https://pypi.org/pypi/{package.name}/json",
-            headers={"Accept": "application/json"}
+            headers={"Accept": "application/json"},
         ).json()
         if ln := bash_license(blob.get("license")):
             lics.append(ln)
@@ -131,13 +130,15 @@
     if version:
         blob = requests.get(
             f"https://pypi.org/pypi/{package.name}/{version}/json",
-            headers={"Accept": "application/json"}
+            headers={"Accept": "application/json"},
         ).json()
-        lics.extend([
-            c
-            for c in blob.get("info", {}).get("classifiers", [])
-            if c.startswith("License")
-        ])
+        lics.extend(
+            [
+                c
+                for c in blob.get("info", {}).get("classifiers", [])
+                if c.startswith("License")
+            ]
+        )
         ln = blob.get("info", {}).get("license")
         if ln and not lics:
             lics.append(bash_license(ln))