And lint
parent bc06fc01ff
commit 54ab71f19c

21 changed files with 80 additions and 81 deletions
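
The diff below does two things throughout the tree: string literals are normalized to double quotes, and imports are sorted case-insensitively with the `import x` and `from x import y` forms interleaved. The commit message does not name the tooling; the sketch below shows how the same two effects could be reproduced with Black and isort. Both tool choices are assumptions for illustration, not something the commit confirms.

    # A minimal sketch, assuming Black and isort; the commit does not name
    # its tools, so treat both choices as hypothetical.
    import black
    import isort

    # The pre-lint import order from the first file in this diff.
    SAMPLE = (
        "import logging\n"
        "from datetime import datetime\n"
        "from hashlib import sha256\n"
        "\n"
        "greeting = 'hello'\n"
    )

    # isort with force_sort_within_sections interleaves plain imports and
    # from-imports alphabetically, matching the ordering seen in this diff.
    tidy = isort.code(SAMPLE, force_sort_within_sections=True)

    # Black rewrites string literals to double quotes (its default style).
    print(black.format_str(tidy, mode=black.Mode()))
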
File 1 of 21 (filename not preserved):

@@ -1,13 +1,13 @@
 """Quick and dirty migrations for AnoSQL."""
 
-import logging
 from datetime import datetime
 from hashlib import sha256
+import logging
 import re
 import typing as t
 
 import anosql
-from anosql.core import Queries, from_str
+from anosql.core import from_str, Queries
 
 
 log = logging.getLogger(__name__)
@@ -103,7 +103,7 @@ def create_tables(queries: Queries, conn) -> None:
     # Insert the bootstrap 'fixup' record
     execute_migration(queries, conn,
                       MigrationDescriptor(
-                          name='anosql_migrations_create_table',
+                          name="anosql_migrations_create_table",
                           sha256sum=sha256(queries.anosql_migrations_create_table.sql.encode("utf-8")).hexdigest()))
 
 
File 2 of 21 (filename not preserved):

@@ -7,6 +7,7 @@ from anosql.core import Queries
 import anosql_migrations
 import pytest
 
+
 _SQL = """\
 -- name: migration_0000_create_kv
 CREATE TABLE kv (`id` INT, `key` TEXT, `value` TEXT);
@@ -49,9 +50,9 @@ def test_queries(queries):
     """Assert that we can construct a queries instance with migrations features."""
 
     assert isinstance(queries, Queries)
-    assert hasattr(queries, 'anosql_migrations_create_table')
-    assert hasattr(queries, 'anosql_migrations_list')
-    assert hasattr(queries, 'anosql_migrations_create')
+    assert hasattr(queries, "anosql_migrations_create_table")
+    assert hasattr(queries, "anosql_migrations_list")
+    assert hasattr(queries, "anosql_migrations_create")
 
 
 def test_migrations_create_table(conn, queries):
File 3 of 21 (filename not preserved):

@@ -21,6 +21,7 @@
 # sys.path.insert(0, os.path.abspath('.'))
 import pkg_resources
 
+
 # -- General configuration ------------------------------------------------
 
 # If your documentation needs a minimal Sphinx version, state it here.
@@ -33,32 +34,32 @@ import pkg_resources
 extensions = ["sphinx.ext.autodoc", "sphinx.ext.napoleon"]
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
 
 # The suffix(es) of source filenames.
 # You can specify multiple suffix as a list of string:
 #
 # source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
+source_suffix = ".rst"
 
 # The encoding of source files.
 #
 # source_encoding = 'utf-8-sig'
 
 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"
 
 # General information about the project.
-project = u'anosql'
-copyright = u'2014-2017, Honza Pokorny'
-author = u'Honza Pokorny'
+project = u"anosql"
+copyright = u"2014-2017, Honza Pokorny"
+author = u"Honza Pokorny"
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = pkg_resources.get_distribution('anosql').version
+version = pkg_resources.get_distribution("anosql").version
 # The full version, including alpha/beta/rc tags.
 release = version
 
@@ -81,7 +82,7 @@ language = None
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This patterns also effect to html_static_path and html_extra_path
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
 
 # The reST default role (used for this markup: `text`) to use for all
 # documents.
@@ -103,7 +104,7 @@ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
 # show_authors = False
 
 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
 
 # A list of ignored prefixes for module index sorting.
 # modindex_common_prefix = []
@@ -120,7 +121,7 @@ todo_include_todos = False
 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
 #
-html_theme = 'alabaster'
+html_theme = "alabaster"
 
 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
@@ -234,7 +235,7 @@ html_static_path = []
 # html_search_scorer = 'scorer.js'
 
 # Output file base name for HTML help builder.
-htmlhelp_basename = 'anosqldoc'
+htmlhelp_basename = "anosqldoc"
 
 # -- Options for LaTeX output ---------------------------------------------
 
@@ -260,8 +261,8 @@ latex_elements = {
 # (source start file, target name, title,
 # author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'anosql.tex', u'anosql Documentation',
-     u'Honza Pokorny', 'manual'),
+    (master_doc, "anosql.tex", u"anosql Documentation",
+     u"Honza Pokorny", "manual"),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
@@ -302,7 +303,7 @@ latex_documents = [
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-    (master_doc, 'anosql', u'anosql Documentation',
+    (master_doc, "anosql", u"anosql Documentation",
      [author], 1)
 ]
 
@@ -317,9 +318,9 @@ man_pages = [
 # (source start file, target name, title, author,
 # dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'anosql', u'anosql Documentation',
-     author, 'anosql', 'One line description of project.',
-     'Miscellaneous'),
+    (master_doc, "anosql", u"anosql Documentation",
+     author, "anosql", "One line description of project.",
+     "Miscellaneous"),
 ]
 
 # Documents to append as an appendix to all manuals.
File 4 of 21 (filename not preserved):

@@ -1,4 +1,5 @@
 from .core import from_path, from_str, SQLOperationType
 from .exceptions import SQLLoadException, SQLParseException
 
+
 __all__ = ["from_path", "from_str", "SQLOperationType", "SQLLoadException", "SQLParseException"]
File 5 of 21 (filename not preserved):

@@ -5,15 +5,15 @@ from ..patterns import var_pattern
 
 def replacer(match):
     gd = match.groupdict()
-    if gd['dblquote'] is not None:
-        return gd['dblquote']
-    elif gd['quote'] is not None:
+    if gd["dblquote"] is not None:
+        return gd["dblquote"]
+    elif gd["quote"] is not None:
         return gd["quote"]
     else:
-        return '{lead}%({var_name})s{trail}'.format(
-            lead=gd['lead'],
-            var_name=gd['var_name'],
-            trail=gd['trail'],
+        return "{lead}%({var_name})s{trail}".format(
+            lead=gd["lead"],
+            var_name=gd["var_name"],
+            trail=gd["trail"],
         )
 
 
File 6 of 21 (filename not preserved):

@@ -6,6 +6,7 @@ from contextlib import contextmanager
 import logging
 import sqlite3
 
+
 log = logging.getLogger(__name__)
 
 
@@ -29,7 +30,7 @@ class SQLite3DriverAdapter(object):
     @staticmethod
     def select(conn, _query_name, sql, parameters):
         cur = conn.cursor()
-        log.debug({'sql': sql, 'parameters': parameters})
+        log.debug({"sql": sql, "parameters": parameters})
         cur.execute(sql, parameters)
         results = cur.fetchall()
         cur.close()
@@ -39,7 +40,7 @@ class SQLite3DriverAdapter(object):
     @contextmanager
     def select_cursor(conn: sqlite3.Connection, _query_name, sql, parameters):
         cur = conn.cursor()
-        log.debug({'sql': sql, 'parameters': parameters})
+        log.debug({"sql": sql, "parameters": parameters})
         cur.execute(sql, parameters)
         try:
             yield cur
@@ -48,18 +49,18 @@ class SQLite3DriverAdapter(object):
 
     @staticmethod
     def insert_update_delete(conn: sqlite3.Connection, _query_name, sql, parameters):
-        log.debug({'sql': sql, 'parameters': parameters})
+        log.debug({"sql": sql, "parameters": parameters})
         conn.execute(sql, parameters)
 
     @staticmethod
     def insert_update_delete_many(conn: sqlite3.Connection, _query_name, sql, parameters):
-        log.debug({'sql': sql, 'parameters': parameters})
+        log.debug({"sql": sql, "parameters": parameters})
         conn.executemany(sql, parameters)
 
     @staticmethod
     def insert_returning(conn: sqlite3.Connection, _query_name, sql, parameters):
         cur = conn.cursor()
-        log.debug({'sql': sql, 'parameters': parameters})
+        log.debug({"sql": sql, "parameters": parameters})
         cur.execute(sql, parameters)
 
         if "returning" not in sql.lower():
@@ -75,5 +76,5 @@ class SQLite3DriverAdapter(object):
 
     @staticmethod
     def execute_script(conn: sqlite3.Connection, sql):
-        log.debug({'sql': sql, 'parameters': None})
+        log.debug({"sql": sql, "parameters": None})
         conn.executescript(sql)
File 7 of 21 (filename not preserved):

@@ -4,10 +4,10 @@ from .adapters.psycopg2 import PsycoPG2Adapter
 from .adapters.sqlite3 import SQLite3DriverAdapter
 from .exceptions import SQLLoadException, SQLParseException
 from .patterns import (
-    query_name_definition_pattern,
-    empty_pattern,
     doc_comment_pattern,
-    valid_query_name_pattern,
+    empty_pattern,
+    query_name_definition_pattern,
+    valid_query_name_pattern
 )
 
 
@@ -340,7 +340,7 @@ def from_path(sql_path, driver_name):
 
     """
    if not os.path.exists(sql_path):
-        raise SQLLoadException('File does not exist: {}.'.format(sql_path), sql_path)
+        raise SQLLoadException("File does not exist: {}.".format(sql_path), sql_path)
 
     driver_adapter = get_driver_adapter(driver_name)
 
@@ -350,6 +350,6 @@ def from_path(sql_path, driver_name):
         return Queries(load_queries_from_file(sql_path, driver_adapter))
     else:
         raise SQLLoadException(
-            'The sql_path must be a directory or file, got {}'.format(sql_path),
+            "The sql_path must be a directory or file, got {}".format(sql_path),
             sql_path
         )
File 8 of 21 (filename not preserved):

@@ -1,5 +1,6 @@
 import re
 
+
 query_name_definition_pattern = re.compile(r"--\s*name\s*:\s*")
 """
 Pattern: Identifies name definition comments.
File 9 of 21 (filename not preserved):

@@ -4,6 +4,7 @@ import sqlite3
 
 import pytest
 
+
 BLOGDB_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "blogdb")
 USERS_DATA_PATH = os.path.join(BLOGDB_PATH, "data", "users_data.csv")
 BLOGS_DATA_PATH = os.path.join(BLOGDB_PATH, "data", "blogs_data.csv")
File 10 of 21 (filename not preserved):

@@ -1,5 +1,5 @@
-import os
 from datetime import date
+import os
 
 import anosql
 import psycopg2
File 11 of 21 (filename not preserved):

@@ -1,12 +1,11 @@
-import pytest
-
 import anosql
+import pytest
 
 
 @pytest.fixture
 def sqlite(request):
     import sqlite3
-    sqlconnection = sqlite3.connect(':memory:')
+    sqlconnection = sqlite3.connect(":memory:")
 
     def fin():
         "teardown"
@@ -80,7 +79,7 @@ def test_one_row(sqlite):
         "-- name: two-rows?\n"
         "SELECT 1 UNION SELECT 2;\n")
     q = anosql.from_str(_test_one_row, "sqlite3")
-    assert q.one_row(sqlite) == (1, 'hello')
+    assert q.one_row(sqlite) == (1, "hello")
     assert q.two_rows(sqlite) is None
 
 
File 12 of 21 (filename not preserved):

@@ -3,6 +3,7 @@ import os
 import anosql
 import pytest
 
+
 def dict_factory(cursor, row):
     d = {}
     for idx, col in enumerate(cursor.description):
File 13 of 21 (filename not preserved):

@@ -3,21 +3,20 @@ Benchmarking the jobq.
 """
 
 from contextlib import contextmanager
-from time import perf_counter_ns
-from abc import abstractclassmethod
-import os
-from random import randint, choice
-import string
-from statistics import mean, median, stdev
-import tempfile
-import logging
 import json
+import logging
+import os
+from random import choice, randint
+from statistics import mean, median, stdev
+import string
+import tempfile
+from time import perf_counter_ns
 
 from jobq import JobQueue
 
 
 def randstr(len):
-    return ''.join(choice(string.ascii_uppercase + string.digits) for _ in range(len))
+    return "".join(choice(string.ascii_uppercase + string.digits) for _ in range(len))
 
 
 class Timing(object):
@@ -44,10 +43,10 @@ def timer(val: float) -> str:
     """Given a time in NS, convert it to integral NS/MS/S such that the non-decimal part is integral."""
 
     for factor, unit in [
-        (1e9, 's'),
-        (1e6, 'ms'),
-        (1e3, 'us'),
-        (1, 'ns'),
+        (1e9, "s"),
+        (1e6, "ms"),
+        (1e3, "us"),
+        (1, "ns"),
     ]:
         scaled_val = val / factor
         if 1e4 > scaled_val > 1.0:
File 14 of 21 (filename not preserved):

@@ -2,13 +2,11 @@
 A job queue library teetering atop sqlite3.
 """
 
-import logging
-import os
-import sys
-import sqlite3
-import json
-from typing import NamedTuple, Optional as Maybe
 from datetime import datetime
+import json
+import logging
+import sqlite3
+from typing import NamedTuple, Optional as Maybe
 
 import anosql
 from anosql_migrations import run_migrations, with_migrations
File 15 of 21 (filename not preserved):

@@ -8,6 +8,7 @@ from time import sleep
 from jobq import Job, JobQueue
 import pytest
 
+
 logging.getLogger().setLevel(logging.DEBUG)
 
 
@@ -49,7 +50,7 @@ def test_poll(db):
     sleep(1)  # And a side-effect for the third one
     j3 = db.create("payload 3")
 
-    j = db.poll('true', ["assigned"])
+    j = db.poll("true", ["assigned"])
 
     assert isinstance(j, Job)
     assert j.id == j1.id, "j1 is the oldest in the system and should poll first."
@@ -60,7 +61,7 @@ def test_poll_not_found(db):
     """Test that poll can return nothing."""
 
     j1 = db.create("payload 1")
-    j = db.poll('false', ["assigned"])
+    j = db.poll("false", ["assigned"])
     assert j is None
 
 
File 16 of 21 (filename not preserved):

@@ -3,16 +3,11 @@ A job queue over HTTP.
 """
 
 import argparse
-from functools import wraps
-import json
 import logging
 import os
-import sys
-import sqlite3
 
-from jobq import Job, JobQueue
-
 from flask import abort, current_app, Flask, jsonify, request
+from jobq import Job, JobQueue
 
 
 log = logging.getLogger(__name__)
File 17 of 21 (filename not preserved):

@@ -1,5 +1,6 @@
 from setuptools import setup
 
+
 with open("requirements.txt") as fp:
     requirements = [l.strip() for l in fp.readlines()]
 
File 18 of 21 (filename not preserved):

@@ -1,8 +1,8 @@
 """The Lilith runner."""
 
-import logging
 import argparse
 from importlib.resources import read_text as resource_text
+import logging
 import sys
 import traceback
 
File 19 of 21 (filename not preserved):

@@ -5,8 +5,8 @@ A quick and dirty recursive interpreter for Lilith.
 import logging
 import typing as t
 
-from lilith.parser import Apply, Block, Symbol, Args
-from lilith.reader import Def, Module, Import
+from lilith.parser import Apply, Args, Block, Symbol
+from lilith.reader import Def, Import, Module
 
 
 log = logging.getLogger(__name__)
File 20 of 21 (filename not preserved):

@@ -2,10 +2,11 @@
 Variously poor parsing for Lilith.
 """
 
-import typing as t
 from importlib.resources import read_text
+import typing as t
 
-from lark import Lark, v_args, Transformer
+from lark import Lark, Transformer, v_args
+
 
 GRAMMAR = read_text("lilith", "grammar.lark")
 
File 21 of 21 (filename not preserved):

@@ -6,8 +6,6 @@ import argparse
 import os
 from pprint import pprint
 import re
-import sys
-
 
 from gandi.client import GandiAPI
 import jinja2