Fmt.
parent 5646555b14
commit f712734648
21 changed files with 74 additions and 97 deletions
@@ -6,7 +6,6 @@ import logging
 import re
 import typing as t
 
-import anosql
 from anosql.core import from_str, Queries
 
 

@@ -12,7 +12,6 @@ import re
 from calf.grammar import TOKENS
 from calf.io.reader import PeekPosReader
 from calf.token import CalfToken
-from calf.util import *
 
 
 class CalfLexer:

@@ -2,11 +2,10 @@
 The Calf parser.
 """
 
-from itertools import tee
 import logging
 
-from calf.grammar import MATCHING, WHITESPACE_TYPES
-from calf.lexer import CalfLexer, lex_buffer, lex_file
+from calf.grammar import MATCHING
+from calf.lexer import lex_buffer, lex_file
 from calf.token import *
 
 

@@ -7,7 +7,6 @@ trip through the lexer.
 
 import calf.lexer as cl
 from conftest import parametrize
-import pytest
 
 
 def lex_single_token(buffer):

@@ -1,4 +1,4 @@
-from setuptools import find_packages, setup
+from setuptools import setup
 
 
 here = path.abspath(path.dirname(__file__))

@@ -59,7 +59,6 @@ from datalog.evaluator import select
 from datalog.reader import pr_str, read_command, read_dataset
 from datalog.types import (
     CachedDataset,
-    Constant,
     Dataset,
     LVar,
     PartlyIndexedDataset,
@@ -67,7 +66,7 @@ from datalog.types import (
     TableIndexedDataset
 )
 
-from prompt_toolkit import print_formatted_text, prompt, PromptSession
+from prompt_toolkit import print_formatted_text, PromptSession
 from prompt_toolkit.formatted_text import FormattedText
 from prompt_toolkit.history import FileHistory
 from prompt_toolkit.styles import Style

@@ -1,5 +1,9 @@
 """Debris."""
 
+from random import shuffle
+
+from datalog.types import LVar
+
 
 def shuffled(seq):
     """Because random.shuffle() is in-place >.>"""

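The body of shuffled() lies outside this hunk. As a hedged sketch only, not code from this commit, an implementation consistent with the docstring would copy the sequence before shuffling it:

    from random import shuffle

    def shuffled(seq):
        """Because random.shuffle() is in-place >.>"""
        items = list(seq)  # copy first, since random.shuffle mutates its argument
        shuffle(items)
        return items
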
@@ -3,11 +3,8 @@ A datalog engine.
 """
 
 from functools import reduce
-from itertools import chain
 
-from datalog.parser import parse
-from datalog.reader import pr_str, read
-from datalog.types import CachedDataset, Constant, Dataset, LVar, Rule, TableIndexedDataset
+from datalog.types import CachedDataset, Constant, Dataset, LVar, TableIndexedDataset
 
 
 def match(tuple, expr, bindings=None):

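Only the signature of match() is visible above. Purely as a generic illustration of datalog-style tuple matching, and not the project's implementation (it uses plain "?x" strings rather than the Constant/LVar types imported in this file), the core idea looks like:

    def match_generic(fact, pattern, bindings=None):
        """Match a ground fact against a pattern; '?'-prefixed strings act as variables."""
        bindings = dict(bindings or {})
        if len(fact) != len(pattern):
            return None
        for value, term in zip(fact, pattern):
            if isinstance(term, str) and term.startswith("?"):
                # A variable either takes a new binding or must agree with an old one.
                if bindings.setdefault(term, value) != value:
                    return None
            elif term != value:
                # A constant must match exactly.
                return None
        return bindings

    # match_generic(("edge", "a", "b"), ("edge", "?x", "?y")) -> {"?x": "a", "?y": "b"}
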
@@ -3,7 +3,6 @@ A datalog reader.
 """
 
 from collections import defaultdict
-from itertools import chain
 
 from datalog.parser import FAILURE, Grammar
 from datalog.types import Constant, Dataset, LVar, Rule

@@ -5,9 +5,7 @@ from datalog.types import (
     CachedDataset,
     Constant,
     Dataset,
-    LVar,
     PartlyIndexedDataset,
-    Rule,
     TableIndexedDataset
 )
 

@@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-
 """
 A (toy) tool for emitting Python ASTs as YAML formatted data.
 """

@@ -12,85 +10,85 @@ import yaml
 
 
 def propnames(node):
     """return names of attributes specific for the current node"""
 
     props = {x for x in dir(node) if not x.startswith("_")}
 
     if isinstance(node, ast.Module):
         props -= {"body"}
 
     if isinstance(node, (ast.Expr, ast.Attribute)):
         props -= {"value"}
 
     if isinstance(node, ast.Constant):
         props -= {"n", "s"}
 
     if isinstance(node, ast.ClassDef):
         props -= {"body"}
 
     return props
 
 
 # Note that ast.NodeTransformer exists for mutations.
 # This is just for reads.
 class TreeDumper(ast.NodeVisitor):
     def __init__(self):
         super().__init__()
         self._stack = []
 
     def dump(self, node):
         self.visit(node)
 
     def visit(self, node):
         nodetype = type(node)
         nodename = node.__class__.__name__
         indent = " " * len(self._stack) * 2
         print(indent + nodename)
         for n in propnames(node):
             print(indent + "%s: %s" % (n, node.__dict__[n]))
 
         self._stack.append(node)
         self.generic_visit(node)
         self._stack.pop()
 
 
 class YAMLTreeDumper(ast.NodeVisitor):
     def __init__(self):
         super().__init__()
         self._stack = []
 
     def node2yml(self, node):
         try:
             nodetype = type(node)
             nodename = node.__class__.__name__
             return {
                 "op": nodename,
                 "props": {n: node.__dict__[n] for n in propnames(node)},
                 "children": [],
             }
         except:
             print(repr(node), propnames(node), dir(node))
 
     def visit(self, node):
         yml_node = self.node2yml(node)
         self._stack.append(yml_node)
         old_stack = self._stack
         self._stack = yml_node["children"]
         self.generic_visit(node)
         self._stack = old_stack
         return yml_node
 
 
 if __name__ == "__main__":
     parser = optparse.OptionParser(usage="%prog [options] <filename.py>")
     opts, args = parser.parse_args()
 
     if len(args) == 0:
         parser.print_help()
         sys.exit(-1)
     filename = args[0]
 
     with open(filename) as f:
         root = ast.parse(f.read(), filename)
 
     print(yaml.dump(YAMLTreeDumper().visit(root), default_flow_style=False, sort_keys=False))
 

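As context for the hunk above, a hedged usage sketch (not part of this commit) that drives YAMLTreeDumper from Python rather than through the optparse entry point; it assumes only the names already shown in the hunk (ast, yaml, YAMLTreeDumper):

    import ast
    import yaml

    source = "x = 1 + 2\n"
    root = ast.parse(source, "<example>")
    # visit() returns nested {"op", "props", "children"} dicts, per node2yml above.
    tree = YAMLTreeDumper().visit(root)
    print(yaml.dump(tree, default_flow_style=False, sort_keys=False))
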
@@ -11,7 +11,6 @@ from kazoo.client import KazooClient
 from kazoo.exceptions import NodeExistsError
 from kazoo.protocol.states import ZnodeStat
 from kazoo.recipe.lock import Lock, ReadLock, WriteLock
-from kazoo.recipe.watchers import ChildrenWatch, DataWatch
 from kook.config import current_config, KookConfig
 from toolz.dicttoolz import assoc as _assoc, dissoc as _dissoc, merge as _merge, update_in
 

@@ -7,9 +7,9 @@ import sys
 import traceback
 
 from lilith.interpreter import Bindings, eval as lil_eval, Runtime
-from lilith.parser import Apply, Args, parse_expr, Symbol
-from lilith.reader import Def, Import, Module, read_buffer, read_file
-from prompt_toolkit import print_formatted_text, prompt, PromptSession
+from lilith.parser import parse_expr, Symbol
+from lilith.reader import Def, Module, read_buffer, read_file
+from prompt_toolkit import print_formatted_text, PromptSession
 from prompt_toolkit.formatted_text import FormattedText
 from prompt_toolkit.history import FileHistory
 from prompt_toolkit.styles import Style

@@ -6,7 +6,7 @@ import logging
 import typing as t
 from warnings import warn
 
-from lilith.parser import Args, Block, parse_buffer, Symbol
+from lilith.parser import Block, parse_buffer, Symbol
 
 
 log = logging.getLogger(__name__)

@@ -2,7 +2,7 @@
 Pytest fixtures.
 """
 
-from lilith.parser import Block, GRAMMAR, parser_with_transformer
+from lilith.parser import GRAMMAR, parser_with_transformer
 import pytest
 
 

@@ -1,6 +1,6 @@
 """tests covering the Lilith parser."""
 
-from lilith.parser import Apply, Args, Block, GRAMMAR, parse_buffer, parser_with_transformer, Symbol
+from lilith.parser import Apply, Args, Block, parse_buffer, Symbol
 import pytest
 
 

@@ -21,23 +21,14 @@ import socket
 import subprocess
 import sys
 from sys import exit
-import syslog
 from telnetlib import Telnet
 from threading import Event, Lock, Thread
 from time import sleep
 
-from kazoo.client import KazooClient
-from kazoo.exceptions import (
-    ConnectionLoss,
-    LockTimeout,
-    NodeExistsError,
-    NoNodeError,
-    SessionExpiredError
-)
+from kazoo.exceptions import ConnectionLoss, LockTimeout, SessionExpiredError
 from kazoo.handlers.threading import KazooTimeoutError
 from kazoo.recipe.lock import Lock as KazooLock
 from kook.client import KookClient, lock
-import yaml
 
 
 log = logging.getLogger("arrdem.overwatchd")

@@ -1,4 +1,4 @@
-from setuptools import find_packages, setup
+from setuptools import setup
 
 
 here = path.abspath(path.dirname(__file__))

@@ -3,7 +3,6 @@
 from hypothesis import given
 from hypothesis.strategies import integers
 import proquint
-import pytest
 
 
 @given(integers(min_value=0, max_value=1<<16))

@@ -10,7 +10,6 @@ import re
 from gandi.client import GandiAPI
 import jinja2
 import meraki
-import pkg_resources
 import yaml
 
 

@@ -7,8 +7,6 @@ import os
 import socket
 import sqlite3 as sql
 
-from ratchet import Event, Message, Request
-
 
 SCHEMA_SCRIPT = """
 PRAGMA foreign_keys = ON;

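SCHEMA_SCRIPT continues beyond this hunk. As a hedged illustration only, not the project's code, a script of this shape is normally applied with sqlite3's executescript; the table shown here is hypothetical:

    import sqlite3 as sql

    SCHEMA_SCRIPT = """
    PRAGMA foreign_keys = ON;

    CREATE TABLE IF NOT EXISTS messages (  -- hypothetical example table
        id INTEGER PRIMARY KEY,
        body TEXT NOT NULL
    );
    """

    conn = sql.connect(":memory:")   # throwaway in-memory database
    conn.executescript(SCHEMA_SCRIPT)
    conn.close()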