Fmt.

parent 223fd5688b
commit a6e59b2e0c

28 changed files with 195 additions and 57 deletions
@@ -1,5 +1,12 @@
-from .core import from_path, from_str, SQLOperationType
-from .exceptions import SQLLoadException, SQLParseException
+from .core import (
+    from_path,
+    from_str,
+    SQLOperationType,
+)
+from .exceptions import (
+    SQLLoadException,
+    SQLParseException,
+)
 
 
 __all__ = [
@@ -2,7 +2,10 @@ import os
 
 from .adapters.psycopg2 import PsycoPG2Adapter
 from .adapters.sqlite3 import SQLite3DriverAdapter
-from .exceptions import SQLLoadException, SQLParseException
+from .exceptions import (
+    SQLLoadException,
+    SQLParseException,
+)
 from .patterns import (
     doc_comment_pattern,
     empty_pattern,
@@ -3,10 +3,11 @@ A tree deduplicator and archiver tool.
 """
 
 import argparse
-from pathlib import Path
 from hashlib import sha256
+from pathlib import Path
 from shutil import copy2 as copyfile
 
+
 parser = argparse.ArgumentParser()
 parser.add_argument("from_dir", type=Path)
 parser.add_argument("to_dir", type=Path)
@@ -17,18 +17,16 @@ Inspired by https://github.com/herval/org_photos/blob/main/org_photos.rb
 """
 
 import argparse
-import sys
-from pathlib import Path
-from hashlib import sha256, sha512
-from datetime import date
 from datetime import datetime
-from shutil import copy2 as copyfile
+from hashlib import sha256, sha512
+from pathlib import Path
 import re
+from shutil import copy2 as copyfile
+import sys
 import typing as t
 
 # FIXME: use piexif, which supports writeback not exifread.
 import exifread
-from exifread.classes import IfdTag
 
 
 parser = argparse.ArgumentParser()
@@ -36,7 +34,7 @@ parser.add_argument("src_dir", type=Path)
 parser.add_argument("dest_dir", type=Path)
 
 
-MODIFIED_ISO_DATE = '%Y:%m:%dT%H:%M:%SF%f'
+MODIFIED_ISO_DATE = "%Y:%m:%dT%H:%M:%SF%f"
 
 
 def take(n, iter):
@@ -51,7 +49,7 @@ def take(n, iter):
 
 def exif_tags(p: Path) -> object:
     """Return the EXIF tags on an image."""
-    with open(p, 'rb') as fp:
+    with open(p, "rb") as fp:
         return exifread.process_file(fp)
 
 # EXIF tags dataset (exifread edition) -
@@ -430,7 +428,7 @@ def date_from_name(p: Path):
        # A bug
        # 2014:08:21T19:4640F1408672000
        # 2015:12:14T23:0933F1450159773
-       '%Y:%m:%dT%H:%M%SF%f',
+       "%Y:%m:%dT%H:%M%SF%f",
 
        # 2020-12-21 17.15.09.0
        "%Y-%m-%d %H.%M.%S.%f",
@@ -556,9 +554,9 @@ def img_info(p: Path) -> ImgInfo:
     camera_make = get_tag("Image Make", "Unknown")
     camera_model = get_tag("Image Model", "Unknown")
     camera_sn = get_tag("MakerNote SerialNumber", "Unknown")
-    lens_make = get_tag('EXIF LensMake', "Unknown")
-    lens_model = get_tag('EXIF LensModel', "Unknown")
-    lens_sn = get_tag('EXIF LensSerialNumber', "Unknown")
+    lens_make = get_tag("EXIF LensMake", "Unknown")
+    lens_model = get_tag("EXIF LensModel", "Unknown")
+    lens_sn = get_tag("EXIF LensSerialNumber", "Unknown")
     software = get_tag("Image Software", "Unknown")
     dirty = False
 
@@ -9,7 +9,10 @@ zonefiles through the parser.
 
 from types import LambdaType
 
-from bussard.gen.parser import parse as _parse, Parser  # noqa
+from bussard.gen.parser import (  # noqa
+    parse as _parse,
+    Parser,
+)
 from bussard.gen.types import *  # noqa
 
 
@@ -3,7 +3,12 @@ Tests of the Bussard reader.
 """
 
 import bussard.reader as t
-from bussard.reader import Actions, Parser, read, read1
+from bussard.reader import (
+    Actions,
+    Parser,
+    read,
+    read1,
+)
 
 
 def parse_word(input):
@@ -2,7 +2,10 @@
 
 from os import path
 
-from setuptools import find_namespace_packages, setup
+from setuptools import (
+    find_namespace_packages,
+    setup,
+)
 
 
 # Fetch the README contents
@@ -54,7 +54,11 @@ import sys
 
 from datalog.debris import Timing
 from datalog.evaluator import select
-from datalog.reader import pr_str, read_command, read_dataset
+from datalog.reader import (
+    pr_str,
+    read_command,
+    read_dataset,
+)
 from datalog.types import (
     CachedDataset,
     Dataset,
@@ -64,8 +68,13 @@ from datalog.types import (
     TableIndexedDataset,
 )
 
-from prompt_toolkit import print_formatted_text, PromptSession
-from prompt_toolkit.formatted_text import FormattedText
+from prompt_toolkit import (
+    print_formatted_text,
+    PromptSession,
+)
+from prompt_toolkit.formatted_text import (
+    FormattedText,
+)
 from prompt_toolkit.history import FileHistory
 from prompt_toolkit.styles import Style
 from yaspin import Spinner, yaspin
@@ -8,9 +8,18 @@ Easy because it's closer to hand, but no simpler.
 
 from typing import Sequence, Tuple
 
-from datalog.evaluator import join as __join, select as __select
+from datalog.evaluator import (
+    join as __join,
+    select as __select,
+)
 from datalog.reader import read as __read
-from datalog.types import Constant, Dataset, LTuple, LVar, PartlyIndexedDataset
+from datalog.types import (
+    Constant,
+    Dataset,
+    LTuple,
+    LVar,
+    PartlyIndexedDataset,
+)
 
 
 def read(text: str, db_cls=PartlyIndexedDataset):
@@ -4,7 +4,13 @@ A datalog engine.
 
 from functools import reduce
 
-from datalog.types import CachedDataset, Constant, Dataset, LVar, TableIndexedDataset
+from datalog.types import (
+    CachedDataset,
+    Constant,
+    Dataset,
+    LVar,
+    TableIndexedDataset,
+)
 
 
 def match(tuple, expr, bindings=None):
@@ -4,8 +4,17 @@ A datalog reader.
 
 from collections import defaultdict
 
-from datalog.parser import FAILURE, Grammar, ParseError
-from datalog.types import Constant, Dataset, LVar, Rule
+from datalog.parser import (
+    FAILURE,
+    Grammar,
+    ParseError,
+)
+from datalog.types import (
+    Constant,
+    Dataset,
+    LVar,
+    Rule,
+)
 
 
 class Actions(object):
@@ -3,7 +3,12 @@ The Flowmetal server entry point.
 """
 
 import click
-from flowmetal import frontend, interpreter, reaper, scheduler
+from flowmetal import (
+    frontend,
+    interpreter,
+    reaper,
+    scheduler,
+)
 
 
 @click.group()
@@ -2,7 +2,10 @@
 An abstract or base Flowmetal DB.
 """
 
-from abc import abstractclassmethod, abstractmethod
+from abc import (
+    abstractclassmethod,
+    abstractmethod,
+)
 
 
 class Db(ABC):
@@ -9,7 +9,10 @@ import sqlite3
 from typing import NamedTuple, Optional as Maybe
 
 import anosql
-from anosql_migrations import run_migrations, with_migrations
+from anosql_migrations import (
+    run_migrations,
+    with_migrations,
+)
 
 
 _GET_JOB_FIELDS = """\
@@ -6,7 +6,13 @@ import argparse
 import logging
 import os
 
-from flask import abort, current_app, Flask, jsonify, request
+from flask import (
+    abort,
+    current_app,
+    Flask,
+    jsonify,
+    request,
+)
 from jobq import Job, JobQueue
 
 
@@ -5,12 +5,22 @@ from itertools import chain
 import json
 import sys
 import time
-from typing import Any, Iterable, Optional, Tuple, Union
+from typing import (
+    Any,
+    Iterable,
+    Optional,
+    Tuple,
+    Union,
+)
 
 from kazoo.client import KazooClient
 from kazoo.exceptions import NodeExistsError
 from kazoo.protocol.states import ZnodeStat
-from kazoo.recipe.lock import Lock, ReadLock, WriteLock
+from kazoo.recipe.lock import (
+    Lock,
+    ReadLock,
+    WriteLock,
+)
 from kook.config import current_config, KookConfig
 from toolz.dicttoolz import (
     assoc as _assoc,
@@ -1,16 +1,32 @@
 """The Lilith runner."""
 
 import argparse
-from importlib.resources import read_text as resource_text
+from importlib.resources import (
+    read_text as resource_text,
+)
 import logging
 import sys
 import traceback
 
-from lilith.interpreter import Bindings, eval as lil_eval, Runtime
+from lilith.interpreter import (
+    Bindings,
+    eval as lil_eval,
+    Runtime,
+)
 from lilith.parser import parse_expr, Symbol
-from lilith.reader import Def, Module, read_buffer, read_file
-from prompt_toolkit import print_formatted_text, PromptSession
-from prompt_toolkit.formatted_text import FormattedText
+from lilith.reader import (
+    Def,
+    Module,
+    read_buffer,
+    read_file,
+)
+from prompt_toolkit import (
+    print_formatted_text,
+    PromptSession,
+)
+from prompt_toolkit.formatted_text import (
+    FormattedText,
+)
 from prompt_toolkit.history import FileHistory
 from prompt_toolkit.styles import Style
 import yaml
@@ -5,7 +5,12 @@ A quick and dirty recursive interpreter for Lilith.
 import logging
 import typing as t
 
-from lilith.parser import Apply, Args, Block, Symbol
+from lilith.parser import (
+    Apply,
+    Args,
+    Block,
+    Symbol,
+)
 from lilith.reader import Def, Import, Module
 
 
@@ -6,7 +6,11 @@ import logging
 import typing as t
 from warnings import warn
 
-from lilith.parser import Block, parse_buffer, Symbol
+from lilith.parser import (
+    Block,
+    parse_buffer,
+    Symbol,
+)
 
 
 log = logging.getLogger(__name__)
@@ -2,7 +2,10 @@
 Pytest fixtures.
 """
 
-from lilith.parser import GRAMMAR, parser_with_transformer
+from lilith.parser import (
+    GRAMMAR,
+    parser_with_transformer,
+)
 import pytest
 
 
@@ -2,7 +2,11 @@
 
 """
 
-from lilith.interpreter import Bindings, eval, Runtime
+from lilith.interpreter import (
+    Bindings,
+    eval,
+    Runtime,
+)
 from lilith.parser import Apply, Args, Symbol
 from lilith.reader import Def, Module
 import pytest
@@ -1,6 +1,12 @@
 """tests covering the Lilith parser."""
 
-from lilith.parser import Apply, Args, Block, parse_buffer, Symbol
+from lilith.parser import (
+    Apply,
+    Args,
+    Block,
+    parse_buffer,
+    Symbol,
+)
 import pytest
 
 
@@ -1,6 +1,11 @@
 """Tests covering the reader."""
 
-from lilith.parser import Apply, Args, Block, Symbol
+from lilith.parser import (
+    Apply,
+    Args,
+    Block,
+    Symbol,
+)
 from lilith.reader import Def, Module, read_buffer
 import pytest
 
@@ -25,8 +25,14 @@ from telnetlib import Telnet
 from threading import Event, Lock, Thread
 from time import sleep
 
-from kazoo.exceptions import ConnectionLoss, LockTimeout, SessionExpiredError
-from kazoo.handlers.threading import KazooTimeoutError
+from kazoo.exceptions import (
+    ConnectionLoss,
+    LockTimeout,
+    SessionExpiredError,
+)
+from kazoo.handlers.threading import (
+    KazooTimeoutError,
+)
 from kazoo.recipe.lock import Lock as KazooLock
 from kook.client import KookClient, lock
 
@@ -9,7 +9,12 @@ import re
 import typing as t
 
 import yaml
-from yaml.nodes import MappingNode, Node, ScalarNode, SequenceNode
+from yaml.nodes import (
+    MappingNode,
+    Node,
+    ScalarNode,
+    SequenceNode,
+)
 
 
 log = logging.getLogger(__name__)
setup.cfg (10 changed lines)
@@ -1,21 +1,17 @@
 [isort]
-line_length = 100
+line_length = 50
 indent = 4
 # 1/Vertical
 multi_line_output = 3
 lines_after_imports = 2
 default_section = THIRDPARTY
 known_localfolder = datalog
-sections = [
-    FUTURE,
-    STDLIB,
-    LOCALFOLDER,
-    THIRDPARTY,
-]
+sections = FUTURE,STDLIB,LOCALFOLDER,THIRDPARTY,
 
 force_sort_within_sections = 1
 force_alphabetical_sort_within_sections = 1
 combine_as_imports = 1
+include_trailing_comma = 1
 
 [distutils]
 index-servers = pypi
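Note: the [isort] settings above are what drive the import rewrites in the rest of this commit: multi_line_output = 3 is the vertical-hanging-indent style, include_trailing_comma = 1 adds the trailing comma, and the lower line_length pushes most multi-name imports onto the parenthesised form, while the pre-existing force_sort_within_sections and force_alphabetical_sort_within_sections options sort plain and from-imports together alphabetically. A minimal sketch of the resulting style, using an import already touched in this commit (illustration only, not new API):

# Single-line form, as it was before this commit:
# from lilith.parser import Apply, Args, Block, parse_buffer, Symbol

# Vertical-hanging-indent form produced by the settings above
# (multi_line_output = 3, include_trailing_comma = 1, indent = 4):
from lilith.parser import (
    Apply,
    Args,
    Block,
    parse_buffer,
    Symbol,
)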
@@ -4,7 +4,10 @@ Validate 3rdparty library licenses as approved.
 
 import re
 
-from pkg_resources import DistInfoDistribution, working_set
+from pkg_resources import (
+    DistInfoDistribution,
+    working_set,
+)
 import pytest
 
 
@@ -19,8 +19,13 @@ import livereload
 from sphinx.application import Sphinx
 from sphinx.cmd.quickstart import main as new
 from sphinx.ext.apidoc import main as apidoc
-from sphinx.ext.autosummary.generate import main as autosummary
-from sphinx.util.docutils import docutils_namespace, patch_docutils
+from sphinx.ext.autosummary.generate import (
+    main as autosummary,
+)
+from sphinx.util.docutils import (
+    docutils_namespace,
+    patch_docutils,
+)
 
 
 @click.group()