Make (fn) work

Reid 'arrdem' McKenzie 2020-07-18 20:06:24 -06:00
parent 226ece5eaa
commit 8e72dcd64f
2 changed files with 79 additions and 25 deletions

@@ -145,11 +145,11 @@ class LetExpr(ValueLevelExpr, NamedTuple):
class FnExpr(ValueLevelExpr, NamedTuple):
arguments: List
ret_expr: DoExpr
ret_type: TypeExpr
@property
def type(self) -> TypeExpr:
"""This is where the fun begins."""
return
return self.ret_type
## Reader implementation
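
As a quick illustration (not part of the commit), the new ret_type field is what FnExpr.type now reports; the names _t and DoExpr's field order are taken from this diff, and the import is hypothetical:

# Sketch only; assumes e.g. `from <analyzer module> import FnExpr, DoExpr, _t`.
body = DoExpr([], _t("x"))                    # DoExpr(effect_exprs, ret_expr), per analyze_do below
fn = FnExpr(arguments=[_t("x")], ret_expr=body, ret_type=_t("integer?"))
fn.type                                       # => SymbolToken('integer?', 'integer?', None), no longer None
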
@@ -162,29 +162,41 @@ class AnalyzerBase(ABC):
"""Analyze a token tree, returning an expr tree."""
def _t(txt):
return p.SymbolToken(txt, txt, None)
class Analyzer(AnalyzerBase):
"""A reference Analyzer implementation.
Walks a parsed token tree, building up a syntax tree.
"""
TACK0 = _t('⊢')
TACK1 = _t('|-')
LET = _t('let')
DO = _t('do')
FN = _t('fn')
LIST = _t('list')
QUOTE = _t('quote')
@classmethod
def _nows(cls, tokens):
return [t for t in tokens if not isinstance(t, p.WhitespaceToken)]
TACK0 = p.SymbolToken('⊢', '⊢', None)
TACK1 = p.SymbolToken('|-', '|-', None)
@classmethod
def _chomp(cls, tokens):
"""'chomp' an expression and optional ascription off the tokens, returning an expression and the remaining tokens."""
print(tokens)
if len(tokens) == 1:
return cls.analyze(tokens[0]), []
elif tokens[1] in [cls.TACK0, cls.TACK1]:
if len(tokens) >= 3:
return AscribeExpr(cls.analyze(tokens[0]), cls.analyze(tokens[2])), tokens[3:]
return (
AscribeExpr(
cls.analyze(tokens[0]),
cls.analyze(tokens[2])),
tokens[3:],
)
else:
raise SyntaxError(f"Analyzing tack at {tokens[1].pos}, did not find following type ascription!")
else:
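
For orientation, a rough sketch (not from the commit; token value-equality and in-scope names are assumed) of the two shapes _chomp handles, a bare expression and an expression followed by a ⊢ / |- type ascription:

# Sketch only; assumes Analyzer, _t, and AscribeExpr are importable from the analyzer module.
x, tack, it, y = _t("x"), _t("⊢"), _t("integer?"), _t("y")
Analyzer._chomp([x])                # => (SymbolToken('x', ...), [])
Analyzer._chomp([x, tack, it, y])   # => (AscribeExpr(<x expr>, <integer? expr>), [y])
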
@@ -219,14 +231,15 @@ class Analyzer(AnalyzerBase):
if isinstance(token, p.StringToken):
return StringExpr(token)
if isinstance(token, p.SymbolToken):
return token
if isinstance(token, p.ListToken):
return cls.analyze_list(token)
LET = p.SymbolToken('let', 'let', None)
DO = p.SymbolToken('do', 'do', None)
FN = p.SymbolToken('fn', 'fn', None)
LIST = p.SymbolToken('list', 'list', None)
QUOTE = p.SymbolToken('quote', 'quote', None)
@classmethod
def _do(cls, t, body: list):
return p.ListToken([cls.DO] + body, t.raw, t.pos)
@classmethod
def analyze_list(cls, token: p.ListToken):
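
A small sketch (not in the commit) of what the _do helper builds: it wraps leftover body tokens in a synthetic (do ...) list token, reusing the original form's raw/pos, so analyze_let and analyze_fn can delegate multi-form bodies to analyze_do; the form below is hypothetical:

form = p.ListToken([_t("fn"), _t("a"), _t("b")], "(fn a b)", None)   # hypothetical source form
wrapped = Analyzer._do(form, [_t("a"), _t("b")])
wrapped.data   # => [SymbolToken('do', ...), SymbolToken('a', ...), SymbolToken('b', ...)]
wrapped.raw    # => "(fn a b)", copied from the original form for error reporting
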
@@ -242,49 +255,87 @@ class Analyzer(AnalyzerBase):
raise NotImplementedError("Quote isn't quite there!")
if tokens[0] == cls.LIST:
return ListExpr(cls._terms(tokens[1::]))
return ListExpr(cls._terms(tokens[1:]))
if tokens[0] == cls.DO:
return cls.analyze_do(tokens[1::])
return cls.analyze_do(token)
if tokens[0] == cls.LET:
return cls.analyze_let(tokens[1::])
return cls.analyze_let(token)
if tokens[0] == cls.FN:
return cls.analyze_fn(tokens[1::])
return cls.analyze_fn(token)
cls.analyze_invoke(tokens)
@classmethod
def analyze_let(cls, tokens):
def analyze_let(cls, let_token):
tokens = cls._nows(let_token.data[1:])
assert len(tokens) >= 2
assert isinstance(tokens[0], p.ListToken)
bindings = []
binding_tokens = cls._nows(tokens[0].data)
tokens = tokens[1:]
while binding_tokens:
print("analyze_let", binding_tokens)
bindexpr, binding_tokens = cls._chomp(binding_tokens)
valexpr, binding_tokens = cls._chomp(binding_tokens)
if isinstance(binding_tokens[0], p.SymbolToken):
bindexpr = binding_tokens[0]
binding_tokens = binding_tokens[1:]
else:
raise SyntaxError(f"Analyzing `let` at {let_token.pos}, got illegal binding expression {binding_tokens[0]}")
if not binding_tokens:
raise SyntaxError(f"Analyzing `let` at {let_token.pos}, got binding expression without subsequent value expression!")
if binding_tokens[0] in [cls.TACK0, cls.TACK1]:
if len(binding_tokens) < 2:
raise SyntaxError(f"Analyzing `let` at {let_token.pos}, got `⊢` at {binding_tokens[0].pos} without type!")
bind_ascription = cls.analyze(binding_tokens[1])
binding_tokens = binding_tokens[2:]
bindexpr = AscribeExpr(bindexpr, bind_ascription)
if not binding_tokens:
raise SyntaxError(f"Analyzing `let` at {let_token.pos}, got binding expression without subsequent value expression!")
valexpr = binding_tokens[0]
binding_tokens = cls.analyze(binding_tokens[1:])
bindings.append((bindexpr, valexpr))
return LetExpr(bindings, cls.analyze_do(tokens[1::]))
# FIXME (arrdem 2020-07-18):
# This needs to happen with bindings
tail = tokens[0] if len(tokens) == 1 else cls._do(let_token, tokens)
return LetExpr(bindings, cls.analyze(tail))
@classmethod
def analyze_do(cls, tokens):
def analyze_do(cls, do_token):
tokens = cls._nows(do_token.data[1:])
exprs = cls._terms(tokens)
return DoExpr(exprs[::-1], exprs[-1])
return DoExpr(exprs[:-1], exprs[-1])
@classmethod
def analyze_fn(cls, tokens):
def analyze_fn(cls, fn_token):
tokens = cls._nows(fn_token.data[1:])
assert len(tokens) >= 2
assert isinstance(tokens[0], p.ListToken)
args = []
arg_tokens = cls._nows(tokens[0].data)
while arg_tokens:
argexpr, arg_tokens = cls._chomp(arg_tokens)
args.append(argexpr)
return FnExpr(args, cls.analyze_do(tokens[1::]))
ascription = None
if tokens[1] in [cls.TACK0, cls.TACK1]:
ascription = cls.analyze(tokens[2])
tokens = tokens[2:]
else:
tokens = tokens[1:]
# FIXME (arrdem 2020-07-18):
# This needs to happen with bindings
body = cls.analyze(cls._do(fn_token, tokens))
return FnExpr(args, body, ascription or body.type)
## Analysis interface
def analyzes(buff: str,

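Finally, a hedged end-to-end sketch (module and import names are assumed, not shown here) of how the reworked analyze_fn treats a hand-built token tree for (fn [x] ⊢ integer? x):

# Sketch only; assumes p, _t, and Analyzer are in scope from the analyzer module.
form = p.ListToken(
    [_t("fn"),
     p.ListToken([_t("x")], "[x]", None),   # argument list
     _t("⊢"), _t("integer?"),               # return type ascription
     _t("x")],                              # body
    "(fn [x] ⊢ integer? x)", None)
expr = Analyzer.analyze_fn(form)
expr.arguments   # => [SymbolToken('x', ...)]
expr.type        # => SymbolToken('integer?', ...), the ascribed return type
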
@@ -37,8 +37,11 @@ def test_analyze_constants(txt, exprtype):
('()', a.ListExpr, None),
('(list)', a.ListExpr, None),
('(list 1)', a.ListExpr, a.BuiltinType.INTEGER),
('(do 1)', a.DoExpr, a.BuiltinType.INTEGER),
('(do foo bar 1)', a.DoExpr, a.BuiltinType.INTEGER),
('(let [a 1] 1)', a.LetExpr, a.BuiltinType.INTEGER),
('(fn [] 1)', a.FnExpr, a.BuiltinType.INTEGER),
('(fn [] ⊢ integer? x)', a.FnExpr, p.SymbolToken('integer?', None, None)),
])
def test_analyze_rettype(txt, exprtype, rettype):
"""Make sure that do exprs work."""