Move index handling from expression parsing into value parsing
parent ab9a7bb34e
commit 2278a16ca0
@@ -35,7 +35,7 @@ class Interp:
     def _eval_index(self, index: ast.Index) -> typing.Any:
         val = self._convert_value(index.value)
         if not hasattr(val, '__getitem__'):
-            raise ValueError(f'{index.value.pos}: value is not indexable ({type(val).__getitem__})')
+            raise ValueError(f'{index.value.pos}: value is not indexable ({type(val).__name__})')
         index_val = self._convert_value(index.index)
         if type(index_val) is int and hasattr(val, '__len__') and index_val >= len(val):
             raise IndexError(f'{index.index.pos}: index out of range ({index_val} with length {len(val)})')
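For context, the behavioural change in this hunk is only in the error message: it now reports `type(val).__name__` instead of the bound `type(val).__getitem__` method. Below is a minimal standalone sketch of the same checks; `eval_index` and the `pos` default are placeholders, not the project's real AST or interpreter API.

# Illustrative sketch only; `pos` stands in for the source position
# carried by the real ast nodes.
def eval_index(val, index_val, pos='1:1'):
    if not hasattr(val, '__getitem__'):
        # The fix: report the type's name, not its __getitem__ attribute.
        raise ValueError(f'{pos}: value is not indexable ({type(val).__name__})')
    if type(index_val) is int and hasattr(val, '__len__') and index_val >= len(val):
        raise IndexError(f'{pos}: index out of range ({index_val} with length {len(val)})')
    return val[index_val]

assert eval_index([123, 456, 789], 1) == 456
# eval_index(42, 0) would raise: ValueError: 1:1: value is not indexable (int)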
@@ -34,15 +34,14 @@ class Parser:
         tok, pos, lit = self._scan()
         if tok != lexer.Token.SQUARE or lit != ']':
             raise ExpectedError(pos, 'closing square bracket', lit)
+        while self.lexer._peek(1) == '[':
+            self._scan()
+            index = self._parse_index(index)
         return index
 
     def _parse_expr(self) -> ast.Value:
         left = self._parse_value()
         tok, pos, lit = self._scan()
-        while tok == lexer.Token.SQUARE and lit == '[':
-            left = self._parse_index(left)
-            # Scan the next token for the next if statement
-            tok, pos, lit = self._scan()
         if tok != lexer.Token.OPERATOR:
             self._unscan(tok, pos, lit)
             return left
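The loop added to `_parse_index` is what makes chained subscripts such as `y[1][2]` parse: each further `[` wraps the index node that was already built. A toy illustration of that left-nesting, using a throwaway dataclass rather than the project's `ast` module:

from dataclasses import dataclass

# Stand-in for the project's ast.Index node.
@dataclass
class Index:
    value: object
    index: object

def chain_indexes(value, indexes):
    # Mirrors the new while-loop: every additional "[...]" wraps the previous node.
    out = value
    for idx in indexes:
        out = Index(value=out, index=idx)
    return out

print(chain_indexes('y', [1, 2]))
# Index(value=Index(value='y', index=1), index=2)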
@@ -117,33 +116,35 @@ class Parser:
         return ast.FunctionCall(pos=id_pos, name=id_lit, args=args)
 
     def _parse_value(self) -> ast.Value:
+        out = None
         tok, pos, lit = self._scan()
         match tok:
             case lexer.Token.INTEGER:
-                return ast.Integer(pos=pos, value=int(lit))
+                out = ast.Integer(pos=pos, value=int(lit))
             case lexer.Token.FLOAT:
-                return ast.Float(pos=pos, value=float(lit))
+                out = ast.Float(pos=pos, value=float(lit))
             case lexer.Token.BOOL:
-                return ast.Bool(pos=pos, value=(lit == 'true'))
+                out = ast.Bool(pos=pos, value=(lit == 'true'))
             case lexer.Token.STRING:
-                return ast.String(pos=pos, value=pyast.literal_eval(lit))
+                out = ast.String(pos=pos, value=pyast.literal_eval(lit))
             case lexer.Token.IDENT:
                 if self.lexer._peek(1) == '(':
                     self._unscan(tok, pos, lit)
-                    return self._parse_func_call()
-                return ast.VariableRef(pos=pos, name=lit)
+                    out = self._parse_func_call()
+                else:
+                    out = ast.VariableRef(pos=pos, name=lit)
             case lexer.Token.HEREDOC:
-                return ast.String(pos=pos, value=lit)
+                out = ast.String(pos=pos, value=lit)
             case lexer.Token.OPERATOR:
-                return ast.UnaryExpression(pos=pos, op=ast.Operator(pos=pos, value=lit), value=self._parse_value())
+                out = ast.UnaryExpression(pos=pos, op=ast.Operator(pos=pos, value=lit), value=self._parse_value())
             case lexer.Token.SQUARE:
                 if lit != '[':
                     raise ExpectedError(pos, repr('['), lit)
-                return self._parse_tuple(pos)
+                out = self._parse_tuple(pos)
             case lexer.Token.CURLY:
                 if lit != '{':
                     raise ExpectedError(pos, repr('{'), lit)
-                return self._parse_object(pos)
+                out = self._parse_object(pos)
             case lexer.Token.PAREN:
                 if lit != '(':
                     raise ExpectedError(pos, repr('('), lit)
@@ -151,10 +152,16 @@ class Parser:
                 tok, pos, lit = self._scan()
                 if tok != lexer.Token.PAREN or lit != ')':
                     raise ExpectedError(pos, repr(')'), lit)
-                return expr
-        raise ExpectedError(pos, 'value', lit)
+                out = expr
+            case _:
+                raise ExpectedError(pos, 'value', lit)
 
+        if self.lexer._peek(1) == '[':
+            self._scan()
+            out = self._parse_index(out)
+
+        return out
 
     def parse(self, until: tuple[lexer.Token, str] = (lexer.Token.EOF, '')) -> ast.AST:
         tree = []
         while True:
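Taken together, the `_parse_value` hunks replace the early returns with assignments to `out`, so a single trailing check can attach a subscript to whatever kind of value was just parsed (literal, tuple, object, or parenthesised expression). One consequence is precedence: the subscript is consumed inside value parsing, before `_parse_expr` looks for an operator, so `a[0] + b` groups as `(a[0]) + b`. A schematic sketch of that control flow, with simplified (kind, text) tuples rather than the project's lexer tokens:

# Schematic only: the chaining the real parser delegates to _parse_index is
# inlined here as a loop.
def parse_value(tokens):
    kind, text = tokens.pop(0)
    out = None
    match kind:
        case 'INT':
            out = int(text)
        case 'IDENT':
            out = ('var', text)
        case _:
            raise ValueError(f'expected value, got {text!r}')
    # Single trailing check, as in the new _parse_value: wrap `out` if a
    # subscript follows, regardless of which case produced it.
    while tokens and tokens[0] == ('SQUARE', '['):
        tokens.pop(0)
        idx = parse_value(tokens)
        assert tokens.pop(0) == ('SQUARE', ']')
        out = ('index', out, idx)
    return out

toks = [('IDENT', 'a'), ('SQUARE', '['), ('INT', '0'), ('SQUARE', ']'), ('OP', '+')]
print(parse_value(toks))   # ('index', ('var', 'a'), 0) -- leaves '+' for the caller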
@@ -104,12 +104,17 @@ class TestRefs(unittest.TestCase):
         self.assertEqual(cfg['x'], 456)
 
     def test_index(self):
-        i = interp.Interp(io.StringIO("x = y[1]"), "TestRefs.test_index")
+        i = interp.Interp(io.StringIO('x = y[1]'), "TestRefs.test_index")
         i['y'] = [123, 456, 789]
         cfg = i.run()
         self.assertIn('x', cfg)
         self.assertEqual(cfg['x'], 456)
 
+    def test_multi_index(self):
+        cfg = interp.Interp(io.StringIO('x = ["123", "456", "789"][1][2]'), "TestRefs.test_multi_index").run()
+        self.assertIn('x', cfg)
+        self.assertEqual(cfg['x'], '6')
+
     def test_func(self):
         def y(a, b):
             return a + b
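The new test_multi_index case exercises exactly the chained-subscript path added above. Outside the test suite, the same behaviour could be driven directly like this (assuming the same `interp` module layout the tests import; the 'example' name is just a label):

import io
import interp

i = interp.Interp(io.StringIO('x = ["123", "456", "789"][1][2]'), 'example')
cfg = i.run()
print(cfg['x'])   # '6': element 1 is "456", and character 2 of that is "6"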