Skip to content

Commit

Permalink
Added tests for old custom lexer interface (future_interface=1)
Browse files Browse the repository at this point in the history
  • Loading branch information
erezsh committed Aug 19, 2024
1 parent c5751e8 commit deda6aa
Showing 1 changed file with 22 additions and 8 deletions.
30 changes: 22 additions & 8 deletions tests/test_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -1011,7 +1011,19 @@ def lex(self, lexer_state, parser_state):

__future_interface__ = 2

class CustomLexerOld(Lexer):
class CustomLexerOld1(Lexer):
    """Integration-test lexer exercising the old (v1) custom-lexer interface.

    It performs no custom lexing of its own: all work is delegated to a
    standard BasicLexer built from a copy of the lexer configuration, so only
    the interface wiring (``__future_interface__ = 1``) is being tested.
    """

    # Declare the legacy (version 1) custom-lexer protocol.
    __future_interface__ = 1

    def __init__(self, lexer_conf):
        # Copy the config so the wrapped lexer cannot mutate the caller's.
        self.lexer = BasicLexer(copy(lexer_conf))

    def lex(self, lexer_state, parser_state):
        # Pure pass-through to the underlying basic lexer.
        return self.lexer.lex(lexer_state, parser_state)

class CustomLexerOld0(Lexer):
"""
Purpose of this custom lexer is to test the integration,
so it uses the traditionalparser as implementation without custom lexing behaviour.
Expand Down Expand Up @@ -1098,7 +1110,8 @@ def load(self,f):
def _make_parser_test(LEXER, PARSER):
lexer_class_or_name = {
'custom_new': CustomLexerNew,
'custom_old': CustomLexerOld,
'custom_old1': CustomLexerOld1,
'custom_old0': CustomLexerOld0,
}.get(LEXER, LEXER)

def _Lark(grammar, **kwargs):
Expand Down Expand Up @@ -1652,7 +1665,7 @@ def test_token_flags(self):
tree = l.parse('AB,a')
self.assertEqual(tree.children, ['AB'])

@unittest.skipIf(LEXER in ('basic', 'custom_old', 'custom_new'), "Requires context sensitive terminal selection")
@unittest.skipIf(LEXER in ('basic', 'custom_old0', 'custom_old1', 'custom_new'), "Requires context sensitive terminal selection")
def test_token_flags_collision(self):

g = """!start: "a"i "a"
Expand Down Expand Up @@ -2411,7 +2424,7 @@ def test_meddling_unused(self):
parser = _Lark(grammar)


@unittest.skipIf(PARSER!='lalr' or LEXER == 'custom_old', "Serialize currently only works for LALR parsers without custom lexers (though it should be easy to extend)")
@unittest.skipIf(PARSER!='lalr' or LEXER == 'custom_old0', "Serialize currently only works for LALR parsers without custom lexers (though it should be easy to extend)")
def test_serialize(self):
grammar = """
start: _ANY b "C"
Expand Down Expand Up @@ -2457,7 +2470,7 @@ def test_lexer_detect_newline_tokens(self):
self.assertEqual(a.line, 1)
self.assertEqual(b.line, 2)

@unittest.skipIf(PARSER=='cyk' or LEXER=='custom_old', "match_examples() not supported for CYK/old custom lexer")
@unittest.skipIf(PARSER=='cyk' or LEXER=='custom_old0', "match_examples() not supported for CYK/old custom lexer")
def test_match_examples(self):
p = _Lark(r"""
start: "a" "b" "c"
Expand Down Expand Up @@ -2666,7 +2679,7 @@ def test_strict(self):
"""
self.assertRaises(GrammarError, _Lark, grammar, strict=True)

@unittest.skipIf(LEXER in ('dynamic', 'dynamic_complete', 'custom_old'),
@unittest.skipIf(LEXER in ('dynamic', 'dynamic_complete', 'custom_old0', 'custom_old1'),
"start_pos and end_pos not compatible with old style custom/dynamic lexer ")
def test_parse_textslice(self):
grammar = r"""
Expand Down Expand Up @@ -2710,7 +2723,7 @@ def test_parse_textslice(self):
assert t.line == 9


@unittest.skipIf(LEXER not in ('dynamic', 'dynamic_complete', 'custom_old'),
@unittest.skipIf(LEXER not in ('dynamic', 'dynamic_complete', 'custom_old0', 'custom_old1'),
"start_pos and end_pos not compatible with old style custom/dynamic lexer ")
def test_parse_textslice_fails(self):
parser = _Lark("start: ")
Expand All @@ -2736,7 +2749,8 @@ def test_parse_textslice_fails(self):

('custom_new', 'lalr'),
('custom_new', 'cyk'),
('custom_old', 'earley'),
('custom_old0', 'earley'),
('custom_old1', 'earley'),
]

for _LEXER, _PARSER in _TO_TEST:
Expand Down

0 comments on commit deda6aa

Please sign in to comment.