tokenizer: Remove redundant token types

This commit is contained in:
James Westman 2022-02-09 15:24:56 -06:00
parent bac008296a
commit 3bdcc83c4e
4 changed files with 17 additions and 38 deletions

View file

@@ -40,11 +40,11 @@ class TestTokenizer(unittest.TestCase):
def test_basic(self):
self.assert_tokenize("ident(){}; \n <<+>>*/=", [
(TokenType.IDENT, "ident"),
(TokenType.OPEN_PAREN, "("),
(TokenType.CLOSE_PAREN, ")"),
(TokenType.OPEN_BLOCK, "{"),
(TokenType.CLOSE_BLOCK, "}"),
(TokenType.STMT_END, ";"),
(TokenType.PUNCTUATION, "("),
(TokenType.PUNCTUATION, ")"),
(TokenType.PUNCTUATION, "{"),
(TokenType.PUNCTUATION, "}"),
(TokenType.PUNCTUATION, ";"),
(TokenType.WHITESPACE, " \n "),
(TokenType.OP, "<<+>>*/="),
(TokenType.EOF, ""),