Format using black

Author: James Westman
Date: 2022-12-19 11:49:10 -06:00
Parent: 6a36d92380
Commit: 8fee46ec68
Signature: no known key found in database (GPG key ID: CE2DBA0ADB654EA6)
40 changed files with 975 additions and 610 deletions
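
The changes in this commit are the output of the black code formatter. As a point of reference, here is a minimal sketch of reproducing this kind of rewrite through black's Python API; the snippet and the default `Mode()` settings are assumptions, since the commit shows no black configuration (running `black .` over the repository is the usual route).

```python
# A sketch of applying black programmatically; assumes only that the "black"
# package is installed. The source snippet is illustrative, not taken from
# the repository.
import black

source = (
    'self.assert_tokenize("ident(){}; \\n <<+>>*/=", ['
    '(TokenType.IDENT, "ident"), (TokenType.EOF, "")])\n'
)

# format_str parses the snippet and reprints it in black's default style
# (88-column lines, double quotes, trailing commas on exploded calls).
print(black.format_str(source, mode=black.Mode()))
```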

@@ -32,47 +32,57 @@ class TestTokenizer(unittest.TestCase):
             for token, (type, token_str) in zip(tokens, expect):
                 self.assertEqual(token.type, type)
                 self.assertEqual(str(token), token_str)
-        except PrintableError as e: # pragma: no cover
+        except PrintableError as e:  # pragma: no cover
             e.pretty_print("<test input>", string)
             raise e
 
     def test_basic(self):
-        self.assert_tokenize("ident(){}; \n <<+>>*/=", [
-            (TokenType.IDENT, "ident"),
-            (TokenType.PUNCTUATION, "("),
-            (TokenType.PUNCTUATION, ")"),
-            (TokenType.PUNCTUATION, "{"),
-            (TokenType.PUNCTUATION, "}"),
-            (TokenType.PUNCTUATION, ";"),
-            (TokenType.WHITESPACE, " \n "),
-            (TokenType.OP, "<<"),
-            (TokenType.OP, "+"),
-            (TokenType.OP, ">>"),
-            (TokenType.OP, "*"),
-            (TokenType.OP, "/"),
-            (TokenType.OP, "="),
-            (TokenType.EOF, ""),
-        ])
+        self.assert_tokenize(
+            "ident(){}; \n <<+>>*/=",
+            [
+                (TokenType.IDENT, "ident"),
+                (TokenType.PUNCTUATION, "("),
+                (TokenType.PUNCTUATION, ")"),
+                (TokenType.PUNCTUATION, "{"),
+                (TokenType.PUNCTUATION, "}"),
+                (TokenType.PUNCTUATION, ";"),
+                (TokenType.WHITESPACE, " \n "),
+                (TokenType.OP, "<<"),
+                (TokenType.OP, "+"),
+                (TokenType.OP, ">>"),
+                (TokenType.OP, "*"),
+                (TokenType.OP, "/"),
+                (TokenType.OP, "="),
+                (TokenType.EOF, ""),
+            ],
+        )
 
     def test_quotes(self):
-        self.assert_tokenize(r'"this is a \n string""this is \\another \"string\""', [
-            (TokenType.QUOTED, r'"this is a \n string"'),
-            (TokenType.QUOTED, r'"this is \\another \"string\""'),
-            (TokenType.EOF, ""),
-        ])
+        self.assert_tokenize(
+            r'"this is a \n string""this is \\another \"string\""',
+            [
+                (TokenType.QUOTED, r'"this is a \n string"'),
+                (TokenType.QUOTED, r'"this is \\another \"string\""'),
+                (TokenType.EOF, ""),
+            ],
+        )
 
     def test_comments(self):
-        self.assert_tokenize('/* \n \\n COMMENT /* */', [
-            (TokenType.COMMENT, '/* \n \\n COMMENT /* */'),
-            (TokenType.EOF, ""),
-        ])
-        self.assert_tokenize('line // comment\nline', [
-            (TokenType.IDENT, 'line'),
-            (TokenType.WHITESPACE, ' '),
-            (TokenType.COMMENT, '// comment'),
-            (TokenType.WHITESPACE, '\n'),
-            (TokenType.IDENT, 'line'),
-            (TokenType.EOF, ""),
-        ])
+        self.assert_tokenize(
+            "/* \n \\n COMMENT /* */",
+            [
+                (TokenType.COMMENT, "/* \n \\n COMMENT /* */"),
+                (TokenType.EOF, ""),
+            ],
+        )
+        self.assert_tokenize(
+            "line // comment\nline",
+            [
+                (TokenType.IDENT, "line"),
+                (TokenType.WHITESPACE, " "),
+                (TokenType.COMMENT, "// comment"),
+                (TokenType.WHITESPACE, "\n"),
+                (TokenType.IDENT, "line"),
+                (TokenType.EOF, ""),
+            ],
+        )
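
The hunk above begins partway through the assert_tokenize helper that all three tests call. For context, a hedged sketch of what that helper plausibly looks like follows; the import paths, the signature, and everything not visible in the diff are assumptions about blueprint-compiler's layout, not a copy of the real file.

```python
# Hedged reconstruction of the test helper whose tail is visible at the top
# of the hunk. Module paths and the signature are assumptions.
import typing as T
import unittest

from blueprintcompiler.errors import PrintableError  # assumed module path
from blueprintcompiler.tokenizer import TokenType, tokenize  # assumed module path


class TestTokenizer(unittest.TestCase):
    def assert_tokenize(self, string: str, expect: T.List[T.Tuple[TokenType, str]]):
        try:
            # Tokenize the input and check each token's type and text against
            # the expected (type, string) pairs.
            tokens = tokenize(string)
            self.assertEqual(len(tokens), len(expect))
            for token, (type, token_str) in zip(tokens, expect):
                self.assertEqual(token.type, type)
                self.assertEqual(str(token), token_str)
        except PrintableError as e:  # pragma: no cover
            # If tokenization raises a compiler error, print it with source
            # context so the failing test is easier to diagnose, then re-raise.
            e.pretty_print("<test input>", string)
            raise e
```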