Format using black

James Westman 2022-12-19 11:49:10 -06:00
parent 6a36d92380
commit 8fee46ec68
No known key found for this signature in database
GPG key ID: CE2DBA0ADB654EA6
40 changed files with 975 additions and 610 deletions
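The hunks below are black's default style applied across the test suite. The exact invocation isn't recorded in the commit message; a minimal sketch reproducing one of the one-line changes through black's Python API (assuming the default Mode: 88-column lines, double-quote normalization) would be:

    import black

    # Black normalizes inline comments to two spaces before the "#",
    # which is the entire diff on several lines below.
    src = "import difflib # I love Python\n"
    print(black.format_str(src, mode=black.Mode()), end="")
    # -> import difflib  # I love Python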


@@ -7,10 +7,16 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from blueprintcompiler import tokenizer, parser, decompiler, gir
from blueprintcompiler.completions import complete
-from blueprintcompiler.errors import PrintableError, MultipleErrors, CompileError, CompilerBugError
+from blueprintcompiler.errors import (
+    PrintableError,
+    MultipleErrors,
+    CompileError,
+    CompilerBugError,
+)
from blueprintcompiler.tokenizer import Token, TokenType, tokenize
from blueprintcompiler import utils
@PythonFuzz
def fuzz(buf):
try:
@@ -29,6 +35,7 @@ def fuzz(buf):
except UnicodeDecodeError:
pass
if __name__ == "__main__":
# Make sure Gtk 4.0 is accessible, otherwise every test will fail on that
# and nothing interesting will be tested
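The import rewrite in the first hunk is black's standard line-length split: a from-import longer than the limit is wrapped in parentheses, one name per line, with a trailing comma added so the shape stays stable under later edits. A sketch of the same behavior, using a hypothetical module and an artificially small line length to force the split:

    import black

    src = "from mymodule import alpha, beta, gamma\n"  # hypothetical names
    print(black.format_str(src, mode=black.Mode(line_length=20)), end="")
    # from mymodule import (
    #     alpha,
    #     beta,
    #     gamma,
    # )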


@@ -18,7 +18,7 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
-import difflib # I love Python
+import difflib  # I love Python
from pathlib import Path
import traceback
import unittest
@@ -59,23 +59,26 @@ class TestSamples(unittest.TestCase):
xml = XmlOutput()
actual = xml.emit(ast)
-            if actual.strip() != expected.strip(): # pragma: no cover
+            if actual.strip() != expected.strip():  # pragma: no cover
diff = difflib.unified_diff(expected.splitlines(), actual.splitlines())
print("\n".join(diff))
raise AssertionError()
self.assert_docs_dont_crash(blueprint, ast)
self.assert_completions_dont_crash(blueprint, ast, tokens)
-        except PrintableError as e: # pragma: no cover
+        except PrintableError as e:  # pragma: no cover
e.pretty_print(name + ".blp", blueprint)
raise AssertionError()
def assert_sample_error(self, name):
try:
-            with open((Path(__file__).parent / f"sample_errors/{name}.blp").resolve()) as f:
+            with open(
+                (Path(__file__).parent / f"sample_errors/{name}.blp").resolve()
+            ) as f:
blueprint = f.read()
-            with open((Path(__file__).parent / f"sample_errors/{name}.err").resolve()) as f:
+            with open(
+                (Path(__file__).parent / f"sample_errors/{name}.err").resolve()
+            ) as f:
expected = f.read()
tokens = tokenizer.tokenize(blueprint)
@@ -91,6 +94,7 @@ class TestSamples(unittest.TestCase):
if len(warnings):
raise MultipleErrors(warnings)
except PrintableError as e:
def error_str(error):
line, col = utils.idx_to_pos(error.start + 1, blueprint)
len = error.end - error.start
@@ -100,17 +104,16 @@ class TestSamples(unittest.TestCase):
actual = error_str(e)
elif isinstance(e, MultipleErrors):
actual = "\n".join([error_str(error) for error in e.errors])
-            else: # pragma: no cover
+            else:  # pragma: no cover
raise AssertionError()
-            if actual.strip() != expected.strip(): # pragma: no cover
+            if actual.strip() != expected.strip():  # pragma: no cover
diff = difflib.unified_diff(expected.splitlines(), actual.splitlines())
print("\n".join(diff))
raise AssertionError()
-        else: # pragma: no cover
+        else:  # pragma: no cover
raise AssertionError("Expected a compiler error, but none was emitted")
def assert_decompile(self, name):
try:
with open((Path(__file__).parent / f"samples/{name}.blp").resolve()) as f:
@@ -121,15 +124,14 @@ class TestSamples(unittest.TestCase):
actual = decompiler.decompile(ui_path)
-            if actual.strip() != expected.strip(): # pragma: no cover
+            if actual.strip() != expected.strip():  # pragma: no cover
diff = difflib.unified_diff(expected.splitlines(), actual.splitlines())
print("\n".join(diff))
raise AssertionError()
-        except PrintableError as e: # pragma: no cover
+        except PrintableError as e:  # pragma: no cover
e.pretty_print(name + ".blp", blueprint)
raise AssertionError()
def test_samples(self):
self.assert_sample("accessibility")
self.assert_sample("action_widgets")
@@ -161,7 +163,6 @@ class TestSamples(unittest.TestCase):
self.assert_sample("unchecked_class")
self.assert_sample("using")
def test_sample_errors(self):
self.assert_sample_error("a11y_in_non_widget")
self.assert_sample_error("a11y_prop_dne")
@@ -209,7 +210,6 @@ class TestSamples(unittest.TestCase):
self.assert_sample_error("using_invalid_namespace")
self.assert_sample_error("widgets_in_non_size_group")
def test_decompiler(self):
self.assert_decompile("accessibility_dec")
self.assert_decompile("binding")
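The assertions in this file follow a golden-file pattern: compile a sample, compare against the stored expected output, and print a unified diff on mismatch. A self-contained sketch of the difflib call they rely on:

    import difflib

    expected = "a\nb\nc\n".splitlines()
    actual = "a\nX\nc\n".splitlines()
    # Empty output means the two strings matched.
    print("\n".join(difflib.unified_diff(expected, actual)))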


@@ -32,47 +32,57 @@ class TestTokenizer(unittest.TestCase):
for token, (type, token_str) in zip(tokens, expect):
self.assertEqual(token.type, type)
self.assertEqual(str(token), token_str)
-        except PrintableError as e: # pragma: no cover
+        except PrintableError as e:  # pragma: no cover
e.pretty_print("<test input>", string)
raise e
def test_basic(self):
self.assert_tokenize("ident(){}; \n <<+>>*/=", [
(TokenType.IDENT, "ident"),
(TokenType.PUNCTUATION, "("),
(TokenType.PUNCTUATION, ")"),
(TokenType.PUNCTUATION, "{"),
(TokenType.PUNCTUATION, "}"),
(TokenType.PUNCTUATION, ";"),
(TokenType.WHITESPACE, " \n "),
(TokenType.OP, "<<"),
(TokenType.OP, "+"),
(TokenType.OP, ">>"),
(TokenType.OP, "*"),
(TokenType.OP, "/"),
(TokenType.OP, "="),
(TokenType.EOF, ""),
])
self.assert_tokenize(
"ident(){}; \n <<+>>*/=",
[
(TokenType.IDENT, "ident"),
(TokenType.PUNCTUATION, "("),
(TokenType.PUNCTUATION, ")"),
(TokenType.PUNCTUATION, "{"),
(TokenType.PUNCTUATION, "}"),
(TokenType.PUNCTUATION, ";"),
(TokenType.WHITESPACE, " \n "),
(TokenType.OP, "<<"),
(TokenType.OP, "+"),
(TokenType.OP, ">>"),
(TokenType.OP, "*"),
(TokenType.OP, "/"),
(TokenType.OP, "="),
(TokenType.EOF, ""),
],
)
def test_quotes(self):
-        self.assert_tokenize(r'"this is a \n string""this is \\another \"string\""', [
-            (TokenType.QUOTED, r'"this is a \n string"'),
-            (TokenType.QUOTED, r'"this is \\another \"string\""'),
-            (TokenType.EOF, ""),
-        ])
+        self.assert_tokenize(
+            r'"this is a \n string""this is \\another \"string\""',
+            [
+                (TokenType.QUOTED, r'"this is a \n string"'),
+                (TokenType.QUOTED, r'"this is \\another \"string\""'),
+                (TokenType.EOF, ""),
+            ],
+        )
def test_comments(self):
-        self.assert_tokenize('/* \n \\n COMMENT /* */', [
-            (TokenType.COMMENT, '/* \n \\n COMMENT /* */'),
-            (TokenType.EOF, ""),
-        ])
-        self.assert_tokenize('line // comment\nline', [
-            (TokenType.IDENT, 'line'),
-            (TokenType.WHITESPACE, ' '),
-            (TokenType.COMMENT, '// comment'),
-            (TokenType.WHITESPACE, '\n'),
-            (TokenType.IDENT, 'line'),
-            (TokenType.EOF, ""),
-        ])
+        self.assert_tokenize(
+            "/* \n \\n COMMENT /* */",
+            [
+                (TokenType.COMMENT, "/* \n \\n COMMENT /* */"),
+                (TokenType.EOF, ""),
+            ],
+        )
+        self.assert_tokenize(
+            "line // comment\nline",
+            [
+                (TokenType.IDENT, "line"),
+                (TokenType.WHITESPACE, " "),
+                (TokenType.COMMENT, "// comment"),
+                (TokenType.WHITESPACE, "\n"),
+                (TokenType.IDENT, "line"),
+                (TokenType.EOF, ""),
+            ],
+        )
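The exploded argument lists in this file come from two black rules working together: a call that exceeds the line length is split one argument per line, and a pre-existing trailing comma (black's "magic trailing comma") keeps a collection exploded even when it would fit on one line. A sketch of the trailing-comma rule on a hypothetical literal:

    import black

    # The trailing comma after (2, 'b') forces one element per line;
    # black also rewrites the single quotes to double quotes.
    src = "pairs = [(1, 'a'), (2, 'b'),]\n"
    print(black.format_str(src, mode=black.Mode()), end="")
    # pairs = [
    #     (1, "a"),
    #     (2, "b"),
    # ]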