Mirror of https://gitlab.gnome.org/jwestman/blueprint-compiler.git (synced 2025-05-04 15:59:08 -04:00)
Add warning for sync-create
parent 32d4769f65
commit 9873a2072b

8 changed files with 62 additions and 19 deletions
@@ -35,6 +35,7 @@ class CompileError(PrintableError):
     """ A PrintableError with a start/end position and optional hints """

     category = "error"
+    color = Colors.RED

     def __init__(self, message, start=None, end=None, did_you_mean=None, hints=None, actions=None):
         super().__init__(message)
@@ -69,20 +70,25 @@ class CompileError(PrintableError):
             self.hint("Did you check your spelling?")
             self.hint("Are your dependencies up to date?")

-    def pretty_print(self, filename, code):
+    def pretty_print(self, filename, code, stream=sys.stdout):
         line_num, col_num = utils.idx_to_pos(self.start + 1, code)
         line = code.splitlines(True)[line_num]

         # Display 1-based line numbers
         line_num += 1

-        print(f"""{Colors.RED}{Colors.BOLD}{self.category}: {self.message}{Colors.CLEAR}
+        stream.write(f"""{self.color}{Colors.BOLD}{self.category}: {self.message}{Colors.CLEAR}
 at {filename} line {line_num} column {col_num}:
-{Colors.FAINT}{line_num :>4} |{Colors.CLEAR}{line.rstrip()}\n {Colors.FAINT}|{" "*(col_num-1)}^{Colors.CLEAR}""")
+{Colors.FAINT}{line_num :>4} |{Colors.CLEAR}{line.rstrip()}\n {Colors.FAINT}|{" "*(col_num-1)}^{Colors.CLEAR}\n""")

         for hint in self.hints:
-            print(f"{Colors.FAINT}hint: {hint}{Colors.CLEAR}")
+            stream.write(f"{Colors.FAINT}hint: {hint}{Colors.CLEAR}\n")
-        print()
+        stream.write("\n")


+class CompileWarning(CompileError):
+    category = "warning"
+    color = Colors.YELLOW
+
+
 class UnexpectedTokenError(CompileError):
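
This hunk routes diagnostics through an explicit stream argument and introduces the CompileWarning subclass. A minimal self-contained sketch of the same stream-parameter pattern, with illustrative names rather than the project's own code:

import sys
import typing as T

def pretty_print(category: str, message: str, stream: T.TextIO = sys.stdout) -> None:
    # Diagnostics go wherever the caller points them, so machine-readable
    # output on stdout stays clean.
    stream.write(f"{category}: {message}\n")

# Warnings routed to stderr, as cmd_compile does later in this commit.
pretty_print("warning", "sync-create is deprecated in favor of no-sync-create", stream=sys.stderr)
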
@@ -47,7 +47,10 @@ def decompile_file(in_file, out_file) -> T.Union[str, CouldNotPort]:
     try:
         # make sure the output compiles
         tokens = tokenizer.tokenize(decompiled)
-        ast, errors = parser.parse(tokens)
+        ast, errors, warnings = parser.parse(tokens)

+        for warning in warnings:
+            warning.pretty_print(out_file, decompiled)
+
         if errors:
             raise errors
@@ -34,10 +34,10 @@ class Property(AstNode):
             ".",
             UseIdent("bind_property").expected("a property name to bind from"),
             ZeroOrMore(AnyOf(
-                "sync-create",
                 ["no-sync-create", UseLiteral("no_sync_create", True)],
                 ["inverted", UseLiteral("inverted", True)],
                 ["bidirectional", UseLiteral("bidirectional", True)],
+                Match("sync-create").warn("sync-create is deprecated in favor of no-sync-create"),
             )),
         ),
         Statement(
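
With this hunk, a bare sync-create flag still parses but now records a deprecation warning instead of being accepted silently; no-sync-create, inverted, and bidirectional keep setting their literals as before. A rough standalone sketch of that flag handling, not the compiler's actual parse tree:

import typing as T

def parse_binding_flags(flags: T.List[str]) -> T.Tuple[dict, T.List[str]]:
    result: dict = {}
    warnings: T.List[str] = []
    for flag in flags:
        if flag == "sync-create":
            # Still accepted, but now reported to the user.
            warnings.append("sync-create is deprecated in favor of no-sync-create")
        elif flag == "no-sync-create":
            result["no_sync_create"] = True
        elif flag == "inverted":
            result["inverted"] = True
        elif flag == "bidirectional":
            result["bidirectional"] = True
    return result, warnings

print(parse_binding_flags(["sync-create", "inverted"]))
# ({'inverted': True}, ['sync-create is deprecated in favor of no-sync-create'])
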
@@ -55,7 +55,8 @@ class OpenFile:
         self.diagnostics = []
         try:
             self.tokens = tokenizer.tokenize(self.text)
-            self.ast, errors = parser.parse(self.tokens)
+            self.ast, errors, warnings = parser.parse(self.tokens)
+            self.diagnostics += warnings
             if errors is not None:
                 self.diagnostics += errors.errors
             self.diagnostics += self.ast.errors
@@ -18,6 +18,7 @@
 # SPDX-License-Identifier: LGPL-3.0-or-later


+import typing as T
 import argparse, json, os, sys

 from .errors import PrintableError, report_bug, MultipleErrors
@@ -78,7 +79,10 @@ class BlueprintApp:
     def cmd_compile(self, opts):
         data = opts.input.read()
         try:
-            xml = self._compile(data)
+            xml, warnings = self._compile(data)

+            for warning in warnings:
+                warning.pretty_print(opts.input.name, data, stream=sys.stderr)
+
             if opts.output == "-":
                 print(xml)
@@ -99,7 +103,10 @@ class BlueprintApp:
                     print(f"{Colors.RED}{Colors.BOLD}error: input file '{file.name}' is not in input directory '{opts.input_dir}'{Colors.CLEAR}")
                     sys.exit(1)

-                xml = self._compile(data)
+                xml, warnings = self._compile(data)

+                for warning in warnings:
+                    warning.pretty_print(file.name, data, stream=sys.stderr)
+
                 path = os.path.join(
                     opts.output_dir,
@@ -125,16 +132,16 @@ class BlueprintApp:
         interactive_port.run(opts)


-    def _compile(self, data: str) -> str:
+    def _compile(self, data: str) -> T.Tuple[str, T.List[PrintableError]]:
         tokens = tokenizer.tokenize(data)
-        ast, errors = parser.parse(tokens)
+        ast, errors, warnings = parser.parse(tokens)

         if errors:
             raise errors
         if len(ast.errors):
             raise MultipleErrors(ast.errors)

-        return ast.generate()
+        return ast.generate(), warnings


 def main():
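
_compile now returns an (xml, warnings) pair, so cmd_compile can print warnings to stderr while the generated XML stays on stdout. A self-contained sketch of that calling convention, using a placeholder compiler rather than the real pipeline:

import sys
import typing as T

def compile_blueprint(data: str) -> T.Tuple[str, T.List[str]]:
    # Placeholder for the real pipeline (tokenize, parse, generate); it only
    # demonstrates the new return shape.
    warnings = []
    if "sync-create" in data:
        warnings.append("sync-create is deprecated in favor of no-sync-create")
    return "<interface/>", warnings

xml, warnings = compile_blueprint("example blueprint source using sync-create")
for warning in warnings:
    print(f"warning: {warning}", file=sys.stderr)  # diagnostics to stderr
print(xml)                                         # XML stays on stdout
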
@@ -24,7 +24,7 @@ import typing as T
 from collections import defaultdict
 from enum import Enum

-from .errors import assert_true, CompilerBugError, CompileError, UnexpectedTokenError
+from .errors import assert_true, CompilerBugError, CompileError, CompileWarning, UnexpectedTokenError
 from .tokenizer import Token, TokenType

@@ -233,6 +233,10 @@ class ParseNode:
         """ Convenience method for err(). """
         return self.err("Expected " + expect)

+    def warn(self, message):
+        """ Causes this ParseNode to emit a warning if it parses successfully. """
+        return Warning(self, message)
+

 class Err(ParseNode):
     """ ParseNode that emits a compile error if it fails to parse. """
@@ -253,6 +257,23 @@ class Err(ParseNode):
         return True


+class Warning(ParseNode):
+    """ ParseNode that emits a compile warning if it parses successfully. """
+
+    def __init__(self, child, message):
+        self.child = to_parse_node(child)
+        self.message = message
+
+    def _parse(self, ctx):
+        ctx.skip()
+        start_idx = ctx.index
+        if self.child.parse(ctx).succeeded():
+            start_token = ctx.tokens[start_idx]
+            end_token = ctx.tokens[ctx.index]
+            ctx.warnings.append(CompileWarning(self.message, start_token.start, end_token.end))
+        return True
+
+
 class Fail(ParseNode):
     """ ParseNode that emits a compile error if it parses successfully. """

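
Warning is the warning-emitting counterpart of Err: it wraps a child node, and when the child parses successfully it appends a CompileWarning covering the matched token span while parsing continues. A simplified standalone illustration of that wrap-and-record idea, not the project's ParseNode API:

import typing as T

# Each warning records (message, start_index, end_index), mirroring how the real
# node stores the matched tokens' start and end offsets.
def match_with_warning(tokens: T.List[str], index: int, literal: str, message: str,
                       warnings: T.List[T.Tuple[str, int, int]]) -> int:
    if index < len(tokens) and tokens[index] == literal:
        warnings.append((message, index, index + 1))
        return index + 1  # consume the token; parsing continues normally
    return index          # no match: no warning, nothing consumed

warnings: T.List[T.Tuple[str, int, int]] = []
tokens = ["bind", "other_widget.prop", "sync-create"]
match_with_warning(tokens, 2, "sync-create",
                   "sync-create is deprecated in favor of no-sync-create", warnings)
print(warnings)
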
@@ -18,14 +18,14 @@
 # SPDX-License-Identifier: LGPL-3.0-or-later


-from .errors import MultipleErrors
+from .errors import MultipleErrors, PrintableError
 from .parse_tree import *
 from .parser_utils import *
 from .tokenizer import TokenType
 from .language import OBJECT_HOOKS, OBJECT_CONTENT_HOOKS, VALUE_HOOKS, Template, UI


-def parse(tokens) -> T.Tuple[UI, T.Optional[MultipleErrors]]:
+def parse(tokens) -> T.Tuple[UI, T.Optional[MultipleErrors], T.List[PrintableError]]:
     """ Parses a list of tokens into an abstract syntax tree. """

     ctx = ParseContext(tokens)
@@ -33,5 +33,6 @@ def parse(tokens) -> T.Tuple[UI, T.Optional[MultipleErrors]]:

     ast_node = ctx.last_group.to_ast() if ctx.last_group else None
     errors = MultipleErrors(ctx.errors) if len(ctx.errors) else None
+    warnings = ctx.warnings

-    return (ast_node, errors)
+    return (ast_node, errors, warnings)
@@ -38,12 +38,14 @@ class TestSamples(unittest.TestCase):
             expected = f.read()

             tokens = tokenizer.tokenize(blueprint)
-            ast, errors = parser.parse(tokens)
+            ast, errors, warnings = parser.parse(tokens)

             if errors:
                 raise errors
             if len(ast.errors):
                 raise MultipleErrors(ast.errors)
+            if len(warnings):
+                raise MultipleErrors(warnings)

             actual = ast.generate()
             if actual.strip() != expected.strip(): # pragma: no cover
@@ -63,12 +65,14 @@ class TestSamples(unittest.TestCase):
             expected = f.read()

             tokens = tokenizer.tokenize(blueprint)
-            ast, errors = parser.parse(tokens)
+            ast, errors, warnings = parser.parse(tokens)

             if errors:
                 raise errors
             if len(ast.errors):
                 raise MultipleErrors(ast.errors)
+            if len(warnings):
+                raise MultipleErrors(warnings)
         except PrintableError as e:
             def error_str(error):
                 line, col = utils.idx_to_pos(error.start + 1, blueprint)