Mirror of https://gitlab.gnome.org/jwestman/blueprint-compiler.git, synced 2025-05-06 16:19:07 -04:00
Compare commits
9 commits
5e6a34bfbd...49e6b075f4
49e6b075f4
72319b29c6
67983aee2e
1205fc42ea
e9206809d6
d5b2ee3589
8f3ae9a626
e5d6910626
b26433d865
18 changed files with 226 additions and 91 deletions
@@ -196,6 +196,13 @@ class AstNode:
 
         return None
 
+    def get_child_at(self, idx: int) -> "AstNode":
+        for child in self.children:
+            if idx in child.range:
+                return child.get_child_at(idx)
+
+        return self
+
     def get_semantic_tokens(self) -> T.Iterator[SemanticToken]:
         for child in self.children:
             yield from child.get_semantic_tokens()
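The new `get_child_at` returns the deepest AST node whose range contains a given cursor offset; the rewritten completion entry point below uses it to pick a starting node. A minimal standalone sketch of the same recursive lookup (toy `Node` class with plain integer ranges, not the project's `AstNode`/`Range` types):

```python
from dataclasses import dataclass, field
from typing import List


@dataclass
class Node:
    start: int
    end: int
    children: List["Node"] = field(default_factory=list)

    def get_child_at(self, idx: int) -> "Node":
        # Descend into the first child whose range contains idx;
        # if no child matches, this node is the deepest match.
        for child in self.children:
            if child.start <= idx <= child.end:
                return child.get_child_at(idx)
        return self


# Root spans 0..20, with a child at 5..10 and a grandchild at 6..8.
root = Node(0, 20, [Node(5, 10, [Node(6, 8)])])
assert root.get_child_at(7) == Node(6, 8)
assert root.get_child_at(15) is root
```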
@@ -21,7 +21,18 @@ import typing as T
 
 from . import annotations, gir, language
 from .ast_utils import AstNode
-from .completions_utils import *
+from .completions_utils import (
+    CompletionContext,
+    CompletionItemKind,
+    CompletionPriority,
+    completer,
+    completers,
+    get_object_id_completions,
+    get_property_completion,
+    get_sort_key,
+    new_statement_patterns,
+)
 from .language.contexts import ValueTypeCtx
 from .language.types import ClassName
 from .lsp_utils import Completion, CompletionItemKind, TextEdit, get_docs_section
 from .parser import SKIP_TOKENS
@@ -38,13 +49,6 @@ def _complete(
     token_idx: int,
     next_token: Token,
 ) -> T.Iterator[Completion]:
-    for child in ast_node.children:
-        if child.group.start <= idx and (
-            idx < child.group.end or (idx == child.group.end and child.incomplete)
-        ):
-            yield from _complete(lsp, child, tokens, idx, token_idx, next_token)
-            return
-
     prev_tokens: T.List[Token] = []
 
     # collect the 5 previous non-skipped tokens
@@ -54,7 +58,7 @@ def _complete(
         prev_tokens.insert(0, token)
         token_idx -= 1
 
-    for completer in ast_node.completers:
+    for completer in completers:
         yield from completer(prev_tokens, next_token, ast_node, lsp, idx)
 
 
@@ -79,12 +83,22 @@ def complete(
     if tokens[token_idx].type == TokenType.IDENT:
         idx = tokens[token_idx].start
         token_idx -= 1
-    else:
-        while tokens[token_idx].type == TokenType.WHITESPACE:
-            idx = tokens[token_idx].start
-            token_idx -= 1
 
-    yield from _complete(lsp, ast_node, tokens, idx, token_idx, next_token)
+    while tokens[token_idx].type == TokenType.WHITESPACE:
+        idx = tokens[token_idx].start
+        token_idx -= 1
+
+    child_node = ast_node.get_child_at(idx)
+    # If the cursor is at the end of a node, completions should be for the next child of the parent, unless the node
+    # is incomplete.
+    while (
+        child_node.range.end == idx
+        and not child_node.incomplete
+        and child_node.parent is not None
+    ):
+        child_node = child_node.parent
+
+    yield from _complete(lsp, child_node, tokens, idx, token_idx, next_token)
 
 
 @completer([language.GtkDirective])
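One subtlety in the rewritten `complete()`: when the cursor offset equals the end of a node that parsed completely, suggestions should come from the enclosing node rather than the finished one. A rough, self-contained illustration of that walk-up loop (toy classes and made-up offsets, not the real parser output):

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class ToyNode:
    end: int
    incomplete: bool = False
    parent: Optional["ToyNode"] = None


ui = ToyNode(end=40)              # enclosing blueprint document
obj = ToyNode(end=25, parent=ui)  # a fully parsed `Box { ... }` block

node, idx = obj, 25  # cursor sits right after the block's closing brace
while node.end == idx and not node.incomplete and node.parent is not None:
    node = node.parent

# Completions are generated for the enclosing document, so a new
# top-level statement can be suggested instead of more object content.
assert node is ui
```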
@@ -163,7 +177,13 @@ def _available_namespace_completions(ctx: CompletionContext):
 
 
 @completer(
-    applies_in=[language.UI, language.ObjectContent, language.Template],
+    applies_in=[
+        language.UI,
+        language.ObjectContent,
+        language.Template,
+        language.TypeName,
+        language.BracketedTypeName,
+    ],
     matches=new_statement_patterns,
 )
 def namespace(ctx: CompletionContext):
@@ -184,7 +204,13 @@ def namespace(ctx: CompletionContext):
 
 
 @completer(
-    applies_in=[language.UI, language.ObjectContent, language.Template],
+    applies_in=[
+        language.UI,
+        language.ObjectContent,
+        language.Template,
+        language.TypeName,
+        language.BracketedTypeName,
+    ],
     matches=[
         [(TokenType.IDENT, None), (TokenType.OP, "."), (TokenType.IDENT, None)],
         [(TokenType.IDENT, None), (TokenType.OP, ".")],
@@ -195,7 +221,11 @@ def object_completer(ctx: CompletionContext):
     if ns is not None:
         for c in ns.classes.values():
             snippet = c.name
-            if str(ctx.next_token) != "{":
+            if (
+                str(ctx.next_token) != "{"
+                and not isinstance(ctx.ast_node, language.TypeName)
+                and not isinstance(ctx.ast_node, language.BracketedTypeName)
+            ):
                 snippet += " {\n  $0\n}"
 
             yield Completion(
@@ -209,7 +239,13 @@ def object_completer(ctx: CompletionContext):
 
 
 @completer(
-    applies_in=[language.UI, language.ObjectContent, language.Template],
+    applies_in=[
+        language.UI,
+        language.ObjectContent,
+        language.Template,
+        language.TypeName,
+        language.BracketedTypeName,
+    ],
     matches=new_statement_patterns,
 )
 def gtk_object_completer(ctx: CompletionContext):
@@ -217,7 +253,11 @@ def gtk_object_completer(ctx: CompletionContext):
     if ns is not None:
         for c in ns.classes.values():
             snippet = c.name
-            if str(ctx.next_token) != "{":
+            if (
+                str(ctx.next_token) != "{"
+                and not isinstance(ctx.ast_node, language.TypeName)
+                and not isinstance(ctx.ast_node, language.BracketedTypeName)
+            ):
                 snippet += " {\n  $0\n}"
 
             yield Completion(
@@ -229,6 +269,17 @@ def gtk_object_completer(ctx: CompletionContext):
                 detail=c.detail,
             )
 
+    if isinstance(ctx.ast_node, language.BracketedTypeName) or (
+        isinstance(ctx.ast_node, language.TypeName)
+        and not isinstance(ctx.ast_node, language.ClassName)
+    ):
+        for basic_type in gir.BASIC_TYPES:
+            yield Completion(
+                basic_type,
+                CompletionItemKind.Class,
+                sort_text=get_sort_key(CompletionPriority.CLASS, basic_type),
+            )
+
 
 @completer(
     applies_in=[language.ObjectContent],
@@ -270,6 +321,8 @@ def prop_value_completer(ctx: CompletionContext):
         )
 
     if (vt := ctx.ast_node.value_type) is not None:
+        assert isinstance(vt, ValueTypeCtx)
+
         if isinstance(vt.value_type, gir.Enumeration):
             for name, member in vt.value_type.members.items():
                 yield Completion(
@@ -295,22 +348,14 @@ def prop_value_completer(ctx: CompletionContext):
         elif isinstance(vt.value_type, gir.Class) or isinstance(
             vt.value_type, gir.Interface
         ):
-            yield Completion(
-                "null",
-                CompletionItemKind.Constant,
-                sort_text=get_sort_key(CompletionPriority.KEYWORD, "null"),
-            )
+            if vt.allow_null:
+                yield Completion(
+                    "null",
+                    CompletionItemKind.Constant,
+                    sort_text=get_sort_key(CompletionPriority.KEYWORD, "null"),
+                )
 
-            for id, obj in ctx.ast_node.root.context[language.ScopeCtx].objects.items():
-                if obj.gir_class is not None and obj.gir_class.assignable_to(
-                    vt.value_type
-                ):
-                    yield Completion(
-                        id,
-                        CompletionItemKind.Variable,
-                        signature=" " + obj.signature,
-                        sort_text=get_sort_key(CompletionPriority.NAMED_OBJECT, id),
-                    )
+            yield from get_object_id_completions(ctx, vt.value_type)
 
     if isinstance(ctx.ast_node, language.Property):
         yield from _available_namespace_completions(ctx)
@@ -416,7 +461,13 @@ def complete_response_id(ctx: CompletionContext):
             (TokenType.IDENT, "response"),
             (TokenType.OP, "="),
             (TokenType.IDENT, None),
-        ]
+        ],
+        [
+            (TokenType.IDENT, "action"),
+            (TokenType.IDENT, "response"),
+            (TokenType.OP, "="),
+            (TokenType.NUMBER, None),
+        ],
     ],
 )
 def complete_response_default(ctx: CompletionContext):
@@ -22,7 +22,7 @@ import typing as T
 from dataclasses import dataclass
 from enum import Enum
 
-from . import gir
+from . import gir, language
 from .ast_utils import AstNode
 from .lsp_utils import Completion, CompletionItemKind
 from .tokenizer import Token, TokenType
@@ -57,14 +57,21 @@ new_statement_patterns = [
     [(TokenType.PUNCTUATION, "}")],
     [(TokenType.PUNCTUATION, "]")],
     [(TokenType.PUNCTUATION, ";")],
+    [(TokenType.OP, "<")],
 ]
 
 
+completers = []
+
+
 def completer(applies_in: T.List, matches: T.List = [], applies_in_subclass=None):
     def decorator(func: T.Callable[[CompletionContext], T.Generator[Completion]]):
         def inner(
             prev_tokens: T.List[Token], next_token: Token, ast_node, lsp, idx: int
         ):
+            if not any(isinstance(ast_node, rule) for rule in applies_in):
+                return
+
             # For completers that apply in ObjectContent nodes, we can further
             # check that the object is the right class
             if applies_in_subclass is not None:
@@ -118,8 +125,7 @@ def completer(applies_in: T.List, matches: T.List = [], applies_in_subclass=None):
             )
             yield from func(context)
 
-        for c in applies_in:
-            c.completers.append(inner)
+        completers.append(inner)
         return inner
 
     return decorator
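Taken together, these two hunks replace the per-node-class completer lists with a single module-level `completers` registry; each decorated function is wrapped so that it performs the `isinstance` filtering itself. A minimal standalone sketch of that decorator-registry pattern (toy names and signatures, not the project's):

```python
import typing as T

completers: T.List[T.Callable] = []


def completer(applies_in: T.List[type]):
    def decorator(func: T.Callable):
        def inner(node):
            # The registry is global, so each wrapper filters by node type itself.
            if not any(isinstance(node, rule) for rule in applies_in):
                return
            yield from func(node)

        completers.append(inner)
        return inner

    return decorator


class ObjectContent: ...


@completer(applies_in=[ObjectContent])
def property_completer(node):
    yield "visible: true;"


# Dispatch: run every registered completer against the current node.
suggestions = [s for c in completers for s in c(ObjectContent())]
assert suggestions == ["visible: true;"]
```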
@@ -157,3 +163,18 @@ def get_property_completion(
         snippet=snippet,
         docs=doc,
     )
+
+
+def get_object_id_completions(
+    ctx: CompletionContext, value_type: T.Optional[gir.GirType] = None
+):
+    for id, obj in ctx.ast_node.root.context[language.ScopeCtx].objects.items():
+        if value_type is None or (
+            obj.gir_class is not None and obj.gir_class.assignable_to(value_type)
+        ):
+            yield Completion(
+                id,
+                CompletionItemKind.Variable,
+                signature=" " + obj.signature,
+                sort_text=get_sort_key(CompletionPriority.NAMED_OBJECT, id),
+            )
@@ -92,31 +92,38 @@ class CompileError(PrintableError):
     def pretty_print(self, filename: str, code: str, stream=sys.stdout) -> None:
         assert self.range is not None
 
-        line_num, col_num = utils.idx_to_pos(self.range.start + 1, code)
-        end_line_num, end_col_num = utils.idx_to_pos(self.range.end + 1, code)
-        line = code.splitlines(True)[line_num] if code != "" else ""
+        def format_line(range: Range):
+            line_num, col_num = utils.idx_to_pos(range.start, code)
+            end_line_num, end_col_num = utils.idx_to_pos(range.end, code)
+            line = code.splitlines(True)[line_num] if code != "" else ""
 
-        # Display 1-based line numbers
-        line_num += 1
-        end_line_num += 1
+            # Display 1-based line numbers
+            line_num += 1
+            end_line_num += 1
+            col_num += 1
+            end_col_num += 1
 
-        n_spaces = col_num - 1
-        n_carets = (
-            (end_col_num - col_num)
-            if line_num == end_line_num
-            else (len(line) - n_spaces - 1)
-        )
+            n_spaces = col_num - 1
+            n_carets = (
+                (end_col_num - col_num)
+                if line_num == end_line_num
+                else (len(line) - n_spaces - 1)
+            )
 
-        n_spaces += line.count("\t", 0, col_num)
-        n_carets += line.count("\t", col_num, col_num + n_carets)
-        line = line.replace("\t", " ")
+            n_spaces += line.count("\t", 0, col_num)
+            n_carets += line.count("\t", col_num, col_num + n_carets)
+            line = line.replace("\t", " ")
 
-        n_carets = max(n_carets, 1)
+            n_carets = max(n_carets, 1)
 
+            return line_num, col_num, line.rstrip(), (" " * n_spaces) + ("^" * n_carets)
+
+        line_num, col_num, line, carets = format_line(self.range)
+
         stream.write(
             f"""{self.color}{Colors.BOLD}{self.category}: {self.message}{Colors.CLEAR}
 at {filename} line {line_num} column {col_num}:
-{Colors.FAINT}{line_num :>4} |{Colors.CLEAR}{line.rstrip()}\n {Colors.FAINT}|{" "*n_spaces}{"^"*n_carets}{Colors.CLEAR}\n"""
+{Colors.FAINT}{line_num :>4} |{Colors.CLEAR}{line}\n {Colors.FAINT}|{carets}{Colors.CLEAR}\n"""
         )
 
         for hint in self.hints:
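`format_line` turns a source range into a (line, column, stripped line, caret underline) tuple so that the main error and each `ErrorReference` are rendered the same way. A small worked example of the underline arithmetic it performs, with columns chosen by hand for illustration (single-line range, no tabs):

```python
# Suppose an error range covers the misspelled property name "orientatio"
# on this line, columns 3-13 (1-based). The underline is built from spaces
# and carets, mirroring what format_line returns.
line = '  orientatio: "vertical";'
col_num, end_col_num = 3, 13

n_spaces = col_num - 1
n_carets = max(end_col_num - col_num, 1)

print(line)
print(" " * n_spaces + "^" * n_carets)
# Output:
#   orientatio: "vertical";
#   ^^^^^^^^^^
```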
@@ -141,14 +148,12 @@ at {filename} line {line_num} column {col_num}:
             )
 
         for ref in self.references:
-            line_num, col_num = utils.idx_to_pos(ref.range.start + 1, code)
-            line = code.splitlines(True)[line_num]
-            line_num += 1
+            line_num, col_num, line, carets = format_line(ref.range)
 
             stream.write(
                 f"""{Colors.FAINT}note: {ref.message}:
 at {filename} line {line_num} column {col_num}:
-{Colors.FAINT}{line_num :>4} |{line.rstrip()}\n {Colors.FAINT}|{" "*(col_num-1)}^{Colors.CLEAR}\n"""
+{Colors.FAINT}{line_num :>4} |{line}\n {Colors.FAINT}|{carets}{Colors.CLEAR}\n"""
             )
 
         stream.write("\n")
@@ -289,7 +289,7 @@ class TypeType(BasicType):
         return isinstance(other, TypeType)
 
 
-_BASIC_TYPES = {
+BASIC_TYPES = {
     "bool": BoolType,
     "string": StringType,
     "int": IntType,
@@ -914,7 +914,7 @@ class Namespace(GirNode):
 
     def get_type_by_cname(self, cname: str) -> T.Optional[GirType]:
         """Gets a type from this namespace by its C name."""
-        for basic in _BASIC_TYPES.values():
+        for basic in BASIC_TYPES.values():
             if basic.glib_type_name == cname:
                 return basic()
 
@@ -1036,8 +1036,8 @@ class GirContext:
         return None
 
     def get_type(self, name: str, ns: str) -> T.Optional[GirType]:
-        if ns is None and name in _BASIC_TYPES:
-            return _BASIC_TYPES[name]()
+        if ns is None and name in BASIC_TYPES:
+            return BASIC_TYPES[name]()
 
         ns = ns or "Gtk"
 
@@ -44,7 +44,7 @@ from .gtkbuilder_child import (
 from .gtkbuilder_template import Template
 from .imports import GtkDirective, Import
 from .response_id import ExtResponse
-from .types import ClassName
+from .types import BracketedTypeName, ClassName, TypeName
 from .ui import UI
 from .values import (
     ArrayValue,
@@ -34,6 +34,7 @@ from ..errors import (
     CompileError,
     CompileWarning,
     DeprecatedWarning,
+    ErrorReference,
     MultipleErrors,
     UnusedWarning,
     UpgradeWarning,
@@ -48,7 +48,7 @@ class ScopeCtx:
         return self.node
 
     @cached_property
-    def objects(self) -> T.Dict[str, Object]:
+    def objects(self) -> T.Dict[str, AstNode]:
         return {
             obj.tokens["id"]: obj
             for obj in self._iter_recursive(self.node)
@@ -58,7 +58,7 @@ class ScopeCtx:
     def validate_unique_ids(self) -> None:
         from .gtk_list_item_factory import ExtListItemFactory
 
-        passed = {}
+        passed: T.Dict[str, AstNode] = {}
         for obj in self._iter_recursive(self.node):
             if obj.tokens["id"] is None:
                 continue
@@ -71,10 +71,16 @@ class ScopeCtx:
                 raise CompileError(
                     f"Duplicate object ID '{obj.tokens['id']}'",
                     token.range,
+                    references=[
+                        ErrorReference(
+                            passed[obj.tokens["id"]].group.tokens["id"].range,
+                            "previous declaration was here",
+                        )
+                    ],
                 )
             passed[obj.tokens["id"]] = obj
 
-    def _iter_recursive(self, node: AstNode):
+    def _iter_recursive(self, node: AstNode) -> T.Generator[AstNode, T.Any, None]:
         yield node
         for child in node.children:
             if child.context[ScopeCtx] is self:
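The duplicate-ID check keeps a dict of already-seen IDs, and the error now carries an `ErrorReference` pointing at the first declaration so "previous declaration was here" can be shown alongside it. The core bookkeeping is a first-wins dict, sketched here in isolation (toy error type, not the project's `CompileError`):

```python
class DuplicateIdError(Exception):
    def __init__(self, message: str, previous_position: int):
        super().__init__(message)
        # Where the first declaration was seen, so a secondary
        # "previous declaration was here" note can be printed.
        self.previous_position = previous_position


def check_unique_ids(objects):
    """objects: iterable of (object_id, position) pairs."""
    passed = {}
    for object_id, position in objects:
        if object_id is None:
            continue
        if object_id in passed:
            raise DuplicateIdError(
                f"Duplicate object ID '{object_id}'", passed[object_id]
            )
        passed[object_id] = position


check_unique_ids([("box1", 10), ("label1", 42)])  # ok
try:
    check_unique_ids([("box1", 10), ("box1", 99)])
except DuplicateIdError as e:
    assert e.previous_position == 10
```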
@@ -21,7 +21,7 @@
 from ..decompiler import decompile_element
 from .common import *
 from .contexts import ScopeCtx, ValueTypeCtx
-from .types import TypeName
+from .types import BracketedTypeName, TypeName
 
 expr = Sequence()
 
@@ -196,7 +196,7 @@ class CastExpr(InfixExpr):
     grammar = [
         Keyword("as"),
         AnyOf(
-            ["<", TypeName, Match(">").expected()],
+            BracketedTypeName,
             [
                 UseExact("lparen", "("),
                 TypeName,
@@ -211,7 +211,13 @@ class CastExpr(InfixExpr):
 
     @property
     def type(self) -> T.Optional[GirType]:
-        return self.children[TypeName][0].gir_type
+        if len(self.children[BracketedTypeName]) == 1:
+            type_name = self.children[BracketedTypeName][0].type_name
+            return None if type_name is None else type_name.gir_type
+        elif len(self.children[TypeName]) == 1:
+            return self.children[TypeName][0].gir_type
+        else:
+            return None
 
     @validate()
     def cast_makes_sense(self):
@@ -247,3 +247,11 @@ def decompile_signal(
         line += ";"
     ctx.print(line)
     return gir
+
+
+@completer(
+    [Signal],
+    [[(TokenType.PUNCTUATION, "(")]],
+)
+def signal_object_completer(ctx: CompletionContext):
+    yield from get_object_id_completions(ctx)
@@ -27,11 +27,11 @@ class TypeName(AstNode):
         [
             UseIdent("namespace"),
             ".",
-            UseIdent("class_name"),
+            UseIdent("class_name").expected("class name"),
         ],
         [
             AnyOf("$", [".", UseLiteral("old_extern", True)]),
-            UseIdent("class_name"),
+            UseIdent("class_name").expected("class name"),
             UseLiteral("extern", True),
         ],
         UseIdent("class_name"),
@@ -47,7 +47,11 @@ class TypeName(AstNode):
 
     @validate("class_name")
     def type_exists(self):
-        if not self.tokens["extern"] and self.gir_ns is not None:
+        if (
+            not self.tokens["extern"]
+            and self.gir_ns is not None
+            and self.tokens["class_name"] is not None
+        ):
             self.root.gir.validate_type(
                 self.tokens["class_name"], self.tokens["namespace"]
             )
@@ -182,3 +186,14 @@ class TemplateClassName(ClassName):
             self.root.gir.validate_type(
                 self.tokens["class_name"], self.tokens["namespace"]
             )
+
+
+class BracketedTypeName(AstNode):
+    grammar = Statement("<", to_parse_node(TypeName).expected("type name"), end=">")
+
+    @property
+    def type_name(self) -> T.Optional[TypeName]:
+        if len(self.children[TypeName]) == 0:
+            return None
+
+        return self.children[TypeName][0]
@@ -26,7 +26,7 @@ from .common import *
 from .contexts import ExprValueCtx, ScopeCtx, ValueTypeCtx
 from .expression import Expression
 from .gobject_object import Object
-from .types import TypeName
+from .types import BracketedTypeName, TypeName
 
 
 class Translated(AstNode):
@@ -80,11 +80,7 @@ class TypeLiteral(AstNode):
     grammar = [
         "typeof",
         AnyOf(
-            [
-                "<",
-                to_parse_node(TypeName).expected("type name"),
-                Match(">").expected(),
-            ],
+            BracketedTypeName,
             [
                 UseExact("lparen", "("),
                 to_parse_node(TypeName).expected("type name"),
@@ -98,8 +94,13 @@ class TypeLiteral(AstNode):
         return gir.TypeType()
 
     @property
-    def type_name(self) -> TypeName:
-        return self.children[TypeName][0]
+    def type_name(self) -> T.Optional[TypeName]:
+        if len(self.children[BracketedTypeName]) == 1:
+            return self.children[BracketedTypeName][0].type_name
+        elif len(self.children[TypeName]) == 1:
+            return self.children[TypeName][0]
+        else:
+            return None
 
     @validate()
     def validate_for_type(self) -> None:
@@ -209,6 +209,7 @@ class XmlOutput(OutputFormat):
             else:
                 xml.put_text(self._object_id(value, value.ident))
         elif isinstance(value, TypeLiteral):
+            assert value.type_name is not None
             xml.put_text(value.type_name.glib_type_name)
         else:
             if isinstance(value.value, float) and value.value == int(value.value):
@@ -280,9 +280,9 @@ class Err(ParseNode):
                 start_idx -= 1
             start_token = ctx.tokens[start_idx]
 
-            raise CompileError(
-                self.message, Range(start_token.end, start_token.end, ctx.text)
-            )
+            position = start_token.start if ctx.start == start_idx else start_token.end
+
+            raise CompileError(self.message, Range(position, position, ctx.text))
         return True
 
 
@@ -358,7 +358,20 @@ class Statement(ParseNode):
 
         token = ctx.peek_token()
         if str(token) != self.end:
-            ctx.errors.append(CompileError(f"Expected `{self.end}`", token.range))
+            start_idx = ctx.index - 1
+            while ctx.tokens[start_idx].type in SKIP_TOKENS:
+                start_idx -= 1
+            start_token = ctx.tokens[start_idx]
+
+            position = (
+                start_token.start if ctx.index - 1 == start_idx else start_token.end
+            )
+
+            ctx.errors.append(
+                CompileError(
+                    f"Expected `{self.end}`", Range(position, position, ctx.text)
+                )
+            )
         else:
             ctx.next_token()
         return True
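With this change, a missing statement terminator is reported at the end of the last real token before the gap (as a zero-length range) instead of at whatever token happens to come next, which is often on the following line. A hedged sketch of the difference, with made-up token offsets:

```python
from typing import NamedTuple


class Token(NamedTuple):
    text: str
    start: int
    end: int


# A statement is missing its trailing `;`; the next token the parser
# sees is the `}` that starts the following line.
prev_token = Token('"vertical"', start=28, end=38)  # last non-skipped token
next_token = Token("}", start=40, end=41)

old_position = next_token.start  # old: point at the next token's line
new_position = prev_token.end    # new: point right where `;` belongs

assert (old_position, new_position) == (40, 38)
```

That shift is what the updated sample-error expectation below (`6,1,1` becoming `5,4,0`) encodes: the reported location moves from the start of the next line to the end of the previous token, with length 0.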
@@ -76,8 +76,8 @@ def did_you_mean(word: str, options: T.List[str]) -> T.Optional[str]:
 def idx_to_pos(idx: int, text: str) -> T.Tuple[int, int]:
     if idx == 0 or len(text) == 0:
         return (0, 0)
-    line_num = text.count("\n", 0, idx - 1) + 1
-    col_num = idx - text.rfind("\n", 0, idx - 1) - 1
+    line_num = text.count("\n", 0, idx) + 1
+    col_num = idx - text.rfind("\n", 0, idx) - 1
     return (line_num - 1, col_num)
 
 
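`idx_to_pos` converts a character offset into a 0-based (line, column) pair. Dropping the `- 1` fixes an off-by-one for offsets that land on the first character after a newline; callers now add 1 themselves when they want 1-based output. A quick check of the new behaviour (re-declaring the function exactly as shown above):

```python
def idx_to_pos(idx: int, text: str):
    if idx == 0 or len(text) == 0:
        return (0, 0)
    line_num = text.count("\n", 0, idx) + 1
    col_num = idx - text.rfind("\n", 0, idx) - 1
    return (line_num - 1, col_num)


text = "using Gtk 4.0;\nBox {}\n"
# Offset 15 is the `B` of `Box`, i.e. line 2, column 1 in 1-based terms.
assert idx_to_pos(15, text) == (1, 0)
# Offset 0 is the start of the file.
assert idx_to_pos(0, text) == (0, 0)
```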
@@ -1 +1 @@
-1,0,0,File must start with a "using Gtk" directive (e.g. `using Gtk 4.0;`)
+1,1,0,File must start with a "using Gtk" directive (e.g. `using Gtk 4.0;`)
@@ -1 +1 @@
-6,1,1,Expected `;`
+5,4,0,Expected `;`
@@ -143,9 +143,9 @@ class TestSamples(unittest.TestCase):
         ]
 
         def error_str(error: CompileError):
-            line, col = utils.idx_to_pos(error.range.start + 1, blueprint)
+            line, col = utils.idx_to_pos(error.range.start, blueprint)
             len = error.range.length
-            return ",".join([str(line + 1), str(col), str(len), error.message])
+            return ",".join([str(line + 1), str(col + 1), str(len), error.message])
 
         actual = "\n".join([error_str(error) for error in errors])
 