Compare commits

...

9 commits

Author SHA1 Message Date
James Westman
49e6b075f4 Merge branch 'completion-improvements' into 'main'
Completion improvements

See merge request GNOME/blueprint-compiler!243
2025-05-04 01:10:47 +00:00
James Westman
72319b29c6
completions: Don't suggest "null" where not allowed 2025-05-03 14:44:13 -05:00
James Westman
67983aee2e
completions: Object names in signal handlers 2025-05-03 14:39:02 -05:00
James Westman
1205fc42ea
completions: Fix completions in identifiers 2025-05-03 14:27:45 -05:00
James Westman
e9206809d6
completions: Add types in typeof<> and as<> 2025-05-03 14:27:45 -05:00
James Westman
d5b2ee3589
completions: Add GtkScale mark positions 2025-05-03 14:27:45 -05:00
James Westman
8f3ae9a626
parser: Tweak parsing during error conditions
When an explicit parsing error is encountered and a CompileError raised,
apply the changes to the context state. This way, the rule that catches
the exception (e.g. Statement or Until) knows where the error occurred.

Also, changed "Expected" errors to be reported at the end of the
previous non-whitespace token.
2025-05-03 14:27:45 -05:00
James Westman
e5d6910626
completions: Fix generated signal handler name
A syntax error in the snippet caused the generated signal handler name
not to be used.
2025-05-03 14:27:44 -05:00
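For illustration of the fix above, with a hypothetical object id `button` and signal `clicked` (names chosen here, not taken from the diff), the stray `$` in the old f-string put a `$` inside the LSP placeholder, which the snippet parser rejects, while the corrected string yields a well-formed placeholder:

name, signal_name = "button", "clicked"  # hypothetical example values

# Old (broken): the extra "$" before {name} lands inside the ${1:...} placeholder.
old = f"{signal_name} => \\$${{1:${name}_{signal_name.replace('-', '_')}}}()$0;"
# -> clicked => \$${1:$button_clicked}()$0;

# New (fixed): the placeholder default is a plain identifier.
new = f"{signal_name} => \\$${{1:{name}_{signal_name.replace('-', '_')}}}()$0;"
# -> clicked => \$${1:button_clicked}()$0;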
James Westman
b26433d865
completions: Add completions for response IDs 2025-05-03 14:27:44 -05:00
32 changed files with 379 additions and 147 deletions

View file

@@ -196,6 +196,13 @@ class AstNode:
         return None

+    def get_child_at(self, idx: int) -> "AstNode":
+        for child in self.children:
+            if idx in child.range:
+                return child.get_child_at(idx)
+        return self
+
     def get_semantic_tokens(self) -> T.Iterator[SemanticToken]:
         for child in self.children:
             yield from child.get_semantic_tokens()
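The new get_child_at walks down to the deepest node whose range contains the cursor index. A rough standalone illustration, using plain Python range objects as a stand-in for the real AstNode and its token ranges:

# Toy sketch only; Node is a stand-in for AstNode, range() for its source range.
class Node:
    def __init__(self, rng, children=()):
        self.range = rng
        self.children = list(children)

    def get_child_at(self, idx):
        for child in self.children:
            if idx in child.range:
                return child.get_child_at(idx)
        return self

root = Node(range(0, 100), [Node(range(10, 40), [Node(range(20, 30))])])
assert root.get_child_at(25).range == range(20, 30)  # deepest node containing 25
assert root.get_child_at(50) is root                 # no child matches, falls back to self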

View file

@@ -21,7 +21,18 @@ import typing as T

 from . import annotations, gir, language
 from .ast_utils import AstNode
-from .completions_utils import *
+from .completions_utils import (
+    CompletionContext,
+    CompletionItemKind,
+    CompletionPriority,
+    completer,
+    completers,
+    get_object_id_completions,
+    get_property_completion,
+    get_sort_key,
+    new_statement_patterns,
+)
+from .language.contexts import ValueTypeCtx
 from .language.types import ClassName
 from .lsp_utils import Completion, CompletionItemKind, TextEdit, get_docs_section
 from .parser import SKIP_TOKENS

@@ -38,13 +49,6 @@ def _complete(
     token_idx: int,
     next_token: Token,
 ) -> T.Iterator[Completion]:
-    for child in ast_node.children:
-        if child.group.start <= idx and (
-            idx < child.group.end or (idx == child.group.end and child.incomplete)
-        ):
-            yield from _complete(lsp, child, tokens, idx, token_idx, next_token)
-            return
-
     prev_tokens: T.List[Token] = []

     # collect the 5 previous non-skipped tokens
@@ -54,7 +58,7 @@
             prev_tokens.insert(0, token)
         token_idx -= 1

-    for completer in ast_node.completers:
+    for completer in completers:
         yield from completer(prev_tokens, next_token, ast_node, lsp, idx)
@@ -76,11 +80,25 @@ def complete(
     next_token = tokens[next_token_idx]

     # if the current token is an identifier or whitespace, move to the token before it
-    while tokens[token_idx].type in [TokenType.IDENT, TokenType.WHITESPACE]:
+    if tokens[token_idx].type == TokenType.IDENT:
         idx = tokens[token_idx].start
         token_idx -= 1

-    yield from _complete(lsp, ast_node, tokens, idx, token_idx, next_token)
+    while tokens[token_idx].type == TokenType.WHITESPACE:
+        idx = tokens[token_idx].start
+        token_idx -= 1
+
+    child_node = ast_node.get_child_at(idx)
+
+    # If the cursor is at the end of a node, completions should be for the next child of the parent, unless the node
+    # is incomplete.
+    while (
+        child_node.range.end == idx
+        and not child_node.incomplete
+        and child_node.parent is not None
+    ):
+        child_node = child_node.parent
+
+    yield from _complete(lsp, child_node, tokens, idx, token_idx, next_token)


 @completer([language.GtkDirective])
@@ -159,7 +177,13 @@ def _available_namespace_completions(ctx: CompletionContext):

 @completer(
-    applies_in=[language.UI, language.ObjectContent, language.Template],
+    applies_in=[
+        language.UI,
+        language.ObjectContent,
+        language.Template,
+        language.TypeName,
+        language.BracketedTypeName,
+    ],
     matches=new_statement_patterns,
 )
 def namespace(ctx: CompletionContext):
@@ -180,7 +204,13 @@ def namespace(ctx: CompletionContext):

 @completer(
-    applies_in=[language.UI, language.ObjectContent, language.Template],
+    applies_in=[
+        language.UI,
+        language.ObjectContent,
+        language.Template,
+        language.TypeName,
+        language.BracketedTypeName,
+    ],
     matches=[
         [(TokenType.IDENT, None), (TokenType.OP, "."), (TokenType.IDENT, None)],
         [(TokenType.IDENT, None), (TokenType.OP, ".")],
@@ -191,7 +221,11 @@ def object_completer(ctx: CompletionContext):
     if ns is not None:
         for c in ns.classes.values():
             snippet = c.name
-            if str(ctx.next_token) != "{":
+            if (
+                str(ctx.next_token) != "{"
+                and not isinstance(ctx.ast_node, language.TypeName)
+                and not isinstance(ctx.ast_node, language.BracketedTypeName)
+            ):
                 snippet += " {\n  $0\n}"

             yield Completion(
@@ -205,7 +239,13 @@ def object_completer(ctx: CompletionContext):

 @completer(
-    applies_in=[language.UI, language.ObjectContent, language.Template],
+    applies_in=[
+        language.UI,
+        language.ObjectContent,
+        language.Template,
+        language.TypeName,
+        language.BracketedTypeName,
+    ],
     matches=new_statement_patterns,
 )
 def gtk_object_completer(ctx: CompletionContext):
@@ -213,7 +253,11 @@ def gtk_object_completer(ctx: CompletionContext):
     if ns is not None:
         for c in ns.classes.values():
             snippet = c.name
-            if str(ctx.next_token) != "{":
+            if (
+                str(ctx.next_token) != "{"
+                and not isinstance(ctx.ast_node, language.TypeName)
+                and not isinstance(ctx.ast_node, language.BracketedTypeName)
+            ):
                 snippet += " {\n  $0\n}"

             yield Completion(
@@ -225,6 +269,17 @@ def gtk_object_completer(ctx: CompletionContext):
                 detail=c.detail,
             )

+    if isinstance(ctx.ast_node, language.BracketedTypeName) or (
+        isinstance(ctx.ast_node, language.TypeName)
+        and not isinstance(ctx.ast_node, language.ClassName)
+    ):
+        for basic_type in gir.BASIC_TYPES:
+            yield Completion(
+                basic_type,
+                CompletionItemKind.Class,
+                sort_text=get_sort_key(CompletionPriority.CLASS, basic_type),
+            )
+

 @completer(
     applies_in=[language.ObjectContent],
@@ -236,7 +291,7 @@ def property_completer(ctx: CompletionContext):
         for prop_name, prop in ctx.ast_node.gir_class.properties.items():
             yield get_property_completion(
                 prop_name,
-                prop,
+                prop.type,
                 ctx,
                 annotations.is_property_translated(prop),
                 prop.doc,
@@ -245,7 +300,11 @@ def property_completer(ctx: CompletionContext):

 @completer(
     applies_in=[language.Property, language.A11yProperty],
-    matches=[[(TokenType.IDENT, None), (TokenType.OP, ":")]],
+    matches=[
+        [(TokenType.IDENT, None), (TokenType.OP, ":")],
+        [(TokenType.PUNCTUATION, ",")],
+        [(TokenType.PUNCTUATION, "[")],
+    ],
 )
 def prop_value_completer(ctx: CompletionContext):
     if isinstance(ctx.ast_node, language.Property):
@@ -262,6 +321,8 @@ def prop_value_completer(ctx: CompletionContext):
             )

     if (vt := ctx.ast_node.value_type) is not None:
+        assert isinstance(vt, ValueTypeCtx)
+
         if isinstance(vt.value_type, gir.Enumeration):
             for name, member in vt.value_type.members.items():
                 yield Completion(
@@ -287,22 +348,14 @@ def prop_value_completer(ctx: CompletionContext):
         elif isinstance(vt.value_type, gir.Class) or isinstance(
             vt.value_type, gir.Interface
         ):
-            yield Completion(
-                "null",
-                CompletionItemKind.Constant,
-                sort_text=get_sort_key(CompletionPriority.KEYWORD, "null"),
-            )
-            for id, obj in ctx.ast_node.root.context[language.ScopeCtx].objects.items():
-                if obj.gir_class is not None and obj.gir_class.assignable_to(
-                    vt.value_type
-                ):
-                    yield Completion(
-                        id,
-                        CompletionItemKind.Variable,
-                        signature=" " + obj.signature,
-                        sort_text=get_sort_key(CompletionPriority.NAMED_OBJECT, id),
-                    )
+            if vt.allow_null:
+                yield Completion(
+                    "null",
+                    CompletionItemKind.Constant,
+                    sort_text=get_sort_key(CompletionPriority.KEYWORD, "null"),
+                )
+
+            yield from get_object_id_completions(ctx, vt.value_type)

     if isinstance(ctx.ast_node, language.Property):
         yield from _available_namespace_completions(ctx)
@@ -348,7 +401,7 @@ def signal_completer(ctx: CompletionContext):
                 .lower()
             )

-        snippet = f"{signal_name} => \\$${{1:${name}_{signal_name.replace('-', '_')}}}()$0;"
+        snippet = f"{signal_name} => \\$${{1:{name}_{signal_name.replace('-', '_')}}}()$0;"

         yield Completion(
             signal_name,
@@ -367,3 +420,58 @@ def template_completer(_ctx: CompletionContext):
         CompletionItemKind.Snippet,
         snippet="template ${1:ClassName} : ${2:ParentClass} {\n  $0\n}",
     )
+
+
+@completer(
+    applies_in=[language.ObjectContent, language.ChildType],
+    matches=[[(TokenType.PUNCTUATION, "[")]],
+    applies_in_subclass=[("Gtk", "Dialog"), ("Gtk", "InfoBar")],
+)
+def response_id_completer(ctx: CompletionContext):
+    yield Completion(
+        "action",
+        CompletionItemKind.Snippet,
+        sort_text=get_sort_key(CompletionPriority.KEYWORD, "action"),
+        snippet="action response=$0",
+    )
+
+
+@completer(
+    [language.ChildAnnotation, language.ExtResponse],
+    [[(TokenType.IDENT, "action"), (TokenType.IDENT, "response"), (TokenType.OP, "=")]],
+)
+def complete_response_id(ctx: CompletionContext):
+    gir = ctx.ast_node.root.gir
+    response_type = gir.get_type("ResponseType", "Gtk")
+    yield from [
+        Completion(
+            name,
+            kind=CompletionItemKind.EnumMember,
+            docs=member.doc,
+        )
+        for name, member in response_type.members.items()
+    ]
+
+
+@completer(
+    [language.ChildAnnotation, language.ExtResponse],
+    [
+        [
+            (TokenType.IDENT, "action"),
+            (TokenType.IDENT, "response"),
+            (TokenType.OP, "="),
+            (TokenType.IDENT, None),
+        ],
+        [
+            (TokenType.IDENT, "action"),
+            (TokenType.IDENT, "response"),
+            (TokenType.OP, "="),
+            (TokenType.NUMBER, None),
+        ],
+    ],
+)
+def complete_response_default(ctx: CompletionContext):
+    yield Completion(
+        "default",
+        kind=CompletionItemKind.Keyword,
+    )

View file

@@ -22,7 +22,7 @@ import typing as T
 from dataclasses import dataclass
 from enum import Enum

-from . import gir
+from . import gir, language
 from .ast_utils import AstNode
 from .lsp_utils import Completion, CompletionItemKind
 from .tokenizer import Token, TokenType

@@ -57,21 +57,40 @@ new_statement_patterns = [
     [(TokenType.PUNCTUATION, "}")],
     [(TokenType.PUNCTUATION, "]")],
     [(TokenType.PUNCTUATION, ";")],
+    [(TokenType.OP, "<")],
 ]


+completers = []
+
+
 def completer(applies_in: T.List, matches: T.List = [], applies_in_subclass=None):
     def decorator(func: T.Callable[[CompletionContext], T.Generator[Completion]]):
         def inner(
             prev_tokens: T.List[Token], next_token: Token, ast_node, lsp, idx: int
         ):
+            if not any(isinstance(ast_node, rule) for rule in applies_in):
+                return
+
             # For completers that apply in ObjectContent nodes, we can further
             # check that the object is the right class
             if applies_in_subclass is not None:
-                type = ast_node.root.gir.get_type(
-                    applies_in_subclass[1], applies_in_subclass[0]
-                )
-                if not ast_node.gir_class or not ast_node.gir_class.assignable_to(type):
+                parent_obj = ast_node
+                while parent_obj is not None and not hasattr(parent_obj, "gir_class"):
+                    parent_obj = parent_obj.parent
+
+                if (
+                    parent_obj is None
+                    or not parent_obj.gir_class
+                    or not any(
+                        [
+                            parent_obj.gir_class.assignable_to(
+                                parent_obj.root.gir.get_type(c[1], c[0])
+                            )
+                            for c in applies_in_subclass
+                        ]
+                    )
+                ):
                     return

             any_match = len(matches) == 0
@@ -106,8 +125,7 @@ def completer(applies_in: T.List, matches: T.List = [], applies_in_subclass=None):
             )

             yield from func(context)

-        for c in applies_in:
-            c.completers.append(inner)
+        completers.append(inner)

         return inner

     return decorator
@@ -145,3 +163,18 @@ def get_property_completion(
         snippet=snippet,
         docs=doc,
     )
+
+
+def get_object_id_completions(
+    ctx: CompletionContext, value_type: T.Optional[gir.GirType] = None
+):
+    for id, obj in ctx.ast_node.root.context[language.ScopeCtx].objects.items():
+        if value_type is None or (
+            obj.gir_class is not None and obj.gir_class.assignable_to(value_type)
+        ):
+            yield Completion(
+                id,
+                CompletionItemKind.Variable,
+                signature=" " + obj.signature,
+                sort_text=get_sort_key(CompletionPriority.NAMED_OBJECT, id),
+            )
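Under the reworked registration above, completers live in one module-level completers list and the applies_in / applies_in_subclass checks run when a completion is requested, so a single completer can target several AST node types and several widget classes. A hypothetical completer written against the new decorator might look like this (the node type, widget class, and snippet are illustrative, not part of this changeset):

# Hypothetical example of the new-style registration.
@completer(
    applies_in=[language.ObjectContent],
    applies_in_subclass=[("Gtk", "Widget")],  # list form: matches any listed class
    matches=new_statement_patterns,
)
def margin_completer(ctx: CompletionContext):
    yield Completion(
        "margin-top",
        CompletionItemKind.Property,
        snippet="margin-top: $0;",
    )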

View file

@@ -92,13 +92,16 @@ class CompileError(PrintableError):
     def pretty_print(self, filename: str, code: str, stream=sys.stdout) -> None:
         assert self.range is not None

-        line_num, col_num = utils.idx_to_pos(self.range.start + 1, code)
-        end_line_num, end_col_num = utils.idx_to_pos(self.range.end + 1, code)
+        def format_line(range: Range):
+            line_num, col_num = utils.idx_to_pos(range.start, code)
+            end_line_num, end_col_num = utils.idx_to_pos(range.end, code)
             line = code.splitlines(True)[line_num] if code != "" else ""

             # Display 1-based line numbers
             line_num += 1
             end_line_num += 1
+            col_num += 1
+            end_col_num += 1

             n_spaces = col_num - 1
             n_carets = (
@@ -111,10 +114,16 @@ class CompileError(PrintableError):
             n_carets += line.count("\t", col_num, col_num + n_carets)
             line = line.replace("\t", " ")
+            n_carets = max(n_carets, 1)
+
+            return line_num, col_num, line.rstrip(), (" " * n_spaces) + ("^" * n_carets)
+
+        line_num, col_num, line, carets = format_line(self.range)

         stream.write(
             f"""{self.color}{Colors.BOLD}{self.category}: {self.message}{Colors.CLEAR}
 at {filename} line {line_num} column {col_num}:
-{Colors.FAINT}{line_num :>4} |{Colors.CLEAR}{line.rstrip()}\n     {Colors.FAINT}|{" "*n_spaces}{"^"*n_carets}{Colors.CLEAR}\n"""
+{Colors.FAINT}{line_num :>4} |{Colors.CLEAR}{line}\n     {Colors.FAINT}|{carets}{Colors.CLEAR}\n"""
         )

         for hint in self.hints:
@@ -139,14 +148,12 @@ at {filename} line {line_num} column {col_num}:
             )

         for ref in self.references:
-            line_num, col_num = utils.idx_to_pos(ref.range.start + 1, code)
-            line = code.splitlines(True)[line_num]
-            line_num += 1
+            line_num, col_num, line, carets = format_line(ref.range)
             stream.write(
                 f"""{Colors.FAINT}note: {ref.message}:
 at {filename} line {line_num} column {col_num}:
-{Colors.FAINT}{line_num :>4} |{line.rstrip()}\n     {Colors.FAINT}|{" "*(col_num-1)}^{Colors.CLEAR}\n"""
+{Colors.FAINT}{line_num :>4} |{line}\n     {Colors.FAINT}|{carets}{Colors.CLEAR}\n"""
             )

         stream.write("\n")

View file

@@ -289,7 +289,7 @@ class TypeType(BasicType):
         return isinstance(other, TypeType)


-_BASIC_TYPES = {
+BASIC_TYPES = {
     "bool": BoolType,
     "string": StringType,
     "int": IntType,
@@ -914,7 +914,7 @@ class Namespace(GirNode):
     def get_type_by_cname(self, cname: str) -> T.Optional[GirType]:
         """Gets a type from this namespace by its C name."""
-        for basic in _BASIC_TYPES.values():
+        for basic in BASIC_TYPES.values():
             if basic.glib_type_name == cname:
                 return basic()
@@ -1036,8 +1036,8 @@ class GirContext:
         return None

     def get_type(self, name: str, ns: str) -> T.Optional[GirType]:
-        if ns is None and name in _BASIC_TYPES:
-            return _BASIC_TYPES[name]()
+        if ns is None and name in BASIC_TYPES:
+            return BASIC_TYPES[name]()

         ns = ns or "Gtk"

View file

@@ -34,10 +34,17 @@ from .gtk_scale import ExtScaleMarks
 from .gtk_size_group import ExtSizeGroupWidgets
 from .gtk_string_list import ExtStringListStrings
 from .gtk_styles import ExtStyles
-from .gtkbuilder_child import Child, ChildExtension, ChildInternal, ChildType
+from .gtkbuilder_child import (
+    Child,
+    ChildAnnotation,
+    ChildExtension,
+    ChildInternal,
+    ChildType,
+)
 from .gtkbuilder_template import Template
 from .imports import GtkDirective, Import
-from .types import ClassName
+from .response_id import ExtResponse
+from .types import BracketedTypeName, ClassName, TypeName
 from .ui import UI
 from .values import (
     ArrayValue,

View file

@@ -140,7 +140,7 @@ class ExtAdwResponseDialog(AstNode):

 @completer(
     applies_in=[ObjectContent],
-    applies_in_subclass=("Adw", "MessageDialog"),
+    applies_in_subclass=[("Adw", "AlertDialog"), ("Adw", "MessageDialog")],
     matches=new_statement_patterns,
 )
 def complete_adw_message_dialog(_ctx: CompletionContext):
@@ -149,20 +149,6 @@ def complete_adw_message_dialog(_ctx: CompletionContext):
     )


-@completer(
-    applies_in=[ObjectContent],
-    applies_in_subclass=("Adw", "AlertDialog"),
-    matches=new_statement_patterns,
-)
-def complete_adw_alert_dialog(_ctx: CompletionContext):
-    yield Completion(
-        "responses",
-        CompletionItemKind.Keyword,
-        snippet="responses [\n\t$0\n]",
-        sort_text=get_sort_key(CompletionPriority.OBJECT_MEMBER, "responses"),
-    )
-
-
 @decompiler("responses")
 def decompile_responses(ctx, gir):
     ctx.print(f"responses [")

View file

@@ -34,6 +34,7 @@ from ..errors import (
     CompileError,
     CompileWarning,
     DeprecatedWarning,
+    ErrorReference,
     MultipleErrors,
     UnusedWarning,
     UpgradeWarning,

View file

@@ -48,7 +48,7 @@ class ScopeCtx:
             return self.node

     @cached_property
-    def objects(self) -> T.Dict[str, Object]:
+    def objects(self) -> T.Dict[str, AstNode]:
         return {
             obj.tokens["id"]: obj
             for obj in self._iter_recursive(self.node)
@@ -58,7 +58,7 @@ class ScopeCtx:
     def validate_unique_ids(self) -> None:
         from .gtk_list_item_factory import ExtListItemFactory

-        passed = {}
+        passed: T.Dict[str, AstNode] = {}
         for obj in self._iter_recursive(self.node):
             if obj.tokens["id"] is None:
                 continue
@@ -71,10 +71,16 @@ class ScopeCtx:
                     raise CompileError(
                         f"Duplicate object ID '{obj.tokens['id']}'",
                         token.range,
+                        references=[
+                            ErrorReference(
+                                passed[obj.tokens["id"]].group.tokens["id"].range,
+                                "previous declaration was here",
+                            )
+                        ],
                     )
             passed[obj.tokens["id"]] = obj

-    def _iter_recursive(self, node: AstNode):
+    def _iter_recursive(self, node: AstNode) -> T.Generator[AstNode, T.Any, None]:
         yield node
         for child in node.children:
             if child.context[ScopeCtx] is self:

View file

@@ -21,7 +21,7 @@
 from ..decompiler import decompile_element
 from .common import *
 from .contexts import ScopeCtx, ValueTypeCtx
-from .types import TypeName
+from .types import BracketedTypeName, TypeName

 expr = Sequence()

@@ -196,7 +196,7 @@ class CastExpr(InfixExpr):
     grammar = [
         Keyword("as"),
         AnyOf(
-            ["<", TypeName, Match(">").expected()],
+            BracketedTypeName,
             [
                 UseExact("lparen", "("),
                 TypeName,
@@ -211,7 +211,13 @@ class CastExpr(InfixExpr):
     @property
     def type(self) -> T.Optional[GirType]:
+        if len(self.children[BracketedTypeName]) == 1:
+            type_name = self.children[BracketedTypeName][0].type_name
+            return None if type_name is None else type_name.gir_type
+        elif len(self.children[TypeName]) == 1:
             return self.children[TypeName][0].gir_type
+        else:
+            return None

     @validate()
     def cast_makes_sense(self):

View file

@@ -247,3 +247,11 @@ def decompile_signal(
         line += ";"
     ctx.print(line)
     return gir
+
+
+@completer(
+    [Signal],
+    [[(TokenType.PUNCTUATION, "(")]],
+)
+def signal_object_completer(ctx: CompletionContext):
+    yield from get_object_id_completions(ctx)

View file

@@ -91,7 +91,7 @@ class ExtComboBoxItems(AstNode):

 @completer(
     applies_in=[ObjectContent],
-    applies_in_subclass=("Gtk", "ComboBoxText"),
+    applies_in_subclass=[("Gtk", "ComboBoxText")],
     matches=new_statement_patterns,
 )
 def items_completer(_ctx: CompletionContext):

View file

@@ -98,7 +98,7 @@ ext_file_filter_suffixes = create_node("suffixes", "suffix")

 @completer(
     applies_in=[ObjectContent],
-    applies_in_subclass=("Gtk", "FileFilter"),
+    applies_in_subclass=[("Gtk", "FileFilter")],
     matches=new_statement_patterns,
 )
 def file_filter_completer(_ctx: CompletionContext):

View file

@@ -90,7 +90,7 @@ class ExtLayout(AstNode):

 @completer(
     applies_in=[ObjectContent],
-    applies_in_subclass=("Gtk", "Widget"),
+    applies_in_subclass=[("Gtk", "Widget")],
     matches=new_statement_patterns,
 )
 def layout_completer(_ctx: CompletionContext):

View file

@@ -23,22 +23,20 @@ from .values import StringValue

 class ExtScaleMark(AstNode):
-    grammar = [
+    grammar = Statement(
         Keyword("mark"),
         Match("(").expected(),
-        [
-            Optional(AnyOf(UseExact("sign", "-"), UseExact("sign", "+"))),
-            UseNumber("value"),
-            Optional(
-                [
-                    ",",
-                    UseIdent("position"),
-                    Optional([",", StringValue]),
-                ]
-            ),
-        ],
-        Match(")").expected(),
-    ]
+        Optional(AnyOf(UseExact("sign", "-"), UseExact("sign", "+"))),
+        UseNumber("value").expected("value"),
+        Optional(
+            [
+                ",",
+                UseIdent("position").expected("position"),
+                Optional([",", to_parse_node(StringValue).expected("label")]),
+            ]
+        ),
+        end=")",
+    )

     @property
     def value(self) -> float:
@@ -134,7 +132,7 @@ class ExtScaleMarks(AstNode):

 @completer(
     applies_in=[ObjectContent],
-    applies_in_subclass=("Gtk", "Scale"),
+    applies_in_subclass=[("Gtk", "Scale")],
     matches=new_statement_patterns,
 )
 def complete_marks(_ctx: CompletionContext):
@@ -153,6 +151,23 @@ def complete_mark(_ctx: CompletionContext):
     yield Completion("mark", CompletionItemKind.Keyword, snippet="mark ($0),")


+@completer(
+    applies_in=[ExtScaleMark],
+    matches=[[(TokenType.NUMBER, None), (TokenType.PUNCTUATION, ",")]],
+)
+def complete_mark_position(ctx: CompletionContext):
+    gir = ctx.ast_node.root.gir
+    response_type = gir.get_type("PositionType", "Gtk")
+    yield from [
+        Completion(
+            name,
+            kind=CompletionItemKind.EnumMember,
+            docs=member.doc,
+        )
+        for name, member in response_type.members.items()
+    ]
+
+
 @decompiler("marks")
 def decompile_marks(
     ctx,

View file

@@ -101,7 +101,7 @@ class ExtSizeGroupWidgets(AstNode):

 @completer(
     applies_in=[ObjectContent],
-    applies_in_subclass=("Gtk", "SizeGroup"),
+    applies_in_subclass=[("Gtk", "SizeGroup")],
     matches=new_statement_patterns,
 )
 def size_group_completer(_ctx: CompletionContext):

View file

@@ -72,7 +72,7 @@ class ExtStringListStrings(AstNode):

 @completer(
     applies_in=[ObjectContent],
-    applies_in_subclass=("Gtk", "StringList"),
+    applies_in_subclass=[("Gtk", "StringList")],
     matches=new_statement_patterns,
 )
 def strings_completer(_ctx: CompletionContext):

View file

@@ -77,7 +77,7 @@ class ExtStyles(AstNode):

 @completer(
     applies_in=[ObjectContent],
-    applies_in_subclass=("Gtk", "Widget"),
+    applies_in_subclass=[("Gtk", "Widget")],
     matches=new_statement_patterns,
 )
 def style_completer(_ctx: CompletionContext):

View file

@@ -31,7 +31,12 @@ ALLOWED_PARENTS: T.List[T.Tuple[str, str]] = [

 class ChildInternal(AstNode):
-    grammar = ["internal-child", UseIdent("internal_child")]
+    grammar = [
+        "[",
+        "internal-child",
+        UseIdent("internal_child").expected("internal child name"),
+        Match("]").expected(),
+    ]

     @property
     def internal_child(self) -> str:
@@ -39,7 +44,7 @@ class ChildInternal(AstNode):

 class ChildType(AstNode):
-    grammar = UseIdent("child_type").expected("a child type")
+    grammar = ["[", UseIdent("child_type").expected("a child type"), "]"]

     @property
     def child_type(self) -> str:
@@ -59,7 +64,7 @@ class ChildExtension(AstNode):

 class ChildAnnotation(AstNode):
-    grammar = ["[", AnyOf(ChildInternal, ChildExtension, ChildType), "]"]
+    grammar = AnyOf(ChildInternal, ChildExtension, ChildType)

     @property
     def child(self) -> T.Union[ChildInternal, ChildExtension, ChildType]:

View file

@@ -28,19 +28,21 @@ class ExtResponse(AstNode):
     ALLOWED_PARENTS: T.List[T.Tuple[str, str]] = [("Gtk", "Dialog"), ("Gtk", "InfoBar")]

-    grammar = [
-        "[",
+    grammar = Statement(
         Keyword("action"),
         Keyword("response"),
-        "=",
+        Match("=").expected(),
         AnyOf(
             UseIdent("response_id"),
             [
                 Optional(UseExact("sign", "-")),
                 UseNumber("response_id"),
             ],
-        ),
+        ).expected("response ID"),
         Optional([Keyword("default"), UseLiteral("is_default", True)]),
-    ]
+        end="]",
+    )

     @validate()
     def parent_has_action_widgets(self) -> None:

View file

@@ -27,11 +27,11 @@ class TypeName(AstNode):
         [
             UseIdent("namespace"),
             ".",
-            UseIdent("class_name"),
+            UseIdent("class_name").expected("class name"),
         ],
         [
             AnyOf("$", [".", UseLiteral("old_extern", True)]),
-            UseIdent("class_name"),
+            UseIdent("class_name").expected("class name"),
             UseLiteral("extern", True),
         ],
         UseIdent("class_name"),
@@ -47,7 +47,11 @@ class TypeName(AstNode):
     @validate("class_name")
     def type_exists(self):
-        if not self.tokens["extern"] and self.gir_ns is not None:
+        if (
+            not self.tokens["extern"]
+            and self.gir_ns is not None
+            and self.tokens["class_name"] is not None
+        ):
             self.root.gir.validate_type(
                 self.tokens["class_name"], self.tokens["namespace"]
             )
@@ -182,3 +186,14 @@ class TemplateClassName(ClassName):
             self.root.gir.validate_type(
                 self.tokens["class_name"], self.tokens["namespace"]
             )
+
+
+class BracketedTypeName(AstNode):
+    grammar = Statement("<", to_parse_node(TypeName).expected("type name"), end=">")
+
+    @property
+    def type_name(self) -> T.Optional[TypeName]:
+        if len(self.children[TypeName]) == 0:
+            return None
+
+        return self.children[TypeName][0]

View file

@@ -26,7 +26,7 @@ from .common import *
 from .contexts import ExprValueCtx, ScopeCtx, ValueTypeCtx
 from .expression import Expression
 from .gobject_object import Object
-from .types import TypeName
+from .types import BracketedTypeName, TypeName


 class Translated(AstNode):
@@ -80,11 +80,7 @@ class TypeLiteral(AstNode):
     grammar = [
         "typeof",
         AnyOf(
-            [
-                "<",
-                to_parse_node(TypeName).expected("type name"),
-                Match(">").expected(),
-            ],
+            BracketedTypeName,
             [
                 UseExact("lparen", "("),
                 to_parse_node(TypeName).expected("type name"),
@@ -98,8 +94,13 @@ class TypeLiteral(AstNode):
         return gir.TypeType()

     @property
-    def type_name(self) -> TypeName:
+    def type_name(self) -> T.Optional[TypeName]:
+        if len(self.children[BracketedTypeName]) == 1:
+            return self.children[BracketedTypeName][0].type_name
+        elif len(self.children[TypeName]) == 1:
             return self.children[TypeName][0]
+        else:
+            return None

     @validate()
     def validate_for_type(self) -> None:

View file

@@ -209,6 +209,7 @@ class XmlOutput(OutputFormat):
             else:
                 xml.put_text(self._object_id(value, value.ident))
         elif isinstance(value, TypeLiteral):
+            assert value.type_name is not None
             xml.put_text(value.type_name.glib_type_name)
         else:
             if isinstance(value.value, float) and value.value == int(value.value):

View file

@@ -235,7 +235,15 @@ class ParseNode:
         start_idx = ctx.index
         inner_ctx = ctx.create_child()

-        if self._parse(inner_ctx):
+        try:
+            result = self._parse(inner_ctx)
+        except Exception as e:
+            # If an exception occurs, there's an explicit error, not just a rule that didn't match. Apply the context
+            # state so that whichever rule handles the exception (e.g. a Statement) knows where the error occurred.
+            ctx.apply_child(inner_ctx)
+            raise e
+
+        if result:
             ctx.apply_child(inner_ctx)
             if ctx.index == start_idx:
                 return ParseResult.EMPTY
@@ -269,12 +277,12 @@ class Err(ParseNode):
         if self.child.parse(ctx).failed():
             start_idx = ctx.start
             while ctx.tokens[start_idx].type in SKIP_TOKENS:
-                start_idx += 1
+                start_idx -= 1

             start_token = ctx.tokens[start_idx]
-            raise CompileError(
-                self.message, Range(start_token.start, start_token.start, ctx.text)
-            )
+            position = start_token.start if ctx.start == start_idx else start_token.end
+            raise CompileError(self.message, Range(position, position, ctx.text))
         return True
@@ -329,8 +337,9 @@ class Statement(ParseNode):
     """ParseNode that attempts to match all of its children in sequence. If any
     child raises an error, the error will be logged but parsing will continue."""

-    def __init__(self, *children):
+    def __init__(self, *children, end: str = ";"):
         self.children = [to_parse_node(child) for child in children]
+        self.end = end

     def _parse(self, ctx) -> bool:
         for child in self.children:
@@ -340,11 +349,29 @@
             except CompileError as e:
                 ctx.errors.append(e)
                 ctx.set_group_incomplete()
+
+                token = ctx.peek_token()
+                if str(token) == self.end:
+                    ctx.next_token()
+
                 return True

         token = ctx.peek_token()
-        if str(token) != ";":
-            ctx.errors.append(CompileError("Expected `;`", token.range))
+        if str(token) != self.end:
+            start_idx = ctx.index - 1
+            while ctx.tokens[start_idx].type in SKIP_TOKENS:
+                start_idx -= 1
+
+            start_token = ctx.tokens[start_idx]
+            position = (
+                start_token.start if ctx.index - 1 == start_idx else start_token.end
+            )
+
+            ctx.errors.append(
+                CompileError(
+                    f"Expected `{self.end}`", Range(position, position, ctx.text)
+                )
+            )
         else:
             ctx.next_token()

         return True
@@ -405,7 +432,6 @@ class Until(ParseNode):
                     ctx.skip_unexpected_token()
             except CompileError as e:
                 ctx.errors.append(e)
-                ctx.next_token()

         return True
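
The new end parameter on Statement is what lets bracket-terminated constructs in this changeset (the [action response=...] child annotation and mark (...)) reuse statement-level error recovery with a closer other than ";". A hedged sketch of how a grammar can use it, modeled on the ExtResponse grammar in this diff (the exact rule below is illustrative):

# Illustrative grammar fragment: error recovery resynchronizes on "]" instead
# of the default ";" terminator.
grammar = Statement(
    Keyword("action"),
    Keyword("response"),
    Match("=").expected(),
    UseIdent("response_id").expected("response ID"),
    end="]",
)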

View file

@@ -1 +1 @@
-4,6,22,Action widget must have ID
+4,5,24,Action widget must have ID

View file

@@ -1 +1 @@
-4,6,18,Gtk.Box doesn't have action widgets
+4,5,20,Gtk.Box doesn't have action widgets

View file

@@ -1 +1 @@
-1,0,0,File must start with a "using Gtk" directive (e.g. `using Gtk 4.0;`)
+1,1,0,File must start with a "using Gtk" directive (e.g. `using Gtk 4.0;`)

View file

@@ -1 +1 @@
-6,1,1,Expected `;`
+5,4,0,Expected `;`

View file

@@ -1,2 +1 @@
-5,1,0,Expected a signal detail name
-4,9,3,Unexpected tokens
+4,11,0,Expected a signal detail name

View file

@@ -1,2 +1 @@
 4,5,21,Attributes are not permitted at the top level of a menu
-4,16,10,Unexpected tokens

View file

@@ -1 +1 @@
-1,11,0,Expected a version number for GTK
+1,10,0,Expected a version number for GTK

View file

@@ -143,9 +143,9 @@ class TestSamples(unittest.TestCase):
         ]

         def error_str(error: CompileError):
-            line, col = utils.idx_to_pos(error.range.start + 1, blueprint)
+            line, col = utils.idx_to_pos(error.range.start, blueprint)
             len = error.range.length
-            return ",".join([str(line + 1), str(col), str(len), error.message])
+            return ",".join([str(line + 1), str(col + 1), str(len), error.message])

         actual = "\n".join([error_str(error) for error in errors])