Mirror of https://gitlab.gnome.org/jwestman/blueprint-compiler.git, synced 2025-05-04 15:59:08 -04:00
cleanup: Format using black

parent 4b42016837, commit af03c2ac0f

36 changed files with 928 additions and 616 deletions
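The diff below is mechanical: the tree was run through the black code formatter, which re-wraps statements that exceed its line-length limit, normalizes string quoting, and adds trailing commas without changing behavior. As a rough sketch of the kind of rewrite involved (the function and argument names here are made up for illustration and do not come from this repository):

    # Hypothetical example of the rewrap black applies; build_widget is not a
    # blueprint-compiler function.
    # Before: a call longer than black's default 88-column limit.
    result = build_widget(class_name, parent_class, properties, signals, children, visible=True)

    # After running `black .`: the arguments move onto their own indented line so
    # the statement fits within the limit; behavior is unchanged.
    result = build_widget(
        class_name, parent_class, properties, signals, children, visible=True
    )

The formatter would typically be invoked from the project root with something like `pip install black && black .`; the exact command and pinned black version used for this commit are not shown on this page.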
@@ -28,10 +28,13 @@ from .xml_emitter import XmlEmitter

class Children:
    """Allows accessing children by type using array syntax."""

    def __init__(self, children):
        self._children = children

    def __iter__(self):
        return iter(self._children)

    def __getitem__(self, key):
        return [child for child in self._children if isinstance(child, key)]


@@ -53,8 +56,9 @@ class AstNode:

    def __init_subclass__(cls):
        cls.completers = []
        cls.validators = [getattr(cls, f) for f in dir(cls) if hasattr(getattr(cls, f), "_validator")]
        cls.validators = [
            getattr(cls, f) for f in dir(cls) if hasattr(getattr(cls, f), "_validator")
        ]

    @property
    def root(self):

@@ -122,7 +126,6 @@ class AstNode:
        for child in self.children:
            yield from child.get_semantic_tokens()

    def iterate_children_recursive(self) -> T.Iterator["AstNode"]:
        yield self
        for child in self.children:
@@ -29,9 +29,13 @@ from .tokenizer import TokenType, Token

Pattern = T.List[T.Tuple[TokenType, T.Optional[str]]]


def _complete(ast_node: AstNode, tokens: T.List[Token], idx: int, token_idx: int) -> T.Iterator[Completion]:
def _complete(
    ast_node: AstNode, tokens: T.List[Token], idx: int, token_idx: int
) -> T.Iterator[Completion]:
    for child in ast_node.children:
        if child.group.start <= idx and (idx < child.group.end or (idx == child.group.end and child.incomplete)):
        if child.group.start <= idx and (
            idx < child.group.end or (idx == child.group.end and child.incomplete)
        ):
            yield from _complete(child, tokens, idx, token_idx)
            return

@@ -48,7 +52,9 @@ def _complete(ast_node: AstNode, tokens: T.List[Token], idx: int, token_idx: int
        yield from completer(prev_tokens, ast_node)


def complete(ast_node: AstNode, tokens: T.List[Token], idx: int) -> T.Iterator[Completion]:
def complete(
    ast_node: AstNode, tokens: T.List[Token], idx: int
) -> T.Iterator[Completion]:
    token_idx = 0
    # find the current token
    for i, token in enumerate(tokens):

@@ -70,13 +76,17 @@ def using_gtk(ast_node, match_variables):

@completer(
    applies_in=[language.UI, language.ObjectContent, language.Template],
    matches=new_statement_patterns
    matches=new_statement_patterns,
)
def namespace(ast_node, match_variables):
    yield Completion("Gtk", CompletionItemKind.Module, text="Gtk.")
    for ns in ast_node.root.children[language.Import]:
        if ns.gir_namespace is not None:
            yield Completion(ns.gir_namespace.name, CompletionItemKind.Module, text=ns.gir_namespace.name + ".")
            yield Completion(
                ns.gir_namespace.name,
                CompletionItemKind.Module,
                text=ns.gir_namespace.name + ".",
            )


@completer(

@@ -84,7 +94,7 @@ def namespace(ast_node, match_variables):
    matches=[
        [(TokenType.IDENT, None), (TokenType.OP, "."), (TokenType.IDENT, None)],
        [(TokenType.IDENT, None), (TokenType.OP, ".")],
    ]
    ],
)
def object_completer(ast_node, match_variables):
    ns = ast_node.root.gir.namespaces.get(match_variables[0])

@@ -105,9 +115,7 @@ def property_completer(ast_node, match_variables):

@completer(
    applies_in=[language.Property, language.BaseTypedAttribute],
    matches=[
        [(TokenType.IDENT, None), (TokenType.OP, ":")]
    ],
    matches=[[(TokenType.IDENT, None), (TokenType.OP, ":")]],
)
def prop_value_completer(ast_node, match_variables):
    if isinstance(ast_node.value_type, gir.Enumeration):

@@ -129,16 +137,21 @@ def signal_completer(ast_node, match_variables):
    if not isinstance(ast_node.parent, language.Object):
        name = "on"
    else:
        name = "on_" + (ast_node.parent.tokens["id"] or ast_node.parent.tokens["class_name"].lower())
    yield Completion(signal, CompletionItemKind.Property, snippet=f"{signal} => ${{1:{name}_{signal.replace('-', '_')}}}()$0;")


@completer(
    applies_in=[language.UI],
    matches=new_statement_patterns
        name = "on_" + (
            ast_node.parent.tokens["id"]
            or ast_node.parent.tokens["class_name"].lower()
        )
    yield Completion(
        signal,
        CompletionItemKind.Property,
        snippet=f"{signal} => ${{1:{name}_{signal.replace('-', '_')}}}()$0;",
    )


@completer(applies_in=[language.UI], matches=new_statement_patterns)
def template_completer(ast_node, match_variables):
    yield Completion(
        "template", CompletionItemKind.Snippet,
        snippet="template ${1:ClassName} : ${2:ParentClass} {\n $0\n}"
        "template",
        CompletionItemKind.Snippet,
        snippet="template ${1:ClassName} : ${2:ParentClass} {\n $0\n}",
    )
@@ -33,19 +33,24 @@ new_statement_patterns = [

def applies_to(*ast_types):
    """Decorator describing which AST nodes the completer should apply in."""

    def decorator(func):
        for c in ast_types:
            c.completers.append(func)
        return func

    return decorator


def completer(applies_in: T.List, matches: T.List = [], applies_in_subclass=None):
    def decorator(func):
        def inner(prev_tokens: T.List[Token], ast_node):
            # For completers that apply in ObjectContent nodes, we can further
            # check that the object is the right class
            if applies_in_subclass is not None:
                type = ast_node.root.gir.get_type(applies_in_subclass[1], applies_in_subclass[0])
                type = ast_node.root.gir.get_type(
                    applies_in_subclass[1], applies_in_subclass[0]
                )
                if ast_node.gir_class and not ast_node.gir_class.assignable_to(type):
                    return

@@ -59,7 +64,9 @@ def completer(applies_in: T.List, matches: T.List=[], applies_in_subclass=None):
                for i in range(0, len(pattern)):
                    type, value = pattern[i]
                    token = prev_tokens[i - len(pattern)]
                    if token.type != type or (value is not None and str(token) != value):
                    if token.type != type or (
                        value is not None and str(token) != value
                    ):
                        break
                    if value is None:
                        match_variables.append(str(token))
@ -60,16 +60,16 @@ class DecompileCtx:
|
|||
|
||||
self.gir.add_namespace(get_namespace("Gtk", "4.0"))
|
||||
|
||||
|
||||
@property
|
||||
def result(self):
|
||||
imports = "\n".join([
|
||||
imports = "\n".join(
|
||||
[
|
||||
f"using {ns} {namespace.version};"
|
||||
for ns, namespace in self.gir.namespaces.items()
|
||||
])
|
||||
]
|
||||
)
|
||||
return imports + "\n" + self._result
|
||||
|
||||
|
||||
def type_by_cname(self, cname):
|
||||
if type := self.gir.get_type_by_cname(cname):
|
||||
return type
|
||||
|
@ -83,7 +83,6 @@ class DecompileCtx:
|
|||
except:
|
||||
pass
|
||||
|
||||
|
||||
def start_block(self):
|
||||
self._blocks_need_end.append(None)
|
||||
|
||||
|
@ -94,7 +93,6 @@ class DecompileCtx:
|
|||
def end_block_with(self, text):
|
||||
self._blocks_need_end[-1] = text
|
||||
|
||||
|
||||
def print(self, line, newline=True):
|
||||
if line == "}" or line == "]":
|
||||
self._indent -= 1
|
||||
|
@ -109,7 +107,11 @@ class DecompileCtx:
|
|||
line_type = LineType.STMT
|
||||
else:
|
||||
line_type = LineType.NONE
|
||||
if line_type != self._last_line_type and self._last_line_type != LineType.BLOCK_START and line_type != LineType.BLOCK_END:
|
||||
if (
|
||||
line_type != self._last_line_type
|
||||
and self._last_line_type != LineType.BLOCK_START
|
||||
and line_type != LineType.BLOCK_END
|
||||
):
|
||||
self._result += "\n"
|
||||
self._last_line_type = line_type
|
||||
|
||||
|
@ -122,10 +124,9 @@ class DecompileCtx:
|
|||
self._blocks_need_end[-1] = _CLOSING[line[-1]]
|
||||
self._indent += 1
|
||||
|
||||
|
||||
def print_attribute(self, name, value, type):
|
||||
if type is None:
|
||||
self.print(f"{name}: \"{escape_quote(value)}\";")
|
||||
self.print(f'{name}: "{escape_quote(value)}";')
|
||||
elif type.assignable_to(FloatType()):
|
||||
self.print(f"{name}: {value};")
|
||||
elif type.assignable_to(BoolType()):
|
||||
|
@ -134,12 +135,20 @@ class DecompileCtx:
|
|||
elif (
|
||||
type.assignable_to(self.gir.namespaces["Gtk"].lookup_type("Gdk.Pixbuf"))
|
||||
or type.assignable_to(self.gir.namespaces["Gtk"].lookup_type("Gdk.Texture"))
|
||||
or type.assignable_to(self.gir.namespaces["Gtk"].lookup_type("Gdk.Paintable"))
|
||||
or type.assignable_to(self.gir.namespaces["Gtk"].lookup_type("Gtk.ShortcutAction"))
|
||||
or type.assignable_to(self.gir.namespaces["Gtk"].lookup_type("Gtk.ShortcutTrigger"))
|
||||
or type.assignable_to(
|
||||
self.gir.namespaces["Gtk"].lookup_type("Gdk.Paintable")
|
||||
)
|
||||
or type.assignable_to(
|
||||
self.gir.namespaces["Gtk"].lookup_type("Gtk.ShortcutAction")
|
||||
)
|
||||
or type.assignable_to(
|
||||
self.gir.namespaces["Gtk"].lookup_type("Gtk.ShortcutTrigger")
|
||||
)
|
||||
):
|
||||
self.print(f'{name}: "{escape_quote(value)}";')
|
||||
elif type.assignable_to(
|
||||
self.gir.namespaces["Gtk"].lookup_type("GObject.Object")
|
||||
):
|
||||
self.print(f"{name}: \"{escape_quote(value)}\";")
|
||||
elif type.assignable_to(self.gir.namespaces["Gtk"].lookup_type("GObject.Object")):
|
||||
self.print(f"{name}: {value};")
|
||||
elif isinstance(type, Enumeration):
|
||||
for member in type.members.values():
|
||||
|
@ -152,7 +161,7 @@ class DecompileCtx:
|
|||
flags = re.sub(r"\s*\|\s*", " | ", value).replace("-", "_")
|
||||
self.print(f"{name}: {flags};")
|
||||
else:
|
||||
self.print(f"{name}: \"{escape_quote(value)}\";")
|
||||
self.print(f'{name}: "{escape_quote(value)}";')
|
||||
|
||||
|
||||
def _decompile_element(ctx: DecompileCtx, gir, xml):
|
||||
|
@ -192,19 +201,21 @@ def decompile(data):
|
|||
return ctx.result
|
||||
|
||||
|
||||
|
||||
def canon(string: str) -> str:
|
||||
if string == "class":
|
||||
return "klass"
|
||||
else:
|
||||
return string.replace("-", "_").lower()
|
||||
|
||||
|
||||
def truthy(string: str) -> bool:
|
||||
return string.lower() in ["yes", "true", "t", "y", "1"]
|
||||
|
||||
|
||||
def full_name(gir):
|
||||
return gir.name if gir.full_name.startswith("Gtk.") else gir.full_name
|
||||
|
||||
|
||||
def lookup_by_cname(gir, cname: str):
|
||||
if isinstance(gir, GirContext):
|
||||
return gir.get_type_by_cname(cname)
|
||||
|
@ -217,15 +228,17 @@ def decompiler(tag, cdata=False):
|
|||
func._cdata = cdata
|
||||
_DECOMPILERS[tag] = func
|
||||
return func
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def escape_quote(string: str) -> str:
|
||||
return (string
|
||||
.replace("\\", "\\\\")
|
||||
.replace("\'", "\\'")
|
||||
.replace("\"", "\\\"")
|
||||
.replace("\n", "\\n"))
|
||||
return (
|
||||
string.replace("\\", "\\\\")
|
||||
.replace("'", "\\'")
|
||||
.replace('"', '\\"')
|
||||
.replace("\n", "\\n")
|
||||
)
|
||||
|
||||
|
||||
@decompiler("interface")
|
||||
|
@ -239,7 +252,18 @@ def decompile_requires(ctx, gir, lib=None, version=None):
|
|||
|
||||
|
||||
@decompiler("property", cdata=True)
|
||||
def decompile_property(ctx, gir, name, cdata, bind_source=None, bind_property=None, bind_flags=None, translatable="false", comments=None, context=None):
|
||||
def decompile_property(
|
||||
ctx,
|
||||
gir,
|
||||
name,
|
||||
cdata,
|
||||
bind_source=None,
|
||||
bind_property=None,
|
||||
bind_flags=None,
|
||||
translatable="false",
|
||||
comments=None,
|
||||
context=None,
|
||||
):
|
||||
name = name.replace("_", "-")
|
||||
if comments is not None:
|
||||
ctx.print(f"/* Translators: {comments} */")
|
||||
|
@ -259,18 +283,32 @@ def decompile_property(ctx, gir, name, cdata, bind_source=None, bind_property=No
|
|||
ctx.print(f"{name}: bind {bind_source}.{bind_property}{flags};")
|
||||
elif truthy(translatable):
|
||||
if context is not None:
|
||||
ctx.print(f"{name}: C_(\"{escape_quote(context)}\", \"{escape_quote(cdata)}\");")
|
||||
ctx.print(
|
||||
f'{name}: C_("{escape_quote(context)}", "{escape_quote(cdata)}");'
|
||||
)
|
||||
else:
|
||||
ctx.print(f"{name}: _(\"{escape_quote(cdata)}\");")
|
||||
ctx.print(f'{name}: _("{escape_quote(cdata)}");')
|
||||
elif gir is None or gir.properties.get(name) is None:
|
||||
ctx.print(f"{name}: \"{escape_quote(cdata)}\";")
|
||||
ctx.print(f'{name}: "{escape_quote(cdata)}";')
|
||||
else:
|
||||
ctx.print_attribute(name, cdata, gir.properties.get(name).type)
|
||||
return gir
|
||||
|
||||
|
||||
@decompiler("attribute", cdata=True)
|
||||
def decompile_attribute(ctx, gir, name, cdata, translatable="false", comments=None, context=None):
|
||||
decompile_property(ctx, gir, name, cdata, translatable=translatable, comments=comments, context=context)
|
||||
def decompile_attribute(
|
||||
ctx, gir, name, cdata, translatable="false", comments=None, context=None
|
||||
):
|
||||
decompile_property(
|
||||
ctx,
|
||||
gir,
|
||||
name,
|
||||
cdata,
|
||||
translatable=translatable,
|
||||
comments=comments,
|
||||
context=context,
|
||||
)
|
||||
|
||||
|
||||
@decompiler("attributes")
|
||||
def decompile_attributes(ctx, gir):
|
||||
|
@ -287,5 +325,7 @@ class UnsupportedError(Exception):
|
|||
print(f"in {Colors.UNDERLINE}{filename}{Colors.NO_UNDERLINE}")
|
||||
if self.tag:
|
||||
print(f"in tag {Colors.BLUE}{self.tag}{Colors.CLEAR}")
|
||||
print(f"""{Colors.FAINT}The gtk-blueprint-tool compiler might support this feature, but the
|
||||
porting tool does not. You probably need to port this file manually.{Colors.CLEAR}\n""")
|
||||
print(
|
||||
f"""{Colors.FAINT}The gtk-blueprint-tool compiler might support this feature, but the
|
||||
porting tool does not. You probably need to port this file manually.{Colors.CLEAR}\n"""
|
||||
)
|
||||
|
|
|
@@ -23,6 +23,7 @@ import sys, traceback
from . import utils
from .utils import Colors


class PrintableError(Exception):
    """Parent class for errors that can be pretty-printed for the user, e.g.
    compilation warnings and errors."""

@@ -37,7 +38,9 @@ class CompileError(PrintableError):
    category = "error"
    color = Colors.RED

    def __init__(self, message, start=None, end=None, did_you_mean=None, hints=None, actions=None):
    def __init__(
        self, message, start=None, end=None, did_you_mean=None, hints=None, actions=None
    ):
        super().__init__(message)

        self.message = message

@@ -53,7 +56,6 @@ class CompileError(PrintableError):
        self.hints.append(hint)
        return self

    def _did_you_mean(self, word: str, options: T.List[str]):
        if word.replace("_", "-") in options:
            self.hint(f"use '-', not '_': `{word.replace('_', '-')}`")

@@ -77,9 +79,11 @@ class CompileError(PrintableError):
        # Display 1-based line numbers
        line_num += 1

        stream.write(f"""{self.color}{Colors.BOLD}{self.category}: {self.message}{Colors.CLEAR}
        stream.write(
            f"""{self.color}{Colors.BOLD}{self.category}: {self.message}{Colors.CLEAR}
at {filename} line {line_num} column {col_num}:
{Colors.FAINT}{line_num :>4} |{Colors.CLEAR}{line.rstrip()}\n {Colors.FAINT}|{" "*(col_num-1)}^{Colors.CLEAR}\n""")
{Colors.FAINT}{line_num :>4} |{Colors.CLEAR}{line.rstrip()}\n {Colors.FAINT}|{" "*(col_num-1)}^{Colors.CLEAR}\n"""
        )

        for hint in self.hints:
            stream.write(f"{Colors.FAINT}hint: {hint}{Colors.CLEAR}\n")

@@ -132,9 +136,10 @@ def report_bug():  # pragma: no cover

    print(traceback.format_exc())
    print(f"Arguments: {sys.argv}\n")
    print(f"""{Colors.BOLD}{Colors.RED}***** COMPILER BUG *****
    print(
        f"""{Colors.BOLD}{Colors.RED}***** COMPILER BUG *****
The blueprint-compiler program has crashed. Please report the above stacktrace,
along with the input file(s) if possible, on GitLab:
{Colors.BOLD}{Colors.BLUE}{Colors.UNDERLINE}https://gitlab.gnome.org/jwestman/blueprint-compiler/-/issues/new?issue
{Colors.CLEAR}""")
{Colors.CLEAR}"""
    )
@ -31,7 +31,9 @@ _namespace_cache = {}
|
|||
_search_paths = []
|
||||
xdg_data_home = os.environ.get("XDG_DATA_HOME", os.path.expanduser("~/.local/share"))
|
||||
_search_paths.append(os.path.join(xdg_data_home, "gir-1.0"))
|
||||
xdg_data_dirs = os.environ.get("XDG_DATA_DIRS", "/usr/share:/usr/local/share").split(":")
|
||||
xdg_data_dirs = os.environ.get("XDG_DATA_DIRS", "/usr/share:/usr/local/share").split(
|
||||
":"
|
||||
)
|
||||
_search_paths += [os.path.join(dir, "gir-1.0") for dir in xdg_data_dirs]
|
||||
|
||||
|
||||
|
@ -75,31 +77,50 @@ class BasicType(GirType):
|
|||
def full_name(self) -> str:
|
||||
return self.name
|
||||
|
||||
|
||||
class BoolType(BasicType):
|
||||
name = "bool"
|
||||
|
||||
def assignable_to(self, other) -> bool:
|
||||
return isinstance(other, BoolType)
|
||||
|
||||
|
||||
class IntType(BasicType):
|
||||
name = "int"
|
||||
|
||||
def assignable_to(self, other) -> bool:
|
||||
return isinstance(other, IntType) or isinstance(other, UIntType) or isinstance(other, FloatType)
|
||||
return (
|
||||
isinstance(other, IntType)
|
||||
or isinstance(other, UIntType)
|
||||
or isinstance(other, FloatType)
|
||||
)
|
||||
|
||||
|
||||
class UIntType(BasicType):
|
||||
name = "uint"
|
||||
|
||||
def assignable_to(self, other) -> bool:
|
||||
return isinstance(other, IntType) or isinstance(other, UIntType) or isinstance(other, FloatType)
|
||||
return (
|
||||
isinstance(other, IntType)
|
||||
or isinstance(other, UIntType)
|
||||
or isinstance(other, FloatType)
|
||||
)
|
||||
|
||||
|
||||
class FloatType(BasicType):
|
||||
name = "float"
|
||||
|
||||
def assignable_to(self, other) -> bool:
|
||||
return isinstance(other, FloatType)
|
||||
|
||||
|
||||
class StringType(BasicType):
|
||||
name = "string"
|
||||
|
||||
def assignable_to(self, other) -> bool:
|
||||
return isinstance(other, StringType)
|
||||
|
||||
|
||||
_BASIC_TYPES = {
|
||||
"gboolean": BoolType,
|
||||
"int": IntType,
|
||||
|
@ -114,6 +135,7 @@ _BASIC_TYPES = {
|
|||
"utf8": StringType,
|
||||
}
|
||||
|
||||
|
||||
class GirNode:
|
||||
def __init__(self, container, xml):
|
||||
self.container = container
|
||||
|
@ -169,7 +191,7 @@ class GirNode:
|
|||
|
||||
@property
|
||||
def type_name(self):
|
||||
return self.xml.get_elements('type')[0]['name']
|
||||
return self.xml.get_elements("type")[0]["name"]
|
||||
|
||||
@property
|
||||
def type(self):
|
||||
|
@ -193,8 +215,11 @@ class Parameter(GirNode):
|
|||
class Signal(GirNode):
|
||||
def __init__(self, klass, xml: xml_reader.Element):
|
||||
super().__init__(klass, xml)
|
||||
if parameters := xml.get_elements('parameters'):
|
||||
self.params = [Parameter(self, child) for child in parameters[0].get_elements('parameter')]
|
||||
if parameters := xml.get_elements("parameters"):
|
||||
self.params = [
|
||||
Parameter(self, child)
|
||||
for child in parameters[0].get_elements("parameter")
|
||||
]
|
||||
else:
|
||||
self.params = []
|
||||
|
||||
|
@ -207,9 +232,17 @@ class Signal(GirNode):
|
|||
class Interface(GirNode, GirType):
|
||||
def __init__(self, ns, xml: xml_reader.Element):
|
||||
super().__init__(ns, xml)
|
||||
self.properties = {child["name"]: Property(self, child) for child in xml.get_elements("property")}
|
||||
self.signals = {child["name"]: Signal(self, child) for child in xml.get_elements("glib:signal")}
|
||||
self.prerequisites = [child["name"] for child in xml.get_elements("prerequisite")]
|
||||
self.properties = {
|
||||
child["name"]: Property(self, child)
|
||||
for child in xml.get_elements("property")
|
||||
}
|
||||
self.signals = {
|
||||
child["name"]: Signal(self, child)
|
||||
for child in xml.get_elements("glib:signal")
|
||||
}
|
||||
self.prerequisites = [
|
||||
child["name"] for child in xml.get_elements("prerequisite")
|
||||
]
|
||||
|
||||
def assignable_to(self, other) -> bool:
|
||||
if self == other:
|
||||
|
@ -225,8 +258,14 @@ class Class(GirNode, GirType):
|
|||
super().__init__(ns, xml)
|
||||
self._parent = xml["parent"]
|
||||
self.implements = [impl["name"] for impl in xml.get_elements("implements")]
|
||||
self.own_properties = {child["name"]: Property(self, child) for child in xml.get_elements("property")}
|
||||
self.own_signals = {child["name"]: Signal(self, child) for child in xml.get_elements("glib:signal")}
|
||||
self.own_properties = {
|
||||
child["name"]: Property(self, child)
|
||||
for child in xml.get_elements("property")
|
||||
}
|
||||
self.own_signals = {
|
||||
child["name"]: Signal(self, child)
|
||||
for child in xml.get_elements("glib:signal")
|
||||
}
|
||||
|
||||
@property
|
||||
def signature(self):
|
||||
|
@ -251,7 +290,6 @@ class Class(GirNode, GirType):
|
|||
return None
|
||||
return self.get_containing(Namespace).lookup_type(self._parent)
|
||||
|
||||
|
||||
def assignable_to(self, other) -> bool:
|
||||
if self == other:
|
||||
return True
|
||||
|
@ -259,12 +297,15 @@ class Class(GirNode, GirType):
|
|||
return True
|
||||
else:
|
||||
for iface in self.implements:
|
||||
if self.get_containing(Namespace).lookup_type(iface).assignable_to(other):
|
||||
if (
|
||||
self.get_containing(Namespace)
|
||||
.lookup_type(iface)
|
||||
.assignable_to(other)
|
||||
):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def _enum_properties(self):
|
||||
yield from self.own_properties.values()
|
||||
|
||||
|
@ -272,7 +313,9 @@ class Class(GirNode, GirType):
|
|||
yield from self.parent.properties.values()
|
||||
|
||||
for impl in self.implements:
|
||||
yield from self.get_containing(Namespace).lookup_type(impl).properties.values()
|
||||
yield from self.get_containing(Namespace).lookup_type(
|
||||
impl
|
||||
).properties.values()
|
||||
|
||||
def _enum_signals(self):
|
||||
yield from self.own_signals.values()
|
||||
|
@ -309,7 +352,10 @@ class EnumMember(GirNode):
|
|||
class Enumeration(GirNode, GirType):
|
||||
def __init__(self, ns, xml: xml_reader.Element):
|
||||
super().__init__(ns, xml)
|
||||
self.members = { child["name"]: EnumMember(self, child) for child in xml.get_elements("member") }
|
||||
self.members = {
|
||||
child["name"]: EnumMember(self, child)
|
||||
for child in xml.get_elements("member")
|
||||
}
|
||||
|
||||
@property
|
||||
def signature(self):
|
||||
|
@ -336,7 +382,10 @@ class BitfieldMember(GirNode):
|
|||
class Bitfield(GirNode, GirType):
|
||||
def __init__(self, ns, xml: xml_reader.Element):
|
||||
super().__init__(ns, xml)
|
||||
self.members = { child["name"]: EnumMember(self, child) for child in xml.get_elements("member") }
|
||||
self.members = {
|
||||
child["name"]: EnumMember(self, child)
|
||||
for child in xml.get_elements("member")
|
||||
}
|
||||
|
||||
@property
|
||||
def signature(self):
|
||||
|
@ -349,17 +398,27 @@ class Bitfield(GirNode, GirType):
|
|||
class Namespace(GirNode):
|
||||
def __init__(self, repo, xml: xml_reader.Element):
|
||||
super().__init__(repo, xml)
|
||||
self.classes = { child["name"]: Class(self, child) for child in xml.get_elements("class") }
|
||||
self.interfaces = { child["name"]: Interface(self, child) for child in xml.get_elements("interface") }
|
||||
self.enumerations = { child["name"]: Enumeration(self, child) for child in xml.get_elements("enumeration") }
|
||||
self.bitfields = { child["name"]: Bitfield(self, child) for child in xml.get_elements("bitfield") }
|
||||
self.classes = {
|
||||
child["name"]: Class(self, child) for child in xml.get_elements("class")
|
||||
}
|
||||
self.interfaces = {
|
||||
child["name"]: Interface(self, child)
|
||||
for child in xml.get_elements("interface")
|
||||
}
|
||||
self.enumerations = {
|
||||
child["name"]: Enumeration(self, child)
|
||||
for child in xml.get_elements("enumeration")
|
||||
}
|
||||
self.bitfields = {
|
||||
child["name"]: Bitfield(self, child)
|
||||
for child in xml.get_elements("bitfield")
|
||||
}
|
||||
self.version = xml["version"]
|
||||
|
||||
@property
|
||||
def signature(self):
|
||||
return f"namespace {self.name} {self.version}"
|
||||
|
||||
|
||||
def get_type(self, name):
|
||||
"""Gets a type (class, interface, enum, etc.) from this namespace."""
|
||||
return (
|
||||
|
@ -369,14 +428,16 @@ class Namespace(GirNode):
|
|||
or self.bitfields.get(name)
|
||||
)
|
||||
|
||||
|
||||
def get_type_by_cname(self, cname: str):
|
||||
"""Gets a type from this namespace by its C name."""
|
||||
for item in [*self.classes.values(), *self.interfaces.values(), *self.enumerations.values()]:
|
||||
for item in [
|
||||
*self.classes.values(),
|
||||
*self.interfaces.values(),
|
||||
*self.enumerations.values(),
|
||||
]:
|
||||
if item.cname == cname:
|
||||
return item
|
||||
|
||||
|
||||
def lookup_type(self, type_name: str):
|
||||
"""Looks up a type in the scope of this namespace (including in the
|
||||
namespace's dependencies)."""
|
||||
|
@ -393,28 +454,31 @@ class Namespace(GirNode):
|
|||
class Repository(GirNode):
|
||||
def __init__(self, xml: xml_reader.Element):
|
||||
super().__init__(None, xml)
|
||||
self.namespaces = { child["name"]: Namespace(self, child) for child in xml.get_elements("namespace") }
|
||||
self.namespaces = {
|
||||
child["name"]: Namespace(self, child)
|
||||
for child in xml.get_elements("namespace")
|
||||
}
|
||||
|
||||
try:
|
||||
self.includes = { include["name"]: get_namespace(include["name"], include["version"]) for include in xml.get_elements("include") }
|
||||
self.includes = {
|
||||
include["name"]: get_namespace(include["name"], include["version"])
|
||||
for include in xml.get_elements("include")
|
||||
}
|
||||
except:
|
||||
raise CompilerBugError(f"Failed to load dependencies.")
|
||||
|
||||
|
||||
def get_type(self, name: str, ns: str) -> T.Optional[GirNode]:
|
||||
if namespace := self.namespaces.get(ns):
|
||||
return namespace.get_type(name)
|
||||
else:
|
||||
return self.lookup_namespace(ns).get_type(name)
|
||||
|
||||
|
||||
def get_type_by_cname(self, name: str) -> T.Optional[GirNode]:
|
||||
for ns in self.namespaces.values():
|
||||
if type := ns.get_type_by_cname(name):
|
||||
return type
|
||||
return None
|
||||
|
||||
|
||||
def lookup_namespace(self, ns: str):
|
||||
"""Finds a namespace among this namespace's dependencies."""
|
||||
if namespace := self.namespaces.get(ns):
|
||||
|
@ -429,22 +493,21 @@ class GirContext:
|
|||
def __init__(self):
|
||||
self.namespaces = {}
|
||||
|
||||
|
||||
def add_namespace(self, namespace: Namespace):
|
||||
other = self.namespaces.get(namespace.name)
|
||||
if other is not None and other.version != namespace.version:
|
||||
raise CompileError(f"Namespace {namespace.name}-{namespace.version} can't be imported because version {other.version} was imported earlier")
|
||||
raise CompileError(
|
||||
f"Namespace {namespace.name}-{namespace.version} can't be imported because version {other.version} was imported earlier"
|
||||
)
|
||||
|
||||
self.namespaces[namespace.name] = namespace
|
||||
|
||||
|
||||
def get_type_by_cname(self, name: str) -> T.Optional[GirNode]:
|
||||
for ns in self.namespaces.values():
|
||||
if type := ns.get_type_by_cname(name):
|
||||
return type
|
||||
return None
|
||||
|
||||
|
||||
def get_type(self, name: str, ns: str) -> T.Optional[GirNode]:
|
||||
ns = ns or "Gtk"
|
||||
|
||||
|
@ -453,7 +516,6 @@ class GirContext:
|
|||
|
||||
return self.namespaces[ns].get_type(name)
|
||||
|
||||
|
||||
def get_class(self, name: str, ns: str) -> T.Optional[Class]:
|
||||
type = self.get_type(name, ns)
|
||||
if isinstance(type, Class):
|
||||
|
@ -461,7 +523,6 @@ class GirContext:
|
|||
else:
|
||||
return None
|
||||
|
||||
|
||||
def validate_ns(self, ns: str):
|
||||
"""Raises an exception if there is a problem looking up the given
|
||||
namespace."""
|
||||
|
@ -474,7 +535,6 @@ class GirContext:
|
|||
did_you_mean=(ns, self.namespaces.keys()),
|
||||
)
|
||||
|
||||
|
||||
def validate_class(self, name: str, ns: str):
|
||||
"""Raises an exception if there is a problem looking up the given
|
||||
class (it doesn't exist, it isn't a class, etc.)"""
|
||||
|
@ -494,4 +554,3 @@ class GirContext:
|
|||
f"{ns}.{name} is not a class",
|
||||
did_you_mean=(name, self.namespaces[ns].classes.keys()),
|
||||
)
|
||||
|
||||
|
|
|
@ -34,9 +34,11 @@ class CouldNotPort:
|
|||
def __init__(self, message):
|
||||
self.message = message
|
||||
|
||||
|
||||
def change_suffix(f):
|
||||
return f.removesuffix(".ui") + ".blp"
|
||||
|
||||
|
||||
def decompile_file(in_file, out_file) -> T.Union[str, CouldNotPort]:
|
||||
if os.path.exists(out_file):
|
||||
return CouldNotPort("already exists")
|
||||
|
@ -61,12 +63,15 @@ def decompile_file(in_file, out_file) -> T.Union[str, CouldNotPort]:
|
|||
except PrintableError as e:
|
||||
e.pretty_print(out_file, decompiled)
|
||||
|
||||
print(f"{Colors.RED}{Colors.BOLD}error: the generated file does not compile{Colors.CLEAR}")
|
||||
print(
|
||||
f"{Colors.RED}{Colors.BOLD}error: the generated file does not compile{Colors.CLEAR}"
|
||||
)
|
||||
print(f"in {Colors.UNDERLINE}{out_file}{Colors.NO_UNDERLINE}")
|
||||
print(
|
||||
f"""{Colors.FAINT}Either the original XML file had an error, or there is a bug in the
|
||||
porting tool. If you think it's a bug (which is likely), please file an issue on GitLab:
|
||||
{Colors.BLUE}{Colors.UNDERLINE}https://gitlab.gnome.org/jwestman/blueprint-compiler/-/issues/new?issue{Colors.CLEAR}\n""")
|
||||
{Colors.BLUE}{Colors.UNDERLINE}https://gitlab.gnome.org/jwestman/blueprint-compiler/-/issues/new?issue{Colors.CLEAR}\n"""
|
||||
)
|
||||
|
||||
return CouldNotPort("does not compile")
|
||||
|
||||
|
@ -106,7 +111,9 @@ def enter():
|
|||
|
||||
|
||||
def step1():
|
||||
print(f"{Colors.BOLD}STEP 1: Create subprojects/blueprint-compiler.wrap{Colors.CLEAR}")
|
||||
print(
|
||||
f"{Colors.BOLD}STEP 1: Create subprojects/blueprint-compiler.wrap{Colors.CLEAR}"
|
||||
)
|
||||
|
||||
if os.path.exists("subprojects/blueprint-compiler.wrap"):
|
||||
print("subprojects/blueprint-compiler.wrap already exists, skipping\n")
|
||||
|
@ -119,14 +126,16 @@ def step1():
|
|||
pass
|
||||
|
||||
with open("subprojects/blueprint-compiler.wrap", "w") as wrap:
|
||||
wrap.write("""[wrap-git]
|
||||
wrap.write(
|
||||
"""[wrap-git]
|
||||
directory = blueprint-compiler
|
||||
url = https://gitlab.gnome.org/jwestman/blueprint-compiler.git
|
||||
revision = main
|
||||
depth = 1
|
||||
|
||||
[provide]
|
||||
program_names = blueprint-compiler""")
|
||||
program_names = blueprint-compiler"""
|
||||
)
|
||||
|
||||
print()
|
||||
|
||||
|
@ -141,7 +150,9 @@ def step2():
|
|||
if yesno("Add '/subprojects/blueprint-compiler' to .gitignore?"):
|
||||
gitignore.write("\n/subprojects/blueprint-compiler\n")
|
||||
else:
|
||||
print("'/subprojects/blueprint-compiler' already in .gitignore, skipping")
|
||||
print(
|
||||
"'/subprojects/blueprint-compiler' already in .gitignore, skipping"
|
||||
)
|
||||
else:
|
||||
if yesno("Create .gitignore with '/subprojects/blueprint-compiler'?"):
|
||||
with open(".gitignore", "w") as gitignore:
|
||||
|
@ -164,9 +175,13 @@ def step3():
|
|||
if isinstance(result, CouldNotPort):
|
||||
if result.message == "already exists":
|
||||
print(Colors.FAINT, end="")
|
||||
print(f"{Colors.RED}will not port {Colors.UNDERLINE}{in_file}{Colors.NO_UNDERLINE} -> {Colors.UNDERLINE}{out_file}{Colors.NO_UNDERLINE} [{result.message}]{Colors.CLEAR}")
|
||||
print(
|
||||
f"{Colors.RED}will not port {Colors.UNDERLINE}{in_file}{Colors.NO_UNDERLINE} -> {Colors.UNDERLINE}{out_file}{Colors.NO_UNDERLINE} [{result.message}]{Colors.CLEAR}"
|
||||
)
|
||||
else:
|
||||
print(f"will port {Colors.UNDERLINE}{in_file}{Colors.CLEAR} -> {Colors.UNDERLINE}{out_file}{Colors.CLEAR}")
|
||||
print(
|
||||
f"will port {Colors.UNDERLINE}{in_file}{Colors.CLEAR} -> {Colors.UNDERLINE}{out_file}{Colors.CLEAR}"
|
||||
)
|
||||
success += 1
|
||||
|
||||
print()
|
||||
|
@ -175,7 +190,9 @@ def step3():
|
|||
elif success == len(files):
|
||||
print(f"{Colors.GREEN}All files were converted.{Colors.CLEAR}")
|
||||
elif success > 0:
|
||||
print(f"{Colors.RED}{success} file(s) were converted, {len(files) - success} were not.{Colors.CLEAR}")
|
||||
print(
|
||||
f"{Colors.RED}{success} file(s) were converted, {len(files) - success} were not.{Colors.CLEAR}"
|
||||
)
|
||||
else:
|
||||
print(f"{Colors.RED}None of the files could be converted.{Colors.CLEAR}")
|
||||
|
||||
|
@ -199,22 +216,33 @@ def step3():
|
|||
|
||||
def step4(ported):
|
||||
print(f"{Colors.BOLD}STEP 4: Set up meson.build{Colors.CLEAR}")
|
||||
print(f"{Colors.BOLD}{Colors.YELLOW}NOTE: Depending on your build system setup, you may need to make some adjustments to this step.{Colors.CLEAR}")
|
||||
print(
|
||||
f"{Colors.BOLD}{Colors.YELLOW}NOTE: Depending on your build system setup, you may need to make some adjustments to this step.{Colors.CLEAR}"
|
||||
)
|
||||
|
||||
meson_files = [file for file in listdir_recursive(".") if os.path.basename(file) == "meson.build"]
|
||||
meson_files = [
|
||||
file
|
||||
for file in listdir_recursive(".")
|
||||
if os.path.basename(file) == "meson.build"
|
||||
]
|
||||
for meson_file in meson_files:
|
||||
with open(meson_file, "r") as f:
|
||||
if "gnome.compile_resources" in f.read():
|
||||
parent = os.path.dirname(meson_file)
|
||||
file_list = "\n ".join([
|
||||
file_list = "\n ".join(
|
||||
[
|
||||
f"'{os.path.relpath(file, parent)}',"
|
||||
for file in ported
|
||||
if file.startswith(parent)
|
||||
])
|
||||
]
|
||||
)
|
||||
|
||||
if len(file_list):
|
||||
print(f"{Colors.BOLD}Paste the following into {Colors.UNDERLINE}{meson_file}{Colors.NO_UNDERLINE}:{Colors.CLEAR}")
|
||||
print(f"""
|
||||
print(
|
||||
f"{Colors.BOLD}Paste the following into {Colors.UNDERLINE}{meson_file}{Colors.NO_UNDERLINE}:{Colors.CLEAR}"
|
||||
)
|
||||
print(
|
||||
f"""
|
||||
blueprints = custom_target('blueprints',
|
||||
input: files(
|
||||
{file_list}
|
||||
|
@ -222,14 +250,17 @@ blueprints = custom_target('blueprints',
|
|||
output: '.',
|
||||
command: [find_program('blueprint-compiler'), 'batch-compile', '@OUTPUT@', '@CURRENT_SOURCE_DIR@', '@INPUT@'],
|
||||
)
|
||||
""")
|
||||
"""
|
||||
)
|
||||
enter()
|
||||
|
||||
print(f"""{Colors.BOLD}Paste the following into the 'gnome.compile_resources()'
|
||||
print(
|
||||
f"""{Colors.BOLD}Paste the following into the 'gnome.compile_resources()'
|
||||
arguments in {Colors.UNDERLINE}{meson_file}{Colors.NO_UNDERLINE}:{Colors.CLEAR}
|
||||
|
||||
dependencies: blueprints,
|
||||
""")
|
||||
"""
|
||||
)
|
||||
enter()
|
||||
|
||||
print()
|
||||
|
@ -239,7 +270,9 @@ def step5(in_files):
|
|||
print(f"{Colors.BOLD}STEP 5: Update POTFILES.in{Colors.CLEAR}")
|
||||
|
||||
if not os.path.exists("po/POTFILES.in"):
|
||||
print(f"{Colors.UNDERLINE}po/POTFILES.in{Colors.NO_UNDERLINE} does not exist, skipping\n")
|
||||
print(
|
||||
f"{Colors.UNDERLINE}po/POTFILES.in{Colors.NO_UNDERLINE} does not exist, skipping\n"
|
||||
)
|
||||
return
|
||||
|
||||
with open("po/POTFILES.in", "r") as potfiles:
|
||||
|
@ -252,12 +285,24 @@ def step5(in_files):
|
|||
|
||||
new_data = "".join(lines)
|
||||
|
||||
print(f"{Colors.BOLD}Will make the following changes to {Colors.UNDERLINE}po/POTFILES.in{Colors.CLEAR}")
|
||||
print(
|
||||
"".join([
|
||||
(Colors.GREEN if line.startswith('+') else Colors.RED + Colors.FAINT if line.startswith('-') else '') + line + Colors.CLEAR
|
||||
f"{Colors.BOLD}Will make the following changes to {Colors.UNDERLINE}po/POTFILES.in{Colors.CLEAR}"
|
||||
)
|
||||
print(
|
||||
"".join(
|
||||
[
|
||||
(
|
||||
Colors.GREEN
|
||||
if line.startswith("+")
|
||||
else Colors.RED + Colors.FAINT
|
||||
if line.startswith("-")
|
||||
else ""
|
||||
)
|
||||
+ line
|
||||
+ Colors.CLEAR
|
||||
for line in difflib.unified_diff(old_lines, lines)
|
||||
])
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
if yesno("Is this ok?"):
|
||||
|
@ -286,5 +331,6 @@ def run(opts):
|
|||
step5(in_files)
|
||||
step6(in_files)
|
||||
|
||||
print(f"{Colors.BOLD}STEP 6: Done! Make sure your app still builds and runs correctly.{Colors.CLEAR}")
|
||||
|
||||
print(
|
||||
f"{Colors.BOLD}STEP 6: Done! Make sure your app still builds and runs correctly.{Colors.CLEAR}"
|
||||
)
|
||||
|
|
|
@ -44,6 +44,7 @@ class ObjectContent(AstNode):
|
|||
for x in self.children:
|
||||
x.emit_xml(xml)
|
||||
|
||||
|
||||
class Object(AstNode):
|
||||
grammar: T.Any = [
|
||||
class_name,
|
||||
|
@ -58,8 +59,14 @@ class Object(AstNode):
|
|||
|
||||
@validate("class_name")
|
||||
def gir_class_exists(self):
|
||||
if self.tokens["class_name"] and not self.tokens["ignore_gir"] and self.gir_ns is not None:
|
||||
self.root.gir.validate_class(self.tokens["class_name"], self.tokens["namespace"])
|
||||
if (
|
||||
self.tokens["class_name"]
|
||||
and not self.tokens["ignore_gir"]
|
||||
and self.gir_ns is not None
|
||||
):
|
||||
self.root.gir.validate_class(
|
||||
self.tokens["class_name"], self.tokens["namespace"]
|
||||
)
|
||||
|
||||
@property
|
||||
def gir_ns(self):
|
||||
|
@ -69,15 +76,15 @@ class Object(AstNode):
|
|||
@property
|
||||
def gir_class(self):
|
||||
if self.tokens["class_name"] and not self.tokens["ignore_gir"]:
|
||||
return self.root.gir.get_class(self.tokens["class_name"], self.tokens["namespace"])
|
||||
|
||||
return self.root.gir.get_class(
|
||||
self.tokens["class_name"], self.tokens["namespace"]
|
||||
)
|
||||
|
||||
@docs("namespace")
|
||||
def namespace_docs(self):
|
||||
if ns := self.root.gir.namespaces.get(self.tokens["namespace"]):
|
||||
return ns.doc
|
||||
|
||||
|
||||
@docs("class_name")
|
||||
def class_docs(self):
|
||||
if self.gir_class:
|
||||
|
@ -100,10 +107,15 @@ class Object(AstNode):
|
|||
def emit_xml(self, xml: XmlEmitter):
|
||||
from .gtkbuilder_child import Child
|
||||
|
||||
xml.start_tag("object", **{
|
||||
"class": self.gir_class.glib_type_name if self.gir_class else self.tokens["class_name"],
|
||||
xml.start_tag(
|
||||
"object",
|
||||
**{
|
||||
"class": self.gir_class.glib_type_name
|
||||
if self.gir_class
|
||||
else self.tokens["class_name"],
|
||||
"id": self.tokens["id"],
|
||||
})
|
||||
},
|
||||
)
|
||||
for child in self.children:
|
||||
child.emit_xml(xml)
|
||||
|
||||
|
@ -122,13 +134,17 @@ def validate_parent_type(node, ns: str, name: str, err_msg: str):
|
|||
parent = node.root.gir.get_type(name, ns)
|
||||
container_type = node.parent_by_type(Object).gir_class
|
||||
if container_type and not container_type.assignable_to(parent):
|
||||
raise CompileError(f"{container_type.full_name} is not a {parent.full_name}, so it doesn't have {err_msg}")
|
||||
raise CompileError(
|
||||
f"{container_type.full_name} is not a {parent.full_name}, so it doesn't have {err_msg}"
|
||||
)
|
||||
|
||||
|
||||
@decompiler("object")
|
||||
def decompile_object(ctx, gir, klass, id=None):
|
||||
gir_class = ctx.type_by_cname(klass)
|
||||
klass_name = decompile.full_name(gir_class) if gir_class is not None else "." + klass
|
||||
klass_name = (
|
||||
decompile.full_name(gir_class) if gir_class is not None else "." + klass
|
||||
)
|
||||
if id is None:
|
||||
ctx.print(f"{klass_name} {{")
|
||||
else:
|
||||
|
|
|
@ -33,12 +33,16 @@ class Property(AstNode):
|
|||
UseIdent("bind_source").expected("the ID of a source object to bind from"),
|
||||
".",
|
||||
UseIdent("bind_property").expected("a property name to bind from"),
|
||||
ZeroOrMore(AnyOf(
|
||||
ZeroOrMore(
|
||||
AnyOf(
|
||||
["no-sync-create", UseLiteral("no_sync_create", True)],
|
||||
["inverted", UseLiteral("inverted", True)],
|
||||
["bidirectional", UseLiteral("bidirectional", True)],
|
||||
Match("sync-create").warn("sync-create is deprecated in favor of no-sync-create"),
|
||||
)),
|
||||
Match("sync-create").warn(
|
||||
"sync-create is deprecated in favor of no-sync-create"
|
||||
),
|
||||
)
|
||||
),
|
||||
),
|
||||
Statement(
|
||||
UseIdent("name"),
|
||||
|
@ -54,19 +58,16 @@ class Property(AstNode):
|
|||
def gir_class(self):
|
||||
return self.parent.parent.gir_class
|
||||
|
||||
|
||||
@property
|
||||
def gir_property(self):
|
||||
if self.gir_class is not None:
|
||||
return self.gir_class.properties.get(self.tokens["name"])
|
||||
|
||||
|
||||
@property
|
||||
def value_type(self):
|
||||
if self.gir_property is not None:
|
||||
return self.gir_property.type
|
||||
|
||||
|
||||
@validate("name")
|
||||
def property_exists(self):
|
||||
if self.gir_class is None:
|
||||
|
@ -82,10 +83,9 @@ class Property(AstNode):
|
|||
if self.gir_property is None:
|
||||
raise CompileError(
|
||||
f"Class {self.gir_class.full_name} does not contain a property called {self.tokens['name']}",
|
||||
did_you_mean=(self.tokens["name"], self.gir_class.properties.keys())
|
||||
did_you_mean=(self.tokens["name"], self.gir_class.properties.keys()),
|
||||
)
|
||||
|
||||
|
||||
@validate()
|
||||
def obj_property_type(self):
|
||||
if len(self.children[Object]) == 0:
|
||||
|
@ -93,18 +93,21 @@ class Property(AstNode):
|
|||
|
||||
object = self.children[Object][0]
|
||||
type = self.value_type
|
||||
if object and type and object.gir_class and not object.gir_class.assignable_to(type):
|
||||
if (
|
||||
object
|
||||
and type
|
||||
and object.gir_class
|
||||
and not object.gir_class.assignable_to(type)
|
||||
):
|
||||
raise CompileError(
|
||||
f"Cannot assign {object.gir_class.full_name} to {type.full_name}"
|
||||
)
|
||||
|
||||
|
||||
@docs("name")
|
||||
def property_docs(self):
|
||||
if self.gir_property is not None:
|
||||
return self.gir_property.doc
|
||||
|
||||
|
||||
def emit_xml(self, xml: XmlEmitter):
|
||||
values = self.children[Value]
|
||||
value = values[0] if len(values) == 1 else None
|
||||
|
|
|
@ -25,33 +25,34 @@ from .common import *
|
|||
class Signal(AstNode):
|
||||
grammar = Statement(
|
||||
UseIdent("name"),
|
||||
Optional([
|
||||
Optional(
|
||||
[
|
||||
"::",
|
||||
UseIdent("detail_name").expected("a signal detail name"),
|
||||
]),
|
||||
]
|
||||
),
|
||||
"=>",
|
||||
UseIdent("handler").expected("the name of a function to handle the signal"),
|
||||
Match("(").expected("argument list"),
|
||||
Optional(UseIdent("object")).expected("object identifier"),
|
||||
Match(")").expected(),
|
||||
ZeroOrMore(AnyOf(
|
||||
ZeroOrMore(
|
||||
AnyOf(
|
||||
[Keyword("swapped"), UseLiteral("swapped", True)],
|
||||
[Keyword("after"), UseLiteral("after", True)],
|
||||
)),
|
||||
)
|
||||
|
||||
),
|
||||
)
|
||||
|
||||
@property
|
||||
def gir_signal(self):
|
||||
if self.gir_class is not None:
|
||||
return self.gir_class.signals.get(self.tokens["name"])
|
||||
|
||||
|
||||
@property
|
||||
def gir_class(self):
|
||||
return self.parent.parent.gir_class
|
||||
|
||||
|
||||
@validate("name")
|
||||
def signal_exists(self):
|
||||
if self.gir_class is None:
|
||||
|
@ -67,10 +68,9 @@ class Signal(AstNode):
|
|||
if self.gir_signal is None:
|
||||
raise CompileError(
|
||||
f"Class {self.gir_class.full_name} does not contain a signal called {self.tokens['name']}",
|
||||
did_you_mean=(self.tokens["name"], self.gir_class.signals.keys())
|
||||
did_you_mean=(self.tokens["name"], self.gir_class.signals.keys()),
|
||||
)
|
||||
|
||||
|
||||
@validate("object")
|
||||
def object_exists(self):
|
||||
object_id = self.tokens["object"]
|
||||
|
@ -78,17 +78,13 @@ class Signal(AstNode):
|
|||
return
|
||||
|
||||
if self.root.objects_by_id.get(object_id) is None:
|
||||
raise CompileError(
|
||||
f"Could not find object with ID '{object_id}'"
|
||||
)
|
||||
|
||||
raise CompileError(f"Could not find object with ID '{object_id}'")
|
||||
|
||||
@docs("name")
|
||||
def signal_docs(self):
|
||||
if self.gir_signal is not None:
|
||||
return self.gir_signal.doc
|
||||
|
||||
|
||||
def emit_xml(self, xml: XmlEmitter):
|
||||
name = self.tokens["name"]
|
||||
if self.tokens["detail_name"]:
|
||||
|
@ -98,7 +94,7 @@ class Signal(AstNode):
|
|||
name=name,
|
||||
handler=self.tokens["handler"],
|
||||
swapped="true" if self.tokens["swapped"] else None,
|
||||
object=self.tokens["object"]
|
||||
object=self.tokens["object"],
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@ -86,6 +86,7 @@ def get_state_types(gir):
|
|||
"selected": BoolType(),
|
||||
}
|
||||
|
||||
|
||||
def get_types(gir):
|
||||
return {
|
||||
**get_property_types(gir),
|
||||
|
@ -93,6 +94,7 @@ def get_types(gir):
|
|||
**get_state_types(gir),
|
||||
}
|
||||
|
||||
|
||||
def _get_docs(gir, name):
|
||||
return (
|
||||
gir.get_type("AccessibleProperty", "Gtk").members.get(name)
|
||||
|
@ -151,7 +153,6 @@ class A11y(AstNode):
|
|||
def container_is_widget(self):
|
||||
validate_parent_type(self, "Gtk", "Widget", "accessibility properties")
|
||||
|
||||
|
||||
def emit_xml(self, xml: XmlEmitter):
|
||||
xml.start_tag("accessibility")
|
||||
for child in self.children:
|
||||
|
@ -165,8 +166,7 @@ class A11y(AstNode):
|
|||
)
|
||||
def a11y_completer(ast_node, match_variables):
|
||||
yield Completion(
|
||||
"accessibility", CompletionItemKind.Snippet,
|
||||
snippet="accessibility {\n $0\n}"
|
||||
"accessibility", CompletionItemKind.Snippet, snippet="accessibility {\n $0\n}"
|
||||
)
|
||||
|
||||
|
||||
|
@ -176,20 +176,24 @@ def a11y_completer(ast_node, match_variables):
|
|||
)
|
||||
def a11y_name_completer(ast_node, match_variables):
|
||||
for name, type in get_types(ast_node.root.gir).items():
|
||||
yield Completion(name, CompletionItemKind.Property, docs=_get_docs(ast_node.root.gir, type))
|
||||
yield Completion(
|
||||
name, CompletionItemKind.Property, docs=_get_docs(ast_node.root.gir, type)
|
||||
)
|
||||
|
||||
|
||||
@decompiler("relation", cdata=True)
|
||||
def decompile_relation(ctx, gir, name, cdata):
|
||||
ctx.print_attribute(name, cdata, get_types(ctx.gir).get(name))
|
||||
|
||||
|
||||
@decompiler("state", cdata=True)
|
||||
def decompile_state(ctx, gir, name, cdata, translatable="false"):
|
||||
if decompile.truthy(translatable):
|
||||
ctx.print(f"{name}: _(\"{_escape_quote(cdata)}\");")
|
||||
ctx.print(f'{name}: _("{_escape_quote(cdata)}");')
|
||||
else:
|
||||
ctx.print_attribute(name, cdata, get_types(ctx.gir).get(name))
|
||||
|
||||
|
||||
@decompiler("accessibility")
|
||||
def decompile_accessibility(ctx, gir):
|
||||
ctx.print("accessibility {")
|
||||
|
|
|
@ -35,12 +35,14 @@ class Item(BaseTypedAttribute):
|
|||
item = Group(
|
||||
Item,
|
||||
[
|
||||
Optional([
|
||||
Optional(
|
||||
[
|
||||
UseIdent("name"),
|
||||
":",
|
||||
]),
|
||||
VALUE_HOOKS,
|
||||
]
|
||||
),
|
||||
VALUE_HOOKS,
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
|
@ -56,7 +58,6 @@ class Items(AstNode):
|
|||
def container_is_combo_box_text(self):
|
||||
validate_parent_type(self, "Gtk", "ComboBoxText", "combo box items")
|
||||
|
||||
|
||||
def emit_xml(self, xml: XmlEmitter):
|
||||
xml.start_tag("items")
|
||||
for child in self.children:
|
||||
|
@ -70,7 +71,4 @@ class Items(AstNode):
|
|||
matches=new_statement_patterns,
|
||||
)
|
||||
def items_completer(ast_node, match_variables):
|
||||
yield Completion(
|
||||
"items", CompletionItemKind.Snippet,
|
||||
snippet="items [$0]"
|
||||
)
|
||||
yield Completion("items", CompletionItemKind.Snippet, snippet="items [$0]")
|
||||
|
|
|
@ -54,12 +54,12 @@ def create_node(tag_name: str, singular: str):
|
|||
[
|
||||
UseQuoted("name"),
|
||||
UseLiteral("tag_name", singular),
|
||||
]
|
||||
],
|
||||
),
|
||||
",",
|
||||
),
|
||||
"]",
|
||||
]
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
|
@ -74,31 +74,38 @@ suffixes = create_node("suffixes", "suffix")
|
|||
matches=new_statement_patterns,
|
||||
)
|
||||
def file_filter_completer(ast_node, match_variables):
|
||||
yield Completion("mime-types", CompletionItemKind.Snippet, snippet="mime-types [\"$0\"]")
|
||||
yield Completion("patterns", CompletionItemKind.Snippet, snippet="patterns [\"$0\"]")
|
||||
yield Completion("suffixes", CompletionItemKind.Snippet, snippet="suffixes [\"$0\"]")
|
||||
yield Completion(
|
||||
"mime-types", CompletionItemKind.Snippet, snippet='mime-types ["$0"]'
|
||||
)
|
||||
yield Completion("patterns", CompletionItemKind.Snippet, snippet='patterns ["$0"]')
|
||||
yield Completion("suffixes", CompletionItemKind.Snippet, snippet='suffixes ["$0"]')
|
||||
|
||||
|
||||
@decompiler("mime-types")
|
||||
def decompile_mime_types(ctx, gir):
|
||||
ctx.print("mime-types [")
|
||||
|
||||
|
||||
@decompiler("mime-type", cdata=True)
|
||||
def decompile_mime_type(ctx, gir, cdata):
|
||||
ctx.print(f'"{cdata}",')
|
||||
|
||||
|
||||
@decompiler("patterns")
|
||||
def decompile_patterns(ctx, gir):
|
||||
ctx.print("patterns [")
|
||||
|
||||
|
||||
@decompiler("pattern", cdata=True)
|
||||
def decompile_pattern(ctx, gir, cdata):
|
||||
ctx.print(f'"{cdata}",')
|
||||
|
||||
|
||||
@decompiler("suffixes")
|
||||
def decompile_suffixes(ctx, gir):
|
||||
ctx.print("suffixes [")
|
||||
|
||||
|
||||
@decompiler("suffix", cdata=True)
|
||||
def decompile_suffix(ctx, gir, cdata):
|
||||
ctx.print(f'"{cdata}",')
|
||||
|
|
|
@ -38,7 +38,7 @@ layout_prop = Group(
|
|||
UseIdent("name"),
|
||||
":",
|
||||
VALUE_HOOKS.expected("a value"),
|
||||
)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
@ -53,7 +53,6 @@ class Layout(AstNode):
|
|||
def container_is_widget(self):
|
||||
validate_parent_type(self, "Gtk", "Widget", "layout properties")
|
||||
|
||||
|
||||
def emit_xml(self, xml: XmlEmitter):
|
||||
xml.start_tag("layout")
|
||||
for child in self.children:
|
||||
|
@ -67,10 +66,7 @@ class Layout(AstNode):
|
|||
matches=new_statement_patterns,
|
||||
)
|
||||
def layout_completer(ast_node, match_variables):
|
||||
yield Completion(
|
||||
"layout", CompletionItemKind.Snippet,
|
||||
snippet="layout {\n $0\n}"
|
||||
)
|
||||
yield Completion("layout", CompletionItemKind.Snippet, snippet="layout {\n $0\n}")
|
||||
|
||||
|
||||
@decompiler("layout")
|
||||
|
|
|
@ -48,22 +48,12 @@ menu_contents = Sequence()
|
|||
|
||||
menu_section = Group(
|
||||
Menu,
|
||||
[
|
||||
"section",
|
||||
UseLiteral("tag", "section"),
|
||||
Optional(UseIdent("id")),
|
||||
menu_contents
|
||||
]
|
||||
["section", UseLiteral("tag", "section"), Optional(UseIdent("id")), menu_contents],
|
||||
)
|
||||
|
||||
menu_submenu = Group(
|
||||
Menu,
|
||||
[
|
||||
"submenu",
|
||||
UseLiteral("tag", "submenu"),
|
||||
Optional(UseIdent("id")),
|
||||
menu_contents
|
||||
]
|
||||
["submenu", UseLiteral("tag", "submenu"), Optional(UseIdent("id")), menu_contents],
|
||||
)
|
||||
|
||||
menu_attribute = Group(
|
||||
|
@ -73,7 +63,7 @@ menu_attribute = Group(
|
|||
":",
|
||||
VALUE_HOOKS.expected("a value"),
|
||||
Match(";").expected(),
|
||||
]
|
||||
],
|
||||
)
|
||||
|
||||
menu_item = Group(
|
||||
|
@ -84,7 +74,7 @@ menu_item = Group(
|
|||
Optional(UseIdent("id")),
|
||||
Match("{").expected(),
|
||||
Until(menu_attribute, "}"),
|
||||
]
|
||||
],
|
||||
)
|
||||
|
||||
menu_item_shorthand = Group(
|
||||
|
@ -97,45 +87,49 @@ menu_item_shorthand = Group(
|
|||
MenuAttribute,
|
||||
[UseLiteral("name", "label"), VALUE_HOOKS],
|
||||
),
|
||||
Optional([
|
||||
Optional(
|
||||
[
|
||||
",",
|
||||
Optional([
|
||||
Optional(
|
||||
[
|
||||
Group(
|
||||
MenuAttribute,
|
||||
[UseLiteral("name", "action"), VALUE_HOOKS],
|
||||
),
|
||||
Optional([
|
||||
Optional(
|
||||
[
|
||||
",",
|
||||
Group(
|
||||
MenuAttribute,
|
||||
[UseLiteral("name", "icon"), VALUE_HOOKS],
|
||||
),
|
||||
])
|
||||
])
|
||||
]),
|
||||
Match(")").expected(),
|
||||
]
|
||||
),
|
||||
]
|
||||
),
|
||||
]
|
||||
),
|
||||
Match(")").expected(),
|
||||
],
|
||||
)
|
||||
|
||||
menu_contents.children = [
|
||||
Match("{"),
|
||||
Until(AnyOf(
|
||||
Until(
|
||||
AnyOf(
|
||||
menu_section,
|
||||
menu_submenu,
|
||||
menu_item_shorthand,
|
||||
menu_item,
|
||||
menu_attribute,
|
||||
), "}"),
|
||||
),
|
||||
"}",
|
||||
),
|
||||
]
|
||||
|
||||
menu = Group(
|
||||
Menu,
|
||||
[
|
||||
"menu",
|
||||
UseLiteral("tag", "menu"),
|
||||
Optional(UseIdent("id")),
|
||||
menu_contents
|
||||
],
|
||||
["menu", UseLiteral("tag", "menu"), Optional(UseIdent("id")), menu_contents],
|
||||
)
|
||||
|
||||
|
||||
|
@ -144,10 +138,7 @@ menu = Group(
|
|||
matches=new_statement_patterns,
|
||||
)
|
||||
def menu_completer(ast_node, match_variables):
|
||||
yield Completion(
|
||||
"menu", CompletionItemKind.Snippet,
|
||||
snippet="menu {\n $0\n}"
|
||||
)
|
||||
yield Completion("menu", CompletionItemKind.Snippet, snippet="menu {\n $0\n}")
|
||||
|
||||
|
||||
@completer(
|
||||
|
@ -156,34 +147,21 @@ def menu_completer(ast_node, match_variables):
|
|||
)
|
||||
def menu_content_completer(ast_node, match_variables):
|
||||
yield Completion(
|
||||
"submenu", CompletionItemKind.Snippet,
|
||||
snippet="submenu {\n $0\n}"
|
||||
"submenu", CompletionItemKind.Snippet, snippet="submenu {\n $0\n}"
|
||||
)
|
||||
yield Completion(
|
||||
"section", CompletionItemKind.Snippet,
|
||||
snippet="section {\n $0\n}"
|
||||
"section", CompletionItemKind.Snippet, snippet="section {\n $0\n}"
|
||||
)
|
||||
yield Completion("item", CompletionItemKind.Snippet, snippet="item {\n $0\n}")
|
||||
yield Completion(
|
||||
"item", CompletionItemKind.Snippet,
|
||||
snippet="item {\n $0\n}"
|
||||
)
|
||||
yield Completion(
|
||||
"item (shorthand)", CompletionItemKind.Snippet,
|
||||
snippet='item (_("${1:Label}"), "${2:action-name}", "${3:icon-name}")'
|
||||
"item (shorthand)",
|
||||
CompletionItemKind.Snippet,
|
||||
snippet='item (_("${1:Label}"), "${2:action-name}", "${3:icon-name}")',
|
||||
)
|
||||
|
||||
yield Completion(
|
||||
"label", CompletionItemKind.Snippet,
|
||||
snippet='label: $0;'
|
||||
)
|
||||
yield Completion(
|
||||
"action", CompletionItemKind.Snippet,
|
||||
snippet='action: "$0";'
|
||||
)
|
||||
yield Completion(
|
||||
"icon", CompletionItemKind.Snippet,
|
||||
snippet='icon: "$0";'
|
||||
)
|
||||
yield Completion("label", CompletionItemKind.Snippet, snippet="label: $0;")
|
||||
yield Completion("action", CompletionItemKind.Snippet, snippet='action: "$0";')
|
||||
yield Completion("icon", CompletionItemKind.Snippet, snippet='icon: "$0";')
|
||||
|
||||
|
||||
@decompiler("menu")
|
||||
|
@ -193,6 +171,7 @@ def decompile_menu(ctx, gir, id=None):
|
|||
else:
|
||||
ctx.print("menu {")
|
||||
|
||||
|
||||
@decompiler("submenu")
|
||||
def decompile_submenu(ctx, gir, id=None):
|
||||
if id:
|
||||
|
@ -200,6 +179,7 @@ def decompile_submenu(ctx, gir, id=None):
|
|||
else:
|
||||
ctx.print("submenu {")
|
||||
|
||||
|
||||
@decompiler("item")
|
||||
def decompile_item(ctx, gir, id=None):
|
||||
if id:
|
||||
|
@ -207,6 +187,7 @@ def decompile_item(ctx, gir, id=None):
|
|||
else:
|
||||
ctx.print("item {")
|
||||
|
||||
|
||||
@decompiler("section")
|
||||
def decompile_section(ctx, gir, id=None):
|
||||
if id:
|
||||
|
|
|
@@ -32,7 +32,7 @@ class Widget(AstNode):
        if object is None:
            raise CompileError(
                f"Could not find object with ID {self.tokens['name']}",
                did_you_mean=(self.tokens['name'], self.root.objects_by_id.keys()),
                did_you_mean=(self.tokens["name"], self.root.objects_by_id.keys()),
            )
        elif object.gir_class and not object.gir_class.assignable_to(type):
            raise CompileError(
@@ -51,7 +51,6 @@ class Strings(AstNode):
    def container_is_string_list(self):
        validate_parent_type(self, "Gtk", "StringList", "StringList items")

    def emit_xml(self, xml: XmlEmitter):
        xml.start_tag("items")
        for child in self.children:

@@ -65,7 +64,4 @@ class Strings(AstNode):
    matches=new_statement_patterns,
)
def strings_completer(ast_node, match_variables):
    yield Completion(
        "strings", CompletionItemKind.Snippet,
        snippet="strings [$0]"
    )
    yield Completion("strings", CompletionItemKind.Snippet, snippet="strings [$0]")
@@ -54,13 +54,14 @@ class Styles:
    matches=new_statement_patterns,
)
def style_completer(ast_node, match_variables):
    yield Completion("styles", CompletionItemKind.Keyword, snippet="styles [\"$0\"]")
    yield Completion("styles", CompletionItemKind.Keyword, snippet='styles ["$0"]')


@decompiler("style")
def decompile_style(ctx, gir):
    ctx.print(f"styles [")


@decompiler("class")
def decompile_style_class(ctx, gir, name):
    ctx.print(f'"{name}",')
@ -27,13 +27,15 @@ from .common import *
|
|||
|
||||
class Child(AstNode):
|
||||
grammar = [
|
||||
Optional([
|
||||
Optional(
|
||||
[
|
||||
"[",
|
||||
Optional(["internal-child", UseLiteral("internal_child", True)]),
|
||||
UseIdent("child_type").expected("a child type"),
|
||||
Optional(ResponseId),
|
||||
"]",
|
||||
]),
|
||||
]
|
||||
),
|
||||
Object,
|
||||
]
|
||||
|
||||
|
|
|
@ -26,10 +26,12 @@ class Template(Object):
|
|||
grammar = [
|
||||
"template",
|
||||
UseIdent("name").expected("template class name"),
|
||||
Optional([
|
||||
Optional(
|
||||
[
|
||||
Match(":"),
|
||||
class_name.expected("parent class"),
|
||||
]),
|
||||
]
|
||||
),
|
||||
ObjectContent,
|
||||
]
|
||||
|
||||
|
|
|
@ -24,8 +24,12 @@ from .common import *
|
|||
|
||||
class GtkDirective(AstNode):
|
||||
grammar = Statement(
|
||||
Match("using").err("File must start with a \"using Gtk\" directive (e.g. `using Gtk 4.0;`)"),
|
||||
Match("Gtk").err("File must start with a \"using Gtk\" directive (e.g. `using Gtk 4.0;`)"),
|
||||
Match("using").err(
|
||||
'File must start with a "using Gtk" directive (e.g. `using Gtk 4.0;`)'
|
||||
),
|
||||
Match("Gtk").err(
|
||||
'File must start with a "using Gtk" directive (e.g. `using Gtk 4.0;`)'
|
||||
),
|
||||
UseNumberText("version").expected("a version number for GTK"),
|
||||
)
|
||||
|
||||
|
@ -34,17 +38,17 @@ class GtkDirective(AstNode):
|
|||
if self.tokens["version"] not in ["4.0"]:
|
||||
err = CompileError("Only GTK 4 is supported")
|
||||
if self.tokens["version"].startswith("4"):
|
||||
err.hint("Expected the GIR version, not an exact version number. Use `using Gtk 4.0;`.")
|
||||
err.hint(
|
||||
"Expected the GIR version, not an exact version number. Use `using Gtk 4.0;`."
|
||||
)
|
||||
else:
|
||||
err.hint("Expected `using Gtk 4.0;`")
|
||||
raise err
|
||||
|
||||
|
||||
@property
|
||||
def gir_namespace(self):
|
||||
return gir.get_namespace("Gtk", self.tokens["version"])
|
||||
|
||||
|
||||
def emit_xml(self, xml: XmlEmitter):
|
||||
xml.put_self_closing("requires", lib="gtk", version=self.tokens["version"])
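A standalone sketch of the version check above, to make the two hint branches concrete (illustrative only; the real validator raises a CompileError carrying the hint):

import typing as T


def gtk_version_problem(version: str) -> T.Optional[str]:
    # Mirrors the validator above: only the GIR version "4.0" is accepted.
    if version in ["4.0"]:
        return None
    if version.startswith("4"):
        return "Expected the GIR version, not an exact version number. Use `using Gtk 4.0;`."
    return "Expected `using Gtk 4.0;`"


assert gtk_version_problem("4.0") is None
assert gtk_version_problem("4.6.2") is not None  # exact GTK versions are rejected
assert gtk_version_problem("3.0") is not None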
|
||||
|
||||
|
|
|
@ -26,21 +26,13 @@ from .common import *
|
|||
class ResponseId(AstNode):
|
||||
"""Response ID of action widget."""
|
||||
|
||||
ALLOWED_PARENTS: T.List[T.Tuple[str, str]] = [
|
||||
("Gtk", "Dialog"),
|
||||
("Gtk", "InfoBar")
|
||||
]
|
||||
ALLOWED_PARENTS: T.List[T.Tuple[str, str]] = [("Gtk", "Dialog"), ("Gtk", "InfoBar")]
|
||||
|
||||
grammar = [
|
||||
UseIdent("response"),
|
||||
"=",
|
||||
AnyOf(
|
||||
UseIdent("response_id"),
|
||||
UseNumber("response_id")
|
||||
),
|
||||
Optional([
|
||||
Keyword("default"), UseLiteral("is_default", True)
|
||||
])
|
||||
AnyOf(UseIdent("response_id"), UseNumber("response_id")),
|
||||
Optional([Keyword("default"), UseLiteral("is_default", True)]),
|
||||
]
|
||||
|
||||
@validate()
|
||||
|
@ -88,18 +80,15 @@ class ResponseId(AstNode):
|
|||
|
||||
if isinstance(response, int):
|
||||
if response < 0:
|
||||
raise CompileError(
|
||||
"Numeric response type can't be negative")
|
||||
raise CompileError("Numeric response type can't be negative")
|
||||
elif isinstance(response, float):
|
||||
raise CompileError(
|
||||
"Response type must be GtkResponseType member or integer,"
|
||||
" not float"
|
||||
"Response type must be GtkResponseType member or integer," " not float"
|
||||
)
|
||||
else:
|
||||
responses = gir.get_type("ResponseType", "Gtk").members.keys()
|
||||
if response not in responses:
|
||||
raise CompileError(
|
||||
f"Response type \"{response}\" doesn't exist")
|
||||
raise CompileError(f'Response type "{response}" doesn\'t exist')
|
||||
|
||||
@validate("default")
|
||||
def no_multiple_default(self) -> None:
|
||||
|
@ -143,7 +132,7 @@ class ResponseId(AstNode):
|
|||
xml.start_tag(
|
||||
"action-widget",
|
||||
response=self.tokens["response_id"],
|
||||
default=self.tokens["is_default"]
|
||||
default=self.tokens["is_default"],
|
||||
)
|
||||
xml.put_text(self.widget_id)
|
||||
xml.end_tag()
|
||||
|
|
|
@ -30,10 +30,13 @@ class UI(AstNode):
|
|||
grammar = [
|
||||
GtkDirective,
|
||||
ZeroOrMore(Import),
|
||||
Until(AnyOf(
|
||||
Until(
|
||||
AnyOf(
|
||||
Template,
|
||||
OBJECT_HOOKS,
|
||||
), Eof()),
|
||||
),
|
||||
Eof(),
|
||||
),
|
||||
]
|
||||
|
||||
@property
|
||||
|
@ -59,11 +62,13 @@ class UI(AstNode):
|
|||
|
||||
return gir_ctx
|
||||
|
||||
|
||||
@property
|
||||
def objects_by_id(self):
|
||||
return { obj.tokens["id"]: obj for obj in self.iterate_children_recursive() if obj.tokens["id"] is not None }
|
||||
|
||||
return {
|
||||
obj.tokens["id"]: obj
|
||||
for obj in self.iterate_children_recursive()
|
||||
if obj.tokens["id"] is not None
|
||||
}
|
||||
|
||||
@validate()
|
||||
def gir_errors(self):
|
||||
|
@ -72,17 +77,16 @@ class UI(AstNode):
|
|||
if len(self._gir_errors):
|
||||
raise MultipleErrors(self._gir_errors)
|
||||
|
||||
|
||||
@validate()
|
||||
def at_most_one_template(self):
|
||||
if len(self.children[Template]) > 1:
|
||||
for template in self.children[Template][1:]:
|
||||
raise CompileError(
|
||||
f"Only one template may be defined per file, but this file contains {len(self.children[Template])}",
|
||||
template.group.tokens["name"].start, template.group.tokens["name"].end,
|
||||
template.group.tokens["name"].start,
|
||||
template.group.tokens["name"].end,
|
||||
)
|
||||
|
||||
|
||||
@validate()
|
||||
def unique_ids(self):
|
||||
passed = {}
|
||||
|
@ -92,10 +96,11 @@ class UI(AstNode):
|
|||
|
||||
if obj.tokens["id"] in passed:
|
||||
token = obj.group.tokens["id"]
|
||||
raise CompileError(f"Duplicate object ID '{obj.tokens['id']}'", token.start, token.end)
|
||||
raise CompileError(
|
||||
f"Duplicate object ID '{obj.tokens['id']}'", token.start, token.end
|
||||
)
|
||||
passed[obj.tokens["id"]] = obj
|
||||
|
||||
|
||||
def emit_xml(self, xml: XmlEmitter):
|
||||
xml.start_tag("interface")
|
||||
for x in self.children:
|
||||
|
|
|
@ -71,7 +71,9 @@ class LiteralValue(Value):
|
|||
try:
|
||||
int(self.tokens["value"])
|
||||
except:
|
||||
raise CompileError(f"Cannot convert {self.group.tokens['value']} to integer")
|
||||
raise CompileError(
|
||||
f"Cannot convert {self.group.tokens['value']} to integer"
|
||||
)
|
||||
|
||||
elif isinstance(type, gir.UIntType):
|
||||
try:
|
||||
|
@ -79,13 +81,17 @@ class LiteralValue(Value):
|
|||
if int(self.tokens["value"]) < 0:
|
||||
raise Exception()
|
||||
except:
|
||||
raise CompileError(f"Cannot convert {self.group.tokens['value']} to unsigned integer")
|
||||
raise CompileError(
|
||||
f"Cannot convert {self.group.tokens['value']} to unsigned integer"
|
||||
)
|
||||
|
||||
elif isinstance(type, gir.FloatType):
|
||||
try:
|
||||
float(self.tokens["value"])
|
||||
except:
|
||||
raise CompileError(f"Cannot convert {self.group.tokens['value']} to float")
|
||||
raise CompileError(
|
||||
f"Cannot convert {self.group.tokens['value']} to float"
|
||||
)
|
||||
|
||||
elif isinstance(type, gir.StringType):
|
||||
pass
|
||||
|
@ -100,15 +106,20 @@ class LiteralValue(Value):
|
|||
"Gtk.ShortcutAction",
|
||||
]
|
||||
if type.full_name not in parseable_types:
|
||||
raise CompileError(f"Cannot convert {self.group.tokens['value']} to {type.full_name}")
|
||||
raise CompileError(
|
||||
f"Cannot convert {self.group.tokens['value']} to {type.full_name}"
|
||||
)
|
||||
|
||||
elif type is not None:
|
||||
raise CompileError(f"Cannot convert {self.group.tokens['value']} to {type.full_name}")
|
||||
raise CompileError(
|
||||
f"Cannot convert {self.group.tokens['value']} to {type.full_name}"
|
||||
)
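The chain of try/except blocks above amounts to probing whether the literal's text can be interpreted as the target type; a minimal standalone sketch of that idea (the kind names here are illustrative, not the gir type classes):

def literal_fits(value: str, kind: str) -> bool:
    # Probe the conversion the same way the validator above does.
    try:
        if kind == "int":
            int(value)
        elif kind == "uint":
            if int(value) < 0:
                raise ValueError("unsigned integers cannot be negative")
        elif kind == "float":
            float(value)
        return True
    except ValueError:
        return False


assert literal_fits("42", "uint")
assert literal_fits("-3", "int")
assert not literal_fits("-3", "uint")
assert not literal_fits("1.5", "int")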
|
||||
|
||||
|
||||
class Flag(AstNode):
|
||||
grammar = UseIdent("value")
|
||||
|
||||
|
||||
class FlagsValue(Value):
|
||||
grammar = [Flag, "|", Delimited(Flag, "|")]
|
||||
|
||||
|
@ -133,14 +144,14 @@ class IdentValue(Value):
|
|||
if self.tokens["value"] not in type.members:
|
||||
raise CompileError(
|
||||
f"{self.tokens['value']} is not a member of {type.full_name}",
|
||||
did_you_mean=(self.tokens['value'], type.members.keys()),
|
||||
did_you_mean=(self.tokens["value"], type.members.keys()),
|
||||
)
|
||||
|
||||
elif isinstance(type, gir.BoolType):
|
||||
if self.tokens["value"] not in ["true", "false"]:
|
||||
raise CompileError(
|
||||
f"Expected 'true' or 'false' for boolean value",
|
||||
did_you_mean=(self.tokens['value'], ["true", "false"]),
|
||||
did_you_mean=(self.tokens["value"], ["true", "false"]),
|
||||
)
|
||||
|
||||
elif type is not None:
|
||||
|
@ -148,14 +159,13 @@ class IdentValue(Value):
|
|||
if object is None:
|
||||
raise CompileError(
|
||||
f"Could not find object with ID {self.tokens['value']}",
|
||||
did_you_mean=(self.tokens['value'], self.root.objects_by_id.keys()),
|
||||
did_you_mean=(self.tokens["value"], self.root.objects_by_id.keys()),
|
||||
)
|
||||
elif object.gir_class and not object.gir_class.assignable_to(type):
|
||||
raise CompileError(
|
||||
f"Cannot assign {object.gir_class.full_name} to {type.full_name}"
|
||||
)
|
||||
|
||||
|
||||
@docs()
|
||||
def docs(self):
|
||||
type = self.parent.value_type
|
||||
|
@ -167,9 +177,7 @@ class IdentValue(Value):
|
|||
elif isinstance(type, gir.GirNode):
|
||||
return type.doc
|
||||
|
||||
|
||||
def get_semantic_tokens(self) -> T.Iterator[SemanticToken]:
|
||||
if isinstance(self.parent.value_type, gir.Enumeration):
|
||||
token = self.group.tokens["value"]
|
||||
yield SemanticToken(token.start, token.end, SemanticTokenType.EnumMember)
|
||||
|
||||
|
|
|
@ -31,6 +31,7 @@ def command(json_method):
|
|||
def decorator(func):
|
||||
func._json_method = json_method
|
||||
return func
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
|
@ -46,8 +47,16 @@ class OpenFile:
|
|||
|
||||
def apply_changes(self, changes):
|
||||
for change in changes:
|
||||
start = utils.pos_to_idx(change["range"]["start"]["line"], change["range"]["start"]["character"], self.text)
|
||||
end = utils.pos_to_idx(change["range"]["end"]["line"], change["range"]["end"]["character"], self.text)
|
||||
start = utils.pos_to_idx(
|
||||
change["range"]["start"]["line"],
|
||||
change["range"]["start"]["character"],
|
||||
self.text,
|
||||
)
|
||||
end = utils.pos_to_idx(
|
||||
change["range"]["end"]["line"],
|
||||
change["range"]["end"]["character"],
|
||||
self.text,
|
||||
)
|
||||
self.text = self.text[:start] + change["text"] + self.text[end:]
|
||||
self._update()
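To make the incremental-sync logic above concrete: each change carries an LSP range (start and end given as line/character) plus replacement text, and the handler splices it in by converting both positions to flat string indices. A self-contained sketch (the document text and the edit are made up; pos_to_idx matches the helper in utils):

def pos_to_idx(line: int, col: int, text: str) -> int:
    lines = text.splitlines(keepends=True)
    return sum(len(l) for l in lines[:line]) + col


text = "using Gtk 4.0;\n\nBox box {\n}\n"
change = {
    "range": {
        "start": {"line": 2, "character": 4},
        "end": {"line": 2, "character": 7},
    },
    "text": "main_box",
}
start = pos_to_idx(change["range"]["start"]["line"],
                   change["range"]["start"]["character"], text)
end = pos_to_idx(change["range"]["end"]["line"],
                 change["range"]["end"]["character"], text)
text = text[:start] + change["text"] + text[end:]
assert "Box main_box {" in text  # the identifier "box" was replaced in place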
|
||||
|
||||
|
@ -65,7 +74,6 @@ class OpenFile:
|
|||
except CompileError as e:
|
||||
self.diagnostics.append(e)
|
||||
|
||||
|
||||
def calc_semantic_tokens(self) -> T.List[int]:
|
||||
tokens = list(self.ast.get_semantic_tokens())
|
||||
token_lists = [
|
||||
|
@ -74,7 +82,9 @@ class OpenFile:
|
|||
token.end - token.start, # length
|
||||
token.type,
|
||||
0, # token modifiers
|
||||
] for token in tokens]
|
||||
]
|
||||
for token in tokens
|
||||
]
|
||||
|
||||
# convert line, column numbers to deltas
|
||||
for i, token_list in enumerate(token_lists[1:]):
|
||||
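The "convert line, column numbers to deltas" step above follows the LSP semantic-tokens encoding: every token becomes five integers (line, start column, length, type, modifiers), with line and column stored relative to the previous token, and the column delta only applying when the token stays on the same line. A sketch of that encoding on made-up positions:

raw = [(0, 6, 3, 1, 0), (0, 10, 4, 2, 0), (2, 0, 5, 1, 0)]  # (line, col, length, type, mods)
data = []
prev_line = prev_col = 0
for line, col, length, token_type, modifiers in raw:
    delta_line = line - prev_line
    delta_col = col - prev_col if delta_line == 0 else col
    data.extend([delta_line, delta_col, length, token_type, modifiers])
    prev_line, prev_col = line, col
assert data == [0, 6, 3, 1, 0, 0, 4, 4, 2, 0, 2, 0, 5, 1, 0]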
|
@ -122,12 +132,13 @@ class LanguageServer:
|
|||
except Exception as e:
|
||||
self._log(traceback.format_exc())
|
||||
|
||||
|
||||
def _send(self, data):
|
||||
data["jsonrpc"] = "2.0"
|
||||
line = json.dumps(data, separators=(",", ":")) + "\r\n"
|
||||
self._log("output: " + line)
|
||||
sys.stdout.write(f"Content-Length: {len(line.encode())}\r\nContent-Type: application/vscode-jsonrpc; charset=utf-8\r\n\r\n{line}")
|
||||
sys.stdout.write(
|
||||
f"Content-Length: {len(line.encode())}\r\nContent-Type: application/vscode-jsonrpc; charset=utf-8\r\n\r\n{line}"
|
||||
)
|
||||
sys.stdout.flush()
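The framing written by _send is the standard JSON-RPC-over-stdio format used by language servers: a Content-Length header measured in bytes, an optional Content-Type header, a blank line, then the JSON body. A minimal sketch of building one frame:

import json

body = json.dumps({"jsonrpc": "2.0", "id": 1, "result": None}, separators=(",", ":")) + "\r\n"
frame = (
    f"Content-Length: {len(body.encode())}\r\n"
    "Content-Type: application/vscode-jsonrpc; charset=utf-8\r\n"
    "\r\n"
    f"{body}"
)
# The client reads the headers, then exactly Content-Length bytes of JSON.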
|
||||
|
||||
def _log(self, msg):
|
||||
|
@ -137,22 +148,27 @@ class LanguageServer:
|
|||
self.logfile.flush()
|
||||
|
||||
def _send_response(self, id, result):
|
||||
self._send({
|
||||
self._send(
|
||||
{
|
||||
"id": id,
|
||||
"result": result,
|
||||
})
|
||||
}
|
||||
)
|
||||
|
||||
def _send_notification(self, method, params):
|
||||
self._send({
|
||||
self._send(
|
||||
{
|
||||
"method": method,
|
||||
"params": params,
|
||||
})
|
||||
|
||||
}
|
||||
)
|
||||
|
||||
@command("initialize")
|
||||
def initialize(self, id, params):
|
||||
self.client_capabilities = params.get("capabilities")
|
||||
self._send_response(id, {
|
||||
self._send_response(
|
||||
id,
|
||||
{
|
||||
"capabilities": {
|
||||
"textDocumentSync": {
|
||||
"openClose": True,
|
||||
|
@ -168,7 +184,8 @@ class LanguageServer:
|
|||
"codeActionProvider": {},
|
||||
"hoverProvider": True,
|
||||
}
|
||||
})
|
||||
},
|
||||
)
|
||||
|
||||
@command("textDocument/didOpen")
|
||||
def didOpen(self, id, params):
|
||||
|
@ -195,14 +212,23 @@ class LanguageServer:
|
|||
@command("textDocument/hover")
|
||||
def hover(self, id, params):
|
||||
open_file = self._open_files[params["textDocument"]["uri"]]
|
||||
docs = open_file.ast and open_file.ast.get_docs(utils.pos_to_idx(params["position"]["line"], params["position"]["character"], open_file.text))
|
||||
docs = open_file.ast and open_file.ast.get_docs(
|
||||
utils.pos_to_idx(
|
||||
params["position"]["line"],
|
||||
params["position"]["character"],
|
||||
open_file.text,
|
||||
)
|
||||
)
|
||||
if docs:
|
||||
self._send_response(id, {
|
||||
self._send_response(
|
||||
id,
|
||||
{
|
||||
"contents": {
|
||||
"kind": "markdown",
|
||||
"value": docs,
|
||||
}
|
||||
})
|
||||
},
|
||||
)
|
||||
else:
|
||||
self._send_response(id, None)
|
||||
|
||||
|
@ -214,26 +240,39 @@ class LanguageServer:
|
|||
self._send_response(id, [])
|
||||
return
|
||||
|
||||
idx = utils.pos_to_idx(params["position"]["line"], params["position"]["character"], open_file.text)
|
||||
idx = utils.pos_to_idx(
|
||||
params["position"]["line"], params["position"]["character"], open_file.text
|
||||
)
|
||||
completions = complete(open_file.ast, open_file.tokens, idx)
|
||||
self._send_response(id, [completion.to_json(True) for completion in completions])
|
||||
|
||||
self._send_response(
|
||||
id, [completion.to_json(True) for completion in completions]
|
||||
)
|
||||
|
||||
@command("textDocument/semanticTokens/full")
|
||||
def semantic_tokens(self, id, params):
|
||||
open_file = self._open_files[params["textDocument"]["uri"]]
|
||||
|
||||
self._send_response(id, {
|
||||
self._send_response(
|
||||
id,
|
||||
{
|
||||
"data": open_file.calc_semantic_tokens(),
|
||||
})
|
||||
|
||||
},
|
||||
)
|
||||
|
||||
@command("textDocument/codeAction")
|
||||
def code_actions(self, id, params):
|
||||
open_file = self._open_files[params["textDocument"]["uri"]]
|
||||
|
||||
range_start = utils.pos_to_idx(params["range"]["start"]["line"], params["range"]["start"]["character"], open_file.text)
|
||||
range_end = utils.pos_to_idx(params["range"]["end"]["line"], params["range"]["end"]["character"], open_file.text)
|
||||
range_start = utils.pos_to_idx(
|
||||
params["range"]["start"]["line"],
|
||||
params["range"]["start"]["character"],
|
||||
open_file.text,
|
||||
)
|
||||
range_end = utils.pos_to_idx(
|
||||
params["range"]["end"]["line"],
|
||||
params["range"]["end"]["character"],
|
||||
open_file.text,
|
||||
)
|
||||
|
||||
actions = [
|
||||
{
|
||||
|
@ -242,12 +281,16 @@ class LanguageServer:
|
|||
"diagnostics": [self._create_diagnostic(open_file.text, diagnostic)],
|
||||
"edit": {
|
||||
"changes": {
|
||||
open_file.uri: [{
|
||||
"range": utils.idxs_to_range(diagnostic.start, diagnostic.end, open_file.text),
|
||||
"newText": action.replace_with
|
||||
}]
|
||||
open_file.uri: [
|
||||
{
|
||||
"range": utils.idxs_to_range(
|
||||
diagnostic.start, diagnostic.end, open_file.text
|
||||
),
|
||||
"newText": action.replace_with,
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
}
|
||||
for diagnostic in open_file.diagnostics
|
||||
if not (diagnostic.end < range_start or diagnostic.start > range_end)
|
||||
|
@ -256,12 +299,17 @@ class LanguageServer:
|
|||
|
||||
self._send_response(id, actions)
|
||||
|
||||
|
||||
def _send_file_updates(self, open_file: OpenFile):
|
||||
self._send_notification("textDocument/publishDiagnostics", {
|
||||
self._send_notification(
|
||||
"textDocument/publishDiagnostics",
|
||||
{
|
||||
"uri": open_file.uri,
|
||||
"diagnostics": [self._create_diagnostic(open_file.text, err) for err in open_file.diagnostics],
|
||||
})
|
||||
"diagnostics": [
|
||||
self._create_diagnostic(open_file.text, err)
|
||||
for err in open_file.diagnostics
|
||||
],
|
||||
},
|
||||
)
|
||||
|
||||
def _create_diagnostic(self, text, err):
|
||||
return {
|
||||
|
@ -275,4 +323,3 @@ for name in dir(LanguageServer):
|
|||
item = getattr(LanguageServer, name)
|
||||
if callable(item) and hasattr(item, "_json_method"):
|
||||
LanguageServer.commands[item._json_method] = item
|
||||
|
||||
|
|
|
@ -31,13 +31,16 @@ class TextDocumentSyncKind(enum.IntEnum):
|
|||
Full = 1
|
||||
Incremental = 2
|
||||
|
||||
|
||||
class CompletionItemTag(enum.IntEnum):
|
||||
Deprecated = 1
|
||||
|
||||
|
||||
class InsertTextFormat(enum.IntEnum):
|
||||
PlainText = 1
|
||||
Snippet = 2
|
||||
|
||||
|
||||
class CompletionItemKind(enum.IntEnum):
|
||||
Text = 1
|
||||
Method = 2
|
||||
|
@ -91,7 +94,9 @@ class Completion:
|
|||
"documentation": {
|
||||
"kind": "markdown",
|
||||
"value": self.docs,
|
||||
} if self.docs else None,
|
||||
}
|
||||
if self.docs
|
||||
else None,
|
||||
"deprecated": self.deprecated,
|
||||
"insertText": insert_text,
|
||||
"insertTextFormat": insert_text_format,
|
||||
|
|
|
@ -37,19 +37,37 @@ class BlueprintApp:
|
|||
self.subparsers = self.parser.add_subparsers(metavar="command")
|
||||
self.parser.set_defaults(func=self.cmd_help)
|
||||
|
||||
compile = self.add_subcommand("compile", "Compile blueprint files", self.cmd_compile)
|
||||
compile = self.add_subcommand(
|
||||
"compile", "Compile blueprint files", self.cmd_compile
|
||||
)
|
||||
compile.add_argument("--output", dest="output", default="-")
|
||||
compile.add_argument("input", metavar="filename", default=sys.stdin, type=argparse.FileType('r'))
|
||||
compile.add_argument(
|
||||
"input", metavar="filename", default=sys.stdin, type=argparse.FileType("r")
|
||||
)
|
||||
|
||||
batch_compile = self.add_subcommand("batch-compile", "Compile many blueprint files at once", self.cmd_batch_compile)
|
||||
batch_compile = self.add_subcommand(
|
||||
"batch-compile",
|
||||
"Compile many blueprint files at once",
|
||||
self.cmd_batch_compile,
|
||||
)
|
||||
batch_compile.add_argument("output_dir", metavar="output-dir")
|
||||
batch_compile.add_argument("input_dir", metavar="input-dir")
|
||||
batch_compile.add_argument("inputs", nargs="+", metavar="filenames", default=sys.stdin, type=argparse.FileType('r'))
|
||||
batch_compile.add_argument(
|
||||
"inputs",
|
||||
nargs="+",
|
||||
metavar="filenames",
|
||||
default=sys.stdin,
|
||||
type=argparse.FileType("r"),
|
||||
)
|
||||
|
||||
port = self.add_subcommand("port", "Interactive porting tool", self.cmd_port)
|
||||
|
||||
lsp = self.add_subcommand("lsp", "Run the language server (for internal use by IDEs)", self.cmd_lsp)
|
||||
lsp.add_argument("--logfile", dest="logfile", default=None, type=argparse.FileType('a'))
|
||||
lsp = self.add_subcommand(
|
||||
"lsp", "Run the language server (for internal use by IDEs)", self.cmd_lsp
|
||||
)
|
||||
lsp.add_argument(
|
||||
"--logfile", dest="logfile", default=None, type=argparse.FileType("a")
|
||||
)
|
||||
|
||||
self.add_subcommand("help", "Show this message", self.cmd_help)
|
||||
|
||||
|
@ -65,17 +83,14 @@ class BlueprintApp:
|
|||
except:
|
||||
report_bug()
|
||||
|
||||
|
||||
def add_subcommand(self, name, help, func):
|
||||
parser = self.subparsers.add_parser(name, help=help)
|
||||
parser.set_defaults(func=func)
|
||||
return parser
|
||||
|
||||
|
||||
def cmd_help(self, opts):
|
||||
self.parser.print_help()
|
||||
|
||||
|
||||
def cmd_compile(self, opts):
|
||||
data = opts.input.read()
|
||||
try:
|
||||
|
@ -93,14 +108,15 @@ class BlueprintApp:
|
|||
e.pretty_print(opts.input.name, data)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def cmd_batch_compile(self, opts):
|
||||
for file in opts.inputs:
|
||||
data = file.read()
|
||||
|
||||
try:
|
||||
if not os.path.commonpath([file.name, opts.input_dir]):
|
||||
print(f"{Colors.RED}{Colors.BOLD}error: input file '{file.name}' is not in input directory '{opts.input_dir}'{Colors.CLEAR}")
|
||||
print(
|
||||
f"{Colors.RED}{Colors.BOLD}error: input file '{file.name}' is not in input directory '{opts.input_dir}'{Colors.CLEAR}"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
xml, warnings = self._compile(data)
|
||||
|
@ -111,9 +127,8 @@ class BlueprintApp:
|
|||
path = os.path.join(
|
||||
opts.output_dir,
|
||||
os.path.relpath(
|
||||
os.path.splitext(file.name)[0] + ".ui",
|
||||
opts.input_dir
|
||||
)
|
||||
os.path.splitext(file.name)[0] + ".ui", opts.input_dir
|
||||
),
|
||||
)
|
||||
os.makedirs(os.path.dirname(path), exist_ok=True)
|
||||
with open(path, "w") as file:
|
||||
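The path arithmetic above maps each input file to a mirrored location under the output directory, swapping the .blp extension for .ui. A worked example with illustrative paths:

import os

input_dir, output_dir = "src/ui", "build/ui"
name = "src/ui/widgets/sidebar.blp"
path = os.path.join(
    output_dir,
    os.path.relpath(os.path.splitext(name)[0] + ".ui", input_dir),
)
assert path == os.path.join(output_dir, "widgets", "sidebar.ui")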
|
@ -122,16 +137,13 @@ class BlueprintApp:
|
|||
e.pretty_print(file.name, data)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def cmd_lsp(self, opts):
|
||||
langserv = LanguageServer(opts.logfile)
|
||||
langserv.run()
|
||||
|
||||
|
||||
def cmd_port(self, opts):
|
||||
interactive_port.run(opts)
|
||||
|
||||
|
||||
def _compile(self, data: str) -> T.Tuple[str, T.List[PrintableError]]:
|
||||
tokens = tokenizer.tokenize(data)
|
||||
ast, errors, warnings = parser.parse(tokens)
|
||||
|
|
|
@ -24,7 +24,13 @@ import typing as T
|
|||
from collections import defaultdict
|
||||
from enum import Enum
|
||||
|
||||
from .errors import assert_true, CompilerBugError, CompileError, CompileWarning, UnexpectedTokenError
|
||||
from .errors import (
|
||||
assert_true,
|
||||
CompilerBugError,
|
||||
CompileError,
|
||||
CompileWarning,
|
||||
UnexpectedTokenError,
|
||||
)
|
||||
from .tokenizer import Token, TokenType
|
||||
|
||||
|
||||
|
@ -83,12 +89,16 @@ class ParseGroup:
|
|||
try:
|
||||
return self.ast_type(self, children, self.keys, incomplete=self.incomplete)
|
||||
except TypeError as e:
|
||||
raise CompilerBugError(f"Failed to construct ast.{self.ast_type.__name__} from ParseGroup. See the previous stacktrace.")
|
||||
raise CompilerBugError(
|
||||
f"Failed to construct ast.{self.ast_type.__name__} from ParseGroup. See the previous stacktrace."
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
result = str(self.ast_type.__name__)
|
||||
result += "".join([f"\n{key}: {val}" for key, val in self.keys.items()]) + "\n"
|
||||
result += "\n".join([str(child) for children in self.children.values() for child in children])
|
||||
result += "\n".join(
|
||||
[str(child) for children in self.children.values() for child in children]
|
||||
)
|
||||
return result.replace("\n", "\n ")
|
||||
|
||||
|
||||
|
@ -109,7 +119,6 @@ class ParseContext:
|
|||
self.errors = []
|
||||
self.warnings = []
|
||||
|
||||
|
||||
def create_child(self):
|
||||
"""Creates a new ParseContext at this context's position. The new
|
||||
context will be used to parse one node. If parsing is successful, the
|
||||
|
@ -148,7 +157,6 @@ class ParseContext:
|
|||
elif other.last_group:
|
||||
self.last_group = other.last_group
|
||||
|
||||
|
||||
def start_group(self, ast_type):
|
||||
"""Sets this context to have its own match group."""
|
||||
assert_true(self.group is None)
|
||||
|
@ -164,10 +172,12 @@ class ParseContext:
|
|||
parsed, but the parser recovered)."""
|
||||
self.group_incomplete = True
|
||||
|
||||
|
||||
def skip(self):
|
||||
"""Skips whitespace and comments."""
|
||||
while self.index < len(self.tokens) and self.tokens[self.index].type in SKIP_TOKENS:
|
||||
while (
|
||||
self.index < len(self.tokens)
|
||||
and self.tokens[self.index].type in SKIP_TOKENS
|
||||
):
|
||||
self.index += 1
|
||||
|
||||
def next_token(self) -> Token:
|
||||
|
@ -192,9 +202,11 @@ class ParseContext:
|
|||
self.skip()
|
||||
end = self.tokens[self.index - 1].end
|
||||
|
||||
if (len(self.errors)
|
||||
if (
|
||||
len(self.errors)
|
||||
and isinstance((err := self.errors[-1]), UnexpectedTokenError)
|
||||
and err.end == start):
|
||||
and err.end == start
|
||||
):
|
||||
err.end = end
|
||||
else:
|
||||
self.errors.append(UnexpectedTokenError(start, end))
|
||||
|
@ -270,7 +282,9 @@ class Warning(ParseNode):
|
|||
if self.child.parse(ctx).succeeded():
|
||||
start_token = ctx.tokens[start_idx]
|
||||
end_token = ctx.tokens[ctx.index]
|
||||
ctx.warnings.append(CompileWarning(self.message, start_token.start, end_token.end))
|
||||
ctx.warnings.append(
|
||||
CompileWarning(self.message, start_token.start, end_token.end)
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
|
@ -295,6 +309,7 @@ class Fail(ParseNode):
|
|||
|
||||
class Group(ParseNode):
|
||||
"""ParseNode that creates a match group."""
|
||||
|
||||
def __init__(self, ast_type, child):
|
||||
self.ast_type = ast_type
|
||||
self.child = to_parse_node(child)
|
||||
|
@ -307,6 +322,7 @@ class Group(ParseNode):
|
|||
|
||||
class Sequence(ParseNode):
|
||||
"""ParseNode that attempts to match all of its children in sequence."""
|
||||
|
||||
def __init__(self, *children):
|
||||
self.children = [to_parse_node(child) for child in children]
|
||||
|
||||
|
@ -320,6 +336,7 @@ class Sequence(ParseNode):
|
|||
class Statement(ParseNode):
|
||||
"""ParseNode that attempts to match all of its children in sequence. If any
|
||||
child raises an error, the error will be logged but parsing will continue."""
|
||||
|
||||
def __init__(self, *children):
|
||||
self.children = [to_parse_node(child) for child in children]
|
||||
|
||||
|
@ -344,12 +361,14 @@ class Statement(ParseNode):
|
|||
class AnyOf(ParseNode):
|
||||
"""ParseNode that attempts to match exactly one of its children. Child
|
||||
nodes are attempted in order."""
|
||||
|
||||
def __init__(self, *children):
|
||||
self.children = children
|
||||
|
||||
@property
|
||||
def children(self):
|
||||
return self._children
|
||||
|
||||
@children.setter
|
||||
def children(self, children):
|
||||
self._children = [to_parse_node(child) for child in children]
|
||||
|
@ -365,6 +384,7 @@ class Until(ParseNode):
|
|||
"""ParseNode that repeats its child until a delimiting token is found. If
|
||||
the child does not match, one token is skipped and the match is attempted
|
||||
again."""
|
||||
|
||||
def __init__(self, child, delimiter):
|
||||
self.child = to_parse_node(child)
|
||||
self.delimiter = to_parse_node(delimiter)
|
||||
|
@ -388,10 +408,10 @@ class ZeroOrMore(ParseNode):
|
|||
"""ParseNode that matches its child any number of times (including zero
|
||||
times). It cannot fail to parse. If its child raises an exception, one token
|
||||
will be skipped and parsing will continue."""
|
||||
|
||||
def __init__(self, child):
|
||||
self.child = to_parse_node(child)
|
||||
|
||||
|
||||
def _parse(self, ctx):
|
||||
while True:
|
||||
try:
|
||||
|
@ -405,6 +425,7 @@ class ZeroOrMore(ParseNode):
|
|||
class Delimited(ParseNode):
|
||||
"""ParseNode that matches its first child any number of times (including zero
|
||||
times) with its second child in between and optionally at the end."""
|
||||
|
||||
def __init__(self, child, delimiter):
|
||||
self.child = to_parse_node(child)
|
||||
self.delimiter = to_parse_node(delimiter)
|
||||
|
@ -418,6 +439,7 @@ class Delimited(ParseNode):
|
|||
class Optional(ParseNode):
|
||||
"""ParseNode that matches its child zero or one times. It cannot fail to
|
||||
parse."""
|
||||
|
||||
def __init__(self, child):
|
||||
self.child = to_parse_node(child)
|
||||
|
||||
|
@ -428,6 +450,7 @@ class Optional(ParseNode):
|
|||
|
||||
class Eof(ParseNode):
|
||||
"""ParseNode that matches an EOF token."""
|
||||
|
||||
def _parse(self, ctx: ParseContext) -> bool:
|
||||
token = ctx.next_token()
|
||||
return token.type == TokenType.EOF
|
||||
|
@ -435,6 +458,7 @@ class Eof(ParseNode):
|
|||
|
||||
class Match(ParseNode):
|
||||
"""ParseNode that matches the given literal token."""
|
||||
|
||||
def __init__(self, op):
|
||||
self.op = op
|
||||
|
||||
|
@ -453,6 +477,7 @@ class Match(ParseNode):
|
|||
class UseIdent(ParseNode):
|
||||
"""ParseNode that matches any identifier and sets it in a key=value pair on
|
||||
the containing match group."""
|
||||
|
||||
def __init__(self, key):
|
||||
self.key = key
|
||||
|
||||
|
@ -468,6 +493,7 @@ class UseIdent(ParseNode):
|
|||
class UseNumber(ParseNode):
|
||||
"""ParseNode that matches a number and sets it in a key=value pair on
|
||||
the containing match group."""
|
||||
|
||||
def __init__(self, key):
|
||||
self.key = key
|
||||
|
||||
|
@ -486,6 +512,7 @@ class UseNumber(ParseNode):
|
|||
class UseNumberText(ParseNode):
|
||||
"""ParseNode that matches a number, but sets its *original text* it in a
|
||||
key=value pair on the containing match group."""
|
||||
|
||||
def __init__(self, key):
|
||||
self.key = key
|
||||
|
||||
|
@ -501,6 +528,7 @@ class UseNumberText(ParseNode):
|
|||
class UseQuoted(ParseNode):
|
||||
"""ParseNode that matches a quoted string and sets it in a key=value pair
|
||||
on the containing match group."""
|
||||
|
||||
def __init__(self, key):
|
||||
self.key = key
|
||||
|
||||
|
@ -509,11 +537,13 @@ class UseQuoted(ParseNode):
|
|||
if token.type != TokenType.QUOTED:
|
||||
return False
|
||||
|
||||
string = (str(token)[1:-1]
|
||||
string = (
|
||||
str(token)[1:-1]
|
||||
.replace("\\n", "\n")
|
||||
.replace("\\\"", "\"")
|
||||
.replace('\\"', '"')
|
||||
.replace("\\\\", "\\")
|
||||
.replace("\\'", "\'"))
|
||||
.replace("\\'", "'")
|
||||
)
|
||||
ctx.set_group_val(self.key, string, token)
|
||||
return True
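For reference, the unescaping chain above applied to a sample quoted token (sketch; the input is the raw token text, surrounding quotes included):

raw = r'"hello \"blueprint\"\n"'  # token text as it appears in the source
unescaped = (
    raw[1:-1]
    .replace("\\n", "\n")
    .replace('\\"', '"')
    .replace("\\\\", "\\")
    .replace("\\'", "'")
)
assert unescaped == 'hello "blueprint"\n'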
|
||||
|
||||
|
@ -522,6 +552,7 @@ class UseLiteral(ParseNode):
|
|||
"""ParseNode that doesn't match anything, but rather sets a static key=value
|
||||
pair on the containing group. Useful for, e.g., property and signal flags:
|
||||
`Sequence(Keyword("swapped"), UseLiteral("swapped", True))`"""
|
||||
|
||||
def __init__(self, key, literal):
|
||||
self.key = key
|
||||
self.literal = literal
|
||||
|
@ -534,6 +565,7 @@ class UseLiteral(ParseNode):
|
|||
class Keyword(ParseNode):
|
||||
"""Matches the given identifier and sets it as a named token, with the name
|
||||
being the identifier itself."""
|
||||
|
||||
def __init__(self, kw):
|
||||
self.kw = kw
|
||||
self.set_token = True
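Taken together, these ParseNode subclasses form a small combinator library, and the grammars elsewhere in this commit are just nested lists of them. As an illustration, the ResponseId grammar shown earlier in the diff is composed like this (the import path is assumed for the example; plain strings such as "=" are converted by to_parse_node, presumably into Match nodes):

from blueprintcompiler.parse_tree import (  # module path assumed
    AnyOf, Keyword, Optional, UseIdent, UseLiteral, UseNumber,
)

grammar = [
    UseIdent("response"),
    "=",
    AnyOf(UseIdent("response_id"), UseNumber("response_id")),
    Optional([Keyword("default"), UseLiteral("is_default", True)]),
]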
|
||||
|
|
|
@ -86,7 +86,9 @@ def _tokenize(ui_ml: str):
|
|||
break
|
||||
|
||||
if not matched:
|
||||
raise CompileError("Could not determine what kind of syntax is meant here", i, i)
|
||||
raise CompileError(
|
||||
"Could not determine what kind of syntax is meant here", i, i
|
||||
)
|
||||
|
||||
yield Token(TokenType.EOF, i, i, ui_ml)
|
||||
|
||||
|
|
|
@ -21,15 +21,15 @@ import typing as T
|
|||
|
||||
|
||||
class Colors:
|
||||
RED = '\033[91m'
|
||||
GREEN = '\033[92m'
|
||||
YELLOW = '\033[33m'
|
||||
FAINT = '\033[2m'
|
||||
BOLD = '\033[1m'
|
||||
BLUE = '\033[34m'
|
||||
UNDERLINE = '\033[4m'
|
||||
NO_UNDERLINE = '\033[24m'
|
||||
CLEAR = '\033[0m'
|
||||
RED = "\033[91m"
|
||||
GREEN = "\033[92m"
|
||||
YELLOW = "\033[33m"
|
||||
FAINT = "\033[2m"
|
||||
BOLD = "\033[1m"
|
||||
BLUE = "\033[34m"
|
||||
UNDERLINE = "\033[4m"
|
||||
NO_UNDERLINE = "\033[24m"
|
||||
CLEAR = "\033[0m"
|
||||
|
||||
|
||||
def lazy_prop(func):
|
||||
|
@ -68,7 +68,11 @@ def did_you_mean(word: str, options: T.List[str]) -> T.Optional[str]:
|
|||
cost = 1
|
||||
else:
|
||||
cost = 2
|
||||
distances[i][j] = min(distances[i-1][j] + 2, distances[i][j-1] + 2, distances[i-1][j-1] + cost)
|
||||
distances[i][j] = min(
|
||||
distances[i - 1][j] + 2,
|
||||
distances[i][j - 1] + 2,
|
||||
distances[i - 1][j - 1] + cost,
|
||||
)
|
||||
|
||||
return distances[m - 1][n - 1]
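did_you_mean ranks candidate names with a weighted edit distance; the recurrence visible above charges 2 for an insertion or deletion and a smaller cost for a substitution (the exact substitution rule sits outside this hunk). A self-contained sketch with illustrative costs (0 for a match, 1 for any mismatch):

def distance(a: str, b: str) -> int:
    rows, cols = len(a) + 1, len(b) + 1
    d = [[0] * cols for _ in range(rows)]
    for i in range(rows):
        d[i][0] = i * 2  # deletions cost 2
    for j in range(cols):
        d[0][j] = j * 2  # insertions cost 2
    for i in range(1, rows):
        for j in range(1, cols):
            cost = 0 if a[i - 1] == b[j - 1] else 1
            d[i][j] = min(
                d[i - 1][j] + 2,  # delete from a
                d[i][j - 1] + 2,  # insert into a
                d[i - 1][j - 1] + cost,  # substitute (or keep)
            )
    return d[rows - 1][cols - 1]


assert distance("lable", "label") == 2  # a swapped pair costs two substitutions
assert distance("label", "labels") == 2  # one insertion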
|
||||
|
||||
|
@ -87,10 +91,12 @@ def idx_to_pos(idx: int, text: str) -> T.Tuple[int, int]:
|
|||
col_num = len(sp[-1])
|
||||
return (line_num - 1, col_num)
|
||||
|
||||
|
||||
def pos_to_idx(line: int, col: int, text: str) -> int:
|
||||
lines = text.splitlines(keepends=True)
|
||||
return sum([len(line) for line in lines[:line]]) + col
|
||||
|
||||
|
||||
def idxs_to_range(start: int, end: int, text: str):
|
||||
start_l, start_c = idx_to_pos(start, text)
|
||||
end_l, end_c = idx_to_pos(end, text)
|
||||
|
|
|
@ -26,11 +26,24 @@ from .utils import lazy_prop
|
|||
|
||||
|
||||
# To speed up parsing, we ignore all tags except these
|
||||
PARSE_GIR = set([
|
||||
"repository", "namespace", "class", "interface", "property", "glib:signal",
|
||||
"include", "implements", "type", "parameter", "parameters", "enumeration",
|
||||
"member", "bitfield",
|
||||
])
|
||||
PARSE_GIR = set(
|
||||
[
|
||||
"repository",
|
||||
"namespace",
|
||||
"class",
|
||||
"interface",
|
||||
"property",
|
||||
"glib:signal",
|
||||
"include",
|
||||
"implements",
|
||||
"type",
|
||||
"parameter",
|
||||
"parameters",
|
||||
"enumeration",
|
||||
"member",
|
||||
"bitfield",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class Element:
|
||||
|
@ -42,7 +55,7 @@ class Element:
|
|||
|
||||
@lazy_prop
|
||||
def cdata(self):
|
||||
return ''.join(self.cdata_chunks)
|
||||
return "".join(self.cdata_chunks)
|
||||
|
||||
def get_elements(self, name) -> T.List["Element"]:
|
||||
return self.children.get(name, [])
|
||||
|
@ -59,7 +72,10 @@ class Handler(sax.handler.ContentHandler):
|
|||
self._interesting_elements = parse_type
|
||||
|
||||
def startElement(self, name, attrs):
|
||||
if self._interesting_elements is not None and name not in self._interesting_elements:
|
||||
if (
|
||||
self._interesting_elements is not None
|
||||
and name not in self._interesting_elements
|
||||
):
|
||||
self.skipping += 1
|
||||
if self.skipping > 0:
|
||||
return
|
||||
|
@ -74,11 +90,13 @@ class Handler(sax.handler.ContentHandler):
|
|||
|
||||
self.stack.append(element)
|
||||
|
||||
|
||||
def endElement(self, name):
|
||||
if self.skipping == 0:
|
||||
self.stack.pop()
|
||||
if self._interesting_elements is not None and name not in self._interesting_elements:
|
||||
if (
|
||||
self._interesting_elements is not None
|
||||
and name not in self._interesting_elements
|
||||
):
|
||||
self.skipping -= 1
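The skipping counter above is what lets the GIR parser drop whole subtrees of uninteresting tags without losing track of nesting. A minimal standalone handler using the same idea (illustrative; not the real Element/Handler classes):

import xml.sax as sax


class SkippingHandler(sax.handler.ContentHandler):
    def __init__(self, interesting):
        super().__init__()
        self.interesting = interesting
        self.skipping = 0  # current depth inside ignored subtrees
        self.seen = []

    def startElement(self, name, attrs):
        if name not in self.interesting:
            self.skipping += 1
        if self.skipping == 0:
            self.seen.append(name)

    def endElement(self, name):
        if name not in self.interesting:
            self.skipping -= 1


handler = SkippingHandler({"repository", "namespace", "class"})
sax.parseString(
    b"<repository><namespace><junk><class/></junk><class/></namespace></repository>",
    handler,
)
assert handler.seen == ["repository", "namespace", "class"]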
|
||||
|
||||
def characters(self, content):
|
||||
|
|
docs/conf.py
|
@ -17,9 +17,9 @@
|
|||
|
||||
# -- Project information -----------------------------------------------------
|
||||
|
||||
project = 'Blueprint'
|
||||
copyright = '2021, James Westman'
|
||||
author = 'James Westman'
|
||||
project = "Blueprint"
|
||||
copyright = "2021, James Westman"
|
||||
author = "James Westman"
|
||||
|
||||
|
||||
# -- General configuration ---------------------------------------------------
|
||||
|
@ -27,16 +27,15 @@ author = 'James Westman'
|
|||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = [
|
||||
]
|
||||
extensions = []
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
templates_path = ["_templates"]
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
# This pattern also affects html_static_path and html_extra_path.
|
||||
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
|
||||
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
|
||||
|
||||
|
||||
# -- Options for HTML output -------------------------------------------------
|
||||
|
@ -44,9 +43,9 @@ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
|
|||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
#
|
||||
html_theme = 'furo'
|
||||
html_theme = "furo"
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
html_static_path = ["_static"]
|
||||
|
|
|
@ -56,12 +56,15 @@ class TestSamples(unittest.TestCase):
|
|||
e.pretty_print(name + ".blp", blueprint)
|
||||
raise AssertionError()
|
||||
|
||||
|
||||
def assert_sample_error(self, name):
|
||||
try:
|
||||
with open((Path(__file__).parent / f"sample_errors/{name}.blp").resolve()) as f:
|
||||
with open(
|
||||
(Path(__file__).parent / f"sample_errors/{name}.blp").resolve()
|
||||
) as f:
|
||||
blueprint = f.read()
|
||||
with open((Path(__file__).parent / f"sample_errors/{name}.err").resolve()) as f:
|
||||
with open(
|
||||
(Path(__file__).parent / f"sample_errors/{name}.err").resolve()
|
||||
) as f:
|
||||
expected = f.read()
|
||||
|
||||
tokens = tokenizer.tokenize(blueprint)
|
||||
|
@ -74,6 +77,7 @@ class TestSamples(unittest.TestCase):
|
|||
if len(warnings):
|
||||
raise MultipleErrors(warnings)
|
||||
except PrintableError as e:
|
||||
|
||||
def error_str(error):
|
||||
line, col = utils.idx_to_pos(error.start + 1, blueprint)
|
||||
len = error.end - error.start
|
||||
|
@ -93,7 +97,6 @@ class TestSamples(unittest.TestCase):
|
|||
else: # pragma: no cover
|
||||
raise AssertionError("Expected a compiler error, but none was emitted")
|
||||
|
||||
|
||||
def assert_decompile(self, name):
|
||||
try:
|
||||
with open((Path(__file__).parent / f"samples/{name}.blp").resolve()) as f:
|
||||
|
@ -112,7 +115,6 @@ class TestSamples(unittest.TestCase):
|
|||
e.pretty_print(name + ".blp", blueprint)
|
||||
raise AssertionError()
|
||||
|
||||
|
||||
def test_samples(self):
|
||||
self.assert_sample("accessibility")
|
||||
self.assert_sample("action_widgets")
|
||||
|
@ -141,7 +143,6 @@ class TestSamples(unittest.TestCase):
|
|||
self.assert_sample("uint")
|
||||
self.assert_sample("using")
|
||||
|
||||
|
||||
def test_sample_errors(self):
|
||||
self.assert_sample_error("a11y_in_non_widget")
|
||||
self.assert_sample_error("a11y_prop_dne")
|
||||
|
@ -180,7 +181,6 @@ class TestSamples(unittest.TestCase):
|
|||
self.assert_sample_error("using_invalid_namespace")
|
||||
self.assert_sample_error("widgets_in_non_size_group")
|
||||
|
||||
|
||||
def test_decompiler(self):
|
||||
self.assert_decompile("accessibility_dec")
|
||||
self.assert_decompile("binding")
|
||||
|
|
|
@ -36,9 +36,10 @@ class TestTokenizer(unittest.TestCase):
|
|||
e.pretty_print("<test input>", string)
|
||||
raise e
|
||||
|
||||
|
||||
def test_basic(self):
|
||||
self.assert_tokenize("ident(){}; \n <<+>>*/=", [
|
||||
self.assert_tokenize(
|
||||
"ident(){}; \n <<+>>*/=",
|
||||
[
|
||||
(TokenType.IDENT, "ident"),
|
||||
(TokenType.PUNCTUATION, "("),
|
||||
(TokenType.PUNCTUATION, ")"),
|
||||
|
@ -48,26 +49,35 @@ class TestTokenizer(unittest.TestCase):
|
|||
(TokenType.WHITESPACE, " \n "),
|
||||
(TokenType.OP, "<<+>>*/="),
|
||||
(TokenType.EOF, ""),
|
||||
])
|
||||
],
|
||||
)
|
||||
|
||||
def test_quotes(self):
|
||||
self.assert_tokenize(r'"this is a \n string""this is \\another \"string\""', [
|
||||
self.assert_tokenize(
|
||||
r'"this is a \n string""this is \\another \"string\""',
|
||||
[
|
||||
(TokenType.QUOTED, r'"this is a \n string"'),
|
||||
(TokenType.QUOTED, r'"this is \\another \"string\""'),
|
||||
(TokenType.EOF, ""),
|
||||
])
|
||||
],
|
||||
)
|
||||
|
||||
def test_comments(self):
|
||||
self.assert_tokenize('/* \n \\n COMMENT /* */', [
|
||||
(TokenType.COMMENT, '/* \n \\n COMMENT /* */'),
|
||||
self.assert_tokenize(
|
||||
"/* \n \\n COMMENT /* */",
|
||||
[
|
||||
(TokenType.COMMENT, "/* \n \\n COMMENT /* */"),
|
||||
(TokenType.EOF, ""),
|
||||
])
|
||||
self.assert_tokenize('line // comment\nline', [
|
||||
(TokenType.IDENT, 'line'),
|
||||
(TokenType.WHITESPACE, ' '),
|
||||
(TokenType.COMMENT, '// comment'),
|
||||
(TokenType.WHITESPACE, '\n'),
|
||||
(TokenType.IDENT, 'line'),
|
||||
],
|
||||
)
|
||||
self.assert_tokenize(
|
||||
"line // comment\nline",
|
||||
[
|
||||
(TokenType.IDENT, "line"),
|
||||
(TokenType.WHITESPACE, " "),
|
||||
(TokenType.COMMENT, "// comment"),
|
||||
(TokenType.WHITESPACE, "\n"),
|
||||
(TokenType.IDENT, "line"),
|
||||
(TokenType.EOF, ""),
|
||||
])
|
||||
|
||||
],
|
||||
)
|
||||
|
|