Format using black

James Westman 2022-12-19 11:49:10 -06:00
parent 6a36d92380
commit 8fee46ec68
No known key found for this signature in database
GPG key ID: CE2DBA0ADB654EA6
40 changed files with 975 additions and 610 deletions
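
The exact formatter invocation is not recorded in this commit; as a rough illustration (an assumption, not part of the commit), the same rewrite that black applies to one of the long lines touched below can be reproduced with black's Python API:

import black

# One of the pre-formatting lines from the first hunk below; black wraps it at
# its default 88-column limit, producing the bracketed form shown in the diff.
src = 'cls.validators = [getattr(cls, f) for f in dir(cls) if hasattr(getattr(cls, f), "_validator")]\n'
print(black.format_str(src, mode=black.FileMode()))

On a checkout, the whole tree is typically reformatted with the command-line tool, e.g. "black .". Each hunk below is shown as the formatted (new) side of the diff.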

View file

@@ -27,10 +27,13 @@ from .lsp_utils import SemanticToken
class Children:
    """Allows accessing children by type using array syntax."""

    def __init__(self, children):
        self._children = children

    def __iter__(self):
        return iter(self._children)

    def __getitem__(self, key):
        if isinstance(key, int):
            return self._children[key]

@@ -55,8 +58,9 @@ class AstNode:
    def __init_subclass__(cls):
        cls.completers = []
        cls.validators = [
            getattr(cls, f) for f in dir(cls) if hasattr(getattr(cls, f), "_validator")
        ]

    @property
    def root(self):

@@ -116,13 +120,11 @@ class AstNode:
        for child in self.children:
            yield from child.get_semantic_tokens()

    def iterate_children_recursive(self) -> T.Iterator["AstNode"]:
        yield self
        for child in self.children:
            yield from child.iterate_children_recursive()

    def validate_unique_in_parent(self, error, check=None):
        for child in self.parent.children:
            if child is self:

@@ -132,7 +134,13 @@ class AstNode:
                if check is None or check(child):
                    raise CompileError(
                        error,
                        references=[
                            ErrorReference(
                                child.group.start,
                                child.group.end,
                                "previous declaration was here",
                            )
                        ],
                    )

View file

@@ -30,9 +30,13 @@ from .tokenizer import TokenType, Token
Pattern = T.List[T.Tuple[TokenType, T.Optional[str]]]


def _complete(
    ast_node: AstNode, tokens: T.List[Token], idx: int, token_idx: int
) -> T.Iterator[Completion]:
    for child in ast_node.children:
        if child.group.start <= idx and (
            idx < child.group.end or (idx == child.group.end and child.incomplete)
        ):
            yield from _complete(child, tokens, idx, token_idx)
            return

@@ -49,7 +53,9 @@ def _complete(ast_node: AstNode, tokens: T.List[Token], idx: int, token_idx: int
        yield from completer(prev_tokens, ast_node)


def complete(
    ast_node: AstNode, tokens: T.List[Token], idx: int
) -> T.Iterator[Completion]:
    token_idx = 0
    # find the current token
    for i, token in enumerate(tokens):

@@ -71,13 +77,17 @@ def using_gtk(ast_node, match_variables):
@completer(
    applies_in=[language.UI, language.ObjectContent, language.Template],
    matches=new_statement_patterns,
)
def namespace(ast_node, match_variables):
    yield Completion("Gtk", CompletionItemKind.Module, text="Gtk.")

    for ns in ast_node.root.children[language.Import]:
        if ns.gir_namespace is not None:
            yield Completion(
                ns.gir_namespace.name,
                CompletionItemKind.Module,
                text=ns.gir_namespace.name + ".",
            )


@completer(

@@ -85,7 +95,7 @@ def namespace(ast_node, match_variables):
    matches=[
        [(TokenType.IDENT, None), (TokenType.OP, "."), (TokenType.IDENT, None)],
        [(TokenType.IDENT, None), (TokenType.OP, ".")],
    ],
)
def object_completer(ast_node, match_variables):
    ns = ast_node.root.gir.namespaces.get(match_variables[0])

@@ -117,9 +127,7 @@ def property_completer(ast_node, match_variables):
@completer(
    applies_in=[language.Property, language.BaseTypedAttribute],
    matches=[[(TokenType.IDENT, None), (TokenType.OP, ":")]],
)
def prop_value_completer(ast_node, match_variables):
    if isinstance(ast_node.value_type, gir.Enumeration):

@@ -141,16 +149,23 @@ def signal_completer(ast_node, match_variables):
            if not isinstance(ast_node.parent, language.Object):
                name = "on"
            else:
                name = "on_" + (
                    ast_node.parent.children[ClassName][0].tokens["id"]
                    or ast_node.parent.children[ClassName][0]
                    .tokens["class_name"]
                    .lower()
                )
            yield Completion(
                signal,
                CompletionItemKind.Property,
                snippet=f"{signal} => ${{1:{name}_{signal.replace('-', '_')}}}()$0;",
            )


@completer(applies_in=[language.UI], matches=new_statement_patterns)
def template_completer(ast_node, match_variables):
    yield Completion(
        "template",
        CompletionItemKind.Snippet,
        snippet="template ${1:ClassName} : ${2:ParentClass} {\n $0\n}",
    )

View file

@@ -33,19 +33,24 @@ new_statement_patterns = [
def applies_to(*ast_types):
    """Decorator describing which AST nodes the completer should apply in."""

    def decorator(func):
        for c in ast_types:
            c.completers.append(func)
        return func

    return decorator


def completer(applies_in: T.List, matches: T.List = [], applies_in_subclass=None):
    def decorator(func):
        def inner(prev_tokens: T.List[Token], ast_node):
            # For completers that apply in ObjectContent nodes, we can further
            # check that the object is the right class
            if applies_in_subclass is not None:
                type = ast_node.root.gir.get_type(
                    applies_in_subclass[1], applies_in_subclass[0]
                )
                if ast_node.gir_class and not ast_node.gir_class.assignable_to(type):
                    return

@@ -59,7 +64,9 @@ def completer(applies_in: T.List, matches: T.List=[], applies_in_subclass=None):
                    for i in range(0, len(pattern)):
                        type, value = pattern[i]
                        token = prev_tokens[i - len(pattern)]
                        if token.type != type or (
                            value is not None and str(token) != value
                        ):
                            break
                        if value is None:
                            match_variables.append(str(token))

View file

@@ -60,16 +60,16 @@ class DecompileCtx:
        self.gir.add_namespace(get_namespace("Gtk", "4.0"))

    @property
    def result(self):
        imports = "\n".join(
            [
                f"using {ns} {namespace.version};"
                for ns, namespace in self.gir.namespaces.items()
            ]
        )
        return imports + "\n" + self._result

    def type_by_cname(self, cname):
        if type := self.gir.get_type_by_cname(cname):
            return type

@@ -83,7 +83,6 @@ class DecompileCtx:
        except:
            pass

    def start_block(self):
        self._blocks_need_end.append(None)

@@ -94,7 +93,6 @@ class DecompileCtx:
    def end_block_with(self, text):
        self._blocks_need_end[-1] = text

    def print(self, line, newline=True):
        if line == "}" or line == "]":
            self._indent -= 1

@@ -109,7 +107,11 @@ class DecompileCtx:
            line_type = LineType.STMT
        else:
            line_type = LineType.NONE
        if (
            line_type != self._last_line_type
            and self._last_line_type != LineType.BLOCK_START
            and line_type != LineType.BLOCK_END
        ):
            self._result += "\n"
        self._last_line_type = line_type

@@ -127,10 +129,10 @@ class DecompileCtx:
            for member in type.members.values():
                if member.nick == value or member.c_ident == value:
                    return member.name
            return value.replace("-", "_")

        if type is None:
            self.print(f'{name}: "{escape_quote(value)}";')
        elif type.assignable_to(FloatType()):
            self.print(f"{name}: {value};")
        elif type.assignable_to(BoolType()):

@@ -139,12 +141,20 @@ class DecompileCtx:
        elif (
            type.assignable_to(self.gir.namespaces["Gtk"].lookup_type("Gdk.Pixbuf"))
            or type.assignable_to(self.gir.namespaces["Gtk"].lookup_type("Gdk.Texture"))
            or type.assignable_to(
                self.gir.namespaces["Gtk"].lookup_type("Gdk.Paintable")
            )
            or type.assignable_to(
                self.gir.namespaces["Gtk"].lookup_type("Gtk.ShortcutAction")
            )
            or type.assignable_to(
                self.gir.namespaces["Gtk"].lookup_type("Gtk.ShortcutTrigger")
            )
        ):
            self.print(f'{name}: "{escape_quote(value)}";')
        elif type.assignable_to(
            self.gir.namespaces["Gtk"].lookup_type("GObject.Object")
        ):
            self.print(f"{name}: {value};")
        elif isinstance(type, Bitfield):
            flags = [get_enum_name(flag) for flag in value.split("|")]

@@ -152,7 +162,7 @@ class DecompileCtx:
        elif isinstance(type, Enumeration):
            self.print(f"{name}: {get_enum_name(value)};")
        else:
            self.print(f'{name}: "{escape_quote(value)}";')


def _decompile_element(ctx: DecompileCtx, gir, xml):

@@ -191,19 +201,21 @@ def decompile(data):
    return ctx.result


def canon(string: str) -> str:
    if string == "class":
        return "klass"
    else:
        return string.replace("-", "_").lower()


def truthy(string: str) -> bool:
    return string.lower() in ["yes", "true", "t", "y", "1"]


def full_name(gir):
    return gir.name if gir.full_name.startswith("Gtk.") else gir.full_name


def lookup_by_cname(gir, cname: str):
    if isinstance(gir, GirContext):
        return gir.get_type_by_cname(cname)

@@ -216,15 +228,17 @@ def decompiler(tag, cdata=False):
        func._cdata = cdata
        _DECOMPILERS[tag] = func
        return func

    return decorator


def escape_quote(string: str) -> str:
    return (
        string.replace("\\", "\\\\")
        .replace("'", "\\'")
        .replace('"', '\\"')
        .replace("\n", "\\n")
    )


@decompiler("interface")

@@ -243,7 +257,18 @@ def decompile_placeholder(ctx, gir):
@decompiler("property", cdata=True)
def decompile_property(
    ctx,
    gir,
    name,
    cdata,
    bind_source=None,
    bind_property=None,
    bind_flags=None,
    translatable="false",
    comments=None,
    context=None,
):
    name = name.replace("_", "-")
    if comments is not None:
        ctx.print(f"/* Translators: {comments} */")

@@ -263,18 +288,32 @@ def decompile_property(ctx, gir, name, cdata, bind_source=None, bind_property=None, ...):
        ctx.print(f"{name}: bind {bind_source}.{bind_property}{flags};")
    elif truthy(translatable):
        if context is not None:
            ctx.print(
                f'{name}: C_("{escape_quote(context)}", "{escape_quote(cdata)}");'
            )
        else:
            ctx.print(f'{name}: _("{escape_quote(cdata)}");')
    elif gir is None or gir.properties.get(name) is None:
        ctx.print(f'{name}: "{escape_quote(cdata)}";')
    else:
        ctx.print_attribute(name, cdata, gir.properties.get(name).type)
    return gir


@decompiler("attribute", cdata=True)
def decompile_attribute(
    ctx, gir, name, cdata, translatable="false", comments=None, context=None
):
    decompile_property(
        ctx,
        gir,
        name,
        cdata,
        translatable=translatable,
        comments=comments,
        context=context,
    )


@decompiler("attributes")
def decompile_attributes(ctx, gir):

@@ -291,5 +330,7 @@ class UnsupportedError(Exception):
            print(f"in {Colors.UNDERLINE}{filename}{Colors.NO_UNDERLINE}")
        if self.tag:
            print(f"in tag {Colors.BLUE}{self.tag}{Colors.CLEAR}")
        print(
            f"""{Colors.FAINT}The compiler might support this feature, but the porting tool does not. You
probably need to port this file manually.{Colors.CLEAR}\n"""
        )

View file

@@ -23,6 +23,7 @@ import sys, traceback
from . import utils
from .utils import Colors


class PrintableError(Exception):
    """Parent class for errors that can be pretty-printed for the user, e.g.
    compilation warnings and errors."""

@@ -44,7 +45,17 @@ class CompileError(PrintableError):
    category = "error"
    color = Colors.RED

    def __init__(
        self,
        message,
        start=None,
        end=None,
        did_you_mean=None,
        hints=None,
        actions=None,
        fatal=False,
        references=None,
    ):
        super().__init__(message)

        self.message = message

@@ -62,7 +73,6 @@ class CompileError(PrintableError):
        self.hints.append(hint)
        return self

    def _did_you_mean(self, word: str, options: T.List[str]):
        if word.replace("_", "-") in options:
            self.hint(f"use '-', not '_': `{word.replace('_', '-')}`")

@@ -86,9 +96,11 @@ class CompileError(PrintableError):
        # Display 1-based line numbers
        line_num += 1

        stream.write(
            f"""{self.color}{Colors.BOLD}{self.category}: {self.message}{Colors.CLEAR}
at {filename} line {line_num} column {col_num}:
{Colors.FAINT}{line_num :>4} |{Colors.CLEAR}{line.rstrip()}\n {Colors.FAINT}|{" "*(col_num-1)}^{Colors.CLEAR}\n"""
        )

        for hint in self.hints:
            stream.write(f"{Colors.FAINT}hint: {hint}{Colors.CLEAR}\n")

@@ -98,9 +110,11 @@ at {filename} line {line_num} column {col_num}:
            line = code.splitlines(True)[line_num]
            line_num += 1

            stream.write(
                f"""{Colors.FAINT}note: {ref.message}:
at {filename} line {line_num} column {col_num}:
{Colors.FAINT}{line_num :>4} |{line.rstrip()}\n {Colors.FAINT}|{" "*(col_num-1)}^{Colors.CLEAR}\n"""
            )

        stream.write("\n")

@@ -151,11 +165,12 @@ def report_bug():  # pragma: no cover
    print(traceback.format_exc())
    print(f"Arguments: {sys.argv}\n")
    print(
        f"""{Colors.BOLD}{Colors.RED}***** COMPILER BUG *****
The blueprint-compiler program has crashed. Please report the above stacktrace,
along with the input file(s) if possible, on GitLab:
{Colors.BOLD}{Colors.BLUE}{Colors.UNDERLINE}https://gitlab.gnome.org/jwestman/blueprint-compiler/-/issues/new?issue
{Colors.CLEAR}"""
    )

    sys.exit(1)

View file

@@ -22,6 +22,7 @@ import typing as T
import os, sys
import gi  # type: ignore

gi.require_version("GIRepository", "2.0")
from gi.repository import GIRepository  # type: ignore

@@ -60,10 +61,13 @@ def get_namespace(namespace, version) -> "Namespace":
def get_xml(namespace, version):
    from .main import VERSION
    from xml.etree import ElementTree

    search_paths = []

    if data_paths := os.environ.get("XDG_DATA_DIRS"):
        search_paths += [
            os.path.join(path, "gir-1.0") for path in data_paths.split(os.pathsep)
        ]

    filename = f"{namespace}-{version}.gir"

@@ -104,36 +108,57 @@ class BasicType(GirType):
    def full_name(self) -> str:
        return self.name


class BoolType(BasicType):
    name = "bool"

    def assignable_to(self, other) -> bool:
        return isinstance(other, BoolType)


class IntType(BasicType):
    name = "int"

    def assignable_to(self, other) -> bool:
        return (
            isinstance(other, IntType)
            or isinstance(other, UIntType)
            or isinstance(other, FloatType)
        )


class UIntType(BasicType):
    name = "uint"

    def assignable_to(self, other) -> bool:
        return (
            isinstance(other, IntType)
            or isinstance(other, UIntType)
            or isinstance(other, FloatType)
        )


class FloatType(BasicType):
    name = "float"

    def assignable_to(self, other) -> bool:
        return isinstance(other, FloatType)


class StringType(BasicType):
    name = "string"

    def assignable_to(self, other) -> bool:
        return isinstance(other, StringType)


class TypeType(BasicType):
    name = "GType"

    def assignable_to(self, other) -> bool:
        return isinstance(other, TypeType)


_BASIC_TYPES = {
    "gboolean": BoolType,
    "int": IntType,

@@ -150,6 +175,7 @@ _BASIC_TYPES = {
    "type": TypeType,
}


class GirNode:
    def __init__(self, container, tl):
        self.container = container

@@ -291,7 +317,9 @@ class Interface(GirNode, GirType):
        n_prerequisites = self.tl.INTERFACE_N_PREREQUISITES
        offset = self.tl.header.HEADER_INTERFACE_BLOB_SIZE
        offset += (n_prerequisites + n_prerequisites % 2) * 2
        offset += (
            self.tl.INTERFACE_N_PROPERTIES * self.tl.header.HEADER_PROPERTY_BLOB_SIZE
        )
        offset += self.tl.INTERFACE_N_METHODS * self.tl.header.HEADER_FUNCTION_BLOB_SIZE
        n_signals = self.tl.INTERFACE_N_SIGNALS
        property_size = self.tl.header.HEADER_SIGNAL_BLOB_SIZE

@@ -342,7 +370,9 @@ class Class(GirNode, GirType):
        offset = self.tl.header.HEADER_OBJECT_BLOB_SIZE
        offset += (n_interfaces + n_interfaces % 2) * 2
        offset += self.tl.OBJ_N_FIELDS * self.tl.header.HEADER_FIELD_BLOB_SIZE
        offset += (
            self.tl.OBJ_N_FIELD_CALLBACKS * self.tl.header.HEADER_CALLBACK_BLOB_SIZE
        )
        n_properties = self.tl.OBJ_N_PROPERTIES
        property_size = self.tl.header.HEADER_PROPERTY_BLOB_SIZE
        result = {}

@@ -357,7 +387,9 @@ class Class(GirNode, GirType):
        offset = self.tl.header.HEADER_OBJECT_BLOB_SIZE
        offset += (n_interfaces + n_interfaces % 2) * 2
        offset += self.tl.OBJ_N_FIELDS * self.tl.header.HEADER_FIELD_BLOB_SIZE
        offset += (
            self.tl.OBJ_N_FIELD_CALLBACKS * self.tl.header.HEADER_CALLBACK_BLOB_SIZE
        )
        offset += self.tl.OBJ_N_PROPERTIES * self.tl.header.HEADER_PROPERTY_BLOB_SIZE
        offset += self.tl.OBJ_N_METHODS * self.tl.header.HEADER_FUNCTION_BLOB_SIZE
        n_signals = self.tl.OBJ_N_SIGNALS

@@ -381,7 +413,9 @@ class Class(GirNode, GirType):
        if self.parent is not None:
            result += f" : {self.parent.container.name}.{self.parent.name}"
        if len(self.implements):
            result += " implements " + ", ".join(
                [impl.full_name for impl in self.implements]
            )
        return result

    @cached_property

@@ -510,7 +544,10 @@ class Namespace(GirNode):
                self.entries[entry_name] = Class(self, entry_blob)
            elif entry_type == typelib.BLOB_TYPE_INTERFACE:
                self.entries[entry_name] = Interface(self, entry_blob)
            elif (
                entry_type == typelib.BLOB_TYPE_BOXED
                or entry_type == typelib.BLOB_TYPE_STRUCT
            ):
                self.entries[entry_name] = Boxed(self, entry_blob)

    @cached_property

@@ -531,11 +568,19 @@ class Namespace(GirNode):
    @cached_property
    def classes(self):
        return {
            name: entry
            for name, entry in self.entries.items()
            if isinstance(entry, Class)
        }

    @cached_property
    def interfaces(self):
        return {
            name: entry
            for name, entry in self.entries.items()
            if isinstance(entry, Interface)
        }

    def get_type(self, name):
        """Gets a type (class, interface, enum, etc.) from this namespace."""

@@ -569,7 +614,9 @@ class Repository(GirNode):
        if dependencies := tl[0x24].string:
            deps = [tuple(dep.split("-", 1)) for dep in dependencies.split("|")]
            try:
                self.includes = {
                    name: get_namespace(name, version) for name, version in deps
                }
            except:
                raise CompilerBugError(f"Failed to load dependencies.")
        else:

@@ -578,14 +625,12 @@ class Repository(GirNode):
    def get_type(self, name: str, ns: str) -> T.Optional[GirNode]:
        return self.lookup_namespace(ns).get_type(name)

    def get_type_by_cname(self, name: str) -> T.Optional[GirNode]:
        for ns in [self.namespace, *self.includes.values()]:
            if type := ns.get_type_by_cname(name):
                return type
        return None

    def lookup_namespace(self, ns: str):
        """Finds a namespace among this namespace's dependencies."""
        if ns == self.namespace.name:

@@ -610,9 +655,19 @@ class Repository(GirNode):
                return BoolType()
            elif type_id in [typelib.TYPE_FLOAT, typelib.TYPE_DOUBLE]:
                return FloatType()
            elif type_id in [
                typelib.TYPE_INT8,
                typelib.TYPE_INT16,
                typelib.TYPE_INT32,
                typelib.TYPE_INT64,
            ]:
                return IntType()
            elif type_id in [
                typelib.TYPE_UINT8,
                typelib.TYPE_UINT16,
                typelib.TYPE_UINT32,
                typelib.TYPE_UINT64,
            ]:
                return UIntType()
            elif type_id == typelib.TYPE_UTF8:
                return StringType()

@@ -621,30 +676,30 @@ class Repository(GirNode):
            else:
                raise CompilerBugError("Unknown type ID", type_id)
        else:
            return self._resolve_dir_entry(
                self.tl.header[type_id].INTERFACE_TYPE_INTERFACE
            )


class GirContext:
    def __init__(self):
        self.namespaces = {}

    def add_namespace(self, namespace: Namespace):
        other = self.namespaces.get(namespace.name)
        if other is not None and other.version != namespace.version:
            raise CompileError(
                f"Namespace {namespace.name}-{namespace.version} can't be imported because version {other.version} was imported earlier"
            )

        self.namespaces[namespace.name] = namespace

    def get_type_by_cname(self, name: str) -> T.Optional[GirNode]:
        for ns in self.namespaces.values():
            if type := ns.get_type_by_cname(name):
                return type
        return None

    def get_type(self, name: str, ns: str) -> T.Optional[GirNode]:
        ns = ns or "Gtk"

@@ -653,7 +708,6 @@ class GirContext:
        return self.namespaces[ns].get_type(name)

    def get_class(self, name: str, ns: str) -> T.Optional[Class]:
        type = self.get_type(name, ns)
        if isinstance(type, Class):

@@ -661,7 +715,6 @@ class GirContext:
        else:
            return None

    def validate_ns(self, ns: str):
        """Raises an exception if there is a problem looking up the given
        namespace."""

View file

@@ -35,9 +35,11 @@ class CouldNotPort:
    def __init__(self, message):
        self.message = message


def change_suffix(f):
    return f.removesuffix(".ui") + ".blp"


def decompile_file(in_file, out_file) -> T.Union[str, CouldNotPort]:
    if os.path.exists(out_file):
        return CouldNotPort("already exists")

@@ -63,12 +65,15 @@ def decompile_file(in_file, out_file) -> T.Union[str, CouldNotPort]:
    except PrintableError as e:
        e.pretty_print(out_file, decompiled)

        print(
            f"{Colors.RED}{Colors.BOLD}error: the generated file does not compile{Colors.CLEAR}"
        )
        print(f"in {Colors.UNDERLINE}{out_file}{Colors.NO_UNDERLINE}")
        print(
            f"""{Colors.FAINT}Either the original XML file had an error, or there is a bug in the
porting tool. If you think it's a bug (which is likely), please file an issue on GitLab:
{Colors.BLUE}{Colors.UNDERLINE}https://gitlab.gnome.org/jwestman/blueprint-compiler/-/issues/new?issue{Colors.CLEAR}\n"""
        )

        return CouldNotPort("does not compile")

@@ -108,7 +113,9 @@ def enter():
def step1():
    print(
        f"{Colors.BOLD}STEP 1: Create subprojects/blueprint-compiler.wrap{Colors.CLEAR}"
    )

    if os.path.exists("subprojects/blueprint-compiler.wrap"):
        print("subprojects/blueprint-compiler.wrap already exists, skipping\n")

@@ -121,17 +128,20 @@ def step1():
        pass

    from .main import VERSION

    VERSION = "main" if VERSION == "uninstalled" else "v" + VERSION

    with open("subprojects/blueprint-compiler.wrap", "w") as wrap:
        wrap.write(
            f"""[wrap-git]
directory = blueprint-compiler
url = https://gitlab.gnome.org/jwestman/blueprint-compiler.git
revision = {VERSION}
depth = 1

[provide]
program_names = blueprint-compiler"""
        )

    print()

@@ -146,7 +156,9 @@ def step2():
            if yesno("Add '/subprojects/blueprint-compiler' to .gitignore?"):
                gitignore.write("\n/subprojects/blueprint-compiler\n")
        else:
            print(
                "'/subprojects/blueprint-compiler' already in .gitignore, skipping"
            )
    else:
        if yesno("Create .gitignore with '/subprojects/blueprint-compiler'?"):
            with open(".gitignore", "w") as gitignore:

@@ -169,9 +181,13 @@ def step3():
        if isinstance(result, CouldNotPort):
            if result.message == "already exists":
                print(Colors.FAINT, end="")
            print(
                f"{Colors.RED}will not port {Colors.UNDERLINE}{in_file}{Colors.NO_UNDERLINE} -> {Colors.UNDERLINE}{out_file}{Colors.NO_UNDERLINE} [{result.message}]{Colors.CLEAR}"
            )
        else:
            print(
                f"will port {Colors.UNDERLINE}{in_file}{Colors.CLEAR} -> {Colors.UNDERLINE}{out_file}{Colors.CLEAR}"
            )
            success += 1

    print()

@@ -180,7 +196,9 @@ def step3():
    elif success == len(files):
        print(f"{Colors.GREEN}All files were converted.{Colors.CLEAR}")
    elif success > 0:
        print(
            f"{Colors.RED}{success} file(s) were converted, {len(files) - success} were not.{Colors.CLEAR}"
        )
    else:
        print(f"{Colors.RED}None of the files could be converted.{Colors.CLEAR}")

@@ -204,22 +222,33 @@ def step3():
def step4(ported):
    print(f"{Colors.BOLD}STEP 4: Set up meson.build{Colors.CLEAR}")
    print(
        f"{Colors.BOLD}{Colors.YELLOW}NOTE: Depending on your build system setup, you may need to make some adjustments to this step.{Colors.CLEAR}"
    )

    meson_files = [
        file
        for file in listdir_recursive(".")
        if os.path.basename(file) == "meson.build"
    ]
    for meson_file in meson_files:
        with open(meson_file, "r") as f:
            if "gnome.compile_resources" in f.read():
                parent = os.path.dirname(meson_file)

                file_list = "\n ".join(
                    [
                        f"'{os.path.relpath(file, parent)}',"
                        for file in ported
                        if file.startswith(parent)
                    ]
                )

                if len(file_list):
                    print(
                        f"{Colors.BOLD}Paste the following into {Colors.UNDERLINE}{meson_file}{Colors.NO_UNDERLINE}:{Colors.CLEAR}"
                    )
                    print(
                        f"""
blueprints = custom_target('blueprints',
  input: files(
    {file_list}

@@ -227,14 +256,17 @@ blueprints = custom_target('blueprints',
  output: '.',
  command: [find_program('blueprint-compiler'), 'batch-compile', '@OUTPUT@', '@CURRENT_SOURCE_DIR@', '@INPUT@'],
)
"""
                    )
                    enter()

                    print(
                        f"""{Colors.BOLD}Paste the following into the 'gnome.compile_resources()'
arguments in {Colors.UNDERLINE}{meson_file}{Colors.NO_UNDERLINE}:{Colors.CLEAR}

dependencies: blueprints,
"""
                    )
                    enter()

    print()

@@ -244,7 +276,9 @@ def step5(in_files):
    print(f"{Colors.BOLD}STEP 5: Update POTFILES.in{Colors.CLEAR}")

    if not os.path.exists("po/POTFILES.in"):
        print(
            f"{Colors.UNDERLINE}po/POTFILES.in{Colors.NO_UNDERLINE} does not exist, skipping\n"
        )
        return

    with open("po/POTFILES.in", "r") as potfiles:

@@ -257,12 +291,24 @@ def step5(in_files):
    new_data = "".join(lines)

    print(
        f"{Colors.BOLD}Will make the following changes to {Colors.UNDERLINE}po/POTFILES.in{Colors.CLEAR}"
    )
    print(
        "".join(
            [
                (
                    Colors.GREEN
                    if line.startswith("+")
                    else Colors.RED + Colors.FAINT
                    if line.startswith("-")
                    else ""
                )
                + line
                + Colors.CLEAR
                for line in difflib.unified_diff(old_lines, lines)
            ]
        )
    )

    if yesno("Is this ok?"):

@@ -291,5 +337,6 @@ def run(opts):
    step5(in_files)
    step6(in_files)

    print(
        f"{Colors.BOLD}STEP 6: Done! Make sure your app still builds and runs correctly.{Colors.CLEAR}"
    )

View file

@@ -16,7 +16,16 @@ from .gtkbuilder_template import Template
from .imports import GtkDirective, Import
from .ui import UI
from .types import ClassName
from .values import (
    TypeValue,
    IdentValue,
    TranslatedStringValue,
    FlagsValue,
    Flag,
    QuotedValue,
    NumberValue,
    Value,
)

from .common import *

View file

@@ -33,6 +33,7 @@ class ObjectContent(AstNode):
    def gir_class(self):
        return self.parent.gir_class


class Object(AstNode):
    grammar: T.Any = [
        ConcreteClassName,

@@ -75,13 +76,17 @@ def validate_parent_type(node, ns: str, name: str, err_msg: str):
    parent = node.root.gir.get_type(name, ns)
    container_type = node.parent_by_type(Object).gir_class
    if container_type and not container_type.assignable_to(parent):
        raise CompileError(
            f"{container_type.full_name} is not a {parent.full_name}, so it doesn't have {err_msg}"
        )


@decompiler("object")
def decompile_object(ctx, gir, klass, id=None):
    gir_class = ctx.type_by_cname(klass)
    klass_name = (
        decompile.full_name(gir_class) if gir_class is not None else "." + klass
    )
    if id is None:
        ctx.print(f"{klass_name} {{")
    else:

View file

@@ -34,12 +34,16 @@ class Property(AstNode):
            UseIdent("bind_source"),
            ".",
            UseIdent("bind_property"),
            ZeroOrMore(
                AnyOf(
                    ["no-sync-create", UseLiteral("no_sync_create", True)],
                    ["inverted", UseLiteral("inverted", True)],
                    ["bidirectional", UseLiteral("bidirectional", True)],
                    Match("sync-create").warn(
                        "sync-create is deprecated in favor of no-sync-create"
                    ),
                )
            ),
            ";",
        ],
        Statement(

@@ -63,19 +67,16 @@ class Property(AstNode):
    def gir_class(self):
        return self.parent.parent.gir_class

    @property
    def gir_property(self):
        if self.gir_class is not None:
            return self.gir_class.properties.get(self.tokens["name"])

    @property
    def value_type(self):
        if self.gir_property is not None:
            return self.gir_property.type

    @validate("name")
    def property_exists(self):
        if self.gir_class is None:

@@ -91,15 +92,19 @@ class Property(AstNode):
        if self.gir_property is None:
            raise CompileError(
                f"Class {self.gir_class.full_name} does not contain a property called {self.tokens['name']}",
                did_you_mean=(self.tokens["name"], self.gir_class.properties.keys()),
            )

    @validate("bind")
    def property_bindable(self):
        if (
            self.tokens["bind"]
            and self.gir_property is not None
            and self.gir_property.construct_only
        ):
            raise CompileError(
                f"{self.gir_property.full_name} can't be bound because it is construct-only",
                hints=["construct-only properties may only be set to a static value"],
            )

    @validate("name")

@@ -107,7 +112,6 @@ class Property(AstNode):
        if self.gir_property is not None and not self.gir_property.writable:
            raise CompileError(f"{self.gir_property.full_name} is not writable")

    @validate()
    def obj_property_type(self):
        if len(self.children[Object]) == 0:

@@ -115,20 +119,23 @@ class Property(AstNode):
        object = self.children[Object][0]
        type = self.value_type
        if (
            object
            and type
            and object.gir_class
            and not object.gir_class.assignable_to(type)
        ):
            raise CompileError(
                f"Cannot assign {object.gir_class.full_name} to {type.full_name}"
            )

    @validate("name")
    def unique_in_parent(self):
        self.validate_unique_in_parent(
            f"Duplicate property '{self.tokens['name']}'",
            check=lambda child: child.tokens["name"] == self.tokens["name"],
        )

    @docs("name")
    def property_docs(self):
        if self.gir_property is not None:

View file

@@ -26,19 +26,23 @@ from .common import *
class Signal(AstNode):
    grammar = Statement(
        UseIdent("name"),
        Optional(
            [
                "::",
                UseIdent("detail_name").expected("a signal detail name"),
            ]
        ),
        "=>",
        UseIdent("handler").expected("the name of a function to handle the signal"),
        Match("(").expected("argument list"),
        Optional(UseIdent("object")).expected("object identifier"),
        Match(")").expected(),
        ZeroOrMore(
            AnyOf(
                [Keyword("swapped"), UseLiteral("swapped", True)],
                [Keyword("after"), UseLiteral("after", True)],
            )
        ),
    )

    @property

@@ -65,18 +69,15 @@ class Signal(AstNode):
    def is_after(self) -> bool:
        return self.tokens["after"] or False

    @property
    def gir_signal(self):
        if self.gir_class is not None:
            return self.gir_class.signals.get(self.tokens["name"])

    @property
    def gir_class(self):
        return self.parent.parent.gir_class

    @validate("name")
    def signal_exists(self):
        if self.gir_class is None:

@@ -92,10 +93,9 @@ class Signal(AstNode):
        if self.gir_signal is None:
            raise CompileError(
                f"Class {self.gir_class.full_name} does not contain a signal called {self.tokens['name']}",
                did_you_mean=(self.tokens["name"], self.gir_class.signals.keys()),
            )

    @validate("object")
    def object_exists(self):
        object_id = self.tokens["object"]

@@ -103,10 +103,7 @@ class Signal(AstNode):
            return

        if self.root.objects_by_id.get(object_id) is None:
            raise CompileError(f"Could not find object with ID '{object_id}'")

    @docs("name")
    def signal_docs(self):

View file

@@ -86,6 +86,7 @@ def get_state_types(gir):
        "selected": BoolType(),
    }


def get_types(gir):
    return {
        **get_property_types(gir),

@@ -93,6 +94,7 @@ def get_types(gir):
        **get_state_types(gir),
    }


def _get_docs(gir, name):
    if gir_type := (
        gir.get_type("AccessibleProperty", "Gtk").members.get(name)

@@ -174,8 +176,7 @@ class A11y(AstNode):
)
def a11y_completer(ast_node, match_variables):
    yield Completion(
        "accessibility", CompletionItemKind.Snippet, snippet="accessibility {\n $0\n}"
    )

@@ -185,20 +186,24 @@ def a11y_completer(ast_node, match_variables):
)
def a11y_name_completer(ast_node, match_variables):
    for name, type in get_types(ast_node.root.gir).items():
        yield Completion(
            name, CompletionItemKind.Property, docs=_get_docs(ast_node.root.gir, type)
        )


@decompiler("relation", cdata=True)
def decompile_relation(ctx, gir, name, cdata):
    ctx.print_attribute(name, cdata, get_types(ctx.gir).get(name))


@decompiler("state", cdata=True)
def decompile_state(ctx, gir, name, cdata, translatable="false"):
    if decompile.truthy(translatable):
        ctx.print(f'{name}: _("{_escape_quote(cdata)}");')
    else:
        ctx.print_attribute(name, cdata, get_types(ctx.gir).get(name))


@decompiler("accessibility")
def decompile_accessibility(ctx, gir):
    ctx.print("accessibility {")

View file

@@ -35,12 +35,14 @@ class Item(BaseTypedAttribute):
item = Group(
    Item,
    [
        Optional(
            [
                UseIdent("name"),
                ":",
            ]
        ),
        VALUE_HOOKS,
    ],
)

@@ -67,7 +69,4 @@ class Items(AstNode):
    matches=new_statement_patterns,
)
def items_completer(ast_node, match_variables):
    yield Completion("items", CompletionItemKind.Snippet, snippet="items [$0]")

View file

@@ -37,6 +37,7 @@ class Filters(AstNode):
                f"Duplicate {self.tokens['tag_name']} block",
                check=lambda child: child.tokens["tag_name"] == self.tokens["tag_name"],
            )

        wrapped_validator(self)

@@ -57,12 +58,12 @@ def create_node(tag_name: str, singular: str):
                    [
                        UseQuoted("name"),
                        UseLiteral("tag_name", singular),
                    ],
                ),
                ",",
            ),
            "]",
        ],
    )

@@ -77,31 +78,38 @@ suffixes = create_node("suffixes", "suffix")
    matches=new_statement_patterns,
)
def file_filter_completer(ast_node, match_variables):
    yield Completion(
        "mime-types", CompletionItemKind.Snippet, snippet='mime-types ["$0"]'
    )
    yield Completion("patterns", CompletionItemKind.Snippet, snippet='patterns ["$0"]')
    yield Completion("suffixes", CompletionItemKind.Snippet, snippet='suffixes ["$0"]')


@decompiler("mime-types")
def decompile_mime_types(ctx, gir):
    ctx.print("mime-types [")


@decompiler("mime-type", cdata=True)
def decompile_mime_type(ctx, gir, cdata):
    ctx.print(f'"{cdata}",')


@decompiler("patterns")
def decompile_patterns(ctx, gir):
    ctx.print("patterns [")


@decompiler("pattern", cdata=True)
def decompile_pattern(ctx, gir, cdata):
    ctx.print(f'"{cdata}",')


@decompiler("suffixes")
def decompile_suffixes(ctx, gir):
    ctx.print("suffixes [")


@decompiler("suffix", cdata=True)
def decompile_suffix(ctx, gir, cdata):
    ctx.print(f'"{cdata}",')

View file

@@ -45,7 +45,7 @@ layout_prop = Group(
        UseIdent("name"),
        ":",
        VALUE_HOOKS.expected("a value"),
    ),
)

@@ -71,10 +71,7 @@ class Layout(AstNode):
    matches=new_statement_patterns,
)
def layout_completer(ast_node, match_variables):
    yield Completion("layout", CompletionItemKind.Snippet, snippet="layout {\n $0\n}")


@decompiler("layout")

View file

@@ -56,22 +56,12 @@ menu_contents = Sequence()
menu_section = Group(
    Menu,
    ["section", UseLiteral("tag", "section"), Optional(UseIdent("id")), menu_contents],
)

menu_submenu = Group(
    Menu,
    ["submenu", UseLiteral("tag", "submenu"), Optional(UseIdent("id")), menu_contents],
)

menu_attribute = Group(

@@ -81,7 +71,7 @@ menu_attribute = Group(
        ":",
        VALUE_HOOKS.expected("a value"),
        Match(";").expected(),
    ],
)

menu_item = Group(

@@ -92,7 +82,7 @@ menu_item = Group(
        Optional(UseIdent("id")),
        Match("{").expected(),
        Until(menu_attribute, "}"),
    ],
)

menu_item_shorthand = Group(

@@ -105,58 +95,60 @@ menu_item_shorthand = Group(
            MenuAttribute,
            [UseLiteral("name", "label"), VALUE_HOOKS],
        ),
        Optional(
            [
                ",",
                Optional(
                    [
                        Group(
                            MenuAttribute,
                            [UseLiteral("name", "action"), VALUE_HOOKS],
                        ),
                        Optional(
                            [
                                ",",
                                Group(
                                    MenuAttribute,
                                    [UseLiteral("name", "icon"), VALUE_HOOKS],
                                ),
                            ]
                        ),
                    ]
                ),
            ]
        ),
        Match(")").expected(),
    ],
)

menu_contents.children = [
    Match("{"),
    Until(
        AnyOf(
            menu_section,
            menu_submenu,
            menu_item_shorthand,
            menu_item,
            menu_attribute,
        ),
        "}",
    ),
]

menu: Group = Group(
    Menu,
    ["menu", UseLiteral("tag", "menu"), Optional(UseIdent("id")), menu_contents],
)

from .ui import UI


@completer(
    applies_in=[UI],
    matches=new_statement_patterns,
)
def menu_completer(ast_node, match_variables):
    yield Completion("menu", CompletionItemKind.Snippet, snippet="menu {\n $0\n}")

@@ -165,34 +157,21 @@ def menu_completer(ast_node, match_variables):
)
def menu_content_completer(ast_node, match_variables):
    yield Completion(
        "submenu", CompletionItemKind.Snippet, snippet="submenu {\n $0\n}"
    )
    yield Completion(
        "section", CompletionItemKind.Snippet, snippet="section {\n $0\n}"
    )
    yield Completion("item", CompletionItemKind.Snippet, snippet="item {\n $0\n}")
    yield Completion(
        "item (shorthand)",
        CompletionItemKind.Snippet,
        snippet='item (_("${1:Label}"), "${2:action-name}", "${3:icon-name}")',
    )
    yield Completion("label", CompletionItemKind.Snippet, snippet="label: $0;")
"label", CompletionItemKind.Snippet, yield Completion("action", CompletionItemKind.Snippet, snippet='action: "$0";')
snippet='label: $0;' yield Completion("icon", CompletionItemKind.Snippet, snippet='icon: "$0";')
)
yield Completion(
"action", CompletionItemKind.Snippet,
snippet='action: "$0";'
)
yield Completion(
"icon", CompletionItemKind.Snippet,
snippet='icon: "$0";'
)
@decompiler("menu") @decompiler("menu")
@ -202,6 +181,7 @@ def decompile_menu(ctx, gir, id=None):
else: else:
ctx.print("menu {") ctx.print("menu {")
@decompiler("submenu") @decompiler("submenu")
def decompile_submenu(ctx, gir, id=None): def decompile_submenu(ctx, gir, id=None):
if id: if id:
@ -209,6 +189,7 @@ def decompile_submenu(ctx, gir, id=None):
else: else:
ctx.print("submenu {") ctx.print("submenu {")
@decompiler("item") @decompiler("item")
def decompile_item(ctx, gir, id=None): def decompile_item(ctx, gir, id=None):
if id: if id:
@ -216,6 +197,7 @@ def decompile_item(ctx, gir, id=None):
else: else:
ctx.print("item {") ctx.print("item {")
@decompiler("section") @decompiler("section")
def decompile_section(ctx, gir, id=None): def decompile_section(ctx, gir, id=None):
if id: if id:

View file

@ -32,7 +32,7 @@ class Widget(AstNode):
if object is None: if object is None:
raise CompileError( raise CompileError(
f"Could not find object with ID {self.tokens['name']}", f"Could not find object with ID {self.tokens['name']}",
did_you_mean=(self.tokens['name'], self.root.objects_by_id.keys()), did_you_mean=(self.tokens["name"], self.root.objects_by_id.keys()),
) )
elif object.gir_class and not object.gir_class.assignable_to(type): elif object.gir_class and not object.gir_class.assignable_to(type):
raise CompileError( raise CompileError(

View file

@ -55,7 +55,4 @@ class Strings(AstNode):
matches=new_statement_patterns, matches=new_statement_patterns,
) )
def strings_completer(ast_node, match_variables): def strings_completer(ast_node, match_variables):
yield Completion( yield Completion("strings", CompletionItemKind.Snippet, snippet="strings [$0]")
"strings", CompletionItemKind.Snippet,
snippet="strings [$0]"
)

View file

@ -49,13 +49,14 @@ class Styles(AstNode):
matches=new_statement_patterns, matches=new_statement_patterns,
) )
def style_completer(ast_node, match_variables): def style_completer(ast_node, match_variables):
yield Completion("styles", CompletionItemKind.Keyword, snippet="styles [\"$0\"]") yield Completion("styles", CompletionItemKind.Keyword, snippet='styles ["$0"]')
@decompiler("style") @decompiler("style")
def decompile_style(ctx, gir): def decompile_style(ctx, gir):
ctx.print(f"styles [") ctx.print(f"styles [")
@decompiler("class") @decompiler("class")
def decompile_style_class(ctx, gir, name): def decompile_style_class(ctx, gir, name):
ctx.print(f'"{name}",') ctx.print(f'"{name}",')

View file

@ -26,18 +26,21 @@ from .common import *
ALLOWED_PARENTS: T.List[T.Tuple[str, str]] = [ ALLOWED_PARENTS: T.List[T.Tuple[str, str]] = [
("Gtk", "Buildable"), ("Gtk", "Buildable"),
("Gio", "ListStore") ("Gio", "ListStore"),
] ]
class Child(AstNode): class Child(AstNode):
grammar = [ grammar = [
Optional([ Optional(
[
"[", "[",
Optional(["internal-child", UseLiteral("internal_child", True)]), Optional(["internal-child", UseLiteral("internal_child", True)]),
UseIdent("child_type").expected("a child type"), UseIdent("child_type").expected("a child type"),
Optional(ResponseId), Optional(ResponseId),
"]", "]",
]), ]
),
Object, Object,
] ]
@ -53,9 +56,13 @@ class Child(AstNode):
if gir_class.assignable_to(parent_type): if gir_class.assignable_to(parent_type):
break break
else: else:
hints=["only Gio.ListStore or Gtk.Buildable implementors can have children"] hints = [
"only Gio.ListStore or Gtk.Buildable implementors can have children"
]
if "child" in gir_class.properties: if "child" in gir_class.properties:
hints.append("did you mean to assign this object to the 'child' property?") hints.append(
"did you mean to assign this object to the 'child' property?"
)
raise CompileError( raise CompileError(
f"{gir_class.full_name} doesn't have children", f"{gir_class.full_name} doesn't have children",
hints=hints, hints=hints,

View file

@ -28,10 +28,12 @@ class Template(Object):
grammar = [ grammar = [
"template", "template",
UseIdent("id").expected("template class name"), UseIdent("id").expected("template class name"),
Optional([ Optional(
[
Match(":"), Match(":"),
to_parse_node(ClassName).expected("parent class"), to_parse_node(ClassName).expected("parent class"),
]), ]
),
ObjectContent, ObjectContent,
] ]
@ -54,7 +56,9 @@ class Template(Object):
@validate("id") @validate("id")
def unique_in_parent(self): def unique_in_parent(self):
self.validate_unique_in_parent(f"Only one template may be defined per file, but this file contains {len(self.parent.children[Template])}",) self.validate_unique_in_parent(
f"Only one template may be defined per file, but this file contains {len(self.parent.children[Template])}",
)
@decompiler("template") @decompiler("template")

View file

@ -24,8 +24,12 @@ from .common import *
class GtkDirective(AstNode): class GtkDirective(AstNode):
grammar = Statement( grammar = Statement(
Match("using").err("File must start with a \"using Gtk\" directive (e.g. `using Gtk 4.0;`)"), Match("using").err(
Match("Gtk").err("File must start with a \"using Gtk\" directive (e.g. `using Gtk 4.0;`)"), 'File must start with a "using Gtk" directive (e.g. `using Gtk 4.0;`)'
),
Match("Gtk").err(
'File must start with a "using Gtk" directive (e.g. `using Gtk 4.0;`)'
),
UseNumberText("version").expected("a version number for GTK"), UseNumberText("version").expected("a version number for GTK"),
) )
@ -35,7 +39,9 @@ class GtkDirective(AstNode):
if version not in ["4.0"]: if version not in ["4.0"]:
err = CompileError("Only GTK 4 is supported") err = CompileError("Only GTK 4 is supported")
if version and version.startswith("4"): if version and version.startswith("4"):
err.hint("Expected the GIR version, not an exact version number. Use 'using Gtk 4.0;'.") err.hint(
"Expected the GIR version, not an exact version number. Use 'using Gtk 4.0;'."
)
else: else:
err.hint("Expected 'using Gtk 4.0;'") err.hint("Expected 'using Gtk 4.0;'")
raise err raise err
@ -51,7 +57,6 @@ class GtkDirective(AstNode):
hints=e.hints, hints=e.hints,
) )
@property @property
def gir_namespace(self): def gir_namespace(self):
# validate the GTK version first to make sure the more specific error # validate the GTK version first to make sure the more specific error

View file

@ -26,21 +26,13 @@ from .common import *
class ResponseId(AstNode): class ResponseId(AstNode):
"""Response ID of action widget.""" """Response ID of action widget."""
ALLOWED_PARENTS: T.List[T.Tuple[str, str]] = [ ALLOWED_PARENTS: T.List[T.Tuple[str, str]] = [("Gtk", "Dialog"), ("Gtk", "InfoBar")]
("Gtk", "Dialog"),
("Gtk", "InfoBar")
]
grammar = [ grammar = [
Keyword("response"), Keyword("response"),
"=", "=",
AnyOf( AnyOf(UseIdent("response_id"), UseNumber("response_id")),
UseIdent("response_id"), Optional([Keyword("default"), UseLiteral("is_default", True)]),
UseNumber("response_id")
),
Optional([
Keyword("default"), UseLiteral("is_default", True)
])
] ]
@validate() @validate()
@ -91,18 +83,15 @@ class ResponseId(AstNode):
if isinstance(response, int): if isinstance(response, int):
if response < 0: if response < 0:
raise CompileError( raise CompileError("Numeric response type can't be negative")
"Numeric response type can't be negative")
elif isinstance(response, float): elif isinstance(response, float):
raise CompileError( raise CompileError(
"Response type must be GtkResponseType member or integer," "Response type must be GtkResponseType member or integer," " not float"
" not float"
) )
else: else:
responses = gir.get_type("ResponseType", "Gtk").members.keys() responses = gir.get_type("ResponseType", "Gtk").members.keys()
if response not in responses: if response not in responses:
raise CompileError( raise CompileError(f'Response type "{response}" doesn\'t exist')
f"Response type \"{response}\" doesn't exist")
@validate("default") @validate("default")
def no_multiple_default(self) -> None: def no_multiple_default(self) -> None:
@ -135,4 +124,3 @@ class ResponseId(AstNode):
_object: Object = self.parent.children[Object][0] _object: Object = self.parent.children[Object][0]
return _object.tokens["id"] return _object.tokens["id"]

View file

@ -41,7 +41,9 @@ class TypeName(AstNode):
@validate("class_name") @validate("class_name")
def type_exists(self): def type_exists(self):
if not self.tokens["ignore_gir"] and self.gir_ns is not None: if not self.tokens["ignore_gir"] and self.gir_ns is not None:
self.root.gir.validate_type(self.tokens["class_name"], self.tokens["namespace"]) self.root.gir.validate_type(
self.tokens["class_name"], self.tokens["namespace"]
)
@validate("namespace") @validate("namespace")
def gir_ns_exists(self): def gir_ns_exists(self):
@ -56,7 +58,9 @@ class TypeName(AstNode):
@property @property
def gir_type(self) -> T.Optional[gir.Class]: def gir_type(self) -> T.Optional[gir.Class]:
if self.tokens["class_name"] and not self.tokens["ignore_gir"]: if self.tokens["class_name"] and not self.tokens["ignore_gir"]:
return self.root.gir.get_type(self.tokens["class_name"], self.tokens["namespace"]) return self.root.gir.get_type(
self.tokens["class_name"], self.tokens["namespace"]
)
return None return None
@property @property
@ -82,7 +86,9 @@ class ClassName(TypeName):
def gir_class_exists(self): def gir_class_exists(self):
if self.gir_type is not None and not isinstance(self.gir_type, Class): if self.gir_type is not None and not isinstance(self.gir_type, Class):
if isinstance(self.gir_type, Interface): if isinstance(self.gir_type, Interface):
raise CompileError(f"{self.gir_type.full_name} is an interface, not a class") raise CompileError(
f"{self.gir_type.full_name} is an interface, not a class"
)
else: else:
raise CompileError(f"{self.gir_type.full_name} is not a class") raise CompileError(f"{self.gir_type.full_name} is not a class")
@ -93,6 +99,5 @@ class ConcreteClassName(ClassName):
if isinstance(self.gir_type, Class) and self.gir_type.abstract: if isinstance(self.gir_type, Class) and self.gir_type.abstract:
raise CompileError( raise CompileError(
f"{self.gir_type.full_name} can't be instantiated because it's abstract", f"{self.gir_type.full_name} can't be instantiated because it's abstract",
hints=[f"did you mean to use a subclass of {self.gir_type.full_name}?"] hints=[f"did you mean to use a subclass of {self.gir_type.full_name}?"],
) )

View file

@ -32,11 +32,14 @@ class UI(AstNode):
grammar = [ grammar = [
GtkDirective, GtkDirective,
ZeroOrMore(Import), ZeroOrMore(Import),
Until(AnyOf( Until(
AnyOf(
Template, Template,
menu, menu,
Object, Object,
), Eof()), ),
Eof(),
),
] ]
@property @property
@ -61,11 +64,13 @@ class UI(AstNode):
return gir_ctx return gir_ctx
@property @property
def objects_by_id(self): def objects_by_id(self):
return { obj.tokens["id"]: obj for obj in self.iterate_children_recursive() if obj.tokens["id"] is not None } return {
obj.tokens["id"]: obj
for obj in self.iterate_children_recursive()
if obj.tokens["id"] is not None
}
@validate() @validate()
def gir_errors(self): def gir_errors(self):
@ -74,7 +79,6 @@ class UI(AstNode):
if len(self._gir_errors): if len(self._gir_errors):
raise MultipleErrors(self._gir_errors) raise MultipleErrors(self._gir_errors)
@validate() @validate()
def unique_ids(self): def unique_ids(self):
passed = {} passed = {}
@ -84,5 +88,7 @@ class UI(AstNode):
if obj.tokens["id"] in passed: if obj.tokens["id"] in passed:
token = obj.group.tokens["id"] token = obj.group.tokens["id"]
raise CompileError(f"Duplicate object ID '{obj.tokens['id']}'", token.start, token.end) raise CompileError(
f"Duplicate object ID '{obj.tokens['id']}'", token.start, token.end
)
passed[obj.tokens["id"]] = obj passed[obj.tokens["id"]] = obj

View file

@ -84,13 +84,21 @@ class QuotedValue(Value):
@validate() @validate()
def validate_for_type(self): def validate_for_type(self):
type = self.parent.value_type type = self.parent.value_type
if isinstance(type, gir.IntType) or isinstance(type, gir.UIntType) or isinstance(type, gir.FloatType): if (
isinstance(type, gir.IntType)
or isinstance(type, gir.UIntType)
or isinstance(type, gir.FloatType)
):
raise CompileError(f"Cannot convert string to number") raise CompileError(f"Cannot convert string to number")
elif isinstance(type, gir.StringType): elif isinstance(type, gir.StringType):
pass pass
elif isinstance(type, gir.Class) or isinstance(type, gir.Interface) or isinstance(type, gir.Boxed): elif (
isinstance(type, gir.Class)
or isinstance(type, gir.Interface)
or isinstance(type, gir.Boxed)
):
parseable_types = [ parseable_types = [
"Gdk.Paintable", "Gdk.Paintable",
"Gdk.Texture", "Gdk.Texture",
@ -106,8 +114,12 @@ class QuotedValue(Value):
if type.full_name not in parseable_types: if type.full_name not in parseable_types:
hints = [] hints = []
if isinstance(type, gir.TypeType): if isinstance(type, gir.TypeType):
hints.append(f"use the typeof operator: 'typeof({self.tokens('value')})'") hints.append(
raise CompileError(f"Cannot convert string to {type.full_name}", hints=hints) f"use the typeof operator: 'typeof({self.tokens('value')})'"
)
raise CompileError(
f"Cannot convert string to {type.full_name}", hints=hints
)
elif type is not None: elif type is not None:
raise CompileError(f"Cannot convert string to {type.full_name}") raise CompileError(f"Cannot convert string to {type.full_name}")
@ -127,7 +139,9 @@ class NumberValue(Value):
try: try:
int(self.tokens["value"]) int(self.tokens["value"])
except: except:
raise CompileError(f"Cannot convert {self.group.tokens['value']} to integer") raise CompileError(
f"Cannot convert {self.group.tokens['value']} to integer"
)
elif isinstance(type, gir.UIntType): elif isinstance(type, gir.UIntType):
try: try:
@ -135,13 +149,17 @@ class NumberValue(Value):
if int(self.tokens["value"]) < 0: if int(self.tokens["value"]) < 0:
raise Exception() raise Exception()
except: except:
raise CompileError(f"Cannot convert {self.group.tokens['value']} to unsigned integer") raise CompileError(
f"Cannot convert {self.group.tokens['value']} to unsigned integer"
)
elif isinstance(type, gir.FloatType): elif isinstance(type, gir.FloatType):
try: try:
float(self.tokens["value"]) float(self.tokens["value"])
except: except:
raise CompileError(f"Cannot convert {self.group.tokens['value']} to float") raise CompileError(
f"Cannot convert {self.group.tokens['value']} to float"
)
elif type is not None: elif type is not None:
raise CompileError(f"Cannot convert number to {type.full_name}") raise CompileError(f"Cannot convert number to {type.full_name}")
@ -164,7 +182,7 @@ class Flag(AstNode):
if isinstance(type, gir.Bitfield) and self.tokens["value"] not in type.members: if isinstance(type, gir.Bitfield) and self.tokens["value"] not in type.members:
raise CompileError( raise CompileError(
f"{self.tokens['value']} is not a member of {type.full_name}", f"{self.tokens['value']} is not a member of {type.full_name}",
did_you_mean=(self.tokens['value'], type.members.keys()), did_you_mean=(self.tokens["value"], type.members.keys()),
) )
@ -189,14 +207,14 @@ class IdentValue(Value):
if self.tokens["value"] not in type.members: if self.tokens["value"] not in type.members:
raise CompileError( raise CompileError(
f"{self.tokens['value']} is not a member of {type.full_name}", f"{self.tokens['value']} is not a member of {type.full_name}",
did_you_mean=(self.tokens['value'], type.members.keys()), did_you_mean=(self.tokens["value"], type.members.keys()),
) )
elif isinstance(type, gir.BoolType): elif isinstance(type, gir.BoolType):
if self.tokens["value"] not in ["true", "false"]: if self.tokens["value"] not in ["true", "false"]:
raise CompileError( raise CompileError(
f"Expected 'true' or 'false' for boolean value", f"Expected 'true' or 'false' for boolean value",
did_you_mean=(self.tokens['value'], ["true", "false"]), did_you_mean=(self.tokens["value"], ["true", "false"]),
) )
elif type is not None: elif type is not None:
@ -204,14 +222,13 @@ class IdentValue(Value):
if object is None: if object is None:
raise CompileError( raise CompileError(
f"Could not find object with ID {self.tokens['value']}", f"Could not find object with ID {self.tokens['value']}",
did_you_mean=(self.tokens['value'], self.root.objects_by_id.keys()), did_you_mean=(self.tokens["value"], self.root.objects_by_id.keys()),
) )
elif object.gir_class and not object.gir_class.assignable_to(type): elif object.gir_class and not object.gir_class.assignable_to(type):
raise CompileError( raise CompileError(
f"Cannot assign {object.gir_class.full_name} to {type.full_name}" f"Cannot assign {object.gir_class.full_name} to {type.full_name}"
) )
@docs() @docs()
def docs(self): def docs(self):
type = self.parent.value_type type = self.parent.value_type
@ -223,9 +240,7 @@ class IdentValue(Value):
elif isinstance(type, gir.GirNode): elif isinstance(type, gir.GirNode):
return type.doc return type.doc
def get_semantic_tokens(self) -> T.Iterator[SemanticToken]: def get_semantic_tokens(self) -> T.Iterator[SemanticToken]:
if isinstance(self.parent.value_type, gir.Enumeration): if isinstance(self.parent.value_type, gir.Enumeration):
token = self.group.tokens["value"] token = self.group.tokens["value"]
yield SemanticToken(token.start, token.end, SemanticTokenType.EnumMember) yield SemanticToken(token.start, token.end, SemanticTokenType.EnumMember)

View file

@ -35,6 +35,7 @@ def command(json_method):
def decorator(func): def decorator(func):
func._json_method = json_method func._json_method = json_method
return func return func
return decorator return decorator
@ -50,8 +51,16 @@ class OpenFile:
def apply_changes(self, changes): def apply_changes(self, changes):
for change in changes: for change in changes:
start = utils.pos_to_idx(change["range"]["start"]["line"], change["range"]["start"]["character"], self.text) start = utils.pos_to_idx(
end = utils.pos_to_idx(change["range"]["end"]["line"], change["range"]["end"]["character"], self.text) change["range"]["start"]["line"],
change["range"]["start"]["character"],
self.text,
)
end = utils.pos_to_idx(
change["range"]["end"]["line"],
change["range"]["end"]["character"],
self.text,
)
self.text = self.text[:start] + change["text"] + self.text[end:] self.text = self.text[:start] + change["text"] + self.text[end:]
self._update() self._update()
@ -69,7 +78,6 @@ class OpenFile:
except CompileError as e: except CompileError as e:
self.diagnostics.append(e) self.diagnostics.append(e)
def calc_semantic_tokens(self) -> T.List[int]: def calc_semantic_tokens(self) -> T.List[int]:
tokens = list(self.ast.get_semantic_tokens()) tokens = list(self.ast.get_semantic_tokens())
token_lists = [ token_lists = [
@ -78,7 +86,9 @@ class OpenFile:
token.end - token.start, # length token.end - token.start, # length
token.type, token.type,
0, # token modifiers 0, # token modifiers
] for token in tokens] ]
for token in tokens
]
# convert line, column numbers to deltas # convert line, column numbers to deltas
for i, token_list in enumerate(token_lists[1:]): for i, token_list in enumerate(token_lists[1:]):
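
The hunk above ends at the loop that converts these absolute positions into the delta form the LSP semanticTokens response requires. A small illustrative sketch of that encoding with made-up token positions; the helper below is an assumption about the intent of the loop, not a copy of the method:

def to_deltas(token_lists):
    # Each entry is [line, start column, length, token type, modifiers], with
    # absolute positions. The LSP wire format wants each token's line as a
    # delta from the previous token, and its column as a delta only when both
    # tokens sit on the same line.
    result = []
    prev_line, prev_col = 0, 0
    for line, col, length, ttype, mods in token_lists:
        delta_line = line - prev_line
        delta_col = col - prev_col if delta_line == 0 else col
        result.append([delta_line, delta_col, length, ttype, mods])
        prev_line, prev_col = line, col
    return result

assert to_deltas(
    [[0, 4, 3, 2, 0], [0, 10, 5, 2, 0], [2, 1, 7, 3, 0]]
) == [[0, 4, 3, 2, 0], [0, 6, 5, 2, 0], [2, 1, 7, 3, 0]]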
@ -125,33 +135,39 @@ class LanguageServer:
except Exception as e: except Exception as e:
printerr(traceback.format_exc()) printerr(traceback.format_exc())
def _send(self, data): def _send(self, data):
data["jsonrpc"] = "2.0" data["jsonrpc"] = "2.0"
line = json.dumps(data, separators=(",", ":")) + "\r\n" line = json.dumps(data, separators=(",", ":")) + "\r\n"
printerr("output: " + line) printerr("output: " + line)
sys.stdout.write(f"Content-Length: {len(line.encode())}\r\nContent-Type: application/vscode-jsonrpc; charset=utf-8\r\n\r\n{line}") sys.stdout.write(
f"Content-Length: {len(line.encode())}\r\nContent-Type: application/vscode-jsonrpc; charset=utf-8\r\n\r\n{line}"
)
sys.stdout.flush() sys.stdout.flush()
def _send_response(self, id, result): def _send_response(self, id, result):
self._send({ self._send(
{
"id": id, "id": id,
"result": result, "result": result,
}) }
)
def _send_notification(self, method, params): def _send_notification(self, method, params):
self._send({ self._send(
{
"method": method, "method": method,
"params": params, "params": params,
}) }
)
@command("initialize") @command("initialize")
def initialize(self, id, params): def initialize(self, id, params):
from . import main from . import main
self.client_capabilities = params.get("capabilities") self.client_capabilities = params.get("capabilities")
self._send_response(id, { self._send_response(
id,
{
"capabilities": { "capabilities": {
"textDocumentSync": { "textDocumentSync": {
"openClose": True, "openClose": True,
@ -171,7 +187,8 @@ class LanguageServer:
"name": "Blueprint", "name": "Blueprint",
"version": main.VERSION, "version": main.VERSION,
}, },
}) },
)
@command("textDocument/didOpen") @command("textDocument/didOpen")
def didOpen(self, id, params): def didOpen(self, id, params):
@ -198,14 +215,23 @@ class LanguageServer:
@command("textDocument/hover") @command("textDocument/hover")
def hover(self, id, params): def hover(self, id, params):
open_file = self._open_files[params["textDocument"]["uri"]] open_file = self._open_files[params["textDocument"]["uri"]]
docs = open_file.ast and open_file.ast.get_docs(utils.pos_to_idx(params["position"]["line"], params["position"]["character"], open_file.text)) docs = open_file.ast and open_file.ast.get_docs(
utils.pos_to_idx(
params["position"]["line"],
params["position"]["character"],
open_file.text,
)
)
if docs: if docs:
self._send_response(id, { self._send_response(
id,
{
"contents": { "contents": {
"kind": "markdown", "kind": "markdown",
"value": docs, "value": docs,
} }
}) },
)
else: else:
self._send_response(id, None) self._send_response(id, None)
@ -217,40 +243,59 @@ class LanguageServer:
self._send_response(id, []) self._send_response(id, [])
return return
idx = utils.pos_to_idx(params["position"]["line"], params["position"]["character"], open_file.text) idx = utils.pos_to_idx(
params["position"]["line"], params["position"]["character"], open_file.text
)
completions = complete(open_file.ast, open_file.tokens, idx) completions = complete(open_file.ast, open_file.tokens, idx)
self._send_response(id, [completion.to_json(True) for completion in completions]) self._send_response(
id, [completion.to_json(True) for completion in completions]
)
@command("textDocument/semanticTokens/full") @command("textDocument/semanticTokens/full")
def semantic_tokens(self, id, params): def semantic_tokens(self, id, params):
open_file = self._open_files[params["textDocument"]["uri"]] open_file = self._open_files[params["textDocument"]["uri"]]
self._send_response(id, { self._send_response(
id,
{
"data": open_file.calc_semantic_tokens(), "data": open_file.calc_semantic_tokens(),
}) },
)
@command("textDocument/codeAction") @command("textDocument/codeAction")
def code_actions(self, id, params): def code_actions(self, id, params):
open_file = self._open_files[params["textDocument"]["uri"]] open_file = self._open_files[params["textDocument"]["uri"]]
range_start = utils.pos_to_idx(params["range"]["start"]["line"], params["range"]["start"]["character"], open_file.text) range_start = utils.pos_to_idx(
range_end = utils.pos_to_idx(params["range"]["end"]["line"], params["range"]["end"]["character"], open_file.text) params["range"]["start"]["line"],
params["range"]["start"]["character"],
open_file.text,
)
range_end = utils.pos_to_idx(
params["range"]["end"]["line"],
params["range"]["end"]["character"],
open_file.text,
)
actions = [ actions = [
{ {
"title": action.title, "title": action.title,
"kind": "quickfix", "kind": "quickfix",
"diagnostics": [self._create_diagnostic(open_file.text, open_file.uri, diagnostic)], "diagnostics": [
self._create_diagnostic(open_file.text, open_file.uri, diagnostic)
],
"edit": { "edit": {
"changes": { "changes": {
open_file.uri: [{ open_file.uri: [
"range": utils.idxs_to_range(diagnostic.start, diagnostic.end, open_file.text), {
"newText": action.replace_with "range": utils.idxs_to_range(
}] diagnostic.start, diagnostic.end, open_file.text
),
"newText": action.replace_with,
} }
]
} }
},
} }
for diagnostic in open_file.diagnostics for diagnostic in open_file.diagnostics
if not (diagnostic.end < range_start or diagnostic.start > range_end) if not (diagnostic.end < range_start or diagnostic.start > range_end)
@ -259,23 +304,30 @@ class LanguageServer:
self._send_response(id, actions) self._send_response(id, actions)
def _send_file_updates(self, open_file: OpenFile): def _send_file_updates(self, open_file: OpenFile):
self._send_notification("textDocument/publishDiagnostics", { self._send_notification(
"textDocument/publishDiagnostics",
{
"uri": open_file.uri, "uri": open_file.uri,
"diagnostics": [self._create_diagnostic(open_file.text, open_file.uri, err) for err in open_file.diagnostics], "diagnostics": [
}) self._create_diagnostic(open_file.text, open_file.uri, err)
for err in open_file.diagnostics
],
},
)
def _create_diagnostic(self, text, uri, err): def _create_diagnostic(self, text, uri, err):
message = err.message message = err.message
for hint in err.hints: for hint in err.hints:
message += '\nhint: ' + hint message += "\nhint: " + hint
result = { result = {
"range": utils.idxs_to_range(err.start, err.end, text), "range": utils.idxs_to_range(err.start, err.end, text),
"message": message, "message": message,
"severity": DiagnosticSeverity.Warning if isinstance(err, CompileWarning) else DiagnosticSeverity.Error, "severity": DiagnosticSeverity.Warning
if isinstance(err, CompileWarning)
else DiagnosticSeverity.Error,
} }
if len(err.references) > 0: if len(err.references) > 0:
@ -285,7 +337,7 @@ class LanguageServer:
"uri": uri, "uri": uri,
"range": utils.idxs_to_range(ref.start, ref.end, text), "range": utils.idxs_to_range(ref.start, ref.end, text),
}, },
"message": ref.message "message": ref.message,
} }
for ref in err.references for ref in err.references
] ]
@ -297,4 +349,3 @@ for name in dir(LanguageServer):
item = getattr(LanguageServer, name) item = getattr(LanguageServer, name)
if callable(item) and hasattr(item, "_json_method"): if callable(item) and hasattr(item, "_json_method"):
LanguageServer.commands[item._json_method] = item LanguageServer.commands[item._json_method] = item
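
For reference, the _send helper in this file frames every response and notification with the base-protocol headers before writing it to stdout. A minimal sketch of the resulting byte stream, using a made-up publishDiagnostics payload:

import json

body = (
    json.dumps(
        {
            "jsonrpc": "2.0",
            "method": "textDocument/publishDiagnostics",
            "params": {"uri": "file:///example.blp", "diagnostics": []},
        },
        separators=(",", ":"),
    )
    + "\r\n"
)

# Content-Length counts the encoded body, exactly as _send does above.
message = (
    f"Content-Length: {len(body.encode())}\r\n"
    "Content-Type: application/vscode-jsonrpc; charset=utf-8\r\n"
    "\r\n" + body
)
print(repr(message))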

View file

@ -31,13 +31,16 @@ class TextDocumentSyncKind(enum.IntEnum):
Full = 1 Full = 1
Incremental = 2 Incremental = 2
class CompletionItemTag(enum.IntEnum): class CompletionItemTag(enum.IntEnum):
Deprecated = 1 Deprecated = 1
class InsertTextFormat(enum.IntEnum): class InsertTextFormat(enum.IntEnum):
PlainText = 1 PlainText = 1
Snippet = 2 Snippet = 2
class CompletionItemKind(enum.IntEnum): class CompletionItemKind(enum.IntEnum):
Text = 1 Text = 1
Method = 2 Method = 2
@ -91,7 +94,9 @@ class Completion:
"documentation": { "documentation": {
"kind": "markdown", "kind": "markdown",
"value": self.docs, "value": self.docs,
} if self.docs else None, }
if self.docs
else None,
"deprecated": self.deprecated, "deprecated": self.deprecated,
"insertText": insert_text, "insertText": insert_text,
"insertTextFormat": insert_text_format, "insertTextFormat": insert_text_format,
@ -110,7 +115,6 @@ class DiagnosticSeverity(enum.IntEnum):
Hint = 4 Hint = 4
@dataclass @dataclass
class SemanticToken: class SemanticToken:
start: int start: int

View file

@ -30,24 +30,41 @@ from .outputs import XmlOutput
VERSION = "uninstalled" VERSION = "uninstalled"
LIBDIR = None LIBDIR = None
class BlueprintApp: class BlueprintApp:
def main(self): def main(self):
self.parser = argparse.ArgumentParser() self.parser = argparse.ArgumentParser()
self.subparsers = self.parser.add_subparsers(metavar="command") self.subparsers = self.parser.add_subparsers(metavar="command")
self.parser.set_defaults(func=self.cmd_help) self.parser.set_defaults(func=self.cmd_help)
compile = self.add_subcommand("compile", "Compile blueprint files", self.cmd_compile) compile = self.add_subcommand(
"compile", "Compile blueprint files", self.cmd_compile
)
compile.add_argument("--output", dest="output", default="-") compile.add_argument("--output", dest="output", default="-")
compile.add_argument("input", metavar="filename", default=sys.stdin, type=argparse.FileType('r')) compile.add_argument(
"input", metavar="filename", default=sys.stdin, type=argparse.FileType("r")
)
batch_compile = self.add_subcommand("batch-compile", "Compile many blueprint files at once", self.cmd_batch_compile) batch_compile = self.add_subcommand(
"batch-compile",
"Compile many blueprint files at once",
self.cmd_batch_compile,
)
batch_compile.add_argument("output_dir", metavar="output-dir") batch_compile.add_argument("output_dir", metavar="output-dir")
batch_compile.add_argument("input_dir", metavar="input-dir") batch_compile.add_argument("input_dir", metavar="input-dir")
batch_compile.add_argument("inputs", nargs="+", metavar="filenames", default=sys.stdin, type=argparse.FileType('r')) batch_compile.add_argument(
"inputs",
nargs="+",
metavar="filenames",
default=sys.stdin,
type=argparse.FileType("r"),
)
port = self.add_subcommand("port", "Interactive porting tool", self.cmd_port) port = self.add_subcommand("port", "Interactive porting tool", self.cmd_port)
lsp = self.add_subcommand("lsp", "Run the language server (for internal use by IDEs)", self.cmd_lsp) lsp = self.add_subcommand(
"lsp", "Run the language server (for internal use by IDEs)", self.cmd_lsp
)
self.add_subcommand("help", "Show this message", self.cmd_help) self.add_subcommand("help", "Show this message", self.cmd_help)
@ -65,17 +82,14 @@ class BlueprintApp:
except: except:
report_bug() report_bug()
def add_subcommand(self, name, help, func): def add_subcommand(self, name, help, func):
parser = self.subparsers.add_parser(name, help=help) parser = self.subparsers.add_parser(name, help=help)
parser.set_defaults(func=func) parser.set_defaults(func=func)
return parser return parser
def cmd_help(self, opts): def cmd_help(self, opts):
self.parser.print_help() self.parser.print_help()
def cmd_compile(self, opts): def cmd_compile(self, opts):
data = opts.input.read() data = opts.input.read()
try: try:
@ -93,14 +107,15 @@ class BlueprintApp:
e.pretty_print(opts.input.name, data) e.pretty_print(opts.input.name, data)
sys.exit(1) sys.exit(1)
def cmd_batch_compile(self, opts): def cmd_batch_compile(self, opts):
for file in opts.inputs: for file in opts.inputs:
data = file.read() data = file.read()
try: try:
if not os.path.commonpath([file.name, opts.input_dir]): if not os.path.commonpath([file.name, opts.input_dir]):
print(f"{Colors.RED}{Colors.BOLD}error: input file '{file.name}' is not in input directory '{opts.input_dir}'{Colors.CLEAR}") print(
f"{Colors.RED}{Colors.BOLD}error: input file '{file.name}' is not in input directory '{opts.input_dir}'{Colors.CLEAR}"
)
sys.exit(1) sys.exit(1)
xml, warnings = self._compile(data) xml, warnings = self._compile(data)
@ -111,9 +126,8 @@ class BlueprintApp:
path = os.path.join( path = os.path.join(
opts.output_dir, opts.output_dir,
os.path.relpath( os.path.relpath(
os.path.splitext(file.name)[0] + ".ui", os.path.splitext(file.name)[0] + ".ui", opts.input_dir
opts.input_dir ),
)
) )
os.makedirs(os.path.dirname(path), exist_ok=True) os.makedirs(os.path.dirname(path), exist_ok=True)
with open(path, "w") as file: with open(path, "w") as file:
@ -122,16 +136,13 @@ class BlueprintApp:
e.pretty_print(file.name, data) e.pretty_print(file.name, data)
sys.exit(1) sys.exit(1)
def cmd_lsp(self, opts): def cmd_lsp(self, opts):
langserv = LanguageServer() langserv = LanguageServer()
langserv.run() langserv.run()
def cmd_port(self, opts): def cmd_port(self, opts):
interactive_port.run(opts) interactive_port.run(opts)
def _compile(self, data: str) -> T.Tuple[str, T.List[PrintableError]]: def _compile(self, data: str) -> T.Tuple[str, T.List[PrintableError]]:
tokens = tokenizer.tokenize(data) tokens = tokenizer.tokenize(data)
ast, errors, warnings = parser.parse(tokens) ast, errors, warnings = parser.parse(tokens)

View file

@ -1,7 +1,9 @@
from ..language import UI from ..language import UI
class OutputFormat: class OutputFormat:
def emit(self, ui: UI) -> str: def emit(self, ui: UI) -> str:
raise NotImplementedError() raise NotImplementedError()
from .xml import XmlOutput from .xml import XmlOutput

View file

@ -24,7 +24,13 @@ import typing as T
from collections import defaultdict from collections import defaultdict
from enum import Enum from enum import Enum
from .errors import assert_true, CompilerBugError, CompileError, CompileWarning, UnexpectedTokenError from .errors import (
assert_true,
CompilerBugError,
CompileError,
CompileWarning,
UnexpectedTokenError,
)
from .tokenizer import Token, TokenType from .tokenizer import Token, TokenType
@ -83,12 +89,16 @@ class ParseGroup:
try: try:
return self.ast_type(self, children, self.keys, incomplete=self.incomplete) return self.ast_type(self, children, self.keys, incomplete=self.incomplete)
except TypeError as e: except TypeError as e:
raise CompilerBugError(f"Failed to construct ast.{self.ast_type.__name__} from ParseGroup. See the previous stacktrace.") raise CompilerBugError(
f"Failed to construct ast.{self.ast_type.__name__} from ParseGroup. See the previous stacktrace."
)
def __str__(self): def __str__(self):
result = str(self.ast_type.__name__) result = str(self.ast_type.__name__)
result += "".join([f"\n{key}: {val}" for key, val in self.keys.items()]) + "\n" result += "".join([f"\n{key}: {val}" for key, val in self.keys.items()]) + "\n"
result += "\n".join([str(child) for children in self.children.values() for child in children]) result += "\n".join(
[str(child) for children in self.children.values() for child in children]
)
return result.replace("\n", "\n ") return result.replace("\n", "\n ")
@ -110,7 +120,6 @@ class ParseContext:
self.errors = [] self.errors = []
self.warnings = [] self.warnings = []
def create_child(self): def create_child(self):
"""Creates a new ParseContext at this context's position. The new """Creates a new ParseContext at this context's position. The new
context will be used to parse one node. If parsing is successful, the context will be used to parse one node. If parsing is successful, the
@ -150,7 +159,6 @@ class ParseContext:
elif other.last_group: elif other.last_group:
self.last_group = other.last_group self.last_group = other.last_group
def start_group(self, ast_type): def start_group(self, ast_type):
"""Sets this context to have its own match group.""" """Sets this context to have its own match group."""
assert_true(self.group is None) assert_true(self.group is None)
@ -166,10 +174,12 @@ class ParseContext:
parsed, but the parser recovered).""" parsed, but the parser recovered)."""
self.group_incomplete = True self.group_incomplete = True
def skip(self): def skip(self):
"""Skips whitespace and comments.""" """Skips whitespace and comments."""
while self.index < len(self.tokens) and self.tokens[self.index].type in SKIP_TOKENS: while (
self.index < len(self.tokens)
and self.tokens[self.index].type in SKIP_TOKENS
):
self.index += 1 self.index += 1
def next_token(self) -> Token: def next_token(self) -> Token:
@ -194,9 +204,11 @@ class ParseContext:
self.skip() self.skip()
end = self.tokens[self.index - 1].end end = self.tokens[self.index - 1].end
if (len(self.errors) if (
len(self.errors)
and isinstance((err := self.errors[-1]), UnexpectedTokenError) and isinstance((err := self.errors[-1]), UnexpectedTokenError)
and err.end == start): and err.end == start
):
err.end = end err.end = end
else: else:
self.errors.append(UnexpectedTokenError(start, end)) self.errors.append(UnexpectedTokenError(start, end))
@ -272,7 +284,9 @@ class Warning(ParseNode):
if self.child.parse(ctx).succeeded(): if self.child.parse(ctx).succeeded():
start_token = ctx.tokens[start_idx] start_token = ctx.tokens[start_idx]
end_token = ctx.tokens[ctx.index] end_token = ctx.tokens[ctx.index]
ctx.warnings.append(CompileWarning(self.message, start_token.start, end_token.end)) ctx.warnings.append(
CompileWarning(self.message, start_token.start, end_token.end)
)
return True return True
@ -297,6 +311,7 @@ class Fail(ParseNode):
class Group(ParseNode): class Group(ParseNode):
"""ParseNode that creates a match group.""" """ParseNode that creates a match group."""
def __init__(self, ast_type, child): def __init__(self, ast_type, child):
self.ast_type = ast_type self.ast_type = ast_type
self.child = to_parse_node(child) self.child = to_parse_node(child)
@ -309,6 +324,7 @@ class Group(ParseNode):
class Sequence(ParseNode): class Sequence(ParseNode):
"""ParseNode that attempts to match all of its children in sequence.""" """ParseNode that attempts to match all of its children in sequence."""
def __init__(self, *children): def __init__(self, *children):
self.children = [to_parse_node(child) for child in children] self.children = [to_parse_node(child) for child in children]
@ -322,6 +338,7 @@ class Sequence(ParseNode):
class Statement(ParseNode): class Statement(ParseNode):
"""ParseNode that attempts to match all of its children in sequence. If any """ParseNode that attempts to match all of its children in sequence. If any
child raises an error, the error will be logged but parsing will continue.""" child raises an error, the error will be logged but parsing will continue."""
def __init__(self, *children): def __init__(self, *children):
self.children = [to_parse_node(child) for child in children] self.children = [to_parse_node(child) for child in children]
@ -346,12 +363,14 @@ class Statement(ParseNode):
class AnyOf(ParseNode): class AnyOf(ParseNode):
"""ParseNode that attempts to match exactly one of its children. Child """ParseNode that attempts to match exactly one of its children. Child
nodes are attempted in order.""" nodes are attempted in order."""
def __init__(self, *children): def __init__(self, *children):
self.children = children self.children = children
@property @property
def children(self): def children(self):
return self._children return self._children
@children.setter @children.setter
def children(self, children): def children(self, children):
self._children = [to_parse_node(child) for child in children] self._children = [to_parse_node(child) for child in children]
@ -367,6 +386,7 @@ class Until(ParseNode):
"""ParseNode that repeats its child until a delimiting token is found. If """ParseNode that repeats its child until a delimiting token is found. If
the child does not match, one token is skipped and the match is attempted the child does not match, one token is skipped and the match is attempted
again.""" again."""
def __init__(self, child, delimiter): def __init__(self, child, delimiter):
self.child = to_parse_node(child) self.child = to_parse_node(child)
self.delimiter = to_parse_node(delimiter) self.delimiter = to_parse_node(delimiter)
@ -390,10 +410,10 @@ class ZeroOrMore(ParseNode):
"""ParseNode that matches its child any number of times (including zero """ParseNode that matches its child any number of times (including zero
times). It cannot fail to parse. If its child raises an exception, one token times). It cannot fail to parse. If its child raises an exception, one token
will be skipped and parsing will continue.""" will be skipped and parsing will continue."""
def __init__(self, child): def __init__(self, child):
self.child = to_parse_node(child) self.child = to_parse_node(child)
def _parse(self, ctx): def _parse(self, ctx):
while True: while True:
try: try:
@ -407,6 +427,7 @@ class ZeroOrMore(ParseNode):
class Delimited(ParseNode): class Delimited(ParseNode):
"""ParseNode that matches its first child any number of times (including zero """ParseNode that matches its first child any number of times (including zero
times) with its second child in between and optionally at the end.""" times) with its second child in between and optionally at the end."""
def __init__(self, child, delimiter): def __init__(self, child, delimiter):
self.child = to_parse_node(child) self.child = to_parse_node(child)
self.delimiter = to_parse_node(delimiter) self.delimiter = to_parse_node(delimiter)
@ -420,6 +441,7 @@ class Delimited(ParseNode):
class Optional(ParseNode): class Optional(ParseNode):
"""ParseNode that matches its child zero or one times. It cannot fail to """ParseNode that matches its child zero or one times. It cannot fail to
parse.""" parse."""
def __init__(self, child): def __init__(self, child):
self.child = to_parse_node(child) self.child = to_parse_node(child)
@ -430,6 +452,7 @@ class Optional(ParseNode):
class Eof(ParseNode): class Eof(ParseNode):
"""ParseNode that matches an EOF token.""" """ParseNode that matches an EOF token."""
def _parse(self, ctx: ParseContext) -> bool: def _parse(self, ctx: ParseContext) -> bool:
token = ctx.next_token() token = ctx.next_token()
return token.type == TokenType.EOF return token.type == TokenType.EOF
@ -437,6 +460,7 @@ class Eof(ParseNode):
class Match(ParseNode): class Match(ParseNode):
"""ParseNode that matches the given literal token.""" """ParseNode that matches the given literal token."""
def __init__(self, op): def __init__(self, op):
self.op = op self.op = op
@ -455,6 +479,7 @@ class Match(ParseNode):
class UseIdent(ParseNode): class UseIdent(ParseNode):
"""ParseNode that matches any identifier and sets it in a key=value pair on """ParseNode that matches any identifier and sets it in a key=value pair on
the containing match group.""" the containing match group."""
def __init__(self, key): def __init__(self, key):
self.key = key self.key = key
@ -470,6 +495,7 @@ class UseIdent(ParseNode):
class UseNumber(ParseNode): class UseNumber(ParseNode):
"""ParseNode that matches a number and sets it in a key=value pair on """ParseNode that matches a number and sets it in a key=value pair on
the containing match group.""" the containing match group."""
def __init__(self, key): def __init__(self, key):
self.key = key self.key = key
@ -488,6 +514,7 @@ class UseNumber(ParseNode):
class UseNumberText(ParseNode): class UseNumberText(ParseNode):
"""ParseNode that matches a number, but sets its *original text* it in a """ParseNode that matches a number, but sets its *original text* it in a
key=value pair on the containing match group.""" key=value pair on the containing match group."""
def __init__(self, key): def __init__(self, key):
self.key = key self.key = key
@ -503,6 +530,7 @@ class UseNumberText(ParseNode):
class UseQuoted(ParseNode): class UseQuoted(ParseNode):
"""ParseNode that matches a quoted string and sets it in a key=value pair """ParseNode that matches a quoted string and sets it in a key=value pair
on the containing match group.""" on the containing match group."""
def __init__(self, key): def __init__(self, key):
self.key = key self.key = key
@ -511,11 +539,13 @@ class UseQuoted(ParseNode):
if token.type != TokenType.QUOTED: if token.type != TokenType.QUOTED:
return False return False
string = (str(token)[1:-1] string = (
str(token)[1:-1]
.replace("\\n", "\n") .replace("\\n", "\n")
.replace("\\\"", "\"") .replace('\\"', '"')
.replace("\\\\", "\\") .replace("\\\\", "\\")
.replace("\\'", "\'")) .replace("\\'", "'")
)
ctx.set_group_val(self.key, string, token) ctx.set_group_val(self.key, string, token)
return True return True
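
As a quick illustration of the unescaping chain above, applied to a hypothetical quoted token:

raw = '"a \\"quoted\\" word\\n"'  # token text as it appears in the source
unescaped = (
    raw[1:-1]
    .replace("\\n", "\n")
    .replace('\\"', '"')
    .replace("\\\\", "\\")
    .replace("\\'", "'")
)
assert unescaped == 'a "quoted" word\n'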
@ -524,6 +554,7 @@ class UseLiteral(ParseNode):
"""ParseNode that doesn't match anything, but rather sets a static key=value """ParseNode that doesn't match anything, but rather sets a static key=value
pair on the containing group. Useful for, e.g., property and signal flags: pair on the containing group. Useful for, e.g., property and signal flags:
`Sequence(Keyword("swapped"), UseLiteral("swapped", True))`""" `Sequence(Keyword("swapped"), UseLiteral("swapped", True))`"""
def __init__(self, key, literal): def __init__(self, key, literal):
self.key = key self.key = key
self.literal = literal self.literal = literal
@ -536,6 +567,7 @@ class UseLiteral(ParseNode):
class Keyword(ParseNode): class Keyword(ParseNode):
"""Matches the given identifier and sets it as a named token, with the name """Matches the given identifier and sets it as a named token, with the name
being the identifier itself.""" being the identifier itself."""
def __init__(self, kw): def __init__(self, kw):
self.kw = kw self.kw = kw
self.set_token = True self.set_token = True
@ -565,6 +597,7 @@ class Infix(ParseNode):
def __lt__(self, other): def __lt__(self, other):
return self.binding_power < other.binding_power return self.binding_power < other.binding_power
def __eq__(self, other): def __eq__(self, other):
return self.binding_power == other.binding_power return self.binding_power == other.binding_power
@ -578,11 +611,14 @@ class Pratt(ParseNode):
@property @property
def children(self): def children(self):
return self._children return self._children
@children.setter @children.setter
def children(self, children): def children(self, children):
self._children = children self._children = children
self.prefixes = [child for child in children if isinstance(child, Prefix)] self.prefixes = [child for child in children if isinstance(child, Prefix)]
self.infixes = sorted([child for child in children if isinstance(child, Infix)], reverse=True) self.infixes = sorted(
[child for child in children if isinstance(child, Infix)], reverse=True
)
def _parse(self, ctx: ParseContext) -> bool: def _parse(self, ctx: ParseContext) -> bool:
for prefix in self.prefixes: for prefix in self.prefixes:
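
The docstrings above describe the combinators individually; a compositional sketch of how they are typically stacked may help. Everything here is illustrative: DemoProperty is a made-up node type, and the import paths are assumptions about where these classes live in the package (the diff itself does not show file names).

from blueprintcompiler.ast_utils import AstNode  # assumed module path
from blueprintcompiler.parse_tree import (  # assumed module path
    Group,
    Keyword,
    Match,
    Sequence,
    Statement,
    Until,
    UseIdent,
    UseQuoted,
)


class DemoProperty(AstNode):
    """Hypothetical AST node; Group() attaches the captured keys to it."""


demo_prop = Group(
    DemoProperty,
    Statement(
        UseIdent("name"),  # captures the property name on the group
        ":",  # bare strings are accepted directly, as in the grammars above
        UseQuoted("value").expected("a quoted value"),
    ),
)

demo_block = Sequence(
    Keyword("demo"),
    Match("{").expected(),
    Until(demo_prop, "}"),  # parse properties until the closing brace
)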

View file

@ -73,7 +73,9 @@ class Token:
else: else:
return float(string.replace("_", "")) return float(string.replace("_", ""))
except: except:
raise CompileError(f"{str(self)} is not a valid number literal", self.start, self.end) raise CompileError(
f"{str(self)} is not a valid number literal", self.start, self.end
)
def _tokenize(ui_ml: str): def _tokenize(ui_ml: str):
@ -90,7 +92,9 @@ def _tokenize(ui_ml: str):
break break
if not matched: if not matched:
raise CompileError("Could not determine what kind of syntax is meant here", i, i) raise CompileError(
"Could not determine what kind of syntax is meant here", i, i
)
yield Token(TokenType.EOF, i, i, ui_ml) yield Token(TokenType.EOF, i, i, ui_ml)

View file

@ -241,7 +241,9 @@ class Typelib:
return self._typelib_file[loc:end].decode("utf-8") return self._typelib_file[loc:end].decode("utf-8")
def _int(self, size, signed): def _int(self, size, signed):
return int.from_bytes(self._typelib_file[self._offset:self._offset + size], sys.byteorder) return int.from_bytes(
self._typelib_file[self._offset : self._offset + size], sys.byteorder
)
class TypelibHeader(Typelib): class TypelibHeader(Typelib):

View file

@ -21,15 +21,15 @@ import typing as T
class Colors: class Colors:
RED = '\033[91m' RED = "\033[91m"
GREEN = '\033[92m' GREEN = "\033[92m"
YELLOW = '\033[33m' YELLOW = "\033[33m"
FAINT = '\033[2m' FAINT = "\033[2m"
BOLD = '\033[1m' BOLD = "\033[1m"
BLUE = '\033[34m' BLUE = "\033[34m"
UNDERLINE = '\033[4m' UNDERLINE = "\033[4m"
NO_UNDERLINE = '\033[24m' NO_UNDERLINE = "\033[24m"
CLEAR = '\033[0m' CLEAR = "\033[0m"
def did_you_mean(word: str, options: T.List[str]) -> T.Optional[str]: def did_you_mean(word: str, options: T.List[str]) -> T.Optional[str]:
@ -56,7 +56,11 @@ def did_you_mean(word: str, options: T.List[str]) -> T.Optional[str]:
cost = 1 cost = 1
else: else:
cost = 2 cost = 2
distances[i][j] = min(distances[i-1][j] + 2, distances[i][j-1] + 2, distances[i-1][j-1] + cost) distances[i][j] = min(
distances[i - 1][j] + 2,
distances[i][j - 1] + 2,
distances[i - 1][j - 1] + cost,
)
return distances[m - 1][n - 1] return distances[m - 1][n - 1]
@ -75,10 +79,12 @@ def idx_to_pos(idx: int, text: str) -> T.Tuple[int, int]:
col_num = len(sp[-1]) col_num = len(sp[-1])
return (line_num - 1, col_num) return (line_num - 1, col_num)
def pos_to_idx(line: int, col: int, text: str) -> int: def pos_to_idx(line: int, col: int, text: str) -> int:
lines = text.splitlines(keepends=True) lines = text.splitlines(keepends=True)
return sum([len(line) for line in lines[:line]]) + col return sum([len(line) for line in lines[:line]]) + col
def idxs_to_range(start: int, end: int, text: str): def idxs_to_range(start: int, end: int, text: str):
start_l, start_c = idx_to_pos(start, text) start_l, start_c = idx_to_pos(start, text)
end_l, end_c = idx_to_pos(end, text) end_l, end_c = idx_to_pos(end, text)

View file

@ -25,11 +25,24 @@ from xml import sax
# To speed up parsing, we ignore all tags except these # To speed up parsing, we ignore all tags except these
PARSE_GIR = set([ PARSE_GIR = set(
"repository", "namespace", "class", "interface", "property", "glib:signal", [
"include", "implements", "type", "parameter", "parameters", "enumeration", "repository",
"member", "bitfield", "namespace",
]) "class",
"interface",
"property",
"glib:signal",
"include",
"implements",
"type",
"parameter",
"parameters",
"enumeration",
"member",
"bitfield",
]
)
class Element: class Element:
@ -41,14 +54,10 @@ class Element:
@cached_property @cached_property
def cdata(self): def cdata(self):
return ''.join(self.cdata_chunks) return "".join(self.cdata_chunks)
def get_elements(self, name) -> T.List["Element"]: def get_elements(self, name) -> T.List["Element"]:
return [ return [child for child in self.children if child.tag == name]
child
for child in self.children
if child.tag == name
]
def __getitem__(self, key): def __getitem__(self, key):
return self.attrs.get(key) return self.attrs.get(key)

View file

@ -7,10 +7,16 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from blueprintcompiler import tokenizer, parser, decompiler, gir from blueprintcompiler import tokenizer, parser, decompiler, gir
from blueprintcompiler.completions import complete from blueprintcompiler.completions import complete
from blueprintcompiler.errors import PrintableError, MultipleErrors, CompileError, CompilerBugError from blueprintcompiler.errors import (
PrintableError,
MultipleErrors,
CompileError,
CompilerBugError,
)
from blueprintcompiler.tokenizer import Token, TokenType, tokenize from blueprintcompiler.tokenizer import Token, TokenType, tokenize
from blueprintcompiler import utils from blueprintcompiler import utils
@PythonFuzz @PythonFuzz
def fuzz(buf): def fuzz(buf):
try: try:
@ -29,6 +35,7 @@ def fuzz(buf):
except UnicodeDecodeError: except UnicodeDecodeError:
pass pass
if __name__ == "__main__": if __name__ == "__main__":
# Make sure Gtk 4.0 is accessible, otherwise every test will fail on that # Make sure Gtk 4.0 is accessible, otherwise every test will fail on that
# and nothing interesting will be tested # and nothing interesting will be tested

View file

@ -70,12 +70,15 @@ class TestSamples(unittest.TestCase):
e.pretty_print(name + ".blp", blueprint) e.pretty_print(name + ".blp", blueprint)
raise AssertionError() raise AssertionError()
def assert_sample_error(self, name): def assert_sample_error(self, name):
try: try:
with open((Path(__file__).parent / f"sample_errors/{name}.blp").resolve()) as f: with open(
(Path(__file__).parent / f"sample_errors/{name}.blp").resolve()
) as f:
blueprint = f.read() blueprint = f.read()
with open((Path(__file__).parent / f"sample_errors/{name}.err").resolve()) as f: with open(
(Path(__file__).parent / f"sample_errors/{name}.err").resolve()
) as f:
expected = f.read() expected = f.read()
tokens = tokenizer.tokenize(blueprint) tokens = tokenizer.tokenize(blueprint)
@ -91,6 +94,7 @@ class TestSamples(unittest.TestCase):
if len(warnings): if len(warnings):
raise MultipleErrors(warnings) raise MultipleErrors(warnings)
except PrintableError as e: except PrintableError as e:
def error_str(error): def error_str(error):
line, col = utils.idx_to_pos(error.start + 1, blueprint) line, col = utils.idx_to_pos(error.start + 1, blueprint)
len = error.end - error.start len = error.end - error.start
@ -110,7 +114,6 @@ class TestSamples(unittest.TestCase):
else: # pragma: no cover else: # pragma: no cover
raise AssertionError("Expected a compiler error, but none was emitted") raise AssertionError("Expected a compiler error, but none was emitted")
def assert_decompile(self, name): def assert_decompile(self, name):
try: try:
with open((Path(__file__).parent / f"samples/{name}.blp").resolve()) as f: with open((Path(__file__).parent / f"samples/{name}.blp").resolve()) as f:
@ -129,7 +132,6 @@ class TestSamples(unittest.TestCase):
e.pretty_print(name + ".blp", blueprint) e.pretty_print(name + ".blp", blueprint)
raise AssertionError() raise AssertionError()
def test_samples(self): def test_samples(self):
self.assert_sample("accessibility") self.assert_sample("accessibility")
self.assert_sample("action_widgets") self.assert_sample("action_widgets")
@ -161,7 +163,6 @@ class TestSamples(unittest.TestCase):
self.assert_sample("unchecked_class") self.assert_sample("unchecked_class")
self.assert_sample("using") self.assert_sample("using")
def test_sample_errors(self): def test_sample_errors(self):
self.assert_sample_error("a11y_in_non_widget") self.assert_sample_error("a11y_in_non_widget")
self.assert_sample_error("a11y_prop_dne") self.assert_sample_error("a11y_prop_dne")
@ -209,7 +210,6 @@ class TestSamples(unittest.TestCase):
self.assert_sample_error("using_invalid_namespace") self.assert_sample_error("using_invalid_namespace")
self.assert_sample_error("widgets_in_non_size_group") self.assert_sample_error("widgets_in_non_size_group")
def test_decompiler(self): def test_decompiler(self):
self.assert_decompile("accessibility_dec") self.assert_decompile("accessibility_dec")
self.assert_decompile("binding") self.assert_decompile("binding")

View file

@ -36,9 +36,10 @@ class TestTokenizer(unittest.TestCase):
e.pretty_print("<test input>", string) e.pretty_print("<test input>", string)
raise e raise e
def test_basic(self): def test_basic(self):
self.assert_tokenize("ident(){}; \n <<+>>*/=", [ self.assert_tokenize(
"ident(){}; \n <<+>>*/=",
[
(TokenType.IDENT, "ident"), (TokenType.IDENT, "ident"),
(TokenType.PUNCTUATION, "("), (TokenType.PUNCTUATION, "("),
(TokenType.PUNCTUATION, ")"), (TokenType.PUNCTUATION, ")"),
@ -53,26 +54,35 @@ class TestTokenizer(unittest.TestCase):
(TokenType.OP, "/"), (TokenType.OP, "/"),
(TokenType.OP, "="), (TokenType.OP, "="),
(TokenType.EOF, ""), (TokenType.EOF, ""),
]) ],
)
def test_quotes(self): def test_quotes(self):
self.assert_tokenize(r'"this is a \n string""this is \\another \"string\""', [ self.assert_tokenize(
r'"this is a \n string""this is \\another \"string\""',
[
(TokenType.QUOTED, r'"this is a \n string"'), (TokenType.QUOTED, r'"this is a \n string"'),
(TokenType.QUOTED, r'"this is \\another \"string\""'), (TokenType.QUOTED, r'"this is \\another \"string\""'),
(TokenType.EOF, ""), (TokenType.EOF, ""),
]) ],
)
def test_comments(self): def test_comments(self):
self.assert_tokenize('/* \n \\n COMMENT /* */', [ self.assert_tokenize(
(TokenType.COMMENT, '/* \n \\n COMMENT /* */'), "/* \n \\n COMMENT /* */",
[
(TokenType.COMMENT, "/* \n \\n COMMENT /* */"),
(TokenType.EOF, ""), (TokenType.EOF, ""),
]) ],
self.assert_tokenize('line // comment\nline', [ )
(TokenType.IDENT, 'line'), self.assert_tokenize(
(TokenType.WHITESPACE, ' '), "line // comment\nline",
(TokenType.COMMENT, '// comment'), [
(TokenType.WHITESPACE, '\n'), (TokenType.IDENT, "line"),
(TokenType.IDENT, 'line'), (TokenType.WHITESPACE, " "),
(TokenType.COMMENT, "// comment"),
(TokenType.WHITESPACE, "\n"),
(TokenType.IDENT, "line"),
(TokenType.EOF, ""), (TokenType.EOF, ""),
]) ],
)