Rename to blueprint-compiler

This isn't an official GTK project, so it's better to avoid using "GTK" in the name.
James Westman 2021-12-01 15:35:58 -06:00
parent be3c0de670
commit 544d152fb6
No known key found for this signature in database
GPG key ID: CE2DBA0ADB654EA6
37 changed files with 33 additions and 33 deletions

498
blueprintcompiler/ast.py Normal file
@@ -0,0 +1,498 @@
# ast.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
import typing as T
from .ast_utils import *
from .errors import assert_true, AlreadyCaughtError, CompileError, CompilerBugError, MultipleErrors
from . import gir
from .lsp_utils import Completion, CompletionItemKind, SemanticToken, SemanticTokenType
from .tokenizer import Token
from .utils import lazy_prop
from .xml_emitter import XmlEmitter
class UI(AstNode):
""" The AST node for the entire file """
@property
def gir(self):
gir_ctx = gir.GirContext()
self._gir_errors = []
try:
gir_ctx.add_namespace(self.children[GtkDirective][0].gir_namespace)
except CompileError as e:
e.start = self.children[GtkDirective][0].group.start
e.end = self.children[GtkDirective][0].group.end
self._gir_errors.append(e)
for i in self.children[Import]:
try:
if i.gir_namespace is not None:
gir_ctx.add_namespace(i.gir_namespace)
except CompileError as e:
e.start = i.group.tokens["namespace"].start
e.end = i.group.tokens["version"].end
self._gir_errors.append(e)
return gir_ctx
@lazy_prop
def objects_by_id(self):
return { obj.tokens["id"]: obj for obj in self.iterate_children_recursive() if obj.tokens["id"] is not None }
@validate()
def gir_errors(self):
# make sure gir is loaded
self.gir
if len(self._gir_errors):
raise MultipleErrors(self._gir_errors)
@validate()
def at_most_one_template(self):
if len(self.children[Template]) > 1:
for template in self.children[Template][1:]:
raise CompileError(
f"Only one template may be defined per file, but this file contains {len(self.children[Template])}",
template.group.tokens["name"].start, template.group.tokens["name"].end,
)
@validate()
def unique_ids(self):
passed = {}
for obj in self.iterate_children_recursive():
if obj.tokens["id"] is None:
continue
if obj.tokens["id"] in passed:
token = obj.group.tokens["id"]
raise CompileError(f"Duplicate object ID '{obj.tokens['id']}'", token.start, token.end)
passed[obj.tokens["id"]] = obj
def emit_xml(self, xml: XmlEmitter):
xml.start_tag("interface")
for x in self.children:
x.emit_xml(xml)
xml.end_tag()
class GtkDirective(AstNode):
@validate("version")
def gtk_version(self):
if self.tokens["version"] not in ["4.0"]:
err = CompileError("Only GTK 4 is supported")
if self.tokens["version"].startswith("4"):
err.hint("Expected the GIR version, not an exact version number. Use `using Gtk 4.0;`.")
else:
err.hint("Expected `using Gtk 4.0;`")
raise err
@property
def gir_namespace(self):
return gir.get_namespace("Gtk", self.tokens["version"])
def emit_xml(self, xml: XmlEmitter):
xml.put_self_closing("requires", lib="gtk", version=self.tokens["version"])
class Import(AstNode):
@validate("namespace", "version")
def namespace_exists(self):
gir.get_namespace(self.tokens["namespace"], self.tokens["version"])
@property
def gir_namespace(self):
try:
return gir.get_namespace(self.tokens["namespace"], self.tokens["version"])
except CompileError:
return None
def emit_xml(self, xml):
pass
class Object(AstNode):
@validate("namespace")
def gir_ns_exists(self):
if not self.tokens["ignore_gir"]:
self.root.gir.validate_ns(self.tokens["namespace"])
@validate("class_name")
def gir_class_exists(self):
if not self.tokens["ignore_gir"] and self.gir_ns is not None:
self.root.gir.validate_class(self.tokens["class_name"], self.tokens["namespace"])
@property
def gir_ns(self):
if not self.tokens["ignore_gir"]:
return self.root.gir.namespaces.get(self.tokens["namespace"] or "Gtk")
@property
def gir_class(self):
if not self.tokens["ignore_gir"]:
return self.root.gir.get_class(self.tokens["class_name"], self.tokens["namespace"])
@docs("namespace")
def namespace_docs(self):
if ns := self.root.gir.namespaces.get(self.tokens["namespace"]):
return ns.doc
@docs("class_name")
def class_docs(self):
if self.gir_class:
return self.gir_class.doc
def emit_xml(self, xml: XmlEmitter):
xml.start_tag("object", **{
"class": self.gir_class.glib_type_name if self.gir_class else self.tokens["class_name"],
"id": self.tokens["id"],
})
for child in self.children:
child.emit_xml(xml)
xml.end_tag()
class Template(Object):
def emit_xml(self, xml: XmlEmitter):
xml.start_tag("template", **{
"class": self.tokens["name"],
"parent": self.gir_class.glib_type_name if self.gir_class else self.tokens["class_name"],
})
for child in self.children:
child.emit_xml(xml)
xml.end_tag()
class Child(AstNode):
def emit_xml(self, xml: XmlEmitter):
xml.start_tag("child", type=self.tokens["child_type"])
for child in self.children:
child.emit_xml(xml)
xml.end_tag()
class ObjectContent(AstNode):
@property
def gir_class(self):
return self.parent.gir_class
# @validate()
# def only_one_style_class(self):
# if len(self.children[Style]) > 1:
# raise CompileError(
# f"Only one style directive allowed per object, but this object contains {len(self.children[Style])}",
# start=self.children[Style][1].group.start,
# )
def emit_xml(self, xml: XmlEmitter):
for x in self.children:
x.emit_xml(xml)
class Property(AstNode):
@property
def gir_class(self):
return self.parent.parent.gir_class
@property
def gir_property(self):
if self.gir_class is not None:
return self.gir_class.properties.get(self.tokens["name"])
@property
def value_type(self):
if self.gir_property is not None:
return self.gir_property.type
@validate("name")
def property_exists(self):
if self.gir_class is None:
# Objects that we have no gir data on should not be validated
# This happens for classes defined by the app itself
return
if isinstance(self.parent.parent, Template):
# If the property is part of a template, it might be defined by
# the application and thus not in gir
return
if self.gir_property is None:
raise CompileError(
f"Class {self.gir_class.full_name} does not contain a property called {self.tokens['name']}",
did_you_mean=(self.tokens["name"], self.gir_class.properties.keys())
)
@validate()
def obj_property_type(self):
if len(self.children[Object]) == 0:
return
object = self.children[Object][0]
type = self.value_type
if object and type and object.gir_class and not object.gir_class.assignable_to(type):
raise CompileError(
f"Cannot assign {object.gir_class.full_name} to {type.full_name}"
)
@docs("name")
def property_docs(self):
if self.gir_property is not None:
return self.gir_property.doc
def emit_xml(self, xml: XmlEmitter):
values = self.children[Value]
value = values[0] if len(values) == 1 else None
bind_flags = []
if self.tokens["sync_create"]:
bind_flags.append("sync-create")
if self.tokens["after"]:
bind_flags.append("after")
bind_flags_str = "|".join(bind_flags) or None
props = {
"name": self.tokens["name"],
"bind-source": self.tokens["bind_source"],
"bind-property": self.tokens["bind_property"],
"bind-flags": bind_flags_str,
}
if isinstance(value, TranslatedStringValue):
props = { **props, **value.attrs }
if len(self.children[Object]) == 1:
xml.start_tag("property", **props)
self.children[Object][0].emit_xml(xml)
xml.end_tag()
elif value is None:
xml.put_self_closing("property", **props)
else:
xml.start_tag("property", **props)
value.emit_xml(xml)
xml.end_tag()
class Signal(AstNode):
@property
def gir_signal(self):
if self.gir_class is not None:
return self.gir_class.signals.get(self.tokens["name"])
@property
def gir_class(self):
return self.parent.parent.gir_class
@validate("name")
def signal_exists(self):
if self.gir_class is None:
# Objects that we have no gir data on should not be validated
# This happens for classes defined by the app itself
return
if isinstance(self.parent.parent, Template):
# If the signal is part of a template, it might be defined by
# the application and thus not in gir
return
if self.gir_signal is None:
raise CompileError(
f"Class {self.gir_class.full_name} does not contain a signal called {self.tokens['name']}",
did_you_mean=(self.tokens["name"], self.gir_class.signals.keys())
)
@docs("name")
def signal_docs(self):
if self.gir_signal is not None:
return self.gir_signal.doc
def emit_xml(self, xml: XmlEmitter):
name = self.tokens["name"]
if self.tokens["detail_name"]:
name += "::" + self.tokens["detail_name"]
xml.put_self_closing("signal", name=name, handler=self.tokens["handler"], swapped="true" if self.tokens["swapped"] else None)
class Value(AstNode):
pass
class TranslatedStringValue(Value):
@property
def attrs(self):
attrs = { "translatable": "true" }
if "context" in self.tokens:
attrs["context"] = self.tokens["context"]
return attrs
def emit_xml(self, xml: XmlEmitter):
xml.put_text(self.tokens["value"])
class LiteralValue(Value):
def emit_xml(self, xml: XmlEmitter):
xml.put_text(self.tokens["value"])
@validate()
def validate_for_type(self):
type = self.parent.value_type
if isinstance(type, gir.IntType):
try:
int(self.tokens["value"])
except:
raise CompileError(f"Cannot convert {self.group.tokens['value']} to integer")
elif isinstance(type, gir.UIntType):
try:
int(self.tokens["value"])
if int(self.tokens["value"]) < 0:
raise Exception()
except:
raise CompileError(f"Cannot convert {self.group.tokens['value']} to unsigned integer")
elif isinstance(type, gir.FloatType):
try:
float(self.tokens["value"])
except:
raise CompileError(f"Cannot convert {self.group.tokens['value']} to float")
elif isinstance(type, gir.StringType):
pass
elif isinstance(type, gir.Class) or isinstance(type, gir.Interface):
parseable_types = [
"Gdk.Paintable",
"Gdk.Texture",
"Gdk.Pixbuf",
"GLib.File",
"Gtk.ShortcutTrigger",
"Gtk.ShortcutAction",
]
if type.full_name not in parseable_types:
raise CompileError(f"Cannot convert {self.group.tokens['value']} to {type.full_name}")
elif type is not None:
raise CompileError(f"Cannot convert {self.group.tokens['value']} to {type.full_name}")
class Flag(AstNode):
pass
class FlagsValue(Value):
def emit_xml(self, xml: XmlEmitter):
xml.put_text("|".join([flag.tokens["value"] for flag in self.children[Flag]]))
class IdentValue(Value):
def emit_xml(self, xml: XmlEmitter):
if isinstance(self.parent.value_type, gir.Enumeration):
xml.put_text(self.parent.value_type.members[self.tokens["value"]].nick)
else:
xml.put_text(self.tokens["value"])
@validate()
def validate_for_type(self):
type = self.parent.value_type
if isinstance(type, gir.Enumeration):
if self.tokens["value"] not in type.members:
raise CompileError(
f"{self.tokens['value']} is not a member of {type.full_name}",
did_you_mean=(self.tokens['value'], type.members.keys()),
)
elif isinstance(type, gir.BoolType):
if self.tokens["value"] not in ["true", "false"]:
raise CompileError(
f"Expected 'true' or 'false' for boolean value",
did_you_mean=(self.tokens['value'], ["true", "false"]),
)
elif type is not None:
object = self.root.objects_by_id.get(self.tokens["value"])
if object is None:
raise CompileError(
f"Could not find object with ID {self.tokens['value']}",
did_you_mean=(self.tokens['value'], self.root.objects_by_id.keys()),
)
elif object.gir_class and not object.gir_class.assignable_to(type):
raise CompileError(
f"Cannot assign {object.gir_class.full_name} to {type.full_name}"
)
@docs()
def docs(self):
type = self.parent.value_type
if isinstance(type, gir.Enumeration):
if member := type.members.get(self.tokens["value"]):
return member.doc
else:
return type.doc
elif isinstance(type, gir.GirNode):
return type.doc
def get_semantic_tokens(self) -> T.Iterator[SemanticToken]:
if isinstance(self.parent.value_type, gir.Enumeration):
token = self.group.tokens["value"]
yield SemanticToken(token.start, token.end, SemanticTokenType.EnumMember)
class BaseAttribute(AstNode):
""" A helper class for attribute syntax of the form `name: literal_value;`"""
tag_name: str = ""
attr_name: str = "name"
def emit_xml(self, xml: XmlEmitter):
value = self.children[Value][0]
attrs = { self.attr_name: self.tokens["name"] }
if isinstance(value, TranslatedStringValue):
attrs = { **attrs, **value.attrs }
xml.start_tag(self.tag_name, **attrs)
value.emit_xml(xml)
xml.end_tag()
class BaseTypedAttribute(BaseAttribute):
""" A BaseAttribute whose parent has a value_type property that can assist
in validation. """
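To make the convention concrete, here is a hand-written illustration (not compiler output, and assuming XmlEmitter serializes attributes the way its name suggests) of what a BaseAttribute subclass with tag_name = "property" emits:
# `margin-top: 6;`      ->  <property name="margin-top">6</property>
# `label: _("Hello");`  ->  <property name="label" translatable="true">Hello</property>
# A TranslatedStringValue contributes the extra translatable/context attributes via value.attrs.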

@@ -0,0 +1,202 @@
# ast_utils.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
import typing as T
from collections import ChainMap, defaultdict
from . import ast
from .errors import *
from .lsp_utils import SemanticToken
from .utils import lazy_prop
from .xml_emitter import XmlEmitter
class Children:
""" Allows accessing children by type using array syntax. """
def __init__(self, children):
self._children = children
def __iter__(self):
return iter(self._children)
def __getitem__(self, key):
return [child for child in self._children if isinstance(child, key)]
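A small stand-alone illustration of the array syntax (the _A/_B classes are made up for the example):
class _A: pass
class _B: pass

children = Children([_A(), _B(), _A()])
assert len(children[_A]) == 2      # indexing with a type filters by isinstance
assert len(children[_B]) == 1
assert len(list(children)) == 3    # plain iteration still yields every child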
class AstNode:
""" Base class for nodes in the abstract syntax tree. """
completers: T.List = []
def __init__(self, group, children, tokens, incomplete=False):
self.group = group
self.children = Children(children)
self.tokens = ChainMap(tokens, defaultdict(lambda: None))
self.incomplete = incomplete
self.parent = None
for child in self.children:
child.parent = self
def __init_subclass__(cls):
cls.completers = []
cls.validators = [getattr(cls, f) for f in dir(cls) if hasattr(getattr(cls, f), "_validator")]
@property
def root(self):
if self.parent is None:
return self
else:
return self.parent.root
def parent_by_type(self, type):
if self.parent is None:
return None
elif isinstance(self.parent, type):
return self.parent
else:
return self.parent.parent_by_type(type)
def validate_parent_type(self, ns: str, name: str, err_msg: str):
parent = self.root.gir.get_type(name, ns)
container_type = self.parent_by_type(ast.Object).gir_class
if container_type and not container_type.assignable_to(parent):
raise CompileError(f"{container_type.full_name} is not a {parent.full_name}, so it doesn't have {err_msg}")
@lazy_prop
def errors(self):
return list(self._get_errors())
def _get_errors(self):
for validator in self.validators:
try:
validator(self)
except AlreadyCaughtError:
pass
except CompileError as e:
yield e
for child in self.children:
yield from child._get_errors()
def _attrs_by_type(self, attr_type):
for name in dir(type(self)):
item = getattr(type(self), name)
if isinstance(item, attr_type):
yield name, item
def generate(self) -> str:
""" Generates an XML string from the node. """
xml = XmlEmitter()
self.emit_xml(xml)
return xml.result
def emit_xml(self, xml: XmlEmitter):
""" Emits the XML representation of this AST node to the XmlEmitter. """
raise NotImplementedError()
def get_docs(self, idx: int) -> T.Optional[str]:
for name, attr in self._attrs_by_type(Docs):
if attr.token_name:
token = self.group.tokens.get(attr.token_name)
if token and token.start <= idx < token.end:
return getattr(self, name)
else:
return getattr(self, name)
for child in self.children:
if child.group.start <= idx < child.group.end:
docs = child.get_docs(idx)
if docs is not None:
return docs
return None
def get_semantic_tokens(self) -> T.Iterator[SemanticToken]:
for child in self.children:
yield from child.get_semantic_tokens()
def iterate_children_recursive(self) -> T.Iterator["AstNode"]:
yield self
for child in self.children:
yield from child.iterate_children_recursive()
def validate(token_name=None, end_token_name=None, skip_incomplete=False):
""" Decorator for functions that validate an AST node. Exceptions raised
during validation are marked with range information from the tokens. Also
creates a cached property out of the function. """
def decorator(func):
def inner(self):
if skip_incomplete and self.incomplete:
return
try:
func(self)
except CompileError as e:
# If the node is only partially complete, then an error must
# have already been reported at the parsing stage
if self.incomplete:
return
# This mess of code sets the error's start and end positions
# from the tokens passed to the decorator, if they have not
# already been set
if e.start is None:
if token := self.group.tokens.get(token_name):
e.start = token.start
else:
e.start = self.group.start
if e.end is None:
if token := self.group.tokens.get(end_token_name):
e.end = token.end
elif token := self.group.tokens.get(token_name):
e.end = token.end
else:
e.end = self.group.end
# Re-raise the exception
raise e
inner._validator = True
return inner
return decorator
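The collection mechanism spans two classes, so here is a simplified stand-alone sketch (stand-in names, with the decorator reduced to a plain function) of how __init_subclass__ picks up decorated methods:
class Node:
    def __init_subclass__(cls):
        # same idea as AstNode.__init_subclass__ above
        cls.validators = [getattr(cls, f) for f in dir(cls)
                          if hasattr(getattr(cls, f), "_validator")]

def mark_validator(func):          # simplified stand-in for @validate()
    func._validator = True
    return func

class Demo(Node):
    @mark_validator
    def check_something(self):
        pass

print(len(Demo.validators))        # 1 -- the decorated method was collected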
class Docs:
def __init__(self, func, token_name=None):
self.func = func
self.token_name = token_name
def __get__(self, instance, owner):
if instance is None:
return self
return self.func(instance)
def docs(*args, **kwargs):
""" Decorator for functions that return documentation for tokens. """
def decorator(func):
return Docs(func, *args, **kwargs)
return decorator

@@ -0,0 +1,144 @@
# completions.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
import typing as T
from . import ast
from . import gir
from .completions_utils import *
from .lsp_utils import Completion, CompletionItemKind
from .parser import SKIP_TOKENS
from .tokenizer import TokenType, Token
Pattern = T.List[T.Tuple[TokenType, T.Optional[str]]]
def _complete(ast_node: ast.AstNode, tokens: T.List[Token], idx: int, token_idx: int) -> T.Iterator[Completion]:
for child in ast_node.children:
if child.group.start <= idx and (idx < child.group.end or (idx == child.group.end and child.incomplete)):
yield from _complete(child, tokens, idx, token_idx)
return
prev_tokens: T.List[Token] = []
# collect the 5 previous non-skipped tokens
while len(prev_tokens) < 5 and token_idx >= 0:
token = tokens[token_idx]
if token.type not in SKIP_TOKENS:
prev_tokens.insert(0, token)
token_idx -= 1
for completer in ast_node.completers:
yield from completer(prev_tokens, ast_node)
def complete(ast_node: ast.AstNode, tokens: T.List[Token], idx: int) -> T.Iterator[Completion]:
token_idx = 0
# find the current token
for i, token in enumerate(tokens):
if token.start < idx <= token.end:
token_idx = i
# if the current token is an identifier or whitespace, move to the token before it
while tokens[token_idx].type in [TokenType.IDENT, TokenType.WHITESPACE]:
idx = tokens[token_idx].start
token_idx -= 1
yield from _complete(ast_node, tokens, idx, token_idx)
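A hedged sketch of how an editor integration might call this entry point; tokenizer.tokenize and parser.parse are assumed names for modules that are not part of this hunk:
from blueprintcompiler import tokenizer, parser, completions

source = "using Gtk 4.0;\n\nGtk.Button {\n  \n}\n"
tokens = tokenizer.tokenize(source)        # assumed API
root, errors = parser.parse(tokens)        # assumed API
cursor = source.rindex("\n}") - 1          # an offset inside the object body

for item in completions.complete(root, tokens, cursor):
    print(item)                            # e.g. property and signal completions for Gtk.Button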
@completer([ast.GtkDirective])
def using_gtk(ast_node, match_variables):
yield Completion("using Gtk 4.0;", CompletionItemKind.Keyword)
@completer(
applies_in=[ast.UI, ast.ObjectContent, ast.Template],
matches=new_statement_patterns
)
def namespace(ast_node, match_variables):
yield Completion("Gtk", CompletionItemKind.Module, text="Gtk.")
for ns in ast_node.root.children[ast.Import]:
if ns.gir_namespace is not None:
yield Completion(ns.gir_namespace.name, CompletionItemKind.Module, text=ns.gir_namespace.name + ".")
@completer(
applies_in=[ast.UI, ast.ObjectContent, ast.Template],
matches=[
[(TokenType.IDENT, None), (TokenType.OP, "."), (TokenType.IDENT, None)],
[(TokenType.IDENT, None), (TokenType.OP, ".")],
]
)
def object_completer(ast_node, match_variables):
ns = ast_node.root.gir.namespaces.get(match_variables[0])
if ns is not None:
for c in ns.classes.values():
yield Completion(c.name, CompletionItemKind.Class, docs=c.doc)
@completer(
applies_in=[ast.ObjectContent],
matches=new_statement_patterns,
)
def property_completer(ast_node, match_variables):
if ast_node.gir_class:
for prop in ast_node.gir_class.properties:
yield Completion(prop, CompletionItemKind.Property, snippet=f"{prop}: $0;")
@completer(
applies_in=[ast.Property, ast.BaseTypedAttribute],
matches=[
[(TokenType.IDENT, None), (TokenType.OP, ":")]
],
)
def prop_value_completer(ast_node, match_variables):
if isinstance(ast_node.value_type, gir.Enumeration):
for name, member in ast_node.value_type.members.items():
yield Completion(name, CompletionItemKind.EnumMember, docs=member.doc)
elif isinstance(ast_node.value_type, gir.BoolType):
yield Completion("true", CompletionItemKind.Constant)
yield Completion("false", CompletionItemKind.Constant)
@completer(
applies_in=[ast.ObjectContent],
matches=new_statement_patterns,
)
def signal_completer(ast_node, match_variables):
if ast_node.gir_class:
for signal in ast_node.gir_class.signals:
if not isinstance(ast_node.parent, ast.Object):
name = "on"
else:
name = "on_" + (ast_node.parent.tokens["id"] or ast_node.parent.tokens["class_name"].lower())
yield Completion(signal, CompletionItemKind.Property, snippet=f"{signal} => ${{1:{name}_{signal.replace('-', '_')}}}()$0;")
@completer(
applies_in=[ast.UI],
matches=new_statement_patterns
)
def template_completer(ast_node, match_variables):
yield Completion(
"template", CompletionItemKind.Snippet,
snippet="template ${1:ClassName} : ${2:ParentClass} {\n $0\n}"
)

@@ -0,0 +1,80 @@
# completions_utils.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
import typing as T
from . import ast
from .tokenizer import Token, TokenType
from .lsp_utils import Completion
new_statement_patterns = [
[(TokenType.OPEN_BLOCK, None)],
[(TokenType.CLOSE_BLOCK, None)],
[(TokenType.STMT_END, None)],
]
def applies_to(*ast_types):
""" Decorator describing which AST nodes the completer should apply in. """
def decorator(func):
for c in ast_types:
c.completers.append(func)
return func
return decorator
def completer(applies_in: T.List, matches: T.List=[], applies_in_subclass=None):
def decorator(func):
def inner(prev_tokens: T.List[Token], ast_node):
# For completers that apply in ObjectContent nodes, we can further
# check that the object is the right class
if applies_in_subclass is not None:
type = ast_node.root.gir.get_type(applies_in_subclass[1], applies_in_subclass[0])
if ast_node.gir_class and not ast_node.gir_class.assignable_to(type):
return
any_match = len(matches) == 0
match_variables: T.List[str] = []
for pattern in matches:
match_variables = []
if len(pattern) <= len(prev_tokens):
for i in range(0, len(pattern)):
type, value = pattern[i]
token = prev_tokens[i - len(pattern)]
if token.type != type or (value is not None and str(token) != value):
break
if value is None:
match_variables.append(str(token))
else:
any_match = True
break
if not any_match:
return
yield from func(ast_node, match_variables)
for c in applies_in:
c.completers.append(inner)
return inner
return decorator
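For reference, `None` in a pattern acts as a wildcard whose token text is captured into match_variables, in the order the wildcards appear. A purely hypothetical completer using that (not part of the diff; it would live alongside the other completers, which import Completion and CompletionItemKind from lsp_utils):
@completer(
    applies_in=[ast.ObjectContent],
    matches=[[(TokenType.IDENT, None), (TokenType.OP, ".")]],
)
def example_completer(ast_node, match_variables):
    # match_variables[0] holds the identifier typed before the "."
    yield Completion(match_variables[0] + "Example", CompletionItemKind.Class)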

143
blueprintcompiler/errors.py Normal file
@@ -0,0 +1,143 @@
# errors.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from dataclasses import dataclass
import typing as T
import sys, traceback
from . import utils
class _colors:
RED = '\033[91m'
YELLOW = '\033[33m'
FAINT = '\033[2m'
BOLD = '\033[1m'
BLUE = '\033[34m'
UNDERLINE = '\033[4m'
CLEAR = '\033[0m'
class PrintableError(Exception):
""" Parent class for errors that can be pretty-printed for the user, e.g.
compilation warnings and errors. """
def pretty_print(self, filename, code):
raise NotImplementedError()
class CompileError(PrintableError):
""" A PrintableError with a start/end position and optional hints """
category = "error"
def __init__(self, message, start=None, end=None, did_you_mean=None, hints=None, actions=None):
super().__init__(message)
self.message = message
self.start = start
self.end = end
self.hints = hints or []
self.actions = actions or []
if did_you_mean is not None:
self._did_you_mean(*did_you_mean)
def hint(self, hint: str):
self.hints.append(hint)
return self
def _did_you_mean(self, word: str, options: T.List[str]):
if word.replace("_", "-") in options:
self.hint(f"use '-', not '_': `{word.replace('_', '-')}`")
return
recommend = utils.did_you_mean(word, options)
if recommend is not None:
if word.casefold() == recommend.casefold():
self.hint(f"Did you mean `{recommend}` (note the capitalization)?")
else:
self.hint(f"Did you mean `{recommend}`?")
self.actions.append(CodeAction(f"Change to `{recommend}`", recommend))
else:
self.hint("Did you check your spelling?")
self.hint("Are your dependencies up to date?")
def pretty_print(self, filename, code):
line_num, col_num = utils.idx_to_pos(self.start + 1, code)
line = code.splitlines(True)[line_num]
# Display 1-based line numbers
line_num += 1
print(f"""{_colors.RED}{_colors.BOLD}{self.category}: {self.message}{_colors.CLEAR}
at {filename} line {line_num} column {col_num}:
{_colors.FAINT}{line_num :>4} |{_colors.CLEAR}{line.rstrip()}\n {_colors.FAINT}|{" "*(col_num-1)}^{_colors.CLEAR}""")
for hint in self.hints:
print(f"{_colors.FAINT}hint: {hint}{_colors.CLEAR}")
print()
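A small sketch of the did_you_mean behaviour in isolation (assuming utils.did_you_mean returns the closest candidate here):
from blueprintcompiler.errors import CompileError

err = CompileError(
    "Class Gtk.Button does not contain a property called lable",
    did_you_mean=("lable", ["label", "halign", "valign"]),
)
print(err.hints)     # expected: ["Did you mean `label`?"], plus a CodeAction in err.actions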
@dataclass
class CodeAction:
title: str
replace_with: str
class AlreadyCaughtError(Exception):
""" Emitted when a validation has already failed and its error message
should not be repeated. """
class MultipleErrors(PrintableError):
""" If multiple errors occur during compilation, they can be collected into
a list and re-thrown using the MultipleErrors exception. It will
pretty-print all of the errors and a count of how many errors there are. """
def __init__(self, errors: T.List[CompileError]):
super().__init__()
self.errors = errors
def pretty_print(self, filename, code) -> None:
for error in self.errors:
error.pretty_print(filename, code)
if len(self.errors) != 1:
print(f"{len(self.errors)} errors")
class CompilerBugError(Exception):
""" Emitted on assertion errors """
def assert_true(truth: bool, message:str=None):
if not truth:
raise CompilerBugError(message)
def report_compile_error():
""" Report an error and ask people to report it. """
print(traceback.format_exc())
print(f"Arguments: {sys.argv}\n")
print(f"""{_colors.BOLD}{_colors.RED}***** COMPILER BUG *****
The blueprint-compiler program has crashed. Please report the above stacktrace,
along with the input file(s) if possible, on GitLab:
{_colors.BOLD}{_colors.BLUE}{_colors.UNDERLINE}https://gitlab.gnome.org/jwestman/blueprint-compiler/-/issues/new?issue
{_colors.CLEAR}""")

@@ -0,0 +1,18 @@
""" Contains all the syntax beyond basic objects, properties, signal, and
templates. """
from .gtk_a11y import a11y
from .gtk_combo_box_text import items
from .gtk_file_filter import mime_types, patterns, suffixes
from .gtk_layout import layout
from .gtk_menu import menu
from .gtk_size_group import widgets
from .gtk_string_list import strings
from .gtk_styles import styles
OBJECT_HOOKS = [menu]
OBJECT_CONTENT_HOOKS = [
a11y, styles, layout, mime_types, patterns, suffixes, widgets, items,
strings,
]

@@ -0,0 +1,190 @@
# gtk_a11y.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from ..ast import BaseTypedAttribute, Value
from ..ast_utils import AstNode, validate, docs
from ..completions_utils import *
from ..gir import StringType, BoolType, IntType, FloatType, GirType
from ..lsp_utils import Completion, CompletionItemKind
from ..parse_tree import *
from ..parser_utils import *
from ..xml_emitter import XmlEmitter
def _get_property_types(gir):
# from <https://docs.gtk.org/gtk4/enum.AccessibleProperty.html>
return {
"autocomplete": gir.get_type("AccessibleAutocomplete", "Gtk"),
"description": StringType(),
"has_popup": BoolType(),
"key_shortcuts": StringType(),
"label": StringType(),
"level": IntType(),
"modal": BoolType(),
"multi_line": BoolType(),
"multi_selectable": BoolType(),
"orientation": gir.get_type("Orientation", "Gtk"),
"placeholder": StringType(),
"read_only": BoolType(),
"required": BoolType(),
"role_description": StringType(),
"sort": gir.get_type("AccessibleSort", "Gtk"),
"value_max": FloatType(),
"value_min": FloatType(),
"value_now": FloatType(),
"value_text": StringType(),
}
def _get_relation_types(gir):
# from <https://docs.gtk.org/gtk4/enum.AccessibleRelation.html>
widget = gir.get_type("Widget", "Gtk")
return {
"active_descendant": widget,
"col_count": IntType(),
"col_index": IntType(),
"col_index_text": StringType(),
"col_span": IntType(),
"controls": widget,
"described_by": widget,
"details": widget,
"error_message": widget,
"flow_to": widget,
"labelled_by": widget,
"owns": widget,
"pos_in_set": IntType(),
"row_count": IntType(),
"row_index": IntType(),
"row_index_text": StringType(),
"row_span": IntType(),
"set_size": IntType(),
}
def _get_state_types(gir):
# from <https://docs.gtk.org/gtk4/enum.AccessibleState.html>
return {
"busy": BoolType(),
"checked": gir.get_type("AccessibleTristate", "Gtk"),
"disabled": BoolType(),
"expanded": BoolType(),
"hidden": BoolType(),
"invalid": gir.get_type("AccessibleInvalidState", "Gtk"),
"pressed": gir.get_type("AccessibleTristate", "Gtk"),
"selected": BoolType(),
}
def _get_types(gir):
return {
**_get_property_types(gir),
**_get_relation_types(gir),
**_get_state_types(gir),
}
def _get_docs(gir, name):
return (
gir.get_type("AccessibleProperty", "Gtk").members.get(name)
or gir.get_type("AccessibleRelation", "Gtk").members.get(name)
or gir.get_type("AccessibleState", "Gtk").members.get(name)
).doc
class A11y(AstNode):
@validate("accessibility")
def container_is_widget(self):
self.validate_parent_type("Gtk", "Widget", "accessibility properties")
def emit_xml(self, xml: XmlEmitter):
xml.start_tag("accessibility")
for child in self.children:
child.emit_xml(xml)
xml.end_tag()
class A11yProperty(BaseTypedAttribute):
@property
def tag_name(self):
name = self.tokens["name"]
gir = self.root.gir
if name in _get_property_types(gir):
return "property"
elif name in _get_relation_types(gir):
return "relation"
elif name in _get_state_types(gir):
return "state"
else:
raise CompilerBugError()
@property
def value_type(self) -> GirType:
return _get_types(self.root.gir).get(self.tokens["name"])
@validate("name")
def is_valid_property(self):
types = _get_types(self.root.gir)
if self.tokens["name"] not in types:
raise CompileError(
f"'{self.tokens['name']}' is not an accessibility property, relation, or state",
did_you_mean=(self.tokens["name"], types.keys()),
)
@docs("name")
def prop_docs(self):
if self.tokens["name"] in _get_types(self.root.gir):
return _get_docs(self.root.gir, self.tokens["name"])
a11y_prop = Group(
A11yProperty,
Statement(
UseIdent("name"),
Op(":"),
value.expected("a value"),
)
)
a11y = Group(
A11y,
Sequence(
Keyword("accessibility", True),
OpenBlock(),
Until(a11y_prop, CloseBlock()),
)
)
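As a hand-written illustration of what this grammar accepts inside a widget (not taken from the diff), each entry is routed to <property>, <relation>, or <state> by the tag_name lookup above:
# accessibility {
#   label: _("Close");            -> a property (translatable string)
#   has_popup: true;              -> a property (boolean)
#   described_by: tooltip_label;  -> a relation referencing another object by ID
# }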
@completer(
applies_in=[ast.ObjectContent],
matches=new_statement_patterns,
)
def a11y_completer(ast_node, match_variables):
yield Completion(
"accessibility", CompletionItemKind.Snippet,
snippet="accessibility {\n $0\n}"
)
@completer(
applies_in=[A11y],
matches=new_statement_patterns,
)
def a11y_name_completer(ast_node, match_variables):
for name, type in _get_types(ast_node.root.gir).items():
yield Completion(name, CompletionItemKind.Property, docs=_get_docs(ast_node.root.gir, name))

@@ -0,0 +1,89 @@
# gtk_combo_box_text.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from ..ast import BaseTypedAttribute
from ..ast_utils import AstNode, validate
from ..completions_utils import *
from ..gir import StringType
from ..lsp_utils import Completion, CompletionItemKind
from ..parse_tree import *
from ..parser_utils import *
from ..xml_emitter import XmlEmitter
class Items(AstNode):
@validate("items")
def container_is_combo_box_text(self):
self.validate_parent_type("Gtk", "ComboBoxText", "combo box items")
def emit_xml(self, xml: XmlEmitter):
xml.start_tag("items")
for child in self.children:
child.emit_xml(xml)
xml.end_tag()
class Item(BaseTypedAttribute):
tag_name = "item"
attr_name = "id"
@property
def value_type(self):
return StringType()
item = Group(
Item,
Sequence(
Optional(
Sequence(
UseIdent("name"),
Op(":"),
)
),
value,
)
)
items = Group(
Items,
Sequence(
Keyword("items", True),
OpenBracket(),
Delimited(
item,
Comma()
),
CloseBracket(),
)
)
@completer(
applies_in=[ast.ObjectContent],
applies_in_subclass=("Gtk", "ComboBoxText"),
matches=new_statement_patterns,
)
def items_completer(ast_node, match_variables):
yield Completion(
"items", CompletionItemKind.Snippet,
snippet="items [$0];"
)

@@ -0,0 +1,85 @@
# gtk_file_filter.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from .. import ast
from ..ast_utils import AstNode, validate
from ..completions_utils import *
from ..lsp_utils import Completion, CompletionItemKind
from ..parse_tree import *
from ..parser_utils import *
from ..xml_emitter import XmlEmitter
class Filters(AstNode):
@validate()
def container_is_file_filter(self):
self.validate_parent_type("Gtk", "FileFilter", "file filter properties")
def emit_xml(self, xml: XmlEmitter):
xml.start_tag(self.tokens["tag_name"])
for child in self.children:
child.emit_xml(xml)
xml.end_tag()
class FilterString(AstNode):
def emit_xml(self, xml):
xml.start_tag(self.tokens["tag_name"])
xml.put_text(self.tokens["name"])
xml.end_tag()
def create_node(tag_name: str, singular: str):
return Group(
Filters,
Sequence(
Keyword(tag_name, True),
UseLiteral("tag_name", tag_name),
OpenBracket(),
Delimited(
Group(
FilterString,
Sequence(
UseQuoted("name"),
UseLiteral("tag_name", singular),
)
),
Comma(),
),
CloseBracket(),
)
)
mime_types = create_node("mime-types", "mime-type")
patterns = create_node("patterns", "pattern")
suffixes = create_node("suffixes", "suffix")
@completer(
applies_in=[ast.ObjectContent],
applies_in_subclass=("Gtk", "FileFilter"),
matches=new_statement_patterns,
)
def file_filter_completer(ast_node, match_variables):
yield Completion("mime-types", CompletionItemKind.Snippet, snippet="mime-types [\"$0\"];")
yield Completion("patterns", CompletionItemKind.Snippet, snippet="patterns [\"$0\"];")
yield Completion("suffixes", CompletionItemKind.Snippet, snippet="suffixes [\"$0\"];")

@@ -0,0 +1,80 @@
# gtk_layout.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from ..ast import BaseAttribute
from ..ast_utils import AstNode, validate
from ..completions_utils import *
from ..lsp_utils import Completion, CompletionItemKind
from ..parse_tree import *
from ..parser_utils import *
from ..xml_emitter import XmlEmitter
class Layout(AstNode):
@validate("layout")
def container_is_widget(self):
self.validate_parent_type("Gtk", "Widget", "layout properties")
def emit_xml(self, xml: XmlEmitter):
xml.start_tag("layout")
for child in self.children:
child.emit_xml(xml)
xml.end_tag()
class LayoutProperty(BaseAttribute):
tag_name = "property"
@property
def value_type(self):
# there isn't really a way to validate these
return None
layout_prop = Group(
LayoutProperty,
Statement(
UseIdent("name"),
Op(":"),
value.expected("a value"),
)
)
layout = Group(
Layout,
Sequence(
Keyword("layout", True),
OpenBlock(),
Until(layout_prop, CloseBlock()),
)
)
@completer(
applies_in=[ast.ObjectContent],
applies_in_subclass=("Gtk", "Widget"),
matches=new_statement_patterns,
)
def layout_completer(ast_node, match_variables):
yield Completion(
"layout", CompletionItemKind.Snippet,
snippet="layout {\n $0\n}"
)

@@ -0,0 +1,190 @@
# gtk_menu.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from ..ast import BaseAttribute
from ..ast_utils import AstNode
from ..completions_utils import *
from ..lsp_utils import Completion, CompletionItemKind
from ..parse_tree import *
from ..parser_utils import *
from ..xml_emitter import XmlEmitter
class Menu(AstNode):
def emit_xml(self, xml: XmlEmitter):
xml.start_tag(self.tokens["tag"], id=self.tokens["id"])
for child in self.children:
child.emit_xml(xml)
xml.end_tag()
@property
def gir_class(self):
return self.root.gir.namespaces["Gtk"].lookup_type("Gio.MenuModel")
class MenuAttribute(BaseAttribute):
tag_name = "attribute"
@property
def value_type(self):
return None
menu_contents = Sequence()
menu_section = Group(
Menu,
Sequence(
Keyword("section"),
UseLiteral("tag", "section"),
Optional(UseIdent("id")),
menu_contents
)
)
menu_submenu = Group(
Menu,
Sequence(
Keyword("submenu"),
UseLiteral("tag", "submenu"),
Optional(UseIdent("id")),
menu_contents
)
)
menu_attribute = Group(
MenuAttribute,
Sequence(
UseIdent("name"),
Op(":"),
value.expected("a value"),
StmtEnd().expected("`;`"),
)
)
menu_item = Group(
Menu,
Sequence(
Keyword("item"),
UseLiteral("tag", "item"),
Optional(UseIdent("id")),
OpenBlock().expected("`{`"),
Until(menu_attribute, CloseBlock()),
)
)
menu_item_shorthand = Group(
Menu,
Sequence(
Keyword("item"),
UseLiteral("tag", "item"),
OpenParen(),
Group(
MenuAttribute,
Sequence(UseLiteral("name", "label"), value),
),
Optional(Sequence(
Comma(),
Optional(Sequence(
Group(
MenuAttribute,
Sequence(UseLiteral("name", "action"), value),
),
Optional(Sequence(
Comma(),
Group(
MenuAttribute,
Sequence(UseLiteral("name", "icon"), value),
),
))
))
)),
CloseParen().expected("')'"),
)
)
menu_contents.children = [
OpenBlock(),
Until(AnyOf(
menu_section,
menu_submenu,
menu_item_shorthand,
menu_item,
menu_attribute,
), CloseBlock()),
]
menu = Group(
Menu,
Sequence(
Keyword("menu"),
UseLiteral("tag", "menu"),
Optional(UseIdent("id")),
menu_contents
),
)
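For orientation, a hand-written example of the menu syntax this grammar accepts and roughly the markup the Menu/MenuAttribute nodes emit (illustrative, not generated output):
# menu my_menu {
#   section {
#     item (_("Open"), "app.open", "document-open-symbolic");
#   }
# }
#
# roughly becomes:
#   <menu id="my_menu">
#     <section>
#       <item>
#         <attribute name="label" translatable="true">Open</attribute>
#         <attribute name="action">app.open</attribute>
#         <attribute name="icon">document-open-symbolic</attribute>
#       </item>
#     </section>
#   </menu>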
@completer(
applies_in=[ast.UI],
matches=new_statement_patterns,
)
def menu_completer(ast_node, match_variables):
yield Completion(
"menu", CompletionItemKind.Snippet,
snippet="menu {\n $0\n}"
)
@completer(
applies_in=[Menu],
matches=new_statement_patterns,
)
def menu_content_completer(ast_node, match_variables):
yield Completion(
"submenu", CompletionItemKind.Snippet,
snippet="submenu {\n $0\n}"
)
yield Completion(
"section", CompletionItemKind.Snippet,
snippet="section {\n $0\n}"
)
yield Completion(
"item", CompletionItemKind.Snippet,
snippet="item {\n $0\n}"
)
yield Completion(
"item (shorthand)", CompletionItemKind.Snippet,
snippet='item (_("${1:Label}"), "${2:action-name}", "${3:icon-name}");'
)
yield Completion(
"label", CompletionItemKind.Snippet,
snippet='label: $0;'
)
yield Completion(
"action", CompletionItemKind.Snippet,
snippet='action: "$0";'
)
yield Completion(
"icon", CompletionItemKind.Snippet,
snippet='icon: "$0";'
)

@@ -0,0 +1,84 @@
# gtk_size_group.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from .. import ast
from ..ast_utils import AstNode, validate
from ..completions_utils import *
from ..lsp_utils import Completion, CompletionItemKind
from ..parse_tree import *
from ..parser_utils import *
from ..xml_emitter import XmlEmitter
class Widgets(AstNode):
@validate("widgets")
def container_is_size_group(self):
self.validate_parent_type("Gtk", "SizeGroup", "size group properties")
def emit_xml(self, xml: XmlEmitter):
xml.start_tag("widgets")
for child in self.children:
child.emit_xml(xml)
xml.end_tag()
class Widget(AstNode):
@validate("name")
def obj_widget(self):
object = self.root.objects_by_id.get(self.tokens["name"])
type = self.root.gir.get_type("Widget", "Gtk")
if object is None:
raise CompileError(
f"Could not find object with ID {self.tokens['name']}",
did_you_mean=(self.tokens['name'], self.root.objects_by_id.keys()),
)
elif object.gir_class and not object.gir_class.assignable_to(type):
raise CompileError(
f"Cannot assign {object.gir_class.full_name} to {type.full_name}"
)
def emit_xml(self, xml: XmlEmitter):
xml.put_self_closing("widget", name=self.tokens["name"])
widgets = Group(
Widgets,
Sequence(
Keyword("widgets", True),
OpenBracket(),
Delimited(
Group(
Widget,
UseIdent("name"),
),
Comma(),
),
CloseBracket(),
)
)
@completer(
applies_in=[ast.ObjectContent],
applies_in_subclass=("Gtk", "SizeGroup"),
matches=new_statement_patterns,
)
def size_group_completer(ast_node, match_variables):
yield Completion("widgets", CompletionItemKind.Snippet, snippet="widgets [$0];")

@@ -0,0 +1,85 @@
# gtk_string_list.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from ..ast import BaseTypedAttribute, Value, TranslatedStringValue
from ..ast_utils import AstNode, validate
from ..completions_utils import *
from ..gir import StringType
from ..lsp_utils import Completion, CompletionItemKind
from ..parse_tree import *
from ..parser_utils import *
from ..xml_emitter import XmlEmitter
class Items(AstNode):
@validate("items")
def container_is_string_list(self):
self.validate_parent_type("Gtk", "StringList", "StringList items")
def emit_xml(self, xml: XmlEmitter):
xml.start_tag("items")
for child in self.children:
child.emit_xml(xml)
xml.end_tag()
class Item(AstNode):
@property
def value_type(self):
return StringType()
def emit_xml(self, xml: XmlEmitter):
value = self.children[Value][0]
attrs = value.attrs if isinstance(value, TranslatedStringValue) else {}
xml.start_tag("item", **attrs)
value.emit_xml(xml)
xml.end_tag()
item = Group(
Item,
value,
)
strings = Group(
Items,
Sequence(
Keyword("strings", True),
OpenBracket(),
Delimited(
item,
Comma()
),
CloseBracket(),
)
)
@completer(
applies_in=[ast.ObjectContent],
applies_in_subclass=("Gtk", "StringList"),
matches=new_statement_patterns,
)
def strings_completer(ast_node, match_variables):
yield Completion(
"strings", CompletionItemKind.Snippet,
snippet="strings [$0];"
)

@@ -0,0 +1,71 @@
# gtk_styles.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from .. import ast
from ..ast_utils import AstNode, validate
from ..completions_utils import *
from ..lsp_utils import Completion, CompletionItemKind
from ..parse_tree import *
from ..parser_utils import *
from ..xml_emitter import XmlEmitter
class Styles(AstNode):
@validate("styles")
def container_is_widget(self):
self.validate_parent_type("Gtk", "Widget", "style classes")
def emit_xml(self, xml: XmlEmitter):
xml.start_tag("style")
for child in self.children:
child.emit_xml(xml)
xml.end_tag()
class StyleClass(AstNode):
def emit_xml(self, xml):
xml.put_self_closing("class", name=self.tokens["name"])
styles = Group(
Styles,
Sequence(
Keyword("styles", True),
OpenBracket(),
Delimited(
Group(
StyleClass,
UseQuoted("name")
),
Comma(),
),
CloseBracket(),
)
)
@completer(
applies_in=[ast.ObjectContent],
applies_in_subclass=("Gtk", "Widget"),
matches=new_statement_patterns,
)
def style_completer(ast_node, match_variables):
yield Completion("styles", CompletionItemKind.Keyword, snippet="styles [\"$0\"];")

434
blueprintcompiler/gir.py Normal file
@@ -0,0 +1,434 @@
# gir.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
import typing as T
import os, sys
from .errors import CompileError, CompilerBugError
from .utils import lazy_prop
from . import xml_reader
extra_search_paths: T.List[str] = []
_namespace_cache = {}
_search_paths = []
xdg_data_home = os.environ.get("XDG_DATA_HOME", os.path.expanduser("~/.local/share"))
_search_paths.append(os.path.join(xdg_data_home, "gir-1.0"))
xdg_data_dirs = os.environ.get("XDG_DATA_DIRS", "/usr/share:/usr/local/share").split(":")
_search_paths += [os.path.join(dir, "gir-1.0") for dir in xdg_data_dirs]
def get_namespace(namespace, version):
filename = f"{namespace}-{version}.gir"
if filename not in _namespace_cache:
for search_path in _search_paths:
path = os.path.join(search_path, filename)
if os.path.exists(path) and os.path.isfile(path):
xml = xml_reader.parse(path, xml_reader.PARSE_GIR)
repository = Repository(xml)
_namespace_cache[filename] = repository.namespaces.get(namespace)
break
if filename not in _namespace_cache:
raise CompileError(f"Namespace {namespace}-{version} could not be found")
return _namespace_cache[filename]
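A quick interactive sketch of this lookup layer (it needs the Gtk 4 .gir file installed in one of the search paths set up above):
from blueprintcompiler import gir

gtk = gir.get_namespace("Gtk", "4.0")
button = gtk.get_type("Button")                       # a Class wrapper, defined below
print(button.full_name)                               # Gtk.Button
print("label" in button.properties)                   # True -- includes inherited properties
print(button.assignable_to(gtk.get_type("Widget")))   # True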
class GirType:
@property
def doc(self):
return None
def assignable_to(self, other) -> bool:
raise NotImplementedError()
@property
def full_name(self) -> str:
raise NotImplementedError()
class BasicType(GirType):
name: str = "unknown type"
@property
def full_name(self) -> str:
return self.name
class BoolType(BasicType):
name = "bool"
def assignable_to(self, other) -> bool:
return isinstance(other, BoolType)
class IntType(BasicType):
name = "int"
def assignable_to(self, other) -> bool:
return isinstance(other, IntType) or isinstance(other, UIntType) or isinstance(other, FloatType)
class UIntType(BasicType):
name = "uint"
def assignable_to(self, other) -> bool:
return isinstance(other, IntType) or isinstance(other, UIntType) or isinstance(other, FloatType)
class FloatType(BasicType):
name = "float"
def assignable_to(self, other) -> bool:
return isinstance(other, FloatType)
class StringType(BasicType):
name = "string"
def assignable_to(self, other) -> bool:
return isinstance(other, StringType)
_BASIC_TYPES = {
"gboolean": BoolType,
"int": IntType,
"gint": IntType,
"gint64": IntType,
"guint": UIntType,
"guint64": UIntType,
"gfloat": FloatType,
"gdouble": FloatType,
"float": FloatType,
"double": FloatType,
"utf8": StringType,
}
class GirNode:
def __init__(self, container, xml):
self.container = container
self.xml = xml
def get_containing(self, container_type):
if self.container is None:
return None
elif isinstance(self.container, container_type):
return self.container
else:
return self.container.get_containing(container_type)
@lazy_prop
def glib_type_name(self):
return self.xml["glib:type-name"]
@lazy_prop
def full_name(self):
if self.container is None:
return self.name
else:
return f"{self.container.name}.{self.name}"
@lazy_prop
def name(self) -> str:
return self.xml["name"]
@lazy_prop
def available_in(self) -> str:
return self.xml.get("version")
@lazy_prop
def doc(self) -> T.Optional[str]:
sections = []
if self.signature:
sections.append("```\n" + self.signature + "\n```")
el = self.xml.get_elements("doc")
if len(el) == 1:
sections.append(el[0].cdata.strip())
return "\n\n---\n\n".join(sections)
@property
def signature(self) -> T.Optional[str]:
return None
@property
def type_name(self):
return self.xml.get_elements('type')[0]['name']
@property
def type(self):
return self.get_containing(Namespace).lookup_type(self.type_name)
class Property(GirNode):
def __init__(self, klass, xml: xml_reader.Element):
super().__init__(klass, xml)
@property
def type_name(self):
return self.xml.get_elements('type')[0]['name']
@property
def signature(self):
return f"{self.type_name} {self.container.name}.{self.name}"
class Parameter(GirNode):
def __init__(self, container: GirNode, xml: xml_reader.Element):
super().__init__(container, xml)
class Signal(GirNode):
def __init__(self, klass, xml: xml_reader.Element):
super().__init__(klass, xml)
if parameters := xml.get_elements('parameters'):
self.params = [Parameter(self, child) for child in parameters[0].get_elements('parameter')]
else:
self.params = []
@property
def signature(self):
args = ", ".join([f"{p.type_name} {p.name}" for p in self.params])
return f"signal {self.container.name}.{self.name} ({args})"
class Interface(GirNode, GirType):
def __init__(self, ns, xml: xml_reader.Element):
super().__init__(ns, xml)
self.properties = {child["name"]: Property(self, child) for child in xml.get_elements("property")}
self.signals = {child["name"]: Signal(self, child) for child in xml.get_elements("glib:signal")}
self.prerequisites = [child["name"] for child in xml.get_elements("prerequisite")]
def assignable_to(self, other) -> bool:
if self == other:
return True
for pre in self.prerequisites:
if self.get_containing(Namespace).lookup_type(pre).assignable_to(other):
return True
return False
class Class(GirNode, GirType):
def __init__(self, ns, xml: xml_reader.Element):
super().__init__(ns, xml)
self._parent = xml["parent"]
self.implements = [impl["name"] for impl in xml.get_elements("implements")]
self.own_properties = {child["name"]: Property(self, child) for child in xml.get_elements("property")}
self.own_signals = {child["name"]: Signal(self, child) for child in xml.get_elements("glib:signal")}
@property
def signature(self):
result = f"class {self.container.name}.{self.name}"
if self.parent is not None:
result += f" : {self.parent.container.name}.{self.parent.name}"
if len(self.implements):
result += " implements " + ", ".join(self.implements)
return result
@lazy_prop
def properties(self):
return { p.name: p for p in self._enum_properties() }
@lazy_prop
def signals(self):
return { s.name: s for s in self._enum_signals() }
@lazy_prop
def parent(self):
if self._parent is None:
return None
return self.get_containing(Namespace).lookup_type(self._parent)
    def assignable_to(self, other) -> bool:
        if self == other:
            return True
        elif self.parent and self.parent.assignable_to(other):
            return True
        else:
            # `implements` holds interface names, so resolve them before checking
            return any(
                self.get_containing(Namespace).lookup_type(impl).assignable_to(other)
                for impl in self.implements
            )
def _enum_properties(self):
yield from self.own_properties.values()
if self.parent is not None:
yield from self.parent.properties.values()
for impl in self.implements:
yield from self.get_containing(Namespace).lookup_type(impl).properties.values()
def _enum_signals(self):
yield from self.own_signals.values()
if self.parent is not None:
yield from self.parent.signals.values()
for impl in self.implements:
yield from self.get_containing(Namespace).lookup_type(impl).signals.values()
class EnumMember(GirNode):
def __init__(self, ns, xml: xml_reader.Element):
super().__init__(ns, xml)
self._value = xml["value"]
@property
def value(self):
return self._value
@property
def nick(self):
return self.xml["glib:nick"]
@property
def signature(self):
return f"enum member {self.full_name} = {self.value}"
class Enumeration(GirNode):
def __init__(self, ns, xml: xml_reader.Element):
super().__init__(ns, xml)
self.members = { child["name"]: EnumMember(self, child) for child in xml.get_elements("member") }
@property
def signature(self):
return f"enum {self.full_name}"
class Namespace(GirNode):
def __init__(self, repo, xml: xml_reader.Element):
super().__init__(repo, xml)
self.classes = { child["name"]: Class(self, child) for child in xml.get_elements("class") }
self.interfaces = { child["name"]: Interface(self, child) for child in xml.get_elements("interface") }
self.enumerations = { child["name"]: Enumeration(self, child) for child in xml.get_elements("enumeration") }
self.version = xml["version"]
@property
def signature(self):
return f"namespace {self.name} {self.version}"
def get_type(self, name):
""" Gets a type (class, interface, enum, etc.) from this namespace. """
return self.classes.get(name) or self.interfaces.get(name) or self.enumerations.get(name)
def lookup_type(self, type_name: str):
""" Looks up a type in the scope of this namespace (including in the
namespace's dependencies). """
if type_name in _BASIC_TYPES:
return _BASIC_TYPES[type_name]()
elif "." in type_name:
ns, name = type_name.split(".", 1)
return self.get_containing(Repository).get_type(name, ns)
else:
return self.get_type(type_name)
class Repository(GirNode):
def __init__(self, xml: xml_reader.Element):
super().__init__(None, xml)
self.namespaces = { child["name"]: Namespace(self, child) for child in xml.get_elements("namespace") }
        try:
            self.includes = { include["name"]: get_namespace(include["name"], include["version"]) for include in xml.get_elements("include") }
        except Exception:
            raise CompilerBugError("Failed to load dependencies.")
def get_type(self, name: str, ns: str) -> T.Optional[GirNode]:
if namespace := self.namespaces.get(ns):
return namespace.get_type(name)
else:
return self.lookup_namespace(ns).get_type(name)
def lookup_namespace(self, ns: str):
""" Finds a namespace among this namespace's dependencies. """
if namespace := self.namespaces.get(ns):
return namespace
else:
for include in self.includes.values():
if namespace := include.get_containing(Repository).lookup_namespace(ns):
return namespace
class GirContext:
def __init__(self):
self.namespaces = {}
def add_namespace(self, namespace: Namespace):
other = self.namespaces.get(namespace.name)
if other is not None and other.version != namespace.version:
raise CompileError(f"Namespace {namespace.name}-{namespace.version} can't be imported because version {other.version} was imported earlier")
self.namespaces[namespace.name] = namespace
def get_type(self, name: str, ns: str) -> T.Optional[GirNode]:
ns = ns or "Gtk"
if ns not in self.namespaces:
return None
return self.namespaces[ns].get_type(name)
def get_class(self, name: str, ns: str) -> T.Optional[Class]:
type = self.get_type(name, ns)
if isinstance(type, Class):
return type
else:
return None
def validate_ns(self, ns: str):
""" Raises an exception if there is a problem looking up the given
namespace. """
ns = ns or "Gtk"
if ns not in self.namespaces:
raise CompileError(
f"Namespace {ns} was not imported",
did_you_mean=(ns, self.namespaces.keys()),
)
def validate_class(self, name: str, ns: str):
""" Raises an exception if there is a problem looking up the given
class (it doesn't exist, it isn't a class, etc.) """
ns = ns or "Gtk"
self.validate_ns(ns)
type = self.get_type(name, ns)
if type is None:
raise CompileError(
f"Namespace {ns} does not contain a class called {name}",
did_you_mean=(name, self.namespaces[ns].classes.keys()),
)
elif not isinstance(type, Class):
raise CompileError(
f"{ns}.{name} is not a class",
did_you_mean=(name, self.namespaces[ns].classes.keys()),
)
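
As a rough sketch of how these gir helpers fit together (assuming the blueprintcompiler package is importable, that a Gtk 4.0 .gir file is installed, and using the module-level get_namespace() helper defined earlier in gir.py):

from blueprintcompiler import gir

ctx = gir.GirContext()
# get_namespace() parses the installed Gtk-4.0.gir file into a Namespace
ctx.add_namespace(gir.get_namespace("Gtk", "4.0"))
ctx.validate_class("Button", "Gtk")      # raises CompileError if this isn't a class
button = ctx.get_class("Button", "Gtk")
print(button.signature)                  # class Gtk.Button : Gtk.Widget implements ...
print(button.assignable_to(ctx.get_type("Widget", "Gtk")))   # True
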

277
blueprintcompiler/lsp.py Normal file
View file

@ -0,0 +1,277 @@
# lsp.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
import typing as T
import json, sys, traceback
from .completions import complete
from .errors import PrintableError, CompileError, MultipleErrors
from .lsp_utils import *
from . import tokenizer, parser, utils, xml_reader
def command(json_method):
def decorator(func):
func._json_method = json_method
return func
return decorator
class OpenFile:
def __init__(self, uri, text, version):
self.uri = uri
self.text = text
self.version = version
self.ast = None
self.tokens = None
self._update()
def apply_changes(self, changes):
for change in changes:
start = utils.pos_to_idx(change["range"]["start"]["line"], change["range"]["start"]["character"], self.text)
end = utils.pos_to_idx(change["range"]["end"]["line"], change["range"]["end"]["character"], self.text)
self.text = self.text[:start] + change["text"] + self.text[end:]
self._update()
def _update(self):
self.diagnostics = []
try:
self.tokens = tokenizer.tokenize(self.text)
self.ast, errors = parser.parse(self.tokens)
if errors is not None:
self.diagnostics += errors.errors
self.diagnostics += self.ast.errors
except MultipleErrors as e:
self.diagnostics += e.errors
except CompileError as e:
self.diagnostics.append(e)
def calc_semantic_tokens(self) -> T.List[int]:
tokens = list(self.ast.get_semantic_tokens())
token_lists = [
[
*utils.idx_to_pos(token.start, self.text), # line and column
token.end - token.start, # length
token.type,
0, # token modifiers
] for token in tokens]
        # convert line, column numbers to deltas. Each token's line is relative
        # to the previous token's line, and its column is relative to the
        # previous token's column when both are on the same line. Work backwards
        # so the previous entries are still absolute when we read them.
        for i, token_list in reversed(list(enumerate(token_lists[1:]))):
            token_list[0] -= token_lists[i][0]
            if token_list[0] == 0:
                token_list[1] -= token_lists[i][1]
# flatten the list
return [x for y in token_lists for x in y]
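
As a small worked example of the delta encoding (token positions here are made up): a token at line 3, column 7, another at line 3, column 12, and a third at line 5, column 0 are encoded relative to one another, which is what the LSP semantic-tokens protocol expects.

# absolute [line, startChar, length, tokenType, tokenModifiers] per token
token_lists = [[3, 7, 4, 0, 0], [3, 12, 6, 0, 0], [5, 0, 2, 0, 0]]
# after the conversion above: line is a delta from the previous token, and
# startChar is a delta only when the token stays on the same line
deltas = [[3, 7, 4, 0, 0], [0, 5, 6, 0, 0], [2, 0, 2, 0, 0]]
# flattened result: [3, 7, 4, 0, 0, 0, 5, 6, 0, 0, 2, 0, 2, 0, 0]
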
class LanguageServer:
commands: T.Dict[str, T.Callable] = {}
def __init__(self, logfile=None):
self.client_capabilities = {}
        self._open_files: T.Dict[str, OpenFile] = {}
self.logfile = logfile
def run(self):
# Read <doc> tags from gir files. During normal compilation these are
# ignored.
xml_reader.PARSE_GIR.add("doc")
try:
while True:
line = ""
content_len = -1
while content_len == -1 or (line != "\n" and line != "\r\n"):
line = sys.stdin.readline()
if line == "":
return
if line.startswith("Content-Length:"):
content_len = int(line.split("Content-Length:")[1].strip())
line = sys.stdin.read(content_len)
self._log("input: " + line)
data = json.loads(line)
method = data.get("method")
id = data.get("id")
params = data.get("params")
if method in self.commands:
self.commands[method](self, id, params)
except Exception as e:
self._log(traceback.format_exc())
def _send(self, data):
data["jsonrpc"] = "2.0"
line = json.dumps(data, separators=(",", ":")) + "\r\n"
self._log("output: " + line)
sys.stdout.write(f"Content-Length: {len(line)}\r\nContent-Type: application/vscode-jsonrpc; charset=utf-8\r\n\r\n{line}")
sys.stdout.flush()
def _log(self, msg):
if self.logfile is not None:
self.logfile.write(str(msg))
self.logfile.write("\n")
self.logfile.flush()
def _send_response(self, id, result):
self._send({
"id": id,
"result": result,
})
def _send_notification(self, method, params):
self._send({
"method": method,
"params": params,
})
@command("initialize")
def initialize(self, id, params):
self.client_capabilities = params.get("capabilities")
self._send_response(id, {
"capabilities": {
"textDocumentSync": {
"openClose": True,
"change": TextDocumentSyncKind.Incremental,
},
"semanticTokensProvider": {
"legend": {
"tokenTypes": ["enumMember"],
},
"full": True,
},
"completionProvider": {},
"codeActionProvider": {},
"hoverProvider": True,
}
})
@command("textDocument/didOpen")
def didOpen(self, id, params):
doc = params.get("textDocument")
uri = doc.get("uri")
version = doc.get("version")
text = doc.get("text")
open_file = OpenFile(uri, text, version)
self._open_files[uri] = open_file
self._send_file_updates(open_file)
@command("textDocument/didChange")
def didChange(self, id, params):
if params is not None:
open_file = self._open_files[params["textDocument"]["uri"]]
open_file.apply_changes(params["contentChanges"])
self._send_file_updates(open_file)
@command("textDocument/didClose")
def didClose(self, id, params):
del self._open_files[params["textDocument"]["uri"]]
@command("textDocument/hover")
def hover(self, id, params):
open_file = self._open_files[params["textDocument"]["uri"]]
docs = open_file.ast and open_file.ast.get_docs(utils.pos_to_idx(params["position"]["line"], params["position"]["character"], open_file.text))
if docs:
self._send_response(id, {
"contents": {
"kind": "markdown",
"value": docs,
}
})
else:
self._send_response(id, None)
@command("textDocument/completion")
def completion(self, id, params):
open_file = self._open_files[params["textDocument"]["uri"]]
if open_file.ast is None:
self._send_response(id, [])
return
idx = utils.pos_to_idx(params["position"]["line"], params["position"]["character"], open_file.text)
completions = complete(open_file.ast, open_file.tokens, idx)
self._send_response(id, [completion.to_json(True) for completion in completions])
@command("textDocument/semanticTokens/full")
def semantic_tokens(self, id, params):
open_file = self._open_files[params["textDocument"]["uri"]]
self._send_response(id, {
"data": open_file.calc_semantic_tokens(),
})
@command("textDocument/codeAction")
def code_actions(self, id, params):
open_file = self._open_files[params["textDocument"]["uri"]]
range_start = utils.pos_to_idx(params["range"]["start"]["line"], params["range"]["start"]["character"], open_file.text)
range_end = utils.pos_to_idx(params["range"]["end"]["line"], params["range"]["end"]["character"], open_file.text)
actions = [
{
"title": action.title,
"kind": "quickfix",
"diagnostics": [self._create_diagnostic(open_file.text, diagnostic)],
"edit": {
"changes": {
open_file.uri: [{
"range": utils.idxs_to_range(diagnostic.start, diagnostic.end, open_file.text),
"newText": action.replace_with
}]
}
}
}
for diagnostic in open_file.diagnostics
if not (diagnostic.end < range_start or diagnostic.start > range_end)
for action in diagnostic.actions
]
self._send_response(id, actions)
def _send_file_updates(self, open_file: OpenFile):
self._send_notification("textDocument/publishDiagnostics", {
"uri": open_file.uri,
"diagnostics": [self._create_diagnostic(open_file.text, err) for err in open_file.diagnostics],
})
def _create_diagnostic(self, text, err):
return {
"range": utils.idxs_to_range(err.start, err.end, text),
"message": err.message,
"severity": 1,
}
for name in dir(LanguageServer):
item = getattr(LanguageServer, name)
if callable(item) and hasattr(item, "_json_method"):
LanguageServer.commands[item._json_method] = item
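
The loop above builds the dispatch table run() uses: every method decorated with @command is registered under its JSON-RPC method name. A minimal sketch of the framing a client writes to the server's stdin (the request contents here are hypothetical):

import json

body = json.dumps({
    "jsonrpc": "2.0",
    "id": 1,
    "method": "initialize",
    "params": {"capabilities": {}},
})
message = f"Content-Length: {len(body)}\r\n\r\n{body}"
# run() reads header lines until a blank line, then reads exactly
# Content-Length characters, parses the JSON, and dispatches on "method"
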

View file

@ -0,0 +1,110 @@
# lsp_utils.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from dataclasses import dataclass
import enum
import typing as T
from .errors import *
from .utils import *
class TextDocumentSyncKind(enum.IntEnum):
None_ = 0
Full = 1
Incremental = 2
class CompletionItemTag(enum.IntEnum):
Deprecated = 1
class InsertTextFormat(enum.IntEnum):
PlainText = 1
Snippet = 2
class CompletionItemKind(enum.IntEnum):
Text = 1
Method = 2
Function = 3
Constructor = 4
Field = 5
Variable = 6
Class = 7
Interface = 8
Module = 9
Property = 10
Unit = 11
Value = 12
Enum = 13
Keyword = 14
Snippet = 15
Color = 16
File = 17
Reference = 18
Folder = 19
EnumMember = 20
Constant = 21
Struct = 22
Event = 23
Operator = 24
TypeParameter = 25
@dataclass
class Completion:
label: str
kind: CompletionItemKind
signature: T.Optional[str] = None
deprecated: bool = False
docs: T.Optional[str] = None
text: T.Optional[str] = None
snippet: T.Optional[str] = None
def to_json(self, snippets: bool):
insert_text = self.text or self.label
insert_text_format = InsertTextFormat.PlainText
if snippets and self.snippet:
insert_text = self.snippet
insert_text_format = InsertTextFormat.Snippet
result = {
"label": self.label,
"kind": self.kind,
"tags": [CompletionItemTag.Deprecated] if self.deprecated else None,
"detail": self.signature,
"documentation": {
"kind": "markdown",
"value": self.docs,
} if self.docs else None,
"deprecated": self.deprecated,
"insertText": insert_text,
"insertTextFormat": insert_text_format,
}
return { k: v for k, v in result.items() if v is not None }
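
For instance, a hypothetical completion for a property might serialize like this (fields whose value is None are dropped by to_json()):

completion = Completion("halign", CompletionItemKind.Property, snippet="halign: $0;")
print(completion.to_json(snippets=True))
# {'label': 'halign', 'kind': <CompletionItemKind.Property: 10>,
#  'deprecated': False, 'insertText': 'halign: $0;',
#  'insertTextFormat': <InsertTextFormat.Snippet: 2>}
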
class SemanticTokenType(enum.IntEnum):
EnumMember = 0
@dataclass
class SemanticToken:
start: int
end: int
type: SemanticTokenType

130
blueprintcompiler/main.py Normal file
View file

@ -0,0 +1,130 @@
# main.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
import argparse, json, os, sys
from .errors import PrintableError, report_compile_error, MultipleErrors
from .lsp import LanguageServer
from . import parser, tokenizer
from .xml_emitter import XmlEmitter
VERSION = "0.1.0"
class BlueprintApp:
def main(self):
self.parser = argparse.ArgumentParser()
self.subparsers = self.parser.add_subparsers(metavar="command")
self.parser.set_defaults(func=self.cmd_help)
compile = self.add_subcommand("compile", "Compile blueprint files", self.cmd_compile)
compile.add_argument("--output", dest="output", default="-")
compile.add_argument("input", metavar="filename", default=sys.stdin, type=argparse.FileType('r'))
batch_compile = self.add_subcommand("batch-compile", "Compile many blueprint files at once", self.cmd_batch_compile)
batch_compile.add_argument("output_dir", metavar="output-dir")
batch_compile.add_argument("input_dir", metavar="input-dir")
batch_compile.add_argument("inputs", nargs="+", metavar="filenames", default=sys.stdin, type=argparse.FileType('r'))
lsp = self.add_subcommand("lsp", "Run the language server (for internal use by IDEs)", self.cmd_lsp)
lsp.add_argument("--logfile", dest="logfile", default=None, type=argparse.FileType('a'))
self.add_subcommand("help", "Show this message", self.cmd_help)
try:
opts = self.parser.parse_args()
opts.func(opts)
except SystemExit as e:
raise e
except:
report_compile_error()
def add_subcommand(self, name, help, func):
parser = self.subparsers.add_parser(name, help=help)
parser.set_defaults(func=func)
return parser
def cmd_help(self, opts):
self.parser.print_help()
def cmd_compile(self, opts):
data = opts.input.read()
try:
xml = self._compile(data)
if opts.output == "-":
print(xml)
else:
with open(opts.output, "w") as file:
file.write(xml)
except PrintableError as e:
e.pretty_print(opts.input.name, data)
sys.exit(1)
def cmd_batch_compile(self, opts):
for file in opts.inputs:
data = file.read()
try:
if not os.path.commonpath([file.name, opts.input_dir]):
print(f"{Colors.RED}{Colors.BOLD}error: input file '{file.name}' is not in input directory '{opts.input_dir}'{Colors.CLEAR}")
sys.exit(1)
xml = self._compile(data)
path = os.path.join(
opts.output_dir,
os.path.relpath(
os.path.splitext(file.name)[0] + ".ui",
opts.input_dir
)
)
os.makedirs(os.path.dirname(path), exist_ok=True)
with open(path, "w") as file:
file.write(xml)
except PrintableError as e:
e.pretty_print(file.name, data)
sys.exit(1)
def cmd_lsp(self, opts):
langserv = LanguageServer(opts.logfile)
langserv.run()
def _compile(self, data: str) -> str:
tokens = tokenizer.tokenize(data)
ast, errors = parser.parse(tokens)
if errors:
raise errors
if len(ast.errors):
raise MultipleErrors(ast.errors)
return ast.generate()
def main():
BlueprintApp().main()
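
A minimal sketch of driving the same pipeline as _compile() from Python (assuming the blueprintcompiler package is importable; generate() is the AST method used above to render the GtkBuilder XML):

from blueprintcompiler import parser, tokenizer

source = 'using Gtk 4.0;\n\nGtk.Label {\n  label: "Hello, world!";\n}\n'
tokens = tokenizer.tokenize(source)
ast, errors = parser.parse(tokens)
if errors:
    raise errors
print(ast.generate())   # prints the generated GtkBuilder XML
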

View file

@ -0,0 +1,530 @@
# parse_tree.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
""" Utilities for parsing an AST from a token stream. """
import typing as T
from collections import defaultdict
from enum import Enum
from .ast import AstNode
from .errors import assert_true, CompilerBugError, CompileError
from .tokenizer import Token, TokenType
SKIP_TOKENS = [TokenType.COMMENT, TokenType.WHITESPACE]
class ParseResult(Enum):
""" Represents the result of parsing. The extra EMPTY result is necessary
to avoid freezing the parser: imagine a ZeroOrMore node containing a node
that can match empty. It will repeatedly match empty and never advance
the parser. So, ZeroOrMore stops when a failed *or empty* match is
made. """
SUCCESS = 0
FAILURE = 1
EMPTY = 2
def matched(self):
return self == ParseResult.SUCCESS
def succeeded(self):
return self != ParseResult.FAILURE
def failed(self):
return self == ParseResult.FAILURE
class ParseGroup:
""" A matching group. Match groups have an AST type, children grouped by
type, and key=value pairs. At the end of parsing, the match groups will
be converted to AST nodes by passing the children and key=value pairs to
the AST node constructor. """
def __init__(self, ast_type, start: int):
self.ast_type = ast_type
self.children: T.List[ParseGroup] = []
self.keys: T.Dict[str, T.Any] = {}
self.tokens: T.Dict[str, Token] = {}
self.start = start
self.end = None
self.incomplete = False
def add_child(self, child):
self.children.append(child)
def set_val(self, key, val, token):
assert_true(key not in self.keys)
self.keys[key] = val
self.tokens[key] = token
def to_ast(self) -> AstNode:
""" Creates an AST node from the match group. """
children = [child.to_ast() for child in self.children]
try:
return self.ast_type(self, children, self.keys, incomplete=self.incomplete)
except TypeError as e:
raise CompilerBugError(f"Failed to construct ast.{self.ast_type.__name__} from ParseGroup. See the previous stacktrace.")
def __str__(self):
result = str(self.ast_type.__name__)
result += "".join([f"\n{key}: {val}" for key, val in self.keys.items()]) + "\n"
result += "\n".join([str(child) for children in self.children.values() for child in children])
return result.replace("\n", "\n ")
class ParseContext:
""" Contains the state of the parser. """
def __init__(self, tokens, index=0):
self.tokens = list(tokens)
self.index = index
self.start = index
self.group = None
self.group_keys = {}
self.group_children = []
self.last_group = None
self.group_incomplete = False
self.errors = []
self.warnings = []
def create_child(self):
""" Creates a new ParseContext at this context's position. The new
context will be used to parse one node. If parsing is successful, the
new context will be applied to "self". If parsing fails, the new
context will be discarded. """
ctx = ParseContext(self.tokens, self.index)
ctx.errors = self.errors
ctx.warnings = self.warnings
return ctx
def apply_child(self, other):
""" Applies a child context to this context. """
if other.group is not None:
# If the other context had a match group, collect all the matched
# values into it and then add it to our own match group.
for key, (val, token) in other.group_keys.items():
other.group.set_val(key, val, token)
for child in other.group_children:
other.group.add_child(child)
other.group.end = other.tokens[other.index - 1].end
other.group.incomplete = other.group_incomplete
self.group_children.append(other.group)
else:
# If the other context had no match group of its own, collect all
# its matched values
self.group_keys = {**self.group_keys, **other.group_keys}
self.group_children += other.group_children
self.group_incomplete |= other.group_incomplete
self.index = other.index
# Propagate the last parsed group down the stack so it can be easily
# retrieved at the end of the process
if other.group:
self.last_group = other.group
elif other.last_group:
self.last_group = other.last_group
def start_group(self, ast_type):
""" Sets this context to have its own match group. """
assert_true(self.group is None)
self.group = ParseGroup(ast_type, self.tokens[self.index].start)
def set_group_val(self, key, value, token):
""" Sets a matched key=value pair on the current match group. """
assert_true(key not in self.group_keys)
self.group_keys[key] = (value, token)
def set_group_incomplete(self):
""" Marks the current match group as incomplete (it could not be fully
parsed, but the parser recovered). """
self.group_incomplete = True
def skip(self):
""" Skips whitespace and comments. """
while self.index < len(self.tokens) and self.tokens[self.index].type in SKIP_TOKENS:
self.index += 1
def next_token(self) -> Token:
""" Advances the token iterator and returns the next token. """
self.skip()
token = self.tokens[self.index]
self.index += 1
return token
def peek_token(self) -> Token:
""" Returns the next token without advancing the iterator. """
self.skip()
token = self.tokens[self.index]
return token
def is_eof(self) -> Token:
return self.index >= len(self.tokens) or self.peek_token().type == TokenType.EOF
class ParseNode:
""" Base class for the nodes in the parser tree. """
def parse(self, ctx: ParseContext) -> ParseResult:
""" Attempts to match the ParseNode at the context's current location. """
start_idx = ctx.index
inner_ctx = ctx.create_child()
if self._parse(inner_ctx):
ctx.apply_child(inner_ctx)
if ctx.index == start_idx:
return ParseResult.EMPTY
else:
return ParseResult.SUCCESS
else:
return ParseResult.FAILURE
def _parse(self, ctx: ParseContext) -> bool:
raise NotImplementedError()
def err(self, message):
""" Causes this ParseNode to raise an exception if it fails to parse.
This prevents the parser from backtracking, so you should understand
what it does and how the parser works before using it. """
return Err(self, message)
def expected(self, expect):
""" Convenience method for err(). """
return self.err("Expected " + expect)
class Err(ParseNode):
""" ParseNode that emits a compile error if it fails to parse. """
def __init__(self, child, message):
self.child = child
self.message = message
def _parse(self, ctx):
if self.child.parse(ctx).failed():
start_idx = ctx.start
while ctx.tokens[start_idx].type in SKIP_TOKENS:
start_idx += 1
start_token = ctx.tokens[start_idx]
end_token = ctx.tokens[ctx.index]
raise CompileError(self.message, start_token.start, end_token.end)
return True
class Fail(ParseNode):
""" ParseNode that emits a compile error if it parses successfully. """
def __init__(self, child, message):
self.child = child
self.message = message
def _parse(self, ctx):
if self.child.parse(ctx).succeeded():
start_idx = ctx.start
while ctx.tokens[start_idx].type in SKIP_TOKENS:
start_idx += 1
start_token = ctx.tokens[start_idx]
end_token = ctx.tokens[ctx.index]
raise CompileError(self.message, start_token.start, end_token.end)
return True
class Group(ParseNode):
""" ParseNode that creates a match group. """
def __init__(self, ast_type, child):
self.ast_type = ast_type
self.child = child
def _parse(self, ctx: ParseContext) -> bool:
ctx.skip()
ctx.start_group(self.ast_type)
return self.child.parse(ctx).succeeded()
class Sequence(ParseNode):
""" ParseNode that attempts to match all of its children in sequence. """
def __init__(self, *children):
self.children = children
def _parse(self, ctx) -> bool:
for child in self.children:
if child.parse(ctx).failed():
return False
return True
class Statement(ParseNode):
""" ParseNode that attempts to match all of its children in sequence. If any
child raises an error, the error will be logged but parsing will continue. """
def __init__(self, *children):
self.children = children
def _parse(self, ctx) -> bool:
for child in self.children:
try:
if child.parse(ctx).failed():
return False
except CompileError as e:
ctx.errors.append(e)
ctx.set_group_incomplete()
return True
token = ctx.peek_token()
if token.type != TokenType.STMT_END:
ctx.errors.append(CompileError("Expected `;`", token.start, token.end))
else:
ctx.next_token()
return True
class AnyOf(ParseNode):
""" ParseNode that attempts to match exactly one of its children. Child
nodes are attempted in order. """
def __init__(self, *children):
self.children = children
def _parse(self, ctx):
for child in self.children:
if child.parse(ctx).succeeded():
return True
return False
class Until(ParseNode):
""" ParseNode that repeats its child until a delimiting token is found. If
the child does not match, one token is skipped and the match is attempted
again. """
def __init__(self, child, delimiter):
self.child = child
self.delimiter = delimiter
def _parse(self, ctx):
while not self.delimiter.parse(ctx).succeeded():
try:
if not self.child.parse(ctx).matched():
token = ctx.next_token()
ctx.errors.append(CompileError("Unexpected token", token.start, token.end))
except CompileError as e:
ctx.errors.append(e)
ctx.next_token()
if ctx.is_eof():
return True
return True
class ZeroOrMore(ParseNode):
""" ParseNode that matches its child any number of times (including zero
times). It cannot fail to parse. If its child raises an exception, one token
will be skipped and parsing will continue. """
def __init__(self, child):
self.child = child
def _parse(self, ctx):
while True:
try:
if not self.child.parse(ctx).matched():
return True
except CompileError as e:
ctx.errors.append(e)
ctx.next_token()
class Delimited(ParseNode):
""" ParseNode that matches its first child any number of times (including zero
times) with its second child in between and optionally at the end. """
def __init__(self, child, delimiter):
self.child = child
self.delimiter = delimiter
def _parse(self, ctx):
while self.child.parse(ctx).matched() and self.delimiter.parse(ctx).matched():
pass
return True
class Optional(ParseNode):
""" ParseNode that matches its child zero or one times. It cannot fail to
parse. """
def __init__(self, child):
self.child = child
def _parse(self, ctx):
self.child.parse(ctx)
return True
class StaticToken(ParseNode):
""" Base class for ParseNodes that match a token type without inspecting
the token's contents. """
token_type: T.Optional[TokenType] = None
def _parse(self, ctx: ParseContext) -> bool:
return ctx.next_token().type == self.token_type
class StmtEnd(StaticToken):
token_type = TokenType.STMT_END
class Eof(StaticToken):
token_type = TokenType.EOF
class OpenBracket(StaticToken):
token_type = TokenType.OPEN_BRACKET
class CloseBracket(StaticToken):
token_type = TokenType.CLOSE_BRACKET
class OpenBlock(StaticToken):
token_type = TokenType.OPEN_BLOCK
class CloseBlock(StaticToken):
token_type = TokenType.CLOSE_BLOCK
class OpenParen(StaticToken):
token_type = TokenType.OPEN_PAREN
class CloseParen(StaticToken):
token_type = TokenType.CLOSE_PAREN
class Comma(StaticToken):
token_type = TokenType.COMMA
class Op(ParseNode):
""" ParseNode that matches the given operator. """
def __init__(self, op):
self.op = op
def _parse(self, ctx: ParseContext) -> bool:
token = ctx.next_token()
if token.type != TokenType.OP:
return False
return str(token) == self.op
class UseIdent(ParseNode):
""" ParseNode that matches any identifier and sets it in a key=value pair on
the containing match group. """
def __init__(self, key):
self.key = key
def _parse(self, ctx: ParseContext):
token = ctx.next_token()
if token.type != TokenType.IDENT:
return False
ctx.set_group_val(self.key, str(token), token)
return True
class UseNumber(ParseNode):
""" ParseNode that matches a number and sets it in a key=value pair on
the containing match group. """
def __init__(self, key):
self.key = key
def _parse(self, ctx: ParseContext):
token = ctx.next_token()
if token.type != TokenType.NUMBER:
return False
number = token.get_number()
if number % 1.0 == 0:
number = int(number)
ctx.set_group_val(self.key, number, token)
return True
class UseNumberText(ParseNode):
""" ParseNode that matches a number, but sets its *original text* it in a
key=value pair on the containing match group. """
def __init__(self, key):
self.key = key
def _parse(self, ctx: ParseContext):
token = ctx.next_token()
if token.type != TokenType.NUMBER:
return False
ctx.set_group_val(self.key, str(token), token)
return True
class UseQuoted(ParseNode):
""" ParseNode that matches a quoted string and sets it in a key=value pair
on the containing match group. """
def __init__(self, key):
self.key = key
def _parse(self, ctx: ParseContext):
token = ctx.next_token()
if token.type != TokenType.QUOTED:
return False
string = (str(token)[1:-1]
.replace("\\n", "\n")
.replace("\\\"", "\"")
.replace("\\\\", "\\")
.replace("\\'", "\'"))
ctx.set_group_val(self.key, string, token)
return True
class UseLiteral(ParseNode):
""" ParseNode that doesn't match anything, but rather sets a static key=value
pair on the containing group. Useful for, e.g., property and signal flags:
`Sequence(Keyword("swapped"), UseLiteral("swapped", True))` """
def __init__(self, key, literal):
self.key = key
self.literal = literal
def _parse(self, ctx: ParseContext):
ctx.set_group_val(self.key, self.literal, None)
return True
class Keyword(ParseNode):
""" Matches the given identifier. """
def __init__(self, kw, set_token=False):
self.kw = kw
self.set_token = True
def _parse(self, ctx: ParseContext):
token = ctx.next_token()
if token.type != TokenType.IDENT:
return False
if self.set_token:
ctx.set_group_val(self.kw, True, token)
return str(token) == self.kw
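
As a small sketch of how these combinators compose outside of the full grammar in parser.py (assuming the blueprintcompiler package is importable), the following parses a `using` statement by hand and inspects the collected key=value pairs:

from blueprintcompiler.tokenizer import tokenize
from blueprintcompiler.parse_tree import ParseContext, Statement, Keyword, UseIdent, UseNumberText

ctx = ParseContext(tokenize("using Gtk 4.0;"))
stmt = Statement(Keyword("using"), UseIdent("namespace"), UseNumberText("version"))
print(stmt.parse(ctx).matched())    # True
print({key: val for key, (val, token) in ctx.group_keys.items()})
# {'using': True, 'namespace': 'Gtk', 'version': '4.0'}
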

167
blueprintcompiler/parser.py Normal file
View file

@ -0,0 +1,167 @@
# parser.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from . import ast
from .errors import MultipleErrors
from .parse_tree import *
from .parser_utils import *
from .tokenizer import TokenType
from .extensions import OBJECT_HOOKS, OBJECT_CONTENT_HOOKS
def parse(tokens) -> T.Tuple[ast.UI, T.Optional[MultipleErrors]]:
""" Parses a list of tokens into an abstract syntax tree. """
gtk_directive = Group(
ast.GtkDirective,
Statement(
Keyword("using").err("File must start with a \"using Gtk\" directive (e.g. `using Gtk 4.0;`)"),
Keyword("Gtk").err("File must start with a \"using Gtk\" directive (e.g. `using Gtk 4.0;`)"),
UseNumberText("version").expected("a version number for GTK"),
)
)
import_statement = Group(
ast.Import,
Statement(
Keyword("using"),
UseIdent("namespace").expected("a GIR namespace"),
UseNumberText("version").expected("a version number"),
)
)
object = Group(
ast.Object,
None
)
property = Group(
ast.Property,
Statement(
UseIdent("name"),
Op(":"),
AnyOf(
*OBJECT_HOOKS,
object,
value,
).expected("a value"),
)
)
binding = Group(
ast.Property,
Statement(
UseIdent("name"),
Op(":"),
Keyword("bind"),
UseIdent("bind_source").expected("the ID of a source object to bind from"),
Op("."),
UseIdent("bind_property").expected("a property name to bind from"),
ZeroOrMore(AnyOf(
Sequence(Keyword("sync-create"), UseLiteral("sync_create", True)),
Sequence(Keyword("after"), UseLiteral("after", True)),
)),
)
)
signal = Group(
ast.Signal,
Statement(
UseIdent("name"),
Optional(Sequence(
Op("::"),
UseIdent("detail_name").expected("a signal detail name"),
)),
Op("=>"),
UseIdent("handler").expected("the name of a function to handle the signal"),
OpenParen().expected("argument list"),
CloseParen().expected("`)`"),
ZeroOrMore(AnyOf(
Sequence(Keyword("swapped"), UseLiteral("swapped", True)),
Sequence(Keyword("after"), UseLiteral("after", True)),
Sequence(Keyword("object"), UseLiteral("object", True)),
)),
)
)
child = Group(
ast.Child,
Sequence(
Optional(Sequence(
OpenBracket(),
UseIdent("child_type").expected("a child type"),
CloseBracket(),
)),
object,
)
)
object_content = Group(
ast.ObjectContent,
Sequence(
OpenBlock(),
Until(AnyOf(
*OBJECT_CONTENT_HOOKS,
binding,
property,
signal,
child,
), CloseBlock()),
)
)
# work around the recursive reference
object.child = Sequence(
class_name,
Optional(UseIdent("id")),
object_content,
)
template = Group(
ast.Template,
Sequence(
Keyword("template"),
UseIdent("name").expected("template class name"),
Op(":").expected("`:`"),
class_name.expected("parent class"),
object_content.expected("block"),
)
)
ui = Group(
ast.UI,
Sequence(
gtk_directive,
ZeroOrMore(import_statement),
Until(AnyOf(
*OBJECT_HOOKS,
template,
object,
), Eof()),
)
)
ctx = ParseContext(tokens)
ui.parse(ctx)
ast_node = ctx.last_group.to_ast() if ctx.last_group else None
errors = MultipleErrors(ctx.errors) if len(ctx.errors) else None
return (ast_node, errors)
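
To make the grammar above concrete, here is a hypothetical document it accepts, exercising the template, property, binding, signal and child-type rules (the identifiers are made up; validation against GIR happens later, in ast.py):

from blueprintcompiler import parser, tokenizer

source = (
    "using Gtk 4.0;\n"
    "template MyAppWindow : Gtk.ApplicationWindow {\n"
    "  title: \"Example\";\n"
    "  default-width: bind some_object.width sync-create;\n"
    "  close-request => on_close_request() swapped;\n"
    "  [titlebar]\n"
    "  Gtk.HeaderBar {}\n"
    "}\n"
)
ast, errors = parser.parse(tokenizer.tokenize(source))
print(errors)   # None -- the document parses cleanly
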

View file

@ -0,0 +1,82 @@
# parser_utils.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from . import ast
from .parse_tree import *
class_name = AnyOf(
Sequence(
UseIdent("namespace"),
Op("."),
UseIdent("class_name"),
),
Sequence(
Op("."),
UseIdent("class_name"),
UseLiteral("ignore_gir", True),
),
UseIdent("class_name"),
)
literal = Group(
ast.LiteralValue,
AnyOf(
UseNumber("value"),
UseQuoted("value"),
)
)
ident_value = Group(
ast.IdentValue,
UseIdent("value"),
)
flags_value = Group(
ast.FlagsValue,
Sequence(
Group(ast.Flag, UseIdent("value")),
Op("|"),
Delimited(Group(ast.Flag, UseIdent("value")), Op("|")),
),
)
translated_string = Group(
ast.TranslatedStringValue,
AnyOf(
Sequence(
Keyword("_"),
OpenParen(),
UseQuoted("value").expected("a quoted string"),
CloseParen().expected("`)`"),
),
Sequence(
Keyword("C_"),
OpenParen(),
UseQuoted("context").expected("a quoted string"),
Comma(),
UseQuoted("value").expected("a quoted string"),
Optional(Comma()),
CloseParen().expected("`)`"),
),
),
)
value = AnyOf(translated_string, literal, flags_value, ident_value)
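
A short sketch of the kinds of value expressions these rules accept, checked directly against the `value` rule (assuming the blueprintcompiler package is importable; the example strings are made up):

from blueprintcompiler.parser_utils import value
from blueprintcompiler.parse_tree import ParseContext
from blueprintcompiler.tokenizer import tokenize

examples = ['"some text"', '3.5', '_("translated")', 'C_("context", "translated")', 'center', 'first | second']
for text in examples:
    ctx = ParseContext(tokenize(text))
    print(text, value.parse(ctx).matched())   # True for each of these
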

View file

@ -0,0 +1,116 @@
# tokenizer.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
import typing as T
import re
from enum import Enum
from .errors import CompileError
class TokenType(Enum):
EOF = 0
DIRECTIVE = 1
IDENT = 2
QUOTED = 3
NUMBER = 4
OPEN_PAREN = 5
CLOSE_PAREN = 6
OPEN_BLOCK = 7
CLOSE_BLOCK = 8
STMT_END = 9
OP = 10
WHITESPACE = 11
COMMENT = 12
OPEN_BRACKET = 13
CLOSE_BRACKET = 14
COMMA = 15
_tokens = [
(TokenType.DIRECTIVE, r"@[\d\w\-_]+"),
(TokenType.IDENT, r"[A-Za-z_][\d\w\-_]*"),
(TokenType.QUOTED, r'"(\\"|[^"\n])*"'),
(TokenType.QUOTED, r"'(\\'|[^'\n])*'"),
(TokenType.NUMBER, r"[-+]?[\d_]+(\.[\d_]+)?"),
(TokenType.NUMBER, r"0x[A-Fa-f0-9]+"),
(TokenType.OPEN_PAREN, r"\("),
(TokenType.CLOSE_PAREN, r"\)"),
(TokenType.OPEN_BLOCK, r"\{"),
(TokenType.CLOSE_BLOCK, r"\}"),
(TokenType.STMT_END, r";"),
(TokenType.WHITESPACE, r"\s+"),
(TokenType.COMMENT, r"\/\*[\s\S]*?\*\/"),
(TokenType.COMMENT, r"\/\/[^\n]*"),
(TokenType.OPEN_BRACKET, r"\["),
(TokenType.CLOSE_BRACKET, r"\]"),
(TokenType.OP, r"[:=\.=\|<>\+\-/\*]+"),
(TokenType.COMMA, r"\,"),
]
_TOKENS = [(type, re.compile(regex)) for (type, regex) in _tokens]
class Token:
def __init__(self, type, start, end, string):
self.type = type
self.start = start
self.end = end
self.string = string
def __str__(self):
return self.string[self.start:self.end]
def is_directive(self, directive) -> bool:
if self.type != TokenType.DIRECTIVE:
return False
return str(self) == "@" + directive
def get_number(self):
if self.type != TokenType.NUMBER:
return None
string = str(self)
if string.startswith("0x"):
return int(string, 16)
else:
return float(string)
def _tokenize(ui_ml: str):
i = 0
while i < len(ui_ml):
matched = False
for (type, regex) in _TOKENS:
match = regex.match(ui_ml, i)
if match is not None:
yield Token(type, match.start(), match.end(), ui_ml)
i = match.end()
matched = True
break
if not matched:
raise CompileError("Could not determine what kind of syntax is meant here", i, i)
yield Token(TokenType.EOF, i, i, ui_ml)
def tokenize(data: str) -> T.List[Token]:
return list(_tokenize(data))
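
A quick sketch of the tokenizer's output (whitespace tokens filtered out for brevity):

from blueprintcompiler.tokenizer import tokenize, TokenType

tokens = tokenize('using Gtk 4.0;')
print([(t.type.name, str(t)) for t in tokens if t.type != TokenType.WHITESPACE])
# [('IDENT', 'using'), ('IDENT', 'Gtk'), ('NUMBER', '4.0'), ('STMT_END', ';'), ('EOF', '')]
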

View file

@ -0,0 +1,94 @@
# utils.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
import typing as T
def lazy_prop(func):
key = "_lazy_prop_" + func.__name__
@property
def real_func(self):
if key not in self.__dict__:
self.__dict__[key] = func(self)
return self.__dict__[key]
return real_func
def did_you_mean(word: str, options: T.List[str]) -> T.Optional[str]:
if len(options) == 0:
return None
    def levenshtein(a, b):
        # a Levenshtein-style edit distance, where a substitution costs 2,
        # or only 1 if it merely changes the case of a letter
        # see https://en.wikipedia.org/wiki/Levenshtein_distance
        m = len(a)
        n = len(b)
        distances = [[0 for j in range(n + 1)] for i in range(m + 1)]
        for i in range(m + 1):
            distances[i][0] = i
        for j in range(n + 1):
            distances[0][j] = j
        for j in range(1, n + 1):
            for i in range(1, m + 1):
                cost = 0
                if a[i - 1] != b[j - 1]:
                    if a[i - 1].casefold() == b[j - 1].casefold():
                        cost = 1
                    else:
                        cost = 2
                distances[i][j] = min(distances[i-1][j] + 2, distances[i][j-1] + 2, distances[i-1][j-1] + cost)
        return distances[m][n]
distances = [(option, levenshtein(word, option)) for option in options]
closest = min(distances, key=lambda item:item[1])
if closest[1] <= 5:
return closest[0]
return None
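
For example (with a hypothetical list of property names):

from blueprintcompiler.utils import did_you_mean

print(did_you_mean("hallign", ["halign", "valign", "hexpand"]))   # halign
print(did_you_mean("fooBar", ["halign", "valign", "hexpand"]))    # None (too far from every option)
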
def idx_to_pos(idx: int, text: str) -> T.Tuple[int, int]:
    if idx == 0:
        return (0, 0)
    sp = text[:idx].splitlines(keepends=True)
    if sp[-1].endswith("\n"):
        # the index points at the very start of the next line
        return (len(sp), 0)
    line_num = len(sp)
    col_num = len(sp[-1])
    return (line_num - 1, col_num)
def pos_to_idx(line: int, col: int, text: str) -> int:
lines = text.splitlines(keepends=True)
return sum([len(line) for line in lines[:line]]) + col
def idxs_to_range(start: int, end: int, text: str):
start_l, start_c = idx_to_pos(start, text)
end_l, end_c = idx_to_pos(end, text)
return {
"start": {
"line": start_l,
"character": start_c,
},
"end": {
"line": end_l,
"character": end_c,
},
}
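
For example, converting between flat string indices and line/column positions:

from blueprintcompiler.utils import idx_to_pos, pos_to_idx

text = "using Gtk 4.0;\nGtk.Label {\n}\n"
print(idx_to_pos(19, text))    # (1, 4) -- index 19 is line 1, column 4
print(pos_to_idx(1, 4, text))  # 19
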

View file

@ -0,0 +1,63 @@
# xml_emitter.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from xml.sax import saxutils
class XmlEmitter:
def __init__(self, indent=2):
self.indent = indent
self.result = '<?xml version="1.0" encoding="UTF-8"?>'
self._tag_stack = []
self._needs_newline = False
def start_tag(self, tag, **attrs):
self._indent()
self.result += f"<{tag}"
for key, val in attrs.items():
if val is not None:
self.result += f' {key}="{saxutils.escape(str(val))}"'
self.result += ">"
self._tag_stack.append(tag)
self._needs_newline = False
def put_self_closing(self, tag, **attrs):
self._indent()
self.result += f"<{tag}"
for key, val in attrs.items():
if val is not None:
self.result += f' {key}="{saxutils.escape(str(val))}"'
self.result += "/>"
self._needs_newline = True
def end_tag(self):
tag = self._tag_stack.pop()
if self._needs_newline:
self._indent()
self.result += f"</{tag}>"
self._needs_newline = True
def put_text(self, text):
self.result += saxutils.escape(str(text))
self._needs_newline = False
def _indent(self):
if self.indent is not None:
self.result += "\n" + " " * (self.indent * len(self._tag_stack))
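
A short sketch of the emitter in use:

from blueprintcompiler.xml_emitter import XmlEmitter

xml = XmlEmitter()
xml.start_tag("interface")
xml.start_tag("object", **{"class": "GtkLabel", "id": "greeting"})
xml.start_tag("property", name="label")
xml.put_text("Hello & <world>")   # escaped to "Hello &amp; &lt;world&gt;"
xml.end_tag()   # </property>
xml.end_tag()   # </object>
xml.end_tag()   # </interface>
print(xml.result)
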

View file

@ -0,0 +1,93 @@
# xml_reader.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from collections import defaultdict
from xml import sax
from .utils import lazy_prop
# To speed up parsing, we ignore all tags except these
PARSE_GIR = set([
"repository", "namespace", "class", "interface", "property", "glib:signal",
"include", "implements", "type", "parameter", "parameters", "enumeration",
"member",
])
class Element:
def __init__(self, tag, attrs):
self.tag = tag
self.attrs = attrs
self.children = defaultdict(list)
self.cdata_chunks = []
@lazy_prop
def cdata(self):
return ''.join(self.cdata_chunks)
def get_elements(self, name):
return self.children.get(name, [])
def __getitem__(self, key):
return self.attrs.get(key)
class Handler(sax.handler.ContentHandler):
def __init__(self, parse_type):
self.root = None
self.stack = []
self.skipping = 0
self._interesting_elements = parse_type
def startElement(self, name, attrs):
if name not in self._interesting_elements:
self.skipping += 1
if self.skipping > 0:
return
element = Element(name, attrs.copy())
if len(self.stack):
last = self.stack[-1]
last.children[name].append(element)
else:
self.root = element
self.stack.append(element)
def endElement(self, name):
if self.skipping == 0:
self.stack.pop()
if name not in self._interesting_elements:
self.skipping -= 1
def characters(self, content):
if not self.skipping:
self.stack[-1].cdata_chunks.append(content)
def parse(filename, parse_type):
parser = sax.make_parser()
handler = Handler(parse_type)
parser.setContentHandler(handler)
parser.parse(filename)
return handler.root
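
And a sketch of reading a GIR file with it (the .gir path is hypothetical and distribution-dependent):

from blueprintcompiler import xml_reader

root = xml_reader.parse("/usr/share/gir-1.0/Gtk-4.0.gir", xml_reader.PARSE_GIR)
namespace = root.get_elements("namespace")[0]
print(namespace["name"], namespace["version"])        # Gtk 4.0
print(len(namespace.get_elements("class")))           # number of classes parsed from the GIR
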