Mirror of https://gitlab.gnome.org/jwestman/blueprint-compiler.git
Synced 2025-05-04 15:59:08 -04:00

lsp: Support completions

parent afecd744ca
commit 408f3ebce5
8 changed files with 460 additions and 106 deletions
gtkblueprinttool/ast.py

@@ -22,6 +22,8 @@ import typing as T

 from .ast_utils import *
 from .errors import assert_true, AlreadyCaughtError, CompileError, CompilerBugError, MultipleErrors
 from .gir import GirContext, get_namespace
+from .lsp_utils import Completion, CompletionItemKind
+from .tokenizer import Token
 from .utils import lazy_prop
 from .xml_emitter import XmlEmitter
@@ -29,11 +31,16 @@ from .xml_emitter import XmlEmitter

 class AstNode:
     """ Base class for nodes in the abstract syntax tree. """

+    completers: T.List = []
+
     def __init__(self):
         self.group = None
         self.parent = None
         self.child_nodes = None

+    def __init_subclass__(cls):
+        cls.completers = []
+
     @lazy_prop
     def root(self):
         if self.parent is None:
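
A note on the __init_subclass__ hook above: it gives every AstNode subclass its own completers list, so registering a completer on one node type does not leak onto others. A minimal sketch of the idiom (the UI and ObjectContent stubs here are illustrative stand-ins, not the real classes):

import typing as T

class AstNode:
    completers: T.List = []

    def __init_subclass__(cls):
        # Give each subclass a fresh list instead of sharing the base class's.
        cls.completers = []

class UI(AstNode): pass
class ObjectContent(AstNode): pass

ObjectContent.completers.append("demo-completer")
assert AstNode.completers == []        # base class unaffected
assert UI.completers == []             # sibling subclass unaffected
assert ObjectContent.completers == ["demo-completer"]
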
@@ -263,6 +270,17 @@ class ObjectContent(AstNode):
         self.children = children
         self.style = style

+    @validate()
+    def gir_class(self):
+        parent = self.parent
+        if isinstance(parent, Template):
+            return parent.gir_parent
+        elif isinstance(parent, Object):
+            return parent.gir_class
+        else:
+            raise CompilerBugError()
+
     @validate()
     def only_one_style_class(self):
         if len(self.style) > 1:
gtkblueprinttool/completions.py (new file, 221 lines)

@@ -0,0 +1,221 @@
+# completions.py
+#
+# Copyright 2021 James Westman <james@jwestman.net>
+#
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as
+# published by the Free Software Foundation; either version 3 of the
+# License, or (at your option) any later version.
+#
+# This file is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# SPDX-License-Identifier: LGPL-3.0-or-later
+
+import typing as T
+
+from . import ast
+from .lsp_utils import Completion, CompletionItemKind
+from .parser import SKIP_TOKENS
+from .tokenizer import TokenType, Token
+
+Pattern = T.List[T.Tuple[TokenType, T.Optional[str]]]
+
+
+def complete(ast_node: ast.AstNode, tokens: T.List[Token], idx: int) -> T.Iterator[Completion]:
+    for child in ast_node.child_nodes:
+        if child.group.start <= idx <= child.group.end:
+            yield from complete(child, tokens, idx)
+            return
+
+    prev_tokens: T.List[Token] = []
+    token_idx = 0
+
+    # find the current token
+    for i, token in enumerate(tokens):
+        if token.start < idx <= token.end:
+            token_idx = i
+
+    # collect the 5 previous non-skipped tokens
+    while len(prev_tokens) < 5 and token_idx >= 0:
+        token = tokens[token_idx]
+        if token.type not in SKIP_TOKENS:
+            prev_tokens.insert(0, token)
+        token_idx -= 1
+
+    for completer in ast_node.completers:
+        yield from completer.completions(prev_tokens, ast_node)
+
+
+class Completer:
+    def __init__(self, func):
+        self.func = func
+        self.patterns: T.List = []
+        self.ast_type: T.Type[ast.AstNode] = None
+
+    def completions(self, prev_tokens: list[Token], ast_node: ast.AstNode) -> T.Iterator[Completion]:
+        any_match = len(self.patterns) == 0
+        match_variables: T.List[str] = []
+
+        for pattern in self.patterns:
+            match_variables = []
+
+            if len(pattern) <= len(prev_tokens):
+                for i in range(0, len(pattern)):
+                    type, value = pattern[i]
+                    token = prev_tokens[i - len(pattern)]
+                    if token.type != type or (value is not None and str(token) != value):
+                        break
+                    if value is None:
+                        match_variables.append(str(token))
+                else:
+                    any_match = True
+                    break
+
+        if not any_match:
+            return
+
+        print("completions", match_variables, self.func)
+
+        if self.ast_type is not None:
+            while ast_node is not None and not isinstance(ast_node, self.ast_type):
+                ast_node = ast_node.parent
+
+        yield from self.func(ast_node, match_variables)
+
+
+def applies_to(*ast_types):
+    """ Decorator describing which AST nodes the completer should apply in. """
+    def _decorator(func):
+        completer = Completer(func)
+        for c in ast_types:
+            c.completers.append(completer)
+        return completer
+    return _decorator
+
+def matches(patterns: T.List):
+    def _decorator(cls):
+        cls.patterns = patterns
+        return cls
+    return _decorator
+
+def ast_type(ast_type: T.Type[ast.AstNode]):
+    def _decorator(cls):
+        cls.ast_type = ast_type
+        return cls
+    return _decorator
+
+
+new_statement_patterns = [
+    [(TokenType.OPEN_BLOCK, None)],
+    [(TokenType.CLOSE_BLOCK, None)],
+    [(TokenType.STMT_END, None)],
+]
+
+
+@applies_to(ast.GtkDirective)
+def using_gtk(ast_node, match_variables):
+    yield Completion("using Gtk 4.0;", CompletionItemKind.Keyword)
+
+
+@matches(new_statement_patterns)
+@ast_type(ast.UI)
+@applies_to(ast.UI, ast.ObjectContent, ast.Template)
+def namespace(ast_node, match_variables):
+    yield Completion("Gtk", CompletionItemKind.Module, text="Gtk.")
+    for ns in ast_node.imports:
+        yield Completion(ns.namespace, CompletionItemKind.Module, text=ns.namespace + ".")
+
+
+@matches([
+    [(TokenType.IDENT, None), (TokenType.OP, "."), (TokenType.IDENT, None)],
+    [(TokenType.IDENT, None), (TokenType.OP, ".")],
+])
+@applies_to(ast.UI, ast.ObjectContent, ast.Template)
+def object_completer(ast_node, match_variables):
+    ns = ast_node.root.gir.namespaces.get(match_variables[0])
+    if ns is not None:
+        for c in ns.classes.values():
+            yield Completion(c.name, CompletionItemKind.Class, docs=c.doc)
+
+
+@matches(new_statement_patterns)
+@applies_to(ast.ObjectContent)
+def property_completer(ast_node, match_variables):
+    if ast_node.gir_class:
+        for prop in ast_node.gir_class.properties:
+            yield Completion(prop, CompletionItemKind.Property, snippet=f"{prop}: $0;")
+
+
+@matches(new_statement_patterns)
+@applies_to(ast.ObjectContent)
+def style_completer(ast_node, match_variables):
+    yield Completion("style", CompletionItemKind.Keyword, snippet="style \"$0\";")
+
+
+@matches(new_statement_patterns)
+@applies_to(ast.ObjectContent)
+def signal_completer(ast_node, match_variables):
+    if ast_node.gir_class:
+        for signal in ast_node.gir_class.signals:
+            name = ("on" if not isinstance(ast_node.parent, ast.Object)
+                    else "on_" + (ast_node.parent.id or ast_node.parent.class_name.lower()))
+            yield Completion(signal, CompletionItemKind.Property, snippet=f"{signal} => ${{1:{name}_{signal.replace('-', '_')}}}()$0;")
+
+
+@matches(new_statement_patterns)
+@applies_to(ast.UI)
+def template_completer(ast_node, match_variables):
+    yield Completion(
+        "template", CompletionItemKind.Snippet,
+        snippet="template ${1:ClassName} : ${2:ParentClass} {\n $0\n}"
+    )
+
+
+@matches(new_statement_patterns)
+@applies_to(ast.UI)
+def menu_completer(ast_node, match_variables):
+    yield Completion(
+        "menu", CompletionItemKind.Snippet,
+        snippet="menu {\n $0\n}"
+    )
+
+
+@matches(new_statement_patterns)
+@applies_to(ast.Menu)
+def menu_content_completer(ast_node, match_variables):
+    yield Completion(
+        "submenu", CompletionItemKind.Snippet,
+        snippet="submenu {\n $0\n}"
+    )
+    yield Completion(
+        "section", CompletionItemKind.Snippet,
+        snippet="section {\n $0\n}"
+    )
+    yield Completion(
+        "item", CompletionItemKind.Snippet,
+        snippet="item {\n $0\n}"
+    )
+    yield Completion(
+        "item (shorthand)", CompletionItemKind.Snippet,
+        snippet='item _("${1:Label}") "${2:action-name}" "${3:icon-name}";'
+    )
+
+    yield Completion(
+        "label", CompletionItemKind.Snippet,
+        snippet='label: $0;'
+    )
+    yield Completion(
+        "action", CompletionItemKind.Snippet,
+        snippet='action: "$0";'
+    )
+    yield Completion(
+        "icon", CompletionItemKind.Snippet,
+        snippet='icon: "$0";'
+    )
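
A note on how the decorators in this new file compose: they apply bottom-up, so @applies_to runs first, wrapping the function in a Completer and registering it on each AST node class; @matches and @ast_type then set attributes on that Completer object. That ordering is why @applies_to sits closest to the function. A self-contained sketch of the mechanism (FakeNode and the token tuple are illustrative stand-ins, not the real classes):

class Completer:
    def __init__(self, func):
        self.func = func
        self.patterns = []
        self.ast_type = None

def applies_to(*node_types):
    def _decorator(func):
        completer = Completer(func)
        for t in node_types:
            t.completers.append(completer)  # register on each node class
        return completer
    return _decorator

def matches(patterns):
    def _decorator(obj):
        obj.patterns = patterns  # runs after applies_to, on the Completer
        return obj
    return _decorator

class FakeNode:
    completers = []

@matches([[("OPEN_BLOCK", None)]])
@applies_to(FakeNode)
def my_completer(ast_node, match_variables):
    yield "suggestion"

assert FakeNode.completers[0] is my_completer
assert my_completer.patterns == [[("OPEN_BLOCK", None)]]
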
gtkblueprinttool/lsp.py

@@ -21,6 +21,7 @@
 import typing as T
 import json, sys, traceback

+from .completions import complete
 from .errors import PrintableError, CompileError, MultipleErrors
 from .lsp_utils import *
 from . import tokenizer, parser, utils, xml_reader
@@ -33,12 +34,44 @@ def command(json_method):
     return decorator


+class OpenFile:
+    def __init__(self, uri, text, version):
+        self.uri = uri
+        self.text = text
+        self.version = version
+        self.ast = None
+        self.tokens = None
+
+        self._update()
+
+    def apply_changes(self, changes):
+        for change in changes:
+            start = utils.pos_to_idx(change["range"]["start"]["line"], change["range"]["start"]["character"], self.text)
+            end = utils.pos_to_idx(change["range"]["end"]["line"], change["range"]["end"]["character"], self.text)
+            self.text = self.text[:start] + change["text"] + self.text[end:]
+        self._update()
+
+    def _update(self):
+        self.diagnostics = []
+        try:
+            self.tokens = tokenizer.tokenize(self.text)
+            self.ast, errors = parser.parse(self.tokens)
+            if errors is not None:
+                self.diagnostics += errors.errors
+            self.diagnostics += self.ast.errors
+        except MultipleErrors as e:
+            self.diagnostics += e.errors
+        except CompileError as e:
+            self.diagnostics.append(e)
+
+
 class LanguageServer:
     commands: T.Dict[str, T.Callable] = {}

-    def __init__(self):
+    def __init__(self, logfile=None):
         self.client_capabilities = {}
         self._open_files: {str: OpenFile} = {}
+        self.logfile = logfile

     def run(self):
         # Read <doc> tags from gir files. During normal compilation these are
@@ -102,8 +135,9 @@ class LanguageServer:
             "capabilities": {
                 "textDocumentSync": {
                     "openClose": True,
-                    "change": 2, # incremental
+                    "change": TextDocumentSyncKind.Incremental,
                 },
+                "completionProvider": {},
                 "hoverProvider": True,
             }
         })
@@ -133,8 +167,8 @@ class LanguageServer:
     @command("textDocument/hover")
     def hover(self, id, params):
         open_file = self._open_files[params["textDocument"]["uri"]]
-        docs = open_file.ast.get_docs(utils.pos_to_idx(params["position"]["line"], params["position"]["character"], open_file.text))
-        if docs is not None:
+        docs = open_file.ast and open_file.ast.get_docs(utils.pos_to_idx(params["position"]["line"], params["position"]["character"], open_file.text))
+        if docs:
             self._send_response(id, {
                 "contents": {
                     "kind": "markdown",
@@ -144,6 +178,18 @@ class LanguageServer:
         else:
             self._send_response(id, None)

+    @command("textDocument/completion")
+    def completion(self, id, params):
+        open_file = self._open_files[params["textDocument"]["uri"]]
+
+        if open_file.ast is None:
+            self._send_response(id, [])
+            return
+
+        idx = utils.pos_to_idx(params["position"]["line"], params["position"]["character"], open_file.text)
+        completions = complete(open_file.ast, open_file.tokens, idx)
+        self._send_response(id, [completion.to_json(True) for completion in completions])
+
+
     def _send_file_updates(self, open_file: OpenFile):
         self._send_notification("textDocument/publishDiagnostics", {
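
OpenFile.apply_changes above leans on utils.pos_to_idx to turn LSP line/character positions into flat string indices before splicing in an incremental edit. That helper is not part of this diff; a plausible sketch of its behavior, for orientation only:

def pos_to_idx(line: int, character: int, text: str) -> int:
    # Flatten an LSP (line, character) position into a string index.
    lines = text.splitlines(keepends=True)
    return sum(len(l) for l in lines[:line]) + character

text = "using Gtk 4.0;\n\nBox {\n}\n"
# Apply a change replacing "Box" (line 2, characters 0-3) with "Button":
start = pos_to_idx(2, 0, text)
end = pos_to_idx(2, 3, text)
assert text[start:end] == "Box"
text = text[:start] + "Button" + text[end:]
assert "Button {" in text
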
gtkblueprinttool/lsp_utils.py

@@ -18,35 +18,78 @@
 # SPDX-License-Identifier: LGPL-3.0-or-later


-from enum import Enum
+from dataclasses import dataclass
+import enum
+import typing as T

-from . import tokenizer, parser
 from .errors import *
 from .utils import *


-class OpenFile:
-    def __init__(self, uri, text, version):
-        self.uri = uri
-        self.text = text
-        self.version = version
-
-        self._update()
-
-    def apply_changes(self, changes):
-        for change in changes:
-            start = utils.pos_to_idx(change.range.start.line, change.range.start.character, self.text)
-            end = utils.pos_to_idx(change.range.end.line, change.range.end.character, self.text)
-            self.text = self.text[:start] + change.text + self.text[end:]
-        self._update()
-
-    def _update(self):
-        self.diagnostics = []
-        try:
-            self.tokens = tokenizer.tokenize(self.text)
-            self.ast = parser.parse(self.tokens)
-            self.diagnostics += self.ast.errors
-        except MultipleErrors as e:
-            self.diagnostics += e.errors
-        except CompileError as e:
-            self.diagnostics += e
+class TextDocumentSyncKind(enum.IntEnum):
+    None_ = 0
+    Full = 1
+    Incremental = 2
+
+
+class CompletionItemTag(enum.IntEnum):
+    Deprecated = 1
+
+
+class InsertTextFormat(enum.IntEnum):
+    PlainText = 1
+    Snippet = 2
+
+
+class CompletionItemKind(enum.IntEnum):
+    Text = 1
+    Method = 2
+    Function = 3
+    Constructor = 4
+    Field = 5
+    Variable = 6
+    Class = 7
+    Interface = 8
+    Module = 9
+    Property = 10
+    Unit = 11
+    Value = 12
+    Enum = 13
+    Keyword = 14
+    Snippet = 15
+    Color = 16
+    File = 17
+    Reference = 18
+    Folder = 19
+    EnumMember = 20
+    Constant = 21
+    Struct = 22
+    Event = 23
+    Operator = 24
+    TypeParameter = 25
+
+
+@dataclass
+class Completion:
+    label: str
+    kind: CompletionItemKind
+    signature: T.Optional[str] = None
+    deprecated: bool = False
+    docs: T.Optional[str] = None
+    text: T.Optional[str] = None
+    snippet: T.Optional[str] = None
+
+    def to_json(self, snippets: bool):
+        insert_text = self.text or self.label
+        insert_text_format = InsertTextFormat.PlainText
+        if snippets and self.snippet:
+            insert_text = self.snippet
+            insert_text_format = InsertTextFormat.Snippet
+
+        return {
+            "label": self.label,
+            "kind": self.kind,
+            "tags": [CompletionItemTag.Deprecated] if self.deprecated else None,
+            "detail": self.signature,
+            "documentation": self.docs,
+            "deprecated": self.deprecated,
+            "insertText": insert_text,
+            "insertTextFormat": insert_text_format,
+        }
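
Given the Completion dataclass above, a quick example of what a completion item serializes to when the client supports snippet insertion (illustrative values):

c = Completion("style", CompletionItemKind.Keyword, snippet='style "$0";')
c.to_json(snippets=True)
# => {"label": "style", "kind": 14, "tags": None, "detail": None,
#     "documentation": None, "deprecated": False,
#     "insertText": 'style "$0";', "insertTextFormat": 2}

With snippets=False the plain label ("style") is used as the insert text instead, so clients without snippet support still receive usable completions.
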
gtkblueprinttool/main.py

@@ -44,6 +44,7 @@ class BlueprintApp:
         batch_compile.add_argument("inputs", nargs="+", metavar="filenames", default=sys.stdin, type=argparse.FileType('r'))

         lsp = self.add_subcommand("lsp", "Run the language server (for internal use by IDEs)", self.cmd_lsp)
+        lsp.add_argument("--logfile", dest="logfile", default=None, type=argparse.FileType('a'))

         self.add_subcommand("help", "Show this message", self.cmd_help)

@@ -97,14 +98,16 @@ class BlueprintApp:

     def cmd_lsp(self, opts):
-        langserv = LanguageServer()
+        langserv = LanguageServer(opts.logfile)
         langserv.run()


     def _compile(self, data: str) -> str:
         tokens = tokenizer.tokenize(data)
-        ast = parser.parse(tokens)
+        ast, errors = parser.parse(tokens)
+
+        if errors:
+            raise errors
         if len(ast.errors):
             raise MultipleErrors(ast.errors)
gtkblueprinttool/parse_tree.py

@@ -29,13 +29,7 @@ from .errors import assert_true, CompilerBugError, CompileError
 from .tokenizer import Token, TokenType


-_SKIP_TOKENS = [TokenType.COMMENT, TokenType.WHITESPACE]
-_RECOVER_TOKENS = [
-    TokenType.COMMENT,
-    TokenType.STMT_END,
-    TokenType.CLOSE_BLOCK,
-    TokenType.EOF,
-]
+SKIP_TOKENS = [TokenType.COMMENT, TokenType.WHITESPACE]


 class ParseResult(Enum):
@@ -175,7 +169,7 @@ class ParseContext:

     def skip(self):
         """ Skips whitespace and comments. """
-        while self.index < len(self.tokens) and self.tokens[self.index].type in _SKIP_TOKENS:
+        while self.index < len(self.tokens) and self.tokens[self.index].type in SKIP_TOKENS:
             self.index += 1

     def next_token(self) -> Token:
@@ -185,6 +179,15 @@ class ParseContext:
         self.index += 1
         return token

+    def peek_token(self) -> Token:
+        """ Returns the next token without advancing the iterator. """
+        self.skip()
+        token = self.tokens[self.index]
+        return token
+
+    def is_eof(self) -> Token:
+        return self.index >= len(self.tokens) or self.peek_token().type == TokenType.EOF
+

 class ParseNode:
     """ Base class for the nodes in the parser tree. """
@@ -216,17 +219,6 @@ class ParseNode:
         """ Convenience method for err(). """
         return self.err("Expected " + expect)

-    def recover(self):
-        """ Causes the parser to try to recover, even if the ParseNode raises
-        an error. Recovery will log the error so it's still displayed, but
-        skip ahead to the next token in _RECOVERY_TOKENS to try to recover
-        parsing.
-
-        This is important because it allows us to report multiple errors at
-        once in most cases, rather than making the user recompile after
-        fixing each issue. """
-        return Recover(self)
-

 class Err(ParseNode):
     """ ParseNode that emits a compile error if it fails to parse. """
@@ -238,7 +230,7 @@ class Err(ParseNode):
     def _parse(self, ctx):
         if self.child.parse(ctx).failed():
             start_idx = ctx.start
-            while ctx.tokens[start_idx].type in _SKIP_TOKENS:
+            while ctx.tokens[start_idx].type in SKIP_TOKENS:
                 start_idx += 1

             start_token = ctx.tokens[start_idx]
@@ -257,7 +249,7 @@ class Fail(ParseNode):
     def _parse(self, ctx):
         if self.child.parse(ctx).succeeded():
             start_idx = ctx.start
-            while ctx.tokens[start_idx].type in _SKIP_TOKENS:
+            while ctx.tokens[start_idx].type in SKIP_TOKENS:
                 start_idx += 1

             start_token = ctx.tokens[start_idx]
@@ -266,21 +258,6 @@ class Fail(ParseNode):
         return True


-class Recover(ParseNode):
-    """ ParseNode that attempts to recover parsing if an error is emitted. """
-    def __init__(self, child):
-        self.child = child
-
-    def _parse(self, ctx: ParseContext) -> bool:
-        try:
-            return self.child.parse(ctx).succeeded()
-        except CompileError as e:
-            ctx.errors.append(e)
-            while ctx.next_token().type not in _RECOVER_TOKENS:
-                pass
-            return True
-
 class Group(ParseNode):
     """ ParseNode that creates a match group. """
     def __init__(self, ast_type, child):
@@ -305,6 +282,29 @@ class Sequence(ParseNode):
         return True


+class Statement(ParseNode):
+    """ ParseNode that attempts to match all of its children in sequence. If any
+    child raises an error, the error will be logged but parsing will continue. """
+    def __init__(self, *children):
+        self.children = children
+
+    def _parse(self, ctx) -> bool:
+        for child in self.children:
+            try:
+                if child.parse(ctx).failed():
+                    return False
+            except CompileError as e:
+                ctx.errors.append(e)
+                return True
+
+        token = ctx.peek_token()
+        if token.type != TokenType.STMT_END:
+            ctx.errors.append(CompileError("Expected `;`", token.start, token.end))
+        else:
+            ctx.next_token()
+        return True
+
+
 class AnyOf(ParseNode):
     """ ParseNode that attempts to match exactly one of its children. Child
     nodes are attempted in order. """
@@ -318,16 +318,46 @@ class AnyOf(ParseNode):
         return False


+class Until(ParseNode):
+    """ ParseNode that repeats its child until a delimiting token is found. If
+    the child does not match, one token is skipped and the match is attempted
+    again. """
+    def __init__(self, child, delimiter):
+        self.child = child
+        self.delimiter = delimiter
+
+    def _parse(self, ctx):
+        while not self.delimiter.parse(ctx).succeeded():
+            try:
+                if not self.child.parse(ctx).matched():
+                    token = ctx.next_token()
+                    ctx.errors.append(CompileError("Unexpected token", token.start, token.end))
+            except CompileError as e:
+                ctx.errors.append(e)
+                ctx.next_token()
+
+            if ctx.is_eof():
+                return True
+
+        return True
+
+
 class ZeroOrMore(ParseNode):
     """ ParseNode that matches its child any number of times (including zero
-    times). It cannot fail to parse. """
+    times). It cannot fail to parse. If its child raises an exception, one token
+    will be skipped and parsing will continue. """
     def __init__(self, child):
         self.child = child

     def _parse(self, ctx):
-        while self.child.parse(ctx).matched():
-            pass
-        return True
+        while True:
+            try:
+                if not self.child.parse(ctx).matched():
+                    return True
+            except CompileError as e:
+                ctx.errors.append(e)
+                ctx.next_token()


 class Delimited(ParseNode):
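
Taken together, these parse_tree.py changes replace the old Recover wrapper with two finer-grained strategies: Statement recovers at statement boundaries (a missing `;` is logged instead of aborting), and Until skips one token at a time until its delimiter matches. A toy model of the Until idea, separate from the commit's classes (the token strings and the stmt matcher are invented for illustration):

def parse_until(tokens, parse_child, delimiter):
    # Keep trying the child; on failure record an error and skip one token;
    # stop once the delimiter is reached.
    results, errors, i = [], [], 0
    while i < len(tokens) and tokens[i] != delimiter:
        ok, consumed = parse_child(tokens[i:])
        if ok:
            results.append(tokens[i:i + consumed])
            i += consumed
        else:
            errors.append(f"Unexpected token {tokens[i]!r}")
            i += 1
    return results, errors

def stmt(toks):
    # A "child" that accepts `name : ;` triples.
    if len(toks) >= 3 and toks[1] == ":" and toks[2] == ";":
        return True, 3
    return False, 0

tokens = ["spacing", ":", ";", "oops", "label", ":", ";", "}"]
results, errors = parse_until(tokens, stmt, "}")
assert len(results) == 2 and len(errors) == 1  # both good statements survive
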
gtkblueprinttool/parser.py

@@ -24,28 +24,26 @@ from .parse_tree import *
 from .tokenizer import TokenType


-def parse(tokens) -> ast.UI:
+def parse(tokens) -> T.Tuple[ast.UI, T.Optional[MultipleErrors]]:
     """ Parses a list of tokens into an abstract syntax tree. """

     gtk_directive = Group(
         ast.GtkDirective,
-        Sequence(
-            Keyword("using"),
-            Keyword("Gtk"),
+        Statement(
+            Keyword("using").err("File must start with a \"using gtk\" directive (e.g. `using Gtk 4.0;`)"),
+            Keyword("Gtk").err("File must start with a \"using gtk\" directive (e.g. `using Gtk 4.0;`)"),
             UseNumberText("version").expected("a version number for GTK"),
-            StmtEnd().expected("`;`"),
         )
     )

     import_statement = Group(
         ast.Import,
-        Sequence(
+        Statement(
             Keyword("using"),
             UseIdent("namespace").expected("a GIR namespace"),
             UseNumberText("version").expected("a version number"),
-            StmtEnd().expected("`;`"),
         )
-    ).recover()
+    )

     class_name = AnyOf(
         Sequence(
@@ -89,20 +87,19 @@ def parse(tokens) -> ast.UI:

     property = Group(
         ast.Property,
-        Sequence(
+        Statement(
             UseIdent("name"),
             Op(":"),
             AnyOf(
                 object,
                 value,
             ).expected("a value"),
-            StmtEnd().expected("`;`"),
         )
-    ).recover()
+    )

     binding = Group(
         ast.Property,
-        Sequence(
+        Statement(
             UseIdent("name"),
             Op(":"),
             Keyword("bind"),
@@ -113,13 +110,12 @@ def parse(tokens) -> ast.UI:
                 Sequence(Keyword("sync-create"), UseLiteral("sync_create", True)),
                 Sequence(Keyword("after"), UseLiteral("after", True)),
             ),
-            StmtEnd().expected("`;`"),
         )
-    ).recover()
+    )

     signal = Group(
         ast.Signal,
-        Sequence(
+        Statement(
             UseIdent("name"),
             Optional(Sequence(
                 Op("::"),
@@ -134,9 +130,8 @@ def parse(tokens) -> ast.UI:
                 Sequence(Keyword("after"), UseLiteral("after", True)),
                 Sequence(Keyword("object"), UseLiteral("object", True)),
             )),
-            StmtEnd().expected("`;`"),
         )
-    ).recover()
+    )

     child = Group(
         ast.Child,
@@ -152,7 +147,7 @@ def parse(tokens) -> ast.UI:

     style = Group(
         ast.Style,
-        Sequence(
+        Statement(
             Keyword("style"),
             Delimited(
                 Group(
@@ -161,7 +156,6 @@ def parse(tokens) -> ast.UI:
                 ),
                 Comma(),
             ),
-            StmtEnd(),
         )
     )
@@ -204,8 +198,7 @@ def parse(tokens) -> ast.UI:
             UseLiteral("tag", "item"),
             Optional(UseIdent("id")),
             OpenBlock().expected("`{`"),
-            ZeroOrMore(menu_attribute),
-            CloseBlock().err("Could not understand statement"),
+            Until(menu_attribute, CloseBlock()),
         )
     )
@@ -232,14 +225,13 @@ def parse(tokens) -> ast.UI:

     menu_contents.children = [
         OpenBlock().expected("`{`"),
-        ZeroOrMore(AnyOf(
+        Until(AnyOf(
             menu_section,
             menu_submenu,
             menu_item_shorthand,
             menu_item,
             menu_attribute,
-        )),
-        CloseBlock().err("Could not understand statement"),
+        ), CloseBlock()),
     ]

     menu = Group(
@@ -256,14 +248,13 @@ def parse(tokens) -> ast.UI:
         ast.ObjectContent,
         Sequence(
             OpenBlock(),
-            ZeroOrMore(AnyOf(
+            Until(AnyOf(
                 style,
                 binding,
                 property,
                 signal,
                 child,
-            )),
-            CloseBlock().err("Could not understand statement"),
+            ), CloseBlock()),
         )
     )
@@ -288,19 +279,20 @@ def parse(tokens) -> ast.UI:
     ui = Group(
         ast.UI,
         Sequence(
-            gtk_directive.err("File must start with a \"using gtk\" directive (e.g. `using Gtk 4.0;`)"),
+            gtk_directive.err("File must start with a \"using Gtk\" directive (e.g. `using Gtk 4.0;`)"),
             ZeroOrMore(import_statement),
-            ZeroOrMore(AnyOf(
+            Until(AnyOf(
                 template,
                 menu,
                 object,
-            )),
-            Eof().err("Failed to parse the rest of the file"),
+            ), Eof()),
         )
-    ).recover()
+    )

     ctx = ParseContext(tokens)
     ui.parse(ctx)
-    if len(ctx.errors):
-        raise MultipleErrors(ctx.errors)
-    return ctx.last_group.to_ast()
+    ast_node = ctx.last_group.to_ast() if ctx.last_group else None
+    errors = MultipleErrors(ctx.errors) if len(ctx.errors) else None
+
+    return (ast_node, errors)
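
With the changes above, parse() no longer raises on the first failure: it returns the (possibly partial) AST together with any accumulated errors, which is what lets the language server keep serving hover and completion requests on broken documents. A sketch of the new calling convention, assuming the repo's module layout:

from gtkblueprinttool import tokenizer, parser

tokens = tokenizer.tokenize('using Gtk 4.0;\n\nBox {\n  spacing: 6;\n}\n')
ast_node, errors = parser.parse(tokens)

if errors is not None:
    # errors is a MultipleErrors aggregate: report everything at once,
    # but keep ast_node around for IDE features.
    for e in errors.errors:
        print(e)
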
tests/test_parser.py

@@ -65,8 +65,9 @@ class TestParser(unittest.TestCase):
         """

         tokens = tokenize(f)
-        ui = parse(tokens)
+        ui, errors = parse(tokens)
         self.assertIsInstance(ui, UI)
+        self.assertIsNone(errors)
         self.assertEqual(len(ui.errors), 0)

         self.assertIsInstance(ui.gtk_directive, GtkDirective)