lsp: Support completions

James Westman 2021-10-30 17:47:31 -05:00
parent afecd744ca
commit 408f3ebce5
8 changed files with 460 additions and 106 deletions
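
For orientation: the commit wires the compiler's new completion engine into the standard LSP `textDocument/completion` request. A rough sketch of the exchange, written as the Python dicts the server reads and writes over JSON-RPC (illustrative values, not taken from the commit itself):

    # Request from the editor: "what completes at line 3, column 2?"
    request = {
        "jsonrpc": "2.0",
        "id": 1,
        "method": "textDocument/completion",
        "params": {
            "textDocument": {"uri": "file:///example.blp"},
            "position": {"line": 3, "character": 2},
        },
    }

    # One (abridged) item of the reply, as built by Completion.to_json below:
    item = {
        "label": "template",
        "kind": 15,             # CompletionItemKind.Snippet
        "insertText": "template ${1:ClassName} : ${2:ParentClass} {\n $0\n}",
        "insertTextFormat": 2,  # InsertTextFormat.Snippet
    }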

ast.py

@@ -22,6 +22,8 @@ import typing as T
from .ast_utils import *
from .errors import assert_true, AlreadyCaughtError, CompileError, CompilerBugError, MultipleErrors
from .gir import GirContext, get_namespace
from .lsp_utils import Completion, CompletionItemKind
from .tokenizer import Token
from .utils import lazy_prop
from .xml_emitter import XmlEmitter
@@ -29,11 +31,16 @@ from .xml_emitter import XmlEmitter
class AstNode:
""" Base class for nodes in the abstract syntax tree. """
completers: T.List = []
def __init__(self):
self.group = None
self.parent = None
self.child_nodes = None
def __init_subclass__(cls):
cls.completers = []
@lazy_prop
def root(self):
if self.parent is None:
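
A note on the `__init_subclass__` hook added above: without it, every AstNode subclass would share the single class-level `completers` list, so a completer registered for one node type would fire for all of them. A standalone sketch of the difference (toy classes, not the project's):

    class Base:
        registry = []          # shared by every subclass -- the pitfall

        def __init_subclass__(cls):
            cls.registry = []  # each subclass gets its own list instead

    class A(Base): pass
    class B(Base): pass

    A.registry.append("only for A")
    assert B.registry == []    # B is unaffected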
@@ -263,6 +270,17 @@ class ObjectContent(AstNode):
self.children = children
self.style = style
@validate()
def gir_class(self):
parent = self.parent
if isinstance(parent, Template):
return parent.gir_parent
elif isinstance(parent, Object):
return parent.gir_class
else:
raise CompilerBugError()
@validate()
def only_one_style_class(self):
if len(self.style) > 1:

completions.py

@@ -0,0 +1,221 @@
# completions.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
import typing as T
from . import ast
from .lsp_utils import Completion, CompletionItemKind
from .parser import SKIP_TOKENS
from .tokenizer import TokenType, Token
Pattern = T.List[T.Tuple[TokenType, T.Optional[str]]]
def complete(ast_node: ast.AstNode, tokens: T.List[Token], idx: int) -> T.Iterator[Completion]:
for child in ast_node.child_nodes:
if child.group.start <= idx <= child.group.end:
yield from complete(child, tokens, idx)
return
prev_tokens: T.List[Token] = []
token_idx = 0
# find the current token
for i, token in enumerate(tokens):
if token.start < idx <= token.end:
token_idx = i
# collect the 5 previous non-skipped tokens
while len(prev_tokens) < 5 and token_idx >= 0:
token = tokens[token_idx]
if token.type not in SKIP_TOKENS:
prev_tokens.insert(0, token)
token_idx -= 1
for completer in ast_node.completers:
yield from completer.completions(prev_tokens, ast_node)
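
`complete()` first recurses into the innermost child whose token group spans the cursor, so completers run against the most specific AST node, and then hands that node's completers the last five significant tokens before the cursor. A hedged usage sketch (the buffer text and cursor offset are made up; `tokenizer` and `parser` are this repo's modules, which this file does not itself import in full):

    text = "using Gtk 4.0;\n\nGtk."
    tokens = tokenizer.tokenize(text)
    root, errors = parser.parse(tokens)
    for item in complete(root, tokens, len(text)):  # cursor at end of "Gtk."
        print(item.label, item.kind)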
class Completer:
def __init__(self, func):
self.func = func
self.patterns: T.List[Pattern] = []
self.ast_type: T.Optional[T.Type[ast.AstNode]] = None
def completions(self, prev_tokens: T.List[Token], ast_node: ast.AstNode) -> T.Iterator[Completion]:
any_match = len(self.patterns) == 0
match_variables: T.List[str] = []
for pattern in self.patterns:
match_variables = []
if len(pattern) <= len(prev_tokens):
for i in range(0, len(pattern)):
type, value = pattern[i]
token = prev_tokens[i - len(pattern)]
if token.type != type or (value is not None and str(token) != value):
break
if value is None:
match_variables.append(str(token))
else:
any_match = True
break
if not any_match:
return
print("completions", match_variables, self.func)
if self.ast_type is not None:
while ast_node is not None and not isinstance(ast_node, self.ast_type):
ast_node = ast_node.parent
yield from self.func(ast_node, match_variables)
def applies_to(*ast_types):
""" Decorator describing which AST nodes the completer should apply in. """
def _decorator(func):
completer = Completer(func)
for c in ast_types:
c.completers.append(completer)
return completer
return _decorator
def matches(patterns: T.List):
def _decorator(cls):
cls.patterns = patterns
return cls
return _decorator
def ast_type(ast_type: T.Type[ast.AstNode]):
def _decorator(cls):
cls.ast_type = ast_type
return cls
return _decorator
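
Together these three decorators make up the registration API: `@applies_to` appends the completer to each listed AST class's `completers` list, `@matches` restricts it to token patterns ending at the cursor (a `None` value is a wildcard whose matched text is captured into `match_variables`), and `@ast_type` walks up the tree to a node of the given type before invoking the function. A hypothetical completer written against this API:

    @matches([
        [(TokenType.IDENT, "visible"), (TokenType.OP, ":")],
    ])
    @applies_to(ast.ObjectContent)
    def boolean_value_completer(ast_node, match_variables):
        # Offered right after typing `visible:` inside an object.
        # Illustrative only; not part of this commit.
        yield Completion("true", CompletionItemKind.Value)
        yield Completion("false", CompletionItemKind.Value)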
new_statement_patterns = [
[(TokenType.OPEN_BLOCK, None)],
[(TokenType.CLOSE_BLOCK, None)],
[(TokenType.STMT_END, None)],
]
@applies_to(ast.GtkDirective)
def using_gtk(ast_node, match_variables):
yield Completion("using Gtk 4.0;", CompletionItemKind.Keyword)
@matches(new_statement_patterns)
@ast_type(ast.UI)
@applies_to(ast.UI, ast.ObjectContent, ast.Template)
def namespace(ast_node, match_variables):
yield Completion("Gtk", CompletionItemKind.Module, text="Gtk.")
for ns in ast_node.imports:
yield Completion(ns.namespace, CompletionItemKind.Module, text=ns.namespace + ".")
@matches([
[(TokenType.IDENT, None), (TokenType.OP, "."), (TokenType.IDENT, None)],
[(TokenType.IDENT, None), (TokenType.OP, ".")],
])
@applies_to(ast.UI, ast.ObjectContent, ast.Template)
def object_completer(ast_node, match_variables):
ns = ast_node.root.gir.namespaces.get(match_variables[0])
if ns is not None:
for c in ns.classes.values():
yield Completion(c.name, CompletionItemKind.Class, docs=c.doc)
@matches(new_statement_patterns)
@applies_to(ast.ObjectContent)
def property_completer(ast_node, match_variables):
if ast_node.gir_class:
for prop in ast_node.gir_class.properties:
yield Completion(prop, CompletionItemKind.Property, snippet=f"{prop}: $0;")
@matches(new_statement_patterns)
@applies_to(ast.ObjectContent)
def style_completer(ast_node, match_variables):
yield Completion("style", CompletionItemKind.Keyword, snippet="style \"$0\";")
@matches(new_statement_patterns)
@applies_to(ast.ObjectContent)
def signal_completer(ast_node, match_variables):
if ast_node.gir_class:
for signal in ast_node.gir_class.signals:
name = ("on" if not isinstance(ast_node.parent, ast.Object)
else "on_" + (ast_node.parent.id or ast_node.parent.class_name.lower()))
yield Completion(signal, CompletionItemKind.Property, snippet=f"{signal} => ${{1:{name}_{signal.replace('-', '_')}}}()$0;")
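Worked example of the snippet construction above: for signals completed inside an object that has id `btn`, the parent is an `ast.Object`, so `name` is `on_btn` and the `clicked` signal yields the snippet `clicked => ${1:on_btn_clicked}()$0;` (placeholder 1 selects the handler name, `$0` is the final cursor position).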
@matches(new_statement_patterns)
@applies_to(ast.UI)
def template_completer(ast_node, match_variables):
yield Completion(
"template", CompletionItemKind.Snippet,
snippet="template ${1:ClassName} : ${2:ParentClass} {\n $0\n}"
)
@matches(new_statement_patterns)
@applies_to(ast.UI)
def menu_completer(ast_node, match_variables):
yield Completion(
"menu", CompletionItemKind.Snippet,
snippet="menu {\n $0\n}"
)
@matches(new_statement_patterns)
@applies_to(ast.Menu)
def menu_content_completer(ast_node, match_variables):
yield Completion(
"submenu", CompletionItemKind.Snippet,
snippet="submenu {\n $0\n}"
)
yield Completion(
"section", CompletionItemKind.Snippet,
snippet="section {\n $0\n}"
)
yield Completion(
"item", CompletionItemKind.Snippet,
snippet="item {\n $0\n}"
)
yield Completion(
"item (shorthand)", CompletionItemKind.Snippet,
snippet='item _("${1:Label}") "${2:action-name}" "${3:icon-name}";'
)
yield Completion(
"label", CompletionItemKind.Snippet,
snippet='label: $0;'
)
yield Completion(
"action", CompletionItemKind.Snippet,
snippet='action: "$0";'
)
yield Completion(
"icon", CompletionItemKind.Snippet,
snippet='icon: "$0";'
)

lsp.py

@@ -21,6 +21,7 @@
import typing as T
import json, sys, traceback
from .completions import complete
from .errors import PrintableError, CompileError, MultipleErrors
from .lsp_utils import *
from . import tokenizer, parser, utils, xml_reader
@@ -33,12 +34,44 @@ def command(json_method):
return decorator
class OpenFile:
def __init__(self, uri, text, version):
self.uri = uri
self.text = text
self.version = version
self.ast = None
self.tokens = None
self._update()
def apply_changes(self, changes):
for change in changes:
start = utils.pos_to_idx(change["range"]["start"]["line"], change["range"]["start"]["character"], self.text)
end = utils.pos_to_idx(change["range"]["end"]["line"], change["range"]["end"]["character"], self.text)
self.text = self.text[:start] + change["text"] + self.text[end:]
self._update()
def _update(self):
self.diagnostics = []
try:
self.tokens = tokenizer.tokenize(self.text)
self.ast, errors = parser.parse(self.tokens)
if errors is not None:
self.diagnostics += errors.errors
self.diagnostics += self.ast.errors
except MultipleErrors as e:
self.diagnostics += e.errors
except CompileError as e:
self.diagnostics.append(e)
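
`OpenFile` implements incremental sync: each change from the client covers a line/character range, which `utils.pos_to_idx` flattens into string offsets before the replacement text is spliced in, and `_update()` then re-tokenizes and re-parses the whole buffer. A sketch of one change payload (illustrative values):

    change = {
        "range": {
            "start": {"line": 2, "character": 0},
            "end": {"line": 2, "character": 0},
        },
        "text": "Gtk.",
    }
    open_file.apply_changes([change])  # splices in "Gtk." at line 2, col 0
    print(open_file.diagnostics)       # recomputed by _update()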
class LanguageServer:
commands: T.Dict[str, T.Callable] = {}
def __init__(self):
def __init__(self, logfile=None):
self.client_capabilities = {}
self._open_files: T.Dict[str, OpenFile] = {}
self.logfile = logfile
def run(self):
# Read <doc> tags from gir files. During normal compilation these are
@@ -102,8 +135,9 @@ class LanguageServer:
"capabilities": {
"textDocumentSync": {
"openClose": True,
"change": 2, # incremental
"change": TextDocumentSyncKind.Incremental,
},
"completionProvider": {},
"hoverProvider": True,
}
})
@@ -133,8 +167,8 @@ class LanguageServer:
@command("textDocument/hover")
def hover(self, id, params):
open_file = self._open_files[params["textDocument"]["uri"]]
docs = open_file.ast.get_docs(utils.pos_to_idx(params["position"]["line"], params["position"]["character"], open_file.text))
if docs is not None:
docs = open_file.ast and open_file.ast.get_docs(utils.pos_to_idx(params["position"]["line"], params["position"]["character"], open_file.text))
if docs:
self._send_response(id, {
"contents": {
"kind": "markdown",
@@ -144,6 +178,18 @@ class LanguageServer:
else:
self._send_response(id, None)
@command("textDocument/completion")
def completion(self, id, params):
open_file = self._open_files[params["textDocument"]["uri"]]
if open_file.ast is None:
self._send_response(id, [])
return
idx = utils.pos_to_idx(params["position"]["line"], params["position"]["character"], open_file.text)
completions = complete(open_file.ast, open_file.tokens, idx)
self._send_response(id, [completion.to_json(True) for completion in completions])
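
The handler always passes `True` (snippets enabled) to `to_json()`. Strictly, the LSP spec gates snippet syntax behind the client's `snippetSupport` capability; a client-aware variant (hypothetical, not in this commit) could consult the capabilities recorded at initialize:

    snippets_ok = (self.client_capabilities
                   .get("textDocument", {})
                   .get("completion", {})
                   .get("completionItem", {})
                   .get("snippetSupport", False))
    self._send_response(id, [c.to_json(snippets_ok) for c in completions])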
def _send_file_updates(self, open_file: OpenFile):
self._send_notification("textDocument/publishDiagnostics", {

lsp_utils.py

@@ -18,35 +18,78 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
from enum import Enum
from dataclasses import dataclass
import enum
import typing as T
from . import tokenizer, parser
from .errors import *
from .utils import *
class OpenFile:
def __init__(self, uri, text, version):
self.uri = uri
self.text = text
self.version = version
class TextDocumentSyncKind(enum.IntEnum):
None_ = 0
Full = 1
Incremental = 2
self._update()
class CompletionItemTag(enum.IntEnum):
Deprecated = 1
def apply_changes(self, changes):
for change in changes:
start = utils.pos_to_idx(change.range.start.line, change.range.start.character, self.text)
end = utils.pos_to_idx(change.range.end.line, change.range.end.character, self.text)
self.text = self.text[:start] + change.text + self.text[end:]
self._update()
class InsertTextFormat(enum.IntEnum):
PlainText = 1
Snippet = 2
def _update(self):
self.diagnostics = []
try:
self.tokens = tokenizer.tokenize(self.text)
self.ast = parser.parse(self.tokens)
self.diagnostics += self.ast.errors
except MultipleErrors as e:
self.diagnostics += e.errors
except CompileError as e:
self.diagnostics += e
class CompletionItemKind(enum.IntEnum):
Text = 1
Method = 2
Function = 3
Constructor = 4
Field = 5
Variable = 6
Class = 7
Interface = 8
Module = 9
Property = 10
Unit = 11
Value = 12
Enum = 13
Keyword = 14
Snippet = 15
Color = 16
File = 17
Reference = 18
Folder = 19
EnumMember = 20
Constant = 21
Struct = 22
Event = 23
Operator = 24
TypeParameter = 25
@dataclass
class Completion:
label: str
kind: CompletionItemKind
signature: T.Optional[str] = None
deprecated: bool = False
docs: T.Optional[str] = None
text: T.Optional[str] = None
snippet: T.Optional[str] = None
def to_json(self, snippets: bool):
insert_text = self.text or self.label
insert_text_format = InsertTextFormat.PlainText
if snippets and self.snippet:
insert_text = self.snippet
insert_text_format = InsertTextFormat.Snippet
return {
"label": self.label,
"kind": self.kind,
"tags": [CompletionItemTag.Deprecated] if self.deprecated else None,
"detail": self.signature,
"documentation": self.docs,
"deprecated": self.deprecated,
"insertText": insert_text,
"insertTextFormat": insert_text_format,
}
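
For a snippet-style item, `to_json(True)` produces a dict like the following (worked example using the `menu` completer's values; the enum members serialize to their integer values):

    Completion("menu", CompletionItemKind.Snippet, snippet="menu {\n $0\n}").to_json(True)
    # => {
    #     "label": "menu",
    #     "kind": 15,             # CompletionItemKind.Snippet
    #     "tags": None,
    #     "detail": None,
    #     "documentation": None,
    #     "deprecated": False,
    #     "insertText": "menu {\n $0\n}",
    #     "insertTextFormat": 2,  # InsertTextFormat.Snippet
    # }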

main.py

@@ -44,6 +44,7 @@ class BlueprintApp:
batch_compile.add_argument("inputs", nargs="+", metavar="filenames", default=sys.stdin, type=argparse.FileType('r'))
lsp = self.add_subcommand("lsp", "Run the language server (for internal use by IDEs)", self.cmd_lsp)
lsp.add_argument("--logfile", dest="logfile", default=None, type=argparse.FileType('a'))
self.add_subcommand("help", "Show this message", self.cmd_help)
@@ -97,14 +98,16 @@ class BlueprintApp:
def cmd_lsp(self, opts):
langserv = LanguageServer()
langserv = LanguageServer(opts.logfile)
langserv.run()
def _compile(self, data: str) -> str:
tokens = tokenizer.tokenize(data)
ast = parser.parse(tokens)
ast, errors = parser.parse(tokens)
if errors:
raise errors
if len(ast.errors):
raise MultipleErrors(ast.errors)

parse_tree.py

@@ -29,13 +29,7 @@ from .errors import assert_true, CompilerBugError, CompileError
from .tokenizer import Token, TokenType
_SKIP_TOKENS = [TokenType.COMMENT, TokenType.WHITESPACE]
_RECOVER_TOKENS = [
TokenType.COMMENT,
TokenType.STMT_END,
TokenType.CLOSE_BLOCK,
TokenType.EOF,
]
SKIP_TOKENS = [TokenType.COMMENT, TokenType.WHITESPACE]
class ParseResult(Enum):
@@ -175,7 +169,7 @@ class ParseContext:
def skip(self):
""" Skips whitespace and comments. """
while self.index < len(self.tokens) and self.tokens[self.index].type in _SKIP_TOKENS:
while self.index < len(self.tokens) and self.tokens[self.index].type in SKIP_TOKENS:
self.index += 1
def next_token(self) -> Token:
@@ -185,6 +179,15 @@ class ParseContext:
self.index += 1
return token
def peek_token(self) -> Token:
""" Returns the next token without advancing the iterator. """
self.skip()
token = self.tokens[self.index]
return token
def is_eof(self) -> bool:
return self.index >= len(self.tokens) or self.peek_token().type == TokenType.EOF
class ParseNode:
""" Base class for the nodes in the parser tree. """
@@ -216,17 +219,6 @@ class ParseNode:
""" Convenience method for err(). """
return self.err("Expected " + expect)
def recover(self):
""" Causes the parser to try to recover, even if the ParseNode raises
an error. Recovery will log the error so it's still displayed, but
skip ahead to the next token in _RECOVER_TOKENS to try to recover
parsing.
This is important because it allows us to report multiple errors at
once in most cases, rather than making the user recompile after
fixing each issue. """
return Recover(self)
class Err(ParseNode):
""" ParseNode that emits a compile error if it fails to parse. """
@@ -238,7 +230,7 @@ class Err(ParseNode):
def _parse(self, ctx):
if self.child.parse(ctx).failed():
start_idx = ctx.start
while ctx.tokens[start_idx].type in _SKIP_TOKENS:
while ctx.tokens[start_idx].type in SKIP_TOKENS:
start_idx += 1
start_token = ctx.tokens[start_idx]
@@ -257,7 +249,7 @@ class Fail(ParseNode):
def _parse(self, ctx):
if self.child.parse(ctx).succeeded():
start_idx = ctx.start
while ctx.tokens[start_idx].type in _SKIP_TOKENS:
while ctx.tokens[start_idx].type in SKIP_TOKENS:
start_idx += 1
start_token = ctx.tokens[start_idx]
@@ -266,21 +258,6 @@
return True
class Recover(ParseNode):
""" ParseNode that attempts to recover parsing if an error is emitted. """
def __init__(self, child):
self.child = child
def _parse(self, ctx: ParseContext) -> bool:
try:
return self.child.parse(ctx).succeeded()
except CompileError as e:
ctx.errors.append(e)
while ctx.next_token().type not in _RECOVER_TOKENS:
pass
return True
class Group(ParseNode):
""" ParseNode that creates a match group. """
def __init__(self, ast_type, child):
@@ -305,6 +282,29 @@ class Sequence(ParseNode):
return True
class Statement(ParseNode):
""" ParseNode that attempts to match all of its children in sequence. If any
child raises an error, the error will be logged but parsing will continue. """
def __init__(self, *children):
self.children = children
def _parse(self, ctx) -> bool:
for child in self.children:
try:
if child.parse(ctx).failed():
return False
except CompileError as e:
ctx.errors.append(e)
return True
token = ctx.peek_token()
if token.type != TokenType.STMT_END:
ctx.errors.append(CompileError("Expected `;`", token.start, token.end))
else:
ctx.next_token()
return True
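
`Statement` is what replaces the old `Recover` wrapper at the grammar level: a child that raises `CompileError` gets its error logged while the statement still counts as consumed, and the trailing `;` is checked once here instead of via `StmtEnd().expected(...)` in every rule. A hedged sketch of the contract, for a hypothetical rule built from node types used later in this commit:

    prop = Statement(UseIdent("name"), Op(":"), UseNumberText("value"))
    # - if UseIdent does not match, _parse returns False: no statement here
    # - if a later child raises CompileError, the error is appended to
    #   ctx.errors and the statement still counts as parsed (returns True)
    # - if the next token is not `;`, "Expected `;`" is logged and parsing
    #   continues instead of aborting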
class AnyOf(ParseNode):
""" ParseNode that attempts to match exactly one of its children. Child
nodes are attempted in order. """
@@ -318,16 +318,46 @@
return False
class Until(ParseNode):
""" ParseNode that repeats its child until a delimiting token is found. If
the child does not match, one token is skipped and the match is attempted
again. """
def __init__(self, child, delimiter):
self.child = child
self.delimiter = delimiter
def _parse(self, ctx):
while not self.delimiter.parse(ctx).succeeded():
try:
if not self.child.parse(ctx).matched():
token = ctx.next_token()
ctx.errors.append(CompileError("Unexpected token", token.start, token.end))
except CompileError as e:
ctx.errors.append(e)
ctx.next_token()
if ctx.is_eof():
return True
return True
class ZeroOrMore(ParseNode):
""" ParseNode that matches its child any number of times (including zero
times). It cannot fail to parse. """
times). It cannot fail to parse. If its child raises an exception, one token
will be skipped and parsing will continue. """
def __init__(self, child):
self.child = child
def _parse(self, ctx):
while self.child.parse(ctx).matched():
pass
while True:
try:
if not self.child.parse(ctx).matched():
return True
except CompileError as e:
ctx.errors.append(e)
ctx.next_token()
class Delimited(ParseNode):

parser.py

@@ -24,28 +24,26 @@ from .parse_tree import *
from .tokenizer import TokenType
def parse(tokens) -> ast.UI:
def parse(tokens) -> T.Tuple[ast.UI, T.Optional[MultipleErrors]]:
""" Parses a list of tokens into an abstract syntax tree. """
gtk_directive = Group(
ast.GtkDirective,
Sequence(
Keyword("using"),
Keyword("Gtk"),
Statement(
Keyword("using").err("File must start with a \"using gtk\" directive (e.g. `using Gtk 4.0;`)"),
Keyword("Gtk").err("File must start with a \"using gtk\" directive (e.g. `using Gtk 4.0;`)"),
UseNumberText("version").expected("a version number for GTK"),
StmtEnd().expected("`;`"),
)
)
import_statement = Group(
ast.Import,
Sequence(
Statement(
Keyword("using"),
UseIdent("namespace").expected("a GIR namespace"),
UseNumberText("version").expected("a version number"),
StmtEnd().expected("`;`"),
)
).recover()
)
class_name = AnyOf(
Sequence(
@@ -89,20 +87,19 @@ def parse(tokens) -> ast.UI:
property = Group(
ast.Property,
Sequence(
Statement(
UseIdent("name"),
Op(":"),
AnyOf(
object,
value,
).expected("a value"),
StmtEnd().expected("`;`"),
)
).recover()
)
binding = Group(
ast.Property,
Sequence(
Statement(
UseIdent("name"),
Op(":"),
Keyword("bind"),
@@ -113,13 +110,12 @@ def parse(tokens) -> ast.UI:
Sequence(Keyword("sync-create"), UseLiteral("sync_create", True)),
Sequence(Keyword("after"), UseLiteral("after", True)),
),
StmtEnd().expected("`;`"),
)
).recover()
)
signal = Group(
ast.Signal,
Sequence(
Statement(
UseIdent("name"),
Optional(Sequence(
Op("::"),
@@ -134,9 +130,8 @@ def parse(tokens) -> ast.UI:
Sequence(Keyword("after"), UseLiteral("after", True)),
Sequence(Keyword("object"), UseLiteral("object", True)),
)),
StmtEnd().expected("`;`"),
)
).recover()
)
child = Group(
ast.Child,
@@ -152,7 +147,7 @@ def parse(tokens) -> ast.UI:
style = Group(
ast.Style,
Sequence(
Statement(
Keyword("style"),
Delimited(
Group(
@@ -161,7 +156,6 @@ def parse(tokens) -> ast.UI:
),
Comma(),
),
StmtEnd(),
)
)
@@ -204,8 +198,7 @@ def parse(tokens) -> ast.UI:
UseLiteral("tag", "item"),
Optional(UseIdent("id")),
OpenBlock().expected("`{`"),
ZeroOrMore(menu_attribute),
CloseBlock().err("Could not understand statement"),
Until(menu_attribute, CloseBlock()),
)
)
@@ -232,14 +225,13 @@ def parse(tokens) -> ast.UI:
menu_contents.children = [
OpenBlock().expected("`{`"),
ZeroOrMore(AnyOf(
Until(AnyOf(
menu_section,
menu_submenu,
menu_item_shorthand,
menu_item,
menu_attribute,
)),
CloseBlock().err("Could not understand statement"),
), CloseBlock()),
]
menu = Group(
@@ -256,14 +248,13 @@ def parse(tokens) -> ast.UI:
ast.ObjectContent,
Sequence(
OpenBlock(),
ZeroOrMore(AnyOf(
Until(AnyOf(
style,
binding,
property,
signal,
child,
)),
CloseBlock().err("Could not understand statement"),
), CloseBlock()),
)
)
@@ -288,19 +279,20 @@ def parse(tokens) -> ast.UI:
ui = Group(
ast.UI,
Sequence(
gtk_directive.err("File must start with a \"using gtk\" directive (e.g. `using Gtk 4.0;`)"),
gtk_directive.err("File must start with a \"using Gtk\" directive (e.g. `using Gtk 4.0;`)"),
ZeroOrMore(import_statement),
ZeroOrMore(AnyOf(
Until(AnyOf(
template,
menu,
object,
)),
Eof().err("Failed to parse the rest of the file"),
), Eof()),
)
)
).recover()
ctx = ParseContext(tokens)
ui.parse(ctx)
if len(ctx.errors):
raise MultipleErrors(ctx.errors)
return ctx.last_group.to_ast()
ast_node = ctx.last_group.to_ast() if ctx.last_group else None
errors = MultipleErrors(ctx.errors) if len(ctx.errors) else None
return (ast_node, errors)
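
`parse()` no longer raises on the first batch of errors; it always returns an `(ast, errors)` pair, so callers such as the language server can keep a best-effort AST for hover and completions even when the file has problems. A hedged usage sketch (input deliberately missing a `;`; `tokenizer.tokenize` is this repo's tokenizer):

    tokens = tokenizer.tokenize('using Gtk 4.0;\nGtk.Label { label: "hi" }')
    ast_root, errors = parse(tokens)
    # errors is a MultipleErrors wrapping the logged CompileErrors (or None);
    # ast_root is still a usable ast.UI thanks to the recovery above.
    if errors is not None:
        diagnostics = errors.errors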

test_parser.py

@@ -65,8 +65,9 @@ class TestParser(unittest.TestCase):
"""
tokens = tokenize(f)
ui = parse(tokens)
ui, errors = parse(tokens)
self.assertIsInstance(ui, UI)
self.assertIsNone(errors)
self.assertEqual(len(ui.errors), 0)
self.assertIsInstance(ui.gtk_directive, GtkDirective)