lsp: Implement semantic tokens

James Westman 2021-11-01 23:52:36 -05:00
parent 7a65956195
commit dfb09b9357
6 changed files with 65 additions and 5 deletions

View file

@@ -22,7 +22,7 @@ import typing as T
 from .ast_utils import *
 from .errors import assert_true, AlreadyCaughtError, CompileError, CompilerBugError, MultipleErrors
 from . import gir
-from .lsp_utils import Completion, CompletionItemKind
+from .lsp_utils import Completion, CompletionItemKind, SemanticToken, SemanticTokenType
 from .tokenizer import Token
 from .utils import lazy_prop
 from .xml_emitter import XmlEmitter

@@ -398,6 +398,12 @@ class IdentValue(Value):
         return type.doc
 
+    def get_semantic_tokens(self) -> T.Iterator[SemanticToken]:
+        if isinstance(self.parent.value_type, gir.Enumeration):
+            token = self.group.tokens["value"]
+            yield SemanticToken(token.start, token.end, SemanticTokenType.EnumMember)
+
 
 class BaseAttribute(AstNode):
     """ A helper class for attribute syntax of the form `name: literal_value;`"""

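To make the new IdentValue.get_semantic_tokens concrete: when the expected value type is a GIR enumeration, the node yields one token covering the value identifier. A hedged illustration, with an invented snippet and offsets (not from the commit):

    # Illustration only: the snippet and offsets are invented.
    # For a property whose type is an enum, the highlighted span is the
    # value identifier itself, i.e. the group's "value" token.
    source = "halign: center;"
    start, end = 8, 14            # span of "center"
    print(source[start:end])      # center, reported as an enumMember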
View file

@@ -22,6 +22,7 @@ from collections import ChainMap, defaultdict
 from . import ast
 from .errors import *
+from .lsp_utils import SemanticToken
 from .utils import lazy_prop
 from .xml_emitter import XmlEmitter

@@ -112,6 +113,10 @@ class AstNode:
         return None
 
+    def get_semantic_tokens(self) -> T.Iterator[SemanticToken]:
+        for child in self.children:
+            yield from child.get_semantic_tokens()
+
 
 def validate(token_name=None, end_token_name=None, skip_incomplete=False):
     """ Decorator for functions that validate an AST node. Exceptions raised

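The base-class method establishes the traversal contract: by default a node only recurses into its children, and node types that carry highlightable tokens (like IdentValue above) override it to yield their own. A toy, runnable sketch of the same pattern, using stand-in classes rather than the real AstNode hierarchy:

    import typing as T

    class Node:
        """Stand-in for AstNode: by default, only recurse."""
        def __init__(self, children: T.Optional[T.List["Node"]] = None):
            self.children = children or []

        def get_semantic_tokens(self) -> T.Iterator[T.Tuple[int, int]]:
            for child in self.children:
                yield from child.get_semantic_tokens()

    class EnumValueNode(Node):
        """Stand-in for a node type that produces a token."""
        def __init__(self, span: T.Tuple[int, int]):
            super().__init__()
            self.span = span

        def get_semantic_tokens(self) -> T.Iterator[T.Tuple[int, int]]:
            yield self.span                        # this node's own token
            yield from super().get_semantic_tokens()

    tree = Node([Node([EnumValueNode((8, 14))]), EnumValueNode((20, 26))])
    print(list(tree.get_semantic_tokens()))        # [(8, 14), (20, 26)]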
View file

@@ -78,7 +78,10 @@ class CompileError(PrintableError):
     def pretty_print(self, filename, code):
         line_num, col_num = utils.idx_to_pos(self.start + 1, code)
-        line = code.splitlines(True)[line_num-1]
+        line = code.splitlines(True)[line_num]
+
+        # Display 1-based line numbers
+        line_num += 1
 
         print(f"""{_colors.RED}{_colors.BOLD}{self.category}: {self.message}{_colors.CLEAR}
 at {filename} line {line_num} column {col_num}:
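Both edits follow from the convention change in utils.idx_to_pos (the last hunk in this commit): it now returns a 0-based line number, so the list index drops its -1 and only the human-facing message converts back to 1-based. A minimal sketch of the convention:

    # 0-based positions internally, 1-based only for display.
    code = "first\nsecond\nthird\n"
    line_num, col_num = 1, 3                  # as idx_to_pos now returns
    line = code.splitlines(True)[line_num]    # direct index, no -1
    print(f"line {line_num + 1} column {col_num}: {line!r}")
    # line 2 column 3: 'second\n'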

View file

@@ -65,6 +65,26 @@ class OpenFile:
             self.diagnostics.append(e)
 
+    def calc_semantic_tokens(self) -> T.List[int]:
+        tokens = list(self.ast.get_semantic_tokens())
+        token_lists = [
+            [
+                *utils.idx_to_pos(token.start, self.text), # line and column
+                token.end - token.start, # length
+                token.type,
+                0, # token modifiers
+            ] for token in tokens]
+
+        # convert line, column numbers to deltas
+        for i, token_list in enumerate(token_lists[1:]):
+            token_list[0] -= token_lists[i][0]
+            if token_list[0] == 0:
+                token_list[1] -= token_lists[i][1]
+
+        # flatten the list
+        return [x for y in token_lists for x in y]
+
 
 class LanguageServer:
     commands: T.Dict[str, T.Callable] = {}
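calc_semantic_tokens builds the flat array the LSP specification defines: five integers per token (deltaLine, deltaStartChar, length, tokenType, tokenModifiers), with each position encoded relative to the previous token. One subtlety worth noting: the loop reads token_lists[i] after the previous iteration has already rewritten that entry in place, so once three or more tokens span different lines the line deltas compound. A stand-alone sketch (positions invented) that encodes against the previous token's absolute position instead:

    import typing as T

    def encode_deltas(token_lists: T.List[T.List[int]]) -> T.List[int]:
        # Relative encoding per the LSP spec, tracking the previous
        # token's *absolute* position explicitly.
        prev_line, prev_col = 0, 0
        data: T.List[int] = []
        for line, col, length, ttype, mods in token_lists:
            delta_line = line - prev_line
            delta_col = col - prev_col if delta_line == 0 else col
            data += [delta_line, delta_col, length, ttype, mods]
            prev_line, prev_col = line, col
        return data

    # Tokens at (line 0, col 4), (line 0, col 15), (line 2, col 8):
    print(encode_deltas([[0, 4, 6, 0, 0], [0, 15, 5, 0, 0], [2, 8, 4, 0, 0]]))
    # [0, 4, 6, 0, 0, 0, 11, 5, 0, 0, 2, 8, 4, 0, 0]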
@@ -137,6 +157,12 @@ class LanguageServer:
                 "openClose": True,
                 "change": TextDocumentSyncKind.Incremental,
             },
+            "semanticTokensProvider": {
+                "legend": {
+                    "tokenTypes": ["enumMember"],
+                },
+                "full": True,
+            },
             "completionProvider": {},
             "hoverProvider": True,
         }
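The tokenType integers in the data array are indices into this legend, so SemanticTokenType.EnumMember = 0 must stay aligned with the position of "enumMember" in tokenTypes. One way to keep the two in sync by construction (a sketch, not what the commit does):

    # Sketch: derive the legend from the enum so indices cannot drift.
    import enum

    class SemanticTokenType(enum.IntEnum):
        EnumMember = 0

    def token_type_legend():
        # LSP expects lowerCamelCase names, e.g. "enumMember"
        members = sorted(SemanticTokenType, key=lambda t: t.value)
        return [m.name[0].lower() + m.name[1:] for m in members]

    print(token_type_legend())  # ['enumMember']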
@@ -191,6 +217,15 @@ class LanguageServer:
         self._send_response(id, [completion.to_json(True) for completion in completions])
 
+    @command("textDocument/semanticTokens/full")
+    def semantic_tokens(self, id, params):
+        open_file = self._open_files[params["textDocument"]["uri"]]
+
+        self._send_response(id, {
+            "data": open_file.calc_semantic_tokens(),
+        })
+
     def _send_file_updates(self, open_file: OpenFile):
         self._send_notification("textDocument/publishDiagnostics", {
             "uri": open_file.uri,
@@ -202,8 +237,8 @@ class LanguageServer:
         end_l, end_c = utils.idx_to_pos(err.end or err.start, text)
         return {
             "range": {
-                "start": { "line": start_l - 1, "character": start_c },
-                "end": { "line": end_l - 1, "character": end_c },
+                "start": { "line": start_l, "character": start_c },
+                "end": { "line": end_l, "character": end_c },
             },
             "message": err.message,
             "severity": 1,

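For reference, the shape of the textDocument/semanticTokens/full exchange handled above, written as Python literals (the id, URI, and token data are invented):

    # Hypothetical request/response pair for the semantic_tokens handler.
    request = {
        "jsonrpc": "2.0",
        "id": 1,
        "method": "textDocument/semanticTokens/full",
        "params": {"textDocument": {"uri": "file:///example.blp"}},
    }
    response = {
        "jsonrpc": "2.0",
        "id": 1,
        # five integers per token: deltaLine, deltaStartChar, length,
        # tokenType (index into the legend), tokenModifiers
        "result": {"data": [0, 4, 6, 0, 0, 2, 8, 4, 0, 0]},
    }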
View file

@@ -94,3 +94,14 @@ class Completion:
             "insertTextFormat": insert_text_format,
         }
         return { k: v for k, v in result.items() if v is not None }
+
+
+class SemanticTokenType(enum.IntEnum):
+    EnumMember = 0
+
+
+@dataclass
+class SemanticToken:
+    start: int
+    end: int
+    type: SemanticTokenType
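Making SemanticTokenType an IntEnum is what lets calc_semantic_tokens place token.type directly into the integer array: members serialize as plain integers, i.e. as indices into the capability legend. A small self-contained usage sketch (offsets invented; the real module already has the enum and dataclass imports):

    import enum
    import json
    from dataclasses import dataclass

    class SemanticTokenType(enum.IntEnum):
        EnumMember = 0

    @dataclass
    class SemanticToken:
        start: int
        end: int
        type: SemanticTokenType

    t = SemanticToken(start=8, end=14, type=SemanticTokenType.EnumMember)
    print(t.end - t.start)       # 6, the length sent on the wire
    print(json.dumps([t.type]))  # [0], an IntEnum dumps as its integer value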

View file

@@ -73,7 +73,7 @@ def idx_to_pos(idx: int, text: str) -> T.Tuple[int, int]:
     sp = text[:idx].splitlines(keepends=True)
     line_num = len(sp)
     col_num = len(sp[-1])
-    return (line_num, col_num)
+    return (line_num - 1, col_num)
 
 def pos_to_idx(line: int, col: int, text: str) -> int:
     lines = text.splitlines(keepends=True)
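This one-line change is what the earlier hunks compensate for: idx_to_pos now returns a 0-based line number, matching LSP positions, so callers no longer subtract 1. Traced by hand on a small input:

    # idx_to_pos, traced on a small input (a sketch of the changed return).
    text = "abc\ndef\n"
    idx = 5                                     # points at the "e" in "def"
    sp = text[:idx].splitlines(keepends=True)   # ["abc\n", "d"]
    print((len(sp) - 1, len(sp[-1])))           # (1, 1): 0-based line, column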