lsp: Code cleanup

This commit is contained in:
James Westman 2021-10-24 14:36:54 -05:00
parent 49658c634e
commit 8fc0efb642
No known key found for this signature in database
GPG key ID: CE2DBA0ADB654EA6
5 changed files with 197 additions and 116 deletions

View file

@ -17,69 +17,15 @@
# #
# SPDX-License-Identifier: LGPL-3.0-or-later # SPDX-License-Identifier: LGPL-3.0-or-later
import typing as T
from .ast_utils import *
from .errors import assert_true, AlreadyCaughtError, CompileError, CompilerBugError, MultipleErrors from .errors import assert_true, AlreadyCaughtError, CompileError, CompilerBugError, MultipleErrors
from .gir import GirContext, get_namespace from .gir import GirContext, get_namespace
from .utils import lazy_prop from .utils import lazy_prop
from .xml_emitter import XmlEmitter from .xml_emitter import XmlEmitter
class Validator():
    """Data descriptor that runs a validation function once per AST node,
    caches the result, and attaches source-range information to any
    CompileError raised during validation."""

    def __init__(self, func, token_name=None, end_token_name=None):
        self.func = func
        # Token names used to locate the error's source range when the
        # raised CompileError does not already carry one.
        self.token_name = token_name
        self.end_token_name = end_token_name

    def __get__(self, instance, owner):
        if instance is None:
            return self

        # Cache key is derived from the wrapped function's name, so each
        # validator stores its result in the instance's __dict__.
        key = "_validation_result_" + self.func.__name__

        if key + "_err" in instance.__dict__:
            # If the validator has failed before, raise a generic Exception.
            # We want anything that depends on this validation result to
            # fail, but not report the exception twice.
            raise AlreadyCaughtError()

        if key not in instance.__dict__:
            try:
                instance.__dict__[key] = self.func(instance)
            except CompileError as e:
                # Mark the validator as already failed so we don't print the
                # same message again
                instance.__dict__[key + "_err"] = True

                # This mess of code sets the error's start and end positions
                # from the tokens passed to the decorator, if they have not
                # already been set
                if self.token_name is not None and e.start is None:
                    group = instance.group.tokens.get(self.token_name)
                    if self.end_token_name is not None and group is None:
                        group = instance.group.tokens[self.end_token_name]
                    # NOTE(review): if neither token lookup finds a group,
                    # `group` is None here and the next line raises
                    # AttributeError instead of re-raising the CompileError.
                    e.start = group.start
                if (self.token_name is not None or self.end_token_name is not None) and e.end is None:
                    e.end = instance.group.tokens[self.end_token_name or self.token_name].end

                # Re-raise the exception
                raise e

        # Return the validation result (which other validators, or the code
        # generation phase, might depend on)
        return instance.__dict__[key]
def validate(*args, **kwargs):
    """ Decorator for functions that validate an AST node. Exceptions raised
    during validation are marked with range information from the tokens. Also
    creates a cached property out of the function.

    Positional/keyword arguments are forwarded to Validator
    (token_name, end_token_name). """

    def decorator(func):
        return Validator(func, *args, **kwargs)

    return decorator
class AstNode: class AstNode:
""" Base class for nodes in the abstract syntax tree. """ """ Base class for nodes in the abstract syntax tree. """
@ -100,9 +46,7 @@ class AstNode:
return list(self._get_errors()) return list(self._get_errors())
def _get_errors(self): def _get_errors(self):
for name in dir(type(self)): for name, attr in self._attrs_by_type(Validator):
item = getattr(type(self), name)
if isinstance(item, Validator):
try: try:
getattr(self, name) getattr(self, name)
except AlreadyCaughtError: except AlreadyCaughtError:
@ -113,6 +57,11 @@ class AstNode:
for child in self.child_nodes: for child in self.child_nodes:
yield from child._get_errors() yield from child._get_errors()
def _attrs_by_type(self, attr_type):
for name in dir(type(self)):
item = getattr(type(self), name)
if isinstance(item, attr_type):
yield name, item
def generate(self) -> str: def generate(self) -> str:
""" Generates an XML string from the node. """ """ Generates an XML string from the node. """
@ -124,6 +73,21 @@ class AstNode:
""" Emits the XML representation of this AST node to the XmlEmitter. """ """ Emits the XML representation of this AST node to the XmlEmitter. """
raise NotImplementedError() raise NotImplementedError()
def get_docs(self, idx: int) -> T.Optional[str]:
for name, attr in self._attrs_by_type(Docs):
if attr.token_name:
token = self.group.tokens.get(attr.token_name)
if token.start <= idx < token.end:
return getattr(self, name)
else:
return getattr(self, name)
for child in self.child_nodes:
if child.group.start <= idx < child.group.end:
docs = child.get_docs(idx)
if docs is not None:
return docs
class UI(AstNode): class UI(AstNode):
""" The AST node for the entire file """ """ The AST node for the entire file """
@ -240,6 +204,16 @@ class Object(AstNode):
def gir_class(self): def gir_class(self):
return self.root.gir.get_class(self.class_name, self.namespace) return self.root.gir.get_class(self.class_name, self.namespace)
@docs("namespace")
def namespace_docs(self):
return self.root.gir.namespaces[self.namespace].doc
@docs("namespace")
def class_docs(self):
return self.gir_class.doc
def emit_xml(self, xml: XmlEmitter): def emit_xml(self, xml: XmlEmitter):
xml.start_tag("object", **{ xml.start_tag("object", **{
"class": self.gir_class.glib_type_name, "class": self.gir_class.glib_type_name,

View file

@ -0,0 +1,90 @@
# ast_utils.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from .errors import *
class Validator:
    """Data descriptor that lazily runs a validation function and caches the
    result per instance.

    On failure, the raised CompileError is annotated with the source range of
    the tokens named by `token_name`/`end_token_name` (when the error does not
    already carry one), and every later access raises AlreadyCaughtError so
    the same message is never reported twice."""

    def __init__(self, func, token_name=None, end_token_name=None):
        self.func = func
        self.token_name = token_name
        self.end_token_name = end_token_name

    def __get__(self, instance, owner):
        if instance is None:
            return self

        key = "_validation_result_" + self.func.__name__

        if key + "_err" in instance.__dict__:
            # If the validator has failed before, raise a generic Exception.
            # We want anything that depends on this validation result to
            # fail, but not report the exception twice.
            raise AlreadyCaughtError()

        if key not in instance.__dict__:
            try:
                instance.__dict__[key] = self.func(instance)
            except CompileError as e:
                # Mark the validator as already failed so we don't print the
                # same message again
                instance.__dict__[key + "_err"] = True

                # This sets the error's start and end positions from the
                # tokens passed to the decorator, if they have not already
                # been set
                if self.token_name is not None and e.start is None:
                    group = instance.group.tokens.get(self.token_name)
                    if self.end_token_name is not None and group is None:
                        group = instance.group.tokens[self.end_token_name]
                    # Guard: the named token may be missing from this group.
                    # Previously `group.start` raised AttributeError on None,
                    # masking the original CompileError.
                    if group is not None:
                        e.start = group.start
                if (self.token_name is not None or self.end_token_name is not None) and e.end is None:
                    e.end = instance.group.tokens[self.end_token_name or self.token_name].end

                # Re-raise the exception
                raise e

        # Return the validation result (which other validators, or the code
        # generation phase, might depend on)
        return instance.__dict__[key]
def validate(*args, **kwargs):
    """Decorate a function that validates an AST node.

    The function becomes a cached, per-instance property; CompileErrors
    raised during validation are tagged with range information from the
    tokens named in the decorator arguments."""

    def wrap(func):
        return Validator(func, *args, **kwargs)

    return wrap
class Docs:
    """Data descriptor wrapping a function that returns documentation text.

    Without __get__, `getattr(node, name)` in AstNode.get_docs returned this
    descriptor object itself (always truthy, never the documentation), so the
    descriptor protocol is implemented here: accessing the attribute on an
    instance calls the wrapped function."""

    def __init__(self, func, token_name=None):
        self.func = func
        # Name of the token whose source range this documentation covers,
        # or None to apply to the whole node.
        self.token_name = token_name

    def __get__(self, instance, owner):
        if instance is None:
            return self
        try:
            return self.func(instance)
        except CompileError:
            # Documentation lookup must not crash the language server when
            # the node fails validation; report "no docs" instead.
            return None
def docs(*args, **kwargs):
    """Decorate a function that returns documentation for tokens.

    Arguments are forwarded to Docs (token_name)."""

    def wrap(func):
        return Docs(func, *args, **kwargs)

    return wrap

View file

@ -21,7 +21,7 @@
import json, sys, traceback import json, sys, traceback
from .errors import PrintableError, CompileError, MultipleErrors from .errors import PrintableError, CompileError, MultipleErrors
from .lsp_enums import * from .lsp_utils import *
from . import tokenizer, parser, utils from . import tokenizer, parser, utils
@ -37,7 +37,7 @@ class LanguageServer:
def __init__(self): def __init__(self):
self.client_capabilities = {} self.client_capabilities = {}
self._open_files = {} self._open_files: {str: OpenFile} = {}
def run(self): def run(self):
try: try:
@ -94,7 +94,7 @@ class LanguageServer:
"capabilities": { "capabilities": {
"textDocumentSync": { "textDocumentSync": {
"openClose": True, "openClose": True,
"change": 1 "change": TextDocumentSyncKind.Incremental,
} }
} }
}) })
@ -106,41 +106,26 @@ class LanguageServer:
version = doc.get("version") version = doc.get("version")
text = doc.get("text") text = doc.get("text")
self._open_files[uri] = text open_file = OpenFile(uri, text, version)
self._send_diagnostics(uri) self._open_files[uri] = open_file
self._send_file_updates(open_file)
@command("textDocument/didChange") @command("textDocument/didChange")
def didChange(self, id, params): def didChange(self, id, params):
text = self._open_files[params.textDocument.uri] open_file = self._open_files[params.textDocument.uri]
for change in params.contentChanges: open_file.apply_changes(params.contentChanges)
start = utils.pos_to_idx(change.range.start.line, change.range.start.character, text) self._send_file_updates(open_file)
end = utils.pos_to_idx(change.range.end.line, change.range.end.character, text)
text = text[:start] + change.text + text[end:]
self._open_files[params.textDocument.uri] = text
self._send_diagnostics(uri)
@command("textDocument/didClose") @command("textDocument/didClose")
def didClose(self, id, params): def didClose(self, id, params):
del self._open_files[params.textDocument.uri] del self._open_files[params.textDocument.uri]
def _send_diagnostics(self, uri):
text = self._open_files[uri]
diagnostics = []
try:
tokens = tokenizer.tokenize(text)
ast = parser.parse(tokens)
diagnostics = [self._create_diagnostic(text, err) for err in list(ast.errors)]
except MultipleErrors as e:
diagnostics += [self._create_diagnostic(text, err) for err in e.errors]
except CompileError as e:
diagnostics += [self._create_diagnostic(text, e)]
def _send_file_updates(self, open_file: OpenFile):
self._send_notification("textDocument/publishDiagnostics", { self._send_notification("textDocument/publishDiagnostics", {
"uri": uri, "uri": uri,
"diagnostics": diagnostics, "diagnostics": [self._create_diagnostic(open_file.text, err) for err in open_file.diagnostics],
}) })
def _create_diagnostic(self, text, err): def _create_diagnostic(self, text, err):

View file

@ -1,25 +0,0 @@
# lsp_enums.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from enum import Enum
class TextDocumentSyncKind(Enum):
    """LSP TextDocumentSyncKind constants (LSP spec, textDocumentSync).

    The trailing commas previously made every member's value a 1-tuple
    (e.g. (2,)) instead of the integer the protocol requires."""
    None_ = 0
    Full = 1
    Incremental = 2

View file

@ -0,0 +1,57 @@
# lsp_enums.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from enum import Enum

from .errors import CompileError, MultipleErrors
from .utils import *
class TextDocumentSyncKind(Enum):
    """LSP TextDocumentSyncKind constants (LSP spec, textDocumentSync).

    The trailing commas previously made every member's value a 1-tuple
    (e.g. (2,)) instead of the integer the protocol requires."""
    None_ = 0
    Full = 1
    Incremental = 2
class OpenFile:
    """State for one document open in the language server: its text, LSP
    version, parse results, and the compile errors found in it."""

    def __init__(self, uri, text, version):
        self.uri = uri
        self.text = text
        self.version = version
        self.diagnostics = []

        self._update()

    def apply_changes(self, changes):
        """Apply LSP incremental content changes (range + replacement text)
        to the document, then re-tokenize and re-parse."""
        for change in changes:
            # pos_to_idx comes from the star-import of .utils; the previous
            # `utils.pos_to_idx` referenced a name that was never bound here.
            start = pos_to_idx(change.range.start.line, change.range.start.character, self.text)
            end = pos_to_idx(change.range.end.line, change.range.end.character, self.text)
            self.text = self.text[:start] + change.text + self.text[end:]
        self._update()

    def _update(self):
        """Re-tokenize and re-parse the current text, collecting CompileError
        objects into self.diagnostics.

        Stores raw error objects; the language server converts them to LSP
        diagnostics (it calls _create_diagnostic on each entry). The previous
        code called self._create_diagnostic, which does not exist on this
        class, and referenced the unbound names `text` and `ast`."""
        # Imported here rather than at module top to avoid a circular import
        # at load time — NOTE(review): confirm whether a top-level import
        # would in fact cycle.
        from . import tokenizer, parser

        self.diagnostics = []
        try:
            self.tokens = tokenizer.tokenize(self.text)
            self.ast = parser.parse(self.tokens)
            self.diagnostics = list(self.ast.errors)
        except MultipleErrors as e:
            self.diagnostics += e.errors
        except CompileError as e:
            self.diagnostics.append(e)