Fix type checker errors

James Westman 2021-10-26 23:13:31 -05:00
parent b387d4114f
commit b9068e24ab
7 changed files with 35 additions and 18 deletions
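
Note: most of the changes below share one pattern. Shorthand annotations like [str] or (int, int) are valid Python expressions but not valid PEP 484 types, so a type checker rejects them; the fix is to use the generics from the typing module. A minimal illustration (not part of the commit; the mypy error text is approximate):

    import typing as T

    # Rejected: a literal list is an expression, not a type.
    # mypy: Bracketed expression "[...]" is not valid as a type
    def bad(options: [str]):
        ...

    # Accepted: typing generics name the intended container types.
    def good(options: T.List[str]) -> T.Tuple[int, int]:
        return (len(options), 0)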

ast.py

@@ -88,6 +88,8 @@ class AstNode:
         if docs is not None:
             return docs
         return None

+class UI(AstNode):
+    """ The AST node for the entire file """

errors.py

@@ -18,6 +18,7 @@
 # SPDX-License-Identifier: LGPL-3.0-or-later

+import typing as T
 import sys, traceback

 from . import utils
@@ -60,7 +61,7 @@ class CompileError(PrintableError):
         return self

-    def _did_you_mean(self, word: str, options: [str]):
+    def _did_you_mean(self, word: str, options: T.List[str]):
         if word.replace("_", "-") in options:
             self.hint(f"use '-', not '_': `{word.replace('_', '-')}`")
             return
@@ -98,11 +99,11 @@ class MultipleErrors(PrintableError):
     a list and re-thrown using the MultipleErrors exception. It will
     pretty-print all of the errors and a count of how many errors there are. """

-    def __init__(self, errors: [CompileError]):
+    def __init__(self, errors: T.List[CompileError]):
         super().__init__()
         self.errors = errors

-    def pretty_print(self, filename, code) -> str:
+    def pretty_print(self, filename, code) -> None:
         for error in self.errors:
             error.pretty_print(filename, code)
         if len(self.errors) != 1:
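
The pretty_print change reflects that the method only prints and never returns a value; with -> str, mypy reports a missing return statement. A self-contained sketch of the corrected shape (simplified stand-in classes, not the project's real ones):

    import typing as T

    class Diagnostic(Exception):
        def pretty_print(self, filename: str, code: str) -> None:
            print(f"{filename}: {self}")

    class MultipleDiagnostics(Exception):
        def __init__(self, errors: T.List[Diagnostic]):
            super().__init__()
            self.errors = errors

        def pretty_print(self, filename: str, code: str) -> None:
            # Side effects only; returning nothing matches the annotation
            for error in self.errors:
                error.pretty_print(filename, code)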

gir.py

@@ -17,6 +17,7 @@
 #
 # SPDX-License-Identifier: LGPL-3.0-or-later

+import typing as T
 import os, sys

 from .errors import CompileError, CompilerBugError
@@ -24,7 +25,7 @@ from .utils import lazy_prop
 from . import xml_reader

-extra_search_paths = []
+extra_search_paths: T.List[str] = []
 _namespace_cache = {}

 _search_paths = []
@@ -71,7 +72,7 @@ class GirNode:
         return self.xml.get("version")

     @lazy_prop
-    def doc(self) -> str:
+    def doc(self) -> T.Optional[str]:
         el = self.xml.get_elements("doc")
         if len(el) != 1:
             return None
@@ -175,7 +176,7 @@ class Repository(GirNode):
         try:
             self.includes = { include["name"]: get_namespace(include["name"], include["version"]) for include in xml.get_elements("include") }
         except:
-            raise CompilerBugError(f"Failed to load dependencies of {namespace}-{version}")
+            raise CompilerBugError(f"Failed to load dependencies.")

     def lookup_namespace(self, name: str):
         ns = self.namespaces.get(name)
@@ -195,7 +196,7 @@ class GirContext:
     def add_namespace(self, namespace: Namespace):
         other = self.namespaces.get(namespace.name)
         if other is not None and other.version != namespace.version:
-            raise CompileError(f"Namespace {namespace}-{version} can't be imported because version {other.version} was imported earlier")
+            raise CompileError(f"Namespace {namespace.name}-{namespace.version} can't be imported because version {other.version} was imported earlier")

         self.namespaces[namespace.name] = namespace
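
Two patterns here: doc() legitimately returns None when no single doc element exists, so its return type becomes T.Optional[str], and the old f-strings referenced names (namespace, version) that are not in scope, which the checker flags. A small sketch of the Optional pattern (hypothetical helper, not gir.py's real code):

    import typing as T

    def doc(xml_elements: T.List[str]) -> T.Optional[str]:
        # Returns None when there is no unambiguous doc element
        if len(xml_elements) != 1:
            return None
        return xml_elements[0]

    summary = doc([])
    if summary is not None:  # mypy forces the None check before use
        print(summary.strip())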

lsp.py

@@ -18,6 +18,7 @@
 # SPDX-License-Identifier: LGPL-3.0-or-later

+import typing as T
 import json, sys, traceback

 from .errors import PrintableError, CompileError, MultipleErrors
@@ -33,7 +34,7 @@ def command(json_method):

 class LanguageServer:
-    commands = {}
+    commands: T.Dict[str, T.Callable[["LanguageServer", T.Union[str, int], T.Any], None]] = {}

     def __init__(self):
         self.client_capabilities = {}
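
The commands annotation documents the handler signature stored in the registry. The class name is quoted because a class cannot be referenced unquoted inside its own body; the annotation is evaluated before the class object exists. An illustrative sketch with hypothetical names:

    import typing as T

    class Server:
        # Maps JSON-RPC method names to handlers of (server, request id, params)
        commands: T.Dict[str, T.Callable[["Server", T.Union[str, int], T.Any], None]] = {}

    def initialize(server: Server, id: T.Union[str, int], params: T.Any) -> None:
        print("initialize request", id)

    Server.commands["initialize"] = initialize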

parse_tree.py

@@ -19,6 +19,9 @@
 """ Utilities for parsing an AST from a token stream. """

+import typing as T
+from collections import defaultdict
 from enum import Enum

 from .ast import AstNode
@@ -27,7 +30,12 @@ from .tokenizer import Token, TokenType
 _SKIP_TOKENS = [TokenType.COMMENT, TokenType.WHITESPACE]
-_RECOVER_TOKENS = [TokenType.COMMENT, TokenType.STMT_END, TokenType.CLOSE_BLOCK, TokenType.EOF]
+_RECOVER_TOKENS = [
+    TokenType.COMMENT,
+    TokenType.STMT_END,
+    TokenType.CLOSE_BLOCK,
+    TokenType.EOF,
+]

 class ParseResult(Enum):
@@ -59,9 +67,9 @@ class ParseGroup:
     def __init__(self, ast_type, start: int):
         self.ast_type = ast_type
-        self.children = {}
-        self.keys = {}
-        self.tokens = {}
+        self.children: T.Dict[str, T.List[ParseGroup]] = defaultdict()
+        self.keys: T.Dict[str, T.Any] = {}
+        self.tokens: T.Dict[str, Token] = {}
         self.start = start
         self.end = None
@@ -195,6 +203,9 @@ class ParseNode:
         else:
             return ParseResult.FAILURE

+    def _parse(self, ctx: ParseContext) -> bool:
+        raise NotImplementedError()

     def err(self, message):
         """ Causes this ParseNode to raise an exception if it fails to parse.
         This prevents the parser from backtracking, so you should understand
@@ -346,7 +357,7 @@ class Optional(ParseNode):
 class StaticToken(ParseNode):
     """ Base class for ParseNodes that match a token type without inspecting
     the token's contents. """
-    token_type = None
+    token_type: T.Optional[TokenType] = None

     def _parse(self, ctx: ParseContext) -> bool:
         return ctx.next_token().type == self.token_type
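
token_type is a placeholder that subclasses override, hence T.Optional[TokenType] with a None default, and the new _parse stub spells out the method every ParseNode subclass must provide. A condensed sketch of both idioms (toy token type, not the real parser):

    import typing as T
    from enum import Enum

    class TokenType(Enum):
        COMMENT = 0
        EOF = 1

    class ParseNode:
        def _parse(self) -> bool:
            # Subclasses must override; failing loudly beats a silent wrong result
            raise NotImplementedError()

    class StaticToken(ParseNode):
        token_type: T.Optional[TokenType] = None  # set by each subclass

    class EofToken(StaticToken):
        token_type = TokenType.EOF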

tokenizer.py

@@ -18,6 +18,7 @@
 # SPDX-License-Identifier: LGPL-3.0-or-later

+import typing as T
 import re
 from enum import Enum
@@ -43,7 +44,7 @@ class TokenType(Enum):
     COMMA = 15

-_TOKENS = [
+_tokens = [
     (TokenType.DIRECTIVE, r"@[\d\w\-_]+"),
     (TokenType.IDENT, r"[A-Za-z_][\d\w\-_]*"),
     (TokenType.QUOTED, r'"(\\"|[^"\n])+"'),
@@ -63,7 +64,7 @@ _TOKENS = [
     (TokenType.OP, r"[:=\.=\|<>\+\-/\*]+"),
     (TokenType.COMMA, r"\,"),
 ]

-_TOKENS = [(type, re.compile(regex)) for (type, regex) in _TOKENS]
+_TOKENS = [(type, re.compile(regex)) for (type, regex) in _tokens]

 class Token:
@@ -111,5 +112,5 @@ def _tokenize(ui_ml: str):
     yield Token(TokenType.EOF, i, i, ui_ml)

-def tokenize(data: str) -> [Token]:
+def tokenize(data: str) -> T.List[Token]:
     return list(_tokenize(data))
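
The rename from _TOKENS to _tokens matters because mypy assigns each module-level name one type: rebinding the list of (TokenType, str) pairs to a list of compiled-pattern pairs is an error. Using two names keeps both types stable, roughly:

    import re
    import typing as T
    from enum import Enum

    class TokenType(Enum):
        IDENT = 0

    _raw: T.List[T.Tuple[TokenType, str]] = [
        (TokenType.IDENT, r"[A-Za-z_][A-Za-z0-9_]*"),
    ]
    # A second name lets each list keep a single, consistent type
    _compiled: T.List[T.Tuple[TokenType, T.Pattern[str]]] = [
        (t, rx_compiled) for (t, rx) in _raw for rx_compiled in [re.compile(rx)]
    ]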

utils.py

@@ -32,7 +32,7 @@ def lazy_prop(func):
     return real_func

-def did_you_mean(word: str, options: [str]) -> T.Optional[str]:
+def did_you_mean(word: str, options: T.List[str]) -> T.Optional[str]:
     if len(options) == 0:
         return None
@@ -67,7 +67,7 @@ def did_you_mean(word: str, options: [str]) -> T.Optional[str]:
         return None

-def idx_to_pos(idx: int, text: str) -> (int, int):
+def idx_to_pos(idx: int, text: str) -> T.Tuple[int, int]:
     if idx == 0:
         return (0, 0)
     sp = text[:idx].splitlines(keepends=True)
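
(int, int) in an annotation is just a tuple of two types at runtime, not a type itself, so the return annotation becomes T.Tuple[int, int]. A rough sketch of such a helper, consistent with the visible lines (the tail of the body is inferred, not the project's exact code):

    import typing as T

    def idx_to_pos(idx: int, text: str) -> T.Tuple[int, int]:
        # Convert a flat character offset to a zero-based (line, column) pair
        if idx == 0:
            return (0, 0)
        lines = text[:idx].splitlines(keepends=True)
        return (len(lines) - 1, len(lines[-1]))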