diff --git a/blueprintcompiler/tokenizer.py b/blueprintcompiler/tokenizer.py
index bc5c72b..be59e2e 100644
--- a/blueprintcompiler/tokenizer.py
+++ b/blueprintcompiler/tokenizer.py
@@ -46,7 +46,7 @@ _tokens = [
     (TokenType.WHITESPACE, r"\s+"),
     (TokenType.COMMENT, r"\/\*[\s\S]*?\*\/"),
     (TokenType.COMMENT, r"\/\/[^\n]*"),
-    (TokenType.OP, r"[:=\.=\|<>\+\-/\*]+"),
+    (TokenType.OP, r"<<|>>|=>|::|<|>|:=|\.|\|\||\||\+|\-|\*|=|:|/"),
     (TokenType.PUNCTUATION, r"\(|\)|\{|\}|;|\[|\]|\,"),
 ]
 _TOKENS = [(type, re.compile(regex)) for (type, regex) in _tokens]
diff --git a/tests/test_tokenizer.py b/tests/test_tokenizer.py
index ec39069..1c87e50 100644
--- a/tests/test_tokenizer.py
+++ b/tests/test_tokenizer.py
@@ -46,7 +46,12 @@ class TestTokenizer(unittest.TestCase):
             (TokenType.PUNCTUATION, "}"),
             (TokenType.PUNCTUATION, ";"),
             (TokenType.WHITESPACE, " \n "),
-            (TokenType.OP, "<<+>>*/="),
+            (TokenType.OP, "<<"),
+            (TokenType.OP, "+"),
+            (TokenType.OP, ">>"),
+            (TokenType.OP, "*"),
+            (TokenType.OP, "/"),
+            (TokenType.OP, "="),
             (TokenType.EOF, ""),
         ])
 
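
For reference (commentary, not part of the patch): the old OP pattern was a greedy character class, so any run of operator characters collapsed into a single token. The new pattern is an ordered alternation in which multi-character operators such as "<<" and ":=" are listed before their single-character prefixes "<" and ":", because Python's re alternation takes the first branch that matches at the current position. A minimal sketch of the difference, assuming the tokenizer repeatedly matches at the current offset; the split_ops helper is hypothetical, for illustration only:

    import re

    OLD_OP = re.compile(r"[:=\.=\|<>\+\-/\*]+")
    NEW_OP = re.compile(r"<<|>>|=>|::|<|>|:=|\.|\|\||\||\+|\-|\*|=|:|/")

    def split_ops(pattern, text):
        # Repeatedly match at the current position, as a tokenizer loop would.
        tokens, pos = [], 0
        while pos < len(text):
            match = pattern.match(text, pos)
            if match is None:
                break
            tokens.append(match.group())
            pos = match.end()
        return tokens

    print(split_ops(OLD_OP, "<<+>>*/="))  # ['<<+>>*/='] -- one run-on token
    print(split_ops(NEW_OP, "<<+>>*/="))  # ['<<', '+', '>>', '*', '/', '=']

This is exactly the behavior the updated test pins down: the input "<<+>>*/=" now yields six OP tokens instead of one.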