Mirror of https://gitlab.gnome.org/jwestman/blueprint-compiler.git, synced 2025-05-04 15:59:08 -04:00
Moved formatting logic to separate file
Also updated branch
This commit is contained in:
parent
54da7fa6b9
commit
81734ed3a1
1 changed file with 51 additions and 0 deletions
blueprintcompiler/formatter.py (new file, 51 additions)
@@ -0,0 +1,51 @@
# formatter.py
#
# Copyright 2021 James Westman <james@jwestman.net>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: LGPL-3.0-or-later


from . import tokenizer


# Tokens that should be followed by a line break in the output
newline_after = [";", "]"]
# Tokens that raise or lower the indentation level
opening_tokens = ["{"]
closing_tokens = ["}"]


class Format:
    def format(data):
        indent_levels = 0
        tokens = tokenizer.tokenize(data)

        tokenized_str = ""
        for index, item in enumerate(tokens):
            if item.type != tokenizer.TokenType.WHITESPACE:
                tokenized_str += str(item)
                if str(item) in opening_tokens:
                    indent_levels += 1

                # Look ahead: dedent before the closing brace itself is emitted
                try:
                    if str(tokens[index + 1]) in closing_tokens:
                        indent_levels -= 1
                except IndexError:
                    pass

                if str(item) in newline_after + closing_tokens + opening_tokens:
                    tokenized_str += "\n"
                    tokenized_str += indent_levels * " "
                else:
                    tokenized_str += " "

        return tokenized_str
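
As a usage sketch (not part of this commit): the new module is meant to be fed raw blueprint source text. The calling code and the sample snippet below are illustrative assumptions, included only to show how Format.format could be invoked.

# Hypothetical usage of the new formatter; the sample input is an
# assumption for illustration, not taken from the commit.
from blueprintcompiler.formatter import Format

sample = 'using Gtk 4.0; Box { Label { label: "hi"; } }'

# Format.format() drops whitespace tokens and re-emits the stream, adding a
# newline and the current indentation after ";", "{", "}", and "]".
print(Format.format(sample))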