diff --git a/README.md b/README.md index 2dc7889..55efb36 100644 --- a/README.md +++ b/README.md @@ -151,6 +151,9 @@ The entire list of completed and scheduled tasks is available in the [TODO file] +## Status + +Project frozen until studying end :( ## Licence diff --git a/config/analyzer.json b/config/analyzer.json index 50c3b43..01dd38f 100644 --- a/config/analyzer.json +++ b/config/analyzer.json @@ -5,6 +5,7 @@ "fastpy.parser.nodes.FuncNode": "fastpy.semantic_analyzer.node_analyzers.FuncNodeAnalyzer", "fastpy.parser.nodes.IfNode": "fastpy.semantic_analyzer.node_analyzers.IfNodeAnalyzer", "fastpy.parser.nodes.CallNode": "fastpy.semantic_analyzer.node_analyzers.CallNodeAnalyzer", - "fastpy.parser.nodes.WhileNode": "fastpy.semantic_analyzer.node_analyzers.WhileNodeAnalyzer" + "fastpy.parser.nodes.WhileNode": "fastpy.semantic_analyzer.node_analyzers.WhileNodeAnalyzer", + "fastpy.parser.nodes.ElseNode": "fastpy.semantic_analyzer.node_analyzers.ElseNodeAnalyzer" } } \ No newline at end of file diff --git a/config/builtin.json b/config/builtin.json index 40aba85..0f1d479 100644 --- a/config/builtin.json +++ b/config/builtin.json @@ -10,6 +10,8 @@ "builtin_types": { "string": "str", "integer": "int", - "boolean": "bool" + "boolean": "bool", + "list": "list", + "vector": "vector" } } \ No newline at end of file diff --git a/config/lexer.json b/config/lexer.json index 0a4006c..6b0fabd 100644 --- a/config/lexer.json +++ b/config/lexer.json @@ -10,10 +10,10 @@ ] }, "literal": { - "detector": "fastpy.lexer.detectors.UniversalDetector", + "detector": "fastpy.lexer.detectors.LiteralDetector", "regexes": [ - "\"[\\s\\S]+\"", - "'[\\s\\S]+'" + "'[\\S\\s ]+'", + "\"[\\S\\s ]+\"" ] }, "number": { @@ -25,7 +25,7 @@ "identifier": { "detector": "fastpy.lexer.detectors.UniversalDetector", "regexes": [ - "[a-zA-Z_]+" + "[a-zA-Z_][a-zA-Z0-9_]*" ] } } diff --git a/config/parser.json b/config/parser.json index a992eb7..c16a8e2 100644 --- a/config/parser.json +++ b/config/parser.json @@ -35,7 +35,14 @@ "index": 0 }, "value_type": { - "index": 2 + "parser_class": "fastpy.parser.node_parsers.UniversalNodeParser", + "possible_node_classes": [ + "fastpy.parser.nodes.VariableNode" + ], + "tokens_slice": { + "start_index": 2, + "end_index": 3 + } }, "value": { "parser_class": "fastpy.parser.node_parsers.UniversalNodeParser", @@ -80,7 +87,8 @@ "possible_node_classes": [ "fastpy.parser.nodes.ValueNode", "fastpy.parser.nodes.VariableNode", - "fastpy.parser.nodes.BinOpNode" + "fastpy.parser.nodes.BinOpNode", + "fastpy.parser.nodes.CallNode" ], "tokens_slice": { "start_index": 2 @@ -115,7 +123,55 @@ "index": 0 }, "value_type": { - "index": 2 + "parser_class": "fastpy.parser.node_parsers.UniversalNodeParser", + "possible_node_classes": [ + "fastpy.parser.nodes.VariableNode" + ], + "tokens_slice": { + "start_index": 2, + "end_index": 3 + } + } + } + }, + { + "validate_data": { + "methods": { + "check_min_tokens_length": { + "min_length": 5 + }, + "check_token_name_presence": { + "required_names": [ + "assign" + ] + }, + "check_token_types": { + "types": [ + "identifier", + "operator", + "identifier" + ] + }, + "check_token_names": { + "names": [ + null, + "body_start" + ] + } + } + }, + "parse_data": { + "identifier": { + "index": 0 + }, + "value_type": { + "parser_class": "fastpy.parser.node_parsers.ComplexTypeNodeParser", + "possible_node_classes": [ + "fastpy.parser.nodes.VariableNode" + ], + "tokens_slice": { + "start_index": 2 + } } } } @@ -177,6 +233,73 @@ "parser_class": "fastpy.parser.node_parsers.UniversalNodeParser", 
"node_class": "fastpy.parser.nodes.FuncNode", "cases": [ + { + "validate_data": { + "methods": { + "check_min_tokens_length": { + "min_length": 7 + }, + "check_token_types": { + "types": [ + "operator", + "identifier", + "start_parenthesis" + ] + }, + "check_token_names": { + "names": [ + "function" + ] + }, + "check_token_name_presence": { + "required_names": [ + "return_type" + ] + }, + "check_token_name": { + "exception": { + "message": "SyntaxError: body start operator expected" + }, + "token_index": -1, + "possible_names": [ + "body_start" + ] + }, + "check_token_type_presence": { + "exception": { + "message": "SyntaxError: close bracket expected" + }, + "required_types": [ + "end_parenthesis" + ] + } + } + }, + "parse_data": { + "identifier": { + "index": 1 + }, + "arguments": { + "parser_class": "fastpy.parser.node_parsers.ArgumentsParser", + "possible_node_classes": [ + "fastpy.parser.nodes.AssignNode" + ], + "tokens_slice": { + "start_index": 3 + } + }, + "return_type": { + "parser_class": "fastpy.parser.node_parsers.UniversalNodeParser", + "possible_node_classes": [ + "fastpy.parser.nodes.VariableNode" + ], + "tokens_slice": { + "start_index": -2, + "end_index": -1 + } + } + } + }, { "validate_data": { "methods": { @@ -429,6 +552,38 @@ } ] }, + "ReturnNode": { + "parser_class": "fastpy.parser.node_parsers.UniversalNodeParser", + "node_class": "fastpy.parser.nodes.ReturnNode", + "cases": [ + { + "validate_data": { + "methods": { + "check_token_names": { + "names": [ + "return" + ] + } + } + }, + "parse_data": { + "node": { + "parser_class": "fastpy.parser.node_parsers.UniversalNodeParser", + "possible_node_classes": [ + "fastpy.parser.nodes.LogicOpNode", + "fastpy.parser.nodes.BinOpNode", + "fastpy.parser.nodes.VariableNode", + "fastpy.parser.nodes.ValueNode", + "fastpy.parser.nodes.CallNode" + ], + "tokens_slice": { + "start_index": 1 + } + } + } + } + ] + }, "LogicOpNode": { "parser_class": "fastpy.parser.node_parsers.OperationNodeParser", "node_class": "fastpy.parser.nodes.LogicOpNode", diff --git a/config/transpiler.json b/config/transpiler.json index cb817c3..6761f8b 100644 --- a/config/transpiler.json +++ b/config/transpiler.json @@ -13,6 +13,7 @@ "fastpy.parser.nodes.IfNode": "fastpy.transpiler.node_transpilers.IfNodeTranspiler", "fastpy.parser.nodes.LogicOpNode": "fastpy.transpiler.node_transpilers.OperationsNodeTranspiler", "fastpy.parser.nodes.ElseNode": "fastpy.transpiler.node_transpilers.ElseNodeTranspiler", - "fastpy.parser.nodes.WhileNode": "fastpy.transpiler.node_transpilers.WhileNodeTranspiler" + "fastpy.parser.nodes.WhileNode": "fastpy.transpiler.node_transpilers.WhileNodeTranspiler", + "fastpy.parser.nodes.ReturnNode": "fastpy.transpiler.node_transpilers.ReturnNodeTranspiler" } } \ No newline at end of file diff --git a/docs/TODO.md b/docs/TODO.md index 5636b8d..128d74b 100644 --- a/docs/TODO.md +++ b/docs/TODO.md @@ -5,7 +5,11 @@ - [x] Variables - [x] scopes - [x] creation -- [x] Functions +- [ ] Functions + - [x] return + - [x] body + - [x] arguments + - [ ] decorators - [ ] Modules - [x] importing - [ ] namespaces @@ -27,3 +31,12 @@ - [ ] functions - [x] logging - [x] input +- [x] Binary operations + - [x] number + - [x] variable + - [x] string +- [x] Logic operations + - [x] number + - [x] variable + - [x] string +- [ ] Templates \ No newline at end of file diff --git a/examples/first_tic_tac_toe.fpy b/examples/first_tic_tac_toe.fpy new file mode 100644 index 0000000..7a3cc70 --- /dev/null +++ b/examples/first_tic_tac_toe.fpy @@ -0,0 +1,116 @@ +a: str = ' ' 
+b: str = ' ' +c: str = ' ' +d: str = ' ' +e: str = ' ' +f: str = ' ' +g: str = ' ' +h: str = ' ' +i: str = ' ' + +who_move: str = 'X' +continue_game: bool = true + + +fun next_player(who_move: str) -> str: + if who_move == 'X': + return 'O' + else: + return 'X' + + + + +fun draw_vert_border(endl: bool = false): + log('|', endl) + +fun draw_horz_border(): + log('+-+-+-+', true) + +fun draw_row(a: str, b: str, c: str): + draw_vert_border() + log(a) + draw_vert_border() + log(b) + draw_vert_border() + log(c) + draw_vert_border(true) + +fun draw(a: str, b: str, c: str, d: str, e: str, f: str, g: str, h: str, i: str): + draw_horz_border() + draw_row(a, b, c) + draw_horz_border() + draw_row(d, e, f) + draw_horz_border() + draw_row(g, h, i) + draw_horz_border() + + + +fun ask_cell(who_move: str) -> int: + log(who_move) + cell: int = input(' choose cell >>') + return cell + +fun check_three(a: str, b: str, c: str) -> bool: + return a == b and b == c and c != ' ' + +fun check_game_over(a: str, b: str, c: str, d: str, e: str, f: str, g: str, h: str, i: str) -> bool: + r1 = check_three(a, b, c) + r2 = check_three(a, d, g) + r3 = check_three(a, e, i) + r4 = check_three(b, e, h) + r5 = check_three(d, e, f) + r6 = check_three(g, e, c) + r7 = check_three(c, f, i) + r8 = check_three(g, h , i) + return r1 or r2 or r3 or r4 or r5 or r6 or r7 or r8 + +fun check_draw(a: str, b: str, c: str, d: str, e: str, f: str, g: str, h: str, i: str) -> bool: + return a != ' ' and b != ' ' and c != ' ' and d != ' ' and e != ' ' and f != ' ' and g != ' ' and h != ' ' and i != ' ' + +while continue_game: + draw(a, b, c, d, e, f, g, h, i) + cell = ask_cell(who_move) + + if cell == 1: + a = who_move + elif cell == 2: + b = who_move + elif cell == 3: + c = who_move + elif cell == 4: + d = who_move + elif cell == 5: + e = who_move + elif cell == 6: + f = who_move + elif cell == 7: + g = who_move + elif cell == 8: + h = who_move + elif cell == 9: + i = who_move + else: + log_error('Enter number in range: 1 - 9!', true) + who_move = next_player(who_move) + + game_over = check_game_over(a, b, c, d, e, f, g, h, i) + drw = check_draw(a, b, c, d, e, f, g, h, i) + + if drw: + draw(a, b, c, d, e, f, g, h, i) + log_info('Draw!') + continue_game = false + elif game_over: + draw(a, b, c, d, e, f, g, h, i) + log_info('Game Over! 
') + log_info('Won: ') + log_info(who_move) + continue_game = false + + + + who_move = next_player(who_move) + + diff --git a/fastpy/__init__.py b/fastpy/__init__.py index bd9fd9a..119cd72 100644 --- a/fastpy/__init__.py +++ b/fastpy/__init__.py @@ -1,3 +1,5 @@ +"""This module provides a set of tools for development on FastPy lang.""" + import fastpy.config import fastpy.exceptions import fastpy.log @@ -10,6 +12,5 @@ 'config', 'log', 'exceptions', - 'lexer', 'TranspileAPI', ] diff --git a/fastpy/config/__init__.py b/fastpy/config/__init__.py index 762a8a3..bce54db 100644 --- a/fastpy/config/__init__.py +++ b/fastpy/config/__init__.py @@ -1,3 +1,5 @@ +"""This module provides tools to help you load configs.""" + from .json_config import * __all__ = [ diff --git a/fastpy/config/json_config.py b/fastpy/config/json_config.py index 01d613a..213b4b7 100644 --- a/fastpy/config/json_config.py +++ b/fastpy/config/json_config.py @@ -5,6 +5,8 @@ class JsonConfig(BaseConfig): + """Json config loader""" + def __init__(self, filepath: str, authoload: bool = True): self._filepath = None self._config = None diff --git a/fastpy/dev_kit/__init__.py b/fastpy/dev_kit/__init__.py index 21e2e58..ec63ccb 100644 --- a/fastpy/dev_kit/__init__.py +++ b/fastpy/dev_kit/__init__.py @@ -1,3 +1,7 @@ +""" +This module provides an API for FastPy language development tools, such as transpilation API. +""" + from .transpiler import TranspileAPI __all__ = [ diff --git a/fastpy/exceptions/__init__.py b/fastpy/exceptions/__init__.py index 0179aa8..e213869 100644 --- a/fastpy/exceptions/__init__.py +++ b/fastpy/exceptions/__init__.py @@ -1 +1,3 @@ +"""This module contains all possible internal exceptions of the FastPy tools.""" + from .errors import * diff --git a/fastpy/filesystem/__init__.py b/fastpy/filesystem/__init__.py index 51313ac..cf36a35 100644 --- a/fastpy/filesystem/__init__.py +++ b/fastpy/filesystem/__init__.py @@ -1,3 +1,7 @@ +""" +This module simplifies working with the file system by extending the base os module. +""" + from .filesystem import * __all__ = [ diff --git a/fastpy/import_tools.py b/fastpy/import_tools.py index d1be4e3..462d9cd 100644 --- a/fastpy/import_tools.py +++ b/fastpy/import_tools.py @@ -3,13 +3,20 @@ imported = {} -def import_class(name): - if name in imported.keys(): - return imported.get(name) +def import_class(path: str): + """ + Imports Python classes - components = name.split('.') + :param path: Python classpath (separated by ".") + :return: Python class + """ + + if path in imported.keys(): + return imported.get(path) + + components = path.split('.') mod = __import__(components[0]) for comp in components[1:]: mod = getattr(mod, comp) - imported.update({name: mod}) + imported.update({path: mod}) return mod diff --git a/fastpy/lexer/__init__.py b/fastpy/lexer/__init__.py index ea113ba..2f46438 100644 --- a/fastpy/lexer/__init__.py +++ b/fastpy/lexer/__init__.py @@ -1,3 +1,7 @@ +""" +This module is responsible for splitting the source code into tokens. 
+""" + from .lexers import * from .tokens import * from .config import * diff --git a/fastpy/lexer/config.py b/fastpy/lexer/config.py index 6833dda..dc9e91b 100644 --- a/fastpy/lexer/config.py +++ b/fastpy/lexer/config.py @@ -9,14 +9,14 @@ ) COMMENT_START_SYMBOL = lexer_config['comment_start'] -TOKEN_CLASS_PATH = lexer_config['token_class'] -LEXER_CLASS_PATH = lexer_config['lexer_class'] -TOKEN_DETECTION = lexer_config.get('token_detection', {}) +TOKEN_CLASS_PATH = lexer_config['token_class'] # Token classpath to import, by default - fastpy.lexer.tokens.Token +LEXER_CLASS_PATH = lexer_config['lexer_class'] # Lexer classpath to import, by default - fastpy.lexer.tokens.Token +TOKEN_DETECTION = lexer_config.get('token_detection', {}) # token detection data operators_config = JsonConfig( filepath=os.path.join(CONFIG_FOLDER, 'operators.json'), authoload=True ) -OPERATORS = operators_config['operators'] -SIMILAR_OPERATORS = operators_config['similar_operators'] +OPERATORS = operators_config['operators'] # dict of operators and it names +SIMILAR_OPERATORS = operators_config['similar_operators'] # dict of operators and lists of similar operators diff --git a/fastpy/lexer/detectors.py b/fastpy/lexer/detectors.py index 8c4c5be..e2a264d 100644 --- a/fastpy/lexer/detectors.py +++ b/fastpy/lexer/detectors.py @@ -2,32 +2,49 @@ from .tokens import BaseToken, create_token from .token_types import TokenTypes from .config import * +from ..exceptions import * import re import string class BaseDetector(ABC): + """Token detector interface""" + detects: tuple[TokenTypes] @abstractmethod def detect(self, - code_line: str, + code: str, line_number: int, column_number: int, regex_pattern: str, - supposed_token_type: TokenTypes) -> BaseToken: ... + supposed_token_type: TokenTypes) -> BaseToken: + """ + Splits a portion of a FastPy code line into a token + + :param code: FastPy source code part + :param line_number: + :param column_number: + :param regex_pattern: regular expression that helps to detect and extract token + :param supposed_token_type: supposed type of token + :return: extracted token + """ class UniversalDetector(BaseDetector): - detects = (TokenTypes.literal, TokenTypes.number, TokenTypes.identifier) + """ + Lets to detect and extract several types of token such number & identifier + """ + + detects = (TokenTypes.number, TokenTypes.identifier) def detect(self, - code_line: str, + code: str, line_number: int, column_number: int, regex_pattern: str, supposed_token_type: TokenTypes) -> BaseToken: - cut_string = code_line[column_number::] + cut_string = code[column_number::] result = re.match(regex_pattern, cut_string) if result: @@ -35,17 +52,89 @@ def detect(self, return create_token(supposed_token_type, result_string, line_number) +class LiteralDetector(BaseDetector): + """ + Lets to detect and extract token of string literal type + """ + detects = (TokenTypes.literal,) + + @staticmethod + def _escape_double_quote(literal: str) -> str: + out_literal = '' + + for char in literal[1:-1]: + match char: + case '\\': + pass + case '"': + out_literal += '\\"' + case _: + out_literal += char + return '"' + out_literal + '"' + + @staticmethod + def _extract_string_literal(code: str, pattern: str) -> str: + start = code[0] + + result = re.search(pattern, code) + literal = None + if result: + literal = result.group() + + if literal.count(start) > 2: + ignore_next = False + for i, char in enumerate(literal[1::]): + if ignore_next: + ignore_next = False + continue + + if char == '\\': + ignore_next = True + if 
char == start: + return '"' + literal[1:i + 1] + '"' + + return ('"' + literal[1:-1] + '"') if literal else None + + def detect(self, + code: str, + line_number: int, + column_number: int, + regex_pattern: str, + supposed_token_type: TokenTypes) -> BaseToken | None: + cut_string = code[column_number::] + + if cut_string[0] not in ['"', "'"]: + return + + string_literal = self._extract_string_literal( + code=cut_string, + pattern=regex_pattern + ) + if not string_literal: + return + + return create_token( + token_type=supposed_token_type, + text=self._escape_double_quote(string_literal), + line=line_number, + ) + + class OperatorDetector(BaseDetector): + """ + Lets to detect and extract token of operator type + """ + detects = (TokenTypes.operator,) def detect(self, - code_line: str, + code: str, line_number: int, column_number: int, regex_pattern: str, supposed_token_type: TokenTypes) -> BaseToken: - cut_string = code_line[column_number::] + cut_string = code[column_number::] start = cut_string[0] diff --git a/fastpy/lexer/lexers.py b/fastpy/lexer/lexers.py index 0a5237f..6650959 100644 --- a/fastpy/lexer/lexers.py +++ b/fastpy/lexer/lexers.py @@ -10,6 +10,7 @@ class BaseLexer(ABC): + """Lexer interface""" @abstractmethod def __init__(self, module: Module): @@ -24,7 +25,7 @@ def lex(self) -> list[BaseToken]: class Lexer(BaseLexer): - """Basic lexer of FastPy""" + """Basic lexer implementation of FastPy""" @Logger.info(pattern='Lexer created ({module})') def __init__(self, module: Module): @@ -49,6 +50,7 @@ def _detect_token(self, code_line: str, line_number: int, column_number: int) -> line=line_number, ) self._tokens.append(token) + return len(token.text) + column_number - 1 for token_type, detection_info in TOKEN_DETECTION.items(): supposed_token_type = TokenTypes.__getattr__(token_type) @@ -61,7 +63,7 @@ def _detect_token(self, code_line: str, line_number: int, column_number: int) -> for regex in regexes: token = detector.detect( - code_line=code_line, + code=code_line, line_number=line_number, column_number=column_number, regex_pattern=regex, diff --git a/fastpy/lexer/special_symbols.py b/fastpy/lexer/special_symbols.py index af3ad40..d3cd6bf 100644 --- a/fastpy/lexer/special_symbols.py +++ b/fastpy/lexer/special_symbols.py @@ -7,6 +7,8 @@ ']': TokenTypes.end_square, '{': TokenTypes.start_braces, '}': TokenTypes.end_braces, + '<': TokenTypes.start_chevrons, + '>': TokenTypes.end_chevrons, ' ': TokenTypes.gap, '\t': TokenTypes.tab, ',': TokenTypes.comma diff --git a/fastpy/lexer/token_types.py b/fastpy/lexer/token_types.py index 706f065..a538fe7 100644 --- a/fastpy/lexer/token_types.py +++ b/fastpy/lexer/token_types.py @@ -16,6 +16,8 @@ class TokenTypes(Enum): tab = 12 number = 13 endline = 14 + start_chevrons = 15 + end_chevrons = 16 def __eq__(self, other): if isinstance(other, int): diff --git a/fastpy/lexer/tokens.py b/fastpy/lexer/tokens.py index c82f25a..dc4aa4f 100644 --- a/fastpy/lexer/tokens.py +++ b/fastpy/lexer/tokens.py @@ -5,6 +5,8 @@ class BaseToken(ABC): + """Token interface""" + @property @abstractmethod def type(self) -> TokenTypes: ... @@ -23,11 +25,21 @@ def name(self) -> str: ... 
class Token(BaseToken): + """Basic token implementation of FastPy""" + def __init__(self, token_type: TokenTypes, text: str, line: int, name: str = None): + """ + + :param token_type: + :param text: code part from which this token is extracted + :param line: line number of code part + :param name: operator name, used only for operators customization, for non-op tokens is None + """ + self._type = token_type self._text = text self._line = line @@ -63,8 +75,15 @@ def code_from_tokens(tokens: list[BaseToken] | tuple[BaseToken]): _token_class: BaseToken | None = None -def create_token(token_type: TokenTypes, text: str, line: int, name: str = None, **kwargs) -> BaseToken: +def create_token( + token_type: TokenTypes, + text: str, + line: int, + name: str = None, + **kwargs) -> BaseToken: + """Token factory""" + global _token_class if not _token_class: diff --git a/fastpy/log/__init__.py b/fastpy/log/__init__.py index 811a15a..f4a236f 100644 --- a/fastpy/log/__init__.py +++ b/fastpy/log/__init__.py @@ -1,3 +1,7 @@ +""" +This module is responsible for pretty internal information printing. +""" + from .logger import * from .config import * diff --git a/fastpy/module/__init__.py b/fastpy/module/__init__.py index df956ea..c533822 100644 --- a/fastpy/module/__init__.py +++ b/fastpy/module/__init__.py @@ -1,3 +1,8 @@ +""" +This module provides a convenient class for working with sources. +Used in every step of FastPy transpilation. +""" + from .module import * diff --git a/fastpy/parser/__init__.py b/fastpy/parser/__init__.py index 56fe721..e9ed158 100644 --- a/fastpy/parser/__init__.py +++ b/fastpy/parser/__init__.py @@ -1,3 +1,7 @@ +""" +This module is responsible for building an Abstract Syntax Tree from the tokens obtained using the lexer. +""" + from .parsers import * from .ast import * from .config import * @@ -12,7 +16,6 @@ 'create_parser', 'create_ast', 'BaseNode', - 'nodes', - 'ImportNode' + 'nodes' ] diff --git a/fastpy/parser/ast.py b/fastpy/parser/ast.py index 35e2833..cd1302e 100644 --- a/fastpy/parser/ast.py +++ b/fastpy/parser/ast.py @@ -7,23 +7,52 @@ class BaseAST(ABC): + """Abstract Syntax Tree interface""" + @abstractmethod - def add_module(self, module: Module) -> None: ... + def add_module(self, module: Module) -> None: + """ + + :param module: module for which a branch should be created + """ @abstractmethod - def push_node(self, module: Module, node: BaseNode) -> None: ... + def push_node(self, module: Module, node: BaseNode) -> None: + """ + + :param module: module to which the node should be added to the branch + :param node: node to be added + """ @abstractmethod - def pop_node(self, module: Module, index: int) -> BaseNode: ... + def pop_node(self, module: Module, index: int) -> BaseNode: + """ + + :param module: module that branch contains node to be removed + :param index: index of node to be removed + :return: node that was removed + """ @abstractmethod - def remove_node(self, module: Module, node: BaseNode) -> None: ... + def remove_node(self, module: Module, node: BaseNode) -> None: + """ + + :param module: module that branch contains node to be removed + :param node: node to be removed + """ @abstractmethod - def nodes(self, module_name: str) -> Iterable[BaseNode]: ... 
+ def nodes(self, module_name: str) -> Iterable[BaseNode]: + """ + + :param module_name: name of the module branch + :return: iterator of all nodes in module branch + """ class AST(BaseAST): + """Basic AST implementation of FastPy""" + def __init__(self): self._tree = { '__main__': [] diff --git a/fastpy/parser/config.py b/fastpy/parser/config.py index eeca835..c49876b 100644 --- a/fastpy/parser/config.py +++ b/fastpy/parser/config.py @@ -6,14 +6,20 @@ authoload=True ) -AST_CLASS_PATH: str = parser_config['ast_class'] # Abstract Syntax Tree class path to import -PARSER_CLASS_PATH: str = parser_config['parser_class'] # Main parser class path to import -NODE_PARSING: dict = parser_config.get('node_parsing', {}) # Node parsing data +AST_CLASS_PATH: str = parser_config[ + 'ast_class' +] # Abstract Syntax Tree classpath to import, by default - fastpy.parser.ast.AST + +PARSER_CLASS_PATH: str = parser_config[ + 'parser_class' +] # main parser classpath to import, by default - fastpy.parser.parsers.Parser + +NODE_PARSING: dict = parser_config.get('node_parsing', {}) # node parsing data operators_config = JsonConfig( filepath=os.path.join(CONFIG_FOLDER, 'operators.json'), authoload=True ) -BIN_OP_NAMES: list = operators_config['binary_operator_names'] # Names of binary operators -LOGIC_OP_NAMES: list = operators_config['logic_operator_names'] # Names of logic operators +BIN_OP_NAMES: list = operators_config['binary_operator_names'] # names of binary operators +LOGIC_OP_NAMES: list = operators_config['logic_operator_names'] # names of logic operators diff --git a/fastpy/parser/node_parsers.py b/fastpy/parser/node_parsers.py index e805983..117ff86 100644 --- a/fastpy/parser/node_parsers.py +++ b/fastpy/parser/node_parsers.py @@ -7,20 +7,42 @@ class BaseNodeParser(ABC): + """Node Parser interface""" + parses: tuple[type[BaseNode]] @abstractmethod def validate(self, tokens: list[BaseToken], supposed_node_type: type[BaseNode], - **extra_data) -> bool: ... + **extra_data) -> bool: + """ + Validates tokens and returns whether the parser is ready to parse the provided tokens + + :param tokens: tokens to parse + :param supposed_node_type: supposed type of node + :param extra_data: data that helps to validate and parse tokens, + can be loaded from config or transmitted from another node parser + :return: parser readiness + """ @abstractmethod def parse(self, tokens: list[BaseToken], parse_node_clb: callable, supposed_node_type: type[BaseNode], - **extra_data) -> BaseNode | list[BaseNode]: ... + **extra_data) -> BaseNode | list[BaseNode]: + """ + Parses tokens and returns one or more obtained nodes. 
+ Should only run when the validate method returns True + + :param tokens: tokens to parse + :param parse_node_clb: callback that can be useful to the parser to parse subnodes + :param supposed_node_type: supposed type of node + :param extra_data: data that helps to validate and parse tokens, + can be loaded from config or transmitted from another node parser + :return: + """ @singleton @@ -33,7 +55,8 @@ class UniversalNodeParser(BaseNodeParser): VariableNode, IfNode, ElseNode, - WhileNode + WhileNode, + ReturnNode ) @staticmethod @@ -89,10 +112,11 @@ def _parse_value(tokens: list[BaseToken], parse_node: callable, **value_data): ) tokens_slice_data = value_data.get('tokens_slice') slice_start = tokens_slice_data.get('start_index') + slice_end = tokens_slice_data.get('end_index') if slice_start: value = parse_node( - tokens=tokens[slice_start::], + tokens=tokens[slice_start::] if slice_end is None else tokens[slice_start:slice_end], possible_node_types=possible_node_types, parser=parser ) @@ -135,11 +159,12 @@ def validate(self, for token in tokens: if token.type == TokenTypes.operator and token.name in ['and', 'or', 'not']: return True + elif supposed_node_type is BinOpNode: left_operand = None for token in tokens: - if not left_operand and token.type in [TokenTypes.identifier, TokenTypes.number]: + if not left_operand and token.type in [TokenTypes.identifier, TokenTypes.number, TokenTypes.literal]: left_operand = token elif token.type == TokenTypes.operator \ and token.name in BIN_OP_NAMES: @@ -196,7 +221,7 @@ def parse(self, continue match token.type: - case TokenTypes.number: + case TokenTypes.number | TokenTypes.literal: if not left_operand: left_operand = ValueNode(token) elif not right_operand: @@ -304,29 +329,30 @@ def parse(self, supposed_node_type: type[BaseNode], **extra_data) -> BaseNode | list[BaseNode]: - if tokens[0].type == TokenTypes.end_parenthesis: + if tokens[0].type == TokenTypes.end_parenthesis or tokens[0].type == TokenTypes.end_chevrons: return [] arguments = [] expr_tokens = [] + opened_parenthesis_counter = 1 for token in tokens: - if token.type == TokenTypes.comma: - node = parse_node_clb(expr_tokens) - arguments.append(node) - expr_tokens.clear() - continue - elif token.type == TokenTypes.end_parenthesis: - if OperationNodeParser().validate(expr_tokens, BinOpNode): - try: + match token.type: + case TokenTypes.start_parenthesis: + opened_parenthesis_counter += 1 + case TokenTypes.end_parenthesis | TokenTypes.end_chevrons: + opened_parenthesis_counter -= 1 + + if opened_parenthesis_counter == 0: node = parse_node_clb(expr_tokens) - except ParsingError: - expr_tokens.append(token) + arguments.append(node) + return arguments + case TokenTypes.comma: + if opened_parenthesis_counter == 1: + node = parse_node_clb(expr_tokens) + arguments.append(node) + expr_tokens.clear() continue - else: - node = parse_node_clb(expr_tokens) - arguments.append(node) - return arguments expr_tokens.append(token) @@ -348,3 +374,30 @@ def parse(self, (LogicOpNode, BinOpNode), OperationNodeParser() ) + + +@singleton +class ComplexTypeNodeParser(BaseNodeParser): + def validate(self, + tokens: list[BaseToken], + supposed_node_type: type[BaseNode], + **extra_data) -> bool: + return \ + Validators.check_token_type_presence( + tokens, + (TokenTypes.start_chevrons, TokenTypes.end_chevrons) + ) and Validators.check_token_types(tokens, (TokenTypes.identifier, TokenTypes.start_chevrons)) + + def parse(self, + tokens: list[BaseToken], + parse_node_clb: callable, + supposed_node_type: type[BaseNode], + 
**extra_data) -> BaseNode | list[BaseNode]: + type_token = tokens[0] + + arguments = parse_node_clb( + tokens[2::], + (VariableNode,), + ArgumentsParser() + ) + print(type_token, arguments) diff --git a/fastpy/parser/nodes.py b/fastpy/parser/nodes.py index 941f3d5..cd81238 100644 --- a/fastpy/parser/nodes.py +++ b/fastpy/parser/nodes.py @@ -5,12 +5,18 @@ class BaseNode(ABC): + """Node interface""" + @abstractmethod def __init__(self, *args, **kwargs): ... @property @abstractmethod - def line(self) -> int: ... + def line(self) -> int: + """ + + :return: line number from which the node was extracted + """ class NodeWithBody(BaseNode, ABC): @@ -62,7 +68,7 @@ def line(self) -> int: class AssignNode(BasicNode, PrintableNode, NamedNode): def __init__(self, identifier: BaseToken, - value_type: BaseToken = None, + value_type: BaseNode = None, value: BaseNode = None): self.identifier = identifier self.value_type = value_type @@ -79,7 +85,7 @@ def __init__(self, identifier: BaseToken, arguments: list[AssignNode] = None, body: list[BaseNode] = None, - return_type: BaseToken = None, + return_type: VariableNode = None, template: bool = False): self.identifier = identifier self.arguments = arguments or [] @@ -116,6 +122,8 @@ def __init__(self, @property def line(self) -> int: + if not self.left_operand: + return -1 return self.left_operand.line @@ -207,17 +215,17 @@ def __init__(self, @property def line(self) -> int: - if not self.condition or len(self.condition) == 0: + if not self.condition: return -1 return self.condition.line -# class ImportNode(BasicNode, PrintableNode): -# def __init__(self, filepath: BaseToken = None, parts: list[BaseToken] = None): -# self.filepath = filepath -# self.parts = parts -# -# @property -# def line(self) -> int: -# if not self.filepath: -# return -1 -# return self.filepath.line + +class ReturnNode(BasicNode, PrintableNode): + def __init__(self, node: BaseNode): + self.node = node + + @property + def line(self) -> int: + if not self.node: + return -1 + return self.node.line diff --git a/fastpy/parser/parsers.py b/fastpy/parser/parsers.py index 74dd6fe..e12ff80 100644 --- a/fastpy/parser/parsers.py +++ b/fastpy/parser/parsers.py @@ -16,7 +16,12 @@ class BaseParser(ABC): @abstractmethod def __init__(self, module: Module, - tokens: list[BaseToken]): ... 
+ tokens: list[BaseToken]): + """ + + :param module: the module parameter contains information about the currently processed file + :param tokens: list of tokens - output of previous stage + """ @abstractmethod def parse(self) -> BaseAST: @@ -24,7 +29,7 @@ def parse(self) -> BaseAST: class Parser(BaseParser): - """Basic parser of FastPy""" + """Basic parser implementation of FastPy""" @Logger.info(pattern='Parser created ({module})') def __init__(self, @@ -34,6 +39,7 @@ def __init__(self, self._current_module = module self._current_struct: Structure | None = None self._structs: list[Structure] = [] + self._structs: list[Structure] = [] self._tokens = tokens self._node_parsers = {} self._load_parsers() @@ -104,24 +110,32 @@ def _detect_struct_start(self, node: BaseNode, level: int): def _within_struct(self, level: int): return self._current_struct and self._current_struct.within_struct(level=level) + def _get_struct(self, level: int) -> Structure: + for struct in reversed(self._structs): + if struct.within_struct(level=level): + return struct + + @staticmethod + def _check_expr_level(level: int): + if level % 4 != 0: + raise ParsingError('SyntaxError: invalid number of spaces, number of spaces must be a multiple of four') + @Logger.info(pattern='Parsing: {expr_tokens[0].line}: {expr_tokens}: level: {expr_level}') def _parse_expression(self, expr_tokens: list[BaseToken], expr_level: int): """Parses each line of code split into tokens""" + self._check_expr_level(level=expr_level) + node = self._parse_node(expr_tokens) - if self._within_struct(level=expr_level): - self._current_struct.push_node(node=node) + struct = self._get_struct(expr_level) + if struct: + struct.push_node(node) else: self._ast.push_node( module=self._current_module, node=node ) - if len(self._structs) >= 2: - self._structs.pop(-1) - self._current_struct = self._structs[-1] - else: - self._current_struct = None self._detect_struct_start(node, expr_level) diff --git a/fastpy/parser/structure.py b/fastpy/parser/structure.py index 328f552..e3b7534 100644 --- a/fastpy/parser/structure.py +++ b/fastpy/parser/structure.py @@ -2,6 +2,10 @@ class Structure: + """ + Contains information about currently detected structure, such as if condition or function + """ + def __init__(self, node, level: int = 0): self._level = level self._node: NodeWithBody = node @@ -11,3 +15,6 @@ def push_node(self, node: BaseNode): def within_struct(self, level: int): return level == self._level + + def __repr__(self): + return str(self._node) diff --git a/fastpy/parser/validators.py b/fastpy/parser/validators.py index 4925561..ab90332 100644 --- a/fastpy/parser/validators.py +++ b/fastpy/parser/validators.py @@ -1,10 +1,23 @@ """Additional tools for tokens validation""" + from ..lexer import BaseToken, TokenTypes class Validators: + """ + This class is just a wrapper for functions that are used for parsing + """ + @staticmethod def check_token_type(tokens: list[BaseToken], token_index: int, possible_types: list[int | str]) -> bool: + """ + + :param tokens: list of tokens for check + :param token_index: particular token index + :param possible_types: possible types of particular token + :return: some type matches with token name + """ + token_type = tokens[token_index].type if isinstance(possible_types[0], str): @@ -14,11 +27,29 @@ def check_token_type(tokens: list[BaseToken], token_index: int, possible_types: @staticmethod def check_token_name(tokens: list[BaseToken], token_index: int, possible_names: list[str]) -> bool: + """ + + :param tokens: list of 
tokens for check + :param token_index: particular token index + :param possible_names: possible names of particular token + :return: some name matches with token name + """ + token_name = tokens[token_index].name return token_name in possible_names @staticmethod - def check_token_type_presence(tokens: list[BaseToken], required_types: list[str | int | TokenTypes]): + def check_token_type_presence( + tokens: list[BaseToken], + required_types: list[str | int | TokenTypes] + ): + """ + + :param tokens: list of tokens for check + :param required_types: types that must presence in the token list + :return: present all required types + """ + types = tuple(map(lambda t: t.type.name, tokens)) \ if isinstance(required_types[0], str) \ else tuple(map(lambda t: t.type.value, tokens)) @@ -30,6 +61,13 @@ def check_token_type_presence(tokens: list[BaseToken], required_types: list[str @staticmethod def check_token_name_presence(tokens: list[BaseToken], required_names: list[str]): + """ + + :param tokens: list of tokens for check + :param required_names: names that must presence in the token list + :return: present all required names + """ + names = tuple(map(lambda t: t.name, tokens)) for required_name in required_names: if required_name not in names: @@ -38,6 +76,13 @@ def check_token_name_presence(tokens: list[BaseToken], required_names: list[str] @staticmethod def check_token_types(tokens: list[BaseToken], types: list[int]) -> bool: + """ + + :param tokens: list of tokens for check + :param types: list of supposed token types in the same order as the tokens + :return: match types + """ + for token, supposed_type in zip(tokens, types): if supposed_type is None: continue @@ -49,6 +94,13 @@ def check_token_types(tokens: list[BaseToken], types: list[int]) -> bool: @staticmethod def check_token_texts(tokens: list[BaseToken], texts: list[str]) -> bool: + """ + + :param tokens: list of tokens for check + :param texts: list of supposed token texts in the same order as the tokens + :return: match texts + """ + for token, supposed_text in zip(tokens, texts): if supposed_text is None: continue @@ -60,6 +112,13 @@ def check_token_texts(tokens: list[BaseToken], texts: list[str]) -> bool: @staticmethod def check_token_names(tokens: list[BaseToken], names: list[str]) -> bool: + """ + + :param tokens: list of tokens for check + :param names: list of supposed token names in the same order as the tokens + :return: match names + """ + for token, supposed_name in zip(tokens, names): if supposed_name is None: continue @@ -71,8 +130,20 @@ def check_token_names(tokens: list[BaseToken], names: list[str]) -> bool: @staticmethod def check_min_tokens_length(tokens: list[BaseToken], min_length: int): + """ + + :param tokens: list of tokens for check + :param min_length: min length of tokens list + :return: len of tokens >= min length + """ return len(tokens) >= min_length @staticmethod def check_fixed_tokens_length(tokens: list[BaseToken], length: int): + """ + + :param tokens: list of tokens for check + :param length: supposed length of tokens list + :return: len of tokens == length + """ return len(tokens) == length diff --git a/fastpy/semantic_analyzer/__init__.py b/fastpy/semantic_analyzer/__init__.py index f0f560e..79e9664 100644 --- a/fastpy/semantic_analyzer/__init__.py +++ b/fastpy/semantic_analyzer/__init__.py @@ -1,3 +1,7 @@ +""" +This module is responsible for analyzing an Abstract Syntax Tree obtained using the parser. 
+""" + from .analyzers import * __all__ = [ diff --git a/fastpy/semantic_analyzer/analyzers.py b/fastpy/semantic_analyzer/analyzers.py index 7dd715e..337b1bd 100644 --- a/fastpy/semantic_analyzer/analyzers.py +++ b/fastpy/semantic_analyzer/analyzers.py @@ -16,7 +16,12 @@ class BaseAnalyzer(ABC): @abstractmethod def __init__(self, module: Module, - ast: BaseAST): ... + ast: BaseAST): + """ + + :param module: module: the module parameter contains information about the currently processed file + :param ast: Abstract Syntax Tree - output of previous parsing stage + """ @abstractmethod def analyze(self) -> None: @@ -24,7 +29,7 @@ def analyze(self) -> None: class Analyzer(BaseAnalyzer): - """Basic Analyzer of FastPy""" + """Basic Analyzer implementation of FastPy""" def __init__(self, module: Module, diff --git a/fastpy/semantic_analyzer/config.py b/fastpy/semantic_analyzer/config.py index f61063f..4bb65e1 100644 --- a/fastpy/semantic_analyzer/config.py +++ b/fastpy/semantic_analyzer/config.py @@ -1,6 +1,7 @@ +import os + from ..config import JsonConfig, CONFIG_FOLDER from ..filesystem import FileSystem as Fs -import os analyzer_config = JsonConfig( filepath=os.path.join(CONFIG_FOLDER, 'analyzer.json'), @@ -8,7 +9,7 @@ ) ANALYZER_CLASS_PATH: str = analyzer_config['semantic_analyzer_class'] # analyzer class path to import -NODE_ANALYZING: dict = analyzer_config['node_analyzing'] +NODE_ANALYZING: dict = analyzer_config['node_analyzing'] # node analyzing data builtin_config = JsonConfig( filepath=Fs.join(CONFIG_FOLDER, 'builtin.json'), diff --git a/fastpy/semantic_analyzer/node_analyzers.py b/fastpy/semantic_analyzer/node_analyzers.py index 38457f7..8b8cd80 100644 --- a/fastpy/semantic_analyzer/node_analyzers.py +++ b/fastpy/semantic_analyzer/node_analyzers.py @@ -96,6 +96,21 @@ def analyze(self, raise AnalyzingError(f'SemanticError: function with name "{node.identifier.text}" does not exists') +class ElseNodeAnalyzer(BaseNodeAnalyzer): + @staticmethod + def _analyze_body(body: list[BaseNode], analyze_node_clb: callable): + for body_node in body: + analyze_node_clb(body_node) + + def analyze(self, + node: ElseNode, + module: Module, + ast: BaseAST, + analyze_node_clb: callable, + scope: Scope): + self._analyze_body(node.body, analyze_node_clb) + + class WhileNodeAnalyzer(BaseNodeAnalyzer): def _analyze_condition(self): pass diff --git a/fastpy/singleton.py b/fastpy/singleton.py index 8c8c50f..3da1a06 100644 --- a/fastpy/singleton.py +++ b/fastpy/singleton.py @@ -1,3 +1,4 @@ + def singleton(cls): instances = {} diff --git a/fastpy/transpiler/__init__.py b/fastpy/transpiler/__init__.py index 814261d..2091928 100644 --- a/fastpy/transpiler/__init__.py +++ b/fastpy/transpiler/__init__.py @@ -1,3 +1,7 @@ +""" +This module is responsible for converting the Abstract Syntax Tree into C++ source code. +""" + from .transpilers import * from .config import * diff --git a/fastpy/transpiler/code.py b/fastpy/transpiler/code.py index c4f7f98..e6e5ecc 100644 --- a/fastpy/transpiler/code.py +++ b/fastpy/transpiler/code.py @@ -2,6 +2,12 @@ class BaseCode(ABC): + """ + Code interface. + This class is used by transpiler for separation convenience of internal and external code. + Internal code - code in main function, external - outside of main func + """ + @abstractmethod def push_internal(self, code: str, **kwargs): ... 
diff --git a/fastpy/transpiler/config.py b/fastpy/transpiler/config.py
index 882a33c..3a5016e 100644
--- a/fastpy/transpiler/config.py
+++ b/fastpy/transpiler/config.py
@@ -6,25 +6,27 @@
     authoload=True
 )

-TRANSPILER_CLASS_PATH = transpiler_config['transpiler_class']
+TRANSPILER_CLASS_PATH = transpiler_config[
+    'transpiler_class'
+]  # Transpiler classpath to import, by default - fastpy.transpiler.transpilers.Transpiler

-CPP_TEMPLATES_DIR = transpiler_config['cpp_templates_dir']
-CPP_MAIN_TEMPLATE_PATH = transpiler_config['cpp_main_template']
-CPP_TEMPLATE_PATH = transpiler_config['cpp_template']
+CPP_TEMPLATES_DIR = transpiler_config['cpp_templates_dir']  # folder of C++ templates
+CPP_MAIN_TEMPLATE_PATH = transpiler_config['cpp_main_template']  # main C++ template filename
+CPP_TEMPLATE_PATH = transpiler_config['cpp_template']  # module C++ template filename

-NODE_TRANSPILING = transpiler_config['node_transpiling']
+NODE_TRANSPILING = transpiler_config['node_transpiling']  # node transpiling data

 builtin_config = JsonConfig(
     filepath=Fs.join(CONFIG_FOLDER, 'builtin.json'),
     authoload=True
 )

-BUILTIN_TYPES = builtin_config['builtin_types']
-BUILTIN_FUNCTIONS = builtin_config['builtin_functions']
+BUILTIN_TYPES: dict = builtin_config['builtin_types']
+BUILTIN_FUNCTIONS: dict = builtin_config['builtin_functions']

 operators_config = JsonConfig(
     filepath=Fs.join(CONFIG_FOLDER, 'operators.json'),
     authoload=True
 )

-OPERATORS_EQUIVALENTS = operators_config['fastpy_cpp_equivalents']
+OPERATORS_EQUIVALENTS = operators_config['fastpy_cpp_equivalents']  # C++ equivalents of FastPy operators
diff --git a/fastpy/transpiler/node_transpilers.py b/fastpy/transpiler/node_transpilers.py
index f778685..775dc37 100644
--- a/fastpy/transpiler/node_transpilers.py
+++ b/fastpy/transpiler/node_transpilers.py
@@ -6,35 +6,50 @@

 class BaseNodeTranspiler(ABC):
+    """
+    Node Transpiler interface
+    """
+
     @abstractmethod
     def transpile(self,
                   node: BaseNode,
                   transpile_node_clb: callable,
-                  **kwargs) -> BaseCode: ...
+ **extra_data) -> BaseCode: + """ + + :param node: node to transpile + :param transpile_node_clb: callback to transpile node body or arguments or condition + :param extra_data: extra data, transmitted by another node transpiler + :return: + """ @singleton class AssignNodeTranspiler(BaseNodeTranspiler): - def transpile(self, node: AssignNode, transpile_node_clb: callable, **kwargs) -> BaseCode: + def transpile(self, node: AssignNode, transpile_node_clb: callable, **extra_data) -> BaseCode: code = Code() value_node = node.value value = '' if value_node: value = transpile_node_clb( node=value_node, - **kwargs, - type=node.value_type.text if node.value_type else None + **extra_data, + type=node.value_type if node.value_type else None ).internal var_type = '' if node.definition: - var_type = f'{node.value_type.text if node.value_type is not None else "auto"} ' + # var_type = f'{node.value_type.text if node.value_type is not None else "auto"} ' + if node.value_type: + var_type = transpile_node_clb(node.value_type, endl=False, auto_semicolon=False) + else: + var_type = 'auto' code.push_internal( - f'{var_type}' + f'{var_type} ' f'{node.identifier.text}{" = " if value else ""}' f'{value}', - **kwargs + **extra_data ) return code @@ -45,16 +60,17 @@ class ValueNodeTranspiler(BaseNodeTranspiler): def transpile(self, node: ValueNode, transpile_node_clb: callable, - **kwargs) -> BaseCode: + **extra_data) -> BaseCode: code = Code() value = node.value.text if isinstance(value, str): - value = value.replace('"', '\"').replace("'", '"') + if value[0] == "'": + value = '"' + value[1:-1] + '"' code.push_internal( f'{value}', - **kwargs + **extra_data ) return code @@ -64,11 +80,11 @@ class VariableNodeTranspiler(BaseNodeTranspiler): def transpile(self, node: VariableNode, transpile_node_clb: callable, - **kwargs) -> BaseCode: + **extra_data) -> BaseCode: code = Code() code.push_internal( f'{node.identifier.text}', - **kwargs + **extra_data ) return code @@ -93,7 +109,8 @@ def _transpile_body(body: list[BaseNode], transpile_node_clb) -> str: for i, node in enumerate(body): code.push_internal( - transpile_node_clb(node=node, endl=False, auto_semicolon=False).internal, + transpile_node_clb(node=node, ).internal, + endl=False, auto_semicolon=False ) return code.internal @@ -101,12 +118,12 @@ def _transpile_body(body: list[BaseNode], transpile_node_clb) -> str: def transpile(self, node: FuncNode, transpile_node_clb: callable, - **kwargs) -> BaseCode: + **extra_data) -> BaseCode: code = Code() arguments = self._transpile_arguments(node.arguments, transpile_node_clb) body = self._transpile_body(node.body, transpile_node_clb) - return_type = node.return_type.text if node.return_type else 'void' + return_type = node.return_type.identifier.text if node.return_type else 'void' func_code = f'{return_type} {node.identifier.text} ({arguments}){{\n{body}\n}}' code.push_external(func_code) return code @@ -128,10 +145,10 @@ def _transpile_arguments(arguments: list[BaseNode], transpile_node_clb) -> str: def transpile(self, node: CallNode, transpile_node_clb: callable, - **kwargs) -> BaseCode: + **extra_data) -> BaseCode: code = Code() - cast_type = kwargs.get('type') + cast_type: VariableNode = extra_data.get('type') specify_type = False if node.identifier.text == BUILTIN_FUNCTIONS['input']: @@ -139,9 +156,9 @@ def transpile(self, code.push_internal( f'{node.identifier.text}' - f'{("<" + cast_type + ">") if specify_type else ""}' + f'{("<" + cast_type.identifier.text + ">") if specify_type else ""}' 
f'({self._transpile_arguments(node.arguments, transpile_node_clb)})', - **kwargs + **extra_data ) return code @@ -159,7 +176,7 @@ def _transpile_operator(operator: str) -> str: def transpile(self, node: LogicOpNode | BinOpNode, transpile_node_clb: callable, - **kwargs) -> BaseCode: + **extra_data) -> BaseCode: code = Code() left_operand = transpile_node_clb(node.left_operand, auto_semicolon=False, endl=False) if not node.right_operand: @@ -171,7 +188,7 @@ def transpile(self, if node.in_brackets: match_expr = '(' + match_expr + ')' - code.push_internal(match_expr, **kwargs) + code.push_internal(match_expr, **extra_data) return code @@ -195,7 +212,7 @@ def _transpile_body(body: list[BaseNode], transpile_node_clb) -> str: def transpile(self, node: IfNode, transpile_node_clb: callable, - **kwargs) -> BaseCode: + **extra_data) -> BaseCode: code = Code() condition = self._transpile_condition(node.condition, transpile_node_clb) body = self._transpile_body(node.body, transpile_node_clb) @@ -205,6 +222,7 @@ def transpile(self, auto_semicolon=False, endl=True ) + return code @@ -224,7 +242,7 @@ def _transpile_body(body: list[BaseNode], transpile_node_clb) -> str: def transpile(self, node: ElseNode, transpile_node_clb: callable, - **kwargs) -> BaseCode: + **extra_data) -> BaseCode: code = Code() body = self._transpile_body(node.body, transpile_node_clb) @@ -243,7 +261,9 @@ def _transpile_body(body: list[BaseNode], transpile_node_clb) -> str: for i, node in enumerate(body): code.push_internal( - transpile_node_clb(node=node, endl=False, auto_semicolon=False).internal, + transpile_node_clb(node=node).internal, + auto_semicolon=False, + endl=True ) return code.internal @@ -255,8 +275,19 @@ def _transpile_condition(node: LogicOpNode, transpile_node_clb) -> str: def transpile(self, node: WhileNode, transpile_node_clb: callable, - **kwargs) -> BaseCode: + **extra_data) -> BaseCode: code = Code() code.push_internal(f'while ({self._transpile_condition(node.condition, transpile_node_clb)})' f'\n{{\n{self._transpile_body(node.body, transpile_node_clb)}\n}}\n') return code + + +class ReturnNodeTranspiler(BaseNodeTranspiler): + def transpile(self, + node: ReturnNode, + transpile_node_clb: callable, + **extra_data) -> BaseCode: + code = Code() + + code.push_internal(f'return {transpile_node_clb(node.node).internal if node.node else ""}') + return code diff --git a/fastpy/transpiler/transpilers.py b/fastpy/transpiler/transpilers.py index 3e96d95..8ae6a93 100644 --- a/fastpy/transpiler/transpilers.py +++ b/fastpy/transpiler/transpilers.py @@ -14,7 +14,14 @@ class BaseTranspiler(ABC): """Transpailer interface""" @abstractmethod - def __init__(self, module: Module, ast: BaseAST): ... 
+ def __init__(self, + module: Module, + ast: BaseAST): + """ + + :param module: module: the module parameter contains information about the currently processed file + :param ast: Abstract Syntax Tree - output of previous parsing stage + """ @abstractmethod def transpile(self) -> str: @@ -22,7 +29,7 @@ def transpile(self) -> str: class Transpiler(BaseTranspiler): - """Basic transpailer of FastPy""" + """Basic Transpailer implementation of FastPy""" @Logger.info(pattern='Transpiler created ({module})') def __init__(self, module: Module, ast: BaseAST): @@ -59,12 +66,14 @@ def _transpile_node(self, node: BaseNode, **kwargs) -> BaseCode: raise TranspilingError(f'You need to specify the node transpiler for "{node.__class__.__name__}"' f' in "transpiler.json" config file') - return transpiler.transpile( + code = transpiler.transpile( node=node, transpile_node_clb=self._transpile_node, **kwargs ) + return code + def _transpile_import(self, node: CallNode): for importing_file in node.arguments: if isinstance(importing_file, ValueNode): diff --git a/fastpy_build/bin/main.exe b/fastpy_build/bin/main.exe index d2e2e79..c4af839 100644 Binary files a/fastpy_build/bin/main.exe and b/fastpy_build/bin/main.exe differ diff --git a/fastpy_build/src/main.cpp b/fastpy_build/src/main.cpp index a2ab59b..16d01a4 100644 --- a/fastpy_build/src/main.cpp +++ b/fastpy_build/src/main.cpp @@ -5,15 +5,16 @@ #include"include/builtin.hpp" -int main() { - auto i = 20; - while (i > 0) { - i = i - 1; - log(i, true); - }; + + + + + +int main(){ + auto abc; } \ No newline at end of file diff --git a/main.fpy b/main.fpy index 113fffe..afa1b63 100644 --- a/main.fpy +++ b/main.fpy @@ -1,5 +1,124 @@ -i = 20 +#a: str = ' ' +#b: str = ' ' +#c: str = ' ' +#d: str = ' ' +#e: str = ' ' +#f: str = ' ' +#g: str = ' ' +#h: str = ' ' +#i: str = ' ' +# +#who_move: str = 'X' +#continue_game: bool = true +# +# +#fun next_player(who_move: str) -> str: +# if who_move == 'X': +# return 'O' +# else: +# return 'X' +# +# +# +# +#fun draw_vert_border(endl: bool = false): +# log('|', endl) +# +#fun draw_horz_border(): +# log('+-+-+-+', true) +# +#fun draw_row(a: str, b: str, c: str): +# draw_vert_border() +# log(a) +# draw_vert_border() +# log(b) +# draw_vert_border() +# log(c) +# draw_vert_border(true) +# +#fun draw(a: str, b: str, c: str, d: str, e: str, f: str, g: str, h: str, i: str): +# draw_horz_border() +# draw_row(a, b, c) +# draw_horz_border() +# draw_row(d, e, f) +# draw_horz_border() +# draw_row(g, h, i) +# draw_horz_border() +# +# +# +#fun ask_cell(who_move: str) -> int: +# log(who_move) +# cell: int = input(' choose cell >>') +# return cell +# +#fun check_three(a: str, b: str, c: str) -> bool: +# return a == b and b == c and c != ' ' +# +#fun check_game_over(a: str, b: str, c: str, d: str, e: str, f: str, g: str, h: str, i: str) -> bool: +# r1 = check_three(a, b, c) +# r2 = check_three(a, d, g) +# r3 = check_three(a, e, i) +# r4 = check_three(b, e, h) +# r5 = check_three(d, e, f) +# r6 = check_three(g, e, c) +# r7 = check_three(c, f, i) +# r8 = check_three(g, h , i) +# return r1 or r2 or r3 or r4 or r5 or r6 or r7 or r8 +# +#fun check_draw(a: str, b: str, c: str, d: str, e: str, f: str, g: str, h: str, i: str) -> bool: +# return a != ' ' and b != ' ' and c != ' ' and d != ' ' and e != ' ' and f != ' ' and g != ' ' and h != ' ' and i != ' ' +# +#while continue_game: +# draw(a, b, c, d, e, f, g, h, i) +# cell = ask_cell(who_move) +# +# if cell == 1: +# a = who_move +# elif cell == 2: +# b = who_move +# elif cell == 3: +# c = who_move +# 
elif cell == 4: +# d = who_move +# elif cell == 5: +# e = who_move +# elif cell == 6: +# f = who_move +# elif cell == 7: +# g = who_move +# elif cell == 8: +# h = who_move +# elif cell == 9: +# i = who_move +# else: +# log_error('Enter number in range: 1 - 9!', true) +# who_move = next_player(who_move) +# +# game_over = check_game_over(a, b, c, d, e, f, g, h, i) +# drw = check_draw(a, b, c, d, e, f, g, h, i) +# +# if drw: +# draw(a, b, c, d, e, f, g, h, i) +# log_info('Draw!') +# continue_game = false +# elif game_over: +# draw(a, b, c, d, e, f, g, h, i) +# log_info('Game Over! ') +# log_info('Won: ') +# log_info(who_move) +# continue_game = false +# +# +# +# who_move = next_player(who_move) +# +# -while i > 0: - i = i - 1 - log(i, true) \ No newline at end of file +abc: list = [1, 2, 3, 4, 5, 6, 7, 8] + + + +#fun templated_func(arg: auto) -> T: +# arg_copy: T = arg +# return arg_copy \ No newline at end of file diff --git a/main.py b/main.py index c5d62d7..6c61148 100644 --- a/main.py +++ b/main.py @@ -4,12 +4,13 @@ def make_action(args: argparse.Namespace): + """Performs actions such as transpiling or compiling depending on the input""" if args.translate: fastpy.TranspileAPI(**vars(args)).transpile() def setup_argparse() -> argparse.ArgumentParser: - """Configuring the console argument parser""" + """Configures the console argument parser""" argparser = argparse.ArgumentParser(**ARGPARSE_CONFIG['parser']) for argument_config in ARGPARSE_CONFIG['arguments']:
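
A note on the ArgumentsParser rewrite in fastpy/parser/node_parsers.py above: the parser now keeps a bracket depth in opened_parenthesis_counter, so a comma inside a nested call no longer splits the outer argument list, and a closing ")" (or ">" for chevron type lists, which is what lets ComplexTypeNodeParser reuse the same parser) only terminates the list once the depth returns to zero. Below is a minimal standalone sketch of that idea under simplified assumptions: the (kind, text) tuples, the split_arguments name, and the OPENERS/CLOSERS sets are illustrative only and do not appear in the codebase, where tokens are BaseToken objects and the parse callback builds AST nodes rather than returning token lists.

    # Hypothetical standalone sketch of depth-tracked argument splitting,
    # mirroring the reworked ArgumentsParser. Tokens simplified to (kind, text).

    OPENERS = {"start_parenthesis"}
    CLOSERS = {"end_parenthesis", "end_chevrons"}


    def split_arguments(tokens):
        """Split an argument list on top-level commas only.

        `tokens` is everything after the opening bracket of a call such as
        f(g(a, b), c); that bracket is already consumed, so the depth starts
        at 1, mirroring opened_parenthesis_counter in the real parser.
        """
        arguments, current, depth = [], [], 1
        for token in tokens:
            kind = token[0]
            if kind in OPENERS:
                depth += 1                 # entering a nested call
            elif kind in CLOSERS:
                depth -= 1
                if depth == 0:             # the call itself just closed
                    if current:
                        arguments.append(current)
                    return arguments
            elif kind == "comma" and depth == 1:
                arguments.append(current)  # top-level comma: next argument
                current = []
                continue
            current.append(token)          # nested commas/brackets are kept
        raise SyntaxError("close bracket expected")


    # The tail of `f(g(a, b), c)`: two top-level arguments, one nested call.
    tail = [
        ("identifier", "g"), ("start_parenthesis", "("),
        ("identifier", "a"), ("comma", ","), ("identifier", "b"),
        ("end_parenthesis", ")"), ("comma", ","), ("identifier", "c"),
        ("end_parenthesis", ")"),
    ]
    for argument in split_arguments(tail):
        print([text for _, text in argument])
    # ['g', '(', 'a', ',', 'b', ')']
    # ['c']

The design choice worth noting is that the final closing bracket is consumed but not appended, which matches how the real parser returns as soon as the counter hits zero and is why an empty argument list (a lone ")") yields no arguments at all.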