From 719b598e98baff2cf5ea34bb6d36eddd8fd29f38 Mon Sep 17 00:00:00 2001 From: 简律纯 Date: Thu, 28 Sep 2023 00:09:06 +0800 Subject: feat(src): delete `src` dir & rename `psi` dir feat(frame): 添加`Lib` `Grammar` `Modules` 目录 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- psi/Grammar/Token | 72 ++++++++++++ psi/Lib/IOStream/__init__.psi | 0 psi/Modules/asyncio/__init__.py | 0 psi/__init__.py | 73 ++++++++++++ psi/exception.py | 36 ++++++ psi/execution.py | 46 ++++++++ psi/interpreter.py | 75 ++++++++++++ psi/lexer.py | 249 ++++++++++++++++++++++++++++++++++++++++ psi/mathmatics.py | 0 psi/parsers.py | 144 +++++++++++++++++++++++ psi/type.py | 0 src/psi/__init__.py | 73 ------------ src/psi/exception.py | 36 ------ src/psi/execution.py | 46 -------- src/psi/interpreter.py | 75 ------------ src/psi/lexer.py | 249 ---------------------------------------- src/psi/mathmatics.py | 0 src/psi/parsers.py | 144 ----------------------- src/psi/type.py | 0 19 files changed, 695 insertions(+), 623 deletions(-) create mode 100644 psi/Grammar/Token create mode 100644 psi/Lib/IOStream/__init__.psi create mode 100644 psi/Modules/asyncio/__init__.py create mode 100644 psi/__init__.py create mode 100644 psi/exception.py create mode 100644 psi/execution.py create mode 100644 psi/interpreter.py create mode 100644 psi/lexer.py create mode 100644 psi/mathmatics.py create mode 100644 psi/parsers.py create mode 100644 psi/type.py delete mode 100644 src/psi/__init__.py delete mode 100644 src/psi/exception.py delete mode 100644 src/psi/execution.py delete mode 100644 src/psi/interpreter.py delete mode 100644 src/psi/lexer.py delete mode 100644 src/psi/mathmatics.py delete mode 100644 src/psi/parsers.py delete mode 100644 src/psi/type.py diff --git a/psi/Grammar/Token b/psi/Grammar/Token new file mode 100644 index 0000000..f73152f --- /dev/null +++ b/psi/Grammar/Token @@ -0,0 +1,72 @@ +ENDMARKER +NAME +NUMBER +STRING +NEWLINE +INDENT +DEDENT + +LPAR '(' +RPAR ')' +LSQB '[' +RSQB ']' +COLON ':' +COMMA ',' +SEMI ';' +PLUS '+' +MINUS '-' +STAR '*' +SLASH '/' +VBAR '|' +AMPER '&' +LESS '<' +GREATER '>' +EQUAL '=' +DOT '.' +PERCENT '%' +LBRACE '{' +RBRACE '}' +EQEQUAL '==' +NOTEQUAL '!=' +LESSEQUAL '<=' +GREATEREQUAL '>=' +TILDE '~' +CIRCUMFLEX '^' +LEFTSHIFT '<<' +RIGHTSHIFT '>>' +DOUBLESTAR '**' +PLUSEQUAL '+=' +MINEQUAL '-=' +STAREQUAL '*=' +SLASHEQUAL '/=' +PERCENTEQUAL '%=' +AMPEREQUAL '&=' +VBAREQUAL '|=' +CIRCUMFLEXEQUAL '^=' +LEFTSHIFTEQUAL '<<=' +RIGHTSHIFTEQUAL '>>=' +DOUBLESTAREQUAL '**=' +DOUBLESLASH '//' +DOUBLESLASHEQUAL '//=' +AT '@' +ATEQUAL '@=' +RARROW '->' +ELLIPSIS '...' +COLONEQUAL ':=' +EXCLAMATION '!' + +OP +AWAIT +ASYNC +TYPE_IGNORE +TYPE_COMMENT +SOFT_KEYWORD +FSTRING_START +FSTRING_MIDDLE +FSTRING_END +COMMENT +NL +ERRORTOKEN + +# These aren't used by the C tokenizer but are needed for tokenize.py +ENCODING \ No newline at end of file diff --git a/psi/Lib/IOStream/__init__.psi b/psi/Lib/IOStream/__init__.psi new file mode 100644 index 0000000..e69de29 diff --git a/psi/Modules/asyncio/__init__.py b/psi/Modules/asyncio/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/psi/__init__.py b/psi/__init__.py new file mode 100644 index 0000000..4bb4651 --- /dev/null +++ b/psi/__init__.py @@ -0,0 +1,73 @@ +"""Psi +@TODO 词法分析器 +@BODY 似乎要写的还蛮多的,所以先写几个TODO List +""" + +__all__ = ['psi'] + +from psi.execution import Execution + +class Psi: + """ + A class representing a Psi object. + + Args: + input: The input value for the Psi object. 
+ + Returns: + None + + Example: + ```python + obj = Psi("example") + ``` + """ + + def __init__(self, input): + """ + Initializes a Psi object. + + Args: + input: The input value for the Psi object. + + Returns: + None + """ + self.input = input + self.execution = Execution(input) + self.result = None + + def execute(self): + """ + Executes the Psi object. + + Returns: + The result of the execution. + """ + self.result = self.execution.execute() + return self.result + + def get_result(self): + """ + Retrieves the result of the Psi object. + + Returns: + The result of the execution. + """ + return self.result + + def set_input(self, input): + """ + Sets the input value for the Psi object. + + Args: + input: The new input value. + + Returns: + None + """ + self.input = input + self.execution = Execution(input) + self.result = None + + diff --git a/psi/exception.py b/psi/exception.py new file mode 100644 index 0000000..a8b7201 --- /dev/null +++ b/psi/exception.py @@ -0,0 +1,36 @@ +class PsiException(Exception): + """ + An exception class for Psi-specific exceptions. + + This class inherits from the built-in `Exception` class. + + Example: + ```python + raise PsiException("An error occurred in the Psi code.") + ``` + """ + + +class ValueError(PsiException): + """ + An exception class for value-related errors in Psi code. + + This class inherits from the `PsiException` class. + + Example: + ```python + raise ValueError("Invalid value encountered in the Psi code.") + ``` + """ + +class GrammarError(PsiException): + """ + An exception class for grammar-related errors in Psi code. + + This class inherits from the `PsiException` class. + + Example: + ```python + raise GrammarError("Invalid grammar encountered in the Psi code.") + ``` + """ diff --git a/psi/execution.py b/psi/execution.py new file mode 100644 index 0000000..0abdf2c --- /dev/null +++ b/psi/execution.py @@ -0,0 +1,46 @@ +from psi.parsers import Parser +from psi.interpreter import Interpreter + +__all__ = ['Execution'] + +class Execution: + """ + A class representing the execution of Psi code. + + Args: + input: The input code to be executed. + + Returns: + None + + Example: + ```python + execution = Execution("print('Hello, World!')") + execution.execute() + ``` + """ + + def __init__(self, input): + """ + Initializes an Execution object. + + Args: + input: The input code to be executed. + + Returns: + None + """ + self.input = input + + def execute(self): + """ + Executes the input code. + + Returns: + The result of the execution. + """ + parser = Parser(self.input) + ast = parser.parse() + + interpreter = Interpreter(ast) + return interpreter.interpret() diff --git a/psi/interpreter.py b/psi/interpreter.py new file mode 100644 index 0000000..8aa8fad --- /dev/null +++ b/psi/interpreter.py @@ -0,0 +1,75 @@ +from psi.lexer import Token + + +__all__ = ['Interpreter'] + +class Interpreter: + """ + A class representing an interpreter for Psi code. + + Args: + ast: The abstract syntax tree (AST) of the code to be interpreted. + + Returns: + None + + Example: + ```python + interpreter = Interpreter(ast) + interpreter.interpret() + ``` + """ + + def __init__(self, ast): + """ + Initializes an Interpreter object. + + Args: + ast: The abstract syntax tree (AST) of the code to be interpreted. + + Returns: + None + """ + self.ast = ast + + def interpret(self): + """ + Interprets the code represented by the AST. + + Returns: + The result of the interpretation. 
+ """ + return self.interpret_expr(self.ast) + + def interpret_expr(self, node): + """ + Interprets an expression node in the AST. + + Args: + node: The expression node to be interpreted. + + Returns: + The result of the interpretation. + """ + if isinstance(node, Token): + return node.value + elif isinstance(node, list): + for expr in node: + result = self.interpret_expr(expr) + if result is not None: + return result + + def interpret_condition(self, node): + """ + Interprets a condition node in the AST. + + Args: + node: The condition node to be interpreted. + + Returns: + The result of the interpretation. + """ + variable = self.interpret_expr(node[0]) + value = self.interpret_expr(node[2]) + + return variable == value diff --git a/psi/lexer.py b/psi/lexer.py new file mode 100644 index 0000000..d2c6f68 --- /dev/null +++ b/psi/lexer.py @@ -0,0 +1,249 @@ +""" +Token and Lexer Documentation +============================= + +This module provides the `Token` and `Lexer` classes for tokenizing input strings. + +Token Class +----------- + +The `Token` class represents a token with a type, value, and position in the input string. It is a subclass of the built-in `dict` class. + +Attributes: +- `type` (str): The type of the token. +- `value` (str or int): The value of the token. +- `position` (int): The position of the token in the input string. + +Methods: +- `__getattr__(self, name)`: Retrieves the value of an attribute by name. Raises an `AttributeError` if the attribute does not exist. + +Lexer Class +----------- + +The `Lexer` class tokenizes an input string using a set of rules. + +Attributes: +- `input` (str): The input string to tokenize. +- `position` (int): The current position in the input string. +- `tokens` (list): The list of tokens generated by the lexer. + +Methods: +- `get_next_token(self)`: Retrieves the next token from the input string. +- `__iter__(self)`: Returns an iterator over the tokens. +- `__getitem__(self, index)`: Retrieves a token by index. +- `__len__(self)`: Returns the number of tokens. + +Usage Example +------------- + +```python +lexer = Lexer(''' +@newMessage: { + ? message == 1: reply: hi + ! reply: no +} +''') + +token = lexer.get_next_token() +while token['type'] != 'EOF': + print(f'Type: {token["type"]}, Value: {token["value"]}, Position: {token["position"]}') + token = lexer.get_next_token() + +print("\nAll tokens:") +print([t['type'] for t in lexer]) +""" + +__all__ = ['Token', 'Lexer'] + +class Token(dict): + """ + A class representing a token in the lexer. + + Args: + type: The type of the token. + value: The value of the token. + position: The position of the token. + + Returns: + None + + Example: + ```python + token = Token("identifier", "x", (1, 5)) + ``` + """ + + def __init__(self, type, value, position): + """ + Initializes a Token object. + + Args: + type: The type of the token. + value: The value of the token. + position: The position of the token. + + Returns: + None + """ + super().__init__(type=type, value=value, position=position) + + def __getattr__(self, name): + """ + Retrieves the value of an attribute from the Token object. + + Args: + name: The name of the attribute. + + Returns: + The value of the attribute. + + Raises: + AttributeError: Raised when the attribute does not exist. + """ + try: + return self[name] + except KeyError: + raise AttributeError(f"'Token' object has no attribute '{name}'") + + +class Lexer: + """ + A class representing a lexer for Psi code. + + Args: + input: The input code to be lexed. 
+ + Returns: + None + + Example: + ```python + lexer = Lexer("x = 10") + for token in lexer: + print(token) + ``` + """ + def __init__(self, input): + """ + Initializes a Lexer object. + + Args: + input: The input code to be lexed. + + Returns: + None + """ + self.input = input + self.position = 0 + self.tokens = [] + + def get_next_token(self): + """ + Retrieves the next token from the input code. + + Returns: + The next token. + + Raises: + Exception: Raised when an unknown character is encountered. + """ + while self.position < len(self.input): + current_char = self.input[self.position] + + if current_char.isspace(): + self.position += 1 + continue + + if current_char == '#': + self.position += 1 + while (self.position < len(self.input) and + self.input[self.position] != '\n'): + self.position += 1 + continue + + if current_char == '/' and self.position + 1 < len(self.input) and self.input[self.position + 1] == '*': + self.position += 2 + while (self.position < len(self.input) - 1 and + (self.input[self.position] != '*' or self.input[self.position + 1] != '/')): + self.position += 1 + if self.position < len(self.input) - 1: + self.position += 2 + continue + + if current_char.isalpha(): + start_position = self.position + while (self.position < len(self.input) and + self.input[self.position].isalnum()): + self.position += 1 + token = Token('IDENTIFIER', self.input[start_position:self.position], start_position) + self.tokens.append(token) + return token + + if current_char.isdigit(): + start_position = self.position + while (self.position < len(self.input) and + self.input[self.position].isdigit()): + self.position += 1 + token = Token('INTEGER', int(self.input[start_position:self.position]), start_position) + self.tokens.append(token) + return token + + if current_char in {'<', '>', '=', '!', '&', '|', '@'}: + if (self.position + 1 < len(self.input) and + self.input[self.position + 1] in {'=', '&', '|'}): + token = Token('OPERATOR', current_char + self.input[self.position + 1], self.position) + self.position += 2 + else: + token = Token('OPERATOR', current_char, self.position) + self.position += 1 + self.tokens.append(token) + return token + + if current_char in {'{', '}', '(', ')', '[', ']', ';', ',', '.', ':'}: + token = Token('SEPARATOR', current_char, self.position) + self.position += 1 + self.tokens.append(token) + return token + + if current_char in {'?', '!', '|'}: + token = Token('CONTROL', current_char, self.position) + self.position += 1 + self.tokens.append(token) + return token + + self.position += 1 + raise Exception(f'Unknown character: {current_char}') + + token = Token('EOF', None, self.position) + self.tokens.append(token) + return token + + def __iter__(self): + """ + Returns an iterator over the tokens. + + Returns: + An iterator over the tokens. + """ + return iter(self.tokens) + + def __getitem__(self, index): + """ + Retrieves the token at the specified index. + + Args: + index: The index of the token. + + Returns: + The token at the specified index. + """ + return self.tokens[index] + + def __len__(self): + """ + Returns the number of tokens. + + Returns: + The number of tokens. 
+ """ + return len(self.tokens) \ No newline at end of file diff --git a/psi/mathmatics.py b/psi/mathmatics.py new file mode 100644 index 0000000..e69de29 diff --git a/psi/parsers.py b/psi/parsers.py new file mode 100644 index 0000000..f68f95f --- /dev/null +++ b/psi/parsers.py @@ -0,0 +1,144 @@ +from psi.lexer import Lexer, Token + + +__all__ = ['Parser'] + +class Parser: + """ + A class representing a parser for Psi code. + + Args: + input: The input code to be parsed. + + Returns: + None + + Example: + ```python + parser = Parser(input) + parser.parse() + ``` + """ + + def __init__(self, input): + """ + Initializes a Parser object. + + Args: + input: The input code to be parsed. + + Returns: + None + """ + self.lexer = Lexer(input) + self.tokens = iter(self.lexer) + self.current_token = next(self.tokens) + + def parse(self): + """ + Parses the input code. + + Returns: + The result of the parsing. + """ + return self.parse_expr() + + def parse_expr(self): + """ + Parses an expression in the input code. + + Returns: + The result of the parsing. + """ + token = self.current_token + if token.value == '?': + self.eat('?') + + condition = self.parse_condition() + + self.eat(':') + + if condition: + result = self.parse_reply() + else: + result = None + + return result + + def parse_condition(self): + """ + Parses a condition in the input code. + + Returns: + The result of the parsing. + """ + variable = self.parse_variable() + self.eat('==') + value = self.parse_value() + + return variable == value + + def parse_variable(self): + """ + Parses a variable in the input code. + + Returns: + The result of the parsing. + """ + token = self.current_token + self.eat('IDENTIFIER') + return token.value + + def parse_value(self): + """ + Parses a value in the input code. + + Returns: + The result of the parsing. + + Raises: + Exception: Raised when an invalid value is encountered. + """ + token = self.current_token + if token.type == 'INTEGER': + self.eat('INTEGER') + return token.value + else: + raise Exception(f'Invalid value: {token.value}') + + def parse_reply(self): + """ + Parses a reply in the input code. + + Returns: + The result of the parsing. + + Raises: + Exception: Raised when an invalid reply is encountered. + """ + self.eat('reply') + self.eat(':') + + token = self.current_token + if token.type != 'SEPARATOR': + raise Exception(f'Invalid reply: {token.value}') + + return token.value + + def eat(self, expected_type): + """ + Consumes the current token if it matches the expected type. + + Args: + expected_type: The expected type of the token. + + Returns: + None + + Raises: + Exception: Raised when an unexpected token is encountered. + """ + if self.current_token.type == expected_type: + self.current_token = next(self.tokens) + else: + raise Exception(f'Unexpected token: {self.current_token.value}') diff --git a/psi/type.py b/psi/type.py new file mode 100644 index 0000000..e69de29 diff --git a/src/psi/__init__.py b/src/psi/__init__.py deleted file mode 100644 index 4bb4651..0000000 --- a/src/psi/__init__.py +++ /dev/null @@ -1,73 +0,0 @@ -"""Psi -@TODO 词法分析器 -@BODY 似乎要写的还蛮多的,所以先写几个TODO List -""" - -__all__ = ['psi'] - -from psi.execution import Execution - -class Psi: - """ - A class representing a Psi object. - - Args: - input: The input value for the Psi object. - - Returns: - None - - Example: - ```python - obj = Psi("example") - ``` - """ - - def __init__(self, input): - """ - Initializes a Psi object. - - Args: - input: The input value for the Psi object. 
- - Returns: - None - """ - self.input = input - self.execution = Execution(input) - self.result = None - - def execute(self): - """ - Executes the Psi object. - - Returns: - The result of the execution. - """ - self.result = self.execution.execute() - return self.result - - def get_result(self): - """ - Retrieves the result of the Psi object. - - Returns: - The result of the execution. - """ - return self.result - - def set_input(self, input): - """ - Sets the input value for the Psi object. - - Args: - input: The new input value. - - Returns: - None - """ - self.input = input - self.execution = Execution(input) - self.result = None - - diff --git a/src/psi/exception.py b/src/psi/exception.py deleted file mode 100644 index a8b7201..0000000 --- a/src/psi/exception.py +++ /dev/null @@ -1,36 +0,0 @@ -class PsiException(Exception): - """ - An exception class for Psi-specific exceptions. - - This class inherits from the built-in `Exception` class. - - Example: - ```python - raise PsiException("An error occurred in the Psi code.") - ``` - """ - - -class ValueError(PsiException): - """ - An exception class for value-related errors in Psi code. - - This class inherits from the `PsiException` class. - - Example: - ```python - raise ValueError("Invalid value encountered in the Psi code.") - ``` - """ - -class GrammarError(PsiException): - """ - An exception class for grammar-related errors in Psi code. - - This class inherits from the `PsiException` class. - - Example: - ```python - raise GrammarError("Invalid grammar encountered in the Psi code.") - ``` - """ diff --git a/src/psi/execution.py b/src/psi/execution.py deleted file mode 100644 index 0abdf2c..0000000 --- a/src/psi/execution.py +++ /dev/null @@ -1,46 +0,0 @@ -from psi.parsers import Parser -from psi.interpreter import Interpreter - -__all__ = ['Execution'] - -class Execution: - """ - A class representing the execution of Psi code. - - Args: - input: The input code to be executed. - - Returns: - None - - Example: - ```python - execution = Execution("print('Hello, World!')") - execution.execute() - ``` - """ - - def __init__(self, input): - """ - Initializes an Execution object. - - Args: - input: The input code to be executed. - - Returns: - None - """ - self.input = input - - def execute(self): - """ - Executes the input code. - - Returns: - The result of the execution. - """ - parser = Parser(self.input) - ast = parser.parse() - - interpreter = Interpreter(ast) - return interpreter.interpret() diff --git a/src/psi/interpreter.py b/src/psi/interpreter.py deleted file mode 100644 index 8aa8fad..0000000 --- a/src/psi/interpreter.py +++ /dev/null @@ -1,75 +0,0 @@ -from psi.lexer import Token - - -__all__ = ['Interpreter'] - -class Interpreter: - """ - A class representing an interpreter for Psi code. - - Args: - ast: The abstract syntax tree (AST) of the code to be interpreted. - - Returns: - None - - Example: - ```python - interpreter = Interpreter(ast) - interpreter.interpret() - ``` - """ - - def __init__(self, ast): - """ - Initializes an Interpreter object. - - Args: - ast: The abstract syntax tree (AST) of the code to be interpreted. - - Returns: - None - """ - self.ast = ast - - def interpret(self): - """ - Interprets the code represented by the AST. - - Returns: - The result of the interpretation. - """ - return self.interpret_expr(self.ast) - - def interpret_expr(self, node): - """ - Interprets an expression node in the AST. - - Args: - node: The expression node to be interpreted. - - Returns: - The result of the interpretation. 
- """ - if isinstance(node, Token): - return node.value - elif isinstance(node, list): - for expr in node: - result = self.interpret_expr(expr) - if result is not None: - return result - - def interpret_condition(self, node): - """ - Interprets a condition node in the AST. - - Args: - node: The condition node to be interpreted. - - Returns: - The result of the interpretation. - """ - variable = self.interpret_expr(node[0]) - value = self.interpret_expr(node[2]) - - return variable == value diff --git a/src/psi/lexer.py b/src/psi/lexer.py deleted file mode 100644 index d2c6f68..0000000 --- a/src/psi/lexer.py +++ /dev/null @@ -1,249 +0,0 @@ -""" -Token and Lexer Documentation -============================= - -This module provides the `Token` and `Lexer` classes for tokenizing input strings. - -Token Class ------------ - -The `Token` class represents a token with a type, value, and position in the input string. It is a subclass of the built-in `dict` class. - -Attributes: -- `type` (str): The type of the token. -- `value` (str or int): The value of the token. -- `position` (int): The position of the token in the input string. - -Methods: -- `__getattr__(self, name)`: Retrieves the value of an attribute by name. Raises an `AttributeError` if the attribute does not exist. - -Lexer Class ------------ - -The `Lexer` class tokenizes an input string using a set of rules. - -Attributes: -- `input` (str): The input string to tokenize. -- `position` (int): The current position in the input string. -- `tokens` (list): The list of tokens generated by the lexer. - -Methods: -- `get_next_token(self)`: Retrieves the next token from the input string. -- `__iter__(self)`: Returns an iterator over the tokens. -- `__getitem__(self, index)`: Retrieves a token by index. -- `__len__(self)`: Returns the number of tokens. - -Usage Example -------------- - -```python -lexer = Lexer(''' -@newMessage: { - ? message == 1: reply: hi - ! reply: no -} -''') - -token = lexer.get_next_token() -while token['type'] != 'EOF': - print(f'Type: {token["type"]}, Value: {token["value"]}, Position: {token["position"]}') - token = lexer.get_next_token() - -print("\nAll tokens:") -print([t['type'] for t in lexer]) -""" - -__all__ = ['Token', 'Lexer'] - -class Token(dict): - """ - A class representing a token in the lexer. - - Args: - type: The type of the token. - value: The value of the token. - position: The position of the token. - - Returns: - None - - Example: - ```python - token = Token("identifier", "x", (1, 5)) - ``` - """ - - def __init__(self, type, value, position): - """ - Initializes a Token object. - - Args: - type: The type of the token. - value: The value of the token. - position: The position of the token. - - Returns: - None - """ - super().__init__(type=type, value=value, position=position) - - def __getattr__(self, name): - """ - Retrieves the value of an attribute from the Token object. - - Args: - name: The name of the attribute. - - Returns: - The value of the attribute. - - Raises: - AttributeError: Raised when the attribute does not exist. - """ - try: - return self[name] - except KeyError: - raise AttributeError(f"'Token' object has no attribute '{name}'") - - -class Lexer: - """ - A class representing a lexer for Psi code. - - Args: - input: The input code to be lexed. - - Returns: - None - - Example: - ```python - lexer = Lexer("x = 10") - for token in lexer: - print(token) - ``` - """ - def __init__(self, input): - """ - Initializes a Lexer object. - - Args: - input: The input code to be lexed. 
- - Returns: - None - """ - self.input = input - self.position = 0 - self.tokens = [] - - def get_next_token(self): - """ - Retrieves the next token from the input code. - - Returns: - The next token. - - Raises: - Exception: Raised when an unknown character is encountered. - """ - while self.position < len(self.input): - current_char = self.input[self.position] - - if current_char.isspace(): - self.position += 1 - continue - - if current_char == '#': - self.position += 1 - while (self.position < len(self.input) and - self.input[self.position] != '\n'): - self.position += 1 - continue - - if current_char == '/' and self.position + 1 < len(self.input) and self.input[self.position + 1] == '*': - self.position += 2 - while (self.position < len(self.input) - 1 and - (self.input[self.position] != '*' or self.input[self.position + 1] != '/')): - self.position += 1 - if self.position < len(self.input) - 1: - self.position += 2 - continue - - if current_char.isalpha(): - start_position = self.position - while (self.position < len(self.input) and - self.input[self.position].isalnum()): - self.position += 1 - token = Token('IDENTIFIER', self.input[start_position:self.position], start_position) - self.tokens.append(token) - return token - - if current_char.isdigit(): - start_position = self.position - while (self.position < len(self.input) and - self.input[self.position].isdigit()): - self.position += 1 - token = Token('INTEGER', int(self.input[start_position:self.position]), start_position) - self.tokens.append(token) - return token - - if current_char in {'<', '>', '=', '!', '&', '|', '@'}: - if (self.position + 1 < len(self.input) and - self.input[self.position + 1] in {'=', '&', '|'}): - token = Token('OPERATOR', current_char + self.input[self.position + 1], self.position) - self.position += 2 - else: - token = Token('OPERATOR', current_char, self.position) - self.position += 1 - self.tokens.append(token) - return token - - if current_char in {'{', '}', '(', ')', '[', ']', ';', ',', '.', ':'}: - token = Token('SEPARATOR', current_char, self.position) - self.position += 1 - self.tokens.append(token) - return token - - if current_char in {'?', '!', '|'}: - token = Token('CONTROL', current_char, self.position) - self.position += 1 - self.tokens.append(token) - return token - - self.position += 1 - raise Exception(f'Unknown character: {current_char}') - - token = Token('EOF', None, self.position) - self.tokens.append(token) - return token - - def __iter__(self): - """ - Returns an iterator over the tokens. - - Returns: - An iterator over the tokens. - """ - return iter(self.tokens) - - def __getitem__(self, index): - """ - Retrieves the token at the specified index. - - Args: - index: The index of the token. - - Returns: - The token at the specified index. - """ - return self.tokens[index] - - def __len__(self): - """ - Returns the number of tokens. - - Returns: - The number of tokens. - """ - return len(self.tokens) \ No newline at end of file diff --git a/src/psi/mathmatics.py b/src/psi/mathmatics.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/psi/parsers.py b/src/psi/parsers.py deleted file mode 100644 index f68f95f..0000000 --- a/src/psi/parsers.py +++ /dev/null @@ -1,144 +0,0 @@ -from psi.lexer import Lexer, Token - - -__all__ = ['Parser'] - -class Parser: - """ - A class representing a parser for Psi code. - - Args: - input: The input code to be parsed. 
- - Returns: - None - - Example: - ```python - parser = Parser(input) - parser.parse() - ``` - """ - - def __init__(self, input): - """ - Initializes a Parser object. - - Args: - input: The input code to be parsed. - - Returns: - None - """ - self.lexer = Lexer(input) - self.tokens = iter(self.lexer) - self.current_token = next(self.tokens) - - def parse(self): - """ - Parses the input code. - - Returns: - The result of the parsing. - """ - return self.parse_expr() - - def parse_expr(self): - """ - Parses an expression in the input code. - - Returns: - The result of the parsing. - """ - token = self.current_token - if token.value == '?': - self.eat('?') - - condition = self.parse_condition() - - self.eat(':') - - if condition: - result = self.parse_reply() - else: - result = None - - return result - - def parse_condition(self): - """ - Parses a condition in the input code. - - Returns: - The result of the parsing. - """ - variable = self.parse_variable() - self.eat('==') - value = self.parse_value() - - return variable == value - - def parse_variable(self): - """ - Parses a variable in the input code. - - Returns: - The result of the parsing. - """ - token = self.current_token - self.eat('IDENTIFIER') - return token.value - - def parse_value(self): - """ - Parses a value in the input code. - - Returns: - The result of the parsing. - - Raises: - Exception: Raised when an invalid value is encountered. - """ - token = self.current_token - if token.type == 'INTEGER': - self.eat('INTEGER') - return token.value - else: - raise Exception(f'Invalid value: {token.value}') - - def parse_reply(self): - """ - Parses a reply in the input code. - - Returns: - The result of the parsing. - - Raises: - Exception: Raised when an invalid reply is encountered. - """ - self.eat('reply') - self.eat(':') - - token = self.current_token - if token.type != 'SEPARATOR': - raise Exception(f'Invalid reply: {token.value}') - - return token.value - - def eat(self, expected_type): - """ - Consumes the current token if it matches the expected type. - - Args: - expected_type: The expected type of the token. - - Returns: - None - - Raises: - Exception: Raised when an unexpected token is encountered. - """ - if self.current_token.type == expected_type: - self.current_token = next(self.tokens) - else: - raise Exception(f'Unexpected token: {self.current_token.value}') diff --git a/src/psi/type.py b/src/psi/type.py deleted file mode 100644 index e69de29..0000000 -- cgit v1.2.3-70-g09d2
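
For reference, the tokenizer added in `psi/lexer.py` can be exercised on its own once the repository root (the directory containing `psi/`) is on the import path. A minimal sketch, not part of the patch; the sample input string is illustrative only:

```python
# Illustrative only -- not part of this commit. Assumes the repository
# root (the directory containing `psi/`) is on PYTHONPATH.
from psi.lexer import Lexer

lexer = Lexer("? message == 1: reply: hi")   # sample Psi-style input

token = lexer.get_next_token()
while token.type != 'EOF':
    # Token is a dict subclass; attribute access goes through __getattr__,
    # so token.type and token['type'] are interchangeable.
    print(token.type, token.value, token.position)
    token = lexer.get_next_token()
```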