From e2c1c1911e117245551a612499d02eacabbbe5d8 Mon Sep 17 00:00:00 2001 From: 简律纯 Date: Sun, 25 Feb 2024 12:00:16 +0800 Subject: refactor: rename nivis-python to nivis --- nivis-python/Grammar/Token | 55 ------ nivis-python/Lib/IOStream/__init__.nivis | 1 - nivis-python/Modules/asyncio/__init__.py | 0 nivis-python/__init__.py | 12 -- nivis-python/exception.py | 37 ----- nivis-python/execution.py | 47 ------ nivis-python/interpreter.py | 76 --------- nivis-python/lexer.py | 276 ------------------------------- nivis-python/mathmatics.py | 0 nivis-python/parsers.py | 145 ---------------- nivis-python/type.py | 0 11 files changed, 649 deletions(-) delete mode 100644 nivis-python/Grammar/Token delete mode 100644 nivis-python/Lib/IOStream/__init__.nivis delete mode 100644 nivis-python/Modules/asyncio/__init__.py delete mode 100644 nivis-python/__init__.py delete mode 100644 nivis-python/exception.py delete mode 100644 nivis-python/execution.py delete mode 100644 nivis-python/interpreter.py delete mode 100644 nivis-python/lexer.py delete mode 100644 nivis-python/mathmatics.py delete mode 100644 nivis-python/parsers.py delete mode 100644 nivis-python/type.py (limited to 'nivis-python') diff --git a/nivis-python/Grammar/Token b/nivis-python/Grammar/Token deleted file mode 100644 index 0de3014..0000000 --- a/nivis-python/Grammar/Token +++ /dev/null @@ -1,55 +0,0 @@ -LPAR '(' -RPAR ')' -LSQB '[' -RSQB ']' -COLON ':' -COMMA ',' -SEMI ';' -PLUS '+' -MINUS '-' -STAR '*' -SLASH '/' -VBAR '|' -AMPER '&' -LESS '<' -GREATER '>' -EQUAL '=' -DOT '.' -PERCENT '%' -LBRACE '{' -RBRACE '}' -EQEQUAL '==' -NOTEQUAL '!=' -LESSEQUAL '<=' -GREATEREQUAL '>=' -TILDE '~' -CIRCUMFLEX '^' -LEFTSHIFT '<<' -RIGHTSHIFT '>>' -DOUBLESTAR '**' -PLUSEQUAL '+=' -MINEQUAL '-=' -STAREQUAL '*=' -SLASHEQUAL '/=' -PERCENTEQUAL '%=' -AMPEREQUAL '&=' -VBAREQUAL '|=' -CIRCUMFLEXEQUAL '^=' -LEFTSHIFTEQUAL '<<=' -RIGHTSHIFTEQUAL '>>=' -DOUBLESTAREQUAL '**=' -DOUBLESLASH '//' -DOUBLESLASHEQUAL '//=' -AT '@' -ATEQUAL '@=' -RARROW '->' -ELLIPSIS '...' -COLONEQUAL ':=' -EXCLAMATION '!' -INTEGER 'INTEGER' -EOF 'EOF' -SPACE ' ' - - -AWAIT -ASYNC diff --git a/nivis-python/Lib/IOStream/__init__.nivis b/nivis-python/Lib/IOStream/__init__.nivis deleted file mode 100644 index d38024d..0000000 --- a/nivis-python/Lib/IOStream/__init__.nivis +++ /dev/null @@ -1 +0,0 @@ -# TODO: nivis Plugins in VsCode \ No newline at end of file diff --git a/nivis-python/Modules/asyncio/__init__.py b/nivis-python/Modules/asyncio/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/nivis-python/__init__.py b/nivis-python/__init__.py deleted file mode 100644 index 943368b..0000000 --- a/nivis-python/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -"""nivis - -@TODO Lexer support -@BODY Lex function. -""" - -__all__ = ["Execution", "Interpreter", "Lexer", "Parser"] - -from .execution import Execution -from .interpreter import Interpreter -from .lexer import Lexer -from .parsers import Parser diff --git a/nivis-python/exception.py b/nivis-python/exception.py deleted file mode 100644 index 0ec22e5..0000000 --- a/nivis-python/exception.py +++ /dev/null @@ -1,37 +0,0 @@ -class PsiException(Exception): - """ - An exception class for Psi-specific exceptions. - - This class inherits from the built-in `Exception` class. - - Example: - ```python - raise PsiException("An error occurred in the Psi code.") - ``` - """ - - -class ValueError(PsiException): - """ - An exception class for value-related errors in Psi code. - - This class inherits from the `PsiException` class. 
- - Example: - ```python - raise ValueError("Invalid value encountered in the Psi code.") - ``` - """ - - -class GrammarError(PsiException): - """ - An exception class for grammar-related errors in Psi code. - - This class inherits from the `PsiException` class. - - Example: - ```python - raise GrammarError("Invalid grammar encountered in the Psi code.") - ``` - """ diff --git a/nivis-python/execution.py b/nivis-python/execution.py deleted file mode 100644 index 09055cf..0000000 --- a/nivis-python/execution.py +++ /dev/null @@ -1,47 +0,0 @@ -from .parsers import Parser -from .interpreter import Interpreter - -__all__ = ["Execution"] - - -class Execution: - """ - A class representing the execution of Psi code. - - Args: - input: The input code to be executed. - - Returns: - None - - Example: - ```python - execution = Execution("print('Hello, World!')") - execution.execute() - ``` - """ - - def __init__(self, input): - """ - Initializes an Execution object. - - Args: - input: The input code to be executed. - - Returns: - None - """ - self.input = input - - def execute(self): - """ - Executes the input code. - - Returns: - The result of the execution. - """ - parser = Parser(self.input) - ast = parser.parse() - - interpreter = Interpreter(ast) - return interpreter.interpret() diff --git a/nivis-python/interpreter.py b/nivis-python/interpreter.py deleted file mode 100644 index 6322180..0000000 --- a/nivis-python/interpreter.py +++ /dev/null @@ -1,76 +0,0 @@ -from .lexer import Token - - -__all__ = ["Interpreter"] - - -class Interpreter: - """ - A class representing an interpreter for Psi code. - - Args: - ast: The abstract syntax tree (AST) of the code to be interpreted. - - Returns: - None - - Example: - ```python - interpreter = Interpreter(ast) - interpreter.interpret() - ``` - """ - - def __init__(self, ast): - """ - Initializes an Interpreter object. - - Args: - ast: The abstract syntax tree (AST) of the code to be interpreted. - - Returns: - None - """ - self.ast = ast - - def interpret(self): - """ - Interprets the code represented by the AST. - - Returns: - The result of the interpretation. - """ - return self.interpret_expr(self.ast) - - def interpret_expr(self, node): - """ - Interprets an expression node in the AST. - - Args: - node: The expression node to be interpreted. - - Returns: - The result of the interpretation. - """ - if isinstance(node, Token): - return node.value - elif isinstance(node, list): - for expr in node: - result = self.interpret_expr(expr) - if result is not None: - return result - - def interpret_condition(self, node): - """ - Interprets a condition node in the AST. - - Args: - node: The condition node to be interpreted. - - Returns: - The result of the interpretation. - """ - variable = self.interpret_expr(node[0]) - value = self.interpret_expr(node[2]) - - return variable == value diff --git a/nivis-python/lexer.py b/nivis-python/lexer.py deleted file mode 100644 index 7ba94e3..0000000 --- a/nivis-python/lexer.py +++ /dev/null @@ -1,276 +0,0 @@ -""" -Token and Lexer Documentation -============================= - -This module provides the `Token` and `Lexer` classes for tokenizing input strings. - -Token Class ------------ - -The `Token` class represents a token with a type, value, and position in the input string. It is a subclass of the built-in `dict` class. - -Attributes: -- `type` (str): The type of the token. -- `value` (str or int): The value of the token. -- `position` (int): The position of the token in the input string. 
- -Methods: -- `__getattr__(self, name)`: Retrieves the value of an attribute by name. Raises an `AttributeError` if the attribute does not exist. - -Lexer Class ------------ - -The `Lexer` class tokenizes an input string using a set of rules. - -Attributes: -- `input` (str): The input string to tokenize. -- `position` (int): The current position in the input string. -- `tokens` (list): The list of tokens generated by the lexer. - -Methods: -- `get_next_token(self)`: Retrieves the next token from the input string. -- `__iter__(self)`: Returns an iterator over the tokens. -- `__getitem__(self, index)`: Retrieves a token by index. -- `__len__(self)`: Returns the number of tokens. - -Usage Example -------------- - -```python -lexer = Lexer(''' -@newMessage: { - ? message == 1: reply: hi - ! reply: no -} -''') - -token = lexer.get_next_token() -while token['type'] != 'EOF': - print(f'Type: {token["type"]}, Value: {token["value"]}, Position: {token["position"]}') - token = lexer.get_next_token() - -print("\nAll tokens:") -print([t['type'] for t in lexer]) -""" -from .exception import ValueError - -__all__ = ["Token", "Lexer"] - - -class Token(dict): - """ - A class representing a token in the lexer. - - Args: - type: The type of the token. - value: The value of the token. - position: The position of the token. - - Returns: - None - - Example: - ```python - token = Token("identifier", "x", (1, 5)) - ``` - """ - - def __init__(self, type, value, position): - """ - Initializes a Token object. - - Args: - type: The type of the token. - value: The value of the token. - position: The position of the token. - - Returns: - None - """ - super().__init__(type=type, value=value, position=position) - - def __getattr__(self, name): - """ - Retrieves the value of an attribute from the Token object. - - Args: - name: The name of the attribute. - - Returns: - The value of the attribute. - - Raises: - AttributeError: Raised when the attribute does not exist. - """ - try: - return self[name] - except KeyError as e: - raise AttributeError(f"'Token' object has no attribute '{name}'") from e - - -class Lexer: - """ - A class representing a lexer for Psi code. - - Args: - input: The input code to be lexed. - - Returns: - None - - Example: - ```python - lexer = Lexer("x = 10") - for token in lexer: - print(token) - ``` - """ - - def __init__(self, input): - """ - Initializes a Lexer object. - - Args: - input: The input code to be lexed. - - Returns: - None - """ - self.input = input - self.position = 0 - self.tokens = [] - - def get_next_token(self): - """ - Retrieves the next token from the input code. - - Returns: - The next token. - - Raises: - Exception: Raised when an unknown character is encountered. 
- """ - while self.position < len(self.input): - current_char = self.input[self.position] - - if current_char.isspace(): - self.position += 1 - continue - - if current_char == "#": - self.position += 1 - while ( - self.position < len(self.input) - and self.input[self.position] != "\n" - ): - self.position += 1 - continue - - if ( - current_char == "/" - and self.position + 1 < len(self.input) - and self.input[self.position + 1] == "*" - ): - self.position += 2 - while self.position < len(self.input) - 1 and ( - self.input[self.position] != "*" - or self.input[self.position + 1] != "/" - ): - self.position += 1 - if self.position < len(self.input) - 1: - self.position += 2 - continue - - if current_char.isalpha(): - start_position = self.position - while ( - self.position < len(self.input) - and self.input[self.position].isalnum() - ): - self.position += 1 - token = Token( - "IDENTIFIER", - self.input[start_position : self.position], - start_position, - ) - self.tokens.append(token) - return token - - if current_char.isdigit(): - start_position = self.position - while ( - self.position < len(self.input) - and self.input[self.position].isdigit() - ): - self.position += 1 - token = Token( - "INTEGER", - int(self.input[start_position : self.position]), - start_position, - ) - self.tokens.append(token) - return token - - if current_char in {"<", ">", "=", "!", "&", "|", "@"}: - if self.position + 1 < len(self.input) and self.input[ - self.position + 1 - ] in {"=", "&", "|"}: - token = Token( - "OPERATOR", - current_char + self.input[self.position + 1], - self.position, - ) - self.position += 2 - else: - token = Token("OPERATOR", current_char, self.position) - self.position += 1 - self.tokens.append(token) - return token - - if current_char in {"{", "}", "(", ")", "[", "]", ";", ",", ".", ":"}: - return self._extracted_from_get_next_token_64("SEPARATOR", current_char) - if current_char in {"?", "!", "|"}: - return self._extracted_from_get_next_token_64("CONTROL", current_char) - self.position += 1 - raise ValueError(f"Unknown character: {current_char}") - - token = Token("EOF", None, self.position) - self.tokens.append(token) - return token - - # TODO Rename this here and in `get_next_token` - def _extracted_from_get_next_token_64(self, arg0, current_char): - token = Token(arg0, current_char, self.position) - self.position += 1 - self.tokens.append(token) - return token - - def __iter__(self): - """ - Returns an iterator over the tokens. - - Returns: - An iterator over the tokens. - """ - return iter(self.tokens) - - def __getitem__(self, index): - """ - Retrieves the token at the specified index. - - Args: - index: The index of the token. - - Returns: - The token at the specified index. - """ - return self.tokens[index] - - def __len__(self): - """ - Returns the number of tokens. - - Returns: - The number of tokens. - """ - return len(self.tokens) diff --git a/nivis-python/mathmatics.py b/nivis-python/mathmatics.py deleted file mode 100644 index e69de29..0000000 diff --git a/nivis-python/parsers.py b/nivis-python/parsers.py deleted file mode 100644 index ca004f7..0000000 --- a/nivis-python/parsers.py +++ /dev/null @@ -1,145 +0,0 @@ -from .lexer import Lexer, Token - - -__all__ = ["Parser"] - - -class Parser: - """ - A class representing a parser for Psi code. - - Args: - input: The input code to be parsed. - - Returns: - None - - Example: - ```python - parser = Parser(input) - parser.parse() - ``` - """ - - def __init__(self, input): - """ - Initializes a Parser object. 
- - Args: - input: The input code to be parsed. - - Returns: - None - """ - self.lexer = Lexer(input) - self.tokens = iter(self.lexer) - self.current_token = next(self.tokens) - - def parse(self): - """ - Parses the input code. - - Returns: - The result of the parsing. - """ - return self.parse_expr() - - def parse_expr(self): - """ - Parses an expression in the input code. - - Returns: - The result of the parsing. - """ - token = self.current_token - if token.value == "?": - self.eat("?") - - condition = self.parse_condition() - - self.eat(":") - - if condition: - result = self.parse_reply() - else: - result = None - - return result - - def parse_condition(self): - """ - Parses a condition in the input code. - - Returns: - The result of the parsing. - """ - variable = self.parse_variable() - self.eat("==") - value = self.parse_value() - - return variable == value - - def parse_variable(self): - """ - Parses a variable in the input code. - - Returns: - The result of the parsing. - """ - token = self.current_token - self.eat("IDENTIFIER") - return token.value - - def parse_value(self): - """ - Parses a value in the input code. - - Returns: - The result of the parsing. - - Raises: - Exception: Raised when an invalid value is encountered. - """ - token = self.current_token - if token.type == "INTEGER": - self.eat("INTEGER") - return token.value - else: - raise Exception(f"Invalid value: {token.value}") - - def parse_reply(self): - """ - Parses a reply in the input code. - - Returns: - The result of the parsing. - - Raises: - Exception: Raised when an invalid reply is encountered. - """ - self.eat("reply") - self.eat(":") - - token = self.current_token - if token.type != "SEPARATOR": - raise Exception(f"Invalid reply: {token.value}") - - return token.value - - def eat(self, expected_type): - """ - Consumes the current token if it matches the expected type. - - Args: - expected_type: The expected type of the token. - - Returns: - None - - Raises: - Exception: Raised when an unexpected token is encountered. - """ - if self.current_token.type == expected_type: - self.current_token = next(self.tokens) - else: - raise Exception(f"Unexpected token: {self.current_token.value}") diff --git a/nivis-python/type.py b/nivis-python/type.py deleted file mode 100644 index e69de29..0000000 -- cgit v1.2.3-70-g09d2
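
For reference, the tokenizer walk documented in the removed `lexer.py` docstring, reconstructed as a self-contained sketch. It assumes the renamed `nivis` package (the destination implied by the commit subject) still exports the same `Lexer`, `Parser`, `Interpreter`, and `Execution` classes that the deleted `nivis-python/__init__.py` listed in `__all__`; the sample source string is copied from that docstring, and nothing below is taken from the code now living under `nivis/`.

```python
# A minimal sketch, assuming the renamed `nivis` package exposes the same
# Lexer API that the deleted nivis-python/lexer.py documented.
from nivis import Lexer

# Sample program copied from the usage example in the removed lexer.py docstring.
source = """
@newMessage: {
    ? message == 1: reply: hi
    ! reply: no
}
"""

lexer = Lexer(source)

# Pull tokens one at a time until the lexer emits its EOF sentinel.
token = lexer.get_next_token()
while token["type"] != "EOF":
    print(f'Type: {token["type"]}, Value: {token["value"]}, Position: {token["position"]}')
    token = lexer.get_next_token()

# Tokens already consumed stay cached on the lexer, so it can be re-iterated.
print([t["type"] for t in lexer])
```

The removed `execution.py` documented the higher-level entry point as `Execution(source).execute()`, which chained `Parser` (AST construction) and `Interpreter` (evaluation) over the same input.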
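
The deleted `lexer.py` also records a small design choice: `Token` subclasses `dict` and adds `__getattr__`, so the same token can be read as `token["value"]` (as in the lexer's own docstring example) or as `token.value` (as the removed `interpreter.py` and `parsers.py` do). A standalone sketch of that pattern, independent of the removed package:

```python
class Token(dict):
    """Dict-backed token whose keys are also readable as attributes."""

    def __init__(self, type, value, position):
        super().__init__(type=type, value=value, position=position)

    def __getattr__(self, name):
        # Fall back to dict lookup so token.type mirrors token["type"].
        try:
            return self[name]
        except KeyError as e:
            raise AttributeError(f"'Token' object has no attribute {name!r}") from e


t = Token("INTEGER", 42, 7)
assert t.type == t["type"] == "INTEGER"   # attribute and key access agree
assert (t.value, t.position) == (42, 7)
```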