aboutsummaryrefslogtreecommitdiffstatshomepage
path: root/nivis-python
diff options
context:
space:
mode:
author简律纯 <i@jyunko.cn>2024-02-28 11:37:54 +0800
committer简律纯 <i@jyunko.cn>2024-02-28 11:37:54 +0800
commit672e0d03d3ffdba740d166f2c4a7243f142cec5f (patch)
tree46cdb4ea015877e7828710a47c945aa2e834beb0 /nivis-python
parentfc6fc64ca3ff7df62dd270fc021f674e56b2dedf (diff)
downloadTRPGNivis-672e0d03d3ffdba740d166f2c4a7243f142cec5f.tar.gz
TRPGNivis-672e0d03d3ffdba740d166f2c4a7243f142cec5f.zip
refactor!: built in rust
Diffstat (limited to 'nivis-python')
-rw-r--r--nivis-python/Grammar/Token55
-rw-r--r--nivis-python/Lib/IOStream/__init__.nivis1
-rw-r--r--nivis-python/Modules/asyncio/__init__.py0
-rw-r--r--nivis-python/__init__.py12
-rw-r--r--nivis-python/exception.py37
-rw-r--r--nivis-python/execution.py47
-rw-r--r--nivis-python/interpreter.py76
-rw-r--r--nivis-python/lexer.py276
-rw-r--r--nivis-python/mathmatics.py0
-rw-r--r--nivis-python/parsers.py145
-rw-r--r--nivis-python/type.py0
11 files changed, 649 insertions, 0 deletions
diff --git a/nivis-python/Grammar/Token b/nivis-python/Grammar/Token
new file mode 100644
index 0000000..0de3014
--- /dev/null
+++ b/nivis-python/Grammar/Token
@@ -0,0 +1,55 @@
+LPAR '('
+RPAR ')'
+LSQB '['
+RSQB ']'
+COLON ':'
+COMMA ','
+SEMI ';'
+PLUS '+'
+MINUS '-'
+STAR '*'
+SLASH '/'
+VBAR '|'
+AMPER '&'
+LESS '<'
+GREATER '>'
+EQUAL '='
+DOT '.'
+PERCENT '%'
+LBRACE '{'
+RBRACE '}'
+EQEQUAL '=='
+NOTEQUAL '!='
+LESSEQUAL '<='
+GREATEREQUAL '>='
+TILDE '~'
+CIRCUMFLEX '^'
+LEFTSHIFT '<<'
+RIGHTSHIFT '>>'
+DOUBLESTAR '**'
+PLUSEQUAL '+='
+MINEQUAL '-='
+STAREQUAL '*='
+SLASHEQUAL '/='
+PERCENTEQUAL '%='
+AMPEREQUAL '&='
+VBAREQUAL '|='
+CIRCUMFLEXEQUAL '^='
+LEFTSHIFTEQUAL '<<='
+RIGHTSHIFTEQUAL '>>='
+DOUBLESTAREQUAL '**='
+DOUBLESLASH '//'
+DOUBLESLASHEQUAL '//='
+AT '@'
+ATEQUAL '@='
+RARROW '->'
+ELLIPSIS '...'
+COLONEQUAL ':='
+EXCLAMATION '!'
+INTEGER 'INTEGER'
+EOF 'EOF'
+SPACE ' '
+
+
+AWAIT
+ASYNC
diff --git a/nivis-python/Lib/IOStream/__init__.nivis b/nivis-python/Lib/IOStream/__init__.nivis
new file mode 100644
index 0000000..d38024d
--- /dev/null
+++ b/nivis-python/Lib/IOStream/__init__.nivis
@@ -0,0 +1 @@
+# TODO: nivis Plugins in VsCode \ No newline at end of file
diff --git a/nivis-python/Modules/asyncio/__init__.py b/nivis-python/Modules/asyncio/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/nivis-python/Modules/asyncio/__init__.py
diff --git a/nivis-python/__init__.py b/nivis-python/__init__.py
new file mode 100644
index 0000000..943368b
--- /dev/null
+++ b/nivis-python/__init__.py
@@ -0,0 +1,12 @@
+"""nivis
+
+@TODO Lexer support
+@BODY Lex function.
+"""
+
+__all__ = ["Execution", "Interpreter", "Lexer", "Parser"]
+
+from .execution import Execution
+from .interpreter import Interpreter
+from .lexer import Lexer
+from .parsers import Parser
diff --git a/nivis-python/exception.py b/nivis-python/exception.py
new file mode 100644
index 0000000..0ec22e5
--- /dev/null
+++ b/nivis-python/exception.py
@@ -0,0 +1,37 @@
class PsiException(Exception):
    """
    Base class for all Psi-specific exceptions.

    Catching `PsiException` catches every error raised by the Psi
    tooling in this package.

    Example:
        ```python
        raise PsiException("An error occurred in the Psi code.")
        ```
    """


class ValueError(PsiException):
    """
    Raised for value-related errors in Psi code.

    NOTE(review): this name deliberately shadows the builtin `ValueError`
    in any module that imports it from here — confirm that is intended.

    Example:
        ```python
        raise ValueError("Invalid value encountered in the Psi code.")
        ```
    """


class GrammarError(PsiException):
    """
    Raised for grammar-related errors in Psi code.

    Example:
        ```python
        raise GrammarError("Invalid grammar encountered in the Psi code.")
        ```
    """
diff --git a/nivis-python/execution.py b/nivis-python/execution.py
new file mode 100644
index 0000000..09055cf
--- /dev/null
+++ b/nivis-python/execution.py
@@ -0,0 +1,47 @@
+from .parsers import Parser
+from .interpreter import Interpreter
+
+__all__ = ["Execution"]
+
+
class Execution:
    """
    Runs a piece of Psi source code end to end.

    The input is parsed into an AST, and the AST is then interpreted.

    Args:
        input: The input code to be executed.

    Example:
        ```python
        execution = Execution("print('Hello, World!')")
        execution.execute()
        ```
    """

    def __init__(self, input):
        """
        Stores the source code for a later `execute` call.

        Args:
            input: The input code to be executed.
        """
        self.input = input

    def execute(self):
        """
        Parses the stored input and interprets the resulting AST.

        Returns:
            The result of the interpretation.
        """
        ast = Parser(self.input).parse()
        return Interpreter(ast).interpret()
diff --git a/nivis-python/interpreter.py b/nivis-python/interpreter.py
new file mode 100644
index 0000000..6322180
--- /dev/null
+++ b/nivis-python/interpreter.py
@@ -0,0 +1,76 @@
+from .lexer import Token
+
+
+__all__ = ["Interpreter"]
+
+
class Interpreter:
    """
    Evaluates an abstract syntax tree produced by the Psi parser.

    Args:
        ast: The abstract syntax tree (AST) of the code to be interpreted.

    Example:
        ```python
        interpreter = Interpreter(ast)
        interpreter.interpret()
        ```
    """

    def __init__(self, ast):
        """
        Stores the AST for later evaluation.

        Args:
            ast: The abstract syntax tree (AST) of the code to be interpreted.
        """
        self.ast = ast

    def interpret(self):
        """
        Evaluates the whole stored AST.

        Returns:
            The result of the evaluation.
        """
        return self.interpret_expr(self.ast)

    def interpret_expr(self, node):
        """
        Evaluates a single expression node.

        A `Token` evaluates to its value; a list evaluates to the first
        non-None result among its children; anything else yields None.

        Args:
            node: The expression node to be interpreted.

        Returns:
            The result of the evaluation, or None.
        """
        if isinstance(node, Token):
            return node.value
        if isinstance(node, list):
            for child in node:
                value = self.interpret_expr(child)
                if value is not None:
                    return value
        return None

    def interpret_condition(self, node):
        """
        Evaluates a condition node shaped like [variable, operator, value].

        Args:
            node: The condition node to be interpreted.

        Returns:
            True when the variable's value equals the compared value.
        """
        left = self.interpret_expr(node[0])
        right = self.interpret_expr(node[2])
        return left == right
diff --git a/nivis-python/lexer.py b/nivis-python/lexer.py
new file mode 100644
index 0000000..7ba94e3
--- /dev/null
+++ b/nivis-python/lexer.py
@@ -0,0 +1,276 @@
+"""
+Token and Lexer Documentation
+=============================
+
+This module provides the `Token` and `Lexer` classes for tokenizing input strings.
+
+Token Class
+-----------
+
+The `Token` class represents a token with a type, value, and position in the input string. It is a subclass of the built-in `dict` class.
+
+Attributes:
+- `type` (str): The type of the token.
+- `value` (str or int): The value of the token.
+- `position` (int): The position of the token in the input string.
+
+Methods:
+- `__getattr__(self, name)`: Retrieves the value of an attribute by name. Raises an `AttributeError` if the attribute does not exist.
+
+Lexer Class
+-----------
+
+The `Lexer` class tokenizes an input string using a set of rules.
+
+Attributes:
+- `input` (str): The input string to tokenize.
+- `position` (int): The current position in the input string.
+- `tokens` (list): The list of tokens generated by the lexer.
+
+Methods:
+- `get_next_token(self)`: Retrieves the next token from the input string.
+- `__iter__(self)`: Returns an iterator over the tokens.
+- `__getitem__(self, index)`: Retrieves a token by index.
+- `__len__(self)`: Returns the number of tokens.
+
+Usage Example
+-------------
+
+```python
+lexer = Lexer('''
+@newMessage: {
+ ? message == 1: reply: hi
+ ! reply: no
+}
+''')
+
+token = lexer.get_next_token()
+while token['type'] != 'EOF':
+ print(f'Type: {token["type"]}, Value: {token["value"]}, Position: {token["position"]}')
+ token = lexer.get_next_token()
+
+print("\nAll tokens:")
+print([t['type'] for t in lexer])
+```
+"""
+from .exception import ValueError
+
+__all__ = ["Token", "Lexer"]
+
+
class Token(dict):
    """
    A lexer token stored as a dict with attribute-style read access.

    Keys:
        type: the token category (e.g. "IDENTIFIER", "INTEGER", "EOF").
        value: the token payload (text, or an int for INTEGER tokens).
        position: the token's offset within the input string.

    Example:
        ```python
        token = Token("identifier", "x", (1, 5))
        ```
    """

    def __init__(self, type, value, position):
        """
        Stores the three token fields as dict entries.

        Args:
            type: The type of the token.
            value: The value of the token.
            position: The position of the token.
        """
        super().__init__(type=type, value=value, position=position)

    def __getattr__(self, name):
        """
        Exposes the dict keys as attributes (token.type, token.value, ...).

        Args:
            name: The name of the attribute.

        Returns:
            The value stored under `name`.

        Raises:
            AttributeError: Raised when no such key exists.
        """
        try:
            return self[name]
        except KeyError as exc:
            raise AttributeError(f"'Token' object has no attribute '{name}'") from exc
+
+
class Lexer:
    """
    A lexer for Psi code.

    Tokens are produced lazily: each call to `get_next_token` scans the
    input from the current position, appends the token it finds to
    `self.tokens`, and returns it.  Iteration and indexing therefore only
    see tokens that have already been produced by earlier calls.

    Args:
        input: The input code to be lexed.

    Example:
        ```python
        lexer = Lexer("x = 10")
        token = lexer.get_next_token()
        ```
    """

    def __init__(self, input):
        """
        Initializes a Lexer object.

        Args:
            input: The input code to be lexed.
        """
        self.input = input
        self.position = 0  # current scan offset into `input`
        self.tokens = []   # every token produced so far, in order

    def get_next_token(self):
        """
        Scans forward from the current position and returns the next token.

        Whitespace, `#` line comments and `/* ... */` block comments are
        skipped.  Once the input is exhausted an EOF token is returned.

        Returns:
            The next Token.

        Raises:
            ValueError: Raised when an unknown character is encountered.
                Note that the offending character is consumed before raising.
        """
        while self.position < len(self.input):
            current_char = self.input[self.position]

            if current_char.isspace():
                self.position += 1
                continue

            # Line comment: skip up to (but not including) the newline.
            if current_char == "#":
                self.position += 1
                while (
                    self.position < len(self.input)
                    and self.input[self.position] != "\n"
                ):
                    self.position += 1
                continue

            # Block comment: skip everything between "/*" and "*/".
            if (
                current_char == "/"
                and self.position + 1 < len(self.input)
                and self.input[self.position + 1] == "*"
            ):
                self.position += 2
                while self.position < len(self.input) - 1 and (
                    self.input[self.position] != "*"
                    or self.input[self.position + 1] != "/"
                ):
                    self.position += 1
                if self.position < len(self.input) - 1:
                    self.position += 2
                continue

            # Identifier: a letter followed by any run of letters/digits.
            if current_char.isalpha():
                start_position = self.position
                while (
                    self.position < len(self.input)
                    and self.input[self.position].isalnum()
                ):
                    self.position += 1
                token = Token(
                    "IDENTIFIER",
                    self.input[start_position : self.position],
                    start_position,
                )
                self.tokens.append(token)
                return token

            # Integer literal: a run of digits, converted to int.
            if current_char.isdigit():
                start_position = self.position
                while (
                    self.position < len(self.input)
                    and self.input[self.position].isdigit()
                ):
                    self.position += 1
                token = Token(
                    "INTEGER",
                    int(self.input[start_position : self.position]),
                    start_position,
                )
                self.tokens.append(token)
                return token

            # One- or two-character operators (e.g. "=", "==", "|=").
            # NOTE(review): "!" and "|" are matched here before the CONTROL
            # branch below, so they can never be emitted as CONTROL tokens —
            # confirm whether that is intended.
            if current_char in {"<", ">", "=", "!", "&", "|", "@"}:
                if self.position + 1 < len(self.input) and self.input[
                    self.position + 1
                ] in {"=", "&", "|"}:
                    token = Token(
                        "OPERATOR",
                        current_char + self.input[self.position + 1],
                        self.position,
                    )
                    self.position += 2
                else:
                    token = Token("OPERATOR", current_char, self.position)
                    self.position += 1
                self.tokens.append(token)
                return token

            if current_char in {"{", "}", "(", ")", "[", "]", ";", ",", ".", ":"}:
                return self._emit_single_char("SEPARATOR", current_char)
            if current_char in {"?", "!", "|"}:
                return self._emit_single_char("CONTROL", current_char)
            self.position += 1
            raise ValueError(f"Unknown character: {current_char}")

        token = Token("EOF", None, self.position)
        self.tokens.append(token)
        return token

    def _emit_single_char(self, token_type, current_char):
        """Builds a one-character token, records it, and advances the position.

        (Renamed from the auto-generated `_extracted_from_get_next_token_64`,
        resolving the TODO that asked for this.)
        """
        token = Token(token_type, current_char, self.position)
        self.position += 1
        self.tokens.append(token)
        return token

    def __iter__(self):
        """
        Returns an iterator over the tokens produced so far.

        Returns:
            An iterator over `self.tokens`.
        """
        return iter(self.tokens)

    def __getitem__(self, index):
        """
        Retrieves an already-produced token by index.

        Args:
            index: The index of the token.

        Returns:
            The token at the specified index.
        """
        return self.tokens[index]

    def __len__(self):
        """
        Returns the number of tokens produced so far.

        Returns:
            The number of tokens.
        """
        return len(self.tokens)
diff --git a/nivis-python/mathmatics.py b/nivis-python/mathmatics.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/nivis-python/mathmatics.py
diff --git a/nivis-python/parsers.py b/nivis-python/parsers.py
new file mode 100644
index 0000000..ca004f7
--- /dev/null
+++ b/nivis-python/parsers.py
@@ -0,0 +1,145 @@
+from .lexer import Lexer, Token
+
+
+__all__ = ["Parser"]
+
+
class Parser:
    """
    A recursive-descent parser for Psi code.

    Tokens are pulled on demand from a `Lexer`.  The grammar recognised is
    the conditional-reply form seen in the samples, e.g.
    `? message == 1: reply: hi`.

    Args:
        input: The input code to be parsed.

    Example:
        ```python
        parser = Parser(input)
        parser.parse()
        ```
    """

    def __init__(self, input):
        """
        Initializes a Parser object.

        Args:
            input: The input code to be parsed.
        """
        self.lexer = Lexer(input)
        # BUG FIX: the original did `self.tokens = iter(self.lexer)` followed
        # by `next(self.tokens)`.  Lexer.__iter__ only yields tokens that were
        # already produced — none at this point — so the iterator was empty
        # and the constructor always raised StopIteration.  Pull tokens from
        # the lexer on demand instead.
        self.current_token = self.lexer.get_next_token()

    def parse(self):
        """
        Parses the input code.

        Returns:
            The result of the parsing.
        """
        return self.parse_expr()

    def parse_expr(self):
        """
        Parses an expression: an optional "?" guard, a condition, a colon,
        and — when the condition holds — a reply.

        Returns:
            The reply value when the condition is true, otherwise None.
        """
        token = self.current_token
        if token.value == "?":
            self.eat("?")

        condition = self.parse_condition()

        self.eat(":")

        return self.parse_reply() if condition else None

    def parse_condition(self):
        """
        Parses a `variable == value` condition.

        Returns:
            True when the variable name equals the parsed value.
        """
        variable = self.parse_variable()
        self.eat("==")
        value = self.parse_value()

        return variable == value

    def parse_variable(self):
        """
        Parses a variable name.

        Returns:
            The identifier's text.
        """
        token = self.current_token
        self.eat("IDENTIFIER")
        return token.value

    def parse_value(self):
        """
        Parses a literal value (only integers are supported).

        Returns:
            The integer value of the token.

        Raises:
            Exception: Raised when the current token is not an INTEGER.
        """
        token = self.current_token
        if token.type == "INTEGER":
            self.eat("INTEGER")
            return token.value
        raise Exception(f"Invalid value: {token.value}")

    def parse_reply(self):
        """
        Parses a `reply: <value>` clause.

        Returns:
            The reply token's value.

        Raises:
            Exception: Raised when the reply token is of an unexpected type.
        """
        self.eat("reply")
        self.eat(":")

        token = self.current_token
        # NOTE(review): this rejects every non-SEPARATOR token, yet a reply
        # word like `hi` lexes as IDENTIFIER — the comparison looks inverted,
        # and the reply token is never consumed.  Preserved as-is; confirm
        # the intended grammar before changing it.
        if token.type != "SEPARATOR":
            raise Exception(f"Invalid reply: {token.value}")

        return token.value

    def eat(self, expected_type):
        """
        Consumes the current token if it matches the expectation.

        Args:
            expected_type: Either a token type (e.g. "IDENTIFIER", "INTEGER")
                or a literal token value (e.g. "?", "==", ":", "reply") —
                the parse methods pass both kinds.

        Raises:
            Exception: Raised when the current token matches neither.
        """
        # BUG FIX: the original compared only against the token *type*, but
        # callers such as eat("?") and eat("==") pass literal values, so those
        # calls could never succeed.  Accept a match on either field.
        if (
            self.current_token.type == expected_type
            or self.current_token.value == expected_type
        ):
            self.current_token = self.lexer.get_next_token()
        else:
            raise Exception(f"Unexpected token: {self.current_token.value}")
diff --git a/nivis-python/type.py b/nivis-python/type.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/nivis-python/type.py