refactor: pass source and tokens in constructors instead of method

This commit is contained in:
2026-02-06 15:05:02 +01:00
parent 7f188ed14c
commit 0175212026
3 changed files with 13 additions and 27 deletions

View File

@@ -11,12 +11,12 @@ def main():
source: str = ""
with open(path, "r") as f:
source = f.read()
-lexer: Lexer = Lexer()
-tokens: list[Token] = lexer.process(source, path)
+lexer: Lexer = Lexer(source, path)
+tokens: list[Token] = lexer.process()
print(list(filter(lambda t: t.type not in Parser.IGNORE, tokens)))
-parser: Parser = Parser()
-program: list[Stmt] = parser.parse(tokens)
+parser: Parser = Parser(tokens)
+program: list[Stmt] = parser.parse()
interpreter: Interpreter = Interpreter()
interpreter.interpret(program)

View File

@@ -6,13 +6,13 @@ from src.token import Token, TokenType
class Lexer:
-def __init__(self):
-    self.path: str = "<main>"
-    self.source: str = ""
+def __init__(self, source: str, path: Optional[str] = None):
+    self.path: str = path or "<main>"
+    self.source: str = source
self.tokens: list[Token] = []
self.start: int = 0
self.idx: int = 0
-self.length: int = 0
+self.length: int = len(self.source)
self.line: int = 1
self.column: int = 1
self.start_pos: Position = self.get_position()
@@ -20,23 +20,13 @@ class Lexer:
def error(self, msg: str):
raise SyntaxError(f"[ERROR] Error at {self.start_pos}: {msg}")
-def process(self, source: str, path: Optional[str] = None) -> list[Token]:
-    self.path = path or "<main>"
-    self.source = source
-    self.tokens = []
-    self.start = 0
-    self.idx = 0
-    self.length = len(self.source)
-    self.line = 1
-    self.column = 1
+def process(self) -> list[Token]:
while not self.is_at_end():
self.start_pos = self.get_position()
self.start = self.idx
self.scan_token()
self.tokens.append(Token(TokenType.EOF, "", None, self.get_position()))
return self.tokens
def is_at_end(self) -> bool:

View File

@@ -19,21 +19,17 @@ class Parser:
TokenType.FOR, TokenType.WHILE, TokenType.IF, TokenType.PRINT
}
-def __init__(self):
-    self.tokens: list[Token] = []
+def __init__(self, tokens: list[Token]):
+    self.tokens: list[Token] = list(filter(lambda t: t.type not in self.IGNORE, tokens))
self.current: int = 0
-self.length: int = 0
+self.length: int = len(self.tokens)
@staticmethod
def error(token: Token, msg: str):
Pebble.token_error(token, msg)
return ParsingError()
-def parse(self, tokens: list[Token]) -> list[Stmt]:
-    self.tokens = list(filter(lambda t: t.type not in self.IGNORE, tokens))
-    self.current = 0
-    self.length = len(self.tokens)
+def parse(self) -> list[Stmt]:
statements: list[Stmt] = []
self.skip_newlines()
while not self.is_at_end():