feat(lexer): add strings

This commit is contained in:
2026-02-05 03:21:50 +01:00
parent 84e9a8bb1e
commit f5e2d791b5
3 changed files with 23 additions and 5 deletions

View File

@@ -4,7 +4,10 @@ from src.token import Token
def main():
source: str = """() {} +- += / /= // sefs + {, )
}:: *"""
}:: *
"This is a string"
"This is
another string" """
lexer: Lexer = Lexer()
tokens: list[Token] = lexer.process(source)
print(tokens)

View File

@@ -16,6 +16,9 @@ class Lexer:
self.column: int = 0
self.start_pos: Position = self.get_position()
def error(self, msg: str):
    """Abort lexing by raising a SyntaxError anchored at the token's start.

    The message is prefixed with the position recorded in ``self.start_pos``
    so diagnostics point at where the offending token began.
    """
    where = self.start_pos
    raise SyntaxError(f"[ERROR] Error at {where}: {msg}")
def process(self, source: str, path: Optional[str] = None) -> list[Token]:
self.path = path or "<main>"
self.source = source
@@ -116,10 +119,22 @@ class Lexer:
while self.peek().isspace() and not self.is_at_end():
self.advance()
self.add_token(TokenType.WHITESPACE)
case '"':
self.scan_string()
case _:
self.error("Unexpected character")
return None
# NOTE(review): duplicate of the `error` helper that this commit adds
# earlier in the class — in the rendered diff this is the old copy being
# removed as the method is moved; only one definition exists post-commit.
def error(self, msg: str):
raise SyntaxError(f"[ERROR] Error at {self.start_pos}: {msg}")
def scan_string(self):
    """Consume a double-quoted string literal and emit a STRING token.

    Called after the opening '"' has been consumed. Scans forward to the
    closing quote (strings may span lines), reports an error on EOF, and
    records the token value without the surrounding quotes.
    """
    # Walk the string body; keep line/column bookkeeping accurate when
    # the literal contains embedded newlines.
    while self.peek() != '"' and not self.is_at_end():
        if self.peek() == "\n":
            self.line += 1
            self.column = 0
        self.advance()
    # Hitting EOF before the closing quote means the literal never ended.
    if self.is_at_end():
        self.error("Unterminated string")
    self.advance()  # consume the closing '"'
    # Slice off both quote characters to obtain the token's value.
    literal: str = self.source[self.start + 1:self.idx - 1]
    self.add_token(TokenType.STRING, literal)

View File

@@ -70,6 +70,6 @@ class Token:
def __repr__(self) -> str:
# Build a "[TYPE (value)]" debug string; the value part is included
# only when the token carries one.
res: str = f"[{self.type.name}"
if self.value is not None:
# NOTE(review): the next two lines are this diff's before/after pair —
# the commit replaces plain interpolation with !r so string values are
# shown quoted in the repr; only the !r line exists post-commit.
res += f" ({self.value})"
res += f" ({self.value!r})"
res += "]"
return res