feat(lexer): add multiline comments

This commit is contained in:
2026-02-05 03:51:13 +01:00
parent 1362a071cf
commit 767e55a059
3 changed files with 25 additions and 4 deletions

7
examples/06_comments.peb Normal file
View File

@@ -0,0 +1,7 @@
let var1 = 3 // A comment
let var2 // A second comment *{}
12 /*
var1 += var2
*/ 34
56 /* // drgdr */ 78

View File

@@ -10,7 +10,7 @@ def main():
123
"This is
another string" """
path: str = "examples/05_loop.peb"
path: str = "examples/06_comments.peb"
with open(path, "r") as f:
source = f.read()
lexer: Lexer = Lexer()

View File

@@ -119,9 +119,9 @@ class Lexer:
self.add_token(TokenType.LESS_EQUAL if self.match("=") else TokenType.LESS)
case "/":
if self.match("/"):
while self.peek() != "\n" and not self.is_at_end():
self.advance()
self.add_token(TokenType.COMMENT)
self.scan_comment()
elif self.match("*"):
self.scan_comment_multiline()
else:
self.add_token(TokenType.SLASH_EQUAL if self.match("=") else TokenType.SLASH)
case " " | "\r" | "\t" | "\n":
@@ -169,3 +169,17 @@ class Lexer:
lexeme: str = self.source[self.start:self.idx]
token_type: TokenType = KEYWORDS.get(lexeme, TokenType.IDENTIFIER)
self.add_token(token_type)
def scan_comment(self):
    """Consume a single-line `//` comment and emit one COMMENT token.

    The opening slashes have already been consumed by the caller; this
    advances up to (but not past) the terminating newline or end of source.
    """
    # Stop at the end of the line or the end of the source, whichever first.
    while self.peek() != "\n":
        if self.is_at_end():
            break
        self.advance()
    self.add_token(TokenType.COMMENT)
def scan_comment_multiline(self):
    """Consume a `/* ... */` block comment and emit one COMMENT token.

    The opening `/*` has already been consumed by the caller.  An
    unterminated comment simply runs to the end of the source (no error
    is raised here).
    """
    # Skip everything until the closing `*/` marker or end of source.
    while not self.is_at_end():
        if self.peek() == "*" and self.peek_next() == "/":
            break
        self.advance()
    # Consume the two terminator characters, guarding each advance so an
    # unterminated comment at EOF does not over-run the source.
    for _ in range(2):
        if not self.is_at_end():
            self.advance()
    self.add_token(TokenType.COMMENT)