feat(lexer): add identifiers and keywords
This commit is contained in:
5
main.py
5
main.py
@@ -10,8 +10,11 @@ def main():
|
||||
123
|
||||
"This is
|
||||
another string" """
|
||||
path: str = "examples/05_loop.peb"
|
||||
with open(path, "r") as f:
|
||||
source = f.read()
|
||||
lexer: Lexer = Lexer()
|
||||
tokens: list[Token] = lexer.process(source)
|
||||
tokens: list[Token] = lexer.process(source, path)
|
||||
print(tokens)
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
14
src/keyword.py
Normal file
14
src/keyword.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from src.token import TokenType
|
||||
|
||||
# Reserved words of the language, mapped to their token types.
# Each keyword's TokenType member shares the keyword's name in upper case,
# so the table is derived rather than spelled out entry by entry.
# A lookup miss (via KEYWORDS.get) means the lexeme is a plain identifier.
KEYWORDS: dict[str, TokenType] = {
    word: getattr(TokenType, word.upper())
    for word in (
        "let",
        "and",
        "or",
        "if",
        "else",
        "for",
        "while",
        "from",
        "to",
        "by",
    )
}
|
||||
11
src/lexer.py
11
src/lexer.py
@@ -1,5 +1,6 @@
|
||||
from typing import Optional, Any
|
||||
|
||||
from src.keyword import KEYWORDS
|
||||
from src.position import Position
|
||||
from src.token import Token, TokenType
|
||||
|
||||
@@ -129,6 +130,8 @@ class Lexer:
|
||||
case _:
|
||||
if char.isdigit():
|
||||
self.scan_number()
|
||||
elif char.isalpha():
|
||||
self.scan_identifier()
|
||||
else:
|
||||
self.error("Unexpected character")
|
||||
return None
|
||||
@@ -158,3 +161,11 @@ class Lexer:
|
||||
|
||||
value: float = float(self.source[self.start:self.idx])
|
||||
self.add_token(TokenType.NUMBER, value)
|
||||
|
||||
def scan_identifier(self):
    """Scan an identifier or keyword starting at self.start.

    Consumes the longest run of alphanumeric/underscore characters
    (maximum munch), then emits either the keyword's dedicated token
    type or a generic IDENTIFIER token when the lexeme is not a
    reserved word.
    """
    # Extend the lexeme while the next character can belong to a name.
    ch = self.peek()
    while ch.isalnum() or ch == "_":
        self.advance()
        ch = self.peek()

    # Slice the raw text of the lexeme and classify it: reserved words
    # get their own token type, everything else is an identifier.
    lexeme: str = self.source[self.start:self.idx]
    self.add_token(KEYWORDS.get(lexeme, TokenType.IDENTIFIER))
|
||||
|
||||
Reference in New Issue
Block a user