feat(runner): add command-line arguments
This commit is contained in:
25
main.py
Normal file → Executable file
25
main.py
Normal file → Executable file
@@ -1,8 +1,31 @@
|
|||||||
|
#!/usr/bin/python
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
from src.runner import Runner
|
from src.runner import Runner
|
||||||
|
|
||||||
|
|
||||||
|
def is_valid_file(parser, arg):
    """Validate that *arg* names an existing file.

    Intended as an ``argparse`` ``type=`` callback (wrapped in a lambda
    that supplies *parser*).

    Args:
        parser: The argparse.ArgumentParser used to report errors.
        arg: Candidate path string taken from the command line.

    Returns:
        Path: the validated filesystem path.

    Raises:
        SystemExit: via ``parser.error`` when the file does not exist.
    """
    if not os.path.isfile(arg):
        # parser.error() prints the message and raises SystemExit(2);
        # the original dead `return None` after it has been removed.
        parser.error("The file %s does not exist!" % arg)
    return Path(arg)
|
||||||
|
|
||||||
|
|
||||||
def main():
    """Entry point: run the given script file, or start the REPL when none is given."""
    arg_parser = argparse.ArgumentParser(prog="pebble")
    arg_parser.add_argument(
        "script",
        nargs="?",
        type=lambda x: is_valid_file(arg_parser, x),
        help="File to run",
    )
    options = arg_parser.parse_args()

    # A missing positional argument means interactive mode.
    if options.script is not None:
        Runner.run_file(options.script)
    else:
        Runner.repl()
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
from pathlib import Path
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from src.ast.stmt import Stmt
|
from src.ast.stmt import Stmt
|
||||||
@@ -11,12 +12,16 @@ from src.token import Token, TokenType
|
|||||||
|
|
||||||
|
|
||||||
class Runner:
|
class Runner:
|
||||||
def __init__(self, source: str, path: Optional[str] = None):
|
@staticmethod
def run_file(path: Path):
    """Read the script at *path* and execute it via ``Runner.run``.

    Args:
        path: Filesystem path of the script to execute.
    """
    # Explicit encoding so reading does not depend on the platform's
    # default locale encoding.
    with open(path, "r", encoding="utf-8") as f:
        source: str = f.read()

    Runner.run(source, path)
|
||||||
lexer: Lexer = Lexer(self.source, self.path)
|
|
||||||
|
@staticmethod
def run(source: str, path: Optional[Path] = None):
    """Lex *source* and print the tokens that are not in ``Parser.IGNORE``.

    Args:
        source: Raw program text to tokenize.
        path: Optional origin path of the source, passed to the lexer.
    """
    source_lexer: Lexer = Lexer(source, path)
    all_tokens: list[Token] = source_lexer.process()
    visible_tokens = [tok for tok in all_tokens if tok.type not in Parser.IGNORE]
    print(visible_tokens)
|
||||||
|
|
||||||
@@ -75,7 +80,6 @@ class Runner:
|
|||||||
Pebble.had_error = False
|
Pebble.had_error = False
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|
||||||
brace_depth = 0
|
brace_depth = 0
|
||||||
paren_depth = 0
|
paren_depth = 0
|
||||||
for token in tokens:
|
for token in tokens:
|
||||||
|
|||||||
Reference in New Issue
Block a user