refactor: extract file handlers + move EnvDefault
This commit is contained in:
parent
460ae94925
commit
775d3da6ed
0
src/__init__.py
Normal file
0
src/__init__.py
Normal file
25
src/env_default.py
Normal file
25
src/env_default.py
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
# https://stackoverflow.com/a/10551190/11109181
class EnvDefault(argparse.Action):
    """argparse action that takes an argument's default from an environment variable.

    When ``envvar`` is present in the environment, its value becomes the
    argument's default and the argument is no longer required. The help text
    is extended to mention the effective default and the variable name.
    """

    def __init__(self, envvar, required=True, default=None, help=None, **kwargs):
        # A set environment variable overrides any statically configured default
        if envvar and envvar in os.environ:
            default = os.environ[envvar]
        # Having a usable default means the flag need not be passed explicitly
        if required and default is not None:
            required = False

        if default is not None and help is not None:
            help += f" (default: {default})"
        if envvar and help is not None:
            help += f"\nCan also be specified through the {envvar} environment variable"

        # Zero-argument super() (Python 3 idiom) instead of super(EnvDefault, self)
        super().__init__(default=default, required=required, help=help,
                         **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        # Store the command-line value verbatim on the namespace
        setattr(namespace, self.dest, values)
|
106
src/file_handlers.py
Normal file
106
src/file_handlers.py
Normal file
@ -0,0 +1,106 @@
|
|||||||
|
import json
|
||||||
|
import os
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
|
||||||
|
class FileHandler:
    """Lists files under a base directory and caches per-file metadata.

    The cache maps joined file paths to metadata dicts; entries are
    refreshed lazily when the file's modification timestamp advances.
    """

    def __init__(self, directory: str):
        # path -> metadata dict, maintained by get_files_meta()
        self.cache: dict[str, dict] = {}
        self.directory: str = directory

    def get_files(self, base_path: str = "") -> list[str]:
        """Return the entries directly inside ``base_path``, each prefixed
        with ``base_path`` (relative to the handler directory).

        Returns an empty list when ``base_path`` escapes the handler
        directory (e.g. via ``..``).
        """
        root_path: str = os.path.abspath(self.directory)
        full_path: str = os.path.abspath(os.path.join(root_path, base_path))

        # os.path.commonprefix compares character-by-character and would
        # accept a sibling such as "/data2" for root "/data"; commonpath
        # compares whole path components instead.
        try:
            if os.path.commonpath([full_path, root_path]) != root_path:
                return []
        except ValueError:
            # Different drives (Windows) or otherwise incomparable paths:
            # definitely not inside the root.
            return []

        return [
            os.path.join(base_path, f)
            for f in os.listdir(full_path)
        ]

    def get_files_meta(self, base_path: str = "") -> list[dict]:
        """Return the metadata dict of every file inside ``base_path``,
        refreshing stale cache entries and purging deleted files."""
        files: list[str] = [
            os.path.join(self.directory, f)
            for f in self.get_files(base_path)
        ]

        # Drop cache entries whose file is no longer listed
        for stale in set(self.cache) - set(files):
            del self.cache[stale]

        files_meta: list[dict] = []
        for path in files:
            last_modified: float = os.path.getmtime(path)
            if path not in self.cache or self.cache[path]["ts"] < last_modified:
                self.update_meta(path)
            files_meta.append(self.cache[path])

        return files_meta

    def update_meta(self, path: str) -> None:
        """Recompute and store the metadata for ``path``."""
        self.cache[path] = self.get_meta(path)

    def get_meta(self, path: str) -> dict:
        """Build the base metadata dict for ``path``; subclasses extend it."""
        return {
            "path": os.path.relpath(path, self.directory),
            "filename": os.path.basename(path),
            "ts": os.path.getmtime(path)
        }
|
||||||
|
|
||||||
|
|
||||||
|
class JsonFileHandler(FileHandler):
    """FileHandler that reads and writes JSON files inside its directory."""

    def read(self, path: str) -> Optional[dict | list]:
        """Return the parsed JSON content of ``path``, or ``None`` when the
        file is not one of the handled files."""
        if path not in self.get_files():
            return None
        # JSON is defined as UTF-8 text; decode explicitly
        with open(os.path.join(self.directory, path), "r", encoding="utf-8") as f:
            data = json.load(f)
        return data

    def write(self, path: str, data: dict | list) -> bool:
        """Serialize ``data`` into ``path``; return ``True`` on success.

        Only files already present in the directory may be overwritten.
        """
        if path not in self.get_files():
            return False

        try:
            with open(os.path.join(self.directory, path), "w", encoding="utf-8") as f:
                json.dump(data, f, indent=2, ensure_ascii=False)
        except (OSError, TypeError, ValueError):
            # I/O failure or non-serializable payload. A bare except here
            # would also swallow KeyboardInterrupt/SystemExit.
            return False
        return True
|
||||||
|
|
||||||
|
|
||||||
|
class MetadataFileHandler(JsonFileHandler):
    """Enriches the base metadata with film/series information."""

    def get_meta(self, path: str) -> dict:
        """Extend the base metadata with ``type``, ``title`` and (for
        series) ``episodes`` fields derived from the file's JSON content."""
        meta: dict = super().get_meta(path)

        with open(path, "r") as f:
            data = json.load(f)

        # A film metadata file stores a single record carrying a
        # "filename" key; a series file is a mapping of episodes without it.
        if "filename" in data:
            meta["type"] = "film"
            meta["title"] = data["title"]
        else:
            meta["type"] = "series"
            meta["episodes"] = len(data)
            meta["title"] = meta["filename"].split("_metadata")[0]

        return meta
|
||||||
|
|
||||||
|
|
||||||
|
class ToConvertFileHandler(FileHandler):
    """Enriches the base metadata for media awaiting conversion."""

    def get_meta(self, path: str) -> dict:
        """Extend the base metadata with ``size`` and ``type``; folders
        additionally get an ``elements`` count and a trailing slash."""
        meta: dict = super().get_meta(path)

        meta["size"] = os.path.getsize(path)
        if os.path.isdir(path):
            meta["type"] = "folder"
            meta["elements"] = len(os.listdir(path))
            # Mark directories with a trailing slash for the client
            if not meta["path"].endswith("/"):
                meta["path"] += "/"
        else:
            meta["type"] = "media"

        return meta
|
@ -177,11 +177,7 @@ function showAgents() {
|
|||||||
function updateConvertBtn() {
|
function updateConvertBtn() {
|
||||||
const agent = document.querySelector("#agents .agent input:checked")
|
const agent = document.querySelector("#agents .agent input:checked")
|
||||||
const convertBtn = document.getElementById("convert")
|
const convertBtn = document.getElementById("convert")
|
||||||
if (agent) {
|
convertBtn.disabled = !agent
|
||||||
convertBtn.disabled = false
|
|
||||||
} else {
|
|
||||||
convertBtn.disabled = true
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function addAgents(agents) {
|
function addAgents(agents) {
|
||||||
|
@ -119,6 +119,7 @@ export class Track {
|
|||||||
|
|
||||||
|
|
||||||
input.value = value
|
input.value = value
|
||||||
|
break
|
||||||
|
|
||||||
default:
|
default:
|
||||||
break
|
break
|
||||||
|
212
src/server.py
212
src/server.py
@ -7,49 +7,28 @@ import json
|
|||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import socketserver
|
import socketserver
|
||||||
|
import time
|
||||||
|
from functools import partial
|
||||||
from http import HTTPStatus
|
from http import HTTPStatus
|
||||||
from http.server import SimpleHTTPRequestHandler
|
from http.server import SimpleHTTPRequestHandler
|
||||||
import time
|
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
from urllib.parse import parse_qs, unquote, urlparse
|
from urllib.parse import parse_qs, unquote, urlparse
|
||||||
|
|
||||||
from watchdog.events import DirModifiedEvent, FileSystemEventHandler
|
from watchdog.events import (FileClosedEvent, FileDeletedEvent, FileMovedEvent,
|
||||||
|
FileSystemEventHandler)
|
||||||
from watchdog.observers import Observer
|
from watchdog.observers import Observer
|
||||||
from watchdog.observers.api import BaseObserver
|
from watchdog.observers.api import BaseObserver
|
||||||
|
|
||||||
|
from src.env_default import EnvDefault
|
||||||
# https://stackoverflow.com/a/10551190/11109181
|
from src.file_handlers import ToConvertFileHandler, MetadataFileHandler
|
||||||
class EnvDefault(argparse.Action):
|
|
||||||
def __init__(self, envvar, required=True, default=None, help=None, **kwargs):
|
|
||||||
if envvar:
|
|
||||||
if envvar in os.environ:
|
|
||||||
default = os.environ[envvar]
|
|
||||||
if required and default is not None:
|
|
||||||
required = False
|
|
||||||
|
|
||||||
if default is not None and help is not None:
|
|
||||||
help += f" (default: {default})"
|
|
||||||
|
|
||||||
if envvar and help is not None:
|
|
||||||
help += f"\nCan also be specified through the {envvar} environment variable"
|
|
||||||
super(EnvDefault, self).__init__(default=default, required=required, help=help,
|
|
||||||
**kwargs)
|
|
||||||
|
|
||||||
def __call__(self, parser, namespace, values, option_string=None):
|
|
||||||
setattr(namespace, self.dest, values)
|
|
||||||
|
|
||||||
|
|
||||||
class HTTPHandler(SimpleHTTPRequestHandler):
|
class HTTPHandler(SimpleHTTPRequestHandler):
|
||||||
SERVER: MeliesServer = None
|
def __init__(self, server: MeliesServer, *args, **kwargs):
|
||||||
METADATA_CACHE = {}
|
self.server_: MeliesServer = server
|
||||||
TO_CONVERT_CACHE = {}
|
self.to_convert_files: ToConvertFileHandler = self.server_.to_convert_files
|
||||||
|
self.metadata_files: MetadataFileHandler = self.server_.metadata_files
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
self.MAX_PAYLOAD_SIZE: int = self.SERVER.max_payload_size
|
|
||||||
self.TO_CONVERT_DIR: str = self.SERVER.to_convert_dir
|
|
||||||
self.CONVERTED_DIR: str = self.SERVER.converted_dir
|
|
||||||
self.METADATA_DIR: str = self.SERVER.metadata_dir
|
|
||||||
|
|
||||||
super().__init__(
|
super().__init__(
|
||||||
*args,
|
*args,
|
||||||
directory=os.path.join(os.path.dirname(__file__), "public"),
|
directory=os.path.join(os.path.dirname(__file__), "public"),
|
||||||
@ -68,9 +47,9 @@ class HTTPHandler(SimpleHTTPRequestHandler):
|
|||||||
def read_body_data(self):
|
def read_body_data(self):
|
||||||
try:
|
try:
|
||||||
size: int = int(self.headers["Content-Length"])
|
size: int = int(self.headers["Content-Length"])
|
||||||
if size > self.MAX_PAYLOAD_SIZE:
|
if size > self.server_.max_payload_size:
|
||||||
self.send_error(HTTPStatus.CONTENT_TOO_LARGE)
|
self.send_error(HTTPStatus.CONTENT_TOO_LARGE)
|
||||||
self.log_error(f"Payload is too big ({self.MAX_PAYLOAD_SIZE=}B)")
|
self.log_error(f"Payload is too big ({self.server_.max_payload_size=}B)")
|
||||||
return False
|
return False
|
||||||
raw_data = self.rfile.read(size)
|
raw_data = self.rfile.read(size)
|
||||||
self.data = json.loads(raw_data)
|
self.data = json.loads(raw_data)
|
||||||
@ -103,16 +82,17 @@ class HTTPHandler(SimpleHTTPRequestHandler):
|
|||||||
def handle_api_get(self, path: str):
|
def handle_api_get(self, path: str):
|
||||||
self.log_message(f"API request at {path}")
|
self.log_message(f"API request at {path}")
|
||||||
if path == "files/to_convert":
|
if path == "files/to_convert":
|
||||||
files: list[str] = self.get_to_convert_files_meta(self.query.get("f", [""])[0])
|
base_path: str = self.query.get("f", [""])[0]
|
||||||
|
files: list[dict] = self.to_convert_files.get_files_meta(base_path)
|
||||||
self.send_json(files)
|
self.send_json(files)
|
||||||
|
|
||||||
elif path == "files/metadata":
|
elif path == "files/metadata":
|
||||||
files: list[str] = self.get_metadata_files_meta()
|
files: list[dict] = self.metadata_files.get_files_meta()
|
||||||
self.send_json(files)
|
self.send_json(files)
|
||||||
|
|
||||||
elif path.startswith("file"):
|
elif path.startswith("file"):
|
||||||
filename: str = path.split("/", 1)[1]
|
filename: str = path.split("/", 1)[1]
|
||||||
data = self.read_file(filename)
|
data = self.metadata_files.read(filename)
|
||||||
if data is None:
|
if data is None:
|
||||||
self.send_error(HTTPStatus.NOT_FOUND)
|
self.send_error(HTTPStatus.NOT_FOUND)
|
||||||
else:
|
else:
|
||||||
@ -125,9 +105,11 @@ class HTTPHandler(SimpleHTTPRequestHandler):
|
|||||||
if path.startswith("file"):
|
if path.startswith("file"):
|
||||||
if self.read_body_data():
|
if self.read_body_data():
|
||||||
filename: str = path.split("/", 1)[1]
|
filename: str = path.split("/", 1)[1]
|
||||||
if self.write_file(filename, self.data):
|
if self.metadata_files.write(filename, self.data):
|
||||||
self.send_response(HTTPStatus.OK)
|
self.send_response(HTTPStatus.OK)
|
||||||
self.end_headers()
|
self.end_headers()
|
||||||
|
else:
|
||||||
|
self.send_error(HTTPStatus.INTERNAL_SERVER_ERROR)
|
||||||
else:
|
else:
|
||||||
self.send_response(HTTPStatus.NOT_FOUND, f"Unknown path {path}")
|
self.send_response(HTTPStatus.NOT_FOUND, f"Unknown path {path}")
|
||||||
self.end_headers()
|
self.end_headers()
|
||||||
@ -137,114 +119,6 @@ class HTTPHandler(SimpleHTTPRequestHandler):
|
|||||||
self.send_header("Content-Type", "application/json")
|
self.send_header("Content-Type", "application/json")
|
||||||
self.end_headers()
|
self.end_headers()
|
||||||
self.wfile.write(json.dumps(data).encode("utf-8"))
|
self.wfile.write(json.dumps(data).encode("utf-8"))
|
||||||
|
|
||||||
def get_to_convert_files(self, base_path: str):
|
|
||||||
root_path: str = os.path.abspath(self.TO_CONVERT_DIR)
|
|
||||||
full_path: str = os.path.join(root_path, base_path)
|
|
||||||
full_path = os.path.abspath(full_path)
|
|
||||||
common_prefix: str = os.path.commonprefix([full_path, root_path])
|
|
||||||
|
|
||||||
if common_prefix != root_path:
|
|
||||||
return []
|
|
||||||
|
|
||||||
return os.listdir(full_path)
|
|
||||||
|
|
||||||
def get_metadata_files(self):
|
|
||||||
return os.listdir(self.METADATA_DIR)
|
|
||||||
|
|
||||||
def read_file(self, filename: str) -> Optional[dict|list]:
|
|
||||||
if filename not in self.get_metadata_files():
|
|
||||||
return None
|
|
||||||
with open(os.path.join(self.METADATA_DIR, filename), "r") as f:
|
|
||||||
data = json.load(f)
|
|
||||||
return data
|
|
||||||
|
|
||||||
def write_file(self, filename: str, data: dict|list) -> bool:
|
|
||||||
if filename not in self.get_metadata_files():
|
|
||||||
self.send_error(HTTPStatus.NOT_FOUND)
|
|
||||||
return False
|
|
||||||
|
|
||||||
try:
|
|
||||||
with open(os.path.join(self.METADATA_DIR, filename), "w", encoding="utf-8") as f:
|
|
||||||
json.dump(data, f, indent=2, ensure_ascii=False)
|
|
||||||
except:
|
|
||||||
self.send_error(HTTPStatus.INTERNAL_SERVER_ERROR)
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
def get_to_convert_files_meta(self, base_path: str):
|
|
||||||
files: list[str] = self.get_to_convert_files(base_path)
|
|
||||||
files = [os.path.join(self.TO_CONVERT_DIR, base_path, f) for f in files]
|
|
||||||
files_meta: list[dict] = []
|
|
||||||
|
|
||||||
deleted = set(self.TO_CONVERT_CACHE.keys()) - set(files)
|
|
||||||
for path in deleted:
|
|
||||||
del self.TO_CONVERT_CACHE[path]
|
|
||||||
|
|
||||||
for path in files:
|
|
||||||
last_modified: float = os.path.getmtime(path)
|
|
||||||
if path not in self.TO_CONVERT_CACHE or self.TO_CONVERT_CACHE[path]["ts"] < last_modified:
|
|
||||||
self.update_to_convert_file_meta(path)
|
|
||||||
|
|
||||||
files_meta.append(self.TO_CONVERT_CACHE[path])
|
|
||||||
|
|
||||||
return files_meta
|
|
||||||
|
|
||||||
def get_metadata_files_meta(self):
|
|
||||||
files: list[str] = self.get_metadata_files()
|
|
||||||
files_meta: list[dict] = []
|
|
||||||
|
|
||||||
deleted = set(self.METADATA_CACHE.keys()) - set(files)
|
|
||||||
for filename in deleted:
|
|
||||||
del self.METADATA_CACHE[filename]
|
|
||||||
|
|
||||||
for filename in files:
|
|
||||||
path: str = os.path.join(self.METADATA_DIR, filename)
|
|
||||||
last_modified: float = os.path.getmtime(path)
|
|
||||||
if filename not in self.METADATA_CACHE or self.METADATA_CACHE[filename]["ts"] < last_modified:
|
|
||||||
self.update_metadata_file_meta(filename)
|
|
||||||
|
|
||||||
files_meta.append(self.METADATA_CACHE[filename])
|
|
||||||
|
|
||||||
return files_meta
|
|
||||||
|
|
||||||
def update_metadata_file_meta(self, filename: str):
|
|
||||||
path: str = os.path.join(self.METADATA_DIR, filename)
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
"filename": filename,
|
|
||||||
"ts": os.path.getmtime(path)
|
|
||||||
}
|
|
||||||
|
|
||||||
with open(path, "r") as f:
|
|
||||||
data = json.load(f)
|
|
||||||
is_series = "filename" not in data
|
|
||||||
meta["type"] = "series" if is_series else "film"
|
|
||||||
if is_series:
|
|
||||||
meta["episodes"] = len(data)
|
|
||||||
meta["title"] = filename.split("_metadata")[0]
|
|
||||||
else:
|
|
||||||
meta["title"] = data["title"]
|
|
||||||
|
|
||||||
self.METADATA_CACHE[filename] = meta
|
|
||||||
|
|
||||||
def update_to_convert_file_meta(self, path: str):
|
|
||||||
filename: str = os.path.basename(path)
|
|
||||||
|
|
||||||
is_dir: bool = os.path.isdir(path)
|
|
||||||
meta = {
|
|
||||||
"path": os.path.relpath(path, self.TO_CONVERT_DIR),
|
|
||||||
"filename": filename,
|
|
||||||
"ts": os.path.getmtime(path),
|
|
||||||
"size": os.path.getsize(path),
|
|
||||||
"type": "folder" if is_dir else "media"
|
|
||||||
}
|
|
||||||
if is_dir:
|
|
||||||
meta["elements"] = len(os.listdir(path))
|
|
||||||
if not meta["path"].endswith("/"):
|
|
||||||
meta["path"] += "/"
|
|
||||||
|
|
||||||
self.TO_CONVERT_CACHE[path] = meta
|
|
||||||
|
|
||||||
|
|
||||||
class MeliesServer(FileSystemEventHandler):
|
class MeliesServer(FileSystemEventHandler):
|
||||||
@ -264,14 +138,15 @@ class MeliesServer(FileSystemEventHandler):
|
|||||||
self.metadata_dir: str = metadata_dir
|
self.metadata_dir: str = metadata_dir
|
||||||
self.max_payload_size: int = max_payload_size
|
self.max_payload_size: int = max_payload_size
|
||||||
|
|
||||||
HTTPHandler.SERVER = self
|
|
||||||
|
|
||||||
if not os.path.exists(self.to_convert_dir):
|
if not os.path.exists(self.to_convert_dir):
|
||||||
os.mkdir(self.to_convert_dir)
|
os.mkdir(self.to_convert_dir)
|
||||||
if not os.path.exists(self.converted_dir):
|
if not os.path.exists(self.converted_dir):
|
||||||
os.mkdir(self.converted_dir)
|
os.mkdir(self.converted_dir)
|
||||||
if not os.path.exists(self.metadata_dir):
|
if not os.path.exists(self.metadata_dir):
|
||||||
os.mkdir(self.metadata_dir)
|
os.mkdir(self.metadata_dir)
|
||||||
|
|
||||||
|
self.to_convert_files: ToConvertFileHandler = ToConvertFileHandler(self.to_convert_dir)
|
||||||
|
self.metadata_files: MetadataFileHandler = MetadataFileHandler(self.metadata_dir)
|
||||||
|
|
||||||
logging.basicConfig(
|
logging.basicConfig(
|
||||||
level=logging.INFO,
|
level=logging.INFO,
|
||||||
@ -281,13 +156,20 @@ class MeliesServer(FileSystemEventHandler):
|
|||||||
|
|
||||||
self.httpd: Optional[socketserver.TCPServer] = None
|
self.httpd: Optional[socketserver.TCPServer] = None
|
||||||
self.observer: BaseObserver = Observer()
|
self.observer: BaseObserver = Observer()
|
||||||
self.observer.schedule(self, self.converted_dir, event_filter=[DirModifiedEvent])
|
self.observer.schedule(
|
||||||
|
self,
|
||||||
|
self.converted_dir,
|
||||||
|
recursive=True,
|
||||||
|
event_filter=[FileDeletedEvent, FileMovedEvent, FileClosedEvent]
|
||||||
|
)
|
||||||
self.last_event: float = time.time()
|
self.last_event: float = time.time()
|
||||||
|
|
||||||
|
self.http_handler_cls = partial(HTTPHandler, self)
|
||||||
|
|
||||||
def start(self):
|
def start(self):
|
||||||
self.observer.start()
|
self.observer.start()
|
||||||
try:
|
try:
|
||||||
with socketserver.TCPServer(("", self.port), HTTPHandler) as self.httpd:
|
with socketserver.TCPServer(("", self.port), self.http_handler_cls) as self.httpd:
|
||||||
logging.info(f"Serving on port {self.port}")
|
logging.info(f"Serving on port {self.port}")
|
||||||
self.httpd.serve_forever()
|
self.httpd.serve_forever()
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
@ -297,14 +179,30 @@ class MeliesServer(FileSystemEventHandler):
|
|||||||
def stop(self):
|
def stop(self):
|
||||||
self.observer.stop()
|
self.observer.stop()
|
||||||
self.observer.join()
|
self.observer.join()
|
||||||
|
|
||||||
|
def on_deleted(self, event: FileDeletedEvent):
|
||||||
|
logging.info(f"Converted media deleted: {event.src_path}")
|
||||||
|
self.delete_metadata(event.src_path)
|
||||||
|
return super().on_deleted(event)
|
||||||
|
|
||||||
|
def on_moved(self, event: FileMovedEvent):
|
||||||
|
logging.info(f"Converted media moved: {event.src_path} -> {event.dest_path}")
|
||||||
|
self.rename_metadata(event.src_path, event.dest_path)
|
||||||
|
return super().on_moved(event)
|
||||||
|
|
||||||
|
def on_closed(self, event: FileClosedEvent):
|
||||||
|
logging.info(f"Converted media created or modified: {event.src_path}")
|
||||||
|
self.extract_metadata(event.src_path)
|
||||||
|
return super().on_closed(event)
|
||||||
|
|
||||||
def on_modified(self, event: DirModifiedEvent):
|
def extract_metadata(self, path: str):
|
||||||
t: float = time.time()
|
pass
|
||||||
|
|
||||||
logging.info(event)
|
def rename_metadata(self, src: str, dst: str):
|
||||||
if t - self.last_event > 1:
|
pass
|
||||||
self.last_event = t
|
|
||||||
|
def delete_metadata(self, path: str):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
Loading…
x
Reference in New Issue
Block a user