feat: process a folder based on the json
parent 1084b959af
commit 1e7d3d521e
@@ -52,8 +52,9 @@ def write_metadata_to_video(metadata, input_file, output_file=None):
         cmd.extend(["--title", metadata["title"]])
 
     # Process audio tracks
-    for i, track in enumerate(metadata.get("audio_tracks", [])):
-        track_id = i + 1  # mkvmerge track IDs start from 0 for video, 1 for first audio
+    for track in metadata.get("audio_tracks", []):
+        # Use the actual track index from the metadata
+        track_id = track.get("index", 0)
 
         # Set language
         if "language" in track:
@@ -82,9 +83,9 @@ def write_metadata_to_video(metadata, input_file, output_file=None):
                 cmd.extend([f"--original-flag", f"{track_id}:no"])
 
     # Process subtitle tracks
-    for i, track in enumerate(metadata.get("subtitle_tracks", [])):
-        # Calculate track ID (assuming all audio tracks come before subtitle tracks)
-        track_id = len(metadata.get("audio_tracks", [])) + i + 1
+    for track in metadata.get("subtitle_tracks", []):
+        # Use the actual track index from the metadata
+        track_id = track.get("index", 0)
 
         # Set language
         if "language" in track:
@@ -107,6 +108,11 @@ def write_metadata_to_video(metadata, input_file, output_file=None):
            else:
                cmd.extend([f"--forced-track", f"{track_id}:no"])
 
+            if flags.get("original", False):
+                cmd.extend([f"--original-flag", f"{track_id}:yes"])
+            else:
+                cmd.extend([f"--original-flag", f"{track_id}:no"])
+
     # Add input file
     cmd.append(input_file)
 
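The per-track flag handling added above reads an optional "flags" object on each track and emits yes/no values keyed by the track's own mkvmerge ID. A minimal sketch of that mapping, using only the options visible in this diff; the track entry below is hypothetical:

# Hypothetical subtitle-track entry; the field names ("index", "flags",
# "forced", "original") come from this diff, the values are made up.
track = {"index": 2, "flags": {"forced": True, "original": False}}

track_id = track.get("index", 0)   # mkvmerge track ID taken straight from the JSON
flags = track.get("flags", {})

cmd = []
cmd.extend(["--forced-track", f"{track_id}:{'yes' if flags.get('forced', False) else 'no'}"])
cmd.extend(["--original-flag", f"{track_id}:{'yes' if flags.get('original', False) else 'no'}"])

print(cmd)  # ['--forced-track', '2:yes', '--original-flag', '2:no']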
@@ -126,32 +132,140 @@ def write_metadata_to_video(metadata, input_file, output_file=None):
         print(f"❌ Error executing mkvmerge: {str(e)}")
         return False
 
+def process_single_file(metadata, video_file, output_dir=None):
+    """
+    Process a single video file with the given metadata.
+
+    Args:
+        metadata (dict): Metadata for the video file
+        video_file (str): Path to the video file
+        output_dir (str, optional): Directory to save the output file
+
+    Returns:
+        bool: True if successful, False otherwise
+    """
+    if not os.path.isfile(video_file):
+        print(f"❌ Video file not found: {video_file}")
+        return False
+
+    # Create output file path
+    if output_dir:
+        # Ensure output directory exists
+        os.makedirs(output_dir, exist_ok=True)
+
+        # Use the same filename in the output directory
+        output_file = os.path.join(output_dir, os.path.basename(video_file))
+    else:
+        output_file = None  # Let write_metadata_to_video create a default output file
+
+    # Write metadata to video
+    return write_metadata_to_video(metadata, video_file, output_file)
+
+def process_directory(metadata_dict, source_dir, output_dir=None):
+    """
+    Process all video files in the metadata dictionary.
+
+    Args:
+        metadata_dict (dict): Dictionary of metadata keyed by filename
+        source_dir (str): Directory containing the video files
+        output_dir (str, optional): Directory to save the output files
+
+    Returns:
+        bool: True if all files were processed successfully, False otherwise
+    """
+    if not os.path.isdir(source_dir):
+        print(f"❌ Source directory not found: {source_dir}")
+        return False
+
+    # Create output directory if specified
+    if output_dir:
+        os.makedirs(output_dir, exist_ok=True)
+
+    success = True
+    processed_count = 0
+
+    # Process each file in the metadata dictionary
+    for filename, file_metadata in metadata_dict.items():
+        # Construct the full path to the video file
+        video_file = os.path.join(source_dir, filename)
+
+        if not os.path.isfile(video_file):
+            print(f"❌ Video file not found: {video_file}")
+            success = False
+            continue
+
+        # Process the file
+        if process_single_file(file_metadata, video_file, output_dir):
+            processed_count += 1
+        else:
+            success = False
+
+    print(f"✅ Processed {processed_count} out of {len(metadata_dict)} files")
+    return success
+
 def main():
-    parser = argparse.ArgumentParser(description="Write metadata from JSON to video file.")
+    parser = argparse.ArgumentParser(description="Write metadata from JSON to video files.")
     parser.add_argument("json_file", help="Path to input JSON metadata file")
-    parser.add_argument("video_file", help="Path to input video file")
-    parser.add_argument("-o", "--output", help="Path to output video file")
+    parser.add_argument("-o", "--output", help="Path to output directory")
+    parser.add_argument("-s", "--source", help="Source directory (overrides automatic detection)")
     args = parser.parse_args()
 
     json_file = args.json_file
-    video_file = args.video_file
-    output_file = args.output
+    output_dir = args.output
+    source_dir = args.source
 
     if not os.path.isfile(json_file):
         print(f"❌ JSON file not found: {json_file}")
         sys.exit(1)
 
-    if not os.path.isfile(video_file):
-        print(f"❌ Video file not found: {video_file}")
-        sys.exit(1)
-
     # Read metadata from JSON
     metadata = read_metadata_json(json_file)
     if not metadata:
         sys.exit(1)
 
-    # Write metadata to video
-    success = write_metadata_to_video(metadata, video_file, output_file)
+    # Determine if the JSON contains metadata for multiple files or a single file
+    is_multi_file = isinstance(metadata, dict) and all(isinstance(metadata[key], dict) for key in metadata)
+
+    # If source directory is not specified, try to determine it from the JSON filename
+    if not source_dir and is_multi_file:
+        # Extract folder name from JSON filename (e.g., "Millenium" from "Millenium_metadata.json")
+        json_basename = os.path.basename(json_file)
+        if "_metadata.json" in json_basename:
+            folder_name = json_basename.split("_metadata.json")[0]
+            potential_source_dir = os.path.join(os.path.dirname(os.path.abspath(json_file)), folder_name)
+
+            if os.path.isdir(potential_source_dir):
+                source_dir = potential_source_dir
+                print(f"📂 Using source directory: {source_dir}")
+
+    # If no output directory is specified, create one based on the source directory
+    if not output_dir and source_dir:
+        output_dir = os.path.join("ready", os.path.basename(source_dir))
+        print(f"📂 Using output directory: {output_dir}")
+
+    # Process files based on the metadata format
+    if is_multi_file:
+        if not source_dir:
+            print("❌ Source directory not specified and could not be determined automatically.")
+            print(" Please specify a source directory with --source or use a JSON filename like 'FolderName_metadata.json'")
+            sys.exit(1)
+
+        success = process_directory(metadata, source_dir, output_dir)
+    else:
+        # Single file metadata
+        if "filename" not in metadata:
+            print("❌ Invalid metadata format: missing 'filename' field")
+            sys.exit(1)
+
+        # If source directory is specified, look for the file there
+        if source_dir:
+            video_file = os.path.join(source_dir, metadata["filename"])
+        else:
+            # Look for the file in the same directory as the JSON
+            video_file = os.path.join(os.path.dirname(json_file), metadata["filename"])
+
+        success = process_single_file(metadata, video_file, output_dir)
 
     if not success:
         sys.exit(1)
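For the folder workflow this commit adds, the JSON maps each filename to that file's metadata, and naming it after the folder (e.g. "Millenium_metadata.json" next to a "Millenium" directory, as in the diff's comment) lets main() locate the source files and write results under "ready/Millenium". A minimal sketch of such a file; the episode filenames and field values are hypothetical, only the structure follows the diff:

import json

# Hypothetical multi-file metadata keyed by filename, as process_directory() expects.
# The field names ("title", "audio_tracks", "subtitle_tracks", "index", "language",
# "flags") appear in this diff; every value below is invented for illustration.
metadata = {
    "Millenium.S01E01.mkv": {
        "title": "Millenium S01E01",
        "audio_tracks": [{"index": 1, "language": "eng", "flags": {"original": True}}],
        "subtitle_tracks": [{"index": 2, "language": "fre", "flags": {"forced": False}}],
    },
    "Millenium.S01E02.mkv": {
        "title": "Millenium S01E02",
        "audio_tracks": [{"index": 1, "language": "eng", "flags": {"original": True}}],
    },
}

with open("Millenium_metadata.json", "w", encoding="utf-8") as f:
    json.dump(metadata, f, indent=2, ensure_ascii=False)

Running the script on that JSON with no extra arguments would then pick up "Millenium/" as the source directory and write the remuxed files into "ready/Millenium/"; -s/--source and -o/--output only need to be passed when the layout differs. (The script's filename is not shown in this diff.)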