import json
import os
import argparse
from PIL import Image
from multiprocessing import Pool, cpu_count
from tqdm import tqdm  # For progress bar
def convert_single_image(args):
    filename, input_dir, output_dir, quality, keep_metadata = args
    try:
        img_path = os.path.join(input_dir, filename)
        img = Image.open(img_path)
        output_filename = os.path.splitext(filename)[0] + '.webp'
        output_path = os.path.join(output_dir, output_filename)

        # Handle duplicate filenames by appending a numeric suffix
        if os.path.exists(output_path):
            base_name = os.path.splitext(output_filename)[0]
            counter = 1
            while os.path.exists(output_path):
                output_filename = f"{base_name}_{counter}.webp"
                output_path = os.path.join(output_dir, output_filename)
                counter += 1

        if keep_metadata:
            try:
                dict_of_info = img.info.copy()
                # Strip LoraInfo nodes from the embedded ComfyUI workflow, if present
                try:
                    c = json.loads(dict_of_info.get("workflow"))
                    nodes = c.get('nodes')
                    # Build a filtered list rather than removing items while iterating
                    c['nodes'] = [n for n in nodes if n.get('type') != 'LoraInfo']
                    dict_of_info['workflow'] = json.dumps(c)
                except Exception:
                    pass
                # Store the workflow JSON in the EXIF ImageDescription tag (0x010e)
                img_exif = img.getexif()
                user_comment = dict_of_info.get("workflow", "")
                img_exif[0x010e] = "Workflow:" + user_comment
                # method=6 is the slowest but best-compressing WebP encoder setting
                img.convert("RGB").save(output_path, lossless=False,
                                        quality=quality, method=6,
                                        exif=img_exif)
            except Exception as e:
                return (filename, False, str(e))
        else:
            img.save(output_path, 'webp', quality=quality)
        return (filename, True, output_filename)
    except Exception as e:
        return (filename, False, str(e))
def convert_images_to_webp(input_dir, output_dir, quality=97, keep_metadata=True):
    os.makedirs(output_dir, exist_ok=True)

    # Get all PNG files
    png_files = [f for f in os.listdir(input_dir) if f.lower().endswith('.png')]
    if not png_files:
        print("No PNG files found in input directory")
        return

    # Prepare arguments for parallel processing
    args = [(f, input_dir, output_dir, quality, keep_metadata) for f in png_files]

    # Leave a few cores free for the rest of the system
    num_workers = max(1, cpu_count() - 4)
    print(f"Converting {len(png_files)} files using {num_workers} processes...")

    # Process files in parallel with a progress bar
    with Pool(num_workers) as pool:
        results = list(tqdm(
            pool.imap(convert_single_image, args),
            total=len(png_files),
            desc="Converting"
        ))

    # Summarize results
    success = [r for r in results if r[1]]
    failures = [r for r in results if not r[1]]

    print(f"\nSuccessfully converted {len(success)} of {len(png_files)} files")
    if failures:
        print("\nFailed conversions:")
        for f in failures:
            print(f"  {f[0]}: {f[2]}")
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Convert PNG images to WebP format')
    parser.add_argument('--quality', type=int, default=97, help='WebP quality (1-100)')
    parser.add_argument('--no-metadata', action='store_true', help='Do not preserve ComfyUI workflow metadata')
    parser.add_argument('--input-dir', default='.', help='Input directory (default: current directory)')
    parser.add_argument('--output-dir', default='./converted', help='Output directory (default: ./converted)')
    args = parser.parse_args()

    convert_images_to_webp(
        args.input_dir,
        args.output_dir,
        args.quality,
        not args.no_metadata
    )
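
# Example invocations (hypothetical filename and paths, assuming the script
# is saved as png_to_webp.py):
#   python png_to_webp.py --input-dir ./renders --output-dir ./webp --quality 90
#   python png_to_webp.py --no-metadata    # skip embedding the ComfyUI workflow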