##CC0 Kaelygon 2025
"""
Palettize and dither using arbitrary palette
"""
import math
import random
import numpy as np
from PIL import Image
from dataclasses import dataclass, field
from typing import List, Optional
from scipy.spatial import cKDTree as KDTree

### Constants ###
OKLAB_GAMUT_VOLUME = 0.054197416

def approxOkGap(point_count: int):
    return (OKLAB_GAMUT_VOLUME/max(1, point_count))**(1.0/3.0)
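#Worked example (approximate): for a 256-color palette,
#approxOkGap(256) = (0.054197416 / 256) ** (1/3) ≈ 0.0596,
#i.e. the estimated OKLab spacing between evenly distributed palette colors.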
### Color Conversion ###
def srgbToLinear(srgb: np.ndarray):
    cutoff = srgb <= 0.04045
    higher = ((srgb + 0.055) / 1.055) ** 2.4
    lower = srgb / 12.92
    return np.where(cutoff, lower, higher)

def linearToSrgb(lin: np.ndarray):
    lin = np.maximum(lin, 0.0)
    cutoff = lin <= 0.0031308
    higher = 1.055 * np.power(lin, 1/2.4) - 0.055
    lower = lin * 12.92
    return np.where(cutoff, lower, higher)

def linearToOklab(lin: np.ndarray):
    r, g, b = lin[:,0], lin[:,1], lin[:,2]
    l = 0.4122214708*r + 0.5363325363*g + 0.0514459929*b
    m = 0.2119034982*r + 0.6806995451*g + 0.1073969566*b
    s = 0.0883024619*r + 0.2817188376*g + 0.6299787005*b
    l_ = np.sign(l) * np.abs(l) ** (1/3)
    m_ = np.sign(m) * np.abs(m) ** (1/3)
    s_ = np.sign(s) * np.abs(s) ** (1/3)
    L = 0.2104542553*l_ + 0.7936177850*m_ - 0.0040720468*s_
    a = 1.9779984951*l_ - 2.4285922050*m_ + 0.4505937099*s_
    b = 0.0259040371*l_ + 0.7827717662*m_ - 0.8086757660*s_
    return np.stack([L,a,b], axis=1)

def oklabToLinear(lab: np.ndarray):
    L, a, b = lab[:,0], lab[:,1], lab[:,2]
    l_ = L + 0.3963377774*a + 0.2158037573*b
    m_ = L - 0.1055613458*a - 0.0638541728*b
    s_ = L - 0.0894841775*a - 1.2914855480*b
    l = l_**3
    m = m_**3
    s = s_**3
    r = +4.0767416621*l - 3.3077115913*m + 0.2309699292*s
    g = -1.2684380046*l + 2.6097574011*m - 0.3413193965*s
    b = -0.0041960863*l - 0.7034186147*m + 1.7076147010*s
    return np.stack([r,g,b], axis=1)

def srgbToOklab(col: np.ndarray):
    linRGB = srgbToLinear(col)
    oklab = linearToOklab(linRGB)
    return oklab

def oklabToSrgb(col: np.ndarray):
    linRGB = oklabToLinear(col)
    sRGB = linearToSrgb(linRGB)
    return sRGB
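#Round-trip sanity check (illustrative only; expected values are approximate
#OKLab reference numbers for sRGB pure red):
#   srgbToOklab(np.array([[1.0, 0.0, 0.0]]))  ->  ~[[0.628, 0.225, 0.126]]
#   oklabToSrgb(np.array([[0.628, 0.225, 0.126]]))  ->  ~[[1.0, 0.0, 0.0]]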
### Image conversion ###
#striped list. item = (color[i], alpha[i], area[i])
@dataclass
class UniqueList:
    color: np.ndarray #uniques only
    alpha: np.ndarray
    area: np.ndarray
    unique_idxs: np.ndarray
    original_idxs: np.ndarray #colors_with_dupes = color[original_idxs]

class OkImage:
    #const
    BAYER_2 = np.array([
        [ 0, 2],
        [ 3, 1],
    ])
    BAYER_4 = np.array([
        [ 0, 8, 2,10],
        [12, 4,14, 6],
        [ 3,11, 1, 9],
        [15, 7,13, 5],
    ])
    BAYER_8 = np.array([
        [ 0, 32,  8, 40,  2, 34, 10, 42],
        [48, 16, 56, 24, 50, 18, 58, 26],
        [12, 44,  4, 36, 14, 46,  6, 38],
        [60, 28, 52, 20, 62, 30, 54, 22],
        [ 3, 35, 11, 43,  1, 33,  9, 41],
        [51, 19, 59, 27, 49, 17, 57, 25],
        [15, 47,  7, 39, 13, 45,  5, 37],
        [63, 31, 55, 23, 61, 29, 53, 21],
    ])
    BAYER_16 = np.array([
        [  0, 128,  32, 160,   8, 136,  40, 168,   2, 130,  34, 162,  10, 138,  42, 170],
        [192,  64, 224,  96, 200,  72, 232, 104, 194,  66, 226,  98, 202,  74, 234, 106],
        [ 48, 176,  16, 144,  56, 184,  24, 152,  50, 178,  18, 146,  58, 186,  26, 154],
        [240, 112, 208,  80, 248, 120, 216,  88, 242, 114, 210,  82, 250, 122, 218,  90],
        [ 12, 140,  44, 172,   4, 132,  36, 164,  14, 142,  46, 174,   6, 134,  38, 166],
        [204,  76, 236, 108, 196,  68, 228, 100, 206,  78, 238, 110, 198,  70, 230, 102],
        [ 60, 188,  28, 156,  52, 180,  20, 148,  62, 190,  30, 158,  54, 182,  22, 150],
        [252, 124, 220,  92, 244, 116, 212,  84, 254, 126, 222,  94, 246, 118, 214,  86],
        [  3, 131,  35, 163,  11, 139,  43, 171,   1, 129,  33, 161,   9, 137,  41, 169],
        [195,  67, 227,  99, 203,  75, 235, 107, 193,  65, 225,  97, 201,  73, 233, 105],
        [ 51, 179,  19, 147,  59, 187,  27, 155,  49, 177,  17, 145,  57, 185,  25, 153],
        [243, 115, 211,  83, 251, 123, 219,  91, 241, 113, 209,  81, 249, 121, 217,  89],
        [ 15, 143,  47, 175,   7, 135,  39, 167,  13, 141,  45, 173,   5, 133,  37, 165],
        [207,  79, 239, 111, 199,  71, 231, 103, 205,  77, 237, 109, 197,  69, 229, 101],
        [ 63, 191,  31, 159,  55, 183,  23, 151,  61, 189,  29, 157,  53, 181,  21, 149],
        [255, 127, 223,  95, 247, 119, 215,  87, 253, 125, 221,  93, 245, 117, 213,  85],
    ])
    AMOGUS_5 = np.array([
        [ 0, 1, 1, 1, 0],
        [ 1, 1, 0, 0, 0],
        [ 1, 1, 1, 1, 0],
        [ 0, 1, 0, 1, 0],
        [ 0, 0, 0, 0, 0],
    ])
    BAYER_N = [BAYER_2, BAYER_4, BAYER_8, BAYER_16, AMOGUS_5]
    #gamma corrected (squared) thresholds for OKLab, shifted to be centered around 0
    BAYER_OK_N = [(bayer*bayer) / ((np.max(bayer)+1)**2) - (0.5 - 1e-8) for bayer in BAYER_N]
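    #Example (values approximate): BAYER_2 = [[0,2],[3,1]] becomes
    #  [[-0.50, -0.25],
    #   [ 0.06, -0.44]]
    #i.e. each entry is squared, divided by (max+1)^2 = 16, then shifted down
    #by ~0.5 so the thresholds straddle zero.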
    #non-const
    pixels = None #don't mutate after init
    pixels_output = None #copy of pixels that can be modified
    size = None

    def __init__(self, input_path):
        self.imgToOkPixels(input_path)

    #vals = reference to np.ndarray
    def _quantize(self, vals, step_count: int):
        vals[:] = np.round(vals*step_count)/step_count

    #public
    def imgToOkPixels(self, img_path: str):
        in_img = Image.open(img_path).convert("RGBA")
        col_list = np.array(in_img, dtype=np.float64) / 255.0
        col_list = col_list.reshape(-1, 4)
        col_list[:,:3] = srgbToOklab(col_list[:,:3])
        self.pixels = col_list
        self.pixels[:] = np.clip(self.pixels, [0.0,-0.5,-0.5,0.0], [1.0,0.5,0.5,1.0])
        self.pixels_output = self.pixels.copy()
        self.size = in_img.size

    def saveImage(self, output_path: str):
        col_list = self.pixels_output.copy()
        col_list[:,:3] = oklabToSrgb(col_list[:,:3])
        rgba = np.clip(np.round(col_list * 255), 0, 255).astype(np.uint8)
        rgba = rgba.reshape((self.size[1], self.size[0], 4))
        img = Image.fromarray(rgba, "RGBA")
        img.save(output_path)

    def quantizeAxes(self, col_list, step_count: int):
        if not step_count:
            return col_list
        if col_list is None:
            col_list = self.pixels_output
        self._quantize(col_list, step_count)
        return col_list

    def quantizeAlpha(self, alpha_count: int):
        alpha = self.pixels_output[:,3]
        if alpha_count == 0:
            alpha[:] = np.zeros(len(alpha)) + 1.0
        else:
            self._quantize(alpha, alpha_count)

    def createUniqueList(self):
        #strip dupes
        unique_colors, unique_idxs, original_idxs = np.unique(self.pixels_output, axis=0, return_index=True, return_inverse=True)
        #area[unique_index] = pixel count, so area[0] is how many (non-transparent) pixels map to unique_colors[0]
        nontransp = self.pixels_output[:, 3] > (1.0 / 255.0) #exclude transparent
        area = np.bincount(
            original_idxs,
            weights=nontransp,
            minlength=len(unique_colors)
        )
        self.unique_list = UniqueList(
            unique_colors[:,:3],
            unique_colors[:, 3],
            area,
            unique_idxs,
            original_idxs
        )
    ### Palettize methods ###
    def applyPalette(self, unique_palettized):
        self.pixels_output[:,:3] = unique_palettized[self.unique_list.original_idxs]

    def ditherNone(self, palette_img):
        pal_list = palette_img.unique_list
        pixels = self.pixels_output[:,:3]
        tree = KDTree(pal_list.color)
        _, idxs = tree.query(pixels, k=1, workers=-1)
        self.pixels_output[:,:3] = pal_list.color[idxs]
    def ditherOrdered(self, palette_img, matrix_size=1):
        pal_list = palette_img.unique_list
        pixels = self.pixels_output[:,:3].copy()
        matrix_size = np.clip(matrix_size, 0, len(self.BAYER_OK_N)-1)
        b_m = self.BAYER_OK_N[matrix_size]
        b_h, b_w = b_m.shape
        #per-pixel threshold from the tiled Bayer matrix
        y_idxs, x_idxs = np.divmod(np.arange(len(pixels)), self.size[0])
        thresholds = b_m[y_idxs % b_h, x_idxs % b_w]
        #gap between the two closest palette colors of each pixel
        tree = KDTree(pal_list.color)
        _, idxs = tree.query(pixels, k=2, workers=-1)
        pixel_gaps = np.abs(pal_list.color[idxs[:,1]] - pal_list.color[idxs[:,0]])
        #offset each pixel by threshold*gap, then snap to the nearest palette color
        new_pixels = pixels + thresholds[:,None] * pixel_gaps
        _, idxs = tree.query(new_pixels, k=1, workers=-1)
        new_pixels = pal_list.color[idxs]
        self.pixels_output[:,:3] = new_pixels

def calcBucketScore(bucket_areas, col_dists, col_idxs, max_radius):
    area_weight = max(8.0, 4.0*max_radius)
    max_bucket = max(1.0, max(bucket_areas))
    return col_dists * (1.0 + area_weight * (bucket_areas[col_idxs]/max_bucket))
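#Worked example (approximate): with a 256-color palette, max_radius is roughly
#0.06 * max_error, so area_weight = max(8.0, 4.0*0.06) = 8.0. An empty bucket
#keeps its raw distance, while the fullest bucket (bucket_areas == max_bucket)
#has its distance scaled by 1 + 8 = 9, steering colors toward less-used entries.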
#Map unique colors to the palette, but avoid collapsing similar colors into one bucket
#Returns unique_palettized with shape (len(unique_list.color), 3) = [l,a,b]
def createWeightedPalette(
    src_img: OkImage,
    palette_img: OkImage,
    max_error: float = 1.0,
    k_count = 13
):
    unique_list = src_img.unique_list
    palette_list = palette_img.unique_list
    pal_length = len(palette_list.color)
    max_radius = approxOkGap(pal_length) * max_error

    #accumulated area of colors in each palette bucket
    bucket_areas = np.zeros(pal_length)

    #closest palette colors
    tree = KDTree(palette_list.color)
    est_maxk = max_error * 12 #sphere kissing number within 1 radius
    k_count = min(max(2, k_count), pal_length, est_maxk)
    dists, idxs = tree.query(unique_list.color, k=int(k_count), workers=-1)

    #choose a palette index for each unique color
    unique_count = len(unique_list.color)
    unique_palettized = np.zeros((unique_count, 3))

    #prioritize largest area
    unique_sorted_idx = np.argsort(-1.0*unique_list.area)
    for i in unique_sorted_idx:
        #lowest dist and emptiest bucket; lowest score = better
        local_scores = calcBucketScore(bucket_areas, dists[i], idxs[i], max_radius)
        mask = dists[i] <= max_radius
        if np.any(mask):
            valid = np.where(mask)[0]
            best_pos = valid[np.argmin(local_scores[valid])]
            best_j = int(idxs[i][best_pos])
        else:
            #fall back to the nearest color if every candidate exceeds max_error
            best_pos = 0
            best_j = int(idxs[i][best_pos])
        unique_palettized[i] = palette_list.color[best_j]
        bucket_areas[best_j] += unique_list.area[i]
    return unique_palettized
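#Illustrative use of the weighted mapping (mirrors the dither == 0 branch of
#palettizeImage below; no new names are introduced):
#   unique_palettized = createWeightedPalette(image_ok, palette_ok, max_error=1.0)
#   image_ok.applyPalette(unique_palettized)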
### Main functions ###
@dataclass
class ConvertPreset:
    image: str #file names
    palette: str
    output: str
    alpha_count: int
    max_error: float #radius within which neighboring palette colors may replace unique colors
    merge_radius: float #quantize the original image. >1.0 means coarser quantization than the palette. May improve quality if you have thousands of unique colors and a tiny palette
    dither: int #0 = none, 1 = ordered dither
    dither_size: int #bayer matrix: 0=2x2, 1=4x4, 2=8x8, 3=16x16, 4=amogus 5x5

def palettizeImage(preset: ConvertPreset):
    palette_ok = OkImage(preset.palette)
    palette_ok.quantizeAlpha(0)
    palette_ok.createUniqueList()

    image_ok = OkImage(preset.image)
    if preset.merge_radius:
        axis_step_size = approxOkGap(len(palette_ok.unique_list.color)) * preset.merge_radius
        axis_count = int(1.0/axis_step_size)
        image_ok.quantizeAxes(None, axis_count)
    image_ok.quantizeAlpha(preset.alpha_count)
    image_ok.createUniqueList()

    #replace the original image pixels with their mapped palette colors
    if preset.dither == 0:
        if preset.max_error:
            unique_palettized = createWeightedPalette(image_ok, palette_ok, preset.max_error)
            image_ok.applyPalette(unique_palettized)
        else:
            image_ok.ditherNone(palette_ok)
    if preset.dither == 1:
        image_ok.ditherOrdered(palette_ok, preset.dither_size)

    image_ok.saveImage(preset.output)
if __name__ == '__main__':
    input_palette = "./palettes/pal256.png"

    preset_list = [
        ConvertPreset(
            image = "./testImg/KaelygonLogo25.png",
            palette = input_palette,
            output = "./output/palettizedImg.png",
            alpha_count = 1,
            max_error = 2.0,
            merge_radius = 0.0,
            dither = 0,
            dither_size = 0,
        ),
        ConvertPreset(
            image = "./testImg/tienaPride.png",
            palette = input_palette,
            output = "./output/palettizedImg.png",
            alpha_count = 1,
            max_error = 1.0,
            merge_radius = 0.0,
            dither = 1,
            dither_size = 3,
        ),
        ConvertPreset(
            image = "./testImg/TienaPortrait.png",
            palette = input_palette,
            output = "./output/palettizedImg.png",
            alpha_count = 1,
            max_error = 1.0,
            merge_radius = 0.09,
            dither = 0,
            dither_size = 0,
        ),
        ConvertPreset(
            image = "./testImg/testChart.png",
            palette = input_palette,
            output = "./output/palettizedImg.png",
            alpha_count = 1,
            max_error = 1.0,
            merge_radius = 0.0,
            dither = 1,
            dither_size = 4,
        ),
        ConvertPreset(
            image = "./testImg/KaelygonSeawing.png",
            palette = input_palette,
            output = "./output/palettizedImg.png",
            alpha_count = 1,
            max_error = 0.0,
            merge_radius = 0.0,
            dither = 1,
            dither_size = 4,
        )
    ]

    preset_index = 4
    palettizeImage( preset_list[preset_index] )
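#Usage sketch (the palette, input, and output paths come from the presets above;
#the filename "palettize.py" is only an assumed example, save the script under any name):
#   python palettize.py
#Edit preset_index or add your own ConvertPreset entries to convert other images.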