Hello developer friends.
This is the first time I've asked a question on Stack Overflow, and the first time I've written a custom Metal kernel to create a Core Image filter.
The task seemed simple: make a filter that adjusts the hue, saturation, and lightness of the colors in an image, limited to a hue range of +/- 22.5 degrees around a center hue, like the per-color adjustments in apps such as Lightroom.
The algorithm is outrageously simple:
- Pass the original pixel color, the hue-range center, and the hue, saturation, and lightness offsets into the function;
- Inside the function, convert the color from RGB to HSL;
- Check whether the hue falls within the target range: if it doesn't, apply no offsets; if it does, add the offset values to the hue, saturation, and lightness obtained from the conversion;
- Convert the pixel color back to RGB;
- Return the result.
The algorithm worked wonderfully, without any problems, in a playground. Here is the source:
struct RGB {
let r: Float
let g: Float
let b: Float
}
struct HSL {
let hue: Float
let sat: Float
let lum: Float
}
func adjustingHSL(_ s: RGB, center: Float, hueOffset: Float, satOffset: Float, lumOffset: Float) -> RGB {
// Determine the maximum and minimum color components
let maxComp = (s.r > s.g && s.r > s.b) ? s.r : (s.g > s.b) ? s.g : s.b
let minComp = (s.r < s.g && s.r < s.b) ? s.r : (s.g < s.b) ? s.g : s.b
// Convert to HSL
var inputHue: Float = (maxComp + minComp)/2
var inputSat: Float = (maxComp + minComp)/2
let inputLum: Float = (maxComp + minComp)/2
if maxComp == minComp {
inputHue = 0
inputSat = 0
} else {
let delta: Float = maxComp - minComp
inputSat = inputLum > 0.5 ? delta/(2.0 - maxComp - minComp) : delta/(maxComp + minComp)
if (s.r > s.g && s.r > s.b) { inputHue = (s.g - s.b)/delta + (s.g < s.b ? 6.0 : 0.0) }
else if (s.g > s.b) { inputHue = (s.b - s.r)/delta + 2.0 }
else { inputHue = (s.r - s.g)/delta + 4.0 }
inputHue = inputHue/6
}
// Setting the boundaries of the offset hue range
let minHue: Float = center - 22.5/360
let maxHue: Float = center + 22.5/360
// I apply offsets for hue, saturation and lightness
let adjustedHue: Float = inputHue + ((inputHue > minHue && inputHue < maxHue) ? hueOffset : 0)
let adjustedSat: Float = inputSat + ((inputHue > minHue && inputHue < maxHue) ? satOffset : 0)
let adjustedLum: Float = inputLum + ((inputHue > minHue && inputHue < maxHue) ? lumOffset : 0)
// Convert color to RGB
var red: Float = 0
var green: Float = 0
var blue: Float = 0
if adjustedSat == 0 {
red = adjustedLum
green = adjustedLum
blue = adjustedLum
} else {
let q = adjustedLum < 0.5 ? adjustedLum*(1 + adjustedSat) : adjustedLum + adjustedSat - (adjustedLum*adjustedSat)
let p = 2*adjustedLum - q
var t: Float = 0
// Calculating red
t = adjustedHue + 1/3
if t < 0 { t += 1 }
if t > 1 { t -= 1 }
if t < 1/6 { red = p + (q - p)*6*t }
else if t < 1/2 { red = q }
else if t < 2/3 { red = p + (q - p)*(2/3 - t)*6 }
else { red = p }
// Calculating green
t = adjustedHue
if t < 0 { t += 1 }
if t > 1 { t -= 1 }
if t < 1/6 { green = p + (q - p)*6*t }
else if t < 1/2 { green = q }
else if t < 2/3 { green = p + (q - p)*(2/3 - t)*6 }
else { green = p }
// Calculating blue
t = adjustedHue - 1/3
if t < 0 { t += 1 }
if t > 1 { t -= 1 }
if t < 1/6 { blue = p + (q - p)*6*t }
else if t < 1/2 { blue = q }
else if t < 2/3 { blue = p + (q - p)*(2/3 - t)*6 }
else { blue = p }
}
return RGB(r: red, g: green, b: blue)
}
Here is an example of using it in the playground:
let inputColor = RGB(r: 255/255, g: 120/255, b: 0/255)
// For visual perception of the input color
let initColor = UIColor(red: CGFloat(inputColor.r), green: CGFloat(inputColor.g), blue: CGFloat(inputColor.b), alpha: 1.0)
let rgb = adjustingHSL(inputColor, center: 45/360, hueOffset: 0, satOffset: 0, lumOffset: -0.2)
// For visual perception of the output color
let adjustedColor = UIColor(red: CGFloat(rgb.r), green: CGFloat(rgb.g), blue: CGFloat(rgb.b), alpha: 1.0)
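As a quick sanity check (a minimal sketch, assuming the adjustingHSL function above): with all offsets set to zero, the function only converts RGB to HSL and back, so it should return approximately the original color:
let roundTrip = adjustingHSL(inputColor, center: 45/360, hueOffset: 0, satOffset: 0, lumOffset: 0)
// Expect values close to the input (1.0, 0.47..., 0.0), up to floating-point rounding
print(roundTrip.r, roundTrip.g, roundTrip.b)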
The same function, rewritten as a Metal kernel in the Xcode project, gives a completely unexpected result: the image becomes black and white. Changing the input parameters with the sliders does change the image, but in a strange way: it gets covered with small black or white squares.
Here is the source code in Metal kernel:
#include <metal_stdlib>
using namespace metal;
#include <CoreImage/CoreImage.h>
extern "C" {
namespace coreimage {
float4 hslFilterKernel(sample_t s, float center, float hueOffset, float satOffset, float lumOffset) {
// Convert pixel color from RGB to HSL
// Determine the maximum and minimum color components
float maxComp = (s.r > s.g && s.r > s.b) ? s.r : (s.g > s.b) ? s.g : s.b ;
float minComp = (s.r < s.g && s.r < s.b) ? s.r : (s.g < s.b) ? s.g : s.b ;
float inputHue = (maxComp + minComp)/2;
float inputSat = (maxComp + minComp)/2;
float inputLum = (maxComp + minComp)/2;
if (maxComp == minComp) {
inputHue = 0 ;
inputSat = 0 ;
} else {
float delta = maxComp - minComp;
inputSat = inputLum > 0.5 ? delta/(2.0 - maxComp - minComp) : delta/(maxComp + minComp);
if (s.r > s.g && s.r > s.b) {
inputHue = (s.g - s.b)/delta + (s.g < s.b ? 6.0 : 0.0);
} else if (s.g > s.b) {
inputHue = (s.b - s.r)/delta + 2.0;
} else {
inputHue = (s.r - s.g)/delta + 4.0;
}
inputHue = inputHue/6;
}
float minHue = center - 22.5/(360);
float maxHue = center + 22.5/(360);
//I apply offsets for hue, saturation and lightness
float adjustedHue = inputHue + ((inputHue > minHue && inputHue < maxHue) ? hueOffset : 0);
float adjustedSat = inputSat + ((inputHue > minHue && inputHue < maxHue) ? satOffset : 0);
float adjustedLum = inputLum + ((inputHue > minHue && inputHue < maxHue) ? lumOffset : 0);
// Convert pixel color from HSL to RGB
float red = 0 ;
float green = 0 ;
float blue = 0 ;
if (adjustedSat == 0) {
red = adjustedLum;
green = adjustedLum;
blue = adjustedLum;
} else {
float q = adjustedLum < 0.5 ? adjustedLum*(1 + adjustedSat) : adjustedLum + adjustedSat - (adjustedLum*adjustedSat);
float p = 2*adjustedLum - q;
// Calculating Red color
float t = adjustedHue + 1/3;
if (t < 0) { t += 1; }
if (t > 1) { t -= 1; }
if (t < 1/6) { red = p + (q - p)*6*t; }
else if (t < 1/2) { red = q; }
else if (t < 2/3) { red = p + (q - p)*(2/3 - t)*6; }
else { red = p; }
// Calculating Green color
t = adjustedHue;
if (t < 0) { t += 1; }
if (t > 1) { t -= 1; }
if (t < 1/6) { green = p + (q - p)*6*t; }
else if (t < 1/2) { green = q; }
else if (t < 2/3) { green = p + (q - p)*(2/3 - t)*6; }
else { green = p; }
// Calculating Blue color
t = adjustedHue - 1/3;
if (t < 0) { t += 1; }
if (t > 1) { t -= 1; }
if (t < 1/6) { blue = p + (q - p)*6*t; }
else if (t < 1/2) { blue = q; }
else if (t < 2/3) { blue = p + (q - p)*(2/3 - t)*6; }
else { blue = p; }
}
float4 outColor;
outColor.r = red;
outColor.g = green;
outColor.b = blue;
outColor.a = s.a;
return outColor;
}
}
}
I can't figure out where I made a mistake. Just in case, here is the filter class (though it seems to work fine):
class HSLAdjustFilter: CIFilter {
var inputImage: CIImage?
var center: CGFloat?
var hueOffset: CGFloat?
var satOffset: CGFloat?
var lumOffset: CGFloat?
static var kernel: CIKernel = { () -> CIColorKernel in
guard let url = Bundle.main.url(forResource: "HSLAdjustKernel.ci", withExtension: "metallib"),
let data = try? Data(contentsOf: url)
else { fatalError("Unable to load metallib") }
guard let kernel = try? CIColorKernel(functionName: "hslFilterKernel", fromMetalLibraryData: data)
else { fatalError("Unable to create color kernel") }
return kernel
}()
override var outputImage: CIImage? {
guard let inputImage = self.inputImage else { return nil }
return HSLAdjustFilter.kernel.apply(extent: inputImage.extent, roiCallback: { _, rect in return rect }, arguments: [inputImage, self.center ?? 0, self.hueOffset ?? 0, self.satOffset ?? 0, self.lumOffset ?? 0])
}
}
And here is the function that calls the filter:
func imageProcessing(_ inputImage: CIImage) -> CIImage {
let filter = HSLAdjustFilter()
filter.inputImage = inputImage
filter.center = 180/360
filter.hueOffset = CGFloat(hue)
filter.satOffset = CGFloat(saturation)
filter.lumOffset = CGFloat(luminance)
if let outputImage = filter.outputImage {
return outputImage
} else {
return inputImage
}
}
The most frustrating part is that you can't even print anything to the console, so it's unclear how to hunt for errors. I'd be grateful for any hints.
PS: Xcode 13.1, iOS 14-15. SwiftUI life cycle.
GitHub: https://github.com/VKostin8311/MetalKernelsTestApp
Answer:
Welcome!
The main issue with the kernel code is the use of integer literals: the Metal Shading Language is based on C++, which doesn't have the same type inference system as Swift. So when you write 1/3, it will actually perform integer division, so that something like float t = adjustedHue + 1/3 would equal adjustedHue + 0. You have to use float literals here: adjustedHue + 1.0/3.0.
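For example, the red-channel computation rewritten with float literals would look like this (a sketch of the fix; the same change applies everywhere 1/3, 1/6, 1/2, and 2/3 appear):
// Red channel, with float literals so every division happens in floating point
float t = adjustedHue + 1.0/3.0;
if (t < 0.0) { t += 1.0; }
if (t > 1.0) { t -= 1.0; }
if (t < 1.0/6.0) { red = p + (q - p)*6.0*t; }
else if (t < 1.0/2.0) { red = q; }
else if (t < 2.0/3.0) { red = p + (q - p)*(2.0/3.0 - t)*6.0; }
else { red = p; }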
I created a pull request on your sample project with various fixes and improvements. Please let me know if something is unclear.
As for debugging: debugging kernel code is unfortunately not possible with breakpoints and direct print statements. I usually use pixel colors for printf-style debugging, like this:
if (<condition I want to check>) { return float4(1.0, 0.0, 0.0, 1.0); }
All pixels that meet the condition become red in this case, which you can use to verify assumptions, etc.
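The same trick works for inspecting intermediate values; for instance, a 0...1 scalar such as inputHue can be returned as a grayscale color (a sketch, placed inside the kernel where the variable is in scope):
// Visualize a scalar as a grayscale color to see its distribution across the image
return float4(float3(inputHue), 1.0);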
Answer:
> The most frustrating part is that you can't even print anything to the console, so it's unclear how to hunt for errors. I'd be grateful for any hints.
For developing and debugging Metal shaders, you should use the Xcode shader profiler.
I'm writing from my phone right now, but from what I can see there is a mistake in your shader code:
if(maxComp == minComp)
Floating point numbers should always be compared with an epsilon value.
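A sketch of what such a comparison could look like in the kernel (the epsilon value here is an arbitrary choice):
// Treat the max and min components as equal when they differ by less than epsilon
const float epsilon = 1e-5;
if (fabs(maxComp - minComp) < epsilon) {
inputHue = 0.0;
inputSat = 0.0;
}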