Converting UIImage to CIImage fails


I'm making a video recording iOS app. I need to add a timestamp to the recorded video, so I convert each CMSampleBuffer to a UIImage, draw the timestamp on it, and then convert it back to a CMSampleBuffer. While converting the UIImage back to a CMSampleBuffer, I need to convert the UIImage to a CIImage, but this step fails. How can I properly convert a UIImage to a CIImage?
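For reference, the nil result seems reproducible outside the recording pipeline with a minimal sketch like this (UIGraphicsImageRenderer is used here purely for illustration and is not part of my app code):

    // Minimal illustration, not part of the recording pipeline:
    let rendered = UIGraphicsImageRenderer(size: CGSize(width: 100, height: 100)).image { _ in }
    print(rendered.ciImage as Any)   // prints nil — same failure as in appendVideo below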

    func appendVideo(from sampleBuffer: CMSampleBuffer) {
        
        // - Guards
        
        guard let videoInput = assetWriter?.inputs.first(where: { $0.mediaType == .video }) else {
            print("video input not found")
            return
        }
        
        guard videoInput.isReadyForMoreMediaData else {
            print("video input not ready for more media data")
            return
        }
        
        // - Timestamp
        
        let sample: Sample = Sample(sampleBuffer: sampleBuffer)
        
        guard let ciImage = generateCIImage(from: sampleBuffer) else {
            print("CIImage creation from sampleBuffer failed")
            return
        }
        
        let uiImage = UIImage(ciImage: ciImage)
        
        guard let timestampAddedImage = self.addTimestamp(on: uiImage) else {
            fatalError("should not reach here")
        }
        
        ////////////////////////////////////
        // THIS PROCESS FAILS
        ////////////////////////////////////
        guard let timestampAddedCiImage = timestampAddedImage.ciImage else {
            print("UIImage to CIImage conversion faield")
            return
        }
        
        guard let timestampAddedCvpixelBuffer = generateCVPixelBuffer(from: timestampAddedCiImage) else {
            print("CVPixelBuffer creation from CIImage failed")
            return
        }
        
        guard let timestampAddedSampleBuffer = generateCMSampleBuffer(from: timestampAddedCvpixelBuffer, timingInfo: sample.timingInfo) else {
            print("CMSampleBuffer creation from CVPixelBufer failed")
            return
        }
        
        DispatchQueue.main.sync { [weak self] in
            self?.compositeImageView.image = timestampAddedImage
        }
        
        print("append video")
        videoInput.append(timestampAddedSampleBuffer)
    }

    func addTimestamp(on image: UIImage) -> UIImage? {
        let imageRect = CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height)
        UIGraphicsBeginImageContextWithOptions(image.size, true, 0.0)
        image.draw(in: imageRect)
        
        // Text Attributes
        let textColor = UIColor.white
        let textFont = UIFont.systemFont(ofSize: FontSize.sizeL, weight: .bold)
        
        let textFontAttributes = [
            NSAttributedString.Key.font: textFont,
            NSAttributedString.Key.foregroundColor: textColor,
            NSAttributedString.Key.backgroundColor: UIColor(hex: ColorConstants.black, alpha: 0.4)
        ]
        
        // Timestamp to add to image
        let formatter = DateFormatter()
        formatter.dateFormat = "yyyy/MM/dd HH:mm:ss:SSS"
        let timestamp: NSString = formatter.string(from: Date()) as NSString
        
        let textRect = CGRect(x: 6.0, y: 6.0, width: image.size.width, height: 32)
        timestamp.draw(in: textRect, withAttributes: textFontAttributes)
        
        // New Image
        let newImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        
        return newImage
    }


// MARK: - CMSampleBuffer <-> Image Conversion
// ref: https://fromatom.hatenablog.com/entry/2019/10/28/172628

    // CMSampleBuffer > CIImage
    func generateCIImage(from sampleBuffer: CMSampleBuffer) -> CIImage? {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return nil
        }
        
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        return ciImage
    }
    
    func generateCVPixelBuffer(from ciImage: CIImage) -> CVPixelBuffer? {
        let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue] as CFDictionary
        var pixelBuffer: CVPixelBuffer!
        let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(ciImage.extent.size.width), Int(ciImage.extent.size.height), kCVPixelFormatType_32BGRA, attrs, &pixelBuffer)
        guard status == kCVReturnSuccess else {
            print("CVPixelBufferCreateに失敗")
            return nil
        }

        let ciContext = CIContext()
        ciContext.render(ciImage, to: pixelBuffer, bounds: ciImage.extent, colorSpace: CGColorSpaceCreateDeviceRGB())
        CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
        
        return pixelBuffer
    }
    
    func generateCMSampleBuffer(from cvPixelBuffer: CVPixelBuffer, timingInfo: CMSampleTimingInfo) -> CMSampleBuffer? {
        var sampleBuffer: CMSampleBuffer?
        var timingInfo: CMSampleTimingInfo = timingInfo
        var videoInfo: CMVideoFormatDescription!
        CMVideoFormatDescriptionCreateForImageBuffer(allocator: nil, imageBuffer: cvPixelBuffer, formatDescriptionOut: &videoInfo)
        CMSampleBufferCreateForImageBuffer(allocator: kCFAllocatorDefault,
                                           imageBuffer: cvPixelBuffer,
                                           dataReady: true,
                                           makeDataReadyCallback: nil,
                                           refcon: nil,
                                           formatDescription: videoInfo,
                                           sampleTiming: &timingInfo,
                                           sampleBufferOut: &sampleBuffer)

        return sampleBuffer
    }


private final class Sample {
    let timingInfo: CMSampleTimingInfo

    init(sampleBuffer: CMSampleBuffer) {
        let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        let duration = CMSampleBufferGetDuration(sampleBuffer)
        let decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
        timingInfo = CMSampleTimingInfo(duration: duration, presentationTimeStamp: presentationTimeStamp, decodeTimeStamp: decodeTimeStamp)
    }
}

CodePudding user response:

Please update your addTimestamp function like this and try again:

func addTimestamp(on image: UIImage) -> UIImage? {
    let imageRect = CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height)
    UIGraphicsBeginImageContextWithOptions(image.size, true, 0.0)
    // End the context on every exit path, including the early return below
    defer { UIGraphicsEndImageContext() }
    image.draw(in: imageRect)

    // Text Attributes
    let textColor = UIColor.white
    let textFont = UIFont.systemFont(ofSize: FontSize.sizeL, weight: .bold)

    let textFontAttributes = [
        NSAttributedString.Key.font: textFont,
        NSAttributedString.Key.foregroundColor: textColor,
        NSAttributedString.Key.backgroundColor: UIColor(hex: ColorConstants.black, alpha: 0.4)
    ]

    // Timestamp to add to image
    let formatter = DateFormatter()
    formatter.dateFormat = "yyyy/MM/dd HH:mm:ss:SSS"
    let timestamp: NSString = formatter.string(from: Date()) as NSString

    let textRect = CGRect(x: 6.0, y: 6.0, width: image.size.width, height: 32)
    timestamp.draw(in: textRect, withAttributes: textFontAttributes)

    // New image, rewrapped as a CIImage-backed UIImage so that .ciImage is non-nil
    guard let newImage = UIGraphicsGetImageFromCurrentImageContext(),
          let cgImage = newImage.cgImage else { return nil }

    let ciImage = CIImage(cgImage: cgImage)
    return UIImage(ciImage: ciImage)
}
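For context, UIImage.ciImage is only non-nil when the image was created from a CIImage (for example via UIImage(ciImage:)); an image returned by UIGraphicsGetImageFromCurrentImageContext() is backed by a CGImage, which is why timestampAddedImage.ciImage was nil. If you would rather not touch addTimestamp, a minimal sketch of a call-site fallback (assuming the rest of appendVideo stays as posted) is to use CIImage(image:), which handles both CGImage- and CIImage-backed UIImages:

    // Hypothetical replacement for the failing guard in appendVideo:
    // fall back to CIImage(image:) when the UIImage is not CIImage-backed.
    guard let timestampAddedCiImage = timestampAddedImage.ciImage
            ?? CIImage(image: timestampAddedImage) else {
        print("UIImage to CIImage conversion failed")
        return
    }

Note that CIImage(image:) is also a failable initializer, so the guard is still needed.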
