Convert YUV data to CVPixelBufferRef and play in AVSampleBufferDisplayLayer


I have a video stream in IYUV (4:2:0) format and I'm trying to convert each frame into a CVPixelBufferRef, then into a CMSampleBufferRef, and play it in an AVSampleBufferDisplayLayer (AVPictureInPictureController requires it). I've tried several versions of the solution, but none of them actually works well; I hope someone with video-processing experience can tell me what I've done wrong here.
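For context, the returned sample buffer is enqueued on the layer roughly like this (a minimal sketch, not my exact code; the `displayLayer` property name is just for illustration):

- (void)renderFrameWithY:(void *)yPtr U:(void *)uPtr V:(void *)vPtr yStride:(int)yStride uStride:(int)uStride vStride:(int)vStride width:(int)width height:(int)height
{
    CMSampleBufferRef sampleBuffer = [self makeSampleBufferFromTexturesWithY:yPtr U:uPtr V:vPtr yStride:yStride uStride:uStride vStride:vStride width:width height:height doMirror:NO doMirrorVertical:NO];
    if (!sampleBuffer) {
        return;
    }

    // Mark the frame for immediate display, since the PTS is "now".
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true);
    if (attachments && CFArrayGetCount(attachments) > 0) {
        CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
        CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
    }

    if (self.displayLayer.readyForMoreMediaData) {
        [self.displayLayer enqueueSampleBuffer:sampleBuffer];
    }
    CFRelease(sampleBuffer);
}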

Full function:

- (CMSampleBufferRef)makeSampleBufferFromTexturesWithY:(void *)yPtr U:(void *)uPtr V:(void *)vPtr yStride:(int)yStride uStride:(int)uStride vStride:(int)vStride width:(int)width height:(int)height doMirror:(BOOL)doMirror doMirrorVertical:(BOOL)doMirrorVertical
{
    NSDictionary *pixelAttributes = @{(NSString *)kCVPixelBufferIOSurfacePropertiesKey:@{}}; // For 1,2,3
    
    CVPixelBufferRef pixelBuffer = NULL;
    CVReturn result;
    result = CVPixelBufferCreate(kCFAllocatorDefault,
                                 width,
                                 height,
                                 kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange // For 1,2,3
//                                 kCVPixelFormatType_32BGRA, // For 4.
                                 (__bridge CFDictionaryRef)(pixelAttributes),
                                 &pixelBuffer);
    if (result != kCVReturnSuccess) {
        NSLog(@"PIP: Unable to create cvpixelbuffer %d", result);
        return nil;
    }

/// Converter code below...

    CMFormatDescriptionRef formatDesc;
    result = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &formatDesc);
    if (result != kCVReturnSuccess) {
        NSAssert(NO, @"PIP: Failed to create CMFormatDescription: %d", result);
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        return nil;
    }
    
    CMTime now = CMTimeMakeWithSeconds(CACurrentMediaTime(), 1000);
    CMSampleTimingInfo timingInfo;
    timingInfo.duration = CMTimeMakeWithSeconds(1, 1000);
    timingInfo.presentationTimeStamp = now;
    timingInfo.decodeTimeStamp = now;
    
    @try {
        if (@available(iOS 13.0, *)) {
            CMSampleBufferRef sampleBuffer;
            CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBuffer, formatDesc, &timingInfo, &sampleBuffer);
//            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
            CVPixelBufferRelease(pixelBuffer);
            pixelBuffer = nil;
//            free(dest.data);
//            free(uvPlane);
            
            return sampleBuffer;
        } else {
            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
            return nil;
        }
    } @catch (NSException *exception) {
        NSAssert(NO, @"PIP: Failed to create CVSampleBuffer: %@", exception);
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        return nil;
    }
}

Here are some solutions that I found:

  1. Combine U and V into one plane, but the bottom half is green (see the stride-aware sketch after this list).
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    uint8_t *yDestPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    memcpy(yDestPlane, yPtr, width * height);

    CGFloat uPlaneSize =  width * height / 4;
    CGFloat vPlaneSize = width * height / 4;
    CGFloat numberOfElementsForChroma = uPlaneSize + vPlaneSize;
    // for simplicity and speed create a combined UV panel to hold the pixels
    uint8_t *uvPlane = calloc(numberOfElementsForChroma, sizeof(uint8_t));
    memcpy(uvPlane, uPtr, uPlaneSize);
    memcpy(uvPlane + (NSUInteger)uPlaneSize, vPtr, vPlaneSize);
    uint8_t *uvDestPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    memcpy(uvDestPlane, uvPlane, numberOfElementsForChroma);
  2. Interleave U and V; the image is still distorted.
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    uint8_t *yDestPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    for (int i = 0, k = 0; i < height; i++) {
        for (int j = 0; j < width; j++) {
            yDestPlane[k++] = ((unsigned char *)yPtr)[j + i * yStride];
        }
    }
    uint8_t *uvDestPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    for (int row = 0, index = 0; row < height / 2; row++) {
        for (int col = 0; col < width / 2; col++) {
            uvDestPlane[index++] = ((unsigned char *)uPtr)[col + row * uStride];
            uvDestPlane[index++] = ((unsigned char *)vPtr)[col + row * vStride];
        }
    }
  3. Somewhat similar to 1.
    int yPixels = yStride * height;
    int uPixels = uStride * height/2;
    int vPixels = vStride * height/2;
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    uint8_t *yDestPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    memcpy(yDestPlane, yPtr, yPixels);

    uint8_t *uvDestPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    memcpy(uvDestPlane, uPtr, uPixels);
    memcpy(uvDestPlane + uPixels, vPtr, vPixels);
  4. Use Accelerate to convert YUV to BGRA and then wrap it in a CVPixelBuffer; no error, but no video is rendered (see the BGRA sketch after this list).
    vImage_Buffer srcYp = {
        .width = width,
        .height = height,
        .rowBytes = yStride,
        .data = yPtr,
    };
    vImage_Buffer srcCb = {
        .width = width / 2,
        .height = height / 2,
        .rowBytes = uStride,
        .data = uPtr,
    };
    vImage_Buffer srcCr = {
        .width = width / 2,
        .height = height / 2,
        .rowBytes = vStride,
        .data = vPtr,
    };

    vImage_Buffer dest;
    dest.data = NULL;
    dest.width = width;
    dest.height = height;

    vImage_Error error = kvImageNoError;
    error = vImageBuffer_Init(&dest, height, width, 32, kvImagePrintDiagnosticsToConsole);
    // vImage_YpCbCrPixelRange pixelRange = (vImage_YpCbCrPixelRange){ 0, 128, 255, 255, 255, 1, 255, 0 };
    vImage_YpCbCrPixelRange pixelRange = { 16, 128, 235, 240, 255, 0, 255, 0 };
    vImage_YpCbCrToARGB info;
    error = kvImageNoError;
    error = vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4,
                                                          &pixelRange,
                                                          &info,
                                                          kvImage420Yp8_Cb8_Cr8,
                                                          kvImageARGB8888,
                                                          kvImagePrintDiagnosticsToConsole);
    error = kvImageNoError;
    uint8_t permuteMap[4] = {3, 2, 1, 0}; // ARGB -> BGRA (iOS only supports BGRA)
    error = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&srcYp,
                                                   &srcCb,
                                                   &srcCr,
                                                   &dest,
                                                   &info,
                                                   permuteMap, // must be non-NULL on iOS (can be NULL on macOS); iOS only supports BGRA
                                                   255,
                                                   kvImagePrintDiagnosticsToConsole);

    if (error != kvImageNoError) {
        NSAssert(NO, @"PIP: vImageConvert error %ld", error);
        return nil;
    }

    // vImageBuffer_CopyToCVPixelBuffer gives the error "destFormat bitsPerComponent = 0 is not supported"
//    vImage_CGImageFormat format = {
//        .bitsPerComponent = 8,
//        .bitsPerPixel = 32,
//        .bitmapInfo = (CGBitmapInfo)kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst,
//        .colorSpace = CGColorSpaceCreateDeviceRGB()
//    };
//    vImageCVImageFormatRef vformat = vImageCVImageFormat_CreateWithCVPixelBuffer(pixelBuffer);
//
//    error = vImageBuffer_CopyToCVPixelBuffer(&dest, &format, pixelBuffer, vformat, 0, kvImagePrintDiagnosticsToConsole);

    result = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                          width,
                                          height,
                                          kCVPixelFormatType_32BGRA,
                                          dest.data,
                                          dest.rowBytes,
                                          NULL,
                                          NULL,
                                          (__bridge CFDictionaryRef)pixelAttributes,
                                          &pixelBuffer);
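
I suspect part of the problem in attempts 1-3 is that they treat the destination planes as tightly packed: the pixel buffer's planes can have a bytes-per-row larger than the frame width, and plane 1 of kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange expects Cb and Cr interleaved. A stride-aware sketch of what I think the copy should look like (untested, same variables as above):

    // Stride-aware copy into the bi-planar (NV12-style) pixel buffer created above.
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    // Plane 0: copy luma row by row, honouring both source and destination strides.
    uint8_t *yDest = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    size_t yDestStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    for (int row = 0; row < height; row++) {
        memcpy(yDest + row * yDestStride, (uint8_t *)yPtr + row * yStride, width);
    }

    // Plane 1: interleave Cb and Cr row by row.
    uint8_t *uvDest = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    size_t uvDestStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
    for (int row = 0; row < height / 2; row++) {
        uint8_t *dstRow = uvDest + row * uvDestStride;
        const uint8_t *uRow = (const uint8_t *)uPtr + row * uStride;
        const uint8_t *vRow = (const uint8_t *)vPtr + row * vStride;
        for (int col = 0; col < width / 2; col++) {
            dstRow[2 * col]     = uRow[col]; // Cb
            dstRow[2 * col + 1] = vRow[col]; // Cr
        }
    }

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);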
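
For attempt 4, I suspect the issue is that CVPixelBufferCreateWithBytes wraps dest.data without copying it and without an IOSurface backing, and it also replaces the NV12 buffer created earlier. Converting directly into a locked BGRA CVPixelBuffer (created with kCVPixelFormatType_32BGRA and the same IOSurface properties, reusing srcYp/srcCb/srcCr and info from above) might avoid both, something like (untested):

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    // Point the vImage destination directly at the pixel buffer's memory.
    vImage_Buffer dest = {
        .data     = CVPixelBufferGetBaseAddress(pixelBuffer),
        .width    = (vImagePixelCount)width,
        .height   = (vImagePixelCount)height,
        .rowBytes = CVPixelBufferGetBytesPerRow(pixelBuffer),
    };

    uint8_t permuteMap[4] = {3, 2, 1, 0}; // ARGB -> BGRA
    vImage_Error error = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&srcYp, &srcCb, &srcCr,
                                                                &dest, &info, permuteMap,
                                                                255, kvImageNoFlags);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    if (error != kvImageNoError) {
        NSLog(@"PIP: vImageConvert error %ld", error);
        return nil;
    }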

CodePudding user response:

I had to resort to using a third-party library, OGVKit, to make it work, with some minor tweaks. The conversion is handled by its method (void)updatePixelBuffer420:pixelBuffer, which decodes YUV420 data very quickly.
