[iOS]利用libyuv 轉(zhuǎn)化視頻分辨率(nv12格式)

libyuv下載地址
鏈接: https://pan.baidu.com/s/1DTInjVivZHQW0PnzJYQK5g 提取碼: cy4b

我也是在網(wǎng)上查找了好多東西,自己試出來可以正常轉(zhuǎn)化將nv12數(shù)據(jù)分辨率的方法,有的地方也并不能完全解釋,但確實可行,僅供參考吧,也是自己記錄下,因為這個過程真的好辛苦,從找到nv12轉(zhuǎn)化分辨率到將yuv轉(zhuǎn)化成CVPixelBufferRef,再到CMSampleBufferRef.

過程:得到CMSampleBufferRef(此處得到的是420f,也是nv12,也是kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),解析得到nv12格式y(tǒng)uv數(shù)據(jù).利用libyuv ,先將nv12轉(zhuǎn)化成I420,通過I420轉(zhuǎn)化分辨率,再將I420轉(zhuǎn)化成nv12,用到的libyuv的方法有三個:NV12ToI420,I420Scale,I420ToNV12.
首先我運用的場景是通過系統(tǒng)的錄屏方法獲取到的CMSampleBufferRef,得到的視頻流分辨率是1080*1920,但是我需要的是720*1280。

// Start ReplayKit screen capture. Each video sample is scaled via libyuv and
// re-wrapped into a CMSampleBuffer before being handed to the SDK.
[[RPScreenRecorder sharedRecorder] startCaptureWithHandler:^(CMSampleBufferRef  _Nonnull sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error) {
            
            // Only video samples are processed; audio samples are ignored here.
            if (bufferType == RPSampleBufferTypeVideo && sampleBuffer != nil) {
                
                // NV12 -> I420 -> scaled I420 -> NV12, packaged as a +1-retained CVPixelBufferRef.
                CVPixelBufferRef pixel =[self convertVideoSmapleBufferToYuvData:sampleBuffer];
                // Wraps the pixel buffer into a timestamped CMSampleBuffer and releases `pixel`.
                CMSampleBufferRef sample = [self pixelBufferToSampleBuffer:pixel];
                // Forward the scaled frame to the consumer (presumably the NetEase SDK — TODO confirm).
                [self setwangyiBuffer:sample];
            }
        } completionHandler:^(NSError * _Nullable error) {}];

其中 [[RPScreenRecorder sharedRecorder] startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error) 是系統(tǒng)的錄屏方法,可以直接得到錄屏數(shù)據(jù)的視頻流,CVPixelBufferRef pixel =[self convertVideoSmapleBufferToYuvData:sampleBuffer];
是我自己轉(zhuǎn)化調(diào)用的方法,其中 shareSWidth = 720;
shareSHeight = 1280;

// Scale an NV12 (kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) sample buffer
// down to shareSWidth x shareSHeight using libyuv.
//
// Pipeline: NV12 -> I420 (NV12ToI420) -> scaled I420 (I420Scale) -> NV12
// (I420ToNV12), then the packed NV12 planes are copied row-by-row into a newly
// created IOSurface-backed CVPixelBuffer.
//
// Returns a +1-retained CVPixelBufferRef — the caller must release it
// (here pixelBufferToSampleBuffer: performs that release) — or NULL on failure.
- (CVPixelBufferRef)convertVideoSmapleBufferToYuvData:(CMSampleBufferRef)videoSample {

    // CVPixelBufferRef is a typedef of CVImageBufferRef; they are interchangeable.
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(videoSample);
    if (pixelBuffer == NULL) {
        return NULL;
    }
    // Lock before reading the plane base addresses; we only read the source.
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

    // Source dimensions in pixels.
    size_t pixelWidth = CVPixelBufferGetWidth(pixelBuffer);
    size_t pixelHeight = CVPixelBufferGetHeight(pixelBuffer);
    // NV12 layout: plane 0 = Y, plane 1 = interleaved UV.
    uint8_t *y_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    uint8_t *uv_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);

    size_t y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    size_t uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
    size_t y_size = y_stride * CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
    // FIX: size the U/V planes from the UV stride rather than y_size/4 —
    // the Y and UV bytes-per-row are not guaranteed to be equal.
    size_t u_size = (uv_stride / 2) * ((pixelHeight + 1) / 2);
    size_t i420_size = y_size + 2 * u_size;

    // Scratch buffer holding the de-interleaved I420 frame at source resolution.
    uint8_t *i420_buffer = (uint8_t *)malloc(i420_size);
    uint8_t *src_u = i420_buffer + y_size;
    uint8_t *src_v = src_u + u_size;

    // Split NV12's interleaved UV plane into separate U and V planes.
    libyuv::NV12ToI420(y_frame, (int)y_stride,
                       uv_frame, (int)uv_stride,
                       i420_buffer, (int)y_stride,
                       src_u, (int)(uv_stride / 2),
                       src_v, (int)(uv_stride / 2),
                       (int)pixelWidth, (int)pixelHeight);

    // Destination I420 buffer: tightly packed, strides equal widths.
    const int scale_uv_stride = (shareSWidth + 1) / 2;
    const int scale_y_length = shareSWidth * shareSHeight;
    uint8_t *scale_buf = (uint8_t *)malloc(scale_y_length * 3 / 2);
    uint8_t *scale_y = scale_buf;
    uint8_t *scale_u = scale_buf + scale_y_length;
    uint8_t *scale_v = scale_u + scale_y_length / 4;

    // Resize the planar frame to the target resolution.
    libyuv::I420Scale(i420_buffer, (int)y_stride,
                      src_u, (int)(uv_stride / 2),
                      src_v, (int)(uv_stride / 2),
                      (int)pixelWidth, (int)pixelHeight,
                      scale_y, shareSWidth,
                      scale_u, scale_uv_stride,
                      scale_v, scale_uv_stride,
                      shareSWidth, shareSHeight,
                      libyuv::kFilterNone);

    // Re-interleave the scaled I420 back into packed NV12 (Y then UV).
    uint8_t *dst_y = (uint8_t *)malloc((shareSWidth * shareSHeight * 3) >> 1);
    uint8_t *dst_uv = dst_y + shareSWidth * shareSHeight;
    libyuv::I420ToNV12(scale_y, shareSWidth,
                       scale_u, scale_uv_stride,
                       scale_v, scale_uv_stride,
                       dst_y, shareSWidth,
                       dst_uv, shareSWidth, // NV12 UV stride == luma width
                       shareSWidth, shareSHeight);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    free(i420_buffer);
    free(scale_buf);

    // Package the scaled NV12 data into a new IOSurface-backed CVPixelBuffer.
    NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
    CVPixelBufferRef pixelBuffer1 = NULL;
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
                                          shareSWidth, shareSHeight,
                                          kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
                                          (__bridge CFDictionaryRef)pixelAttributes,
                                          &pixelBuffer1);
    // FIX: check the create result BEFORE locking/writing — the original
    // locked and memcpy'd into a possibly-NULL buffer first.
    if (result != kCVReturnSuccess || pixelBuffer1 == NULL) {
        NSLog(@"Unable to create cvpixelbuffer %d", result);
        free(dst_y);
        return NULL;
    }

    CVPixelBufferLockBaseAddress(pixelBuffer1, 0);
    // FIX: copy row-by-row honouring the destination bytes-per-row.
    // CoreVideo may pad rows (e.g. width 720 -> stride 768), so a single
    // contiguous memcpy shears the image.
    uint8_t *yDestPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer1, 0);
    size_t yDestStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer1, 0);
    for (int row = 0; row < shareSHeight; row++) {
        memcpy(yDestPlane + row * yDestStride, dst_y + row * shareSWidth, shareSWidth);
    }
    uint8_t *uvDestPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer1, 1);
    size_t uvDestStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer1, 1);
    for (int row = 0; row < shareSHeight / 2; row++) {
        memcpy(uvDestPlane + row * uvDestStride, dst_uv + row * shareSWidth, shareSWidth);
    }
    CVPixelBufferUnlockBaseAddress(pixelBuffer1, 0);
    free(dst_y);

    // Ownership (+1) transfers to the caller.
    return pixelBuffer1;
}

時間戳
// Wrap a CVPixelBufferRef in a CMSampleBuffer stamped with the current
// wall-clock time (nanosecond timescale). Consumes (releases) the pixel
// buffer, balancing the +1 retain returned by
// convertVideoSmapleBufferToYuvData:. Returns NULL on failure.
- (CMSampleBufferRef)pixelBufferToSampleBuffer:(CVPixelBufferRef)pixelBuffer
{
    if (pixelBuffer == NULL) {
        return NULL;
    }
    // FIX: initialize to NULL so a creation failure cannot return a garbage pointer.
    CMSampleBufferRef sampleBuffer = NULL;
    // Presentation timestamp = now, with a nanosecond timescale.
    CMTime frameTime = CMTimeMakeWithSeconds([[NSDate date] timeIntervalSince1970], 1000000000);
    // CMSampleTimingInfo is {duration, presentationTimeStamp, decodeTimeStamp}.
    // FIX: the original put frameTime in the *duration* slot; duration of a
    // single live frame should be kCMTimeInvalid.
    CMSampleTimingInfo timing = {kCMTimeInvalid, frameTime, kCMTimeInvalid};

    CMVideoFormatDescriptionRef videoInfo = NULL;
    // FIX: check the status — on failure videoInfo stays NULL and must not be
    // passed to CMSampleBufferCreateForImageBuffer.
    OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
    if (status != noErr || videoInfo == NULL) {
        NSLog(@"Failed to create format description with error %d.", (int)status);
        CVPixelBufferRelease(pixelBuffer);
        return NULL;
    }

    status = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &timing, &sampleBuffer);
    if (status != noErr) {
        // FIX: %d with an int cast — OSStatus is SInt32, %zd expects size_t.
        NSLog(@"Failed to create sample buffer with error %d.", (int)status);
    }
    CVPixelBufferRelease(pixelBuffer);
    CFRelease(videoInfo);

    return sampleBuffer;
}

參考鏈接
https://blog.csdn.net/sinat_36684217/article/details/75117920
http://www.itdecent.cn/p/050234c5fff2
http://www.itdecent.cn/p/68e05ad85490
http://www.itdecent.cn/p/dac9857b34d0

最后編輯于
?著作權(quán)歸作者所有,轉(zhuǎn)載或內(nèi)容合作請聯(lián)系作者
【社區(qū)內(nèi)容提示】社區(qū)部分內(nèi)容疑似由AI輔助生成,瀏覽時請結(jié)合常識與多方信息審慎甄別。
平臺聲明:文章內(nèi)容(如有圖片或視頻亦包括在內(nèi))由作者上傳并發(fā)布,文章內(nèi)容僅代表作者本人觀點,簡書系信息發(fā)布平臺,僅提供信息存儲服務(wù)。

友情鏈接更多精彩內(nèi)容