Mac OS: Rendering NV12, YUV420, and CMSampleBufferRef video with Metal



    There is not much material on this topic online; this post consolidates what I found, kept purely as a learning record.

    Requirement

    Render raw YUV420 video data with Metal.

    MTKView initialization

    vector_uint2 viewportSize;              // current drawable size, updated in the delegate
    MTKView *mMtkview;
    id <MTLDevice> mDevice;
    id <MTLCommandQueue> mCmdQueue;
    id <MTLRenderPipelineState> mPipeline;
    id <MTLBuffer> mBuffer;                 // vertex buffer (full-screen quad)
    id <MTLTexture> mTexture;               // single RGB texture (BGRA path)
    CVMetalTextureCacheRef mTextureCache;   // CVPixelBuffer -> MTLTexture cache
    NSUInteger vertexCount;
    
    id<MTLTexture> mTextureY;               // Y plane (R8)
    id<MTLTexture> mTextureUV;              // interleaved CbCr plane (RG8)
    
    id <MTLBuffer> mConvertMatrix;          // YUV -> RGB conversion matrix + offset
    
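    The HQHConvertMatrix struct and the HQHFragmentInputindexMatrix index used below are never defined in the post. A minimal shared-types header that both the Objective-C code and the .metal shader could import might look like this (the matrix/offset fields follow the setupMatrix code; the vertex struct and the enum value are assumptions):

    // HQHShaderTypes.h -- sketch of the shared header (assumed layout)
    #include <simd/simd.h>
    
    // Hypothetical vertex: clip-space position + texture coordinate
    typedef struct {
        vector_float4 position;
        vector_float2 textureCoordinate;
    } HQHVertex;
    
    // YUV -> RGB conversion matrix plus offset, filled in setupMatrix
    typedef struct {
        matrix_float3x3 matrix;
        vector_float3 offset;
    } HQHConvertMatrix;
    
    // Buffer index used when binding the matrix to the fragment shader
    typedef enum {
        HQHFragmentInputindexMatrix = 0,
    } HQHFragmentInputIndex;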

    setMetal

    mMtkview = [[MTKView alloc] initWithFrame:self.view.frame device:MTLCreateSystemDefaultDevice()];
    mDevice = mMtkview.device;
    self.view = mMtkview;
    mMtkview.delegate = self;
    mCmdQueue = [mDevice newCommandQueue];
    
    // Cache that turns CVPixelBuffers into Metal textures without extra copies
    CVMetalTextureCacheCreate(NULL, NULL, mDevice, NULL, &mTextureCache);
    
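    The vertex buffer mBuffer and vertexCount are used later in drawInMTKView: but are never filled in the post. A minimal sketch, assuming the hypothetical HQHVertex struct above and a full-screen quad drawn as two triangles (matching MTLPrimitiveTypeTriangle):

    // Sketch: full-screen quad, positions in clip space, texture coordinates with (0,0) at top-left
    static const HQHVertex quadVertices[] = {
        { { -1.0, -1.0, 0.0, 1.0 }, { 0.0, 1.0 } },
        { { -1.0,  1.0, 0.0, 1.0 }, { 0.0, 0.0 } },
        { {  1.0, -1.0, 0.0, 1.0 }, { 1.0, 1.0 } },
    
        { {  1.0, -1.0, 0.0, 1.0 }, { 1.0, 1.0 } },
        { { -1.0,  1.0, 0.0, 1.0 }, { 0.0, 0.0 } },
        { {  1.0,  1.0, 0.0, 1.0 }, { 1.0, 0.0 } },
    };
    mBuffer = [mDevice newBufferWithBytes:quadVertices
                                   length:sizeof(quadVertices)
                                  options:MTLResourceStorageModeShared];
    vertexCount = sizeof(quadVertices) / sizeof(HQHVertex);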

    setPipeline

    id <MTLLibrary> library = [mDevice newDefaultLibrary];
    // verfunc / fragfunc live in the project's .metal file (see the shader sketch below)
    id <MTLFunction> vertexfunc = [library newFunctionWithName:@"verfunc"];
    id <MTLFunction> fragfunc = [library newFunctionWithName:@"fragfunc"];
    MTLRenderPipelineDescriptor *renderdes = [MTLRenderPipelineDescriptor new];
    renderdes.vertexFunction = vertexfunc;
    renderdes.fragmentFunction = fragfunc;
    renderdes.colorAttachments[0].pixelFormat = mMtkview.colorPixelFormat;
    mPipeline = [mDevice newRenderPipelineStateWithDescriptor:renderdes error:nil];
    
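    The shader functions verfunc and fragfunc referenced above are not shown in the post. A sketch of what the .metal file might contain, assuming the HQHVertex / HQHConvertMatrix layout from the header sketch (the original shader may differ):

    // Shaders.metal -- sketch only
    #include <metal_stdlib>
    #include "HQHShaderTypes.h"
    using namespace metal;
    
    typedef struct {
        float4 position [[position]];
        float2 textureCoordinate;
    } RasterizerData;
    
    vertex RasterizerData verfunc(uint vertexID [[vertex_id]],
                                  constant HQHVertex *vertices [[buffer(0)]]) {
        RasterizerData out;
        out.position = vertices[vertexID].position;
        out.textureCoordinate = vertices[vertexID].textureCoordinate;
        return out;
    }
    
    fragment float4 fragfunc(RasterizerData in [[stage_in]],
                             texture2d<float> textureY  [[texture(0)]],
                             texture2d<float> textureUV [[texture(1)]],
                             constant HQHConvertMatrix *convertMatrix [[buffer(HQHFragmentInputindexMatrix)]]) {
        constexpr sampler textureSampler(mag_filter::linear, min_filter::linear);
        // Sample Y from plane 0 and CbCr from plane 1, then convert to RGB
        float3 yuv = float3(textureY.sample(textureSampler, in.textureCoordinate).r,
                            textureUV.sample(textureSampler, in.textureCoordinate).rg);
        float3 rgb = convertMatrix->matrix * (yuv + convertMatrix->offset);
        return float4(rgb, 1.0);
    }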

    setupMatrix

    matrix_float3x3 kColorConversion601FullRangeMatrix = (matrix_float3x3){
        (simd_float3){1.0,    1.0,    1.0},
        (simd_float3){0.0,    -0.343, 1.765},
        (simd_float3){1.4,    -0.711, 0.0},
    };
    // Offset applied to (Y, Cb, Cr) before the matrix multiply.
    // Note: -16/255 on Y is the video-range offset; for true full-range data the Y offset is 0.
    vector_float3 kColorConversion601FullRangeOffset = (vector_float3){ -(16.0/255.0), -0.5, -0.5};
    
    HQHConvertMatrix converMatrix;
    converMatrix.matrix = kColorConversion601FullRangeMatrix;
    converMatrix.offset = kColorConversion601FullRangeOffset;
    mConvertMatrix = [mDevice newBufferWithBytes:&converMatrix length:sizeof(HQHConvertMatrix) options:MTLResourceStorageModeShared];
    
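    Since the NV12 path later in the post creates a kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange buffer, a video-range BT.601 matrix is the better fit for that case. A commonly used variant, shown for reference (plug it into HQHConvertMatrix the same way):

    // BT.601 video-range (16-235 luma, 16-240 chroma) conversion
    matrix_float3x3 kColorConversion601VideoRangeMatrix = (matrix_float3x3){
        (simd_float3){1.164,  1.164, 1.164},
        (simd_float3){0.0,   -0.392, 2.017},
        (simd_float3){1.596, -0.813, 0.0},
    };
    vector_float3 kColorConversion601VideoRangeOffset = (vector_float3){ -(16.0/255.0), -0.5, -0.5};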

    MTKViewDelegate

    - (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
        viewportSize = (vector_uint2){size.width, size.height};
    }
    
    - (void)drawInMTKView:(MTKView *)view {
        if (mTextureY && mTextureUV) {
            id <MTLCommandBuffer> cmdBuffer = [mCmdQueue commandBuffer];
            MTLRenderPassDescriptor *passdes = view.currentRenderPassDescriptor;
            if (passdes != nil) {
                id <MTLRenderCommandEncoder> cmdEncoder = [cmdBuffer renderCommandEncoderWithDescriptor:passdes];
                // Metal's viewport depth range is [0, 1]
                [cmdEncoder setViewport:(MTLViewport){0.0, 0.0, viewportSize.x, viewportSize.y, 0.0, 1.0}];
                [cmdEncoder setRenderPipelineState:mPipeline];
                [cmdEncoder setVertexBuffer:mBuffer offset:0 atIndex:0];
    
                // Bind the Y and UV planes plus the conversion matrix for the fragment shader
                [cmdEncoder setFragmentTexture:mTextureY atIndex:0];
                [cmdEncoder setFragmentTexture:mTextureUV atIndex:1];
                [cmdEncoder setFragmentBuffer:mConvertMatrix offset:0 atIndex:HQHFragmentInputindexMatrix];
                [cmdEncoder drawPrimitives:MTLPrimitiveTypeTriangle vertexStart:0 vertexCount:vertexCount];
                
                /**
                 RGB data (single BGRA texture) variant:
                 
                [cmdEncoder setFragmentTexture:mTexture atIndex:0];
                [cmdEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:vertexCount];
                */
                [cmdEncoder endEncoding];
                [cmdBuffer presentDrawable:view.currentDrawable];
                [cmdBuffer commit];
            }
        }
    }
    
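    By default MTKView redraws on its own internal timer, so new textures are simply picked up on the next tick. An alternative, if frames arrive irregularly, is to pause the view and drive drawing manually; a sketch, not from the original post:

    // Configure once after setMetal
    mMtkview.paused = YES;
    mMtkview.enableSetNeedsDisplay = NO;
    
    // Then, after mTextureY / mTextureUV have been updated for a new frame:
    dispatch_async(dispatch_get_main_queue(), ^{
        [mMtkview draw];   // invokes drawInMTKView: immediately
    });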

    Capturing CMSampleBufferRef from the camera

    AVCaptureSession *mCaptureSession;
    AVCaptureDeviceInput *mCaptureInput;
    AVCaptureVideoDataOutput *mCaptureOutput;
    

    setupSession

    mCaptureSession = [[AVCaptureSession alloc] init];
    mCaptureSession.sessionPreset = AVCaptureSessionPreset1280x720;
    if (@available(macOS 10.15, *)) {
        AVCaptureDevice *cameraDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionBack];
        mCaptureInput = [[AVCaptureDeviceInput alloc] initWithDevice:cameraDevice error:nil];
    } else {
        AVCaptureDevice *cameraDevice = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo].firstObject;
        mCaptureInput = [[AVCaptureDeviceInput alloc] initWithDevice:cameraDevice error:nil];
    }
    if ([mCaptureSession canAddInput:mCaptureInput]) {
        [mCaptureSession addInput:mCaptureInput];
    }
    
    mCaptureOutput = [[AVCaptureVideoDataOutput alloc] init];
    [mCaptureOutput setAlwaysDiscardsLateVideoFrames:NO];
    // Deliver frames on a private serial queue
    [mCaptureOutput setSampleBufferDelegate:self queue:dispatch_queue_create("bd", DISPATCH_QUEUE_SERIAL)];
    
    if ([mCaptureSession canAddOutput:mCaptureOutput]) {
        [mCaptureSession addOutput:mCaptureOutput];
    }
    NSLog(@"out = %@ ary = %@",mCaptureOutput,[mCaptureOutput availableVideoCodecTypes]);
    // Ask for NV12 (bi-planar Y + interleaved CbCr, full range) so the YUV render path applies
    [mCaptureOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
    AVCaptureConnection *connection = [mCaptureOutput connectionWithMediaType:AVMediaTypeVideo];
    [connection setVideoOrientation:AVCaptureVideoOrientationPortrait];
    [mCaptureSession startRunning];
    
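    On macOS 10.14 and later the session will not deliver frames until the user grants camera access, and the app's Info.plist needs an NSCameraUsageDescription entry. A typical way to request access before running the setup code above:

    // Request camera permission, then build the capture session on the main queue
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        if (granted) {
            dispatch_async(dispatch_get_main_queue(), ^{
                [self setupSession];   // the setup method shown above
            });
        } else {
            NSLog(@"camera access denied");
        }
    }];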

    AVCaptureVideoDataOutputSampleBufferDelegate

    - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
        NSLog(@"data");
        // Uncomment to feed captured frames into the Metal renderer
    //    [self setTexture:sampleBuffer];
    }
    

    Rendering a CMSampleBufferRef

    - (void)setTexture:(CMSampleBufferRef)samplebuffer {
        CVPixelBufferRef pixelbuffer = CMSampleBufferGetImageBuffer(samplebuffer);
        // textureY (plane 0, one 8-bit channel)
        {
            size_t width = CVPixelBufferGetWidthOfPlane(pixelbuffer, 0);
            size_t height = CVPixelBufferGetHeightOfPlane(pixelbuffer, 0);
            CVMetalTextureRef texture = NULL;
            CVReturn status = CVMetalTextureCacheCreateTextureFromImage(NULL, mTextureCache, pixelbuffer, NULL, MTLPixelFormatR8Unorm, width, height, 0, &texture);
            if (status == kCVReturnSuccess) {
                mTextureY = CVMetalTextureGetTexture(texture);
                CFRelease(texture);
            }
        }
        // textureUV (plane 1, interleaved CbCr, two 8-bit channels)
        {
            size_t width = CVPixelBufferGetWidthOfPlane(pixelbuffer, 1);
            size_t height = CVPixelBufferGetHeightOfPlane(pixelbuffer, 1);
            CVMetalTextureRef texture = NULL;
            CVReturn status = CVMetalTextureCacheCreateTextureFromImage(NULL, mTextureCache, pixelbuffer, NULL, MTLPixelFormatRG8Unorm, width, height, 1, &texture);
            if (status == kCVReturnSuccess) {
                mTextureUV = CVMetalTextureGetTexture(texture);
                CFRelease(texture);
            }
        }
        
        // RGB path: only succeeds when the pixel buffer itself is kCVPixelFormatType_32BGRA;
        // for a bi-planar YUV buffer this call returns an error and mTexture stays unchanged.
        size_t width = CVPixelBufferGetWidth(pixelbuffer);
        size_t height = CVPixelBufferGetHeight(pixelbuffer);
        CVMetalTextureRef temTexture = nil;
        CVReturn status = CVMetalTextureCacheCreateTextureFromImage(NULL, mTextureCache, pixelbuffer, NULL, MTLPixelFormatBGRA8Unorm, width, height, 0, &temTexture);
        if (status == kCVReturnSuccess) {
            mMtkview.drawableSize = CGSizeMake(width, height);
            mTexture = CVMetalTextureGetTexture(temTexture);
            CFRelease(temTexture);
        }
    }
    
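    Because the method above runs both the YUV and the BGRA branches, in practice one would check the pixel format of the incoming buffer and run only the matching branch. A sketch of that dispatch:

    // Decide between the YUV and BGRA paths based on the buffer's pixel format
    OSType format = CVPixelBufferGetPixelFormatType(pixelbuffer);
    if (format == kCVPixelFormatType_32BGRA) {
        // single-plane RGB buffer -> create the BGRA texture (mTexture)
    } else if (format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
               format == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
        // bi-planar YUV buffer -> create mTextureY / mTextureUV from planes 0 and 1
    }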

    Converting YUV420P to NV12

    // Repack planar I420 (Y plane, then U plane, then V plane) into NV12
    // (Y plane, then interleaved UV plane). Assumes tightly packed input and output.
    void YUV420PtoNV12(unsigned char *Src, unsigned char *Dst, int Width, int Height){
        unsigned char *SrcU = Src + Width * Height;            // start of U plane
        unsigned char *SrcV = SrcU + Width * Height / 4;       // start of V plane
        memcpy(Dst, Src, Width * Height);                      // copy Y plane unchanged
        unsigned char *DstU = Dst + Width * Height;
        for (int i = 0; i < Width * Height / 4; i++) {         // interleave U and V
            (*DstU++) = (*SrcU++);
            (*DstU++) = (*SrcV++);
        }
    }
    

    Rendering NV12

    - (void)renderYUV420P:(EVFrame *)frame {
        int w = frame.vsize.width;
        int h = frame.vsize.height;
        unsigned char *i420 = frame.data;
        // NV12 needs width * height * 3 / 2 bytes
        unsigned char *buffer = malloc(w * h * 3 / 2);
        
        YUV420PtoNV12(i420, buffer, w, h);
        
        NSDictionary *pixelAttributes = @{(NSString*)kCVPixelBufferIOSurfacePropertiesKey:@{}};
        CVPixelBufferRef pixelBuffer = NULL;
        CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault, w, h, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, (__bridge CFDictionaryRef)(pixelAttributes), &pixelBuffer);
        if (result != kCVReturnSuccess) {
            NSLog(@"Unable to create cvpixelbuffer %d", result);
            free(buffer);
            return;
        }
        
        CVPixelBufferLockBaseAddress(pixelBuffer, 0);
        void *yDestPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
        unsigned char *y_ch0 = buffer;           // Y plane
        unsigned char *y_ch1 = buffer + w * h;   // interleaved UV plane
        // Note: these memcpy calls assume the plane's bytesPerRow equals the frame width
        // (no row padding); see the stride-aware variant after this listing.
        memcpy(yDestPlane, y_ch0, w * h);
        void *uvDestPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
        // NV12: the UV plane is full width, half height
        memcpy(uvDestPlane, y_ch1, w * h / 2);
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        
        // textureY
        {
            size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
            size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
            CVMetalTextureRef texture = NULL;
            CVReturn status = CVMetalTextureCacheCreateTextureFromImage(NULL, mTextureCache, pixelBuffer, NULL, MTLPixelFormatR8Unorm, width, height, 0, &texture);
            if (status == kCVReturnSuccess) {
                mTextureY = CVMetalTextureGetTexture(texture);
                CFRelease(texture);
            }
        }
        // textureUV
        {
            size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
            size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
            CVMetalTextureRef texture = NULL;
            CVReturn status = CVMetalTextureCacheCreateTextureFromImage(NULL, mTextureCache, pixelBuffer, NULL, MTLPixelFormatRG8Unorm, width, height, 1, &texture);
            if (status == kCVReturnSuccess) {
                mTextureUV = CVMetalTextureGetTexture(texture);
                CFRelease(texture);
            }
        }
    
        // RGB path: only applies to BGRA pixel buffers; for this NV12 buffer the call fails.
        size_t width = CVPixelBufferGetWidth(pixelBuffer);
        size_t height = CVPixelBufferGetHeight(pixelBuffer);
        CVMetalTextureRef temTexture = nil;
        CVReturn status = CVMetalTextureCacheCreateTextureFromImage(NULL, mTextureCache, pixelBuffer, NULL, MTLPixelFormatBGRA8Unorm, width, height, 0, &temTexture);
        if (status == kCVReturnSuccess) {
            mMtkview.drawableSize = CGSizeMake(width, height);
            mTexture = CVMetalTextureGetTexture(temTexture);
            CFRelease(temTexture);
        }
        
        CVPixelBufferRelease(pixelBuffer);
        free(buffer);
        buffer = NULL;
    }
    
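    The memcpy calls above assume there is no row padding in the destination planes. When CVPixelBufferGetBytesPerRowOfPlane reports a stride larger than the frame width (common for widths that are not multiples of the allocator's alignment), copy row by row instead:

    // Stride-aware copy of the Y and UV planes into the locked pixel buffer
    size_t yStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    for (int row = 0; row < h; row++) {
        memcpy((unsigned char *)yDestPlane + row * yStride, y_ch0 + row * w, w);
    }
    size_t uvStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
    for (int row = 0; row < h / 2; row++) {
        memcpy((unsigned char *)uvDestPlane + row * uvStride, y_ch1 + row * w, w);
    }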

    END

    Metal really is much simpler and more pleasant to use than OpenGL.

    Original post: https://blog.csdn.net/quanhaoH/article/details/126585118