
Real-Time Face Recognition with GPUImage

I've recently been researching how to implement biometric liveness detection in Objective-C, and found that the SDK already ships with classes for the relevant functionality, so I dug in and built an implementation.

Along the way I hit one particularly nasty pitfall: the GPUImage framework captures video in YUV format, and YUV frames cannot be handed to the Objective-C face-detection classes for real-time recognition.
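You can verify this from the raw frames GPUImage delivers. Below is a minimal sketch, assuming your class is set as the camera's delegate (GPUImageVideoCamera exposes an optional willOutputSampleBuffer: callback through its GPUImageVideoCameraDelegate protocol):

    #import <CoreMedia/CoreMedia.h>

    // Inspect the pixel format of a frame delivered by GPUImageVideoCamera.
    // With the stock library this reports bi-planar YUV ('420f' or '420v'),
    // not the 32BGRA that the face-detection classes need.
    - (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
    {
        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        OSType format = CVPixelBufferGetPixelFormatType(pixelBuffer);

        if (format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
            format == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
        {
            NSLog(@"Got a YUV frame - not directly usable for face detection");
        }
    }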

Let's dissect how GPUImageVideoCamera is implemented:

    @interface GPUImageVideoCamera : GPUImageOutput <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>

    - (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
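For context, here is a minimal usage sketch of that initializer (filterView is a hypothetical GPUImageView target):

    // Create the camera, attach a render target, and start capturing.
    GPUImageVideoCamera *videoCamera =
        [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480
                                            cameraPosition:AVCaptureDevicePositionFront];
    videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;

    [videoCamera addTarget:filterView]; // filterView: a GPUImageView in your view hierarchy
    [videoCamera startCameraCapture];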

As you can see, it exposes a single initializer. Its implementation is as follows:

    - (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
    {
        if (!(self = [super init]))
        {
            return nil;
        }

        cameraProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
        audioProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0);

        frameRenderingSemaphore = dispatch_semaphore_create(1);

        _frameRate = 0; // This will not set frame rate unless this value gets set to 1 or above
        _runBenchmark = NO;
        capturePaused = NO;
        outputRotation = kGPUImageNoRotation;
        internalRotation = kGPUImageNoRotation;
        captureAsYUV = YES; // Hard-coded to YES -- this is the pitfall discussed above
        _preferredConversion = kColorConversion709;

        // Grab the back-facing or front-facing camera
        _inputCamera = nil;
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
        for (AVCaptureDevice *device in devices)
        {
            if ([device position] == cameraPosition)
            {
                _inputCamera = device;
            }
        }

        if (!_inputCamera) {
            return nil;
        }

        // Create the capture session
        _captureSession = [[AVCaptureSession alloc] init];
        [_captureSession beginConfiguration];

        // Add the video input
        NSError *error = nil;
        videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_inputCamera error:&error];
        if ([_captureSession canAddInput:videoInput])
        {
            [_captureSession addInput:videoInput];
        }

        // Add the video frame output
        videoOutput = [[AVCaptureVideoDataOutput alloc] init];
        [videoOutput setAlwaysDiscardsLateVideoFrames:NO];

    //    if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
        if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
        {
            BOOL supportsFullYUVRange = NO;
            NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes;
            for (NSNumber *currentPixelFormat in supportedPixelFormats)
            {
                if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
                {
                    supportsFullYUVRange = YES;
                }
            }

            if (supportsFullYUVRange)
            {
                [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
                isFullYUVRange = YES;
            }
            else
            {
                [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
                isFullYUVRange = NO;
            }
        }
        else
        {
            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
        }

        runSynchronouslyOnVideoProcessingQueue(^{
            if (captureAsYUV)
            {
                [GPUImageContext useImageProcessingContext];
    //            if ([GPUImageContext deviceSupportsRedTextures])
    //            {
    //                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForRGFragmentShaderString];
    //            }
    //            else
    //            {
                    if (isFullYUVRange)
                    {
                        yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];
                    }
                    else
                    {
                        yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString];
                    }
    //            }

                if (!yuvConversionProgram.initialized)
                {
                    [yuvConversionProgram addAttribute:@"position"];
                    [yuvConversionProgram addAttribute:@"inputTextureCoordinate"];

                    if (![yuvConversionProgram link])
                    {
                        NSString *progLog = [yuvConversionProgram programLog];
                        NSLog(@"Program link log: %@", progLog);
                        NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
                        NSLog(@"Fragment shader compile log: %@", fragLog);
                        NSString *vertLog = [yuvConversionProgram vertexShaderLog];
                        NSLog(@"Vertex shader compile log: %@", vertLog);
                        yuvConversionProgram = nil;
                        NSAssert(NO, @"Filter shader link failed");
                    }
                }

                yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
                yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
                yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
                yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
                yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];

                [GPUImageContext setActiveShaderProgram:yuvConversionProgram];

                glEnableVertexAttribArray(yuvConversionPositionAttribute);
                glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
            }
        });

        [videoOutput setSampleBufferDelegate:self queue:cameraProcessingQueue];
        if ([_captureSession canAddOutput:videoOutput])
        {
            [_captureSession addOutput:videoOutput];
        }
        else
        {
            NSLog(@"Couldn't add video output");
            return nil;
        }

        _captureSessionPreset = sessionPreset;
        [_captureSession setSessionPreset:_captureSessionPreset];

        [_captureSession commitConfiguration];

        return self;
    }
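Notice the captureAsYUV = YES; near the top of the initializer: the flag is hard-coded, which is exactly why the stock camera always hands you YUV frames on devices with fast texture upload. One workaround is to change that assignment to NO in GPUImageVideoCamera.m, so the else branch configures the video output for kCVPixelFormatType_32BGRA; BGRA frames can then be fed straight to Core Image's face detector. Here is a minimal sketch under that assumption, again using the willOutputSampleBuffer: delegate hook that GPUImageVideoCamera already exposes:

    #import <UIKit/UIKit.h>
    #import <CoreImage/CoreImage.h>

    // Runs CIDetector face detection on each frame.
    // Assumes captureAsYUV has been switched to NO in GPUImageVideoCamera.m,
    // so the sample buffers arrive as 32BGRA.
    - (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
    {
        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CIImage *frame = [CIImage imageWithCVPixelBuffer:pixelBuffer];

        // Create the detector once; setting it up per frame would be far too slow.
        static CIDetector *faceDetector = nil;
        static dispatch_once_t onceToken;
        dispatch_once(&onceToken, ^{
            faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace
                                              context:nil
                                              options:@{CIDetectorAccuracy : CIDetectorAccuracyLow}];
        });

        for (CIFaceFeature *face in [faceDetector featuresInImage:frame]) {
            NSLog(@"Face detected at %@", NSStringFromCGRect(face.bounds));
        }
    }

Reusing a single CIDetector instance and keeping its accuracy set to CIDetectorAccuracyLow keeps the per-frame cost manageable, while GPUImage's filter chain continues rendering as before.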