1. Configure the basic camera environment (initialize the AVCaptureSession, set the delegate, start the session). This is covered in the sample project and not repeated here; a minimal setup sketch follows right after this list.
2. Receive the raw frame data (CMSampleBufferRef) in the AVCaptureVideoDataOutputSampleBufferDelegate callback and process it there, as shown in the code below.
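For step 1, here is a minimal setup sketch under assumed conditions (ARC, a 1280x720 preset); the property names `session`, `videoDataOutput` and `captureQueue` are illustrative, not taken from the original sample:

#import <AVFoundation/AVFoundation.h>

- (void)setupCaptureSession {
    self.session = [[AVCaptureSession alloc] init];
    self.session.sessionPreset = AVCaptureSessionPreset1280x720;

    // Camera input
    AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
    if (input && [self.session canAddInput:input]) {
        [self.session addInput:input];
    }

    // Video data output that delivers CMSampleBufferRef frames to the delegate below
    self.videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    self.videoDataOutput.alwaysDiscardsLateVideoFrames = YES;
    self.captureQueue = dispatch_queue_create("com.example.capture", DISPATCH_QUEUE_SERIAL);
    [self.videoDataOutput setSampleBufferDelegate:self queue:self.captureQueue];
    if ([self.session canAddOutput:self.videoDataOutput]) {
        [self.session addOutput:self.videoDataOutput];
    }

    [self.session startRunning];
}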
// AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {

    // Use either one of the two approaches below, not both:
    // 1. Crop on the CPU
    cropSampleBuffer = [self cropSampleBufferBySoftware:sampleBuffer];
    // 2. Crop on the GPU
    // cropSampleBuffer = [self cropSampleBufferByHardware:sampleBuffer];

    // Note: don't forget to release cropSampleBuffer, otherwise the leaked memory will eventually crash the app.
    CFRelease(cropSampleBuffer);
}
- (CMSampleBufferRef)cropSampleBufferBySoftware:(CMSampleBufferRef)sampleBuffer {
    OSStatus status;
    // Get the CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the image buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    // Get information about the image
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    NSInteger bytesPerPixel = bytesPerRow / width;

    // NSLog(@"demon pix first : %zu - %zu", width, height);

    CVPixelBufferRef pixbuffer;
    // Many variants of this dictionary circulate online; in my tests the image was corrupted unless it was written exactly as below.
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             [NSNumber numberWithInt:g_width_size], kCVPixelBufferWidthKey,
                             [NSNumber numberWithInt:g_height_size], kCVPixelBufferHeightKey,
                             nil];

    int cropX = (int)(currentResolutionW / kScreenWidth * self.cropView.frame.origin.x);
    int cropY = (int)(currentResolutionH / kScreenHeight * self.cropView.frame.origin.y);

    // Because of how YUV is laid out (explained in the post), x must be even, otherwise rendering fails.
    if (cropX % 2 != 0) cropX += 1;
    // This line locates the first byte of the crop: bytesPerRow gives the Y (row) offset, bytesPerPixel gives the X (column) offset.
    NSInteger baseAddressStart = cropY * bytesPerRow + bytesPerPixel * cropX;
    status = CVPixelBufferCreateWithBytes(kCFAllocatorDefault, g_width_size, g_height_size, kCVPixelFormatType_32BGRA, &baseAddress[baseAddressStart], bytesPerRow, NULL, NULL, (CFDictionaryRef)options, &pixbuffer);
    if (status != 0) {
        log4cplus_debug("AVCaptureVideoDataOutputSampleBufferDelegate", "CVPixelBufferCreateWithBytes error %d", (int)status);
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return NULL;
    }
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    CMSampleTimingInfo sampleTime = {
        .duration = CMSampleBufferGetDuration(sampleBuffer),
        .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
        .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
    };

    CMVideoFormatDescriptionRef videoInfo = NULL;
    status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixbuffer, &videoInfo);
    if (status != 0) log4cplus_debug("AVCaptureVideoDataOutputSampleBufferDelegate", "CMVideoFormatDescriptionCreateForImageBuffer error %d", (int)status);

    CMSampleBufferRef cropBuffer;
    status = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixbuffer, true, NULL, NULL, videoInfo, &sampleTime, &cropBuffer);
    if (status != 0) log4cplus_debug("AVCaptureVideoDataOutputSampleBufferDelegate", "CMSampleBufferCreateForImageBuffer error %d", (int)status);

    CFRelease(videoInfo);
    CVPixelBufferRelease(pixbuffer);

    return cropBuffer;
}
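The software path reads the frame as packed 32BGRA, so the video data output has to be configured to deliver BGRA frames rather than the default bi-planar YUV. A minimal sketch, assuming the illustrative `videoDataOutput` property from the setup sketch above:

self.videoDataOutput.videoSettings = @{
    (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)
};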

// hardware crop
- (CMSampleBufferRef)cropSampleBufferByHardware:(CMSampleBufferRef)buffer {
    // The CMSampleBuffer's CVImageBuffer of media data.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(buffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    // log4cplus_debug("AVCaptureVideoDataOutputSampleBufferDelegate", "CMSampleBufferRef origin pix width: %zu - height : %zu", width, height);

    CGFloat cropViewX = currentResolutionW / kScreenWidth * self.cropView.frame.origin.x;
    // The CIImage origin is at the bottom-left, so the Y coordinate has to be flipped
    CGFloat cropViewY = currentResolutionH / kScreenHeight * (kScreenHeight - self.cropView.frame.origin.y - self.cropView.frame.size.height);

    CGRect cropRect = CGRectMake(cropViewX, cropViewY, g_width_size, g_height_size);
    // log4cplus_debug("AVCaptureVideoDataOutputSampleBufferDelegate", "dropRect x: %f - y : %f - width : %zu - height : %zu", cropViewX, cropViewY, width, height);

    /*
     First, to render to a texture, you need an image that is compatible with the OpenGL texture cache. Images that were created with the camera API are already compatible and you can immediately map them for inputs. Suppose you want to create an image to render on and later read out for some other processing though. You have to create the image with a special property: the attributes for the image must include kCVPixelBufferIOSurfacePropertiesKey as one of the keys of the dictionary. So the following step must not be skipped.
     */
    OSStatus status;
    CVPixelBufferRef pixelBuffer;
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             // [NSNumber numberWithBool:YES], kCVPixelBufferOpenGLCompatibilityKey,
                             // [NSNumber numberWithBool:YES], kCVPixelBufferOpenGLESCompatibilityKey,
                             // [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             // [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             [NSNumber numberWithInt:g_width_size], kCVPixelBufferWidthKey,
                             [NSNumber numberWithInt:g_height_size], kCVPixelBufferHeightKey,
                             nil];
    status = CVPixelBufferCreate(kCFAllocatorSystemDefault, g_width_size, g_height_size, kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, (__bridge CFDictionaryRef)options, &pixelBuffer);
    if (status != 0) log4cplus_debug("AVCaptureVideoDataOutputSampleBufferDelegate", "CVPixelBufferCreate error %d", (int)status);

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
    // ciImage = [ciImage imageByCroppingToRect:cropRect];

    if (_ciContext == nil) {
        EAGLContext *eaglContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        _ciContext = [CIContext contextWithEAGLContext:eaglContext options:@{kCIContextWorkingColorSpace : [NSNull null]}];
#warning Under MRC you must retain the CIContext manually here; the factory method does not retain it for you, and rendering will otherwise crash on a deallocated ciContext.
        // [eaglContext release];
        // [ciContext retain];
    }

    // In OS X 10.11.3 and iOS 9.3 and later
    // CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // [ciContext render:ciImage toCVPixelBuffer:pixelBuffer];
    // Of the two render variants discussed in the post, this one worked better in my tests
    [_ciContext render:ciImage toCVPixelBuffer:pixelBuffer bounds:cropRect colorSpace:nil];

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    CMSampleTimingInfo sampleTime = {
        .duration = CMSampleBufferGetDuration(buffer),
        .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(buffer),
        .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(buffer)
    };

    CMVideoFormatDescriptionRef videoInfo = NULL;
    status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
    if (status != 0) {
        // log4cplus_debug("AVCaptureVideoDataOutputSampleBufferDelegate", "CMVideoFormatDescriptionCreateForImageBuffer error %d", (int)status);
    }

    CMSampleBufferRef cropBuffer;
    status = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &sampleTime, &cropBuffer);
    if (status != 0) {
        // log4cplus_debug("AVCaptureVideoDataOutputSampleBufferDelegate", "CMSampleBufferCreateForImageBuffer error %d", (int)status);
    }

    CFRelease(videoInfo);
    CFRelease(pixelBuffer);

    return cropBuffer;
}
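Note that the comment inside the method asks for kCVPixelBufferIOSurfacePropertiesKey, while the options dictionary above only sets width and height. If the rendered buffer later needs to feed a texture cache, the attributes could be written as in the following sketch (this is my reading of the comment, not code from the original sample; an empty sub-dictionary requests default IOSurface backing):

NSDictionary *options = @{
    (id)kCVPixelBufferIOSurfacePropertiesKey : @{},   // request IOSurface backing
    (id)kCVPixelBufferWidthKey  : @(g_width_size),
    (id)kCVPixelBufferHeightKey : @(g_height_size)
};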
Core Image, by contrast, places the origin at the bottom-left (in Core Image, every image's coordinate system is device-independent). So when cropping, make sure the coordinates are converted: X can be used as-is, but Y must be flipped.

If you crop the image first with
ciImage = [ciImage imageByCroppingToRect:cropRect];
then render it with [ciContext render:ciImage toCVPixelBuffer:pixelBuffer];
otherwise render the full image with [ciContext render:ciImage toCVPixelBuffer:pixelBuffer bounds:cropRect colorSpace:rgbColorSpace];
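To make the flip explicit, here is a small sketch of the conversion used above, turning the crop view's UIKit frame (top-left origin, in points) into a Core Image rect in buffer pixels; the parameter names mirror kScreenWidth/kScreenHeight, currentResolutionW/currentResolutionH and g_width_size/g_height_size from the code, and the helper itself is illustrative:

// Convert a UIKit frame (origin top-left, points) to a Core Image rect (origin bottom-left, buffer pixels).
static CGRect CICropRectFromViewFrame(CGRect viewFrame,
                                      CGFloat screenW, CGFloat screenH,   // kScreenWidth / kScreenHeight
                                      CGFloat bufferW, CGFloat bufferH,   // currentResolutionW / currentResolutionH
                                      CGFloat cropW, CGFloat cropH) {     // g_width_size / g_height_size
    CGFloat scaleX = bufferW / screenW;
    CGFloat scaleY = bufferH / screenH;
    CGFloat x = viewFrame.origin.x * scaleX;
    // Flip Y: measure the crop's offset from the bottom edge instead of the top edge.
    CGFloat y = (screenH - viewFrame.origin.y - viewFrame.size.height) * scaleY;
    return CGRectMake(x, y, cropW, cropH);
}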