如何将 CFDataRef 转换为 NSData
1个回答
展开全部
解决方法:
#pragma mark - Convert SampleBuffer to UIImage

/// Converts a camera sample buffer into a grayscale UIImage.
///
/// Works only if the pixel format is kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
/// (plane 0 is the 8-bit luma plane, which maps directly onto a
/// device-gray bitmap context).
///
/// @param sampleBuffer The CMSampleBuffer delivered by the capture output.
///                     Must contain a CVPixelBuffer-backed image buffer.
/// @return A UIImage built from the luma plane, or nil if the buffer
///         has no image data.
- (UIImage *)convertSampleBufferToUIImageSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // Get the CMSampleBuffer's Core Video image buffer for the media data.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL) {
        return nil;
    }

    // Lock the base address of the pixel buffer before touching its memory.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Base address and stride of plane 0 (the luma plane for biplanar YpCbCr).
    void *baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);

    // Pixel buffer dimensions.
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent gray color space.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();

    // Create a bitmap graphics context with the sample buffer data:
    // 8 bits per component, no alpha.
    CGContextRef context = CGBitmapContextCreate(baseAddress,
                                                 width,
                                                 height,
                                                 8,
                                                 bytesPerRow,
                                                 colorSpace,
                                                 kCGImageAlphaNone);

    // Create a Quartz image from the pixel data in the bitmap graphics context.
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);

    // Unlock the pixel buffer now that we no longer read its memory.
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Free up the context and color space.
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Create an image object from the Quartz image.
    UIImage *image = [UIImage imageWithCGImage:quartzImage];

    // Release the Quartz image; UIImage retains what it needs.
    CGImageRelease(quartzImage);

    return image;
}
#pragma mark - Convert SampleBuffer to UIImage

/// Converts a camera sample buffer into a grayscale UIImage.
///
/// Works only if the pixel format is kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
/// (plane 0 is the 8-bit luma plane, which maps directly onto a
/// device-gray bitmap context).
///
/// @param sampleBuffer The CMSampleBuffer delivered by the capture output.
///                     Must contain a CVPixelBuffer-backed image buffer.
/// @return A UIImage built from the luma plane, or nil if the buffer
///         has no image data.
- (UIImage *)convertSampleBufferToUIImageSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // Get the CMSampleBuffer's Core Video image buffer for the media data.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL) {
        return nil;
    }

    // Lock the base address of the pixel buffer before touching its memory.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Base address and stride of plane 0 (the luma plane for biplanar YpCbCr).
    void *baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);

    // Pixel buffer dimensions.
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent gray color space.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();

    // Create a bitmap graphics context with the sample buffer data:
    // 8 bits per component, no alpha.
    CGContextRef context = CGBitmapContextCreate(baseAddress,
                                                 width,
                                                 height,
                                                 8,
                                                 bytesPerRow,
                                                 colorSpace,
                                                 kCGImageAlphaNone);

    // Create a Quartz image from the pixel data in the bitmap graphics context.
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);

    // Unlock the pixel buffer now that we no longer read its memory.
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Free up the context and color space.
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Create an image object from the Quartz image.
    UIImage *image = [UIImage imageWithCGImage:quartzImage];

    // Release the Quartz image; UIImage retains what it needs.
    CGImageRelease(quartzImage);

    return image;
}
推荐律师服务:
若未解决您的问题,请您详细描述您的问题,通过百度律临进行免费专业咨询