How can I achieve iOS 7's real-time frosted-glass blur effect with HTML5?
Achieving iOS 7's frosted-glass blur effect has to be done in code. What follows is a native Objective-C implementation (rather than an HTML5 one) that blurs a UIImage using the Accelerate framework's vImage box convolution:
// Blur an image. `image` is the source image, `blur` is the blur level (0.1–2.0).
// Requires #import <Accelerate/Accelerate.h> — the Accelerate framework provides
// C APIs for vector/matrix math, digital signal processing, and image processing (vImage).
- (UIImage *)blurryImage:(UIImage *)image withBlurLevel:(CGFloat)blur {
    // Clamp the blur level to a sane default if it is out of range.
    if ((blur < 0.1f) || (blur > 2.0f)) {
        blur = 0.5f;
    }
    // boxSize must be a positive odd number for vImageBoxConvolve.
    int boxSize = (int)(blur * 100);
    boxSize -= (boxSize % 2) + 1;
    NSLog(@"boxSize:%i", boxSize);

    CGImageRef img = image.CGImage;

    // vImage buffers: input, output, and (below) an intermediate buffer.
    vImage_Buffer inBuffer, outBuffer;
    vImage_Error error;
    void *pixelBuffer;

    // Data provider: an opaque type that supplies Quartz with the image's pixel data.
    CGDataProviderRef inProvider = CGImageGetDataProvider(img);
    CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);

    // Width, height, bytes per row, and pixel data of the input buffer.
    inBuffer.width = CGImageGetWidth(img);
    inBuffer.height = CGImageGetHeight(img);
    inBuffer.rowBytes = CGImageGetBytesPerRow(img);
    inBuffer.data = (void *)CFDataGetBytePtr(inBitmapData);

    // Output buffer: bytes per row * image height.
    pixelBuffer = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
    outBuffer.data = pixelBuffer;
    outBuffer.width = CGImageGetWidth(img);
    outBuffer.height = CGImageGetHeight(img);
    outBuffer.rowBytes = CGImageGetBytesPerRow(img);

    // A third, intermediate buffer so the box blur can be applied three times,
    // which approximates a Gaussian blur and smooths out box-filter artifacts.
    void *pixelBuffer2 = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
    vImage_Buffer outBuffer2;
    outBuffer2.data = pixelBuffer2;
    outBuffer2.width = CGImageGetWidth(img);
    outBuffer2.height = CGImageGetHeight(img);
    outBuffer2.rowBytes = CGImageGetBytesPerRow(img);

    // vImageBoxConvolve_ARGB8888 convolves an ARGB8888 image with an implicit
    // M x N kernel that has the effect of a box filter; run it three times.
    error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer2, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    error = vImageBoxConvolve_ARGB8888(&outBuffer2, &inBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    error = vImageBoxConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0, boxSize, boxSize, NULL, kvImageEdgeExtend);
    if (error) {
        NSLog(@"error from convolution %ld", error);
    }

    // Rebuild a CGImage from the blurred pixel buffer using a DeviceRGB color space.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(outBuffer.data,
                                             outBuffer.width,
                                             outBuffer.height,
                                             8,
                                             outBuffer.rowBytes,
                                             colorSpace,
                                             CGImageGetBitmapInfo(image.CGImage));
    CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
    UIImage *returnImage = [UIImage imageWithCGImage:imageRef];

    // Clean up: release each Core Foundation object exactly once.
    CGContextRelease(ctx);
    CGColorSpaceRelease(colorSpace);
    free(pixelBuffer);
    free(pixelBuffer2);
    CFRelease(inBitmapData);
    CGImageRelease(imageRef);
    return returnImage;
}
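To get something close to the iOS 7 look, the blurred image is usually a snapshot of whatever sits behind the overlay. Below is a minimal usage sketch of how the method above might be called; `self.backgroundView` and `self.blurOverlay` are hypothetical properties of the hosting view controller, not part of the code above.

// Minimal usage sketch (hypothetical view names): snapshot the content behind
// the overlay, blur the snapshot, and show it in an image view on top.
// May need #import <QuartzCore/QuartzCore.h> for -renderInContext: on older SDKs.
- (void)refreshBlurOverlay {
    UIGraphicsBeginImageContextWithOptions(self.backgroundView.bounds.size, NO, 0);
    [self.backgroundView.layer renderInContext:UIGraphicsGetCurrentContext()];
    UIImage *snapshot = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    // 0.5 is a middle-of-the-road blur level for this method.
    self.blurOverlay.image = [self blurryImage:snapshot withBlurLevel:0.5f];
}

For a "live" effect like Control Center's you would have to call this again whenever the content underneath changes, so keep the snapshot small and the blur level moderate.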
Be careful with the clean-up in the code above: releasing colorSpace more than once crashes the app with a log like:
Assertion failed: (!space->is_singleton), function color_space_dealloc, file ColorSpaces/color-space
Each Core Foundation object must be released exactly once. Alternatively, if you would rather not manage pixel buffers yourself, the same blur can be implemented with Core Image:
// Blur an image with Core Image's CIGaussianBlur filter.
- (UIImage *)applyBlurRadius:(CGFloat)radius toImage:(UIImage *)image
{
    if (radius < 0) radius = 0;

    CIContext *context = [CIContext contextWithOptions:nil];
    CIImage *inputImage = [CIImage imageWithCGImage:image.CGImage];

    // Set up the Gaussian blur filter.
    CIFilter *filter = [CIFilter filterWithName:@"CIGaussianBlur"];
    [filter setValue:inputImage forKey:kCIInputImageKey];
    [filter setValue:[NSNumber numberWithFloat:radius] forKey:@"inputRadius"];
    CIImage *result = [filter valueForKey:kCIOutputImageKey];

    // Render the result, cropped back to the original image's extent
    // (the blur expands the image beyond its original edges).
    CGImageRef cgImage = [context createCGImage:result fromRect:[inputImage extent]];
    UIImage *returnImage = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    return returnImage;
}
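Calling the Core Image variant looks much the same; in the sketch below the asset name and image view are placeholders, and the radius is just an example value.

// Hypothetical usage of the Core Image variant.
UIImage *source = [UIImage imageNamed:@"background"];                 // placeholder asset name
self.blurOverlay.image = [self applyBlurRadius:8.0f toImage:source];  // 8pt radius as an example

Note that re-running a Gaussian blur every frame is expensive, so neither method gives a truly real-time blur on older devices; both are best applied to static snapshots.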
References:
1. Demo download for the vImage approach: http://download.csdn.net/detail/rhljiayou/6000293
2. Source this answer is based on: http://www.seosohu.com/thread-204-1-1.html