废话开篇:通过 openCV 的像素点修改实现单张图片与 gif 图组合在一起展示效果。
一、效果展示
将一张上古神兽 犼 的图片与 火 动图进行合并展示。
合并后的效果
这里并没有进行保存处理,只是在不断绘制操作,这里是一个可以优化的地方。
二、实现思路
1、进行 犼 图片的平铺。
2、拆分 火 动图的图组,计算出每张图的展示时间。
3、定时器循环进行图片填充。
三、代码实现
首先创建一个 WSLLetMeSmile (笑一笑)类,内部实现全部的逻辑。
1、外部调用
// Base image: load "犼" from the bundle and display it scaled to screen
// width, preserving the source aspect ratio.
NSString * bundleImage = [[NSBundle mainBundle] pathForResource:@"犼" ofType:@"jpeg"];
UIImage * image = [UIImage imageWithContentsOfFile:bundleImage];
UIImageView * imageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.width * image.size.height / image.size.width)];
imageView.center = self.view.center;
[self.view addSubview:imageView];
// GIF: build an ImageIO image source from the bundled "火" animation.
NSURL *url = [[NSBundle mainBundle] URLForResource:@"火" withExtension:@"gif"];
// Under ARC an NSURL * cannot be cast to CFURLRef with a plain C cast —
// a __bridge cast is required (the original did not compile).
CGImageSourceRef gifImageSourceRef = CGImageSourceCreateWithURL((__bridge CFURLRef)url, nil);
// The callback fires on a background queue; hop to the main queue before
// touching UIKit.
self.letMeSmile = [WSLLetMeSmile letMeSmileWithImage:image gifSource:gifImageSourceRef resImage:^(UIImage * _Nonnull resImage) {
    dispatch_async(dispatch_get_main_queue(), ^{
        imageView.image = resImage;
    });
}];
2、WSLLetMeSmile 类实现
WSLLetMeSmile.h
只包含一个类方法,参数就是一张底图、gif图组、渲染回调闭包
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
@interface WSLLetMeSmile : NSObject
/// Creates an instance that composites each GIF frame onto the base image
/// and repeatedly invokes the callback with the merged result.
/// @param image The static base image the GIF frames are drawn onto.
/// @param gifSource An ImageIO source for the animated GIF. Ownership is
///        transferred: the instance releases it after reading the frames.
/// @param resCallBack Invoked with each composited frame. Called from a
///        background queue — dispatch to the main queue before updating UI.
+ (WSLLetMeSmile *)letMeSmileWithImage:(UIImage *)image gifSource:(CGImageSourceRef)gifSource resImage:(void(^)(UIImage *))resCallBack;
@end
NS_ASSUME_NONNULL_END
WSLLetMeSmile.m
需要进行 gif 图信息获取、渲染合并及相关图片像素点阈值判断(去掉动图中一些不协调的因素)
#ifdef __cplusplus
#import <opencv2/opencv.hpp>
#import <opencv2/imgcodecs/ios.h> // Mat <-> UIImage conversion helpers
#endif
#import "WSLLetMeSmile.h"
// OpenCV namespace (Mat, Vec3b, cvtColor, ...).
using namespace cv;
// Signature of the per-frame rendering callback.
typedef void(^ResCallBack)(UIImage *);
@interface WSLLetMeSmile()
// GCD timer driving the frame loop; cancelled in -dealloc.
@property (nonatomic,strong) dispatch_source_t timer;
// Per-frame rendering callback supplied by the caller.
@property (nonatomic,copy) ResCallBack resCallBack;
// Cumulative GIF timestamps: entry i is the time at which frame i starts
// showing; the last entry equals the total duration. Lightweight generics
// document that the array holds boxed doubles.
@property (nonatomic,strong) NSMutableArray<NSNumber *> * gifTimeArr;
@end
@implementation WSLLetMeSmile
// Designated initializer: prepares the (empty) timestamp store.
- (instancetype)init
{
    self = [super init];
    if (self) {
        // Direct ivar access in -init — avoid property accessors before
        // the object is fully initialized.
        _gifTimeArr = [[NSMutableArray alloc] init];
    }
    return self;
}
// Factory: builds an instance, stores the render callback, and starts the
// animation loop. (The original comment line began with a single "/",
// which is a syntax error and does not compile.)
+ (WSLLetMeSmile *)letMeSmileWithImage:(UIImage *)image gifSource:(CGImageSourceRef)gifSource resImage:(void(^)(UIImage *))resCallBack
{
    WSLLetMeSmile * letMeSmile = [[WSLLetMeSmile alloc] init];
    letMeSmile.resCallBack = resCallBack;
    [letMeSmile addAnimationWithImage:image gifSource:gifSource];
    return letMeSmile;
}
// Kicks off the animation: parses the GIF, then drives a 10 ms GCD timer
// that composites and publishes the frame whose timestamp window contains
// the current loop time. The loop restarts once `time` passes the GIF's
// total duration.
- (void)addAnimationWithImage:(UIImage *)image gifSource:(CGImageSourceRef)gifSource
{
    __weak typeof(self) weakSelf = self;
    [self getGifDurationWithGifSource:gifSource callBack:^(NSTimeInterval totalDuration, NSArray *frames) {
        // Nothing to animate (empty or unreadable GIF). The original code
        // would have indexed past the end of gifTimeArr in this case.
        if (frames.count == 0) {
            return;
        }
        // Index of the GIF frame currently shown.
        __block int index = 0;
        // Time elapsed in the current loop iteration.
        __block float time = 0;
        // Timestamp at which the next frame becomes due.
        __block float nextTime = 0;
        // 10 ms repeating timer on a background queue.
        dispatch_queue_t queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
        weakSelf.timer = dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER, 0, 0, queue);
        dispatch_source_set_timer(weakSelf.timer, dispatch_walltime(NULL, 0), 0.01*NSEC_PER_SEC, 0);
        dispatch_source_set_event_handler(weakSelf.timer, ^{
            __strong typeof(weakSelf) strongSelf = weakSelf;
            // Owner deallocated: stop doing work. The original called
            // `weakSelf.resCallBack(smileImage)` without a nil check —
            // invoking a nil block pointer crashes.
            if (!strongSelf) {
                return;
            }
            // One full cycle finished — restart from frame 0.
            if (time > totalDuration) {
                time = 0;
                index = 0;
                [strongSelf renderGifFrame:frames.firstObject ontoImage:image];
                nextTime = [strongSelf.gifTimeArr[index + 1] floatValue];
            }
            // The next frame is due — advance and render it.
            if (time >= nextTime) {
                index += 1;
                if (index < frames.count) {
                    [strongSelf renderGifFrame:frames[index] ontoImage:image];
                }
                if (index + 1 < strongSelf.gifTimeArr.count) {
                    nextTime = [strongSelf.gifTimeArr[index + 1] floatValue];
                }
            }
            time += 0.01;
        });
        dispatch_resume(weakSelf.timer);
    }];
}
// Composites a single GIF frame onto the base image and publishes the
// result through the caller-supplied callback (if any). Extracted from the
// two duplicated render sites in the timer handler.
- (void)renderGifFrame:(UIImage *)frame ontoImage:(UIImage *)image
{
    // Base ("犼") image -> Mat.
    Mat baseMat;
    UIImageToMat(image, baseMat);
    // GIF ("火") frame -> Mat.
    Mat fireMat;
    UIImageToMat(frame, fireMat);
    Mat result = [self drawOnFace:baseMat fire:fireMat];
    UIImage * smileImage = MatToUIImage(result);
    if (self.resCallBack) {
        self.resCallBack(smileImage);
    }
}
// Composites one GIF frame onto the base image.
// - img:  the base image as a Mat.
// - fire: one GIF frame as a Mat.
// Returns the merged Mat, ready for MatToUIImage. Near-black GIF pixels are
// skipped so the dark background of the flame does not overwrite the base.
- (Mat)drawOnFace:(Mat &)img fire:(Mat)fire
{
    // Base image dimensions.
    int otherWidth = img.cols;
    int otherHeight = img.rows;
    // Single GIF-frame dimensions.
    int fireWidth = fire.cols;
    int fireHeight = fire.rows;
    // Convert channel order first; without this, pixel addressing is wrong.
    // cvtColor reallocates its destination, but declare CV_8UC3 (three
    // channels) to match the Vec3b access below — the original CV_8UC1 hint
    // was misleading, and showFireImg was wrongly sized from `img`.
    Mat showImg(cvRound(img.rows), cvRound(img.cols), CV_8UC3);
    cvtColor(img, showImg, COLOR_BGR2RGB);
    Mat showFireImg(cvRound(fire.rows), cvRound(fire.cols), CV_8UC3);
    cvtColor(fire, showFireImg, COLOR_BGR2RGB);
    // Centre the frame horizontally ...
    int addX = (otherWidth - fireWidth) / 2;
    // ... and align it with the bottom edge.
    int addY = otherHeight - fireHeight;
    for (int x = 0; x < fireWidth; x ++) {
        for (int y = 0; y < fireHeight; y++) {
            // Skip destination coordinates outside the base image (possible
            // when the GIF frame is larger than the base image; the original
            // indexed out of bounds in that case — undefined behavior).
            int destX = x + addX;
            int destY = y + addY;
            if (destX < 0 || destX >= otherWidth || destY < 0 || destY >= otherHeight) {
                continue;
            }
            int b = showFireImg.at<Vec3b>(y,x)[0];
            int g = showFireImg.at<Vec3b>(y,x)[1];
            int r = showFireImg.at<Vec3b>(y,x)[2];
            // Threshold out near-black pixels (the flame's background).
            // Renamed from `threshold` to avoid shadowing cv::threshold.
            int colorThreshold = 5;
            if (r < colorThreshold || g < colorThreshold || b < colorThreshold) {
                continue;
            }
            // Copy the GIF pixel into the base image. Note that Mat::at
            // takes (row, col) == (y, x).
            showImg.at<Vec3b>(destY,destX)[0] = b;
            showImg.at<Vec3b>(destY,destX)[1] = g;
            showImg.at<Vec3b>(destY,destX)[2] = r;
        }
    }
    // Swap channels back before returning the merged result.
    Mat mat_image_face;
    cvtColor(showImg, mat_image_face, cv::COLOR_BGR2RGB, 3);
    return mat_image_face;
}
// Decodes every GIF frame and builds the cumulative timestamp table:
// gifTimeArr[i] is the start time of frame i, and the last entry equals the
// total duration. Consumes (CFReleases) gifSource when done, then invokes
// the callback synchronously with the total duration and decoded frames.
// NOTE(review): the original for-loop was missing its opening brace and
// did not compile; it also passed a possibly-NULL CGImageRef to
// -imageWithCGImage: and then a nil image to -addObject:, which throws.
- (void)getGifDurationWithGifSource:(CGImageSourceRef)gifSource callBack:(void(^)(NSTimeInterval totalDuration,NSArray * frames))callBack
{
    [self.gifTimeArr removeAllObjects];
    size_t frameCount = CGImageSourceGetCount(gifSource);
    NSMutableArray * saveFrames = [[NSMutableArray alloc] init];
    NSTimeInterval totalDuration = 0;
    // Frame 0 starts at t == 0.
    [self.gifTimeArr addObject:[NSNumber numberWithDouble:totalDuration]];
    for (size_t i = 0; i < frameCount; i++) {
        // Drain per-frame temporaries eagerly — decoding many frames in a
        // tight loop otherwise accumulates autoreleased images.
        @autoreleasepool {
            CGImageRef imageRef = CGImageSourceCreateImageAtIndex(gifSource, i, NULL);
            if (imageRef == NULL) {
                continue; // skip undecodable frames
            }
            UIImage * image = [UIImage imageWithCGImage:imageRef];
            [saveFrames addObject:image];
            // Accumulate this frame's display time.
            NSTimeInterval duration = [self getGifPerFrameTime:gifSource index:i];
            totalDuration += duration;
            [self.gifTimeArr addObject:[NSNumber numberWithDouble:totalDuration]];
            CGImageRelease(imageRef);
        }
    }
    // Ownership of gifSource was transferred to us by the caller.
    CFRelease(gifSource);
    callBack(totalDuration,saveFrames);
}
// Returns the display duration of GIF frame `index`, in seconds.
// Prefers the unclamped delay time and falls back to the clamped delay
// when the unclamped value is missing or zero. (The original checked
// `duration < 0`, which never fires — delay times are non-negative — so
// the fallback branch was dead code. It also leaked frameProperties,
// which comes from a Copy-rule API and must be CFReleased.)
- (NSTimeInterval)getGifPerFrameTime:(CGImageSourceRef)gifSource index:(NSInteger)index
{
    NSTimeInterval duration = 0;
    CFDictionaryRef frameProperties = CGImageSourceCopyPropertiesAtIndex(gifSource, index, NULL);
    if (frameProperties) {
        CFDictionaryRef gifProperties;
        BOOL result = CFDictionaryGetValueIfPresent(frameProperties, kCGImagePropertyGIFDictionary, (const void **)&gifProperties);
        if (result) {
            const void *durationValue;
            if (CFDictionaryGetValueIfPresent(gifProperties,kCGImagePropertyGIFUnclampedDelayTime,&durationValue)) {
                duration = [( __bridge NSNumber *)durationValue doubleValue];
                if (duration <= 0) {
                    if (CFDictionaryGetValueIfPresent(gifProperties, kCGImagePropertyGIFDelayTime, &durationValue)) {
                        duration = [( __bridge NSNumber *)durationValue doubleValue];
                    }
                }
            }
        }
        // Copy rule: we own frameProperties and must release it.
        CFRelease(frameProperties);
    }
    return duration;
}
// Cleanup: the GCD timer must be cancelled explicitly, or its handler keeps
// firing after the owner is gone. Guard against a nil timer —
// dispatch_source_cancel(NULL) crashes, and the timer is only created once
// the GIF has been parsed, so it can legitimately still be nil here.
-(void)dealloc
{
    NSLog(@"我销毁了");
    if (self.timer) {
        dispatch_source_cancel(self.timer);
    }
}
@end
四、总结与思考
简单的合并展示功能就完成了。
需要注意的是:
1、进行转换之前需要对图片进行 cvtColor处理,否则像素定位不准。
2、修改坐标像素值的时候,at< Vec3b >里面的第一个参数是 y 值 ,第二个参数是 x 值。如果选择用 cv::Point 作为参数,可避免参数混淆。
没什么高级的内容,大神勿笑,共同进步。[抱拳][抱拳][抱拳]