To open: building on the multi-image stitching feature implemented earlier, this post captures user-selected frames from a video and stitches them into a single image. Below is a simple utility class that implements this.
Related article: iOS 下利用 CGContextRef 简单实现多图拼接功能 (simple multi-image stitching on iOS with CGContextRef)
I. Demo
(Demo GIF: selecting a video)
1. Drag on the playback view to scrub through the video.
2. Tap to capture the current frame.
3. Show how many frames have been captured and trigger the merge.
4. Save the result temporarily to the sandbox directory.
Here is the stitched result saved under the sandbox path:
II. Implementation
1. External calling code
Only the WSLVideoPlayManagement class is used by the caller.
The playWithAVPlayerItem method takes two parameters:
an AVPlayerItem, the video resource;
a UIView, the view that hosts playback.
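Here is a minimal, hedged sketch of the call site (the host view controller, its playerView property, and the bundled file name test.mp4 are all assumptions for illustration):
#import "WSLVideoPlayManagement.h"
//In a host view controller (playerView and test.mp4 are hypothetical)
- (void)viewDidLoad
{
[super viewDidLoad];
NSURL * localUrl = [[NSBundle mainBundle] URLForResource:@"test" withExtension:@"mp4"];
AVPlayerItem * playerItem = [WSLVideoPlayManagement createAVPlayerItemWithLocalUrl:localUrl];
//No strong reference is kept here: the UIView category shown later ties the manager's lifetime to playerView
[WSLVideoPlayManagement playWithAVPlayerItem:playerItem onView:self.playerView];
}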
2. Feature checklist
An overview of everything the utility does:
1. Local video player wrapper:
(1) drag on the playback view to scrub;
(2) tap the playback view to capture a frame;
(3) show the number of captured frames and a merge button.
2. Frame capture and saving wrapper:
(1) grab the current frame from the player;
(2) cache the frames the user has captured.
3. A custom UIView category that associates the video manager object with the host view, transferring the manager's lifecycle to it.
3. Code
A word on why the UIView category exists: it hands the lifetime of the video manager instance over to the view that hosts playback. The category itself only adds a single associated-object property.
1. WSLVideoPlayManagement, the playback class
WSLVideoPlayManagement.h
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
NS_ASSUME_NONNULL_BEGIN
@interface WSLVideoPlayManagement : NSObject
+ (WSLVideoPlayManagement *)playWithAVPlayerItem:(AVPlayerItem *)playerItem onView:(UIView *)onView;
+ (AVPlayerItem *)createAVPlayerItemWithLocalUrl:(NSURL *)localUrl;
- (void)playWithAVPlayerItem:(AVPlayerItem *)playerItem onView:(UIView *)onView;
//Re-lay out the player UI (resize the player to match the video's aspect ratio)
- (void)resetPlayerUI;
//Show/hide the scrub-time indicator
- (void)isShowTime:(BOOL)isShowTime;
//Seek by a drag offset
- (void)playerSeek:(float)changeDistance;
//Show/hide the merge button
- (void)isShowCombineBtn:(BOOL)isShowCombineBtn;
//Show the number of captured images
- (void)showChooseImageNum:(NSInteger)chooseNum;
@end
NS_ASSUME_NONNULL_END
WSLVideoPlayManagement.m
#import "WSLVideoPlayManagement.h"
#import "UIView+WSLVideoPlayManagement.h"
#import "WSLVideoScreenshotsManagement.h"
@interface WSLVideoPlayManagement()
//Player internals
@property (nonatomic,strong) AVAsset * asset;
@property (nonatomic,strong) AVPlayerItem * currentPlayerItem;
@property (nonatomic,strong) AVPlayer * player;
@property (nonatomic,strong) AVPlayerLayer * playerLayer;
//The view hosting the player
@property (nonatomic,weak) UIView * onView;
//Scrub-time indicator
@property (nonatomic,strong) UILabel * timeLab;
//Button that starts merging the captured images
@property (nonatomic,strong) UIButton * benginCombineImagesBtn;
//Count of captured images
@property (nonatomic,strong) UILabel * chooseImageCountLab;
//Video output (used to grab frame images)
@property (nonatomic,strong) AVPlayerItemVideoOutput * videoOutPut;
//Total video duration as a string
@property (nonatomic,strong) NSString * videoTotleTimeStr;
//Playback time when a drag begins (seeks are computed relative to this)
@property (nonatomic,assign) CMTime beginTime;
//Frame-capture utility
@property (nonatomic,strong) WSLVideoScreenshotsManagement * videoScreenshotsManagement;
@end
@implementation WSLVideoPlayManagement
+ (WSLVideoPlayManagement *)playWithAVPlayerItem:(AVPlayerItem *)playerItem onView:(UIView *)onView
{
WSLVideoPlayManagement * videoPlayManagement = [[WSLVideoPlayManagement alloc] init];
[videoPlayManagement playWithAVPlayerItem:playerItem onView:onView];
//Register an observer to watch the player item's status
[videoPlayManagement registObserver];
onView.videoPlayManagement = videoPlayManagement;
return videoPlayManagement;
}
+ (AVPlayerItem *)createAVPlayerItemWithLocalUrl:(NSURL *)localUrl
{
AVAsset * asset = [AVAsset assetWithURL:localUrl];
AVPlayerItem *playerItem = [[AVPlayerItem alloc] initWithAsset:asset];
return playerItem;
}
- (void)playWithAVPlayerItem:(AVPlayerItem *)playerItem onView:(UIView *)onView
{
self.onView = onView;
self.currentPlayerItem = playerItem;
self.videoOutPut = [[AVPlayerItemVideoOutput alloc] init];
[self.currentPlayerItem addOutput:self.videoOutPut];
self.player = [[AVPlayer alloc] initWithPlayerItem:self.currentPlayerItem];
self.playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
self.playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.playerLayer.frame = onView.layer.bounds;
[onView.layer addSublayer:self.playerLayer];
//Frame-capture utility
self.videoScreenshotsManagement = [[WSLVideoScreenshotsManagement alloc] initWithOnView:onView player:self.player videoOutput:self.videoOutPut];
//Scrub-time indicator
self.timeLab = [[UILabel alloc] initWithFrame:CGRectZero];
self.timeLab.hidden = YES;
self.timeLab.backgroundColor = [[UIColor groupTableViewBackgroundColor] colorWithAlphaComponent:0.2];
self.timeLab.textColor = [UIColor whiteColor];
self.timeLab.textAlignment = NSTextAlignmentCenter;
CMTime cTime = self.currentPlayerItem.asset.duration;
self.videoTotleTimeStr = [self transTime:cTime.value / (cTime.timescale)];
self.timeLab.text = [NSString stringWithFormat:@" 00:00 / %@ ",self.videoTotleTimeStr];
[self.timeLab sizeToFit];
self.timeLab.layer.masksToBounds = YES;
self.timeLab.layer.cornerRadius = self.timeLab.frame.size.height / 2.0;
self.timeLab.center = CGPointMake(self.onView.bounds.size.width / 2.0, (self.onView.frame.size.height - self.timeLab.frame.size.height) / 2.0);
[self.onView addSubview:self.timeLab];
__weak typeof(self) weakSelf = self;
[self.player addPeriodicTimeObserverForInterval:CMTimeMake(1, 1) queue:dispatch_get_main_queue() usingBlock:^(CMTime time) {
//Update the progress readout once per second
weakSelf.timeLab.text = [NSString stringWithFormat:@" %@ / %@ ",[weakSelf transTime:time.value / (time.timescale)],weakSelf.videoTotleTimeStr];
[weakSelf.timeLab sizeToFit];
}];
//Merge button
self.benginCombineImagesBtn = [UIButton buttonWithType:(UIButtonTypeCustom)];
self.benginCombineImagesBtn.hidden = YES;
self.benginCombineImagesBtn.frame = CGRectMake(10, self.onView.frame.size.height - 50, 70, 40);
[self.benginCombineImagesBtn setTitle:@"Merge" forState:(UIControlStateNormal)];
[self.benginCombineImagesBtn setTitleColor:[UIColor whiteColor] forState:(UIControlStateNormal)];
self.benginCombineImagesBtn.layer.masksToBounds = YES;
self.benginCombineImagesBtn.layer.cornerRadius = 5;
self.benginCombineImagesBtn.backgroundColor = [[UIColor groupTableViewBackgroundColor] colorWithAlphaComponent:0.2];
[self.benginCombineImagesBtn addTarget:self action: @selector(beginCombineImages) forControlEvents:(UIControlEventTouchUpInside)];
[self.onView addSubview:self.benginCombineImagesBtn];
//Label showing how many images have been captured
self.chooseImageCountLab = [[UILabel alloc] initWithFrame:CGRectMake(CGRectGetMaxX(self.benginCombineImagesBtn.frame) - 10, CGRectGetMinY(self.benginCombineImagesBtn.frame) - 5, 0, 0)];
self.chooseImageCountLab.layer.masksToBounds = YES;
self.chooseImageCountLab.backgroundColor = [UIColor whiteColor];
self.chooseImageCountLab.textColor = [UIColor redColor];
self.chooseImageCountLab.font = [UIFont systemFontOfSize:15.f];
[self.onView addSubview:self.chooseImageCountLab];
}
//Register the KVO observer
- (void)registObserver
{
//Observe the playerItem's status
[self.player.currentItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary<NSKeyValueChangeKey,id> *)change context:(void *)context
{
if ([object isKindOfClass:[AVPlayerItem class]]) {
if ([keyPath isEqualToString:@"status"]) {
AVPlayerItemStatus status = [change[NSKeyValueChangeNewKey] integerValue];
switch (status) {
case AVPlayerItemStatusReadyToPlay:
{
//Ready to play
[self.player play];
//Grab a frame and resize the player view to match the video
[self resizePlayerView];
}
break;
default:
break;
}
}
}
}
//Grab a frame and resize the player view to match the video
- (void)resizePlayerView
{
[self.videoScreenshotsManagement reSizePlayerView];
}
//Re-lay out the player UI
- (void)resetPlayerUI
{
self.playerLayer.frame = self.onView.layer.bounds;
self.timeLab.center = CGPointMake(self.onView.bounds.size.width / 2.0, (self.onView.frame.size.height - self.timeLab.frame.size.height) / 2.0);
self.benginCombineImagesBtn.frame = CGRectMake(10, self.onView.frame.size.height - 50, 70, 40);
self.chooseImageCountLab.frame = CGRectMake(CGRectGetMaxX(self.benginCombineImagesBtn.frame) - 10, CGRectGetMinY(self.benginCombineImagesBtn.frame) - 5, 0, 0);
}
//Seconds -> "mm:ss" string
- (NSString *)transTime:(NSUInteger)time
{
NSUInteger minutes = time / 60;
NSUInteger seconds = time % 60;
return [NSString stringWithFormat:@"%02lu:%02lu",(unsigned long)minutes,(unsigned long)seconds];
}
//Show/hide the scrub-time indicator (pauses playback while scrubbing)
- (void)isShowTime:(BOOL)isShowTime
{
self.timeLab.hidden = !isShowTime;
if (isShowTime) {
[self.player pause];
self.beginTime = self.currentPlayerItem.currentTime;
} else {
[self.player play];
}
}
//Show/hide the merge button
- (void)isShowCombineBtn:(BOOL)isShowCombineBtn
{
self.benginCombineImagesBtn.hidden = !isShowCombineBtn;
self.chooseImageCountLab.hidden = self.benginCombineImagesBtn.hidden;
}
//Show the number of captured images
- (void)showChooseImageNum:(NSInteger)chooseNum
{
self.chooseImageCountLab.text = [NSString stringWithFormat:@" %ld ",(long)chooseNum];
[self.chooseImageCountLab sizeToFit];
self.chooseImageCountLab.layer.cornerRadius = self.chooseImageCountLab.frame.size.height / 2.0;
}
//Seek by the drag offset, relative to where the drag began
- (void)playerSeek:(float)changeDistance
{
CMTime currentTime = self.beginTime;
CMTime totleTime = self.currentPlayerItem.asset.duration;
//Map the horizontal drag distance to a tick offset across the full duration
int64_t addValue = totleTime.value * (changeDistance / self.onView.frame.size.width);
int64_t min = 0;
int64_t max = totleTime.value;
//Convert the drag-start time into totleTime's timescale before adding the offset
int64_t value = (currentTime.value / currentTime.timescale) * totleTime.timescale + addValue;
if (value <= min) {
value = min;
} else if (value >= max) {
value = max;
}
CMTime seekTime = CMTimeMake(value, totleTime.timescale);
[self.player seekToTime:seekTime];
self.timeLab.text = [NSString stringWithFormat:@" %@ / %@ ",[self transTime:value / (totleTime.timescale)],self.videoTotleTimeStr];
[self.timeLab sizeToFit];
}
//Start merging
- (void)beginCombineImages
{
[self.videoScreenshotsManagement beginCombineImages];
}
- (void)dealloc
{
[self.player.currentItem removeObserver:self forKeyPath:@"status"];
NSLog(@"WSLVideoPlayManagement deallocated");
}
@end
2. WSLVideoScreenshotsManagement, the frame-capture class
WSLVideoScreenshotsManagement.h
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
NS_ASSUME_NONNULL_BEGIN
@interface WSLVideoScreenshotsManagement : NSObject
- (instancetype)initWithOnView:(UIView *)onView player:(AVPlayer *)player videoOutput:(AVPlayerItemVideoOutput *)videoOutput;
//Grab a frame and resize the player view to match the video
- (void)reSizePlayerView;
//Grab the current frame (returns nil if no pixel buffer is available yet)
- (nullable UIImage *)getCurrentImage;
//Start merging
- (void)beginCombineImages;
@end
NS_ASSUME_NONNULL_END
WSLVideoScreenshotsManagement.m
#import "WSLVideoScreenshotsManagement.h"
#import "UIView+WSLVideoPlayManagement.h"
#import "WSLImageCombineOperation.h"
@interface WSLVideoScreenshotsManagement()
//The view hosting the video
@property (nonatomic,weak) UIView * onView;
//Tap to capture a frame
@property (nonatomic,strong) UITapGestureRecognizer * lightTap;
//Pan to scrub
@property (nonatomic,strong) UIPanGestureRecognizer * panGes;
//Drag start point
@property (nonatomic,assign) CGPoint startPanPoint;
//Player (passed in from outside)
@property (nonatomic,weak) AVPlayer * player;
//Video output
@property (nonatomic,weak) AVPlayerItemVideoOutput * videoOutPut;
//Captured images awaiting merge
@property (nonatomic,strong) NSMutableArray * saveNeedCombineImagesArr;
@end
@implementation WSLVideoScreenshotsManagement
- (instancetype)initWithOnView:(UIView *)onView player:(AVPlayer *)player videoOutput:(AVPlayerItemVideoOutput *)videoOutput
{
if (self = [super init]) {
self.onView = onView;
self.player = player;
self.videoOutPut = videoOutput;
self.onView.userInteractionEnabled = YES;
self.lightTap = [[UITapGestureRecognizer alloc] initWithTarget:self action: @selector(lightTapAction:)];
[self.onView addGestureRecognizer:self.lightTap];
self.panGes = [[UIPanGestureRecognizer alloc] initWithTarget:self action: @selector(panGes:)];
[self.onView addGestureRecognizer:self.panGes];
}
return self;
}
//Tap handler
- (void)lightTapAction:(UITapGestureRecognizer *)lightTap
{
switch (lightTap.state) {
case UIGestureRecognizerStateEnded:
{
if (self.player.status == AVPlayerStatusReadyToPlay) {
NSLog(@"开始采集图片");
[self addSaveAnimation];
}
}
break;
default:
break;
}
}
//Pan handler
- (void)panGes:(UIPanGestureRecognizer *)pan
{
switch (pan.state) {
case UIGestureRecognizerStateBegan:
{
self.startPanPoint = [pan locationInView:self.onView];
[self.onView.videoPlayManagement isShowTime:YES];
}
break;
case UIGestureRecognizerStateChanged:
{
CGPoint currentPoint = [pan locationInView:self.onView];
[self.onView.videoPlayManagement playerSeek:currentPoint.x - self.startPanPoint.x];
}
break;
case UIGestureRecognizerStateEnded:{
[self.onView.videoPlayManagement isShowTime:NO];
}
break;
default:
break;
}
}
//Play a shrink-away animation, then stash the captured frame
- (void)addSaveAnimation
{
UIImage * needSaveImage = [self getCurrentImage];
if (needSaveImage) {
UIImageView * imageView = [[UIImageView alloc] initWithFrame:self.onView.frame];
imageView.contentMode = UIViewContentModeScaleAspectFit;
imageView.image = needSaveImage;
[self.onView.superview addSubview:imageView];
[UIView animateWithDuration:0.4 animations:^{
imageView.frame = CGRectMake(imageView.frame.origin.x, CGRectGetMaxY(self.onView.frame), imageView.frame.size.width / 10, imageView.frame.size.height / 10);
imageView.alpha = 0.1;
} completion:^(BOOL finished) {
//Stash the image
[self.saveNeedCombineImagesArr addObject:needSaveImage];
[self.onView.videoPlayManagement isShowCombineBtn:YES];
[self.onView.videoPlayManagement showChooseImageNum:self.saveNeedCombineImagesArr.count];
[imageView removeFromSuperview];
}];
}
}
//Grab the current frame from the video output
- (UIImage *)getCurrentImage
{
CMTime itemTime = self.player.currentItem.currentTime;
CVPixelBufferRef pixelBuffer = [self.videoOutPut copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil];
if (pixelBuffer == NULL) {
return nil;
}
CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
CIContext *temporaryContext = [CIContext contextWithOptions:nil];
CGImageRef videoImage = [temporaryContext createCGImage:ciImage fromRect:CGRectMake(0, 0, CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer))];
//The current frame
UIImage *currentImage = [UIImage imageWithCGImage:videoImage];
CGImageRelease(videoImage);
//copyPixelBufferForItemTime returns a +1 buffer; release it to avoid a leak
CVBufferRelease(pixelBuffer);
return currentImage;
}
//Grab a frame, then resize the host view to the video's aspect ratio
- (void)reSizePlayerView
{
//Small delay so the video output has a frame buffered before we sample it
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.1 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
UIImage * image = [self getCurrentImage];
if (image && self.onView) {
self.onView.frame = CGRectMake(self.onView.frame.origin.x, self.onView.frame.origin.y, self.onView.frame.size.width, self.onView.frame.size.width * (image.size.height / image.size.width));
[self.onView.videoPlayManagement resetPlayerUI];
}
});
}
//Start merging
- (void)beginCombineImages
{
[WSLImageCombineOperation combineImages:self.saveNeedCombineImagesArr callBack:^(UIImage * _Nonnull resultImage) {
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);
NSString *filePath = [[paths objectAtIndex:0] stringByAppendingPathComponent:@"猩球崛起.jpg"];
NSLog(@"filePath = %@",filePath);
[WSLImageCombineOperation saveImageToCache:resultImage filePath:filePath];
[self.saveNeedCombineImagesArr removeAllObjects];
[self.onView.videoPlayManagement showChooseImageNum:self.saveNeedCombineImagesArr.count];
[self.onView.videoPlayManagement isShowCombineBtn:NO];
}];
}
//Lazy loading
- (NSMutableArray *)saveNeedCombineImagesArr
{
if (!_saveNeedCombineImagesArr) {
_saveNeedCombineImagesArr = [[NSMutableArray alloc] init];
}
return _saveNeedCombineImagesArr;
}
@end
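One note: WSLImageCombineOperation comes from the related article linked at the top and isn't repeated here. For reference, here is a minimal sketch of the interface the code above assumes; the vertical-stacking implementation is illustrative, not the original.
WSLImageCombineOperation.h
#import <UIKit/UIKit.h>
@interface WSLImageCombineOperation : NSObject
//Stitch the images into one and hand the result back
+ (void)combineImages:(NSArray<UIImage *> *)images callBack:(void (^)(UIImage * resultImage))callBack;
//Write the image to the given sandbox path
+ (void)saveImageToCache:(UIImage *)image filePath:(NSString *)filePath;
@end
WSLImageCombineOperation.m (illustrative sketch)
#import "WSLImageCombineOperation.h"
@implementation WSLImageCombineOperation
+ (void)combineImages:(NSArray<UIImage *> *)images callBack:(void (^)(UIImage * resultImage))callBack
{
//Stack the images vertically in a single bitmap context
CGFloat width = 0, height = 0;
for (UIImage * image in images) {
width = MAX(width, image.size.width);
height += image.size.height;
}
UIGraphicsBeginImageContext(CGSizeMake(width, height));
CGFloat y = 0;
for (UIImage * image in images) {
[image drawInRect:CGRectMake(0, y, image.size.width, image.size.height)];
y += image.size.height;
}
UIImage * result = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
if (callBack) callBack(result);
}
+ (void)saveImageToCache:(UIImage *)image filePath:(NSString *)filePath
{
[UIImageJPEGRepresentation(image, 0.8) writeToFile:filePath atomically:YES];
}
@end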
3. The UIView+WSLVideoPlayManagement category
UIView+WSLVideoPlayManagement.h
#import <UIKit/UIKit.h>
#import "WSLVideoPlayManagement.h"
NS_ASSUME_NONNULL_BEGIN
@interface UIView (WSLVideoPlayManagement)
//Video manager whose lifetime follows this view
@property (nonatomic,strong) WSLVideoPlayManagement * videoPlayManagement;
@end
NS_ASSUME_NONNULL_END
UIView+WSLVideoPlayManagement.m
#import "UIView+WSLVideoPlayManagement.h"
#import <objc/runtime.h>
static char mineWSLVideoPlayManagement;
@implementation UIView (WSLVideoPlayManagement)
- (WSLVideoPlayManagement *)videoPlayManagement
{
return objc_getAssociatedObject(self, &mineWSLVideoPlayManagement);
}
- (void)setVideoPlayManagement:(WSLVideoPlayManagement *)videoPlayManagement
{
objc_setAssociatedObject(self, &mineWSLVideoPlayManagement, videoPlayManagement, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
@end
III. Summary and Thoughts
A note on CMTime: it is a struct, and two of its fields matter here:
1. value: the number of ticks, where each tick is 1/timescale of a second; value / timescale gives the time in seconds.
2. timescale: how many ticks make up one second.
Two CMTime values may use different timescales, so rescale to a common timescale before doing arithmetic that mixes them.
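A quick worked example (values chosen for illustration):
CMTime a = CMTimeMake(3, 1); //value 3, timescale 1: 3/1 = 3.0 s
CMTime b = CMTimeMake(1800, 600); //value 1800, timescale 600: 1800/600 = 3.0 s
//Same instant, different timescales; rescale before mixing their values,
//e.g. with CMTimeConvertScale (playerSeek above does this by hand):
CMTime c = CMTimeConvertScale(a, b.timescale, kCMTimeRoundingMethod_Default);
//c.value == 1800, c.timescale == 600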
That's it: a simple utility for capturing user-selected frames from a local video and stitching them into one image. All the code is above.
The code is clumsy; experts, please don't laugh [fist salute][fist salute][fist salute]