直播架构
CDN 是为了解决访问资源过慢的问题。比如: 你要访问的资源在北京,你先询问长沙(边缘节点),它回答没有,然后继续往上一级节点查找,直到北京(源站),这就造成了链路过长。CDN 就是将数据提前缓存到离用户更近的节点(如长沙)
请求数据是一阶一阶往上找
CDN结构
- 边缘节点
- 二级节点
- 源站节点
VideoToolBox视频编解码
音视频学习从零到整(1)
音视频学习从零到整(2)
音视频学习从零到整(3)
音视频学习从零到整(4)
音视频学习从零到整(5)
音视频学习从零到整(6)
音视频学习从零到整(7)
什么叫数据冗余?
你的妻子,Helen,将于明天晚上6点零5分在上海虹桥机场接你 23*2+10 = 56个字符
你的妻子将于明天晚上6点5分在虹桥机场接你 20*2 + 2 = 42个字符
Helen将于明天晚上6点在虹桥接你 10*2 + 2 = 26个字符
结论: 只要接收端不会产生误解,就可以减少承载信息的数据量
视频到底是什么?
I,P, B帧
I帧: 关键帧,必须要有。 P帧: 参考前一帧,只保存与前一帧不同的数据。 B帧: 对比前后两帧,只保存差异数据; 因为解码时要参考前后两帧数据,可能导致卡顿,所以直播中通常可以抛弃
一组帧
一秒内30帧 组成一组
视频花屏/卡顿原因
- 如果GOP分组中的P帧丢失就会造成解码端图像发生错误
- 为了避免花屏问题的发生,一般如果发现P帧或者I帧丢失,就不显示GOP内的所有帧,直到下一个I帧来后重新刷新图像
- 当这时因为没有刷新屏幕,丢包的这一组帧全部丢掉了,图像就会卡在那里不动,这就是卡顿的原因
videoToolbox工作流程
videoToolbox工作流程基于Core Foundation库函数,C语言
- 创建session->设置编码相关的参数->开始编码->循环输入源数据(YUV类型数据,直接从摄像头获取)->获取编码后的H264数据->结束编码
- H264文件
SPS,PPS包含着帧数据信息必须存在
SPS: 序列参数集: 保存整个视频序列级别的编码参数
PPS: 图像参数集: 保存单幅图像级别的编码参数
如下图数据
计算上面的数值,然后对照下表就知道是啥信息了
案例代码
一. 首先创建采集,看以前文章哈,我们直接看编码过程 1)
#pragma mark-输出代理
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
/// 未编码的视频流
[_CCVideoEncoder encodeVideoSampleBuffer:sampleBuffer];
}
二:设置编码会话,编码参数 2. 创建编码会话VTCompressionSessionCreate 3. 设置编码器属性 VTSessionSetProperty 4. 指定编码比特流的配置文件和级别。直播一般使用baseline,可减少由于b帧带来的延时 5. 设置码率均值(比特率可以高于此。默认比特率为零,表示视频编码器。应该确定压缩数据的大小。注意,比特率设置只在定时时有效) 6. 码率限制(只在定时时起作用)*待确认 7. 设置关键帧间隔(GOPSize)GOP太大图像会模糊 8. 设置fps(预期) 9. 准备编码VTCompressionSessionPrepareToEncodeFrames
//1.初始化(配置编码参数)
- (instancetype)initWithConfig:(CCVideoConfig *)config
{
self = [super init];
if (self) {
_config = config;
_encodeQueue = dispatch_queue_create("h264 hard encode queue", DISPATCH_QUEUE_SERIAL);
_callbackQueue = dispatch_queue_create("h264 hard encode callback queue", DISPATCH_QUEUE_SERIAL);
/**编码设置*/
//创建编码会话
OSStatus status = VTCompressionSessionCreate(kCFAllocatorDefault, (int32_t)_config.width, (int32_t)_config.height, kCMVideoCodecType_H264, NULL, NULL, NULL, VideoEncodeCallback, (__bridge void * _Nullable)(self), &_encodeSesion);
if (status != noErr) {
NSLog(@"VTCompressionSession create failed. status=%d", (int)status);
return self;
}
//设置编码器属性
//设置是否实时执行
status = VTSessionSetProperty(_encodeSesion, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
NSLog(@"VTSessionSetProperty: set RealTime return: %d", (int)status);
//指定编码比特流的配置文件和级别。直播一般使用baseline,可减少由于b帧带来的延时
status = VTSessionSetProperty(_encodeSesion, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Baseline_AutoLevel);
NSLog(@"VTSessionSetProperty: set profile return: %d", (int)status);
//设置码率均值(比特率可以高于此。默认比特率为零,表示视频编码器。应该确定压缩数据的大小。注意,比特率设置只在定时时有效)
CFNumberRef bit = (__bridge CFNumberRef)@(_config.bitrate);
status = VTSessionSetProperty(_encodeSesion, kVTCompressionPropertyKey_AverageBitRate, bit);
NSLog(@"VTSessionSetProperty: set AverageBitRate return: %d", (int)status);
//码率限制(只在定时时起作用)*待确认
CFArrayRef limits = (__bridge CFArrayRef)@[@(_config.bitrate / 4), @(_config.bitrate * 4)];
status = VTSessionSetProperty(_encodeSesion, kVTCompressionPropertyKey_DataRateLimits,limits);
NSLog(@"VTSessionSetProperty: set DataRateLimits return: %d", (int)status);
//设置关键帧间隔(GOPSize)GOP太大图像会模糊
CFNumberRef maxKeyFrameInterval = (__bridge CFNumberRef)@(_config.fps * 2);
status = VTSessionSetProperty(_encodeSesion, kVTCompressionPropertyKey_MaxKeyFrameInterval, maxKeyFrameInterval);
NSLog(@"VTSessionSetProperty: set MaxKeyFrameInterval return: %d", (int)status);
//设置fps(预期)
CFNumberRef expectedFrameRate = (__bridge CFNumberRef)@(_config.fps);
status = VTSessionSetProperty(_encodeSesion, kVTCompressionPropertyKey_ExpectedFrameRate, expectedFrameRate);
NSLog(@"VTSessionSetProperty: set ExpectedFrameRate return: %d", (int)status);
//准备编码
status = VTCompressionSessionPrepareToEncodeFrames(_encodeSesion);
NSLog(@"VTSessionSetProperty: set PrepareToEncodeFrames return: %d", (int)status);
}
return self;
}
三:开始编码 1.获取到sampleBuffer 数据 进行H264硬编码
//2.获取到sampleBuffer 数据 进行H264硬编码
- (**void**)encodeVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer {
CFRetain(sampleBuffer);
dispatch_async(_encodeQueue, ^{
//帧数据
CVImageBufferRef imageBuffer = (CVImageBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
//该帧的时间戳
frameID++;
CMTime timeStamp = CMTimeMake(frameID, 1000);
//持续时间
CMTime duration = kCMTimeInvalid;
//编码
VTEncodeInfoFlags flags;
OSStatus status = VTCompressionSessionEncodeFrame(**self**.encodeSesion, imageBuffer, timeStamp, duration, **NULL**, **NULL**, &flags);
**if** (status != noErr) {
NSLog(@"VTCompression: encode failed: status=%d",(**int**)status);
}
CFRelease(sampleBuffer);
});
}
编码完成回调
- 编码成功回调
// startCode 长度 4
const Byte startCode[] = "\x00\x00\x00\x01";
//编码成功回调
void VideoEncodeCallback(void * CM_NULLABLE outputCallbackRefCon, void * CM_NULLABLE sourceFrameRefCon,OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer ) {
if (status != noErr) {
NSLog(@"VideoEncodeCallback: encode error, status = %d", (int)status);
return;
}
if (!CMSampleBufferDataIsReady(sampleBuffer)) {
NSLog(@"VideoEncodeCallback: data is not ready");
return;
}
CCVideoEncoder *encoder = (__bridge CCVideoEncoder *)(outputCallbackRefCon);
//判断是否为关键帧
BOOL keyFrame = NO;
CFArrayRef attachArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true);
keyFrame = !CFDictionaryContainsKey(CFArrayGetValueAtIndex(attachArray, 0), kCMSampleAttachmentKey_NotSync);//(注意取反符号)
//获取sps & pps 数据 ,只需获取一次,保存在h264文件开头即可
if (keyFrame && !encoder->hasSpsPps) {
size_t spsSize, spsCount;
size_t ppsSize, ppsCount;
const uint8_t *spsData, *ppsData;
//获取图像源格式
CMFormatDescriptionRef formatDesc = CMSampleBufferGetFormatDescription(sampleBuffer);
OSStatus status1 = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(formatDesc, 0, &spsData, &spsSize, &spsCount, 0);
OSStatus status2 = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(formatDesc, 1, &ppsData, &ppsSize, &ppsCount, 0);
//判断sps/pps获取成功
if (status1 == noErr & status2 == noErr) {
NSLog(@"VideoEncodeCallback: get sps, pps success");
encoder->hasSpsPps = true;
//sps data
NSMutableData *sps = [NSMutableData dataWithCapacity:4 + spsSize];
[sps appendBytes:startCode length:4];
[sps appendBytes:spsData length:spsSize];
//pps data
NSMutableData *pps = [NSMutableData dataWithCapacity:4 + ppsSize];
[pps appendBytes:startCode length:4];
[pps appendBytes:ppsData length:ppsSize];
dispatch_async(encoder.callbackQueue, ^{
//回调方法传递sps/pps
[encoder.delegate videoEncodeCallbacksps:sps pps:pps];
});
} else {
NSLog(@"VideoEncodeCallback: get sps/pps failed spsStatus=%d, ppsStatus=%d", (int)status1, (int)status2);
}
}
//获取NALU数据
size_t lengthAtOffset, totalLength;
char *dataPoint;
//将数据复制到dataPoint
CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
OSStatus error = CMBlockBufferGetDataPointer(blockBuffer, 0, &lengthAtOffset, &totalLength, &dataPoint);
if (error != kCMBlockBufferNoErr) {
NSLog(@"VideoEncodeCallback: get datapoint failed, status = %d", (int)error);
return;
}
//循环获取nalu数据
size_t offet = 0;
//返回的nalu数据前四个字节不是0001的startcode(不是系统端的0001),而是大端模式的帧长度length
const int lengthInfoSize = 4;
while (offet < totalLength - lengthInfoSize) {
uint32_t naluLength = 0;
//获取nalu 数据长度
memcpy(&naluLength, dataPoint + offet, lengthInfoSize);
//大端转系统端
naluLength = CFSwapInt32BigToHost(naluLength);
//获取到编码好的视频数据
NSMutableData *data = [NSMutableData dataWithCapacity:4 + naluLength];
[data appendBytes:startCode length:4];
[data appendBytes:dataPoint + offet + lengthInfoSize length:naluLength];
//将NALU数据回调到代理中
dispatch_async(encoder.callbackQueue, ^{
[encoder.delegate videoEncodeCallback:data];
});
//移动下标,继续读取下一个数据
offet += lengthInfoSize + naluLength;
}
}
解码
videoToolBox基本概念 videoToolBox基于CoreMedia、CoreVideo、CoreFoundation框架的C语言API,提供三种类型的会话: 编码、解码、像素移动 从CoreMedia、CoreVideo框架衍生出时间或帧管理数据类型,如CMTime、CVPixelBuffer CMVideoFormatDescriptionRef: 视频格式描述
思路
- 解码数据(NALU Unit) I/P/B..
- 初始化解码器
- 将解析后的H264 NALU Unit输入解码器
- 解码完成回调,输出解码数据
- 解码数据显示(OpenGL ES)
解码三个核心函数:
- 创建session,VTDecompressionSessionCreate
- 解码一个frame, VTDecompressionSessionDecodeFrame
- 销毁解码session,VTDecompressionSessionInvalidate
H264原始码流->NALU I帧: 保留了一张完整视频帧,是解码的关键 P帧: 向前参考帧,保存差异数据,解码需要依赖于I帧 B帧: 双向参考帧,解码时既需要I帧,也需要P帧! 如果H264码流中I帧错误/丢失,就会导致错误传递,P/B帧单独是完成不了解码工作的!从而产生花屏的现象 解码时: 需要使用SPS/PPS数据来对解码器进行初始化
- 解析数据 NALU是一个接一个到来的,需要实时解码! 首先,你要对数据进行解析: NALU前面4个字节是起始位,标识一个NALU的开始! 从第5个字节开始才是NALU的数据类型 取第5个字节,转化为十进制,然后根据表格判断它的数据类型 判断好数据类型,才能将NALU送入解码器; SPS/PPS只需要获取保存,是不需要送入解码器解码的 CVPixelBufferRef保存的是解码后的数据或者未编码前的数据
- 初始化解码器
/*初始化解码器**/
- (BOOL)initDecoder {
if (_decodeSesion) return true;
const uint8_t * const parameterSetPointers[2] = {_sps, _pps};
const size_t parameterSetSizes[2] = {_spsSize, _ppsSize};
int naluHeaderLen = 4;
/**
根据sps pps设置解码参数
param kCFAllocatorDefault 分配器
param 2 参数个数
param parameterSetPointers 参数集指针
param parameterSetSizes 参数集大小
param naluHeaderLen nalu nalu start code 的长度 4
param _decodeDesc 解码器描述
return 状态
*/
OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault, 2, parameterSetPointers, parameterSetSizes, naluHeaderLen, &_decodeDesc);
if (status != noErr) {
NSLog(@"Video hard DecodeSession create H264ParameterSets(sps, pps) failed status= %d", (int)status);
return false;
}
/*
解码参数:
* kCVPixelBufferPixelFormatTypeKey:摄像头的输出数据格式
kCVPixelBufferPixelFormatTypeKey,已测可用值为
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,即420v
kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,即420f
kCVPixelFormatType_32BGRA,iOS在内部进行YUV至BGRA格式转换
YUV420一般用于标清视频,YUV422用于高清视频,这里的限制让人感到意外。但是,在相同条件下,YUV420计算耗时和传输压力比YUV422都小。
* kCVPixelBufferWidthKey/kCVPixelBufferHeightKey: 视频源的分辨率 width*height
* kCVPixelBufferOpenGLCompatibilityKey : 它允许在 OpenGL 的上下文中直接绘制解码后的图像,而不是从总线和 CPU 之间复制数据。这有时候被称为零拷贝通道,因为在绘制过程中没有解码的图像被拷贝.
*/
NSDictionary *destinationPixBufferAttrs =
@{
(id)kCVPixelBufferPixelFormatTypeKey: [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange], //iOS上 nv12(uvuv排布) 而不是nv21(vuvu排布)
(id)kCVPixelBufferWidthKey: [NSNumber numberWithInteger:_config.width],
(id)kCVPixelBufferHeightKey: [NSNumber numberWithInteger:_config.height],
(id)kCVPixelBufferOpenGLCompatibilityKey: [NSNumber numberWithBool:true]
};
//解码回调设置
/*
VTDecompressionOutputCallbackRecord 是一个简单的结构体,它带有一个指针 (decompressionOutputCallback),指向帧解压完成后的回调方法。你需要提供可以找到这个回调方法的实例 (decompressionOutputRefCon)。VTDecompressionOutputCallback 回调方法包括七个参数:
参数1: 回调的引用
参数2: 帧的引用
参数3: 一个状态标识 (包含未定义的代码)
参数4: 指示同步/异步解码,或者解码器是否打算丢帧的标识
参数5: 实际图像的缓冲
参数6: 出现的时间戳
参数7: 出现的持续时间
*/
VTDecompressionOutputCallbackRecord callbackRecord;
callbackRecord.decompressionOutputCallback = videoDecompressionOutputCallback;
callbackRecord.decompressionOutputRefCon = (__bridge void * _Nullable)(self);
//创建session
/*!
@function VTDecompressionSessionCreate
@abstract 创建用于解压缩视频帧的会话。
@discussion 解压后的帧将通过调用OutputCallback发出
@param allocator 内存的会话。通过使用默认的kCFAllocatorDefault的分配器。
@param videoFormatDescription 描述源视频帧
@param videoDecoderSpecification 指定必须使用的特定视频解码器.NULL
@param destinationImageBufferAttributes 描述源像素缓冲区的要求 NULL
@param outputCallback 使用已解压缩的帧调用的回调
@param decompressionSessionOut 指向一个变量以接收新的解压会话
*/
status = VTDecompressionSessionCreate(kCFAllocatorDefault, _decodeDesc, NULL, (__bridge CFDictionaryRef _Nullable)(destinationPixBufferAttrs), &callbackRecord, &_decodeSesion);
//判断一下status
if (status != noErr) {
NSLog(@"Video hard DecodeSession create failed status= %d", (int)status);
return false;
}
//设置解码会话属性(实时编码)
status = VTSessionSetProperty(_decodeSesion, kVTDecompressionPropertyKey_RealTime,kCFBooleanTrue);
NSLog(@"Vidoe hard decodeSession set property RealTime status = %d", (int)status);
return true;
}
- 解码数据
/**解码函数(private)*/
- (CVPixelBufferRef)decode:(uint8_t *)frame withSize:(uint32_t)frameSize {
CVPixelBufferRef outputPixelBuffer = NULL;
CMBlockBufferRef blockBuffer = NULL;
CMBlockBufferFlags flag0 = 0;
//创建blockBuffer
/*!
参数1: structureAllocator kCFAllocatorDefault
参数2: memoryBlock frame
参数3: frame size
参数4: blockAllocator: Pass NULL
参数5: customBlockSource Pass NULL
参数6: offsetToData 数据偏移
参数7: dataLength 数据长度
参数8: flags 功能和控制标志
参数9: newBBufOut blockBuffer地址,不能为空
*/
OSStatus status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, frame, frameSize, kCFAllocatorNull, NULL, 0, frameSize, flag0, &blockBuffer);
if (status != kCMBlockBufferNoErr) {
NSLog(@"Video hard decode create blockBuffer error code=%d", (int)status);
return outputPixelBuffer;
}
CMSampleBufferRef sampleBuffer = NULL;
const size_t sampleSizeArray[] = {frameSize};
//创建sampleBuffer
/*
参数1: allocator 分配器,使用默认内存分配, kCFAllocatorDefault
参数2: blockBuffer.需要编码的数据blockBuffer.不能为NULL
参数3: formatDescription,视频输出格式
参数4: numSamples.CMSampleBuffer 个数.
参数5: numSampleTimingEntries 必须为0,1,numSamples
参数6: sampleTimingArray. 数组.为空
参数7: numSampleSizeEntries 默认为1
参数8: sampleSizeArray
参数9: sampleBuffer对象
*/
status = CMSampleBufferCreateReady(kCFAllocatorDefault, blockBuffer, _decodeDesc, 1, 0, NULL, 1, sampleSizeArray, &sampleBuffer);
if (status != noErr || !sampleBuffer) {
NSLog(@"Video hard decode create sampleBuffer failed status=%d", (int)status);
CFRelease(blockBuffer);
return outputPixelBuffer;
}
//解码
//向视频解码器提示使用低功耗模式是可以的
VTDecodeFrameFlags flag1 = kVTDecodeFrame_1xRealTimePlayback;
//异步解码
VTDecodeInfoFlags infoFlag = kVTDecodeInfo_Asynchronous;
//解码数据
/*
参数1: 解码session
参数2: 源数据 包含一个或多个视频帧的CMsampleBuffer
参数3: 解码标志
参数4: 解码后数据outputPixelBuffer
参数5: 同步/异步解码标识
*/
status = VTDecompressionSessionDecodeFrame(_decodeSesion, sampleBuffer, flag1, &outputPixelBuffer, &infoFlag);
if (status == kVTInvalidSessionErr) {
NSLog(@"Video hard decode InvalidSessionErr status =%d", (int)status);
} else if (status == kVTVideoDecoderBadDataErr) {
NSLog(@"Video hard decode BadData status =%d", (int)status);
} else if (status != noErr) {
NSLog(@"Video hard decode failed status =%d", (int)status);
}
CFRelease(sampleBuffer);
CFRelease(blockBuffer);
return outputPixelBuffer;
}
// private
- (void)decodeNaluData:(uint8_t *)frame size:(uint32_t)size {
//数据类型:frame的前4个字节是NALU数据的开始码,也就是00 00 00 01,
// 第5个字节是表示数据类型,转为10进制后,7是sps, 8是pps, 5是IDR(I帧)信息
int type = (frame[4] & 0x1F);
// 将NALU的开始码转为4字节大端NALU的长度信息
uint32_t naluSize = size - 4;
uint8_t *pNaluSize = (uint8_t *)(&naluSize);
CVPixelBufferRef pixelBuffer = NULL;
frame[0] = *(pNaluSize + 3);
frame[1] = *(pNaluSize + 2);
frame[2] = *(pNaluSize + 1);
frame[3] = *(pNaluSize);
//第一次解析时: 初始化解码器initDecoder
/*
关键帧/其他帧数据: 调用[self decode:frame withSize:size] 方法
sps/pps数据:则将sps/pps数据赋值到_sps/_pps中.
*/
switch (type) {
case 0x05: //关键帧
if ([self initDecoder]) {
pixelBuffer= [self decode:frame withSize:size];
}
break;
case 0x06:
//NSLog(@"SEI");//增强信息
break;
case 0x07: //sps
_spsSize = naluSize;
_sps = malloc(_spsSize);
memcpy(_sps, &frame[4], _spsSize);
break;
case 0x08: //pps
_ppsSize = naluSize;
_pps = malloc(_ppsSize);
memcpy(_pps, &frame[4], _ppsSize);
break;
default: //其他帧(1-5)
if ([self initDecoder]) {
pixelBuffer = [self decode:frame withSize:size];
}
break;
}
}
// public
- (void)decodeNaluData:(NSData *)frame {
//将解码放在异步队列.
dispatch_async(_decodeQueue, ^{
//获取frame 二进制数据
uint8_t *nalu = (uint8_t *)frame.bytes;
//调用解码Nalu数据方法,参数1:数据 参数2:数据长度
[self decodeNaluData:nalu size:(uint32_t)frame.length];
});
}
- 解码完成回调 CVImageBufferRef imageBuffer 这个就可以拿来渲染展示视频了
/**解码回调函数*/
void videoDecompressionOutputCallback(void * CM_NULLABLE decompressionOutputRefCon,
void * CM_NULLABLE sourceFrameRefCon,
OSStatus status,
VTDecodeInfoFlags infoFlags,
CM_NULLABLE CVImageBufferRef imageBuffer,
CMTime presentationTimeStamp,
CMTime presentationDuration ) {
if (status != noErr) {
NSLog(@"Video hard decode callback error status=%d", (int)status);
return;
}
//解码后的数据sourceFrameRefCon -> CVPixelBufferRef
CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon;
*outputPixelBuffer = CVPixelBufferRetain(imageBuffer);
//获取self
CCVideoDecoder *decoder = (__bridge CCVideoDecoder *)(decompressionOutputRefCon);
//调用回调队列
dispatch_async(decoder.callbackQueue, ^{
//将解码后的数据给decoder代理.viewController
[decoder.delegate videoDecodeCallback:imageBuffer];
//释放数据
CVPixelBufferRelease(imageBuffer);
});
}
渲染
解码完成后渲染
//h264解码回调
- (void)videoDecodeCallback:(CVPixelBufferRef)imageBuffer {
//显示
///CVPixelBufferRef-》显示到屏幕上
///渲染 OpenGL ES图形渲染/图片渲染( 纹理)
///CVPixelBufferRef 纹理来处理
///2个纹理: 不是RGB, YUV数据
///OpenGL ES 默认的颜色体系RGB。YUV->RGB
///2个纹理,图片数据只有Y数据,能不能显示?可以显示(黑白)
///UV信心。图片才会变成彩色
///是不是就意味着视频由2个图层构成: Y图层纹理+UV图层纹理
///所谓视频渲染->纹理的渲染->片远着色器填充width*height 正方形(渲染2个纹理)
///_displayLayer为图层,继承与CAEAGLayer(coreAnimation)
///OpenGl ES 只负责核心渲染动作,至于显示(layer/view,你所在的编译器提供给你API)
///这也是为什么OpenGL ES垮平台的核心! 这样子不会呗任何系统约束
///CAEAGLayer 是iOS提供一个专门渲染OpenGL ES的图层,继承自CALayer
if (imageBuffer) {
_displayLayer.pixelBuffer = imageBuffer;
}
}
这里需要用到OpenGL ES的知识了比较复杂,请移步Opengl专栏 上代码
#include <QuartzCore/QuartzCore.h>
#include <CoreVideo/CoreVideo.h>
@interface AAPLEAGLLayer : CAEAGLLayer
@property CVPixelBufferRef pixelBuffer;
- (id)initWithFrame:(CGRect)frame;
- (void)resetRenderBuffer;
@end
#import "AAPLEAGLLayer.h"
#import <AVFoundation/AVUtilities.h>
#import <mach/mach_time.h>
#include <AVFoundation/AVFoundation.h>
#import <UIKit/UIScreen.h>
#include <OpenGLES/EAGL.h>
#include <OpenGLES/ES2/gl.h>
#include <OpenGLES/ES2/glext.h>
// Uniform index.
enum
{
UNIFORM_Y,
UNIFORM_UV,
UNIFORM_ROTATION_ANGLE,
UNIFORM_COLOR_CONVERSION_MATRIX,
NUM_UNIFORMS
};
GLint uniforms[NUM_UNIFORMS];
// Attribute index.
enum
{
ATTRIB_VERTEX,
ATTRIB_TEXCOORD,
NUM_ATTRIBUTES
};
//YUV->RGB
//颜色转换常量(yuv到rgb),包括从16-235/16-240(视频范围)进行调整
static const GLfloat kColorConversion601[] = {
1.164, 1.164, 1.164,
0.0, -0.392, 2.017,
1.596, -0.813, 0.0,
};
// BT.709, 这是高清电视的标准
static const GLfloat kColorConversion709[] = {
1.164, 1.164, 1.164,
0.0, -0.213, 2.112,
1.793, -0.533, 0.0,
};
@interface AAPLEAGLLayer ()
{
// The pixel dimensions of the CAEAGLLayer.
//宽
GLint _backingWidth;
//高
GLint _backingHeight;
EAGLContext *_context;
/*
YUV分为2个YUV视频帧分为亮度和色度两个纹理,
分别用GL_LUMINANCE格式和GL_LUMINANCE_ALPHA格式读取。
*/
CVOpenGLESTextureRef _lumaTexture;
CVOpenGLESTextureRef _chromaTexture;
//帧缓存区
GLuint _frameBufferHandle;
//颜色缓存区
GLuint _colorBufferHandle;
//选择颜色通道
const GLfloat *_preferredConversion;
}
@property GLuint program;
@end
@implementation AAPLEAGLLayer
@synthesize pixelBuffer = _pixelBuffer;
-(CVPixelBufferRef) pixelBuffer
{
return _pixelBuffer;
}
- (void)setPixelBuffer:(CVPixelBufferRef)pb
{
if(_pixelBuffer) {
CVPixelBufferRelease(_pixelBuffer);
}
/*
在iOS里,我们经常能看到 CVPixelBufferRef 这个类型,在Camera 采集返回的数据里得到一个CMSampleBufferRef,而每个CMSampleBufferRef里则包含一个 CVPixelBufferRef,在视频硬解码的返回数据里也是一个 CVPixelBufferRef(里面包好了所有的压缩的图片信息)。CVPixelBufferRef:是一种像素图片类型,由于CV开头,所以它是属于 CoreVideo 模块的。
*/
_pixelBuffer = CVPixelBufferRetain(pb);
//获取视频帧的宽与高
int frameWidth = (int)CVPixelBufferGetWidth(_pixelBuffer);
int frameHeight = (int)CVPixelBufferGetHeight(_pixelBuffer);
//显示_pixelBuffer
/*
参数1: 显示数据
参数2: frame宽
参数3: frame高
*/
[self displayPixelBuffer:_pixelBuffer width:frameWidth height:frameHeight];
}
- (instancetype)initWithFrame:(CGRect)frame
{
self = [super init];
if (self) {
CGFloat scale = [[UIScreen mainScreen] scale];
self.contentsScale = scale;
//一个布尔值,指示层是否包含完全不透明的内容.默认为NO
self.opaque = TRUE;
/*
kEAGLDrawablePropertyRetainedBacking指定可绘制表面在显示后是否保留其内容的键.默认为NO.
*/
self.drawableProperties = @{ kEAGLDrawablePropertyRetainedBacking :[NSNumber numberWithBool:YES]};
//设置layer图层frame
[self setFrame:frame];
// 设置绘制框架的上下文.
_context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
if (!_context) {
return nil;
}
// 将默认转换设置为BT.709,这是HDTV的标准
_preferredConversion = kColorConversion709;
[self setupGL];
}
return self;
}
- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer width:(uint32_t)frameWidth height:(uint32_t)frameHeight
{
//判断_context 是否创建成功.不成功则无法继续
if (!_context || ![EAGLContext setCurrentContext:_context]) {
return;
}
//判断需要显示的数据是否为空.为空则返回并给出错误信息
if(pixelBuffer == NULL) {
NSLog(@"Pixel buffer is null");
return;
}
CVReturn err;
//返回像素缓冲区的平面数
size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
/*
使用像素缓冲区的颜色附件确定适当的颜色转换矩阵.
参数1: 像素缓存区
参数2: kCVImageBufferYCbCrMatrixKey YCbCr->RGB
参数3: 附件模式,NULL
*/
CFTypeRef colorAttachments = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, NULL);
//将一个字符串中的字符范围与另一个字符串中的字符范围进行比较
/*
参数1:theString1,用于比较的第一个字符串
参数2:theString2,用于比较的第二个字符串。
参数3:rangeToCompare,要比较的字符范围。要使用整个字符串,请传递范围或使用。指定的范围不得超过字符串的长度
*/
if (CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo) {
_preferredConversion = kColorConversion601;
}
else {
_preferredConversion = kColorConversion709;
}
/*
CVOpenGLESTextureCacheCreateTextureFromImage 将创建 GLES texture 从 CVPixelBufferRef.
*/
/*
从像素缓存区pixelBuffer创建Y和UV纹理,这些纹理会被绘制在帧缓存区的Y平面上.
*/
CVOpenGLESTextureCacheRef _videoTextureCache;
/*
CVOpenGLESTextureCacheCreate
功能: 创建 CVOpenGLESTextureCacheRef 创建新的纹理缓存
参数1: kCFAllocatorDefault默认内存分配器.
参数2: NULL
参数3: EAGLContext 图形上下文
参数4: NULL
参数5: 新创建的纹理缓存
@result kCVReturnSuccess
*/
err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_videoTextureCache);
if (err != noErr) {
NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err);
return;
}
//激活纹理
glActiveTexture(GL_TEXTURE0);
//1.创建亮度纹理-Y纹理
/*
CVOpenGLESTextureCacheCreateTextureFromImage
功能:根据CVImageBuffer创建CVOpenGlESTexture 纹理对象
参数1: 内存分配器,kCFAllocatorDefault
参数2: 纹理缓存.纹理缓存将管理纹理的纹理缓存对象
参数3: sourceImage.
参数4: 纹理属性.默认给NULL
参数5: 目标纹理,GL_TEXTURE_2D
参数6: 指定纹理中颜色组件的数量(GL_RGBA, GL_LUMINANCE, GL_RGBA8_OES, GL_RG, and GL_RED (NOTE: 在 GLES3 使用 GL_R8 替代 GL_RED).)
参数7: 帧宽度
参数8: 帧高度
参数9: 格式指定像素数据的格式
参数10: 指定像素数据的数据类型,GL_UNSIGNED_BYTE
参数11: planeIndex
参数12: 纹理输出新创建的纹理对象将放置在此处。
*/
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
_videoTextureCache,
pixelBuffer,
NULL,
GL_TEXTURE_2D,
GL_RED_EXT,
frameWidth,
frameHeight,
GL_RED_EXT,
GL_UNSIGNED_BYTE,
0,
&_lumaTexture);
if (err) {
NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
//2.配置亮度纹理属性
//绑定纹理.
glBindTexture(CVOpenGLESTextureGetTarget(_lumaTexture), CVOpenGLESTextureGetName(_lumaTexture));
//配置纹理放大/缩小过滤方式以及纹理围绕S/T环绕方式
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
//3.UV-plane 纹理
//如果颜色通道个数>1,则除了Y还有UV-Plane.
if(planeCount == 2) {
// UV-plane.
//激活UV-plane纹理
glActiveTexture(GL_TEXTURE1);
//4.创建UV-plane纹理
/*
CVOpenGLESTextureCacheCreateTextureFromImage
功能:根据CVImageBuffer创建CVOpenGlESTexture 纹理对象
参数1: 内存分配器,kCFAllocatorDefault
参数2: 纹理缓存.纹理缓存将管理纹理的纹理缓存对象
参数3: sourceImage.
参数4: 纹理属性.默认给NULL
参数5: 目标纹理,GL_TEXTURE_2D
参数6: 指定纹理中颜色组件的数量(GL_RGBA, GL_LUMINANCE, GL_RGBA8_OES, GL_RG, and GL_RED (NOTE: 在 GLES3 使用 GL_R8 替代 GL_RED).)
参数7: 帧宽度
参数8: 帧高度
参数9: 格式指定像素数据的格式
参数10: 指定像素数据的数据类型,GL_UNSIGNED_BYTE
参数11: planeIndex
参数12: 纹理输出新创建的纹理对象将放置在此处。
*/
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
_videoTextureCache,
pixelBuffer,
NULL,
GL_TEXTURE_2D,
GL_RG_EXT,
frameWidth / 2,
frameHeight / 2,
GL_RG_EXT,
GL_UNSIGNED_BYTE,
1,
&_chromaTexture);
if (err) {
NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
//5.绑定纹理
glBindTexture(CVOpenGLESTextureGetTarget(_chromaTexture), CVOpenGLESTextureGetName(_chromaTexture));
//6.配置纹理放大/缩小过滤方式以及纹理围绕S/T环绕方式
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
//绑定帧缓存区
glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle);
//设置视口.
glViewport(0, 0, _backingWidth, _backingHeight);
//清理屏幕
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
//使用shaderProgram
glUseProgram(self.program);
//传递Uniform属性到shader
//UNIFORM_ROTATION_ANGLE 旋转角度
glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0);
//UNIFORM_COLOR_CONVERSION_MATRIX YUV->RGB颜色矩阵
glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion);
// 根据视频的方向和纵横比设置四边形顶点
CGRect viewBounds = self.bounds;
CGSize contentSize = CGSizeMake(frameWidth, frameHeight);
/*
AVMakeRectWithAspectRatioInsideRect
功能: 返回一个按比例缩放的CGRect,该CGRect保持由边界CGRect内的CGSize指定的纵横比
参数1:希望保持的宽高比或纵横比
参数2:填充的rect
*/
CGRect vertexSamplingRect = AVMakeRectWithAspectRatioInsideRect(contentSize, viewBounds);
// 计算标准化的四边形坐标以将帧绘制到其中
//标准化采样大小
CGSize normalizedSamplingSize = CGSizeMake(0.0, 0.0);
//标准化规模
CGSize cropScaleAmount = CGSizeMake(vertexSamplingRect.size.width/viewBounds.size.width,vertexSamplingRect.size.height/viewBounds.size.height);
// 规范化四元顶点
if (cropScaleAmount.width > cropScaleAmount.height) {
normalizedSamplingSize.width = 1.0;
normalizedSamplingSize.height = cropScaleAmount.height/cropScaleAmount.width;
}
else {
normalizedSamplingSize.width = cropScaleAmount.width/cropScaleAmount.height;
normalizedSamplingSize.height = 1.0;;
}
/*
四顶点数据定义了我们绘制像素缓冲区的二维平面区域。
使用(-1,-1)和(1,1)分别作为左下角和右上角坐标形成的顶点数据覆盖整个屏幕。
*/
GLfloat quadVertexData [] = {
-1 * normalizedSamplingSize.width, -1 * normalizedSamplingSize.height,
normalizedSamplingSize.width, -1 * normalizedSamplingSize.height,
-1 * normalizedSamplingSize.width, normalizedSamplingSize.height,
normalizedSamplingSize.width, normalizedSamplingSize.height,
};
// 更新属性值.
//坐标数据
glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, quadVertexData);
glEnableVertexAttribArray(ATTRIB_VERTEX);
/*
纹理顶点的设置使我们垂直翻转纹理。这使得我们的左上角原点缓冲区匹配OpenGL的左下角纹理坐标系
*/
CGRect textureSamplingRect = CGRectMake(0, 0, 1, 1);
GLfloat quadTextureData[] = {
CGRectGetMinX(textureSamplingRect), CGRectGetMaxY(textureSamplingRect),
CGRectGetMaxX(textureSamplingRect), CGRectGetMaxY(textureSamplingRect),
CGRectGetMinX(textureSamplingRect), CGRectGetMinY(textureSamplingRect),
CGRectGetMaxX(textureSamplingRect), CGRectGetMinY(textureSamplingRect)
};
//更新纹理坐标属性值
glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, 0, 0, quadTextureData);
glEnableVertexAttribArray(ATTRIB_TEXCOORD);
//绘制图形
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
//绑定渲染缓存区->显示到屏幕
glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle);
[_context presentRenderbuffer:GL_RENDERBUFFER];
//清理纹理,方便下一帧纹理显示
[self cleanUpTextures];
// 定期纹理缓存刷新每帧
CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);
if(_videoTextureCache) {
CFRelease(_videoTextureCache);
}
}
# pragma mark - OpenGL setup
//OpenGL 相关设置
- (void)setupGL
{
if (!_context || ![EAGLContext setCurrentContext:_context]) {
return;
}
//设置缓冲区
[self setupBuffers];
//加载shaders 着色器
[self loadShaders];
glUseProgram(self.program);
// 0 and 1 are the texture IDs of _lumaTexture and _chromaTexture respectively.
glUniform1i(uniforms[UNIFORM_Y], 0);
glUniform1i(uniforms[UNIFORM_UV], 1);
glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0);
glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion);
}
#pragma mark - Utilities
- (void)setupBuffers
{
//取消深度测试
glDisable(GL_DEPTH_TEST);
//打开ATTRIB_VERTEX 属性 position
glEnableVertexAttribArray(ATTRIB_VERTEX);
//顶点数据解析方式
/*
参数1: 指定从索引0开始取数据,与顶点着色器对应
参数2: 顶点属性大小
参数3: 数据类型
参数4: 归一化
参数5: 步长(Stride)
参数6: 数据在缓冲区起始位置的偏移量
*/
glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
//ATTRIB_TEXCOORD == texCoord
glEnableVertexAttribArray(ATTRIB_TEXCOORD);
glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
//创建buffer
[self createBuffers];
}
- (void) createBuffers
{
//创建帧缓存区 frameBuffer
glGenFramebuffers(1, &_frameBufferHandle);
glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle);
//创建color缓存区 RenderBuffer
glGenRenderbuffers(1, &_colorBufferHandle);
glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle);
//绑定渲染缓存区
[_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:self];
//设置渲染缓存区的尺寸:_backingWidth/_backingHeight
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
//绑定renderBuffer到FrameBuffer
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorBufferHandle);
//检查FrameBuffer状态
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
NSLog(@"Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
}
}
//释放帧缓存区与渲染缓存区
- (void) releaseBuffers
{
if(_frameBufferHandle) {
glDeleteFramebuffers(1, &_frameBufferHandle);
_frameBufferHandle = 0;
}
if(_colorBufferHandle) {
glDeleteRenderbuffers(1, &_colorBufferHandle);
_colorBufferHandle = 0;
}
}
//重新设置帧缓存区与渲染缓存区
- (void) resetRenderBuffer
{
if (!_context || ![EAGLContext setCurrentContext:_context]) {
return;
}
[self releaseBuffers];
[self createBuffers];
}
//清理纹理(Y纹理,UV纹理)
- (void) cleanUpTextures
{
if (_lumaTexture) {
CFRelease(_lumaTexture);
_lumaTexture = NULL;
}
if (_chromaTexture) {
CFRelease(_chromaTexture);
_chromaTexture = NULL;
}
}
#pragma mark - OpenGL ES 2 shader compilation
//片元着色器代码
const GLchar *shader_fsh = (const GLchar*)"varying highp vec2 texCoordVarying;"
"precision mediump float;"
"uniform sampler2D SamplerY;"
"uniform sampler2D SamplerUV;"
"uniform mat3 colorConversionMatrix;"
"void main()"
"{"
" mediump vec3 yuv;"
" lowp vec3 rgb;"
// Subtract constants to map the video range start at 0
" yuv.x = (texture2D(SamplerY, texCoordVarying).r - (16.0/255.0));"
" yuv.yz = (texture2D(SamplerUV, texCoordVarying).rg - vec2(0.5, 0.5));"
" rgb = colorConversionMatrix * yuv;"
" gl_FragColor = vec4(rgb, 1);"
"}";
//顶点着色器代码
const GLchar *shader_vsh = (const GLchar*)"attribute vec4 position;"
"attribute vec2 texCoord;"
"uniform float preferredRotation;"
"varying vec2 texCoordVarying;"
"void main()"
"{"
" mat4 rotationMatrix = mat4(cos(preferredRotation), -sin(preferredRotation), 0.0, 0.0,"
" sin(preferredRotation), cos(preferredRotation), 0.0, 0.0,"
" 0.0, 0.0, 1.0, 0.0,"
" 0.0, 0.0, 0.0, 1.0);"
" gl_Position = position * rotationMatrix;"
" texCoordVarying = texCoord;"
"}";
- (BOOL)loadShaders
{
GLuint vertShader = 0, fragShader = 0;
// 创建着色program.
self.program = glCreateProgram();
//编译顶点着色器
if(![self compileShaderString:&vertShader type:GL_VERTEX_SHADER shaderString:shader_vsh]) {
NSLog(@"Failed to compile vertex shader");
return NO;
}
//编译片元着色器
if(![self compileShaderString:&fragShader type:GL_FRAGMENT_SHADER shaderString:shader_fsh]) {
NSLog(@"Failed to compile fragment shader");
return NO;
}
// 附着顶点着色器到program.
glAttachShader(self.program, vertShader);
// 附着片元着色器到program.
glAttachShader(self.program, fragShader);
// 绑定属性位置。这需要在链接之前完成.(让ATTRIB_VERTEX/ATTRIB_TEXCOORD 与position/texCoord产生连接)
glBindAttribLocation(self.program, ATTRIB_VERTEX, "position");
glBindAttribLocation(self.program, ATTRIB_TEXCOORD, "texCoord");
// Link the program.
if (![self linkProgram:self.program]) {
NSLog(@"Failed to link program: %d", self.program);
if (vertShader) {
glDeleteShader(vertShader);
vertShader = 0;
}
if (fragShader) {
glDeleteShader(fragShader);
fragShader = 0;
}
if (self.program) {
glDeleteProgram(self.program);
self.program = 0;
}
return NO;
}
//获取uniform的位置
//Y亮度纹理
uniforms[UNIFORM_Y] = glGetUniformLocation(self.program, "SamplerY");
//UV色量纹理
uniforms[UNIFORM_UV] = glGetUniformLocation(self.program, "SamplerUV");
//旋转角度preferredRotation
uniforms[UNIFORM_ROTATION_ANGLE] = glGetUniformLocation(self.program, "preferredRotation");
//YUV->RGB
uniforms[UNIFORM_COLOR_CONVERSION_MATRIX] = glGetUniformLocation(self.program, "colorConversionMatrix");
// Release vertex and fragment shaders.
if (vertShader) {
glDetachShader(self.program, vertShader);
glDeleteShader(vertShader);
}
if (fragShader) {
glDetachShader(self.program, fragShader);
glDeleteShader(fragShader);
}
return YES;
}
//编译shader
- (BOOL)compileShaderString:(GLuint *)shader type:(GLenum)type shaderString:(const GLchar*)shaderString
{
*shader = glCreateShader(type);
glShaderSource(*shader, 1, &shaderString, NULL);
glCompileShader(*shader);
#if defined(DEBUG)
GLint logLength;
glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar *log = (GLchar *)malloc(logLength);
glGetShaderInfoLog(*shader, logLength, &logLength, log);
NSLog(@"Shader compile log:\n%s", log);
free(log);
}
#endif
GLint status = 0;
glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
if (status == 0) {
glDeleteShader(*shader);
return NO;
}
return YES;
}
- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type URL:(NSURL *)URL
{
NSError *error;
NSString *sourceString = [[NSString alloc] initWithContentsOfURL:URL encoding:NSUTF8StringEncoding error:&error];
if (sourceString == nil) {
NSLog(@"Failed to load vertex shader: %@", [error localizedDescription]);
return NO;
}
const GLchar *source = (GLchar *)[sourceString UTF8String];
return [self compileShaderString:shader type:type shaderString:source];
}
- (BOOL)linkProgram:(GLuint)prog
{
GLint status;
glLinkProgram(prog);
#if defined(DEBUG)
GLint logLength;
glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar *log = (GLchar *)malloc(logLength);
glGetProgramInfoLog(prog, logLength, &logLength, log);
NSLog(@"Program link log:\n%s", log);
free(log);
}
#endif
glGetProgramiv(prog, GL_LINK_STATUS, &status);
if (status == 0) {
return NO;
}
return YES;
}
- (BOOL)validateProgram:(GLuint)prog
{
GLint logLength, status;
glValidateProgram(prog);
glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar *log = (GLchar *)malloc(logLength);
glGetProgramInfoLog(prog, logLength, &logLength, log);
NSLog(@"Program validate log:\n%s", log);
free(log);
}
glGetProgramiv(prog, GL_VALIDATE_STATUS, &status);
if (status == 0) {
return NO;
}
return YES;
}
- (void)dealloc
{
if (!_context || ![EAGLContext setCurrentContext:_context]) {
return;
}
[self cleanUpTextures];
if(_pixelBuffer) {
CVPixelBufferRelease(_pixelBuffer);
}
if (self.program) {
glDeleteProgram(self.program);
self.program = 0;
}
if(_context) {
_context = nil;
}
}
@end
demo 密码:fqqg
demo 密码:kxxu