@qidiandasheng
2020-07-13T14:45:18.000000Z
The response chain first needs an input source that feeds image data into it. These input sources mainly inherit from GPUImageOutput; the ones we use most often are GPUImageVideoCamera (live camera frames), GPUImageStillCamera (still photo capture), GPUImagePicture (still images), and GPUImageMovie (movie files).
Here we'll take GPUImageVideoCamera as an example and walk through how source data is handled at the head of the response chain.
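Before digging into the internals, it helps to see how GPUImageVideoCamera typically sits at the head of a chain. A minimal sketch (the sepia filter is just an arbitrary example, self.view is assumed to be a GPUImageView, and error handling is omitted):

GPUImageVideoCamera *videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;

// Any GPUImageFilter subclass can act as the next link in the chain
GPUImageSepiaFilter *sepiaFilter = [[GPUImageSepiaFilter alloc] init];
GPUImageView *filterView = (GPUImageView *)self.view;

// Source -> filter -> view: each addTarget: call appends the next link of the response chain
[videoCamera addTarget:sepiaFilter];
[sepiaFilter addTarget:filterView];

[videoCamera startCameraCapture];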
GPUImageVideoCamera uses an AVCaptureSession to capture data from the AV input device and deliver it to the specified outputs. How AVCaptureSession itself is used won't be covered here; instead we'll look at what happens after the capture output callback delivers its data.
The following method is the output callback of AVCaptureVideoDataOutput. Some code is omitted; we'll focus on the part that handles the image data:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Frame-rendering semaphore: wait until the previous frame has finished processing before handling the next one.
    // The initial value is 1; a successful wait drops it to 0 and the work below is enqueued.
    // If the previous frame is still being processed, the wait fails and this frame is dropped.
    if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
    {
        return;
    }
    
    CFRetain(sampleBuffer);
    // Hand each captured frame to the video-processing queue asynchronously
    runAsynchronouslyOnVideoProcessingQueue(^{
        // Delegate callback (gives the caller access to the raw CMSampleBufferRef from the camera)
        if (self.delegate)
        {
            [self.delegate willOutputSampleBuffer:sampleBuffer];
        }
        
        // Core image-processing entry point
        [self processVideoSampleBuffer:sampleBuffer];
        
        CFRelease(sampleBuffer);
        dispatch_semaphore_signal(frameRenderingSemaphore);
    });
}
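The semaphore dance above is a general frame-dropping pattern: the semaphore starts at 1, and a non-blocking wait either claims it (the frame is processed) or fails immediately (the frame is dropped rather than queued). A generic sketch of the same idea, not taken from GPUImage (processingQueue here stands in for GPUImage's video processing queue):

static dispatch_semaphore_t frameSemaphore;   // created once with dispatch_semaphore_create(1)
static dispatch_queue_t processingQueue;      // a serial queue for frame processing

static void handleIncomingFrame(void (^processFrame)(void))
{
    // If the previous frame is still being processed, drop this one instead of queueing it
    if (dispatch_semaphore_wait(frameSemaphore, DISPATCH_TIME_NOW) != 0)
    {
        return;
    }
    dispatch_async(processingQueue, ^{
        processFrame();
        dispatch_semaphore_signal(frameSemaphore); // allow the next frame in
    });
}

The real image handling then happens in processVideoSampleBuffer:, shown next: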
- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
{
    if (capturePaused)
    {
        return;
    }
    
    // Record the start time (used for benchmarking below)
    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
    // Get the current video frame
    CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Get the frame's width and height
    int bufferWidth = (int) CVPixelBufferGetWidth(cameraFrame);
    int bufferHeight = (int) CVPixelBufferGetHeight(cameraFrame);
    // Get the YCbCr matrix attachment describing the colorimetry
    CFTypeRef colorAttachments = CVBufferGetAttachment(cameraFrame, kCVImageBufferYCbCrMatrixKey, NULL);
    
    // Pick the matching color conversion matrix (YUV color space to RGB color space)
    if (colorAttachments != NULL)
    {
        if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
        {
            if (isFullYUVRange)
            {
                _preferredConversion = kColorConversion601FullRange;
            }
            else
            {
                _preferredConversion = kColorConversion601;
            }
        }
        else
        {
            _preferredConversion = kColorConversion709;
        }
    }
    else
    {
        if (isFullYUVRange)
        {
            _preferredConversion = kColorConversion601FullRange;
        }
        else
        {
            _preferredConversion = kColorConversion601;
        }
    }
    // Get the presentation timestamp of this sample
    CMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    
    // Make the shared EAGLContext the current OpenGL ES context
    [GPUImageContext useImageProcessingContext];
    
    /* Fast texture upload: can the CVPixelBufferRef be mapped directly into an OpenGL ES texture,
       and are we capturing as YUV?
       The simulator does not support this; on devices it relies on the CVOpenGLESTextureCacheRef /
       CVImageBufferRef mapping available since iOS 5. */
    if ([GPUImageContext supportsFastTextureUpload] && captureAsYUV)
    {
        // Core Video texture references (one for luminance, one for chrominance)
        CVOpenGLESTextureRef luminanceTextureRef = NULL;
        CVOpenGLESTextureRef chrominanceTextureRef = NULL;
        
        // A plane count greater than zero means planar YUV input
        if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) // Check for YUV planar inputs to do RGB conversion
        {
            // Lock the pixel buffer's base address
            CVPixelBufferLockBaseAddress(cameraFrame, 0);
            
            // Cache the buffer dimensions if they changed
            if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )
            {
                imageBufferWidth = bufferWidth;
                imageBufferHeight = bufferHeight;
            }
            CVReturn err;
            // Y-plane
            // Select texture unit 4, which the conversion shader's luminance sampler will point at
            glActiveTexture(GL_TEXTURE4);
            // Create the luminance texture reference from the Y plane via the texture cache
            // (whether or not the device supports single-channel red textures, this code uses GL_LUMINANCE)
            if ([GPUImageContext deviceSupportsRedTextures])
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            }
            else
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            }
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }
            
            // Get the luminance texture name and bind it
            luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, luminanceTexture);
            // Set the texture wrap modes
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
            
            // UV-plane
            // Select texture unit 5 for the chrominance texture
            glActiveTexture(GL_TEXTURE5);
            // Create the chrominance texture reference from the half-resolution CbCr plane
            if ([GPUImageContext deviceSupportsRedTextures])
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            }
            else
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            }
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }
            
            // Get the chrominance texture name, bind it, and set its wrap modes
            chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
            // Convert YUV to RGB into the output framebuffer
            [self convertYUVToRGBOutput];
            
            // Width/height after accounting for any rotation
            int rotatedImageBufferWidth = bufferWidth, rotatedImageBufferHeight = bufferHeight;
            if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
            {
                rotatedImageBufferWidth = bufferHeight;
                rotatedImageBufferHeight = bufferWidth;
            }
            
            // Push the new framebuffer to all targets
            [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:rotatedImageBufferWidth height:rotatedImageBufferHeight time:currentTime];
            
            CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
            CFRelease(luminanceTextureRef);
            CFRelease(chrominanceTextureRef);
        }
        else
        {
            // Non-planar input: not handled in this excerpt
        }
        // Benchmarking: accumulate per-frame processing time after an initial warm-up period
        if (_runBenchmark)
        {
            numberOfFramesCaptured++;
            if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
            {
                CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
                totalFrameTimeDuringCapture += currentFrameTime;
                NSLog(@"Average frame time : %f ms", [self averageFrameDurationDuringCapture]);
                NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
            }
        }
    }
    else
    {
        // Slow path: no fast texture upload, or capturing as BGRA; upload the pixels with glTexImage2D
        CVPixelBufferLockBaseAddress(cameraFrame, 0);
        
        int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(cameraFrame);
        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bytesPerRow / 4, bufferHeight) onlyTexture:YES];
        [outputFramebuffer activateFramebuffer];
        
        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
        
        // glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));
        
        // Using BGRA extension to pull in video frame data directly
        // The use of bytesPerRow / 4 accounts for a display glitch present in preview video frames when using the photo preset on the camera
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow / 4, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));
        
        [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bytesPerRow / 4 height:bufferHeight time:currentTime];
        
        CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
        
        if (_runBenchmark)
        {
            numberOfFramesCaptured++;
            if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
            {
                CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
                totalFrameTimeDuringCapture += currentFrameTime;
            }
        }
    }
}
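For reference, the kColorConversion601, kColorConversion601FullRange and kColorConversion709 matrices selected above are 3x3 YCbCr-to-RGB matrices for BT.601 video range, BT.601 full range, and BT.709. As a rough illustration, a full-range BT.601 matrix built from the textbook coefficients looks like this, laid out column-major the way glUniformMatrix3fv expects (illustrative values, not copied from the GPUImage source):

// Columns are the contributions of Y, Cb and Cr:
//   R = Y              + 1.402 * Cr
//   G = Y - 0.344 * Cb - 0.714 * Cr
//   B = Y + 1.772 * Cb
static const GLfloat illustrativeColorConversion601FullRange[] = {
    1.0,    1.0,    1.0,    // Y column
    0.0,   -0.344,  1.772,  // Cb column
    1.402, -0.714,  0.0,    // Cr column
};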
// Convert YUV to RGB and render into the output framebuffer
- (void)convertYUVToRGBOutput;
{
    // Activate the YUV conversion shader program
    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
    
    int rotatedImageBufferWidth = imageBufferWidth, rotatedImageBufferHeight = imageBufferHeight;
    if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
    {
        rotatedImageBufferWidth = imageBufferHeight;
        rotatedImageBufferHeight = imageBufferWidth;
    }
    
    // Fetch an output framebuffer of the right size from the shared framebuffer cache
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(rotatedImageBufferWidth, rotatedImageBufferHeight) textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];
    
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };
    
    // Bind the luminance texture to texture unit 4
    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, luminanceTexture);
    // Point the luminance sampler uniform at texture unit 4
    glUniform1i(yuvConversionLuminanceTextureUniform, 4);
    
    // Bind the chrominance texture to texture unit 5
    glActiveTexture(GL_TEXTURE5);
    glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
    // Point the chrominance sampler uniform at texture unit 5
    glUniform1i(yuvConversionChrominanceTextureUniform, 5);
    
    // Upload the color conversion matrix selected earlier
    glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);
    
    // Vertex positions
    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
    // Texture coordinates, chosen according to the internal rotation
    glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [GPUImageFilter textureCoordinatesForRotation:internalRotation]);
    
    // Draw the full-screen quad
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
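What the yuvConversionProgram does with those two textures and the matrix can be sketched as follows. This is an illustrative fragment shader, not the exact GPUImage shader, and the uniform names are made up to mirror the uniforms set above (it shows the full-range variant; the video-range one also offsets Y):

// A sketch of the YUV -> RGB fragment shader (illustrative, not the GPUImage source)
static NSString *const kIllustrativeYUVConversionFragmentShader =
    @"varying highp vec2 textureCoordinate;"
    @"uniform sampler2D luminanceTexture;"        // bound to texture unit 4 above
    @"uniform sampler2D chrominanceTexture;"      // bound to texture unit 5 above
    @"uniform mediump mat3 colorConversionMatrix;"
    @"void main()"
    @"{"
    @"    mediump vec3 yuv;"
    @"    yuv.x  = texture2D(luminanceTexture, textureCoordinate).r;"                 // Y from the full-resolution plane
    @"    yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5);"  // Cb/Cr from the half-resolution plane, re-centred on 0
    @"    gl_FragColor = vec4(colorConversionMatrix * yuv, 1.0);"
    @"}";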
The following code walks over all of the current targets twice.
The first pass configures each target: rotation, input size, and the input framebuffer (whose attachment is the texture we just rendered into).
The second pass notifies every target to redraw using that new texture.
- (void)updateTargetsForVideoCameraUsingCacheTextureAtWidth:(int)bufferWidth height:(int)bufferHeight time:(CMTime)currentTime;
{
    // First, update all the framebuffers in the targets
    for (id<GPUImageInput> currentTarget in targets)
    {
        if ([currentTarget enabled])
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            
            if (currentTarget != self.targetToIgnoreForUpdates)
            {
                [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];
                
                if ([currentTarget wantsMonochromeInput] && captureAsYUV)
                {
                    [currentTarget setCurrentlyReceivingMonochromeInput:YES];
                    // TODO: Replace optimization for monochrome output
                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
                }
                else
                {
                    [currentTarget setCurrentlyReceivingMonochromeInput:NO];
                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
                }
            }
            else
            {
                [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
            }
        }
    }
    
    // Then release our hold on the local framebuffer to send it back to the cache as soon as it's no longer needed
    [outputFramebuffer unlock];
    outputFramebuffer = nil;
    
    // Finally, trigger rendering as needed
    for (id<GPUImageInput> currentTarget in targets)
    {
        if ([currentTarget enabled])
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            
            if (currentTarget != self.targetToIgnoreForUpdates)
            {
                [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget];
            }
        }
    }
}
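To make the two passes concrete: every target is an object conforming to GPUImageInput, and it gets called in a fixed order, first the configuration setters plus setInputFramebuffer:atIndex:, then newFrameReadyAtTime:atIndex:. A stripped-down skeleton of a hypothetical custom target (only the methods relevant here; the real protocol has several more required methods, and the rendering itself is elided):

@interface MyCustomTarget : NSObject <GPUImageInput>
@end

@implementation MyCustomTarget
{
    GPUImageFramebuffer *inputFramebuffer;
    GPUImageRotationMode inputRotation;
    CGSize inputSize;
}

// Pass 1: the source hands over its parameters and the framebuffer holding the new texture
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex
{
    inputRotation = newInputRotation;
}

- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex
{
    inputSize = newSize;
}

- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex
{
    inputFramebuffer = newInputFramebuffer;
    [inputFramebuffer lock]; // keep the framebuffer alive until we have used it
}

// Pass 2: the source announces that the texture is ready, so draw with it and let the framebuffer go
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex
{
    // ... render something using [inputFramebuffer texture] ...
    [inputFramebuffer unlock];
    inputFramebuffer = nil;
}

@end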
The previous part covered the input source at the head of the chain; this part looks at the processing stage in the middle.
GPUImageFilter receives a source image, renders a new image with its own vertex and fragment shaders, and notifies the next object in the response chain once drawing is finished.
GPUImageFilter and the other elements of the response chain implement the GPUImageInput protocol, so each of them can both feed textures into the chain and receive and process textures coming from earlier in it. The next object in the chain is a target, and the chain can branch by adding multiple targets, as sketched below.
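Branching is just adding more than one target to the same node. A hypothetical setup, assuming videoCamera, filterView and movieWriter (a GPUImageVideoCamera, GPUImageView and GPUImageMovieWriter) were created elsewhere:

GPUImageSepiaFilter *sepiaFilter = [[GPUImageSepiaFilter alloc] init];
[videoCamera addTarget:sepiaFilter];

// Two branches hang off the same filter: every new frame is delivered to both targets
[sepiaFilter addTarget:filterView];   // branch 1: live on-screen preview
[sepiaFilter addTarget:movieWriter];  // branch 2: encode to a movie file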
As mentioned above, once the input source has a frame ready it calls the - (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex method on each target to tell it to render.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex
{
    // Vertex coordinates of a full-screen quad in normalized device coordinates
    static const GLfloat imageVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };
    
    // Render into this filter's output texture
    [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];
    
    // Notify this filter's own targets that a new frame is ready
    [self informTargetsAboutNewFrameAtTime:frameTime];
}
The vertex coordinates are fixed:
static const GLfloat imageVertices[] = {
    -1.0f, -1.0f,
     1.0f, -1.0f,
    -1.0f,  1.0f,
     1.0f,  1.0f,
};
The texture coordinates come from the following method, which picks a coordinate set based on the rotation mode passed in:
+ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode{
    // No rotation
    static const GLfloat noRotationTextureCoordinates[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };
    
    // Rotate-left mode (the sampled image ends up rotated to the right)
    static const GLfloat rotateLeftTextureCoordinates[] = {
        1.0f, 0.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        0.0f, 1.0f,
    };
    
    // ... remaining rotation modes omitted ...
}
// Render into this filter's framebuffer
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    // Skip rendering entirely if it has been disabled
    if (self.preventRendering)
    {
        // Still unlock the input framebuffer so it can go back to the cache
        [firstInputFramebuffer unlock];
        return;
    }
    
    [GPUImageContext setActiveShaderProgram:filterProgram];
    
    // Fetch this filter's own output framebuffer from the cache
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];
    if (usingNextFrameForImageCapture)
    {
        [outputFramebuffer lock];
    }
    
    [self setUniformsForProgramAtIndex:0];
    
    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
    glClear(GL_COLOR_BUFFER_BIT);
    
    // Bind the input texture to texture unit 2
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
    // Point the input sampler uniform at texture unit 2 (tells GLSL which texture unit to read from)
    glUniform1i(filterInputTextureUniform, 2);
    
    // Upload the vertex and texture coordinates, then draw the quad
    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    
    // We're done reading from the input framebuffer, so unlock it
    [firstInputFramebuffer unlock];
    
    // If a caller wants to capture an image from this frame (NO by default), signal that drawing has finished
    if (usingNextFrameForImageCapture)
    {
        dispatch_semaphore_signal(imageCaptureSemaphore);
    }
}
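The usingNextFrameForImageCapture flag and imageCaptureSemaphore exist so that a caller can synchronously pull a UIImage out of a filter once this frame has been drawn. The typical calling pattern looks roughly like this (a sketch using a GPUImagePicture source; inputImage and the sepia filter are illustrative):

GPUImagePicture *stillImageSource = [[GPUImagePicture alloc] initWithImage:inputImage];
GPUImageSepiaFilter *stillImageFilter = [[GPUImageSepiaFilter alloc] init];
[stillImageSource addTarget:stillImageFilter];

// Ask the filter to keep its next rendered framebuffer around...
[stillImageFilter useNextFrameForImageCapture];
// ...drive the chain...
[stillImageSource processImage];
// ...then read the result back; this is where the imageCaptureSemaphore signalled above gets waited on
UIImage *filteredImage = [stillImageFilter imageFromCurrentFramebuffer];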
This step is essentially the same as what the input source did above: update every target's input parameters and then notify them that a new frame is ready. The steps are identical:
- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;