需求
在做如美颜,滤镜等功能时,我们不能使用相机原生的AVCaptureVideoPreviewLayer
,而是需要通过其他方式将视频原始帧数据如RGB,NV12等等渲染到iOS界面上.
实现原理
利用OpenGL完成高效的渲染功能.本例中仅提供简单流程讲解,具体每行代码含义可在开源库中查询.
注意:
- 系统的 AVCaptureVideoPreviewLayer 仅能直接渲染从相机采集到的数据,做美颜等功能时无法使用.
- 本例仅实现了RGB与NV12两种类型视频数据的渲染,其他可根据需求自行添加.
阅读前提
- 音视频基础
- OpenGL基础
代码地址 : iOS视频渲染
掘金地址 : iOS视频渲染
简书地址 : iOS视频渲染
博客地址 : iOS视频渲染
具体步骤
1. 创建EAGLContext
上下文对象
- 创建OpenGL预览层
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
- 创建OpenGL上下文对象
EAGLContext *context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
设置上下文渲染缓冲区
/// Wires the view's CAEAGLLayer to an OpenGL ES framebuffer/renderbuffer pair
/// and reports the drawable's pixel size back through the out-parameters.
///
/// @param context            EAGLContext that allocates the renderbuffer storage.
/// @param width              Out: drawable width in pixels (queried from the renderbuffer).
/// @param height             Out: drawable height in pixels.
/// @param colorBufferHandle  Out: name of the generated color renderbuffer.
/// @param frameBufferHandle  Out: name of the generated framebuffer object.
- (void)setupBuffersWithContext:(EAGLContext *)context width:(int *)width height:(int *)height colorBufferHandle:(GLuint *)colorBufferHandle frameBufferHandle:(GLuint *)frameBufferHandle {
    // Flat 2D video quad — depth testing is unnecessary.
    glDisable(GL_DEPTH_TEST);

    // Enable the vertex-position and texture-coordinate attribute slots.
    // NOTE(review): no VBO is bound here and the pointer argument is 0, so these
    // glVertexAttribPointer calls install a NULL client pointer; the actual
    // vertex data (quadVertexData) is re-supplied at draw time. Confirm these
    // two setup-time pointer calls are really needed.
    glEnableVertexAttribArray(ATTRIB_VERTEX);
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
    glEnableVertexAttribArray(ATTRIB_TEXCOORD);
    glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);

    // Create the FBO and its color renderbuffer. The renderbuffer's storage is
    // provided by Core Animation from the layer via
    // -renderbufferStorage:fromDrawable:, which ties GL output to the screen.
    glGenFramebuffers(1, frameBufferHandle);
    glBindFramebuffer(GL_FRAMEBUFFER, *frameBufferHandle);
    glGenRenderbuffers(1, colorBufferHandle);
    glBindRenderbuffer(GL_RENDERBUFFER, *colorBufferHandle);
    [context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];

    // Read back the drawable size actually allocated for the layer.
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH , width);
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, height);

    // Attach the color renderbuffer so draws into the FBO reach the layer.
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, *colorBufferHandle);
}
加载着色器
着色器本Demo中只添加了NV12格式与RGB格式两种原始视频数据.
- (void)loadShaderWithBufferType:(XDXPixelBufferType)type {
- 创建视频纹理缓存区
if (!*videoTextureCache) {
2. 将pixelBuffer
渲染到屏幕
开始渲染前先清空缓存数据
// Releases a single CVOpenGLESTexture slot. CFRelease must never be handed
// NULL, so live references are released and the slot is cleared afterwards,
// making repeated calls harmless.
static void XDXReleaseTextureRef(CVOpenGLESTextureRef *textureRef) {
    if (*textureRef != NULL) {
        CFRelease(*textureRef);
        *textureRef = NULL;
    }
}

/// Drops the texture wrappers created for the previous frame (Y plane, UV
/// plane, and RGB render texture) and flushes the texture cache so stale
/// cache entries can be recycled before the next pixel buffer is rendered.
- (void)cleanUpTextures {
    XDXReleaseTextureRef(&_lumaTexture);
    XDXReleaseTextureRef(&_chromaTexture);
    XDXReleaseTextureRef(&_renderTexture);

    // The options argument is reserved; 0 is the only valid value.
    CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);
}
根据pixelBuffer格式确定视频数据类型
XDXPixelBufferType bufferType;
通过当前的 pixelBuffer 对象创建 CVOpenGLESTexture 对象
// NOTE(review): this is a fragment from the middle of the render method —
// `error`, `videoTextureCache`, `pixelBuffer`, `frameWidth` and `frameHeight`
// are declared earlier in that method, outside this excerpt.
CVOpenGLESTextureRef lumaTexture,chromaTexture,renderTexture;
if (bufferType == XDXPixelBufferTypeNV12) {
    // Y plane (plane 0): one byte per pixel, uploaded as GL_LUMINANCE on
    // texture unit 0.
    glActiveTexture(GL_TEXTURE0);
    error = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                         videoTextureCache,
                                                         pixelBuffer,
                                                         NULL,
                                                         GL_TEXTURE_2D,
                                                         GL_LUMINANCE,
                                                         frameWidth,
                                                         frameHeight,
                                                         GL_LUMINANCE,
                                                         GL_UNSIGNED_BYTE,
                                                         0,
                                                         &lumaTexture);
    if (error) {
        log4cplus_error(kModuleName, "Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", error);
    }else {
        // Retained in the ivar so cleanUpTextures can release it next frame.
        _lumaTexture = lumaTexture;
    }
    // NOTE(review): on failure `lumaTexture` is not valid, yet it is still
    // bound below — CVOpenGLESTextureGetTarget/GetName on an invalid ref can
    // crash. Consider early-returning when error != kCVReturnSuccess.
    glBindTexture(CVOpenGLESTextureGetTarget(lumaTexture), CVOpenGLESTextureGetName(lumaTexture));
    // Linear filtering + edge clamping: required for non-power-of-two
    // textures on ES2, and avoids sampling outside the frame.
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // UV plane (plane 1): interleaved CbCr at half resolution in both axes,
    // uploaded as GL_LUMINANCE_ALPHA (U -> luminance, V -> alpha) on unit 1.
    glActiveTexture(GL_TEXTURE1);
    error = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                         videoTextureCache,
                                                         pixelBuffer,
                                                         NULL,
                                                         GL_TEXTURE_2D,
                                                         GL_LUMINANCE_ALPHA,
                                                         frameWidth / 2,
                                                         frameHeight / 2,
                                                         GL_LUMINANCE_ALPHA,
                                                         GL_UNSIGNED_BYTE,
                                                         1,
                                                         &chromaTexture);
    if (error) {
        log4cplus_error(kModuleName, "Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", error);
    }else {
        _chromaTexture = chromaTexture;
    }
    // NOTE(review): same unguarded bind-on-error issue as the Y plane above.
    glBindTexture(CVOpenGLESTextureGetTarget(chromaTexture), CVOpenGLESTextureGetName(chromaTexture));
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
} else if (bufferType == XDXPixelBufferTypeRGB) {
    // BGRA pixel buffer: a single full-resolution texture on unit 0
    // (GL_BGRA source layout, GL_RGBA internal format).
    glActiveTexture(GL_TEXTURE0);
    error = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                         videoTextureCache,
                                                         pixelBuffer,
                                                         NULL,
                                                         GL_TEXTURE_2D,
                                                         GL_RGBA,
                                                         frameWidth,
                                                         frameHeight,
                                                         GL_BGRA,
                                                         GL_UNSIGNED_BYTE,
                                                         0,
                                                         &renderTexture);
    if (error) {
        log4cplus_error(kModuleName, "Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", error);
    }else {
        _renderTexture = renderTexture;
    }
    // NOTE(review): same unguarded bind-on-error issue as above.
    glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
选择OpenGL程序
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
// NOTE(review): fragment from the middle of the render method —
// `frameBufferHandle`, `backingWidth/Height`, the program handles and the
// uniform locations are defined outside this excerpt.

// Target the on-screen framebuffer and clear it before drawing this frame.
glBindFramebuffer(GL_FRAMEBUFFER, frameBufferHandle);
glViewport(0, 0, backingWidth, backingHeight);
glClearColor(0.1f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);

// Bind the shader program matching the incoming pixel format. The program and
// its sampler/matrix uniforms are only re-issued when the format differs from
// the previous frame's.
// NOTE(review): assumes self.lastBufferType is updated to `bufferType`
// somewhere after this branch — verify; otherwise the uniforms are either set
// every frame or never.
if (bufferType == XDXPixelBufferTypeNV12) {
    if (self.lastBufferType != bufferType) {
        glUseProgram(nv12Program);
        // Texture units 0 and 1 carry the Y and UV planes respectively.
        glUniform1i(uniforms[UNIFORM_Y], 0);
        glUniform1i(uniforms[UNIFORM_UV], 1);
        // YUV->RGB conversion matrix; presumably chosen elsewhere from the
        // buffer's color attachments (BT.601/BT.709) — confirm.
        glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, preferredConversion);
    }
} else if (bufferType == XDXPixelBufferTypeRGB) {
    if (self.lastBufferType != bufferType) {
        glUseProgram(rgbProgram);
        // The BGRA texture sits on texture unit 0.
        glUniform1i(displayInputTextureUniform, 0);
    }
}
计算非全屏尺寸
渲染的画面可能是全屏,可能是留黑边.
static CGSize normalizedSamplingSize;
- 渲染画面
glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, quadVertexData);