
A Custom GLSurfaceView on Android

Posted on 2018-08-15 | Category: OpenGLES
Word count: 1,123 | Reading time ≈ 6 minutes

When we need to render the same scene onto several different Surfaces, the system GLSurfaceView no longer suffices, so we create our own EGL environment to drive the rendering.
Note: OpenGL as a whole is a state machine; changing its state changes how subsequent rendering behaves. The EGLContext holds all of that state, so by sharing an EGLContext the same scene can be rendered onto different Surfaces.

We will work out our own implementation by analyzing the system GLSurfaceView.

GLSurfaceView

First we need to set up the EGL environment (EglHelper); see the post "Configuring an EGL Environment on Android" below.

Once the EGL environment is in place, defining our own GLSurfaceView comes down to the following steps:

1. Extend SurfaceView and implement its SurfaceHolder.Callback
2. Define a GLThread class that runs the OpenGL drawing
3. Add methods for setting an external Surface and EGLContext
4. Expose the same API as the system GLSurfaceView

EglSurfaceView.java

package com.zzw.glsurfaceviewdemo;

import android.content.Context;
import android.util.AttributeSet;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

import java.lang.ref.WeakReference;

import javax.microedition.khronos.egl.EGLContext;

public class EglSurfaceView extends SurfaceView implements SurfaceHolder.Callback {

private Renderer mRenderer;
private EGLThread mEGLThread;
private Surface mSurface;
private EGLContext mEglContext;


public final static int RENDERMODE_WHEN_DIRTY = 0;
public final static int RENDERMODE_CONTINUOUSLY = 1;
private int mRenderMode = RENDERMODE_CONTINUOUSLY;



public EglSurfaceView(Context context) {
this(context, null);
}

public EglSurfaceView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}

public EglSurfaceView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init();
}

private void init() {
SurfaceHolder holder = getHolder();
holder.addCallback(this);
}


@Override
public void surfaceCreated(SurfaceHolder holder) {
if (mSurface == null) {
mSurface = holder.getSurface();
}
mEGLThread = new EGLThread(new WeakReference<>(this));
mEGLThread.isCreate = true;
mEGLThread.start();
}

@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
mEGLThread.width = width;
mEGLThread.height = height;
mEGLThread.isChange = true;
}

@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mEGLThread.onDestroy();
mEGLThread = null;
mSurface = null;
mEglContext = null;
}

public void setRenderer(Renderer mRenderer) {
this.mRenderer = mRenderer;
}

public void setRenderMode(int renderMode) {
if (mRenderer == null) {
throw new RuntimeException("setRenderer() must be called before setRenderMode()");
}
this.mRenderMode = renderMode;
}

public void requestRender() {
if (mEGLThread != null) {
mEGLThread.requestRender();
}
}

public void setSurfaceAndEglContext(Surface surface, EGLContext eglContext) {
this.mSurface = surface;
this.mEglContext = eglContext;
}

public EGLContext getEglContext() {
if (mEGLThread != null) {
return mEGLThread.getEglContext();
}
return null;
}


private static class EGLThread extends Thread {


EGLThread(WeakReference<EglSurfaceView> eGLSurfaceViewWeakRef) {
this.mEGLSurfaceViewWeakRef = eGLSurfaceViewWeakRef;
}

@Override
public void run() {
super.run();
try {
guardedRun();
} catch (Exception e) {
// fall thru and exit normally
}
}

private void guardedRun() throws InterruptedException {
isExit = false;
isStart = false;
object = new Object();
mEglHelper = new EglHelper();
mEglHelper.initEgl(mEGLSurfaceViewWeakRef.get().mSurface, mEGLSurfaceViewWeakRef.get().mEglContext);

while (true) {
if (isExit) {
//release EGL resources
release();
break;
}

if (isStart) {
if (mEGLSurfaceViewWeakRef.get().mRenderMode == RENDERMODE_WHEN_DIRTY) {
synchronized (object) {
object.wait();
}
} else if (mEGLSurfaceViewWeakRef.get().mRenderMode == RENDERMODE_CONTINUOUSLY) {
Thread.sleep(1000 / 60);
} else {
throw new IllegalArgumentException("renderMode");
}
}

onCreate();
onChange(width, height);
onDraw();
isStart = true;
}

}

private void onCreate() {
if (!isCreate || mEGLSurfaceViewWeakRef.get().mRenderer == null)
return;

isCreate = false;
mEGLSurfaceViewWeakRef.get().mRenderer.onSurfaceCreated();
}

private void onChange(int width, int height) {
if (!isChange || mEGLSurfaceViewWeakRef.get().mRenderer == null)
return;

isChange = false;
mEGLSurfaceViewWeakRef.get().mRenderer.onSurfaceChanged(width, height);
}

private void onDraw() {
if (mEGLSurfaceViewWeakRef.get().mRenderer == null)
return;

mEGLSurfaceViewWeakRef.get().mRenderer.onDrawFrame();
//on the first frame, draw one extra time; otherwise nothing shows up on screen
if (!isStart) {
mEGLSurfaceViewWeakRef.get().mRenderer.onDrawFrame();
}

mEglHelper.swapBuffers();
}

void requestRender() {
if (object != null) {
synchronized (object) {
object.notifyAll();
}
}
}

void onDestroy() {
isExit = true;
//release the lock so the waiting thread can exit
requestRender();
}

void release() {
if (mEglHelper != null) {
mEglHelper.destroyEgl();
mEglHelper = null;
object = null;
mEGLSurfaceViewWeakRef = null;
}
}

EGLContext getEglContext() {
if (mEglHelper != null) {
return mEglHelper.getEglContext();
}
return null;
}

private WeakReference<EglSurfaceView> mEGLSurfaceViewWeakRef;
private EglHelper mEglHelper;

private int width;
private int height;

private boolean isCreate;
private boolean isChange;
private boolean isStart;
private boolean isExit;

private Object object;
}


interface Renderer {
void onSurfaceCreated();

void onSurfaceChanged(int width, int height);

void onDrawFrame();

}
}

Usage is the same as for the standard GLSurfaceView:

package com.zzw.glsurfaceviewdemo;

import android.opengl.GLES20;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;

public class MainActivity extends AppCompatActivity implements EglSurfaceView.Renderer {

private EglSurfaceView eglSurfaceView;

@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);

//the view is created in code and passed to setContentView() below
eglSurfaceView = new EglSurfaceView(this);
eglSurfaceView.setRenderer(this);
// eglSurfaceView.setRenderMode(EglSurfaceView.RENDERMODE_CONTINUOUSLY);

eglSurfaceView.setRenderMode(EglSurfaceView.RENDERMODE_WHEN_DIRTY);
eglSurfaceView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
eglSurfaceView.requestRender();
}
});
setContentView(eglSurfaceView);
}

@Override
public void onSurfaceCreated() {
Log.e("zzz", "onSurfaceCreated");
}

@Override
public void onSurfaceChanged(int width, int height) {
Log.e("zzz", "onSurfaceChanged");
GLES20.glViewport(0, 0, width, height);
}

@Override
public void onDrawFrame() {
Log.e("zzz", "onDrawFrame");
GLES20.glClearColor(1.0f, 0.0f, 1.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
}
}
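Because EglSurfaceView exposes setSurfaceAndEglContext() and getEglContext(), the same scene can be rendered to a second Surface by sharing the first view's EGLContext. A minimal sketch under those assumptions (the shared context only becomes available after the first view's EGL thread has started, and the second view still has to be added to a layout so it gets its own Surface):

EglSurfaceView secondView = new EglSurfaceView(this);
secondView.setRenderer(this); // reuse the same Renderer, so both Surfaces show the same scene
// pass null for the Surface: surfaceCreated() will pick up the view's own Surface,
// while the EGLContext is shared with the first view
secondView.setSurfaceAndEglContext(null, eglSurfaceView.getEglContext());
secondView.setRenderMode(EglSurfaceView.RENDERMODE_WHEN_DIRTY);
// ... add secondView to the layout; call secondView.requestRender() whenever a new frame is needed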

Configuring an EGL Environment on Android

Posted on 2018-08-14 | Category: OpenGLES
Word count: 1,164 | Reading time ≈ 6 minutes

EGL

EGL is the interface between OpenGL ES and the platform's native window system. The EGL setup differs from platform to platform, while the OpenGL calls themselves stay the same; in other words, OpenGL's portability rests on the EGL interface.
My understanding: EGL is what builds the OpenGL environment on a given platform.

Why create our own EGL environment?

You might ask: Android already ships GLSurfaceView, which comes with an EGL environment, so why build one ourselves?
When we need to render the same scene onto several different Surfaces, the system GLSurfaceView no longer suffices, so we create our own EGL environment to drive the rendering.
Note: OpenGL as a whole is a state machine; changing its state changes how subsequent rendering behaves. The EGLContext holds all of that state, so by sharing an EGLContext the same scene can be rendered onto different Surfaces.

We model the Android EGL setup on the GLSurfaceView source code. In that source, calling setRenderer() starts a GLThread; when the thread starts it creates an EglHelper to configure the EGL environment, and then a while(true) loop uses various flags to decide when to run the EGL setup and when to call the Renderer's onSurfaceCreated, onSurfaceChanged, and onDrawFrame methods.

From the source we can see that our EGL setup should follow GLSurfaceView.EglHelper, and it breaks down into the following steps:

1. Get the EGL instance
2. Get the default display device (the window)
3. Initialize the default display
4. Set the display attributes
5. Get a configuration matching those attributes from the system
6. Create the EGLContext
7. Create the rendering surface
8. Bind the EGLContext and the surface to the display
9. Swap buffers to present the rendered scene

The final code is shown below.
EglHelper.java


package com.zzw.glsurfaceviewdemo;

import android.opengl.EGL14;
import android.view.Surface;

import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;

public class EglHelper {
private static final String TAG = "EglHelper";
private EGL10 mEgl;
private EGLDisplay mEglDisplay;
private EGLContext mEglContext;
private EGLSurface mEglSurface;


public void initEgl(Surface surface, EGLContext eglContext) {
//1. Get the EGL instance
mEgl = (EGL10) EGLContext.getEGL();

//2. Get the default display device (the window)
mEglDisplay = mEgl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (mEglDisplay == EGL10.EGL_NO_DISPLAY) {
throw new RuntimeException("eglGetDisplay failed");
}

//3. Initialize the default display
int[] version = new int[2];
if (!mEgl.eglInitialize(mEglDisplay, version)) {
throw new RuntimeException("eglInitialize failed");
}

//4. Set the display attributes
int[] attrib_list = new int[]{
EGL10.EGL_RED_SIZE, mRedSize,
EGL10.EGL_GREEN_SIZE, mGreenSize,
EGL10.EGL_BLUE_SIZE, mBlueSize,
EGL10.EGL_ALPHA_SIZE, mAlphaSize,
EGL10.EGL_DEPTH_SIZE, mDepthSize,
EGL10.EGL_STENCIL_SIZE, mStencilSize,
EGL10.EGL_RENDERABLE_TYPE, mRenderType,//EGL_OPENGL_ES2_BIT: request an OpenGL ES 2.0-capable config
EGL10.EGL_NONE};


int[] num_config = new int[1];
if (!mEgl.eglChooseConfig(mEglDisplay, attrib_list, null, 1,
num_config)) {
throw new IllegalArgumentException("eglChooseConfig failed");
}
int numConfigs = num_config[0];
if (numConfigs <= 0) {
throw new IllegalArgumentException(
"No configs match configSpec");
}

//5. Get a configuration matching those attributes from the system
EGLConfig[] configs = new EGLConfig[numConfigs];
if (!mEgl.eglChooseConfig(mEglDisplay, attrib_list, configs, numConfigs,
num_config)) {
throw new IllegalArgumentException("eglChooseConfig#2 failed");
}
EGLConfig eglConfig = chooseConfig(mEgl, mEglDisplay, configs);
if (eglConfig == null) {
eglConfig = configs[0];
}

//6. Create the EGLContext
int[] contextAttr = new int[]{
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL10.EGL_NONE
};
if (eglContext == null) {
mEglContext = mEgl.eglCreateContext(mEglDisplay, eglConfig, EGL10.EGL_NO_CONTEXT, contextAttr);
} else {
mEglContext = mEgl.eglCreateContext(mEglDisplay, eglConfig, eglContext, contextAttr);
}

//7. Create the rendering surface
mEglSurface = mEgl.eglCreateWindowSurface(mEglDisplay, eglConfig, surface, null);

//8. Bind the EGLContext and the surface to the display
if (!mEgl.eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
throw new RuntimeException("eglMakeCurrent fail");
}
}


//9. Swap buffers to present the rendered scene
public boolean swapBuffers() {
if (mEgl != null) {
return mEgl.eglSwapBuffers(mEglDisplay, mEglSurface);
} else {
throw new RuntimeException("egl is null");
}
}

public void destroyEgl() {
if (mEgl != null) {
if (mEglSurface != null && mEglSurface != EGL10.EGL_NO_SURFACE) {
mEgl.eglMakeCurrent(mEglDisplay, EGL10.EGL_NO_SURFACE,
EGL10.EGL_NO_SURFACE,
EGL10.EGL_NO_CONTEXT);

mEgl.eglDestroySurface(mEglDisplay, mEglSurface);
mEglSurface = null;
}


if (mEglContext != null) {
mEgl.eglDestroyContext(mEglDisplay, mEglContext);
mEglContext = null;
}


if (mEglDisplay != null) {
mEgl.eglTerminate(mEglDisplay);
mEglDisplay = null;
}

mEgl = null;
}


}


public EGLContext getEglContext() {
return mEglContext;
}

private final int mRedSize = 8;
private final int mGreenSize = 8;
private final int mBlueSize = 8;
private final int mAlphaSize = 8;
private final int mDepthSize = 8;
private final int mStencilSize = 8;
private final int mRenderType = 4;

private EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
for (EGLConfig config : configs) {
int d = findConfigAttrib(egl, display, config,
EGL10.EGL_DEPTH_SIZE, 0);
int s = findConfigAttrib(egl, display, config,
EGL10.EGL_STENCIL_SIZE, 0);
if ((d >= mDepthSize) && (s >= mStencilSize)) {
int r = findConfigAttrib(egl, display, config,
EGL10.EGL_RED_SIZE, 0);
int g = findConfigAttrib(egl, display, config,
EGL10.EGL_GREEN_SIZE, 0);
int b = findConfigAttrib(egl, display, config,
EGL10.EGL_BLUE_SIZE, 0);
int a = findConfigAttrib(egl, display, config,
EGL10.EGL_ALPHA_SIZE, 0);
if ((r == mRedSize) && (g == mGreenSize)
&& (b == mBlueSize) && (a == mAlphaSize)) {
return config;
}
}
}
return null;
}

private int findConfigAttrib(EGL10 egl, EGLDisplay display,
EGLConfig config, int attribute, int defaultValue) {
int[] value = new int[1];
if (egl.eglGetConfigAttrib(display, config, attribute, value)) {
return value[0];
}
return defaultValue;
}
}
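A minimal usage sketch of EglHelper, assuming it is driven from a dedicated render thread that owns a valid Surface (pass null as the shared context to create a standalone one):

EglHelper eglHelper = new EglHelper();
eglHelper.initEgl(surface, null); // null -> create a brand-new EGLContext
// ... issue GLES20 draw calls for one frame on this thread ...
eglHelper.swapBuffers(); // present the frame
// when the Surface goes away:
eglHelper.destroyEgl();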

Muxing audio and video with FFmpeg

Posted on 2018-07-27 | Category: ffmpeg
Word count: 1,371 | Reading time ≈ 8 minutes

Principle: take the video stream from the video file and the audio stream from the audio file, then mux them packet by packet into a new file according to their timestamps.

Result: the audio file and the video file are merged into a single file whose duration is the shorter of the two.
The source code follows; see the comments for details.



#include <jni.h>
#include <android/log.h>

extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
};


#define LOG_TAG "JNI_TAG"
#define LOGD(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)


extern "C"
JNIEXPORT void JNICALL
Java_com_zzw_ffmpegdemo_FFmpegHelper_megre(JNIEnv *env, jobject instance, jstring musicPath_,
jstring videoPath_,jstring outPath_) {


AVOutputFormat *ofmt = NULL;
//Input AVFormatContext and Output AVFormatContext
AVFormatContext *ifmt_ctx_v = NULL, *ifmt_ctx_a = NULL,*ofmt_ctx = NULL;
int ret, i;
int videoindex_v=-1,videoindex_out=-1;
int audioindex_a=-1,audioindex_out=-1;
int frame_index=0;
int64_t cur_pts_v=0,cur_pts_a=0;


const char *musicPath = env->GetStringUTFChars(musicPath_, 0);
const char *videoPath = env->GetStringUTFChars(videoPath_, 0);
const char *outPath = env->GetStringUTFChars(outPath_, 0);

av_register_all();
//--------------------------------input init start---------------------------------------------
if ((ret = avformat_open_input(&ifmt_ctx_v, videoPath, 0, 0)) < 0) {//open the input video file
LOGD( "Could not open input video file.");
goto end;
}
if ((ret = avformat_find_stream_info(ifmt_ctx_v, 0)) < 0) {//read the video stream info
LOGD( "Failed to retrieve input video stream information");
goto end;
}
if ((ret = avformat_open_input(&ifmt_ctx_a, musicPath, 0, 0)) < 0) {//open the input audio file
LOGD( "Could not open input audio file.");
goto end;
}
if ((ret = avformat_find_stream_info(ifmt_ctx_a, 0)) < 0) {//read the audio stream info
LOGD( "Failed to retrieve input audio stream information");
goto end;
}

// LOGD("===========Input Information==========\n");
// av_dump_format(ifmt_ctx_v, 0, videoPath, 0);
// av_dump_format(ifmt_ctx_a, 0, musicPath, 0);
// LOGD("======================================\n");

//--------------------------------input init end---------------------------------------------

//--------------------------------out init start---------------------------------------------
//allocate the AVFormatContext for the output file
avformat_alloc_output_context2(&ofmt_ctx,NULL,NULL, outPath);
if(!ofmt_ctx){
LOGD( "Could not create output context\n");
ret = AVERROR_UNKNOWN;
goto end;
}
ofmt = ofmt_ctx->oformat;
//--------------------------------out init end-----------------------------------------------


//--------------------------------gather stream info-----------------------------------------------
//create a video out_stream in the output from the input video AVStream
for (i = 0; i < ifmt_ctx_v->nb_streams; i++) {
if(ifmt_ctx_v->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_VIDEO){
AVStream* in_stream = ifmt_ctx_v->streams[i];
AVCodec *dec = avcodec_find_decoder(in_stream->codecpar->codec_id);
if(!dec){
LOGD( "Could not find decoder\n");
ret = AVERROR_UNKNOWN;
goto end;
}
AVStream* out_stream = avformat_new_stream(ofmt_ctx,dec);
videoindex_v =i;
if(!out_stream){
LOGD( "Failed allocating output stream\n");
ret = AVERROR_UNKNOWN;
goto end;
}
videoindex_out=out_stream->index;

AVCodecContext* avCodecContext = avcodec_alloc_context3(dec);
if ((ret =avcodec_parameters_to_context(avCodecContext, in_stream->codecpar)) < 0) {
avcodec_free_context(&avCodecContext);
avCodecContext = NULL;
LOGD("can not fill decodecctx");
goto end;
}
avCodecContext->codec_tag = 0;
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) {
avCodecContext->flags |= CODEC_FLAG_GLOBAL_HEADER;
}
ret = avcodec_parameters_from_context(out_stream->codecpar, avCodecContext);
if (ret < 0) {
printf("Failed to copy context input to output stream codec context\n");
goto end;
}
break;
}
}
//create an audio out_stream in the output from the input audio AVStream
for (i = 0; i < ifmt_ctx_a->nb_streams; i++) {
if(ifmt_ctx_a->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_AUDIO){
AVStream* in_stream = ifmt_ctx_a->streams[i];
AVCodec *dec = avcodec_find_decoder(in_stream->codecpar->codec_id);
if(!dec){
LOGD( "Could not find decoder\n");
ret = AVERROR_UNKNOWN;
goto end;
}
AVStream* out_stream = avformat_new_stream(ofmt_ctx,dec);
audioindex_a =i;
if(!out_stream){
LOGD( "Failed allocating output stream\n");
ret = AVERROR_UNKNOWN;
goto end;
}
audioindex_out=out_stream->index;

AVCodecContext* avCodecContext = avcodec_alloc_context3(dec);
if ((ret =avcodec_parameters_to_context(avCodecContext, in_stream->codecpar)) < 0) {
avcodec_free_context(&avCodecContext);
avCodecContext = NULL;
LOGD("can not fill decodecctx");
goto end;
}
avCodecContext->codec_tag = 0;
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) {
avCodecContext->flags |= CODEC_FLAG_GLOBAL_HEADER;
}
ret = avcodec_parameters_from_context(out_stream->codecpar, avCodecContext);
if (ret < 0) {
printf("Failed to copy context input to output stream codec context\n");
goto end;
}
break;
}
}

// LOGD("==========Output Information==========\n");
// av_dump_format(ofmt_ctx, 0, outPath, 1);
// LOGD("======================================\n");


// -------------------------------write the merged file-------------------------------------------

// Open output file
if (!(ofmt->flags & AVFMT_NOFILE)) {
ret = avio_open(&ofmt_ctx->pb, outPath, AVIO_FLAG_WRITE);
if (ret < 0) {
LOGD("Could not open output file %s ", outPath);
goto end;
}
}

// Write file header
ret = avformat_write_header(ofmt_ctx, NULL);
if (ret < 0) {
LOGD("Error occurred when opening output file\n");
goto end;
}


while (1) {
AVFormatContext *ifmt_ctx;
int stream_index=0;
AVStream *in_stream, *out_stream;
AVPacket *pkt = av_packet_alloc();

//Get an AVPacket. av_compare_ts compares the two streams' current timestamps and
//decides whether the next packet to write should come from the video or the audio file.
//If video is not ahead of audio, write a video packet next.
if(av_compare_ts(cur_pts_v,
ifmt_ctx_v->streams[videoindex_v]->time_base,
cur_pts_a,
ifmt_ctx_a->streams[audioindex_a]->time_base) <= 0){
ifmt_ctx=ifmt_ctx_v;
stream_index=videoindex_out;

if(av_read_frame(ifmt_ctx, pkt) >= 0){
do{
if(pkt->stream_index==videoindex_v){
in_stream = ifmt_ctx->streams[pkt->stream_index];
out_stream = ofmt_ctx->streams[stream_index];
//FIX: No PTS (e.g. raw H.264). A raw H.264 stream carries no PTS, so one must be written manually
//Simple Write PTS
if(pkt->pts==AV_NOPTS_VALUE){
//Write PTS
AVRational time_base1=in_stream->time_base;
//Duration between 2 frames (us)
int64_t calc_duration=(double)AV_TIME_BASE/av_q2d(in_stream->r_frame_rate);
//Parameters
pkt->pts=(double)(frame_index*calc_duration)/(double)(av_q2d(time_base1)*AV_TIME_BASE);
pkt->dts=pkt->pts;
pkt->duration=(double)calc_duration/(double)(av_q2d(time_base1)*AV_TIME_BASE);
frame_index++;
}

cur_pts_v=pkt->pts;
break;
}
}while(av_read_frame(ifmt_ctx, pkt) >= 0);
}else{
av_packet_free(&pkt);
av_free(pkt);
break;
}
}else{
ifmt_ctx=ifmt_ctx_a;
stream_index=audioindex_out;
if(av_read_frame(ifmt_ctx, pkt) >= 0){
do{
if(pkt->stream_index==audioindex_a){
in_stream = ifmt_ctx->streams[pkt->stream_index];
out_stream = ofmt_ctx->streams[stream_index];
//FIX:No PTS
//Simple Write PTS
if(pkt->pts==AV_NOPTS_VALUE){
//Write PTS
AVRational time_base1=in_stream->time_base;
//Duration between 2 frames (us)
int64_t calc_duration=(double)AV_TIME_BASE/av_q2d(in_stream->r_frame_rate);
//Parameters
pkt->pts=(double)(frame_index*calc_duration)/(double)(av_q2d(time_base1)*AV_TIME_BASE);
pkt->dts=pkt->pts;
pkt->duration=(double)calc_duration/(double)(av_q2d(time_base1)*AV_TIME_BASE);
frame_index++;
}
cur_pts_a=pkt->pts;
break;
}
}while(av_read_frame(ifmt_ctx, pkt) >= 0);
}else{
av_packet_free(&pkt);
av_free(pkt);
break;
}
}

//Convert PTS/DTS
pkt->pts = av_rescale_q_rnd(pkt->pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
pkt->dts = av_rescale_q_rnd(pkt->dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
pkt->duration = av_rescale_q(pkt->duration, in_stream->time_base, out_stream->time_base);
pkt->pos = -1;
pkt->stream_index=stream_index;

LOGD("Write 1 Packet. size:%5d\tpts:%lld\n",pkt->size,pkt->pts);
//Write the AVPacket (audio or video elementary stream)
if (av_interleaved_write_frame(ofmt_ctx, pkt) < 0) {
LOGD( "Error muxing packet\n");
av_packet_free(&pkt);
av_free(pkt);
break;
}
av_packet_free(&pkt);
av_free(pkt);
}
//Write file trailer
av_write_trailer(ofmt_ctx);

end:
avformat_close_input(&ifmt_ctx_v);
avformat_close_input(&ifmt_ctx_a);
/* close output */
if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
avio_close(ofmt_ctx->pb);
avformat_free_context(ofmt_ctx);
env->ReleaseStringUTFChars(musicPath_, musicPath);
env->ReleaseStringUTFChars(videoPath_, videoPath);
env->ReleaseStringUTFChars(outPath_, outPath);
}
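The JNI symbol above implies the following Java-side declaration in com.zzw.ffmpegdemo.FFmpegHelper; the native library name is an assumption and has to match your CMake/ndk-build configuration:

package com.zzw.ffmpegdemo;

public class FFmpegHelper {
    static {
        System.loadLibrary("ffmpegdemo"); // assumed library name
    }

    // matches Java_com_zzw_ffmpegdemo_FFmpegHelper_megre in the C++ code above
    public native void megre(String musicPath, String videoPath, String outPath);
}

Calling it, e.g. new FFmpegHelper().megre(musicPath, videoPath, outPath), should be done off the main thread, because the muxing loop blocks until it finishes.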

Adding watermarks and filter effects with FFmpeg

Posted on 2018-07-27 | Category: ffmpeg
Word count: 1,271 | Reading time ≈ 7 minutes

Main APIs used

avfilter_register_all(): register all AVFilters.
avfilter_graph_alloc(): allocate memory for a filter graph.
avfilter_graph_create_filter(): create a filter and add it to the filter graph.
avfilter_graph_parse_ptr(): add a graph described by a string to the filter graph.
avfilter_graph_config(): check and configure the filter graph.
av_buffersrc_add_frame(): push an AVFrame into the filter graph.
av_buffersink_get_frame(): pull a filtered AVFrame out of the filter graph.

Source code:


#include <jni.h>
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>

extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"

//-----AVfilter-----
#include <libavfilter/avfiltergraph.h>
#include <libavfilter/buffersrc.h>
#include <libavfilter/buffersink.h>
//-----AVfilter-----
};


#define LOG_TAG "JNI_TAG"
#define LOGD(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)


const char *filters_descr = "lutyuv='u=128:v=128'";
//const char *filters_descr = "hflip";
//const char *filters_descr = "hue='h=60:s=-3'";
//const char *filters_descr = "crop=2/3*in_w:2/3*in_h";
//const char *filters_descr = "drawbox=x=200:y=200:w=300:h=300:color=pink@0.5";
//const char *filters_descr = "movie=/storage/emulated/0/shuiyin.png[wm];[in][wm]overlay=5:5[out]";
//const char *filters_descr="drawgrid=width=100:height=100:thickness=4:color=pink@0.9";


int getCodecContext(AVCodecParameters *codecpar, AVCodecContext **avCodecContext) {
AVCodec *dec = avcodec_find_decoder(codecpar->codec_id);
if (!dec) {
LOGD("can not find decoder");
return -1;
}

*avCodecContext = avcodec_alloc_context3(dec);
if (!*avCodecContext) {
LOGD("can not alloc new decodecctx");
return -1;
}

if (avcodec_parameters_to_context(*avCodecContext, codecpar) < 0) {
avcodec_free_context(avCodecContext);
*avCodecContext = NULL;
LOGD("can not fill decodecctx");
return -1;
}

if (avcodec_open2(*avCodecContext, dec, 0) != 0) {
LOGD("cant not open audio strames");
avcodec_free_context(avCodecContext);
*avCodecContext = NULL;
return -1;
}
return 0;
}


extern "C"
JNIEXPORT void JNICALL
Java_com_zzw_ffmpegdemo_FFmpegHelper_play(JNIEnv *env, jobject instance, jstring url_,
jobject surface) {
const char *url = env->GetStringUTFChars(url_, 0);

AVFormatContext *pFormatCtx = NULL;
AVCodecContext *pCodecCtx = NULL;

AVFilterContext *buffersink_ctx = NULL;
AVFilterContext *buffersrc_ctx = NULL;
AVFilterGraph *filter_graph = NULL;

//-----------------------------AVCodecContext init start----------------------------
av_register_all();
avfilter_register_all();//
pFormatCtx = avformat_alloc_context();

// Open video file
if (avformat_open_input(&pFormatCtx, url, NULL, NULL) != 0) {

LOGD("Couldn't open url:%s\n", url);
return; // Couldn't open file
}

// Retrieve stream information
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
LOGD("Couldn't find stream information.");
return;
}

// Find the first video stream
int videoStream = -1, i;
for (i = 0; i < pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO
&& videoStream < 0) {
videoStream = i;
}
}
if (videoStream == -1) {
LOGD("Didn't find a video stream.");
return; // Didn't find a video stream
}

if (getCodecContext(pFormatCtx->streams[videoStream]->codecpar, &pCodecCtx) != 0) {
LOGD("Didn't get CodecContext.");
return;
}
//-----------------------------AVCodecContext init end-------------------------------

//------------------------------filter init start------------------------------------
char args[512];
AVFilter *buffersrc = avfilter_get_by_name("buffer");
AVFilter *buffersink = avfilter_get_by_name("buffersink");//newer FFmpeg versions require "buffersink"
AVFilterInOut *outputs = avfilter_inout_alloc();
AVFilterInOut *inputs = avfilter_inout_alloc();

enum AVPixelFormat pix_fmts[] = {AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE};

filter_graph = avfilter_graph_alloc();

/* buffer video source: the decoded frames from the decoder will be inserted here. */
snprintf(args, sizeof(args),
"video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
pFormatCtx->streams[videoStream]->time_base.num,
pFormatCtx->streams[videoStream]->time_base.den,
pCodecCtx->sample_aspect_ratio.num, pCodecCtx->sample_aspect_ratio.den);

if (avfilter_graph_create_filter(&buffersrc_ctx, buffersrc, "in",
args, NULL, filter_graph) < 0) {
LOGD("Cannot create buffer source\n");
return;
}
AVBufferSinkParams *buffersink_params = av_buffersink_params_alloc();
buffersink_params->pixel_fmts = pix_fmts;
if (avfilter_graph_create_filter(&buffersink_ctx, buffersink, "out",
NULL, buffersink_params, filter_graph) < 0) {
LOGD("Cannot create buffer sink\n");
return;
}
av_free(buffersink_params);

/* Endpoints for the filter graph. */
outputs->name = av_strdup("in");
outputs->filter_ctx = buffersrc_ctx;
outputs->pad_idx = 0;
outputs->next = NULL;

inputs->name = av_strdup("out");
inputs->filter_ctx = buffersink_ctx;
inputs->pad_idx = 0;
inputs->next = NULL;

if ((avfilter_graph_parse_ptr(filter_graph, filters_descr,
&inputs, &outputs, NULL)) < 0) {
LOGD("Cannot avfilter_graph_parse_ptr\n");
return;
}

if ((avfilter_graph_config(filter_graph, NULL)) < 0) {
LOGD("Cannot avfilter_graph_config\n");
return;
}
//------------------------------filter init end------------------------------------

//------------------------------window init start-----------------------------------
// get the native window from the Surface
ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);

// video dimensions
int videoWidth = pCodecCtx->width;
int videoHeight = pCodecCtx->height;

// set the native window's buffer size; it scales automatically
ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight,
WINDOW_FORMAT_RGBA_8888);
ANativeWindow_Buffer windowBuffer;
//------------------------------window init end-----------------------------------


//------------------------------get data-----------------------------------


// frame used for rendering
AVFrame *pFrameRGBA = av_frame_alloc();

// Determine required buffer size and allocate buffer
// the data in this buffer is what gets rendered, in RGBA format
int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height,
1);
uint8_t *buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
pCodecCtx->width, pCodecCtx->height, 1);

// decoded frames are not in RGBA, so they must be converted before rendering
SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
pCodecCtx->height,
pCodecCtx->pix_fmt,
pCodecCtx->width,
pCodecCtx->height,
AV_PIX_FMT_RGBA,
SWS_BILINEAR,
NULL,
NULL,
NULL);

AVPacket *packet = av_packet_alloc();
int count = 0;
while (av_read_frame(pFormatCtx, packet) == 0) {
// Is this a packet from the video stream?
if (packet->stream_index == videoStream) {
// Decode video frame
if (avcodec_send_packet(pCodecCtx, packet) != 0) {
break;
}

AVFrame *pFrame = av_frame_alloc();

while (avcodec_receive_frame(pCodecCtx, pFrame) == 0) {
// lock native window buffer
ANativeWindow_lock(nativeWindow, &windowBuffer, 0);

//for AVfilter start
pFrame->pts = av_frame_get_best_effort_timestamp(pFrame);
//* push the decoded frame into the filtergraph
if (av_buffersrc_add_frame(buffersrc_ctx, pFrame) == 0) {
av_buffersink_get_frame(buffersink_ctx, pFrame);
} else{
LOGD("Could not av_buffersrc_add_frame");
}
// convert the decoded frame to RGBA
sws_scale(sws_ctx, (uint8_t const *const *) pFrame->data,
pFrame->linesize, 0, pCodecCtx->height,
pFrameRGBA->data, pFrameRGBA->linesize);

// get the strides
uint8_t *dst = (uint8_t *) windowBuffer.bits;
int dstStride = windowBuffer.stride * 4;
uint8_t *src = (pFrameRGBA->data[0]);
int srcStride = pFrameRGBA->linesize[0];

// the window stride differs from the frame stride, so copy line by line
int h;
for (h = 0; h < videoHeight; h++) {
memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
}
ANativeWindow_unlockAndPost(nativeWindow);

count++;
LOGD("解码渲染%d帧", count);
}
av_frame_free(&pFrame);
av_free(pFrame);
}
}

ANativeWindow_release(nativeWindow);
av_packet_free(&packet);
sws_freeContext(sws_ctx);

avfilter_inout_free(&outputs);
av_free(outputs);
avfilter_inout_free(&inputs);
av_free(inputs);

av_free(buffer);
av_frame_free(&pFrameRGBA);
av_free(pFrameRGBA);

avfilter_graph_free(&filter_graph); //for avfilter
// Close the codecs
avcodec_close(pCodecCtx);
avcodec_free_context(&pCodecCtx);
pCodecCtx = NULL;

// Close the video file
avformat_close_input(&pFormatCtx);
avformat_free_context(pFormatCtx);
pFormatCtx = NULL;

env->ReleaseStringUTFChars(url_, url);
}
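On the Java side this corresponds to one more native method on the same FFmpegHelper class; the name follows from the JNI symbol Java_com_zzw_ffmpegdemo_FFmpegHelper_play above. A sketch of the declaration and a typical (assumed) call site:

// in com.zzw.ffmpegdemo.FFmpegHelper
public native void play(String url, Surface surface);

// e.g. from SurfaceHolder.Callback#surfaceCreated, off the main thread:
// new Thread(() -> fFmpegHelper.play("/sdcard/input.mp4", holder.getSurface())).start();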

More filter effects: http://www.ffmpeg.org/ffmpeg-filters.html

Rendering MediaCodec-decoded video with OpenGL ES on Android

Posted on 2018-07-16 | Category: OpenGLES
Word count: 764 | Reading time ≈ 5 minutes

1. Generate a texture with OpenGL
2. Bind the texture to a SurfaceTexture
3. Create a Surface from that SurfaceTexture
4. Let MediaCodec decode video into that Surface, and the frames appear on screen (a minimal MediaCodec sketch follows this list)
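A minimal sketch of step 4, assuming an H.264 track; the Surface is the one handed out by the OnSurfaceCreateListener shown further below, and the imports (android.media.MediaCodec, android.media.MediaFormat, android.view.Surface, java.io.IOException) are implied:

// Sketch only: configure a decoder so its output goes straight into the Surface
// that wraps the OpenGL texture.
private MediaCodec createDecoder(Surface surface, int width, int height) throws IOException {
    MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
    MediaCodec codec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
    codec.configure(format, surface, null, 0); // decoded frames are rendered to the Surface
    codec.start();
    // feed input buffers from a MediaExtractor, then call
    // releaseOutputBuffer(index, true) so each decoded frame is sent to the Surface
    return codec;
}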

  • Writing the shaders

vertex_shader.glsl


attribute vec4 av_Position;
attribute vec2 af_Position;
varying vec2 v_texPosition;
void main() {
v_texPosition = af_Position;
gl_Position = av_Position;
}

fragment_mediacodec.glsl

#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 v_texPosition;
//samplerExternalOES is required for rendering video frames
uniform samplerExternalOES sTexture;

void main() {
gl_FragColor=texture2D(sTexture, v_texPosition);
}

VideoRender.java

import android.content.Context;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.view.Surface;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class VideoRender implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {

private Context context;

private final float[] vertexData = {
-1f, -1f,
1f, -1f,
-1f, 1f,
1f, 1f

};

private final float[] textureData = {
0f, 1f,
1f, 1f,
0f, 0f,
1f, 0f
};

private FloatBuffer vertexBuffer;
private FloatBuffer textureBuffer;


//mediacodec
private int program_mediacodec;
private int avPosition_mediacodec;
private int afPosition_mediacodec;
private int samplerOES_mediacodec;
private int textureId_mediacodec;
private SurfaceTexture surfaceTexture;
private Surface surface;

private OnSurfaceCreateListener onSurfaceCreateListener;
private OnRenderListener onRenderListener;

public VideoRender(Context context) {
this.context = context;
vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(vertexData);
vertexBuffer.position(0);

textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(textureData);
textureBuffer.position(0);
}


public void setOnSurfaceCreateListener(OnSurfaceCreateListener onSurfaceCreateListener) {
this.onSurfaceCreateListener = onSurfaceCreateListener;
}

public void setOnRenderListener(OnRenderListener onRenderListener) {
this.onRenderListener = onRenderListener;
}

@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
initRenderMediacodec();
}

@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
}

@Override
public void onDrawFrame(GL10 gl) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
renderMediacodec();
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
}

@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
if (onRenderListener != null) {
//forward onFrameAvailable to the GLSurfaceView, which calls requestRender() and thereby triggers onDrawFrame()
onRenderListener.onRender();
}
}

private void initRenderMediacodec() {
String vertexSource = ShaderUtil.readRawTxt(context, R.raw.vertex_shader);
String fragmentSource = ShaderUtil.readRawTxt(context, R.raw.fragment_mediacodec);
program_mediacodec = ShaderUtil.createProgram(vertexSource, fragmentSource);

avPosition_mediacodec = GLES20.glGetAttribLocation(program_mediacodec, "av_Position");
afPosition_mediacodec = GLES20.glGetAttribLocation(program_mediacodec, "af_Position");
samplerOES_mediacodec = GLES20.glGetUniformLocation(program_mediacodec, "sTexture");

int[] textureids = new int[1];
GLES20.glGenTextures(1, textureids, 0);
textureId_mediacodec = textureids[0];

GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

surfaceTexture = new SurfaceTexture(textureId_mediacodec);
surface = new Surface(surfaceTexture);
surfaceTexture.setOnFrameAvailableListener(this);

if (onSurfaceCreateListener != null) {
//hand the Surface out so MediaCodec can be configured to render into it
onSurfaceCreateListener.onSurfaceCreate(surface);
}
}

private void renderMediacodec() {
surfaceTexture.updateTexImage();
GLES20.glUseProgram(program_mediacodec);

GLES20.glEnableVertexAttribArray(avPosition_mediacodec);
GLES20.glVertexAttribPointer(avPosition_mediacodec, 2, GLES20.GL_FLOAT, false, 8, vertexBuffer);

GLES20.glEnableVertexAttribArray(afPosition_mediacodec);
GLES20.glVertexAttribPointer(afPosition_mediacodec, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);

GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId_mediacodec);
GLES20.glUniform1i(samplerOES_mediacodec, 0);
}


public interface OnSurfaceCreateListener {
void onSurfaceCreate(Surface surface);
}

public interface OnRenderListener {
void onRender();
}
}

VideoGLSurfaceView.java

import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;

public class VideoGLSurfaceView extends GLSurfaceView {

private VideoRender render;

public VideoGLSurfaceView(Context context) {
this(context, null);
}

public VideoGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
setEGLContextClientVersion(2);
render = new VideoRender(context);
setRenderer(render);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);

render.setOnRenderListener(new VideoRender.OnRenderListener() {
@Override
public void onRender() {
requestRender();
}
});
}

public VideoRender getWlRender() {
return render;
}
}

Drawing YUV420 textures with OpenGL ES on Android

Posted on 2018-07-16 | Category: OpenGLES
Word count: 1,738 | Reading time ≈ 10 minutes
  1. Put the shader code under res/raw

vertex_shader.glsl

attribute vec4 av_Position;//vertex position
attribute vec2 af_Position;//texture coordinate
varying vec2 v_texPo;//texture coordinate, shared with the fragment shader
void main() {
v_texPo = af_Position;
gl_Position = av_Position;
}

fragment_shader.glsl

precision mediump float;//float precision
varying vec2 v_texPo;//texture coordinate, received from the vertex shader
uniform sampler2D sampler_y;//Y-plane texture
uniform sampler2D sampler_u;//U-plane texture
uniform sampler2D sampler_v;//V-plane texture

void main() {
//yuv420->rgb
float y,u,v;
y = texture2D(sampler_y,v_texPo).r;
u = texture2D(sampler_u,v_texPo).r- 0.5;
v = texture2D(sampler_v,v_texPo).r- 0.5;
vec3 rgb;
rgb.r = y + 1.403 * v;
rgb.g = y - 0.344 * u - 0.714 * v;
rgb.b = y + 1.770 * u;

gl_FragColor=vec4(rgb,1);
}

OpenGL ES displays RGB data, so the YUV input has to be converted to RGB. The conversion is done inside the OpenGL shader, which runs on the GPU and is therefore fast.

  2. Writing the data

YUV420Texture.java


import android.content.Context;
import android.opengl.GLES20;


import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

public class YUV420Texture {

private Context context;

//顶点坐标
static float vertexData[] = { // in counterclockwise order:
-1f, -1f, 0.0f, // bottom left
1f, -1f, 0.0f, // bottom right
-1f, 1f, 0.0f, // top left
1f, 1f, 0.0f, // top right
};

//纹理坐标
static float textureData[] = { // in counterclockwise order:
0f, 1f, 0.0f, // bottom left
1f, 1f, 0.0f, // bottom right
0f, 0f, 0.0f, // top left
1f, 0f, 0.0f, // top right
};

//每一次取点的时候取几个点
static final int COORDS_PER_VERTEX = 3;

private final int vertexCount = vertexData.length / COORDS_PER_VERTEX;
//每一次取的总的点 大小
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex

//位置
private FloatBuffer vertexBuffer;
//纹理
private FloatBuffer textureBuffer;

private int program;

//顶点位置
private int avPosition;
//纹理位置
private int afPosition;

//shader yuv变量
private int sampler_y;
private int sampler_u;
private int sampler_v;
private int[] textureId_yuv;


//YUV数据
private int width_yuv;
private int height_yuv;
private ByteBuffer y;
private ByteBuffer u;
private ByteBuffer v;


public YUV420Texture(Context context) {
this.context = context;

vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(vertexData);
vertexBuffer.position(0);

textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(textureData);
textureBuffer.position(0);
}

public void initYUV() {
String vertexSource = ShaderUtil.readRawTxt(context, R.raw.vertex_shader);
String fragmentSource = ShaderUtil.readRawTxt(context, R.raw.fragment_shader);
program = ShaderUtil.createProgram(vertexSource, fragmentSource);
if (program > 0) {
//获取顶点坐标字段
avPosition = GLES20.glGetAttribLocation(program, "av_Position");
//获取纹理坐标字段
afPosition = GLES20.glGetAttribLocation(program, "af_Position");
//获取yuv字段
sampler_y = GLES20.glGetUniformLocation(program, "sampler_y");
sampler_u = GLES20.glGetUniformLocation(program, "sampler_u");
sampler_v = GLES20.glGetUniformLocation(program, "sampler_v");

textureId_yuv = new int[3];
//创建3个纹理
GLES20.glGenTextures(3, textureId_yuv, 0);

//绑定纹理
for (int id : textureId_yuv) {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, id);
//环绕(超出纹理坐标范围) (s==x t==y GL_REPEAT 重复)
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
//过滤(纹理像素映射到坐标点) (缩小、放大:GL_LINEAR线性)
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
}

}

}

public void setYUVData(int width, int height, byte[] y, byte[] u, byte[] v) {
this.width_yuv = width;
this.height_yuv = height;
this.y = ByteBuffer.wrap(y);
this.u = ByteBuffer.wrap(u);
this.v = ByteBuffer.wrap(v);
}

public void draw() {
if (width_yuv > 0 && height_yuv > 0 && y != null && u != null && v != null) {
GLES20.glUseProgram(program);
GLES20.glEnableVertexAttribArray(avPosition);
GLES20.glVertexAttribPointer(avPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);

GLES20.glEnableVertexAttribArray(afPosition);
GLES20.glVertexAttribPointer(afPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, textureBuffer);

//activate texture unit 0 and bind the Y data
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[0]);
//glTexImage2D (int target,
// int level,
// int internalformat,
// int width,
// int height,
// int border,
// int format,
// int type,
// Buffer pixels)
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width_yuv, height_yuv, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, y);

//activate texture unit 1 and bind the U data
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[1]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width_yuv / 2, height_yuv / 2, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, u);

//activate texture unit 2 and bind the V data
GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[2]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width_yuv / 2, height_yuv / 2, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, v);

//assign texture units 0/1/2 to the yuv samplers in the fragment shader
GLES20.glUniform1i(sampler_y, 0);
GLES20.glUniform1i(sampler_u, 1);
GLES20.glUniform1i(sampler_v, 2);

//绘制
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexCount);

y.clear();
u.clear();
v.clear();
y = null;
u = null;
v = null;
GLES20.glDisableVertexAttribArray(afPosition);
GLES20.glDisableVertexAttribArray(avPosition);

}
}
}

ShaderUtil.java


import android.content.Context;
import android.opengl.GLES20;
import android.util.Log;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;

public class ShaderUtil {
private static final String TAG = "ShaderUtil";


public static String readRawTxt(Context context, int rawId) {
InputStream inputStream = context.getResources().openRawResource(rawId);
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
StringBuffer sb = new StringBuffer();
String line;
try {
while ((line = reader.readLine()) != null) {
sb.append(line).append("\n");
}
reader.close();
} catch (Exception e) {
e.printStackTrace();
}
return sb.toString();
}

public static int loadShader(int shaderType, String source) {
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
//添加代码到shader
GLES20.glShaderSource(shader, source);
//编译shader
GLES20.glCompileShader(shader);
int[] compile = new int[1];
//检测是否编译成功
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compile, 0);
if (compile[0] != GLES20.GL_TRUE) {
Log.d(TAG, "shader compile error");
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
}

public static int createProgram(String vertexSource, String fragmentSource) {
//获取vertex shader
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
//获取fragment shader
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (fragmentShader == 0) {
return 0;
}
//创建一个空的渲染程序
int program = GLES20.glCreateProgram();
if (program != 0) {
//添加vertexShader到渲染程序
GLES20.glAttachShader(program, vertexShader);
//添加fragmentShader到渲染程序
GLES20.glAttachShader(program, fragmentShader);
//关联为可执行渲染程序
GLES20.glLinkProgram(program);
int[] linsStatus = new int[1];
//检测是否关联成功
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linsStatus, 0);
if (linsStatus[0] != GLES20.GL_TRUE) {
Log.d(TAG, "link program error");
GLES20.glDeleteProgram(program);
program = 0;
}
}
return program;

}

}

  3. Writing the Renderer
    MyRender.java
import android.content.Context;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class MyRender implements GLSurfaceView.Renderer {

private Context context;

private YUV420Texture yuv420Texture;

public MyRender(Context context) {
this.context = context;
}

@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
yuv420Texture = new YUV420Texture(context);
yuv420Texture.initYUV();
}


@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
//宽高
GLES20.glViewport(0, 0, width, height);
}

@Override
public void onDrawFrame(GL10 gl) {
//清空颜色
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
//设置背景颜色
// GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);

yuv420Texture.draw();
}

public void setYuvData(int width, int height, byte[] y, byte[] u, byte[] v) {
if (yuv420Texture != null) {
yuv420Texture.setYUVData(width, height, y, u, v);
}
}
}
  4. Hooking the Renderer up to a GLSurfaceView

MyGLSurfaceView.java


import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;

public class MyGLSurfaceView extends GLSurfaceView {

private MyRender myRender;

public MyGLSurfaceView(Context context) {
this(context, null);
}

public MyGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
setEGLContextClientVersion(2);
myRender = new MyRender(context);
setRenderer(myRender);
//with RENDERMODE_WHEN_DIRTY, onDrawFrame() runs only when requestRender() is called
//with RENDERMODE_CONTINUOUSLY, onDrawFrame() runs automatically at roughly 60 fps
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}

public void setYUVData(int width, int height, byte[] y, byte[] u, byte[] v) {
if (myRender != null) {
myRender.setYuvData(width, height, y, u, v);
requestRender();
}
}
}
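A minimal sketch of feeding a frame to the view, assuming planar I420 data (the Y plane is width*height bytes, the U and V planes are width/2 x height/2 bytes each), for example a frame decoded with FFmpeg or read from a raw .yuv file:

// Sketch only: split one planar I420 frame into its three planes and hand them to the view.
private void pushFrame(MyGLSurfaceView view, byte[] i420, int width, int height) {
    int ySize = width * height;
    int uvSize = ySize / 4; // each chroma plane is a quarter of the luma plane
    byte[] y = new byte[ySize];
    byte[] u = new byte[uvSize];
    byte[] v = new byte[uvSize];
    System.arraycopy(i420, 0, y, 0, ySize);
    System.arraycopy(i420, ySize, u, 0, uvSize);
    System.arraycopy(i420, ySize + uvSize, v, 0, uvSize);
    view.setYUVData(width, height, y, u, v); // also triggers requestRender()
}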

Drawing an image texture with OpenGL ES on Android

Posted on 2018-07-16 | Category: OpenGLES
Word count: 1,298 | Reading time ≈ 7 minutes
  1. Put the shader code under res/raw

vertex_shader.glsl

attribute vec4 av_Position;//vertex position
attribute vec2 af_Position;//texture coordinate
varying vec2 v_texPo;//texture coordinate, shared with the fragment shader
void main() {
v_texPo = af_Position;
gl_Position = av_Position;
}

fragment_shader.glsl


precision mediump float;//float precision
varying vec2 v_texPo;//texture coordinate, received from the vertex shader
uniform sampler2D sTexture;//texture sampler
void main() {
gl_FragColor=texture2D(sTexture, v_texPo);
}
  2. Write the data, paying attention to how the texture coordinates map to the vertices

Texture coordinate system


import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;
import android.opengl.GLUtils;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;


//纹理 根据坐标系映射
public class BitmapTexture {


//顶点坐标
static float vertexData[] = { // in counterclockwise order:
-1f, -1f, 0.0f, // bottom left
1f, -1f, 0.0f, // bottom right
-1f, 1f, 0.0f, // top left
1f, 1f, 0.0f, // top right
};

//纹理坐标 对应顶点坐标 与之映射
static float textureData[] = { // in counterclockwise order:
0f, 1f, 0.0f, // bottom left
1f, 1f, 0.0f, // bottom right
0f, 0f, 0.0f, // top left
1f, 0f, 0.0f, // top right
};

//每一次取点的时候取几个点
static final int COORDS_PER_VERTEX = 3;

private final int vertexCount = vertexData.length / COORDS_PER_VERTEX;
//每一次取的总的点 大小
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex


private Context context;

//位置
private FloatBuffer vertexBuffer;
//纹理
private FloatBuffer textureBuffer;
private int program;
private int avPosition;
//纹理位置
private int afPosition;
//纹理id
private int textureId;


public BitmapTexture(Context context) {
this.context = context;

vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(vertexData);
vertexBuffer.position(0);

textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(textureData);
textureBuffer.position(0);
}


public void onSurfaceCreated() {
String vertexSource = ShaderUtil.readRawTxt(context, R.raw.vertex_shader);
String fragmentSource = ShaderUtil.readRawTxt(context, R.raw.fragment_shader);
program = ShaderUtil.createProgram(vertexSource, fragmentSource);

if (program > 0) {
//获取顶点坐标字段
avPosition = GLES20.glGetAttribLocation(program, "av_Position");
//获取纹理坐标字段
afPosition = GLES20.glGetAttribLocation(program, "af_Position");
int[] textureIds = new int[1];
//创建纹理
GLES20.glGenTextures(1, textureIds, 0);
if (textureIds[0] == 0) {
return;
}
textureId = textureIds[0];
//绑定纹理
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
//环绕(超出纹理坐标范围) (s==x t==y GL_REPEAT 重复)
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
//过滤(纹理像素映射到坐标点) (缩小、放大:GL_LINEAR线性)
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), R.mipmap.bg);

if (bitmap == null) {
return;
}
//设置纹理为2d图片
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
}
}

public void draw() {
//使用程序
GLES20.glUseProgram(program);
GLES20.glEnableVertexAttribArray(avPosition);
GLES20.glEnableVertexAttribArray(afPosition);
//设置顶点位置值
GLES20.glVertexAttribPointer(avPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
//设置纹理位置值
GLES20.glVertexAttribPointer(afPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, textureBuffer);
//绘制 GLES20.GL_TRIANGLE_STRIP:复用坐标
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexCount);
GLES20.glDisableVertexAttribArray(avPosition);
GLES20.glDisableVertexAttribArray(afPosition);
}
}

ShaderUtil.java


import android.content.Context;
import android.opengl.GLES20;
import android.util.Log;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;

public class ShaderUtil {
private static final String TAG = "ShaderUtil";


public static String readRawTxt(Context context, int rawId) {
InputStream inputStream = context.getResources().openRawResource(rawId);
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
StringBuffer sb = new StringBuffer();
String line;
try {
while ((line = reader.readLine()) != null) {
sb.append(line).append("\n");
}
reader.close();
} catch (Exception e) {
e.printStackTrace();
}
return sb.toString();
}

public static int loadShader(int shaderType, String source) {
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
//添加代码到shader
GLES20.glShaderSource(shader, source);
//编译shader
GLES20.glCompileShader(shader);
int[] compile = new int[1];
//检测是否编译成功
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compile, 0);
if (compile[0] != GLES20.GL_TRUE) {
Log.d(TAG, "shader compile error");
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
}

public static int createProgram(String vertexSource, String fragmentSource) {
//获取vertex shader
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
//获取fragment shader
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (fragmentShader == 0) {
return 0;
}
//创建一个空的渲染程序
int program = GLES20.glCreateProgram();
if (program != 0) {
//添加vertexShader到渲染程序
GLES20.glAttachShader(program, vertexShader);
//添加fragmentShader到渲染程序
GLES20.glAttachShader(program, fragmentShader);
//关联为可执行渲染程序
GLES20.glLinkProgram(program);
int[] linsStatus = new int[1];
//检测是否关联成功
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linsStatus, 0);
if (linsStatus[0] != GLES20.GL_TRUE) {
Log.d(TAG, "link program error");
GLES20.glDeleteProgram(program);
program = 0;
}
}
return program;

}

}

  3. Writing the Renderer
    MyRender.java


    import android.content.Context;
    import android.opengl.GLES20;
    import android.opengl.GLSurfaceView;

    import javax.microedition.khronos.egl.EGLConfig;
    import javax.microedition.khronos.opengles.GL10;

    public class MyRender implements GLSurfaceView.Renderer {

    private Context context;

    private BitmapTexture bitmapTexture;

    public MyRender(Context context) {
    this.context = context;
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    bitmapTexture = new BitmapTexture(context);
    bitmapTexture.onSurfaceCreated();
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
    //set the viewport to the surface width and height
    GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
    //clear the color buffer
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    //set the clear (background) color
    // GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);

    bitmapTexture.draw();
    }
    }
  2. Using the Renderer in a GLSurfaceView

MyGLSurfaceView.java

import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;

public class MyGLSurfaceView extends GLSurfaceView {

public MyGLSurfaceView(Context context) {
this(context, null);
}

public MyGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
setEGLContextClientVersion(2);
setRenderer(new MyRender(context));
}
}
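The view can then be used directly as an Activity's content view. A minimal sketch (MainActivity is a hypothetical host Activity, not part of the original post):

import android.app.Activity;
import android.os.Bundle;

public class MainActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(new MyGLSurfaceView(this));
    }
}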

Android OpenGL ES: Drawing Triangles and Quads

Posted on 2018-07-16 | Category: OpenGLES |
Word count: 1,176 | Reading time ≈ 6 minutes
  1. Check whether the device supports OpenGL ES 2.0
//check whether the device supports OpenGL ES 2.0
final ActivityManager activityManager = (ActivityManager) getSystemService(ACTIVITY_SERVICE);
final ConfigurationInfo configurationInfo = activityManager.getDeviceConfigurationInfo();
final boolean supportES2 = configurationInfo.reqGlEsVersion >= 0x00020000;
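That flag can then gate creation of the GL view, for example (a sketch inside an Activity; MyGLSurfaceView is the custom view built later in this post):

if (supportES2) {
    setContentView(new MyGLSurfaceView(this));
} else {
    Toast.makeText(this, "OpenGL ES 2.0 is not supported on this device", Toast.LENGTH_LONG).show();
    finish();
}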

To make sure this requirement is enforced, also declare it in the manifest:

<uses-feature android:glEsVersion="0x00020000" android:required="true" />

  2. Put vertex_shader.glsl and fragment_shader.glsl under res/raw

vertex_shader.glsl

attribute vec4 vPosition;
void main(){
gl_Position = vPosition;
}

fragment_shader.glsl

precision mediump float;
uniform vec4 vColor;
void main(){
gl_FragColor = vColor;
}

The three variable qualifiers in OpenGL ES shaders: uniform, attribute and varying

  3. Define the shape
import android.content.Context;
import android.opengl.GLES20;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

public class Triangle {

//native buffer holding the vertex coordinates
private FloatBuffer vertexBuffer;

//number of coordinates taken per vertex
static final int COORDS_PER_VERTEX = 3;

//vertex coordinates
static float triangleCoords[] = { // in counterclockwise order:
0.0f, 0.622008459f, 0.0f, // top
-0.5f, -0.311004243f, 0.0f, // bottom left
0.5f, -0.311004243f, 0.0f // bottom right
};

// Set color with red, green, blue and alpha (opacity) values
float color[] = {0.63671875f, 0.76953125f, 0.22265625f, 1.0f};


private final int mProgram;

private int mPositionHandle;
private int mColorHandle;

private final int vertexCount = triangleCoords.length / COORDS_PER_VERTEX;
//bytes occupied by one vertex
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per float coordinate

public Triangle(Context context) {
//allocate a native buffer for the coordinates
vertexBuffer = ByteBuffer
.allocateDirect(triangleCoords.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(triangleCoords);
vertexBuffer.position(0);

//build a program from the vertex shader and fragment shader sources
mProgram = ShaderUtil.createProgram(ShaderUtil.readRawTxt(context, R.raw.vertex_shader),
ShaderUtil.readRawTxt(context, R.raw.fragment_shader));
if (mProgram > 0) {
//get the location of the vertex shader attribute vPosition
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
//get the location of the fragment shader uniform vColor
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
}
}


public void draw() {
//use the program
GLES20.glUseProgram(mProgram);

// enable the vertex attribute array
GLES20.glEnableVertexAttribArray(mPositionHandle);

// point the attribute at the vertex data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);

// set the color
GLES20.glUniform4fv(mColorHandle, 1, color, 0);

// draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount);

// disable the vertex attribute array
GLES20.glDisableVertexAttribArray(mPositionHandle);

}
}

Vertex coordinate system (figure)

ShaderUtil.java


import android.content.Context;
import android.opengl.GLES20;
import android.util.Log;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;

public class ShaderUtil {
private static final String TAG = "ShaderUtil";


public static String readRawTxt(Context context, int rawId) {
InputStream inputStream = context.getResources().openRawResource(rawId);
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
StringBuffer sb = new StringBuffer();
String line;
try {
while ((line = reader.readLine()) != null) {
sb.append(line).append("\n");
}
reader.close();
} catch (Exception e) {
e.printStackTrace();
}
return sb.toString();
}

public static int loadShader(int shaderType, String source) {
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
//attach the shader source
GLES20.glShaderSource(shader, source);
//compile the shader
GLES20.glCompileShader(shader);
int[] compile = new int[1];
//check whether compilation succeeded
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compile, 0);
if (compile[0] != GLES20.GL_TRUE) {
Log.d(TAG, "shader compile error");
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
}

public static int createProgram(String vertexSource, String fragmentSource) {
//compile the vertex shader
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
//compile the fragment shader
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (fragmentShader == 0) {
return 0;
}
//create an empty program
int program = GLES20.glCreateProgram();
if (program != 0) {
//attach the vertex shader to the program
GLES20.glAttachShader(program, vertexShader);
//attach the fragment shader to the program
GLES20.glAttachShader(program, fragmentShader);
//link into an executable program
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
//check whether linking succeeded
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.d(TAG, "link program error");
GLES20.glDeleteProgram(program);
program = 0;
}
}
return program;
}

}
  4. Write the Renderer


    import android.content.Context;
    import android.opengl.GLES20;
    import android.opengl.GLSurfaceView;

    import javax.microedition.khronos.egl.EGLConfig;
    import javax.microedition.khronos.opengles.GL10;

    public class MyRender implements GLSurfaceView.Renderer {

    private Context context;

    private Triangle triangle;

    public MyRender(Context context) {
    this.context = context;
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    triangle = new Triangle(context);
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
    //set the viewport to the surface width and height
    GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
    //clear the color buffer
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

    //set the clear (background) color
    // GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
    triangle.draw();
    }
    }
  5. Set the Renderer on the GLSurfaceView

import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;

public class MyGLSurfaceView extends GLSurfaceView {

public MyGLSurfaceView(Context context) {
this(context, null);
}

public MyGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
setEGLContextClientVersion(2);
setRenderer(new MyRender(context));
}
}

With the steps above, the triangle is drawn successfully.

  6. Drawing a quad
    A quad is built from two triangles, so the vertex order matters.
    When calling GLES20.glDrawArrays, the mode flag can be either GLES20.GL_TRIANGLE_STRIP or GLES20.GL_TRIANGLES; the former reuses shared vertices, while the latter consumes three separate vertices per triangle, as shown in the sketch below.
    Drawing the quad (figure)
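A minimal sketch of the quad's vertex data (the rest of the class is assumed to mirror Triangle above; with GL_TRIANGLE_STRIP the four shared vertices are enough for the two triangles):

static float squareCoords[] = {
        -0.5f,  0.5f, 0.0f,  // top left
        -0.5f, -0.5f, 0.0f,  // bottom left
         0.5f,  0.5f, 0.0f,  // top right
         0.5f, -0.5f, 0.0f   // bottom right
};

// GL_TRIANGLE_STRIP reuses shared vertices, so four are enough for two triangles:
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
// GL_TRIANGLES would need six vertices (three per triangle) to cover the same quad.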

MediaCodec hardware encoding: PCM to AAC

Posted on 2018-07-16 | Category: MediaCodec |
Word count: 786 | Reading time ≈ 4 minutes

MediaCodec is the hardware codec API provided by Android (API level >= 16); it can encode and decode both audio and video.

How it works: internally MediaCodec keeps two buffer queues, an input queue and an output queue. The input queue holds the raw data to be encoded or decoded and feeds it to the codec; the output queue holds the resulting data, which can be consumed directly or written to a file.
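In code, this two-queue model boils down to a dequeue / fill / queue step on the input side and a dequeue / consume / release loop on the output side. A minimal sketch of that pattern (codec, info, rawData, ptsUs and timeoutUs are placeholders; the full encoder below follows the same structure):

int inIndex = codec.dequeueInputBuffer(timeoutUs);
if (inIndex >= 0) {
    ByteBuffer in = codec.getInputBuffers()[inIndex];
    in.clear();
    in.put(rawData);                                    // feed raw data into the input queue
    codec.queueInputBuffer(inIndex, 0, rawData.length, ptsUs, 0);
}

int outIndex = codec.dequeueOutputBuffer(info, timeoutUs);
while (outIndex >= 0) {
    ByteBuffer out = codec.getOutputBuffers()[outIndex]; // encoded/decoded result
    // consume "out" here: write it to a file or render it
    codec.releaseOutputBuffer(outIndex, false);
    outIndex = codec.dequeueOutputBuffer(info, timeoutUs);
}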

An introduction to the AAC (ADTS) header: https://blog.csdn.net/jay100500/article/details/52955232



//mediacodec
private MediaFormat encoderFormat = null;
private MediaCodec encoder = null;
private FileOutputStream outputStream = null;
private MediaCodec.BufferInfo info = null;
private int perpcmsize = 0;
private byte[] outByteBuffer = null;
private int aacsamplerate = 4;
private double recordTime = 0;
private int audioSamplerate = 0;

private void initMediacodec(int samperate, File outfile)
{
try {
aacsamplerate = getADTSsamplerate(samperate);
//stereo, 2 channels
encoderFormat = MediaFormat.createAudioFormat(MediaFormat.MIMETYPE_AUDIO_AAC, samperate, 2);
//96 kbps, roughly FM quality
encoderFormat.setInteger(MediaFormat.KEY_BIT_RATE, 96000);
encoderFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
encoderFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 4096);
encoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AAC);
info = new MediaCodec.BufferInfo();
if(encoder == null)
{
MyLog.d("create encoder wrong");
return;
}
recordTime = 0;
encoder.configure(encoderFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
outputStream = new FileOutputStream(outfile);
encoder.start();
} catch (IOException e) {
e.printStackTrace();
}
}

private void encodecPcmToAAc(int size, byte[] buffer)
{
if(buffer != null && encoder != null)
{
//recording duration: size / (sample rate * channels * bytes per sample)
recordTime += size * 1.0 / (audioSamplerate * 2 * (16 / 8));
MyLog.d("recordTime = " + recordTime);
//callback with the current recording time
if(wlOnRecordTimeListener != null)
{
wlOnRecordTimeListener.onRecordTime((int) recordTime);
}

int inputBufferindex = encoder.dequeueInputBuffer(0);
if(inputBufferindex >= 0)
{
ByteBuffer byteBuffer = encoder.getInputBuffers()[inputBufferindex];
byteBuffer.clear();
byteBuffer.put(buffer);
encoder.queueInputBuffer(inputBufferindex, 0, size, 0, 0);
}

int index = encoder.dequeueOutputBuffer(info, 0);
while(index >= 0)
{
try {
perpcmsize = info.size + 7;
outByteBuffer = new byte[perpcmsize];

ByteBuffer byteBuffer = encoder.getOutputBuffers()[index];
byteBuffer.position(info.offset);
byteBuffer.limit(info.offset + info.size);

addADtsHeader(outByteBuffer, perpcmsize, aacsamplerate);

byteBuffer.get(outByteBuffer, 7, info.size);
byteBuffer.position(info.offset);
outputStream.write(outByteBuffer, 0, perpcmsize);

encoder.releaseOutputBuffer(index, false);
index = encoder.dequeueOutputBuffer(info, 0);
outByteBuffer = null;
} catch (IOException e) {
e.printStackTrace();
}
}
}
}

private void addADtsHeader(byte[] packet, int packetLen, int samplerate)
{
int profile = 2; // AAC LC
int freqIdx = samplerate; // samplerate
int chanCfg = 2; // CPE

packet[0] = (byte) 0xFF; // syncword is 0xFFF (12 bits); only the high 8 bits fit here, the remaining 4 go into the next byte
packet[1] = (byte) 0xF9; // remaining syncword bits 1111, then MPEG-2 ID, layer 00, no CRC (protection absent)
packet[2] = (byte) (((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
packet[3] = (byte) (((chanCfg & 3) << 6) + (packetLen >> 11));
packet[4] = (byte) ((packetLen & 0x7FF) >> 3);
packet[5] = (byte) (((packetLen & 7) << 5) + 0x1F);
packet[6] = (byte) 0xFC;
}
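// For reference (added note, not part of the original post): the 7-byte ADTS header
// written above has this bit layout when no CRC is present:
//   AAAAAAAA AAAABCCD EEFFFFGH HHIJKLMM MMMMMMMM MMMOOOOO OOOOOOPP
//   A: syncword (0xFFF)      B: MPEG version        C: layer (always 00)
//   D: protection absent     E: profile             F: sampling frequency index
//   G: private bit           H: channel configuration
//   I-L: original/home/copyright bits               M: frame length (header + payload)
//   O: buffer fullness       P: number of raw data blocks in frame - 1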

private int getADTSsamplerate(int samplerate)
{
int rate = 4;
switch (samplerate)
{
case 96000:
rate = 0;
break;
case 88200:
rate = 1;
break;
case 64000:
rate = 2;
break;
case 48000:
rate = 3;
break;
case 44100:
rate = 4;
break;
case 32000:
rate = 5;
break;
case 24000:
rate = 6;
break;
case 22050:
rate = 7;
break;
case 16000:
rate = 8;
break;
case 12000:
rate = 9;
break;
case 11025:
rate = 10;
break;
case 8000:
rate = 11;
break;
case 7350:
rate = 12;
break;
}
return rate;
}

private void releaseMedicacodec()
{
if(encoder == null)
{
return;
}
try {
recordTime = 0;
outputStream.close();
outputStream = null;
encoder.stop();
encoder.release();
encoder = null;
encoderFormat = null;
info = null;
initmediacodec = false;

MyLog.d("录制完成...");
} catch (IOException e) {
e.printStackTrace();
}
finally {
if(outputStream != null)
{
try {
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
outputStream = null;
}
}
}
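For context, encodecPcmToAAc is meant to be driven by a recording loop. A rough sketch, assuming an AudioRecord instance audioRecord already recording at audioSamplerate (stereo, 16-bit PCM) and an isRecording flag (both hypothetical, not shown in the original code):

byte[] pcmBuffer = new byte[4096];
initMediacodec(audioSamplerate, new File(getExternalFilesDir(null), "record.aac"));
while (isRecording) {
    int read = audioRecord.read(pcmBuffer, 0, pcmBuffer.length);
    if (read > 0) {
        encodecPcmToAAc(read, pcmBuffer);
    }
}
releaseMedicacodec();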

Decoding ffmpeg AVPacket with MediaCodec

Posted on 2018-07-16 | Category: MediaCodec |
Word count: 600 | Reading time ≈ 4 minutes
Initialize MediaCodec

private MediaFormat mediaFormat;
private MediaCodec mediaCodec;
private MediaCodec.BufferInfo info;
private Surface surface;//the Surface that OpenGL renders to

/**
* 初始化MediaCodec
*
* @param codecName
* @param width
* @param height
* @param csd_0
* @param csd_1
*/
public void initMediaCodec(String codecName, int width, int height, byte[] csd_0, byte[] csd_1) {
try {
if (surface != null) {
String mime = VideoSupportUtil.findVideoCodecName(codecName);
mediaFormat = MediaFormat.createVideoFormat(mime, width, height);
mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, width * height);
mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(csd_0));
mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(csd_1));
MyLog.d(mediaFormat.toString());
mediaCodec = MediaCodec.createDecoderByType(mime);
info = new MediaCodec.BufferInfo();
if(mediaCodec == null) {
MyLog.d("craete mediaCodec wrong");
return;
}
mediaCodec.configure(mediaFormat, surface, null, 0);
mediaCodec.start();
}
} catch (Exception e) {
e.printStackTrace();
}
}
}

VideoSupportUtil.java



import android.media.MediaCodecList;

import java.util.HashMap;
import java.util.Map;

public class VideoSupportUtil {

private static Map<String, String> codecMap = new HashMap<>();
static {
codecMap.put("h264", "video/avc");
}

public static String findVideoCodecName(String ffcodename){
if(codecMap.containsKey(ffcodename))
{
return codecMap.get(ffcodename);
}
return "";
}

public static boolean isSupportCodec(String ffcodecname){
boolean supportvideo = false;
int count = MediaCodecList.getCodecCount();
for(int i = 0; i < count; i++){
String[] types = MediaCodecList.getCodecInfoAt(i).getSupportedTypes();
for(int j = 0; j < types.length; j++){
if(types[j].equals(findVideoCodecName(ffcodecname))){
supportvideo = true;
break;
}
}
if(supportvideo){
break;
}
}
return supportvideo;
}
}
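A typical use is to check for hardware support before configuring MediaCodec, falling back to software (ffmpeg) decoding otherwise. A small sketch (the ffmpeg codec name is whatever the native layer reports, e.g. "h264"):

String ffCodecName = "h264"; // e.g. reported by ffmpeg for the video stream
if (VideoSupportUtil.isSupportCodec(ffCodecName)) {
    // hardware path: the native layer calls initMediaCodec(...) and then decodeAVPacket(...)
} else {
    // software path: decode with ffmpeg and render the YUV frames via OpenGL
}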

C++ layer:




const char* codecName = ((const AVCodec*)avCodecContext->codec)->name;
onCallInitMediacodec(
codecName,
avCodecContext->width,
avCodecContext->height,
avCodecContext->extradata_size,
avCodecContext->extradata_size,
avCodecContext->extradata,
avCodecContext->extradata
);

//get the jmethodID for initMediaCodec
jclass jlz = jniEnv->GetObjectClass(jobj);
jmethodID jmid_initmediacodec = env->GetMethodID(jlz, "initMediaCodec", "(Ljava/lang/String;II[B[B)V");

//called on a worker thread, so the thread must be attached to the JVM first
void onCallInitMediacodec(const char* mime, int width, int height, int csd0_size, int csd1_size, uint8_t *csd_0, uint8_t *csd_1) {

JNIEnv *jniEnv;
if(javaVM->AttachCurrentThread(&jniEnv, 0) != JNI_OK)
{
if(LOG_DEBUG)
{
LOGE("call onCallComplete worng");
}
}

jstring type = jniEnv->NewStringUTF(mime);
jbyteArray csd0 = jniEnv->NewByteArray(csd0_size);
jniEnv->SetByteArrayRegion(csd0, 0, csd0_size, reinterpret_cast<const jbyte *>(csd_0));
jbyteArray csd1 = jniEnv->NewByteArray(csd1_size);
jniEnv->SetByteArrayRegion(csd1, 0, csd1_size, reinterpret_cast<const jbyte *>(csd_1));

jniEnv->CallVoidMethod(jobj, jmid_initmediacodec, type, width, height, csd0, csd1);

jniEnv->DeleteLocalRef(csd0);
jniEnv->DeleteLocalRef(csd1);
jniEnv->DeleteLocalRef(type);
javaVM->DetachCurrentThread();

}
Decode the AVPacket data
public void decodeAVPacket(int datasize, byte[] data) {
if (surface != null && datasize > 0 && data != null) {
int inputBufferIndex = mediaCodec.dequeueInputBuffer(10);
if (inputBufferIndex >= 0) {
ByteBuffer byteBuffer = mediaCodec.getInputBuffers()[inputBufferIndex];
byteBuffer.clear();
byteBuffer.put(data);
mediaCodec.queueInputBuffer(inputBufferIndex, 0, datasize, 0, 0);
}
//once dequeueOutputBuffer returns a valid index the decoded frame is ready; releasing it with render=true renders it to the surface
int outputBufferIndex = mediaCodec.dequeueOutputBuffer(info, 10);
while (outputBufferIndex >= 0) {
mediaCodec.releaseOutputBuffer(outputBufferIndex, true);
outputBufferIndex = mediaCodec.dequeueOutputBuffer(info, 10);
}
}
}

The C++ layer calls back into decodeAVPacket:

datasize = avPacket->size;
data = avPacket->data;//on the JNI side the uint8_t buffer must be converted to a jbyteArray, just like in the initialization call above
