曾大稳丶


  • 首页

  • 分类

  • 标签

  • 归档

  • 关于

Android配置EGL环境C++版

发表于 2019-04-22 | 分类于 OpenGLES |
字数统计: 1,405字 | 阅读时长 ≈ 8分钟

Android 搭建 OpenGLES 的 EGL 环境之前已经用 Java 写过。实际开发中,OpenGLES 的相关代码一般都放在 native 层,因为 native 的效率比 Java 高很多;搭建步骤是一致的,只是换一种语言来写而已。之前用 Java 写的 OpenGLES EGL 环境搭建,请点击下面链接:
https://www.jianshu.com/p/ce3496ab9e02

本文demo下载地址:
https://github.com/ChinaZeng/NativeEglDemo

步骤都是一样的:

1、得到Egl实例
2、得到默认的显示设备(就是窗口)
3、初始化默认显示设备
4、设置显示设备的属性
5、从系统中获取对应属性的配置
6、创建EglContext
7、创建渲染的Surface
8、绑定EglContext和Surface到显示设备中
9、刷新数据,显示渲染场景

代码目录:

代码目录

  1. 首先配置android ndk开发环境,我使用的是cmake

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
# Minimum CMake version supported by the Android NDK/Gradle toolchain.
cmake_minimum_required(VERSION 3.4.1)

# NOTE(review): the article also defines egl/EglThread.cpp but it is missing
# from this source list — confirm against the demo project.
add_library( # Sets the name of the library.
native-lib
SHARED
native-lib.cpp
egl/EglHelper.cpp
)

# Link the system EGL and GLESv2 stacks plus the NDK android/log libraries.
target_link_libraries(
native-lib
EGL
GLESv2
android
log
)
  2. 书写EglHelper,这个类主要负责egl的环境初始化,绘制和销毁

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    94
    95
    96
    97
    98
    99
    100
    //环境搭建初始化
    int EglHelper::initEgl(EGLNativeWindowType window) {
    //1.得到默认的显示设备(就是窗口) -- eglGetDisplay
    mEglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (mEglDisplay == EGL_NO_DISPLAY) {
    LOGE("eglGetDisplay error");
    return -1;
    }

    //2. 初始化默认显示设备 -- eglInitialize
    EGLint *version = new EGLint[2];
    if (!eglInitialize(mEglDisplay, &version[0], &version[1])) {
    LOGE("eglInitialize error");
    return -1;
    }

    //3. 设置显示设备的属性
    const EGLint attrib_config_list[] = {
    EGL_RED_SIZE, 8,
    EGL_GREEN_SIZE, 8,
    EGL_BLUE_SIZE, 8,
    EGL_ALPHA_SIZE, 8,
    EGL_DEPTH_SIZE, 8,
    EGL_STENCIL_SIZE, 8,// 眼睛屏幕的距离
    EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,//版本号
    EGL_NONE
    };

    //3.1 根据所需的参数获取符合该参数的config_size,主要是解决有些手机eglChooseConfig失败的兼容性问题
    EGLint num_config;

    if (!eglChooseConfig(mEglDisplay, attrib_config_list, NULL, 1, &num_config)) {
    LOGE("eglChooseConfig error");
    return -1;
    }
    //3.2 根据获取到的config_size得到eglConfig
    EGLConfig eglConfig;
    if (!eglChooseConfig(mEglDisplay, attrib_config_list, &eglConfig, num_config, &num_config)) {
    LOGE("eglChooseConfig error");
    return -1;
    }

    //4. 创建egl上下文 eglCreateContext
    const EGLint attrib_ctx_list[] = {
    EGL_CONTEXT_CLIENT_VERSION, 2,
    EGL_NONE
    };
    mEglContext = eglCreateContext(mEglDisplay, eglConfig, NULL, attrib_ctx_list);
    if (mEglContext == EGL_NO_CONTEXT) {
    LOGE("eglCreateContext error");
    return -1;
    }
    //5.创建渲染的surface
    mEglSurface = eglCreateWindowSurface(mEglDisplay, eglConfig, window, NULL);
    if (mEglSurface == EGL_NO_SURFACE) {
    LOGE("eglCreateWindowSurface error");
    return -1;
    }
    //6. 绑定eglContext和surface到display
    if (!eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
    LOGE("eglMakeCurrent error");
    return -1;
    }

    //7. 刷新数据,显示渲染场景 -- eglSwapBuffers

    return 0;
    }

    //交换缓冲 绘制
    int EglHelper::swapBuffers() {
    if (mEglDisplay != EGL_NO_DISPLAY && mEglSurface != EGL_NO_SURFACE &&
    eglSwapBuffers(mEglDisplay, mEglSurface)) {
    return 0;
    }
    return -1;
    }
    //销毁
    void EglHelper::destroyEgl() {
    if (mEglDisplay != EGL_NO_DISPLAY) {
    //解绑display上的eglContext和surface
    eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);

    //销毁surface 和 eglContext
    if (mEglSurface != EGL_NO_SURFACE) {
    eglDestroySurface(mEglDisplay, mEglSurface);
    mEglSurface = EGL_NO_SURFACE;
    }

    if (mEglContext != EGL_NO_CONTEXT) {
    eglDestroyContext(mEglDisplay, mEglContext);
    mEglContext = EGL_NO_CONTEXT;
    }

    if (mEglDisplay != EGL_NO_DISPLAY) {
    eglTerminate(mEglDisplay);
    mEglDisplay = EGL_NO_DISPLAY;
    }
    }
    }

书写EglThread,顾名思义,这个类主要负责开启一个线程然后根据外部的生命周期调用EglHelper完成egl的环境搭建,并且和外部交互。

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106

//Initialize the mutex/condvar pair used to pace the render loop in manual mode.
EglThread::EglThread() {
pthread_mutex_init(&pthread_mutex, NULL);
pthread_cond_init(&pthread_cond, NULL);
}

//Destroy the synchronization primitives.
//NOTE(review): this is only safe once the render thread has fully exited;
//the destructor does not join the thread itself — confirm callers' ordering.
EglThread::~EglThread() {
pthread_mutex_destroy(&pthread_mutex);
pthread_cond_destroy(&pthread_cond);
}


//Render-thread entry point. Owns an EglHelper for the thread's lifetime:
//initializes EGL on eglThread->mANativeWindow, then loops dispatching the
//create/change/draw callbacks until isExit is set, and finally tears the EGL
//environment down. Always returns 0 (thread result).
void *eglThreadImpl(void *context) {
    EglThread *eglThread = static_cast<EglThread *>(context);
    if (!eglThread) {
        LOGE("eglThreadImpl eglThread is null");
        return 0;
    }

    EglHelper *eglHelper = new EglHelper();

    if (eglHelper->initEgl(eglThread->mANativeWindow) != 0) {
        LOGE("eglHelper initEgl error");
        delete eglHelper;  //fix: eglHelper was leaked on this early-return path
        return 0;
    }
    eglThread->isExit = false;
    while (!eglThread->isExit) {

        //One-shot "surface created" callback.
        if (eglThread->isCreate) {
            eglThread->isCreate = false;
            eglThread->onCreate();
        }

        //One-shot "surface changed" callback; also enables drawing.
        if (eglThread->isChange) {
            eglThread->isChange = false;
            eglThread->isStart = true;
            eglThread->onChange(eglThread->surfaceWidth, eglThread->surfaceHeight);
        }

        if (eglThread->isStart) {
            eglThread->onDraw();
            //Swap buffers to present the frame.
            eglHelper->swapBuffers();

            if (eglThread->mRenderType == RENDER_MODULE_AUTO) {
                //Auto mode: cap at roughly 60 fps.
                usleep(1000000 / 60);
            } else {
                //Manual mode: block until notifyRender() signals.
                pthread_mutex_lock(&eglThread->pthread_mutex);
                pthread_cond_wait(&eglThread->pthread_cond, &eglThread->pthread_mutex);
                pthread_mutex_unlock(&eglThread->pthread_mutex);
            }
        }

    }

    eglHelper->destroyEgl();
    delete eglHelper;
    eglHelper = NULL;
    //Returning ends the thread.
    return 0;

}


//Start the render thread the first time a surface appears.
//NOTE(review): assumes mEglThread was initialized to -1 elsewhere, and the
//pthread_create return value is not checked — confirm upstream handling.
void EglThread::onSurfaceCreate(EGLNativeWindowType window) {
if (mEglThread == -1) {
isCreate = true;
mANativeWindow = window;
pthread_create(&mEglThread, NULL, eglThreadImpl, this);
}
}


//Record the new surface size and wake the render thread so it runs the
//onChange callback (which also enables per-frame drawing).
void EglThread::onSurfaceChange(int width, int height) {
    if (mEglThread == -1) {
        return;  //thread not started yet — nothing to notify
    }
    surfaceWidth = width;
    surfaceHeight = height;
    isChange = true;
    notifyRender();
}

//Switch between RENDER_MODULE_AUTO (~60 fps loop) and manual mode, waking the
//render thread so the change takes effect immediately.
void EglThread::setRenderModule(int renderType) {
mRenderType = renderType;
notifyRender();
}

//Wake the render thread if it is blocked in pthread_cond_wait (manual mode).
void EglThread::notifyRender() {
pthread_mutex_lock(&pthread_mutex);
pthread_cond_signal(&pthread_cond);
pthread_mutex_unlock(&pthread_mutex);
}


//Register the callback invoked once on the render thread after EGL init.
void EglThread::callBackOnCreate(EglThread::OnCreate onCreate) {
this->onCreate = onCreate;
}

//Register the callback invoked when the surface size changes.
void EglThread::callBackOnChange(EglThread::OnChange onChange) {
this->onChange = onChange;
}

//Register the callback invoked for every rendered frame.
void EglThread::callBackOnDraw(EglThread::OnDraw onDraw) {
this->onDraw = onDraw;
}
  1. 书写java层和native层交互,对应生命周期回调即可
    NationOpenGL.java
    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
//JNI bridge to the native EGL renderer; loads native-lib and forwards the
//surface lifecycle. NOTE(review): "Nation" looks like a typo for "Native" —
//kept as-is because the JNI symbol names depend on it.
public class NationOpenGL {

static {
System.loadLibrary("native-lib");
}

//Hands the Surface to native code, which starts the render thread.
public native void nativeSurfaceCreate(Surface surface);

//Notifies native code of the new surface dimensions.
public native void nativeSurfaceChanged(int width, int height);

//Tells native code to stop rendering and release EGL resources.
public native void nativeSurfaceDestroyed();

}

NativeGLSurfaceView.java

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
//SurfaceView that forwards its surface lifecycle callbacks to the native
//EGL renderer via NationOpenGL.
public class NativeGLSurfaceView extends SurfaceView implements SurfaceHolder.Callback {
private NationOpenGL mNationOpenGL;

public NativeGLSurfaceView(Context context) {
this(context, null);
}

public NativeGLSurfaceView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}

public NativeGLSurfaceView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
mNationOpenGL = new NationOpenGL();
//Listen for surface created/changed/destroyed events.
getHolder().addCallback(this);
}

@Override
public void surfaceCreated(SurfaceHolder holder) {
mNationOpenGL.nativeSurfaceCreate(holder.getSurface());
}

@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
mNationOpenGL.nativeSurfaceChanged(width, height);
}

@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mNationOpenGL.nativeSurfaceDestroyed();
}
}

native-lib.cpp

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
//Global render-thread handle shared by the JNI entry points below; its
//lifetime is managed manually in nativeSurfaceCreate/nativeSurfaceDestroyed.
EglThread *eglThread = NULL;

//Invoked once on the render thread after the EGL context is ready.
void callBackOnCreate() {
LOGE("callBackOnCreate");
}

//Invoked when the surface size changes; updates the GL viewport.
void callBackOnChange(int width, int height) {
glViewport(0, 0, width, height);
LOGE("callBackOnChange");
}

//Invoked every frame; clears the screen to cyan.
void callBackOnDraw() {
glClearColor(0.0f, 1.0f, 1.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
LOGE("callBackOnDraw");
}

//JNI: create the render thread, wire up the callbacks, and start it on the
//window backing the given Java Surface.
//NOTE(review): a second call without an intervening destroy leaks the old
//EglThread, and the ANativeWindow is never ANativeWindow_release()d —
//confirm the lifecycle against the demo project.
extern "C"
JNIEXPORT void JNICALL
Java_com_zzw_nativeopnegldemo_opengl_NationOpenGL_nativeSurfaceCreate(JNIEnv *env, jobject instance,
jobject surface) {

eglThread = new EglThread();
eglThread->callBackOnCreate(callBackOnCreate);
eglThread->callBackOnChange(callBackOnChange);
eglThread->callBackOnDraw(callBackOnDraw);
//Manual mode: frames are only drawn when notifyRender() fires.
eglThread->setRenderModule(RENDER_MODULE_MANUAL);


//Wrap the Java Surface as an ANativeWindow and start the render thread.
ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
eglThread->onSurfaceCreate(nativeWindow);
}

//JNI: forward the new surface dimensions to the render thread, if any.
extern "C"
JNIEXPORT void JNICALL
Java_com_zzw_nativeopnegldemo_opengl_NationOpenGL_nativeSurfaceChanged(JNIEnv *env,
                                                                       jobject instance, jint width,
                                                                       jint height) {
    if (eglThread == NULL) {
        return;
    }
    eglThread->onSurfaceChange(width, height);
}


//JNI: request render-thread shutdown and free the EglThread object.
//NOTE(review): isExit is set and the object deleted immediately, but the
//render thread may still be running — and in manual mode it sits in
//pthread_cond_wait and is never signalled here — so this risks a
//use-after-free. A notify + pthread_join before delete would be safer;
//confirm against the demo project.
extern "C"
JNIEXPORT void JNICALL
Java_com_zzw_nativeopnegldemo_opengl_NationOpenGL_nativeSurfaceDestroyed(JNIEnv *env,
jobject instance) {
if (eglThread) {
eglThread->isExit = true;
delete (eglThread);
eglThread = NULL;
}

}

Android使用libRtmp直播推流

发表于 2018-09-20 |
字数统计: 1,039字 | 阅读时长 ≈ 6分钟
  1. 初始化rtmp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
//Allocate an RTMP session object.
RTMP *rtmp = RTMP_Alloc();
//Initialize it.
RTMP_Init(rtmp);
//Set the publish URL.
RTMP_SetupURL(rtmp, url);
//Enable write (publish) mode; must be set before connecting.
RTMP_EnableWrite(rtmp);
//Connect to the server.
RTMP_Connect(rtmp, NULL);
//Connect the stream.
RTMP_ConnectStream(rtmp, 0);

//Push loop for AAC/H264 packets. NOTE(review): illustrative pseudo-code —
//`packet` is filled elsewhere and return values above are unchecked.
while(1){
int result = RTMP_SendPacket(rtmp, packet, 1);
RTMPPacket_Free(packet);
free(packet);
packet = NULL;
}

//Close the connection.
RTMP_Close(rtmp);
//Release the session.
RTMP_Free(rtmp);
rtmp=NULL;
  1. H264包封装。在发送每一帧关键帧之前得先发送SPS、PPS帧信息,发送的每一帧(I、P、SPS、PPS)数据得添加头部信息。

获取摄像头预览数据并编码为H264,pcm数据编码AAC

2.1 SPS PPS数据

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66

//Build and enqueue the AVC sequence header (AVCDecoderConfigurationRecord)
//as an FLV video tag carrying the SPS/PPS. Must be sent before key frames.
void RtmpPush::pushSPSPPS(char *sps, int spsLen, char *pps, int ppsLen) {
if (!this->queue) return;
//5-byte FLV video tag header + 11 bytes of config record overhead = 16.
int bodySize = spsLen + ppsLen + 16;
RTMPPacket *rtmpPacket = static_cast<RTMPPacket *>(malloc(sizeof(RTMPPacket)));
RTMPPacket_Alloc(rtmpPacket, bodySize);
RTMPPacket_Reset(rtmpPacket);

char *body = rtmpPacket->m_body;

int i = 0;
//Frame type (4 bit) + CodecID (4 bit) packed into one byte:
//frame type 1 = key frame, 2 = inter frame; CodecID 7 = AVC -> 0x17.
body[i++] = 0x17;

//AVCPacketType (0x00 = sequence header) + 3-byte composition time (0).
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = 0x00;

//configurationVersion: always 1 (1 byte).
body[i++] = 0x01;

//AVCProfileIndication: profile, copied from sps[1] (1 byte).
body[i++] = sps[1];

//profile_compatibility, copied from sps[2] (1 byte).
body[i++] = sps[2];

//AVCLevelIndication: level, copied from sps[3] (1 byte).
body[i++] = sps[3];

//lengthSizeMinusOne: NALU length field size minus one (0xff -> 4 bytes).
body[i++] = 0xff;

//Number of SPS in the low 5 bits (0xe1 -> one SPS).
body[i++] = 0xe1;
//SPS length, big-endian (2 bytes).
body[i++] = (spsLen >> 8) & 0xff;
body[i++] = spsLen & 0xff;

//SPS payload.
memcpy(&body[i], sps, spsLen);
i += spsLen;
//Number of PPS (1 byte).
body[i++] = 0x01;
//PPS length, big-endian (2 bytes).
body[i++] = (ppsLen >> 8) & 0xff;
body[i++] = ppsLen & 0xff;
//PPS payload.
memcpy(&body[i], pps, ppsLen);


rtmpPacket->m_packetType = RTMP_PACKET_TYPE_VIDEO;
rtmpPacket->m_nBodySize = bodySize;
//The sequence header carries timestamp 0.
rtmpPacket->m_nTimeStamp = 0;
rtmpPacket->m_hasAbsTimestamp = 0;
rtmpPacket->m_nChannel = 0x04;//channel used for audio/video data
rtmpPacket->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
rtmpPacket->m_nInfoField2 = this->rtmp->m_stream_id;

queue->putRtmpPacket(rtmpPacket);

}

2.2 H264数据

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
//Build and enqueue one H.264 NALU as an FLV AVC video tag.
//data holds the NALU payload (no Annex-B start code); keyFrame selects the
//frame-type nibble of the tag header.
void RtmpPush::pushVideoData(char *data, int dataLen, bool keyFrame) {
if (!this->queue) return;
//5-byte video tag header + 4-byte NALU length prefix = 9 extra bytes.
int bodySize = dataLen + 9;
RTMPPacket *rtmpPacket = static_cast<RTMPPacket *>(malloc(sizeof(RTMPPacket)));
RTMPPacket_Alloc(rtmpPacket, bodySize);
RTMPPacket_Reset(rtmpPacket);

char *body = rtmpPacket->m_body;

int i = 0;
//Frame type (4 bit) + CodecID (4 bit) in one byte:
//frame type 1 = key frame, 2 = inter frame; CodecID 7 = AVC.
if (keyFrame) {
body[i++] = 0x17;
} else {
body[i++] = 0x27;
}

//AVCPacketType 0x01 = NALU, followed by a 3-byte composition time of 0.
body[i++] = 0x01;
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = 0x00;

//NALU length, big-endian (4 bytes).
body[i++] = (dataLen >> 24) & 0xff;
body[i++] = (dataLen >> 16) & 0xff;
body[i++] = (dataLen >> 8) & 0xff;
body[i++] = dataLen & 0xff;

//NALU payload.
memcpy(&body[i], data, dataLen);

rtmpPacket->m_packetType = RTMP_PACKET_TYPE_VIDEO;
rtmpPacket->m_nBodySize = bodySize;
//Timestamp relative to the start of the stream (milliseconds).
rtmpPacket->m_nTimeStamp = RTMP_GetTime() - this->startTime;
rtmpPacket->m_hasAbsTimestamp = 0;
rtmpPacket->m_nChannel = 0x04;//channel used for audio/video data
rtmpPacket->m_headerType = RTMP_PACKET_SIZE_LARGE;
rtmpPacket->m_nInfoField2 = this->rtmp->m_stream_id;

queue->putRtmpPacket(rtmpPacket);


}
  1. AAC包封装 需要添加头部
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35

//Build and enqueue one raw AAC frame (no ADTS header) as an FLV audio tag.
void RtmpPush::pushAudioData(char *data, int dataLen) {
if (!this->queue) return;
//2-byte FLV audio tag header.
int bodySize = dataLen + 2;
RTMPPacket *rtmpPacket = static_cast<RTMPPacket *>(malloc(sizeof(RTMPPacket)));
RTMPPacket_Alloc(rtmpPacket, bodySize);
RTMPPacket_Reset(rtmpPacket);

char *body = rtmpPacket->m_body;
//High nibble: sound format, 10 (decimal) = AAC -> 0xA.
//Bits 3-2: sample rate, 0 = 5.5 kHz, 1 = 11 kHz, 2 = 22 kHz, 3 (binary 11) = 44 kHz.
//Bit 1: sample size, 0 = 8 bit, 1 = 16 bit.
//Bit 0: channels, 0 = mono, 1 = stereo.
//44100 Hz, stereo, 16 bit -> low nibble binary 1111 = 0xF, hence 0xAF.
body[0] = 0xAF;

//AACPacketType: 0x00 = sequence header (AudioSpecificConfig), 0x01 = raw AAC.
body[1] = 0x01;

//AAC payload.
memcpy(&body[2], data, dataLen);

rtmpPacket->m_packetType = RTMP_PACKET_TYPE_AUDIO;
rtmpPacket->m_nBodySize = bodySize;
//Timestamp relative to the start of the stream (milliseconds).
rtmpPacket->m_nTimeStamp = RTMP_GetTime() - this->startTime;
rtmpPacket->m_hasAbsTimestamp = 0;
rtmpPacket->m_nChannel = 0x04;//channel used for audio/video data
rtmpPacket->m_headerType = RTMP_PACKET_SIZE_LARGE;
rtmpPacket->m_nInfoField2 = this->rtmp->m_stream_id;

queue->putRtmpPacket(rtmpPacket);
}
  1. Android MediaCodec获取PPS和SPS
1
2
3
4
5
6
7
8
9
10
11
12
13
//When MediaCodec reports INFO_OUTPUT_FORMAT_CHANGED (once, before any data),
//read the codec-specific data buffers: csd-0 = SPS, csd-1 = PPS.
int outputBufferIndex = videoEncodec.dequeueOutputBuffer(videoBufferinfo, 0);
if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
ByteBuffer spsb = videoEncodec.getOutputFormat().getByteBuffer("csd-0");
byte[] sps = new byte[spsb.remaining()];
spsb.get(sps, 0,sps.length);
Log.e("zzz", "sps: " + ByteUtil.bytesToHexSpaceString(sps));

ByteBuffer ppsb = videoEncodec.getOutputFormat().getByteBuffer("csd-1");
byte[] pps = new byte[ppsb.remaining()];
ppsb.get(pps, 0,pps.length);
Log.e("zzz", "pps: " + ByteUtil.bytesToHexSpaceString(pps));

}

具体查看demo: https://github.com/ChinaZeng/RtmpLivePushDemo

Android集成libRtmp

发表于 2018-09-20 |
字数统计: 100字 | 阅读时长 ≈ 1分钟
  1. librtmp下载: http://rtmpdump.mplayerhq.hu/download/
    librtmp下载

  2. 拷贝相关文件到cpp里面

目录

  1. cmake配置
    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
cmake_minimum_required(VERSION 3.4.1)

# Build librtmp without OpenSSL support.
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DNO_CRYPTO")

# Compile the librtmp sources directly into the shared library.
add_library( lib-native
SHARED

src/main/cpp/librtmp/amf.c
src/main/cpp/librtmp/hashswf.c
src/main/cpp/librtmp/log.c
src/main/cpp/librtmp/parseurl.c
src/main/cpp/librtmp/rtmp.c
)


target_link_libraries( lib-native
log)

具体的配置查看README

MediaCodec录制音视频并将合成为一个文件

发表于 2018-08-30 | 分类于 MediaCodec |
字数统计: 797字 | 阅读时长 ≈ 4分钟

主要的步骤分为视频录制,音频录制,视频合成。

视频录制采用OpenGLES渲染预览摄像头画面,通过MediaCodec创建一个surface,然后通过创建一个新的egl环境共享预览的EglContext和这个surface绑定,渲染摄像头预览的fbo绑定的纹理,即可录制。
音频录制采用MediaCodec即可,从外部传入pcm数据进行编码录制。
音视频合成采用MediaMuxer合成。

录制

视频录制
OpenGLES渲染画面通过MediaCodec录制

音频录制
相关参考 MediaCodec硬编码pcm2aac
主要分为以下几步骤:

  1. 初始化

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
//Create and configure an AAC audio encoder (not started yet).
//mineType: MIME type, e.g. "audio/mp4a-latm"; sampleRate in Hz; channel count.
//On failure both mAudioEncodec and mAudioBuffInfo are left null.
private void initAudioEncoder(String mineType, int sampleRate, int channel) {
try {
mAudioEncodec = MediaCodec.createEncoderByType(mineType);
MediaFormat audioFormat = MediaFormat.createAudioFormat(mineType, sampleRate, channel);
//96 kbps AAC-LC, 4 KiB max input buffer.
audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, 96000);
audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 4096);
mAudioEncodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

mAudioBuffInfo = new MediaCodec.BufferInfo();
} catch (IOException e) {
e.printStackTrace();
//Null both fields so callers can detect the failure.
mAudioEncodec = null;
mAudioBuffInfo = null;
}
}
  2. 开始录制

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20

//Start the encoder, then drain all currently available output buffers and
//feed each encoded sample to the muxer.
audioEncodec.start();
int outputBufferIndex = audioEncodec.dequeueOutputBuffer(audioBufferinfo, 0);
while (outputBufferIndex >= 0) {

ByteBuffer outputBuffer = audioEncodec.getOutputBuffers()[outputBufferIndex];
outputBuffer.position(audioBufferinfo.offset);
outputBuffer.limit(audioBufferinfo.offset + audioBufferinfo.size);

//Rebase timestamps so the first sample starts at 0.
if (pts == 0) {
pts = audioBufferinfo.presentationTimeUs;
}
audioBufferinfo.presentationTimeUs = audioBufferinfo.presentationTimeUs - pts;
//Write the encoded sample to the muxer.
mediaMuxer.writeSampleData(audioTrackIndex, outputBuffer, audioBufferinfo);

audioEncodec.releaseOutputBuffer(outputBufferIndex, false);
outputBufferIndex = audioEncodec.dequeueOutputBuffer(audioBufferinfo, 0);
}
  1. 传入数据

这里编码为aac不用添加adts是因为这里是写入到mp4,而不是单独的aac文件

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22

//Running audio presentation timestamp in microseconds.
private long audioPts;

//Advance the PTS by the playback duration of `size` bytes of PCM:
//size / (sampleRate * channels * bytesPerSample) seconds, in microseconds.
private long getAudioPts(int size, int sampleRate, int channel, int sampleBit) {
audioPts += (long) (1.0 * size / (sampleRate * channel * (sampleBit / 8)) * 1000000.0);
return audioPts;
}

//Queue one buffer of raw PCM into the AAC encoder (no-op once the encoder
//thread is exiting or the buffer is empty).
public void putPcmData(byte[] buffer, int size) {
if (mAudioEncodecThread != null && !mAudioEncodecThread.isExit && buffer != null && size > 0) {
int inputBufferIndex = mAudioEncodec.dequeueInputBuffer(0);
if (inputBufferIndex >= 0) {
ByteBuffer byteBuffer = mAudioEncodec.getInputBuffers()[inputBufferIndex];
byteBuffer.clear();
byteBuffer.put(buffer);
//Timestamp for this chunk, derived from the amount of PCM consumed.
long pts = getAudioPts(size, sampleRate, channel, sampleBit);
Log.e("zzz", "AudioTime = " + pts / 1000000.0f);
mAudioEncodec.queueInputBuffer(inputBufferIndex, 0, size, pts, 0);
}
}
}
  1. 停止录制
1
2
3
//Stop and release the encoder when recording ends.
audioEncodec.stop();
audioEncodec.release();
audioEncodec = null;

音视频合成

有了音视频数据,通过MediaMuxer进行合并。

官方示例:

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
//Official MediaMuxer usage sketch from the Android SDK docs. NOTE: this is
//pseudo-code — `new MediaFormat(...)` and getInputBuffer(...) are placeholders.
//MediaMuxer facilitates muxing elementary streams. Currently MediaMuxer supports MP4, Webm
//and 3GP file as the output. It also supports muxing B-frames in MP4 since Android Nougat.
//MediaMuxer muxer = new MediaMuxer("temp.mp4", OutputFormat.MUXER_OUTPUT_MPEG_4);
// More often, the MediaFormat will be retrieved from MediaCodec.getOutputFormat()
// or MediaExtractor.getTrackFormat().
MediaFormat audioFormat = new MediaFormat(...);
MediaFormat videoFormat = new MediaFormat(...);
int audioTrackIndex = muxer.addTrack(audioFormat);
int videoTrackIndex = muxer.addTrack(videoFormat);
ByteBuffer inputBuffer = ByteBuffer.allocate(bufferSize);
boolean finished = false;
BufferInfo bufferInfo = new BufferInfo();

muxer.start();
while(!finished) {
// getInputBuffer() will fill the inputBuffer with one frame of encoded
// sample from either MediaCodec or MediaExtractor, set isAudioSample to
// true when the sample is audio data, set up all the fields of bufferInfo,
// and return true if there are no more samples.
finished = getInputBuffer(inputBuffer, isAudioSample, bufferInfo);
if (!finished) {
int currentTrackIndex = isAudioSample ? audioTrackIndex : videoTrackIndex;
muxer.writeSampleData(currentTrackIndex, inputBuffer, bufferInfo);
}
};
muxer.stop();
muxer.release();

主要步骤如下:

  1. 初始化
1
//Create the muxer targeting an MP4 container at savePath.
mMediaMuxer = new MediaMuxer(savePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
  1. 获取录制音视频的TrackIndex
1
2
3
4
5
6
7
8
9
10
//Register each track with the muxer when its encoder reports the final
//output format (INFO_OUTPUT_FORMAT_CHANGED fires once, before any data).
//NOTE(review): the two snippets below presumably run on the video and audio
//encoder threads respectively — confirm against the demo source.
int outputBufferIndex = videoEncodec.dequeueOutputBuffer(videoBufferinfo, 0);
if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
videoTrackIndex = mediaMuxer.addTrack(videoEncodec.getOutputFormat());
}


int outputBufferIndex = audioEncodec.dequeueOutputBuffer(audioBufferinfo, 0);
if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
audioTrackIndex = mediaMuxer.addTrack(audioEncodec.getOutputFormat());
}
  1. 开始合成
1
2
3
4
5
6
7
//Start muxing only after both tracks have been added.
mediaMuxer.start();

//Write encoded video samples.
mediaMuxer.writeSampleData(videoTrackIndex, outputBuffer, videoBufferinfo);

//Write encoded audio samples.
mediaMuxer.writeSampleData(audioTrackIndex, outputBuffer, audioBufferinfo);
  1. 合成结束,写入头信息
1
2
3
//stop() finalizes the file (writes the MP4 header); always release afterwards.
mediaMuxer.stop();
mediaMuxer.release();
mediaMuxer = null;

具体查看demo:
https://github.com/ChinaZeng/SurfaceRecodeDemo

OpenGLES添加水印

发表于 2018-08-24 | 分类于 OpenGLES |
字数统计: 2,177字 | 阅读时长 ≈ 12分钟

原理

多个纹理绘制在一个surface上

理解了添加水印的原理,不管是视频水印还是图片水印都是很简单的了,只是使用的纹理不一样而已。如果是绘制文字水印的话,则需要将文字生成图片,然后将图片使用纹理绘制即可。

Android OpenGLES 绘制图片纹理

那么怎样将多个纹理添加到同一个surface上?

简单示例代码:

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261

import android.content.Context;
import android.graphics.Bitmap;
import android.opengl.GLES20;
import android.util.Log;

import com.zzw.live.R;
import com.zzw.live.egl.EglSurfaceView;
import com.zzw.live.util.ShaderUtil;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

//Renders a camera FBO texture full-screen and overlays a text-watermark
//texture on top, sharing one VBO for both quads.
public class CameraRender implements EglSurfaceView.Render {
//Vertex coordinates: 4 for the full-screen quad + 4 reserved for the watermark.
static float vertexData[] = { // in counterclockwise order:
-1f, -1f, 0.0f, // bottom left
1f, -1f, 0.0f, // bottom right
-1f, 1f, 0.0f, // top left
1f, 1f, 0.0f, // top right

0f, 0f, 0f,//watermark placeholder, filled in by initWater()
0f, 0f, 0f,
0f, 0f, 0f,
0f, 0f, 0f
};

//Texture coordinates mapped onto the quad vertices above.
static float textureData[] = { // in counterclockwise order:
0f, 1f, 0.0f, // bottom left
1f, 1f, 0.0f, // bottom right
0f, 0f, 0.0f, // top left
1f, 0f, 0.0f, // top right
};
//Components fetched per vertex (x, y, z).
static final int COORDS_PER_VERTEX = 3;

final int vertexCount = vertexData.length / COORDS_PER_VERTEX;
//Byte stride of one vertex.
static final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex

//Vertex positions.
protected FloatBuffer vertexBuffer;
//Texture coordinates.
protected FloatBuffer textureBuffer;
private int program;
private int avPosition;

//Texture-coordinate attribute location.
private int afPosition;
//Sampler uniform location; defaults to texture unit 0, so binding is optional.
private int texture;


//vbo id
private int vboId;

private int fboTextureId;

private Context context;

private Bitmap bitmap;
private int waterTextureId;

//Builds the vertex/texture buffers and computes the watermark quad position.
public CameraRender(Context context) {
this.context = context;
initWater();

vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(vertexData);
vertexBuffer.position(0);

textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(textureData);
textureBuffer.position(0);
}

@Override
public void onSurfaceCreated() {
//Enable alpha blending so the watermark's transparent background shows through.
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
program = ShaderUtil.createProgram(ShaderUtil.readRawTxt(context, R.raw.vertex_shader_screen),
ShaderUtil.readRawTxt(context, R.raw.fragment_shader_screen));

if (program > 0) {
//Vertex position attribute.
avPosition = GLES20.glGetAttribLocation(program, "av_Position");
//Texture coordinate attribute.
afPosition = GLES20.glGetAttribLocation(program, "af_Position");
//Sampler uniform.
texture = GLES20.glGetUniformLocation(program, "sTexture");

//Create the shared VBO.
createVBO();

//Create the watermark texture from the bitmap.
createWaterTextureId();
}
}

@Override
public void onSurfaceChanged(int width, int height) {
//Match the viewport to the new surface size.
GLES20.glViewport(0, 0, width, height);
}

@Override
public void onDrawFrame() {
//Clear the color buffer.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
//Set the clear color (takes effect on the next clear).
GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);

//Activate the shader program.
GLES20.glUseProgram(program);

//Bind the camera FBO texture; sampler defaults to texture unit 0.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, fboTextureId);

GLES20.glEnableVertexAttribArray(avPosition);
GLES20.glEnableVertexAttribArray(afPosition);

//Point the attributes at the VBO data.
useVboSetVertext();

// //Non-VBO path: set vertex positions from client memory:
// GLES20.glVertexAttribPointer(avPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
// //Non-VBO path: set texture coordinates from client memory:
// GLES20.glVertexAttribPointer(afPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, textureBuffer);

//GL_TRIANGLE_STRIP reuses the quad's shared vertices.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glDisableVertexAttribArray(avPosition);
GLES20.glDisableVertexAttribArray(afPosition);
//Unbind the texture.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);


drawWater();
}


//FBO-pipeline entry point: remember the texture to sample, then draw.
public void onDraw(int fboTextureId) {
this.fboTextureId = fboTextureId;
onDrawFrame();
}

/**
* Create the VBO holding all vertex positions followed by texture coordinates.
*/
private void createVBO() {
//1. Generate the VBO.
int[] vbos = new int[1];
GLES20.glGenBuffers(vbos.length, vbos, 0);
vboId = vbos[0];
//2. Bind it.
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
//3. Allocate storage for positions + texture coordinates.
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4 + textureData.length * 4, null, GLES20.GL_STATIC_DRAW);
//4. Upload both arrays back-to-back.
GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, 0, vertexData.length * 4, vertexBuffer);
GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4, textureData.length * 4, textureBuffer);
//5. Unbind.
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
}


/**
* Point the vertex attributes at the VBO: positions at offset 0, texture
* coordinates directly after all positions.
*/
private void useVboSetVertext() {
//1. Bind the VBO.
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
//2. Attribute pointers are byte offsets into the bound VBO.
GLES20.glVertexAttribPointer(avPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, 0);
GLES20.glVertexAttribPointer(afPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexData.length * 4);
//3. Unbind.
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
}


//Render the watermark text into a bitmap and position its quad (vertices
//5-8) near the bottom-right corner in normalized device coordinates.
private void initWater() {
bitmap = ShaderUtil.createTextImage("我是水印", 40, "#fff000", "#00000000", 0);

//Quad placement — adjust to taste; width is scaled by the bitmap's aspect ratio.
float r = 1.0f * bitmap.getWidth() / bitmap.getHeight();
float w = r * 0.1f;
vertexData[12] = 0.8f - w;
vertexData[13] = -0.8f;
vertexData[14] = 0;

vertexData[15] = 0.8f;
vertexData[16] = -0.8f;
vertexData[17] = 0;

vertexData[18] = 0.8f - w;
vertexData[19] = -0.7f;
vertexData[20] = 0;

vertexData[21] = 0.8f;
vertexData[22] = -0.7f;
vertexData[23] = 0;
}

//Upload the watermark bitmap into a new GL texture.
private void createWaterTextureId() {

int[] textureIds = new int[1];
//Generate the texture name.
GLES20.glGenTextures(1, textureIds, 0);
waterTextureId = textureIds[0];
//Bind it.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, waterTextureId);
//Wrapping outside [0,1] (s == x, t == y; GL_REPEAT tiles).
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
//Linear filtering for both minification and magnification.
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

ByteBuffer bitmapBuffer = ByteBuffer.allocate(bitmap.getHeight() * bitmap.getWidth() * 4);//4 bytes per pixel (RGBA)
bitmap.copyPixelsToBuffer(bitmapBuffer);
//Rewind the buffer to position 0 before the upload reads it.
bitmapBuffer.flip();

//Upload the pixel data.
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, bitmap.getWidth(), bitmap.getHeight(),
0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);

//Unbind the texture.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}


//Draw the watermark quad using vertices 5-8 of the VBO, reusing the same
//texture coordinates as the main quad.
public void drawWater() {
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, waterTextureId);

GLES20.glEnableVertexAttribArray(avPosition);
GLES20.glEnableVertexAttribArray(afPosition);

GLES20.glVertexAttribPointer(avPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride,
vertexStride * 4);//skip the first four vertices; the watermark vertices follow
GLES20.glVertexAttribPointer(afPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride,
vertexData.length * 4);

GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

GLES20.glDisableVertexAttribArray(avPosition);
GLES20.glDisableVertexAttribArray(afPosition);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
}

}

ShaderUtil.java

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129


import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.opengl.GLES20;
import android.util.Log;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;

public class ShaderUtil {
private static final String TAG = "ShaderUtil";

public static String readRawTxt(Context context, int rawId) {
InputStream inputStream = context.getResources().openRawResource(rawId);
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
StringBuffer sb = new StringBuffer();
String line;
try {
while ((line = reader.readLine()) != null) {
sb.append(line).append("\n");
}
reader.close();
} catch (Exception e) {
e.printStackTrace();
}
return sb.toString();
}

public static int loadShader(int shaderType, String source) {
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
//添加代码到shader
GLES20.glShaderSource(shader, source);
//编译shader
GLES20.glCompileShader(shader);
int[] compile = new int[1];
//检测是否编译成功
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compile, 0);
if (compile[0] != GLES20.GL_TRUE) {
Log.d(TAG, "shader compile error");
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
}

public static int createProgram(String vertexSource, String fragmentSource) {
//获取vertex shader
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
//获取fragment shader
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (fragmentShader == 0) {
return 0;
}
//创建一个空的渲染程序
int program = GLES20.glCreateProgram();
if (program != 0) {
//添加vertexShader到渲染程序
GLES20.glAttachShader(program, vertexShader);
//添加fragmentShader到渲染程序
GLES20.glAttachShader(program, fragmentShader);
//关联为可执行渲染程序
GLES20.glLinkProgram(program);
int[] linsStatus = new int[1];
//检测是否关联成功
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linsStatus, 0);
if (linsStatus[0] != GLES20.GL_TRUE) {
Log.d(TAG, "link program error");
GLES20.glDeleteProgram(program);
program = 0;
}
}
return program;
}


/**
 * Renders a text string into a Bitmap (used e.g. as a watermark texture).
 *
 * @param text      the string to draw
 * @param textSize  text size in pixels
 * @param textColor text color as a parseable color string (e.g. "#ffffff")
 * @param bgColor   background color as a parseable color string
 * @param padding   padding in pixels around the text on all sides
 */
public static Bitmap createTextImage(String text, int textSize, String textColor, String bgColor, int padding) {
    Paint paint = new Paint();
    paint.setColor(Color.parseColor(textColor));
    paint.setTextSize(textSize);
    paint.setStyle(Paint.Style.FILL);
    paint.setAntiAlias(true);

    // Measure the rendered text to size the bitmap.
    float textWidth = paint.measureText(text, 0, text.length());
    Paint.FontMetrics metrics = paint.getFontMetrics();
    float fontTop = metrics.top;
    float fontBottom = metrics.bottom;

    int bmWidth = (int) (textWidth + padding * 2);
    int bmHeight = (int) ((fontBottom - fontTop) + padding * 2);
    Bitmap bm = Bitmap.createBitmap(bmWidth, bmHeight, Bitmap.Config.ARGB_8888);

    // Fill the background, then draw the text; -top shifts the baseline
    // down so the ascenders fit inside the bitmap.
    Canvas canvas = new Canvas(bm);
    canvas.drawColor(Color.parseColor(bgColor));
    canvas.drawText(text, padding, -fontTop + padding, paint);
    return bm;
}

/**
 * Uploads a Bitmap into a newly generated GL_TEXTURE_2D texture.
 * The texture is left bound to GL_TEXTURE_2D when this returns.
 *
 * @return the generated texture id
 */
public static int loadBitmapTexture(Bitmap bitmap) {
    int[] ids = new int[1];
    GLES20.glGenTextures(1, ids, 0);
    int textureId = ids[0];
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);

    // Wrap: repeat outside [0,1]; filter: linear for minify and magnify.
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

    // Copy the bitmap's pixels (4 bytes each for ARGB_8888) into a buffer
    // and upload them as RGBA.
    ByteBuffer pixels = ByteBuffer.allocate(bitmap.getHeight() * bitmap.getWidth() * 4);
    bitmap.copyPixelsToBuffer(pixels);
    pixels.flip();
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, bitmap.getWidth(),
            bitmap.getHeight(), 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
    return textureId;
}


}

我们只需要在当前GL_TEXTURE_2D纹理绘制之后在glBindTexture绑定水印的纹理绘制即可。这里需要注意的几个点:

  1. 需要开启透明,不然没有透明效果。
1
2
3
//启用透明
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
  1. 如果是OES纹理上添加水印,需要开个fbo来绘制OES的纹理,然后另外用一个Render来绘制OES的fbo纹理和添加水印,也就是说:OES和2D不能混用,不然不会起作用。如果可以混用(我试了混用绘制不出来),可以留言告诉我一下,谢谢。

  2. 使用VBO需要注意点的位置。

OpenGLES渲染画面通过MediaCodec录制

发表于 2018-08-22 | 分类于 OpenGLES |
字数统计: 526字 | 阅读时长 ≈ 3分钟

录制原理

  • 预览

通过fbo处理视频数据,通过samplerExternalOES纹理来创建SurfaceTexture,这样的话摄像头数据就和fbo相关联,具体可以看OpenGLES通过SurfaceTexture预览摄像头画面

  • 录制

通过MediaCodec创建一个surface,然后通过创建一个新的egl环境共享预览的EglContext和这个surface绑定,渲染fbo绑定的纹理,即可录制。
egl环境配置:
Android配置EGL环境
Android自定义GLSurfaceView

流程如下图所示:
录制原理

MediaCodec录制主要代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93

private MediaMuxer mMediaMuxer;
private MediaCodec.BufferInfo mBuffInfo;
private MediaCodec mVideoEncodec;
private int width, height;


/**
 * Initializes the recorder. The shared EGLContext lets the encoder's input
 * surface render the same textures as the preview context.
 */
public void initEncoder(EGLContext eglContext, String savePath, String mineType, int width, int height) {
    this.width = width;
    this.height = height;
    this.mEGLContext = eglContext;
    initMediaEncoder(savePath, mineType, width, height);
}

// Creates the muxer for the output file, then the video encoder.
private void initMediaEncoder(String savePath, String mineType, int width, int height) {
    try {
        mMediaMuxer = new MediaMuxer(savePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        initVideoEncoder(mineType, width, height);
    } catch (IOException e) {
        e.printStackTrace();
    }
}

// Configures a surface-input video encoder for the given MIME type and size.
private void initVideoEncoder(String mineType, int width, int height) {
    try {
        mVideoEncodec = MediaCodec.createEncoderByType(mineType);

        MediaFormat videoFormat = MediaFormat.createVideoFormat(mineType, width, height);
        videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30); // 30 fps
        // FIX: the original set KEY_BIT_RATE twice; once is enough.
        videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 4); // RGBA
        // FIX: KEY_I_FRAME_INTERVAL is required for video encoders;
        // configure() can throw without it on many devices.
        videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        // Set the encoding profile; the default is baseline.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            videoFormat.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileMain);
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                videoFormat.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel3);
            }
        }

        mVideoEncodec.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

        mBuffInfo = new MediaCodec.BufferInfo();
        // The encoder's input surface: frames rendered to it become encoder input.
        mSurface = mVideoEncodec.createInputSurface();
    } catch (IOException e) {
        e.printStackTrace();
        mVideoEncodec = null;
        mBuffInfo = null;
        mSurface = null;
    }
}


/**
 * Starts the encoder and drains its output into the muxer.
 * FIX: the original referenced undeclared names (videoEncodec, mediaMuxer,
 * videoBufferinfo); unified to the fields declared above.
 */
public void startRecode() {
    mVideoEncodec.start();
    int outputBufferIndex = mVideoEncodec.dequeueOutputBuffer(mBuffInfo, 0);
    if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        // The actual output format is only known now; register the track
        // and start muxing.
        videoTrackIndex = mMediaMuxer.addTrack(mVideoEncodec.getOutputFormat());
        mMediaMuxer.start();
    } else {
        while (outputBufferIndex >= 0) {
            ByteBuffer outputBuffer = mVideoEncodec.getOutputBuffers()[outputBufferIndex];
            outputBuffer.position(mBuffInfo.offset);
            outputBuffer.limit(mBuffInfo.offset + mBuffInfo.size);

            // Rebase timestamps so the first sample starts at 0.
            if (pts == 0) {
                pts = mBuffInfo.presentationTimeUs;
            }
            mBuffInfo.presentationTimeUs = mBuffInfo.presentationTimeUs - pts;
            // Write the encoded sample to the output file.
            mMediaMuxer.writeSampleData(videoTrackIndex, outputBuffer, mBuffInfo);
            if (encoderWeakReference.get().onMediaInfoListener != null) {
                encoderWeakReference.get().onMediaInfoListener.onMediaTime((int) (mBuffInfo.presentationTimeUs / 1000000));
            }
            mVideoEncodec.releaseOutputBuffer(outputBufferIndex, false);
            outputBufferIndex = mVideoEncodec.dequeueOutputBuffer(mBuffInfo, 0);
        }
    }
}

// Stops and releases both the encoder and the muxer.
public void stopRecode() {
    mVideoEncodec.stop();
    mVideoEncodec.release();
    mVideoEncodec = null;

    mMediaMuxer.stop();
    mMediaMuxer.release();
    mMediaMuxer = null;
}

具体示例请看:
https://github.com/ChinaZeng/SurfaceRecodeDemo

OpenGLES通过SurfaceTexture预览摄像头画面

发表于 2018-08-21 | 分类于 OpenGLES |
字数统计: 1,110字 | 阅读时长 ≈ 5分钟

在这篇文章主要用到的知识点有如下,建议先看一下:

OpenGLES绘制图片纹理

OpenGLES顶点缓冲VBO

OpenGLES帧缓冲FBO

有一个渲染流数据的相关的示例,也可以看一下,这样对本篇理解就会很简单 :

Android OpenGLES渲染MediaCodec解码数据

原理

利用OpenGL生成纹理并绑定到SurfaceTexture,然后把camera的预览数据设置显示到SurfaceTexture中,这样就可以在OpenGL中拿到摄像头数据并显示了。

主要步骤

1.OpenGL ES生成纹理
2.OpenGL ES创建SurfaceTexture并绑定
3.OpenGL ES摄像头预览

比如美颜相机那些,处理摄像头数据展示出来,为了提高预览的效率,所以这里使用了VBO和FBO,如果不知道这个,请看上面的文章。

FBO所需的glsl:
vertex_shader.glsl

1
2
3
4
5
6
7
8
9
attribute vec4 av_Position;//vertex position
attribute vec2 af_Position;//texture coordinate
varying vec2 v_texPo;//texture coordinate, passed on to the fragment shader
uniform mat4 u_Matrix;//transform matrix (orientation correction)

void main() {
v_texPo = af_Position;
gl_Position = av_Position * u_Matrix;
}

fragment_shader.glsl

1
2
3
4
5
6
7
#extension GL_OES_EGL_image_external : require //declare use of the external (OES) texture extension
precision mediump float;//default float precision
varying vec2 v_texPo;//texture coordinate, received from the vertex shader
uniform samplerExternalOES sTexture;//samples stream data (the camera feed)
void main() {
gl_FragColor=texture2D(sTexture, v_texPo);
}

这里使用FBO里面使用samplerExternalOES是为了加载流数据,摄像头数据属于流数据,所以这里需要用这个。

创建相机预览扩展纹理:

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27

/**
 * Creates the external (OES) texture that backs the camera preview
 * SurfaceTexture and hands that SurfaceTexture to the camera owner.
 */
private void createCameraRenderTexture() {
    int[] ids = new int[1];
    GLES20.glGenTextures(1, ids, 0);
    cameraRenderTextureId = ids[0];
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraRenderTextureId);

    // Wrap: repeat outside [0,1] on both axes (s==x, t==y).
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
    // Filter: linear for both minification and magnification.
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

    // Wrap the texture in a SurfaceTexture; the camera feeds frames into it
    // and onFrameAvailable fires for each new frame.
    surfaceTexture = new SurfaceTexture(cameraRenderTextureId);
    surfaceTexture.setOnFrameAvailableListener(this);

    if (onSurfaceListener != null) {
        // Let the camera side bind this SurfaceTexture as its preview target.
        onSurfaceListener.onSurfaceCreate(surfaceTexture);
    }

    // Unbind the external texture.
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
}

camera绑定SurfaceTexture:

1
camera.setPreviewTexture(surfaceTexture);

预览画面,先通过fbo处理,然后拿到fbo的纹理id渲染即可:

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40

@Override
public void onDrawFrame() {
    // Pull the newest camera frame into the OES texture
    // (this is what makes onFrameAvailable fire again).
    surfaceTexture.updateTexImage();
    // FIX: set the clear color BEFORE clearing — the original cleared
    // first, so the first clear used the previous/default color.
    GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

    // Select the program.
    GLES20.glUseProgram(program);

    // Render into the FBO first.
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);

    // Bind the camera OES texture to texture unit 0.
    // FIX: glActiveTexture takes a texture *unit* (GL_TEXTURE0 + n), and
    // glUniform1i takes the sampler uniform *location* — the original passed
    // GLES11Ext.GL_TEXTURE_EXTERNAL_OES for both, which is invalid.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraRenderTextureId);
    GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "sTexture"), 0);

    // Upload the orientation-correction matrix.
    GLES20.glUniformMatrix4fv(uMatrix, 1, false, matrix, 0);

    GLES20.glEnableVertexAttribArray(avPosition);
    GLES20.glEnableVertexAttribArray(afPosition);

    // Vertex and texture coordinates come from the VBO.
    useVboSetVertext();

    // GL_TRIANGLE_STRIP reuses the quad's shared vertices.
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexCount);
    GLES20.glDisableVertexAttribArray(avPosition);
    GLES20.glDisableVertexAttribArray(afPosition);

    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);

    // Draw the FBO's color texture to the screen.
    cameraRender.onDraw(fboTextureId);
}

摄像头方向调整

默认的摄像头预览不同的角度预览出来效果是不同的,我们需要把它给矫正,一般通常是在camera里面设置parms,这里通过OpenGLES自己矫正,通过变换矩阵实现即可:

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
 //Location of the transform-matrix uniform (u_Matrix)
private int uMatrix;

//Transform matrix (4x4)
private float[] matrix = new float[16];

public void onSurfaceCreated() {
//...
uMatrix = GLES20.glGetUniformLocation(program, "u_Matrix");
}

@Override
public void onDrawFrame() {
//...
//Upload the transform matrix to the shader
GLES20.glUniformMatrix4fv(uMatrix, 1, false, matrix, 0);
}


/**
* Resets the matrix to identity.
*/
public void resetMatirx() {
//Identity
Matrix.setIdentityM(matrix, 0);
}


/**
* Applies a rotation of `angle` degrees around the axis (x, y, z).
*
* @param angle rotation angle in degrees
* @param x axis x component
* @param y axis y component
* @param z axis z component
*/
public void setAngle(float angle, float x, float y, float z) {
//Rotate
Matrix.rotateM(matrix, 0, angle, x, y, z);
}

在外层调用:

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
/**
 * Corrects the camera preview orientation through the render matrix
 * (instead of Camera parameters), based on the display rotation and
 * which camera (front/back) is active.
 */
public void previewAngle(Context context) {
    WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
    int rotation = wm.getDefaultDisplay().getRotation();
    boolean backCamera = (cameraId == Camera.CameraInfo.CAMERA_FACING_BACK);
    render.resetMatirx();
    switch (rotation) {
        case Surface.ROTATION_0:
            if (backCamera) {
                render.setAngle(90, 0, 0, 1);
                render.setAngle(180, 1, 0, 0);
            } else {
                render.setAngle(90f, 0f, 0f, 1f);
            }
            break;
        case Surface.ROTATION_90:
            if (backCamera) {
                render.setAngle(180, 0, 0, 1);
                render.setAngle(180, 0, 1, 0);
            } else {
                render.setAngle(90f, 0f, 0f, 1f);
            }
            break;
        case Surface.ROTATION_180:
            if (backCamera) {
                render.setAngle(90f, 0.0f, 0f, 1f);
                render.setAngle(180f, 0.0f, 1f, 0f);
            } else {
                render.setAngle(-90, 0f, 0f, 1f);
            }
            break;
        case Surface.ROTATION_270:
            if (backCamera) {
                render.setAngle(180f, 0.0f, 1f, 0f);
            } else {
                render.setAngle(0f, 0f, 0f, 1f);
            }
            break;
    }
}

具体代码下载地址: https://github.com/ChinaZeng/OpenGLESCameraDemo

OpenGLES正交投影

发表于 2018-08-17 | 分类于 OpenGLES |
字数统计: 1,456字 | 阅读时长 ≈ 8分钟

在图片渲染的时候,之前使用的顶点坐标是占满整个屏幕的归一化坐标

1
2
3
4
5
6
7
8

//顶点坐标
static float vertexData[] = { // in counterclockwise order:
-1f, -1f, 0.0f, // bottom left
1f, -1f, 0.0f, // bottom right
-1f, 1f, 0.0f, // top left
1f, 1f, 0.0f, // top right
};

这样就导致了如下图所示的的问题

横竖屏切换存在的问题

所以我们应该根据屏幕宽高和图片宽高对应的比例算出正确的位置:

横竖屏切换问题解决

上面我们计算得到的坐标值是超出归一化坐标范围的，为了能使OpenGL正确地渲染，我们就需要把这些坐标以及其他各边统一转换到归一化坐标范围内，这个操作就是正交投影

使用正交投影,不管物体多远多近,物体看起来总是形状、大小比例相同的。

在OpenGLES里面使用投影矩阵:

vertex_shader_m.glsl

1
2
3
4
5
6
7
8
9
10
attribute vec4 av_Position;//vertex position
attribute vec2 af_Position;//texture coordinate
varying vec2 v_texPo;//texture coordinate, passed on to the fragment shader

uniform mat4 u_Matrix; //projection matrix

void main() {
v_texPo = af_Position;
gl_Position = av_Position * u_Matrix;
}

在使用的时候:

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
//Projection matrix uniform location and the matrix itself
private int uMatrix;
private float[] matrix = new float[4 * 4];


//1. Look up the projection-matrix uniform
uMatrix = GLES20.glGetUniformLocation(program, "u_Matrix");


//2. Compute the orthographic projection from the surface and bitmap aspect ratios
public void onSurfaceChanged(int width, int height) {
if (width > height) {
//Landscape: widen the clip volume on X
float x = width / ((float) height / bitmap.getHeight() * bitmap.getWidth());
Matrix.orthoM(matrix, 0, -x, x, -1, 1, -1, 1);
} else {
//Portrait (or square): widen the clip volume on Y
float y = height / ((float) width / bitmap.getWidth() * bitmap.getHeight());
Matrix.orthoM(matrix, 0, -1, 1, -y, y, -1, 1);
}
}


//3. Upload it to the shader when drawing

GLES20.glUniformMatrix4fv(uMatrix, 1, false, matrix, 0);

主要代码如下:BitmapTexture.java

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161


import android.content.Context;
import android.graphics.Bitmap;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.opengl.Matrix;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;


//Texture drawn via the coordinate mapping below, with an orthographic
//projection so the bitmap keeps its aspect ratio on any surface.
public class BitmapTexture {


//Vertex coordinates: a full-screen quad in normalized device coordinates
static float vertexData[] = { // in counterclockwise order:
-1f, -1f, 0.0f, // bottom left
1f, -1f, 0.0f, // bottom right
-1f, 1f, 0.0f, // top left
1f, 1f, 0.0f, // top right
};

//Texture coordinates, one per vertex above (maps the bitmap onto the quad)
static float textureData[] = { // in counterclockwise order:
0f, 1f, 0.0f, // bottom left
1f, 1f, 0.0f, // bottom right
0f, 0f, 0.0f, // top left
1f, 0f, 0.0f, // top right
};

//Number of components consumed per vertex
static final int COORDS_PER_VERTEX = 3;

private final int vertexCount = vertexData.length / COORDS_PER_VERTEX;
//Byte stride of one vertex
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex


private Context context;

//Vertex positions
private FloatBuffer vertexBuffer;
//Texture coordinates
private FloatBuffer textureBuffer;
private int program;
private int avPosition;
//Texture coordinate attribute location
private int afPosition;
//Orthographic projection uniform location and matrix
private int uMatrix;
private float[] matrix = new float[4 * 4];

//Id of the texture to render
private int imageTextureId;


private Bitmap bitmap;

public void setBitmap(Bitmap bitmap) {
this.bitmap = bitmap;
}

//Prepares native-order direct buffers for the vertex and texture data
//(GLES reads vertex attributes from direct buffers).
public BitmapTexture(Context context) {
this.context = context;

vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(vertexData);
vertexBuffer.position(0);

textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(textureData);
textureBuffer.position(0);
}


//Compiles/links the shaders, looks up attribute/uniform locations and
//uploads the bitmap texture.
public void onSurfaceCreated() {
String vertexSource = ShaderUtil.readRawTxt(context, R.raw.vertex_shader_m);
String fragmentSource = ShaderUtil.readRawTxt(context, R.raw.fragment_shader);
program = ShaderUtil.createProgram(vertexSource, fragmentSource);

if (program > 0) {
//Vertex position attribute
avPosition = GLES20.glGetAttribLocation(program, "av_Position");
//Texture coordinate attribute
afPosition = GLES20.glGetAttribLocation(program, "af_Position");

uMatrix = GLES20.glGetUniformLocation(program, "u_Matrix");

imageTextureId = createImageTexture();
}
}

//Recomputes the orthographic projection for the new surface size so the
//bitmap is not stretched in either orientation.
public void onSurfaceChanged(int width, int height) {
if (width > height) {
//Landscape: widen the clip volume on X
float x = width / ((float) height / bitmap.getHeight() * bitmap.getWidth());
Matrix.orthoM(matrix, 0, -x, x, -1, 1, -1, 1);
} else {
//Portrait (or square): widen the clip volume on Y
float y = height / ((float) width / bitmap.getWidth() * bitmap.getHeight());
Matrix.orthoM(matrix, 0, -1, 1, -y, y, -1, 1);
}
}


//Draws the bitmap texture with the current projection matrix.
public void draw() {

//Select the program
GLES20.glUseProgram(program);

GLES20.glUniformMatrix4fv(uMatrix, 1, false, matrix, 0);

//Bind the texture to render
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, imageTextureId);

GLES20.glEnableVertexAttribArray(avPosition);
GLES20.glEnableVertexAttribArray(afPosition);
//Feed vertex positions
GLES20.glVertexAttribPointer(avPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
//Feed texture coordinates
GLES20.glVertexAttribPointer(afPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, textureBuffer);

//GL_TRIANGLE_STRIP reuses the quad's shared vertices
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexCount);
GLES20.glDisableVertexAttribArray(avPosition);
GLES20.glDisableVertexAttribArray(afPosition);

//Unbind the texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}


//Creates a GL_TEXTURE_2D texture and uploads the bitmap into it.
private int createImageTexture() {
int[] textureIds = new int[1];
//Generate the texture
GLES20.glGenTextures(1, textureIds, 0);
if (textureIds[0] == 0) {
return 0;
}
//Bind it
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIds[0]);
//Wrap: repeat outside [0,1] (s==x, t==y)
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
//Filter: linear for minification and magnification
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

//Upload the bitmap pixels
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);

//Unbind the texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
return textureIds[0];
}
}

注意: 在使用FBO GLES20.glTexImage2D分配内存大小的时候,需要根据横竖屏来设置值。不然计算出来的值和渲染的宽高不一样,渲染就会出现变形。

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53

/**
 * Recomputes the orthographic projection for the new surface size and
 * (re)creates the FBO so its backing texture matches the surface size.
 */
public void onSurfaceChanged(int width, int height) {
    if (width > height) {
        // Landscape: widen the clip volume on X.
        float x = width / ((float) height / bitmap.getHeight() * bitmap.getWidth());
        Matrix.orthoM(matrix, 0, -x, x, -1, 1, -1, 1);
    } else {
        // Portrait (or square): widen the clip volume on Y.
        float y = height / ((float) width / bitmap.getWidth() * bitmap.getHeight());
        Matrix.orthoM(matrix, 0, -1, 1, -y, y, -1, 1);
    }

    if (fboId != 0) {
        // Release the previous FBO and the texture attached to it.
        // FIX: the original deleted imageTextureId (the source bitmap
        // texture) here, which leaked fboTextureId and destroyed the image.
        GLES20.glDeleteFramebuffers(1, new int[]{fboId}, 0);
        GLES20.glDeleteTextures(1, new int[]{fboTextureId}, 0);
    }

    createFBO(width, height);
}


/**
 * Creates a framebuffer whose color attachment is a w x h RGBA texture,
 * so off-screen rendering matches the on-screen surface size.
 */
private void createFBO(int w, int h) {
    if (bitmap == null) {
        throw new IllegalArgumentException("bitmap is null");
    }

    //1. Generate the FBO
    int[] fbos = new int[1];
    GLES20.glGenFramebuffers(1, fbos, 0);
    fboId = fbos[0];
    //2. Bind it
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);

    //3. Create the texture that will receive the rendering
    fboTextureId = createTexture();

    //4. Attach the texture as the FBO's color buffer
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
            GLES20.GL_TEXTURE_2D, fboTextureId, 0);

    //5. Allocate texture storage sized to the surface
    //   (createTexture leaves the texture bound, so this targets it)
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, w, h,
            0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);

    //6. Verify framebuffer completeness
    if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER)
            != GLES20.GL_FRAMEBUFFER_COMPLETE) {
        Log.e("zzz", "glFramebufferTexture2D error");
    }
    //7. Unbind the texture and the FBO
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}

OpenGLES帧缓冲FBO

发表于 2018-08-17 | 分类于 OpenGLES |
字数统计: 2,122字 | 阅读时长 ≈ 11分钟

FBO

Frame Buffer object

为什么要用FBO

我们需要对纹理进行多次渲染采样时,而这些渲染采样是不需要展示给用户看的,所以我们就可以用一个单独的缓冲对象(离屏渲染)来存储我们的这几次渲染采样的结果,等处理完后才显示到窗口上

优势

提高渲染效率,避免闪屏,可以很方便的实现纹理共享等。

渲染方式

  1. 渲染到纹理(Texture)- 图像渲染
  2. 渲染到缓冲区(Render)- 深度测试和模板测试

FBO纹理的坐标系

FBO坐标系

渲染到纹理

工作流程

创建FBO的步骤:

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26

//1. 创建FBO
int[] fbos = new int[1];
GLES20.glGenFramebuffers(1, fbos, 0);
fboId = fbos[0];

//2. 绑定FBO
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);

//3. 创建FBO纹理
fboTextureId = createTexture();

//4. 把纹理绑定到FBO
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,GLES20.GL_TEXTURE_2D, fboTextureId, 0);

//5. 设置FBO分配内存大小
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA,bitmap.getWidth(), bitmap.getHeight(),0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);

//6. 检测是否绑定从成功
if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER)!= GLES20.GL_FRAMEBUFFER_COMPLETE) {
Log.e("zzz", "glFramebufferTexture2D error");
}

//7. 解绑纹理和FBO
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);

使用FBO的步骤:

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
//1. 绑定fbo
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);

//2. FBO绘制
GLES20.glUseProgram(program);
//绑定渲染纹理
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, imageTextureId);
//...
//解绑纹理
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
//解绑fbo
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);

//3. 根据绑定到fbo上的纹理id,渲染
GLES20.glUseProgram(program);
//绑定渲染纹理
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
//...

示例代码如下:

TexureRender.java

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43

import android.content.Context;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;

/**
 * Renderer that first runs the bitmap through an FBO pass and then
 * draws the FBO's color texture to the screen.
 */
public class TexureRender implements EglSurfaceView.Renderer {
    private BitmapFboTexture bitmapFboTexture;
    private BitmapRenderTexture bitmapRenderTexture;

    public TexureRender(Context context) {
        bitmapFboTexture = new BitmapFboTexture(context);
        bitmapFboTexture.setBitmap(BitmapFactory.decodeResource(context.getResources(), R.mipmap.bg));

        bitmapRenderTexture = new BitmapRenderTexture(context);
    }

    @Override
    public void onSurfaceCreated() {
        bitmapFboTexture.onSurfaceCreated();
        bitmapRenderTexture.onSurfaceCreated();
    }

    @Override
    public void onSurfaceChanged(int width, int height) {
        // Viewport covers the whole surface.
        GLES20.glViewport(0, 0, width, height);

        bitmapFboTexture.onSurfaceChanged(width, height);
        bitmapRenderTexture.onSurfaceChanged(width, height);
    }

    @Override
    public void onDrawFrame() {
        // FIX: set the clear color BEFORE clearing — the original called
        // glClear first, so the first frame cleared with the default color.
        GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        // Off-screen pass into the FBO.
        bitmapFboTexture.draw();
        // On-screen pass using the texture the FBO rendered into.
        bitmapRenderTexture.draw(bitmapFboTexture.getFboTextureId());
    }
}

FBO处理类: BitmapFboTexture.java

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217

import android.content.Context;
import android.graphics.Bitmap;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.util.Log;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;


//Off-screen (FBO) pass: renders the bitmap into a framebuffer-attached
//texture instead of the screen; the resulting texture id is consumed by
//a second, on-screen render pass.
public class BitmapFboTexture {


//Vertex coordinates: a full-screen quad in normalized device coordinates
static float vertexData[] = { // in counterclockwise order:
-1f, -1f, 0.0f, // bottom left
1f, -1f, 0.0f, // bottom right
-1f, 1f, 0.0f, // top left
1f, 1f, 0.0f, // top right
};

//Normal (on-screen) texture coordinates, kept for reference
// static float textureData[] = { // in counterclockwise order:
// 0f, 1f, 0.0f, // bottom left
// 1f, 1f, 0.0f, // bottom right
// 0f, 0f, 0.0f, // top left
// 1f, 0f, 0.0f, // top right
// };

//FBO texture coordinates — note the t axis is flipped relative to the
//normal coordinates above (the FBO's texture coordinate origin differs
//from the window's)
static float textureData[] = { // in counterclockwise order:
0f, 0f, 0.0f, // bottom left
1f, 0f, 0.0f, // bottom right
0f, 1f, 0.0f, // top left
1f, 1f, 0.0f, // top right
};

//Number of components consumed per vertex
static final int COORDS_PER_VERTEX = 3;

private final int vertexCount = vertexData.length / COORDS_PER_VERTEX;
//Byte stride of one vertex
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex


private Context context;

//Vertex positions
private FloatBuffer vertexBuffer;
//Texture coordinates
private FloatBuffer textureBuffer;
private int program;
private int avPosition;
//Texture coordinate attribute location
private int afPosition;
//Id of the source texture to render (the bitmap)
private int imageTextureId;
//Id of the texture attached to the FBO (the render target)
private int fboTextureId;
//The framebuffer object id
private int fboId;

private Bitmap bitmap;

public void setBitmap(Bitmap bitmap) {
this.bitmap = bitmap;
}

//Prepares native-order direct buffers for the vertex and texture data.
public BitmapFboTexture(Context context) {
this.context = context;

vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(vertexData);
vertexBuffer.position(0);

textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(textureData);
textureBuffer.position(0);
}


//Compiles/links the shaders, creates the FBO and uploads the bitmap.
public void onSurfaceCreated() {
String vertexSource = ShaderUtil.readRawTxt(context, R.raw.vertex_shader);
String fragmentSource = ShaderUtil.readRawTxt(context, R.raw.fragment_shader);
program = ShaderUtil.createProgram(vertexSource, fragmentSource);

if (program > 0) {
//Vertex position attribute
avPosition = GLES20.glGetAttribLocation(program, "av_Position");
//Texture coordinate attribute
afPosition = GLES20.glGetAttribLocation(program, "af_Position");
createFBO();
imageTextureId = createImageTexture();
}
}

//Renders the bitmap texture into the FBO (not the screen).
public void draw() {

//Bind the FBO so all drawing goes into its attached texture
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);

//Select the program
GLES20.glUseProgram(program);

//Bind the source texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, imageTextureId);

GLES20.glEnableVertexAttribArray(avPosition);
GLES20.glEnableVertexAttribArray(afPosition);
//Feed vertex positions
GLES20.glVertexAttribPointer(avPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
//Feed texture coordinates
GLES20.glVertexAttribPointer(afPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, textureBuffer);
//GL_TRIANGLE_STRIP reuses the quad's shared vertices
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexCount);
GLES20.glDisableVertexAttribArray(avPosition);
GLES20.glDisableVertexAttribArray(afPosition);

//Unbind the texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);

//Unbind the FBO (subsequent drawing targets the default framebuffer)
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}


//Creates the framebuffer with a bitmap-sized RGBA texture attached as
//its color buffer.
private void createFBO() {
if (bitmap == null) {
throw new IllegalArgumentException("bitmap is null");
}

//1. Generate the FBO
int[] fbos = new int[1];
GLES20.glGenFramebuffers(1, fbos, 0);
fboId = fbos[0];
//2. Bind it
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);

//3. Create the texture that will receive the rendering
fboTextureId = createTexture();

//4. Attach the texture as the FBO's color buffer
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_TEXTURE_2D, fboTextureId, 0);

//5. Allocate texture storage (createTexture leaves the texture bound)
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, bitmap.getWidth(), bitmap.getHeight(),
0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);

//6. Verify framebuffer completeness
if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER)
!= GLES20.GL_FRAMEBUFFER_COMPLETE) {
Log.e("zzz", "glFramebufferTexture2D error");
}
//7. Unbind the texture and the FBO
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}

//Creates a GL_TEXTURE_2D texture and uploads the bitmap into it.
private int createImageTexture() {
int[] textureIds = new int[1];
//Generate the texture
GLES20.glGenTextures(1, textureIds, 0);
if (textureIds[0] == 0) {
return 0;
}
//Bind it
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIds[0]);
//Wrap: repeat outside [0,1] (s==x, t==y)
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
//Filter: linear for minification and magnification
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

//Upload the bitmap pixels
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);

//Unbind the texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
return textureIds[0];
}


//Creates an empty texture (no storage yet) for use as the FBO's color
//attachment; it is left bound when this returns.
private int createTexture() {
int[] textureIds = new int[1];
//Generate the texture
GLES20.glGenTextures(1, textureIds, 0);
if (textureIds[0] == 0) {
return 0;
}
//Bind it
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIds[0]);
//Wrap: repeat outside [0,1] (s==x, t==y)
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
//Filter: linear for minification and magnification
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
return textureIds[0];
}

//Exposes the FBO's texture id for the on-screen pass.
public int getFboTextureId() {
return fboTextureId;
}

public void onSurfaceChanged(int width, int height) {

}
}

渲染类:BitmapRenderTexture.java

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104

import android.content.Context;
import android.opengl.GLES20;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;


//On-screen pass: draws a given texture id (here, the FBO's color texture)
//to the default framebuffer as a full-screen quad.
public class BitmapRenderTexture {


//Vertex coordinates: a full-screen quad in normalized device coordinates
static float vertexData[] = { // in counterclockwise order:
-1f, -1f, 0.0f, // bottom left
1f, -1f, 0.0f, // bottom right
-1f, 1f, 0.0f, // top left
1f, 1f, 0.0f, // top right
};

//Texture coordinates, one per vertex above (maps the texture onto the quad)
static float textureData[] = { // in counterclockwise order:
0f, 1f, 0.0f, // bottom left
1f, 1f, 0.0f, // bottom right
0f, 0f, 0.0f, // top left
1f, 0f, 0.0f, // top right
};

//Number of components consumed per vertex
static final int COORDS_PER_VERTEX = 3;

private final int vertexCount = vertexData.length / COORDS_PER_VERTEX;
//Byte stride of one vertex
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex

private Context context;
//Vertex positions
private FloatBuffer vertexBuffer;
//Texture coordinates
private FloatBuffer textureBuffer;
private int program;
private int avPosition;
//Texture coordinate attribute location
private int afPosition;


//Prepares native-order direct buffers for the vertex and texture data.
public BitmapRenderTexture(Context context) {
this.context = context;

vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(vertexData);
vertexBuffer.position(0);

textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(textureData);
textureBuffer.position(0);
}


//Compiles/links the shaders and looks up attribute locations.
public void onSurfaceCreated() {
String vertexSource = ShaderUtil.readRawTxt(context, R.raw.vertex_shader);
String fragmentSource = ShaderUtil.readRawTxt(context, R.raw.fragment_shader);
program = ShaderUtil.createProgram(vertexSource, fragmentSource);

if (program > 0) {
//Vertex position attribute
avPosition = GLES20.glGetAttribLocation(program, "av_Position");
//Texture coordinate attribute
afPosition = GLES20.glGetAttribLocation(program, "af_Position");
}
}

//Draws the given texture as a full-screen quad.
public void draw(int textureId) {

//Select the program
GLES20.glUseProgram(program);

//Bind the texture to render
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);

GLES20.glEnableVertexAttribArray(avPosition);
GLES20.glEnableVertexAttribArray(afPosition);
//Feed vertex positions
GLES20.glVertexAttribPointer(avPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
//Feed texture coordinates
GLES20.glVertexAttribPointer(afPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, textureBuffer);
//GL_TRIANGLE_STRIP reuses the quad's shared vertices
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexCount);
GLES20.glDisableVertexAttribArray(avPosition);
GLES20.glDisableVertexAttribArray(afPosition);

//Unbind the texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}

public void onSurfaceChanged(int width, int height) {
GLES20.glViewport(0, 0, width, height);
}
}

OpenGLES顶点缓冲VBO

发表于 2018-08-16 | 分类于 OpenGLES |
字数统计: 1,122字 | 阅读时长 ≈ 6分钟

VBO

Vertex Buffer object

为什么要用VBO

不使用VBO时,我们每次绘制( glDrawArrays )图形时都是从本地内存处获取顶点数据然后传输给OpenGL来绘制,这样就会频繁的操作CPU->GPU增大开销,从而降低效率。
使用VBO,我们就能把顶点数据缓存到GPU开辟的一段内存中,然后使用时不必再从本地获取,而是直接从显存中获取,这样就能提升绘制的效率。

创建VBO的主要步骤:

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
//1. Generate the VBO and keep its id
int[] vbos = new int[1];
GLES20.glGenBuffers(1, vbos, 0);
vboId = vbos[0];

//2. Bind the VBO by id
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);

//3. Allocate the VBO's storage.
//   FIX: the original sized it from an undeclared `vertex` array;
//   it must match the data uploaded in step 4, i.e. vertexData.
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4, null, GLES20.GL_STATIC_DRAW);

//4. Upload the vertex data into the VBO
GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, 0, vertexData.length * 4, vertexBuffer);

//5. Unbind the VBO
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

使用VBO的主要步骤:

1
2
3
4
5
6
7
8
//1. Bind the VBO by id
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);

//2. Point the attribute at the VBO data (with a VBO bound, the last
//   argument is a byte offset into the VBO, not a client-side buffer)
GLES20.glVertexAttribPointer(vPosition, 2, GLES20.GL_FLOAT, false, 8, 0);

//3. Unbind the VBO
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

我使用绘制图片纹理的代码来进行改造为VBO,OpenGLES 绘制图片纹理

改造的只有BitmapTexture这个类

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;
import android.opengl.GLUtils;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;


// Renders a bitmap as a 2D texture mapped onto a full-surface quad,
// with vertex/texture coordinates stored in a single interleaved-by-region VBO.
public class BitmapTexture {


    // Quad vertex positions in NDC (triangle strip order).
    static float vertexData[] = {   // in counterclockwise order:
            -1f, -1f, 0.0f, // bottom left
            1f, -1f, 0.0f,  // bottom right
            -1f, 1f, 0.0f,  // top left
            1f, 1f, 0.0f,   // top right
    };

    // Texture coordinates mapped 1:1 to the vertices above.
    // Note: texture V axis is flipped relative to NDC Y, hence the ordering.
    static float textureData[] = {   // in counterclockwise order:
            0f, 1f, 0.0f, // bottom left
            1f, 1f, 0.0f, // bottom right
            0f, 0f, 0.0f, // top left
            1f, 0f, 0.0f, // top right
    };

    // Components fetched per vertex (x, y, z).
    static final int COORDS_PER_VERTEX = 3;

    private final int vertexCount = vertexData.length / COORDS_PER_VERTEX;
    // Byte stride between consecutive vertices (4 bytes per float).
    private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex


    private Context context;

    // Client-side staging buffer for vertex positions.
    private FloatBuffer vertexBuffer;
    // Client-side staging buffer for texture coordinates.
    private FloatBuffer textureBuffer;
    private int program;
    private int avPosition;
    // Attribute location for texture coordinates.
    private int afPosition;
    // GL texture object id.
    private int textureId;
    // GL vertex buffer object id.
    private int vboId;

    /**
     * Prepares direct native-order float buffers for both coordinate arrays.
     * No GL calls are made here; GL setup happens in {@link #onSurfaceCreated()}.
     *
     * @param context used to load shader sources and the bitmap resource
     */
    public BitmapTexture(Context context) {
        this.context = context;

        vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(vertexData);
        vertexBuffer.position(0);


        textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(textureData);
        textureBuffer.position(0);
    }


    /**
     * GL-thread initialization: compiles/links the shader program, resolves
     * attribute locations, uploads both coordinate arrays into one VBO, and
     * decodes + uploads the bitmap into a new 2D texture.
     */
    public void onSurfaceCreated() {
        String vertexSource = ShaderUtil.readRawTxt(context, R.raw.vertex_shader);
        String fragmentSource = ShaderUtil.readRawTxt(context, R.raw.fragment_shader);
        program = ShaderUtil.createProgram(vertexSource, fragmentSource);

        if (program > 0) {
            // Resolve the vertex-position attribute.
            avPosition = GLES20.glGetAttribLocation(program, "av_Position");
            // Resolve the texture-coordinate attribute.
            afPosition = GLES20.glGetAttribLocation(program, "af_Position");

            // Upload both coordinate arrays into the VBO.
            createVBO();

            int[] textureIds = new int[1];
            // Create the texture object.
            GLES20.glGenTextures(1, textureIds, 0);
            if (textureIds[0] == 0) {
                return;
            }
            textureId = textureIds[0];
            // Bind so the parameter/upload calls below target this texture.
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
            // Wrap mode for coordinates outside [0,1] (s==x, t==y; GL_REPEAT tiles).
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
            // Linear filtering for both minification and magnification.
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

            Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), R.mipmap.bg);

            if (bitmap == null) {
                // Leave the texture unbound even on the failure path.
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
                return;
            }
            // Upload the bitmap pixels to the bound 2D texture.
            GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
            // Fix: the pixel data now lives in GL; free the bitmap's native
            // memory immediately instead of waiting for GC.
            bitmap.recycle();
            // Unbind to leave a clean GL state.
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        }
    }

    /**
     * Draws the textured quad using the VBO for both attributes.
     * Must run on the GL thread after {@link #onSurfaceCreated()}.
     */
    public void draw() {
        GLES20.glUseProgram(program);
        // Bind the texture uploaded in onSurfaceCreated (it is no longer
        // left bound after initialization).
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        GLES20.glEnableVertexAttribArray(avPosition);
        GLES20.glEnableVertexAttribArray(afPosition);


        // Direct (non-VBO) alternative, kept for reference:
        // GLES20.glVertexAttribPointer(avPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
        // GLES20.glVertexAttribPointer(afPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, textureBuffer);

        // Source both attributes from the VBO instead.
        useVboDraw();

        // GL_TRIANGLE_STRIP reuses shared vertices: 4 vertices -> 2 triangles.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexCount);
        GLES20.glDisableVertexAttribArray(avPosition);
        GLES20.glDisableVertexAttribArray(afPosition);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);

    }

    /**
     * Creates one VBO sized for both arrays and uploads vertex positions
     * at offset 0 followed by texture coordinates.
     */
    private void createVBO() {
        // 1. Generate the buffer object.
        int[] vbos = new int[1];
        GLES20.glGenBuffers(vbos.length, vbos, 0);
        vboId = vbos[0];
        // 2. Bind it so subsequent buffer calls target it.
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
        // 3. Allocate storage for positions + texture coordinates (4 bytes/float).
        GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4 + textureData.length * 4, null, GLES20.GL_STATIC_DRAW);
        // 4. Upload the two regions back-to-back.
        GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, 0, vertexData.length * 4, vertexBuffer);
        GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4, textureData.length * 4, textureBuffer);
        // 5. Unbind to avoid accidental writes.
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
    }

    /**
     * Points both attributes at the VBO regions uploaded by {@link #createVBO()}.
     * The last glVertexAttribPointer argument is a byte offset into the VBO.
     */
    private void useVboDraw() {
        // 1. Bind the VBO.
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
        // 2. Positions start at byte offset 0; texture coords follow them.
        GLES20.glVertexAttribPointer(avPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, 0);
        GLES20.glVertexAttribPointer(afPosition, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexData.length * 4);
        // 3. Unbind the VBO.
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
    }
}
12…8
曾大稳丶

曾大稳丶

80 日志
11 分类
20 标签
© 2018 — 2019 曾大稳丶
由 Hexo 强力驱动
|
主题 — NexT.Mist v5.1.4
访问人数 人 总访问量 次