Android Live Stream Pushing with libRtmp

  1. Initialize RTMP
//Allocate the RTMP context
RTMP *rtmp = RTMP_Alloc();
//Initialize it
RTMP_Init(rtmp);
//Set the push URL
RTMP_SetupURL(rtmp, url);
//Enable write mode (publishing)
RTMP_EnableWrite(rtmp);
//Connect to the server
RTMP_Connect(rtmp, NULL);
//Connect to the stream
RTMP_ConnectStream(rtmp, 0);

//Push loop (AAC, H264): start sending packets
while (1) {
    //packet is an RTMPPacket filled by the encapsulation code below
    int result = RTMP_SendPacket(rtmp, packet, 1);
    RTMPPacket_Free(packet);
    free(packet);
    packet = NULL;
}

//Close the connection
RTMP_Close(rtmp);
//Release resources
RTMP_Free(rtmp);
rtmp = NULL;
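The snippet above omits error handling and does not show where packet comes from. Below is a minimal sketch of how the push thread might tie these calls to the packet queue used by the RtmpPush code in the next sections. The url and startTime members, the running flag, and a blocking getRtmpPacket() counterpart to putRtmpPacket() are assumptions for illustration, not part of the original code.

//Sketch of a push thread: connect, drain the packet queue, clean up.
//Assumes url, startTime, running and a blocking queue->getRtmpPacket().
void RtmpPush::pushThread() {
    rtmp = RTMP_Alloc();
    RTMP_Init(rtmp);
    rtmp->Link.timeout = 10;                         //connect/read timeout, seconds

    //librtmp calls return 0 (FALSE) on failure
    bool ok = RTMP_SetupURL(rtmp, url) != 0;
    if (ok) RTMP_EnableWrite(rtmp);
    if (ok) ok = RTMP_Connect(rtmp, NULL) != 0;
    if (ok) ok = RTMP_ConnectStream(rtmp, 0) != 0;

    if (ok) {
        startTime = RTMP_GetTime();                  //base for relative timestamps
        while (running) {
            //blocks until the encoder thread queues a packet (assumed helper)
            RTMPPacket *packet = queue->getRtmpPacket();
            if (!packet) continue;
            packet->m_nInfoField2 = rtmp->m_stream_id;
            RTMP_SendPacket(rtmp, packet, 1);        //same call as in the loop above
            RTMPPacket_Free(packet);
            free(packet);
        }
    }

    RTMP_Close(rtmp);
    RTMP_Free(rtmp);
    rtmp = NULL;
}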
  2. H264 packet encapsulation. Before each keyframe is sent, the SPS and PPS must be sent first, and every frame (I, P, SPS, PPS) needs header information prepended (a caller-side sketch follows the code in 2.2).

Capture the camera preview data and encode it to H264; encode the PCM data to AAC.

2.1 SPS/PPS data

void RtmpPush::pushSPSPPS(char *sps, int spsLen, char *pps, int ppsLen) {
    if (!this->queue) return;
    int bodySize = spsLen + ppsLen + 16;
    RTMPPacket *rtmpPacket = static_cast<RTMPPacket *>(malloc(sizeof(RTMPPacket)));
    RTMPPacket_Alloc(rtmpPacket, bodySize);
    RTMPPacket_Reset(rtmpPacket);

    char *body = rtmpPacket->m_body;

    int i = 0;
    //frame type (4 bits) and CodecID (4 bits) packed into one byte
    //frame type: 1 = keyframe, 2 = inter frame
    //CodecID: 7 = AVC
    body[i++] = 0x17;

    //AVCPacketType (1 byte): 0x00 = AVC sequence header
    body[i++] = 0x00;
    //CompositionTime (3 bytes): 0 for the sequence header
    body[i++] = 0x00;
    body[i++] = 0x00;
    body[i++] = 0x00;

    //configurationVersion: 1 byte, always 1
    body[i++] = 0x01;

    //AVCProfileIndication: profile, 1 byte, sps[1]
    body[i++] = sps[1];

    //profile_compatibility: 1 byte, sps[2]
    body[i++] = sps[2];

    //AVCLevelIndication: level, 1 byte, sps[3]
    body[i++] = sps[3];

    //lengthSizeMinusOne: number of bytes used for the NALU length field, 1 byte
    body[i++] = 0xff;

    //number of SPS, 1 byte
    body[i++] = 0xe1;
    //SPS length, 2 bytes
    body[i++] = (spsLen >> 8) & 0xff;
    body[i++] = spsLen & 0xff;

    //SPS data
    memcpy(&body[i], sps, spsLen);
    i += spsLen;
    //number of PPS, 1 byte
    body[i++] = 0x01;
    //PPS length, 2 bytes
    body[i++] = (ppsLen >> 8) & 0xff;
    body[i++] = ppsLen & 0xff;
    //PPS data
    memcpy(&body[i], pps, ppsLen);

    rtmpPacket->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    rtmpPacket->m_nBodySize = bodySize;
    rtmpPacket->m_nTimeStamp = 0;
    rtmpPacket->m_hasAbsTimestamp = 0;
    rtmpPacket->m_nChannel = 0x04;          //channel used for audio/video data
    rtmpPacket->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
    rtmpPacket->m_nInfoField2 = this->rtmp->m_stream_id;

    queue->putRtmpPacket(rtmpPacket);
}

2.2 H264 data

void RtmpPush::pushVideoData(char *data, int dataLen, bool keyFrame) {
    if (!this->queue) return;
    int bodySize = dataLen + 9;
    RTMPPacket *rtmpPacket = static_cast<RTMPPacket *>(malloc(sizeof(RTMPPacket)));
    RTMPPacket_Alloc(rtmpPacket, bodySize);
    RTMPPacket_Reset(rtmpPacket);

    char *body = rtmpPacket->m_body;

    int i = 0;
    //frame type (4 bits) and CodecID (4 bits) packed into one byte
    //frame type: 1 = keyframe, 2 = inter frame
    //CodecID: 7 = AVC
    if (keyFrame) {
        body[i++] = 0x17;
    } else {
        body[i++] = 0x27;
    }

    //AVCPacketType (1 byte): 0x01 = NALU
    body[i++] = 0x01;
    //CompositionTime (3 bytes): 0
    body[i++] = 0x00;
    body[i++] = 0x00;
    body[i++] = 0x00;

    //NALU length, 4 bytes
    body[i++] = (dataLen >> 24) & 0xff;
    body[i++] = (dataLen >> 16) & 0xff;
    body[i++] = (dataLen >> 8) & 0xff;
    body[i++] = dataLen & 0xff;

    //NALU data (without the Annex-B start code)
    memcpy(&body[i], data, dataLen);

    rtmpPacket->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    rtmpPacket->m_nBodySize = bodySize;
    //elapsed time since pushing started
    rtmpPacket->m_nTimeStamp = RTMP_GetTime() - this->startTime;
    //0 = the timestamp is relative, not absolute
    rtmpPacket->m_hasAbsTimestamp = 0;
    rtmpPacket->m_nChannel = 0x04;          //channel used for audio/video data
    rtmpPacket->m_headerType = RTMP_PACKET_SIZE_LARGE;
    rtmpPacket->m_nInfoField2 = this->rtmp->m_stream_id;

    queue->putRtmpPacket(rtmpPacket);
}
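pushVideoData expects a single NALU without the Annex-B start code, since the 4-byte length field replaces it. The caller-side sketch promised in section 2 is shown below; the onVideoFrame helper name and the cached sps/pps/spsLen/ppsLen members are illustrative assumptions (not part of the original class), and it assumes the encoder delivers one NALU per buffer prefixed with 00 00 00 01 or 00 00 01.

//Hypothetical caller-side helper: strips the Annex-B start code, re-sends
//SPS/PPS before every keyframe, then pushes the NALU. Assumes one NALU per
//buffer and SPS/PPS cached earlier (e.g. from csd-0/csd-1).
void RtmpPush::onVideoFrame(char *data, int dataLen) {
    //skip the 00 00 00 01 or 00 00 01 start code if present
    int offset = 0;
    if (dataLen > 4 && data[0] == 0 && data[1] == 0) {
        if (data[2] == 1) offset = 3;
        else if (data[2] == 0 && data[3] == 1) offset = 4;
    }
    char *nalu = data + offset;
    int naluLen = dataLen - offset;

    int nalType = nalu[0] & 0x1f;        //low 5 bits of the first NALU byte
    bool keyFrame = (nalType == 5);      //5 = IDR slice

    if (keyFrame) {
        //as described above: SPS/PPS must precede every keyframe
        pushSPSPPS(sps, spsLen, pps, ppsLen);
    }
    pushVideoData(nalu, naluLen, keyFrame);
}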
  3. AAC packet encapsulation: a header must be added.

void RtmpPush::pushAudioData(char *data, int dataLen) {
    if (!this->queue) return;
    int bodySize = dataLen + 2;
    RTMPPacket *rtmpPacket = static_cast<RTMPPacket *>(malloc(sizeof(RTMPPacket)));
    RTMPPacket_Alloc(rtmpPacket, bodySize);
    RTMPPacket_Reset(rtmpPacket);

    char *body = rtmpPacket->m_body;
    //bits 1-4: sound format, 10 (decimal) = AAC, i.e. 0xA
    //bits 5-6: sample rate, 0 = 5.5 kHz, 1 = 11 kHz, 2 = 22 kHz, 3 = 44 kHz
    //bit 7: sample size, 0 = 8-bit, 1 = 16-bit
    //bit 8: channels, 0 = mono, 1 = stereo
    //here: 44100 Hz, stereo, 16-bit -> binary 1111 = 0xF
    body[0] = 0xAF;

    //AACPacketType: 0x00 = AAC sequence header, 0x01 = AAC raw data
    //raw AAC frames are pushed here, so 0x01 is used
    body[1] = 0x01;

    //AAC data
    memcpy(&body[2], data, dataLen);

    rtmpPacket->m_packetType = RTMP_PACKET_TYPE_AUDIO;
    rtmpPacket->m_nBodySize = bodySize;
    //elapsed time since pushing started
    rtmpPacket->m_nTimeStamp = RTMP_GetTime() - this->startTime;
    //0 = the timestamp is relative, not absolute
    rtmpPacket->m_hasAbsTimestamp = 0;
    rtmpPacket->m_nChannel = 0x04;          //channel used for audio/video data
    rtmpPacket->m_headerType = RTMP_PACKET_SIZE_LARGE;
    rtmpPacket->m_nInfoField2 = this->rtmp->m_stream_id;

    queue->putRtmpPacket(rtmpPacket);
}
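The function above always marks the packet as raw AAC (0x01). Many servers and players also expect an AAC sequence header (AACPacketType 0x00 carrying the 2-byte AudioSpecificConfig) once before the first raw frame. The following is only a sketch of that optional packet, assuming AAC-LC at 44100 Hz stereo, for which the AudioSpecificConfig is 0x12 0x10; the pushAudioHeader name is not part of the original class.

//Sketch of an AAC sequence header packet (sent once, before any raw AAC data).
//Assumes AAC-LC, 44100 Hz, stereo: AudioSpecificConfig = 0x12 0x10
//(object type 2, sampling index 4, channel configuration 2).
void RtmpPush::pushAudioHeader() {
    if (!this->queue) return;
    int bodySize = 4;
    RTMPPacket *rtmpPacket = static_cast<RTMPPacket *>(malloc(sizeof(RTMPPacket)));
    RTMPPacket_Alloc(rtmpPacket, bodySize);
    RTMPPacket_Reset(rtmpPacket);

    char *body = rtmpPacket->m_body;
    body[0] = 0xAF;     //same audio tag byte as above: AAC, 44 kHz, 16-bit, stereo
    body[1] = 0x00;     //AACPacketType: 0x00 = sequence header
    body[2] = 0x12;     //AudioSpecificConfig byte 1
    body[3] = 0x10;     //AudioSpecificConfig byte 2

    rtmpPacket->m_packetType = RTMP_PACKET_TYPE_AUDIO;
    rtmpPacket->m_nBodySize = bodySize;
    rtmpPacket->m_nTimeStamp = 0;
    rtmpPacket->m_hasAbsTimestamp = 0;
    rtmpPacket->m_nChannel = 0x04;
    rtmpPacket->m_headerType = RTMP_PACKET_SIZE_LARGE;
    rtmpPacket->m_nInfoField2 = this->rtmp->m_stream_id;

    queue->putRtmpPacket(rtmpPacket);
}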
  4. Getting SPS and PPS from Android MediaCodec
int outputBufferIndex = videoEncodec.dequeueOutputBuffer(videoBufferinfo, 0);
if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    // csd-0 holds the SPS, csd-1 holds the PPS
    ByteBuffer spsb = videoEncodec.getOutputFormat().getByteBuffer("csd-0");
    byte[] sps = new byte[spsb.remaining()];
    spsb.get(sps, 0, sps.length);
    Log.e("zzz", "sps: " + ByteUtil.bytesToHexSpaceString(sps));

    ByteBuffer ppsb = videoEncodec.getOutputFormat().getByteBuffer("csd-1");
    byte[] pps = new byte[ppsb.remaining()];
    ppsb.get(pps, 0, pps.length);
    Log.e("zzz", "pps: " + ByteUtil.bytesToHexSpaceString(pps));
}
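In practice the csd-0 and csd-1 buffers obtained above usually still start with the 00 00 00 01 start code, which must not end up inside the AVC sequence header. Below is a rough sketch of a JNI bridge that strips it before calling pushSPSPPS; the JNI function name, package, and the global rtmpPush pointer are placeholders for illustration, not the demo's actual names.

//Hypothetical JNI bridge: receives the csd-0/csd-1 byte arrays from Java,
//skips the leading 00 00 00 01 start code, and forwards the bare SPS/PPS.
#include <jni.h>

extern RtmpPush *rtmpPush;   //assumed global instance created elsewhere

extern "C"
JNIEXPORT void JNICALL
Java_com_example_push_RtmpPushJni_nativePushSpsPps(JNIEnv *env, jobject thiz,
                                                   jbyteArray sps_, jbyteArray pps_) {
    jbyte *sps = env->GetByteArrayElements(sps_, NULL);
    jbyte *pps = env->GetByteArrayElements(pps_, NULL);
    int spsLen = env->GetArrayLength(sps_);
    int ppsLen = env->GetArrayLength(pps_);

    //drop the 4-byte Annex-B start code before packaging
    rtmpPush->pushSPSPPS(reinterpret_cast<char *>(sps) + 4, spsLen - 4,
                         reinterpret_cast<char *>(pps) + 4, ppsLen - 4);

    env->ReleaseByteArrayElements(sps_, sps, 0);
    env->ReleaseByteArrayElements(pps_, pps, 0);
}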

See the full demo: https://github.com/ChinaZeng/RtmpLivePushDemo

-------------The End-------------