
Android screen recording: a quick and simple way to capture the screen, encode it as H.264, and push it over RTMP

Published: 2021-11-10 12:51:57


I recently needed a screen-recording feature that pushes the captured frames to a server. The approach: MediaProjection to grab the screen, MediaCodec to encode it, and RTMP to push the stream.

1. Capturing the screen

Since Android 5.0 (API 21), the platform MediaProjection API can create a virtual display, which is the basis for screen recording.

The first step is to get the screen frames out:

@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private void createVirtualDisplay() {
    Log.d(TAG, "creating virtual display, mediaProjection: " + this.mediaProjection);
    // Frames from the virtual display are written straight into the encoder's input surface.
    VirtualDisplay mVirtualDisplay = mediaProjection.createVirtualDisplay(TAG + "-display",
            width, height, 1, DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC,
            inputSurface, null, null);
    Log.d(TAG, "created virtual display: " + mVirtualDisplay);
}
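The mediaProjection object itself has to be granted by the user first; the post does not show that part. A minimal sketch, assuming this code lives in an Activity and using an arbitrary request code of my own choosing, looks roughly like this:

// Sketch only: REQUEST_MEDIA_PROJECTION is an arbitrary request code chosen for illustration.
private static final int REQUEST_MEDIA_PROJECTION = 1;
private MediaProjectionManager projectionManager;

@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private void requestScreenCapture() {
    projectionManager = (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
    // Shows the system "start capturing" consent dialog.
    startActivityForResult(projectionManager.createScreenCaptureIntent(), REQUEST_MEDIA_PROJECTION);
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_MEDIA_PROJECTION && resultCode == Activity.RESULT_OK) {
        mediaProjection = projectionManager.getMediaProjection(resultCode, data);
    }
}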

2. Encoding the data

Since Android 4.1 (API 16), the platform MediaCodec API can be used for encoding, i.e. what is usually called hardware encoding.

@SuppressLint("NewApi")
private void prepareEncoder() {
    try {
        String MIME_TYPE = "video/avc";
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 1024 * 1000);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);       // frame rate
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);  // one key frame per second
        format.setInteger(MediaFormat.KEY_BITRATE_MODE,
                MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CQ);
        Log.d(TAG, "created video format: " + format);

        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // This surface is the one handed to createVirtualDisplay() above,
        // so screen frames flow directly into the encoder.
        inputSurface = mEncoder.createInputSurface();
        Log.d(TAG, "created input surface: " + inputSurface);
        mEncoder.start();
    } catch (Exception e) {
        Log.d(TAG, "create encoder failed", e);
    }
}
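Note that prepareEncoder() has to run before createVirtualDisplay(), because the virtual display needs the encoder's input surface. Also, constant-quality mode (BITRATE_MODE_CQ) is not supported by the AVC hardware encoder on many devices; a defensive fallback, not part of the original post, could be inserted between createEncoderByType() and configure():

// Hypothetical fallback: if CQ mode is unsupported, drop back to VBR before configure().
MediaCodecInfo.EncoderCapabilities encCaps = mEncoder.getCodecInfo()
        .getCapabilitiesForType(MIME_TYPE)
        .getEncoderCapabilities();
if (!encCaps.isBitrateModeSupported(MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CQ)) {
    format.setInteger(MediaFormat.KEY_BITRATE_MODE,
            MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR);
}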

3. Processing the encoded data

The encoder output is packaged into FLV video tags:

@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
private void recordVirtualDisplay() {
    while (true) {
        int eobIndex = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US);
        switch (eobIndex) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                Log.d(TAG, "VideoSenderThread,MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED");
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                // Log.d(TAG, "VideoSenderThread,MediaCodec.INFO_TRY_AGAIN_LATER");
                break;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                Log.d(TAG, "VideoSenderThread,MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:"
                        + mEncoder.getOutputFormat().toString());
                // The output format carries the SPS/PPS, which must be sent before any frame.
                sendAVCDecoderConfigurationRecord(0, mEncoder.getOutputFormat());
                break;
            default:
                Log.d(TAG, "VideoSenderThread,MediaCodec,eobIndex=" + eobIndex);
                if (startTime == 0) {
                    startTime = mBufferInfo.presentationTimeUs / 1000;
                }
                /*
                 * The SPS/PPS were already sent in INFO_OUTPUT_FORMAT_CHANGED,
                 * so buffers flagged BUFFER_FLAG_CODEC_CONFIG are ignored here.
                 */
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0
                        && mBufferInfo.size != 0) {
                    ByteBuffer realData = mEncoder.getOutputBuffers()[eobIndex];
                    // Skip the 4-byte Annex-B start code in front of the NAL unit.
                    realData.position(mBufferInfo.offset + 4);
                    realData.limit(mBufferInfo.offset + mBufferInfo.size);
                    sendRealData((mBufferInfo.presentationTimeUs / 1000) - startTime, realData);
                }
                mEncoder.releaseOutputBuffer(eobIndex, false);
                break;
        }
    }
}
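Two details worth noting. getOutputBuffers() is deprecated from API 21 on; mEncoder.getOutputBuffer(eobIndex) does the same job there. And position(offset + 4) assumes every NAL unit is preceded by a 4-byte Annex-B start code (00 00 00 01); some encoders emit 3-byte start codes, so a slightly more defensive variant, purely illustrative and not from the original post, scans for either form:

// Hypothetical helper: length of the Annex-B start code at `offset`, or 0 if there is none.
private static int startCodeLength(ByteBuffer buf, int offset) {
    if (buf.get(offset) == 0 && buf.get(offset + 1) == 0) {
        if (buf.get(offset + 2) == 1) {
            return 3;                                        // 00 00 01
        }
        if (buf.get(offset + 2) == 0 && buf.get(offset + 3) == 1) {
            return 4;                                        // 00 00 00 01
        }
    }
    return 0;
}

// Usage instead of the fixed "+ 4":
// realData.position(mBufferInfo.offset + startCodeLength(realData, mBufferInfo.offset));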

private void sendAVCDecoderConfigurationRecord(long tms, MediaFormat format) {
    // Build the AVCDecoderConfigurationRecord (SPS/PPS) and wrap it in an FLV video tag.
    byte[] AVCDecoderConfigurationRecord =
            Packager.H264Packager.generateAVCDecoderConfigurationRecord(format);
    int packetLen = Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH
            + AVCDecoderConfigurationRecord.length;
    byte[] finalBuff = new byte[packetLen];
    Packager.FLVPackager.fillFlvVideoTag(finalBuff,
            0,
            true,                 // AVC sequence header
            true,                 // treated as a key frame
            AVCDecoderConfigurationRecord.length);
    System.arraycopy(AVCDecoderConfigurationRecord, 0,
            finalBuff, Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH,
            AVCDecoderConfigurationRecord.length);

    RESFlvData resFlvData = new RESFlvData();
    resFlvData.droppable = false;
    resFlvData.byteBuffer = finalBuff;
    resFlvData.size = finalBuff.length;
    resFlvData.dts = (int) tms;
    resFlvData.flvTagType = FLV_RTMP_PACKET_TYPE_VIDEO;
    resFlvData.videoFrameType = RESFlvData.NALU_TYPE_IDR;
    // Send to the server over RTMP.
    RtmpClient.write(jniRtmpPointer, resFlvData.byteBuffer, resFlvData.byteBuffer.length,
            resFlvData.flvTagType, resFlvData.dts);
}
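Packager comes from the original project and is not reproduced in the post. Roughly, generateAVCDecoderConfigurationRecord has to turn the SPS ("csd-0") and PPS ("csd-1") that MediaCodec reports in the output format into the AVCDecoderConfigurationRecord layout. A sketch of that, under the assumption that both csd buffers carry an Annex-B start-code prefix:

// Sketch of what the record has to contain; not the project's actual implementation.
private static byte[] buildAvcConfigRecord(MediaFormat format) {
    byte[] sps = stripStartCode(format.getByteBuffer("csd-0"));
    byte[] pps = stripStartCode(format.getByteBuffer("csd-1"));
    byte[] record = new byte[11 + sps.length + pps.length];
    record[0] = 0x01;                         // configurationVersion
    record[1] = sps[1];                       // AVCProfileIndication
    record[2] = sps[2];                       // profile_compatibility
    record[3] = sps[3];                       // AVCLevelIndication
    record[4] = (byte) 0xFF;                  // 4-byte NALU lengths
    record[5] = (byte) 0xE1;                  // one SPS follows
    record[6] = (byte) (sps.length >> 8);
    record[7] = (byte) (sps.length & 0xFF);
    System.arraycopy(sps, 0, record, 8, sps.length);
    int p = 8 + sps.length;
    record[p++] = 0x01;                       // one PPS follows
    record[p++] = (byte) (pps.length >> 8);
    record[p++] = (byte) (pps.length & 0xFF);
    System.arraycopy(pps, 0, record, p, pps.length);
    return record;
}

// Removes the leading 00 00 01 / 00 00 00 01 start code (uses java.util.Arrays).
private static byte[] stripStartCode(ByteBuffer csd) {
    byte[] raw = new byte[csd.remaining()];
    csd.get(raw);
    int skip = (raw[2] == 1) ? 3 : 4;
    return Arrays.copyOfRange(raw, skip, raw.length);
}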

private void sendRealData(long tms, ByteBuffer realData) {
    int realDataLength = realData.remaining();
    int packetLen = Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH
            + Packager.FLVPackager.NALU_HEADER_LENGTH
            + realDataLength;
    byte[] finalBuff = new byte[packetLen];
    // Copy the NAL payload in after the FLV tag header and the 4-byte NALU length.
    realData.get(finalBuff,
            Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH + Packager.FLVPackager.NALU_HEADER_LENGTH,
            realDataLength);
    // Low 5 bits of the NAL header give the NAL unit type; 5 means an IDR (key) frame.
    int frameType = finalBuff[Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH
            + Packager.FLVPackager.NALU_HEADER_LENGTH] & 0x1F;
    Packager.FLVPackager.fillFlvVideoTag(finalBuff,
            0,
            false,                // not a sequence header
            frameType == 5,       // key frame flag
            realDataLength);

    RESFlvData resFlvData = new RESFlvData();
    resFlvData.droppable = true;
    resFlvData.byteBuffer = finalBuff;
    resFlvData.size = finalBuff.length;
    resFlvData.dts = (int) tms;
    resFlvData.flvTagType = FLV_RTMP_PACKET_TYPE_VIDEO;
    resFlvData.videoFrameType = frameType;
    // Send to the server over RTMP.
    RtmpClient.write(jniRtmpPointer, resFlvData.byteBuffer, resFlvData.byteBuffer.length,
            resFlvData.flvTagType, resFlvData.dts);
}
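For reference, here is roughly what fillFlvVideoTag has to write; again a sketch rather than the project's actual implementation. FLV_VIDEO_TAG_LENGTH is the 5-byte FLV video tag header (frame type + CodecID 7 for AVC, AVCPacketType, composition time), and for NALU payloads it is followed by a 4-byte big-endian NALU length, which is why the Annex-B start code was stripped earlier:

// Sketch of the FLV video tag header layout (parameter order matches the calls above).
private static void fillFlvVideoTagSketch(byte[] dst, int pos,
                                          boolean isSequenceHeader,
                                          boolean isKeyframe,
                                          int naluLength) {
    dst[pos]     = (byte) ((isKeyframe ? 0x10 : 0x20) | 0x07); // frame type + CodecID 7 (AVC)
    dst[pos + 1] = (byte) (isSequenceHeader ? 0x00 : 0x01);    // AVCPacketType: 0 = config, 1 = NALU
    dst[pos + 2] = 0x00;                                       // composition time = 0
    dst[pos + 3] = 0x00;
    dst[pos + 4] = 0x00;
    if (!isSequenceHeader) {
        dst[pos + 5] = (byte) (naluLength >>> 24);             // 4-byte NALU length
        dst[pos + 6] = (byte) (naluLength >>> 16);
        dst[pos + 7] = (byte) (naluLength >>> 8);
        dst[pos + 8] = (byte) (naluLength);
    }
}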

4. Integrating librtmp and sending to the server

The stream is pushed over RTMP; librtmp is wrapped behind a thin JNI layer, shown below.
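The Java side of that JNI bridge is not shown in the post, but it can be reconstructed from the native function names and signatures below (the .so name passed to loadLibrary here is an assumption):

package com.example.administrator.mypush;

public class RtmpClient {

    static {
        System.loadLibrary("rtmp-jni");   // assumed library name
    }

    // Returns a native RTMP handle as a long (0 on failure), passed back to write().
    public static native long open(String url, boolean isPublishMode);

    // data is an FLV tag body; type is RTMP_PACKET_TYPE_VIDEO/AUDIO/INFO; ts is the DTS in ms.
    public static native int write(long rtmpPointer, byte[] data, int size, int type, int ts);
}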

extern "C"

JNIEXPORT jlong JNICALL

Java_com_example_administrator_mypush_RtmpClient_open(JNIEnv *env, jclass type, jstring url_,

jboolean isPublishMode) {

const char *url = (env)->GetStringUTFChars( url_, 0);

LOGD("RTMP_OPENING:%s",url);

RTMP* rtmp = RTMP_Alloc();

if (rtmp == NULL) {

LOGD("RTMP_Alloc=NULL");

return NULL;

}

RTMP_Init(rtmp);

int ret = RTMP_SetupURL(rtmp, const_cast(url));

if (!ret) {

RTMP_Free(rtmp);

rtmp=NULL;

LOGD("RTMP_SetupURL=ret");

return NULL;

}

if (isPublishMode) {

RTMP_EnableWrite(rtmp);

}

ret = RTMP_Connect(rtmp, NULL);

if (!ret) {

RTMP_Free(rtmp);

rtmp=NULL;

LOGD("RTMP_Connect=ret");

return NULL;

}

ret = RTMP_ConnectStream(rtmp, 0);

if (!ret) {

ret = RTMP_ConnectStream(rtmp, 0);

RTMP_Close(rtmp);

RTMP_Free(rtmp);

rtmp=NULL;

LOGD("RTMP_ConnectStream=ret");

return NULL;

}

(env)->ReleaseStringUTFChars( url_, url);

LOGD("RTMP_OPENED");

return reinterpret_cast(rtmp);

}extern "C"

JNIEXPORT jint JNICALL
Java_com_example_administrator_mypush_RtmpClient_write(JNIEnv *env, jclass type_, jlong rtmpPointer,
                                                       jbyteArray data_, jint size, jint type,
                                                       jint ts) {
    LOGD("start write");
    jbyte *buffer = env->GetByteArrayElements(data_, NULL);
    RTMPPacket *packet = (RTMPPacket *) malloc(sizeof(RTMPPacket));
    RTMPPacket_Alloc(packet, size);
    RTMPPacket_Reset(packet);
    // Each packet type goes out on its own chunk-stream channel.
    if (type == RTMP_PACKET_TYPE_INFO) {          // metadata
        packet->m_nChannel = 0x03;
    } else if (type == RTMP_PACKET_TYPE_VIDEO) {  // video
        packet->m_nChannel = 0x04;
    } else if (type == RTMP_PACKET_TYPE_AUDIO) {  // audio
        packet->m_nChannel = 0x05;
    } else {
        packet->m_nChannel = -1;
    }
    packet->m_nInfoField2 = ((RTMP *) rtmpPointer)->m_stream_id;
    LOGD("write data type: %d, ts %d", type, ts);
    memcpy(packet->m_body, buffer, size);
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    packet->m_hasAbsTimestamp = FALSE;
    packet->m_nTimeStamp = ts;
    packet->m_packetType = type;
    packet->m_nBodySize = size;
    int ret = RTMP_SendPacket((RTMP *) rtmpPointer, packet, 0);
    RTMPPacket_Free(packet);
    free(packet);
    env->ReleaseByteArrayElements(data_, buffer, 0);
    if (!ret) {
        LOGD("end write error %d", ret);
        return ret;
    } else {
        LOGD("end write success");
        return 0;
    }
}
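How the pieces fit together on the Java side, as a sketch (the field and method names come from the code above; the RTMP URL and thread handling are illustrative only):

@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private void startStreaming(String rtmpUrl) {
    // Connect first so frames can be sent as soon as encoding starts.
    jniRtmpPointer = RtmpClient.open(rtmpUrl, true);   // publish mode
    if (jniRtmpPointer == 0) {
        Log.d(TAG, "rtmp connect failed");
        return;
    }
    prepareEncoder();          // must run first: it creates inputSurface
    createVirtualDisplay();    // wires the virtual display into that surface
    // dequeueOutputBuffer() blocks, so the send loop runs on its own thread.
    new Thread(this::recordVirtualDisplay, "VideoSenderThread").start();
}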

That is the overall approach.

Finally, the result (screenshot omitted here): the end-to-end latency is roughly one second.

Original article: /qq_33023933/article/details/107282156
