| Frame type | value |
|---|---|
| VPS | 32 |
| SPS | 33 |
| PPS | 34 |
| IDR | 19 |
| P | 1 |
| B | 0 |
In HEVC the NAL unit type occupies bits 1–6 of the first byte after the start code, so it is extracted with `(byte & 0x7E) >> 1`. For example, for the header bytes 0x40 and 0x42:

```
0100 0000 (0x40) & 0111 1110 (0x7E) = 0100 0000; >> 1 = 0010 0000 = 32
0100 0010 (0x42) & 0111 1110 (0x7E) = 0100 0010; >> 1 = 0010 0001 = 33
```

The first result is 32, which the table tells us is a VPS; the second is 33, an SPS.
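The same arithmetic can be checked in code. A quick standalone Java snippet (the byte values are taken from the example above):

```java
public class NalTypeDemo {
    // Extract the HEVC nal_unit_type from the first byte after the start code
    static int nalType(int headerByte) {
        return (headerByte & 0x7E) >> 1;
    }

    public static void main(String[] args) {
        System.out.println(nalType(0x40)); // 32 -> VPS
        System.out.println(nalType(0x42)); // 33 -> SPS
        System.out.println(nalType(0x26)); // 19 -> IDR
    }
}
```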
First we need to get hold of the recorded screen data, which is really just the encoding layer:
```java
public void startLive() {
    try {
        // Server side: encode the screen as H.265 and push it to the client over a socket
        MediaFormat format = MediaFormat.createVideoFormat(
                MediaFormat.MIMETYPE_VIDEO_HEVC, mWidth, mHeight);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mWidth * mHeight);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 20);
        mMediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_HEVC);
        mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // Create the input surface that the virtual display renders into
        Surface surface = mMediaCodec.createInputSurface();
        mVirtualDisplay = mMediaProjection.createVirtualDisplay(
                "CodecLiveH265", mWidth, mHeight, 1,
                DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, surface, null, null);
        mHandler.post(this);
    } catch (IOException e) {
        e.printStackTrace();
    }
}

@Override
public void run() {
    mMediaCodec.start();
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    while (true) {
        // Drain encoded output and hand each frame to the sender
        int outIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 1000);
        if (outIndex >= 0) {
            ByteBuffer buffer = mMediaCodec.getOutputBuffer(outIndex);
            dealFrame(buffer, bufferInfo);
            mMediaCodec.releaseOutputBuffer(outIndex, false);
        }
    }
}
```

If any of this is unfamiliar, see my earlier article: Android Audio/Video Development — Recording the Screen and Encoding to H.264 with MediaCodec.
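One thing the snippet takes for granted is `mMediaProjection`. Its setup is not shown in the original; as a minimal sketch (not from the article, and `REQUEST_CODE_CAPTURE` / `requestCapture` are illustrative names), it is typically obtained inside an Activity like this:

```java
// Illustrative sketch: obtaining the MediaProjection that startLive() relies on.
private static final int REQUEST_CODE_CAPTURE = 1; // arbitrary request code

private void requestCapture() {
    MediaProjectionManager manager =
            (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
    // Shows the system "start capturing?" consent dialog
    startActivityForResult(manager.createScreenCaptureIntent(), REQUEST_CODE_CAPTURE);
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_CODE_CAPTURE && resultCode == Activity.RESULT_OK) {
        MediaProjectionManager manager =
                (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
        // From here on, mMediaProjection can back the virtual display
        mMediaProjection = manager.getMediaProjection(resultCode, data);
    }
}
```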
Pay attention to the frame-handling method dealFrame. In an H.265 stream the VPS, SPS and PPS normally appear only once, at the start. For screen mirroring, however, we must resend the VPS/SPS/PPS together with every I-frame, so that a receiver that joins or recovers mid-stream can always decode from the next I-frame:
```java
public static final int NAL_I = 19;
public static final int NAL_VPS = 32;
// VPS+SPS+PPS arrive together in one config buffer, so matching the VPS type is enough
private byte[] vps_sps_pps_buffer;

private void dealFrame(ByteBuffer buffer, MediaCodec.BufferInfo bufferInfo) {
    // Skip the start code: 0x00 00 00 01 or 0x00 00 01
    int offset = 4;
    if (buffer.get(2) == 0x01) {
        offset = 3;
    }
    // Extract the NAL unit type
    int type = (buffer.get(offset) & 0x7E) >> 1;
    if (type == NAL_VPS) {
        // Cache the VPS/SPS/PPS config data
        vps_sps_pps_buffer = new byte[bufferInfo.size];
        buffer.get(vps_sps_pps_buffer);
    } else if (type == NAL_I) {
        // I-frame: prepend the cached VPS/SPS/PPS before sending
        final byte[] bytes = new byte[bufferInfo.size];
        buffer.get(bytes);
        byte[] newBuffer = new byte[vps_sps_pps_buffer.length + bytes.length];
        System.arraycopy(vps_sps_pps_buffer, 0, newBuffer, 0, vps_sps_pps_buffer.length);
        System.arraycopy(bytes, 0, newBuffer, vps_sps_pps_buffer.length, bytes.length);
        mWebSocketSendLive.sendData(newBuffer);
    } else {
        // P- and B-frames can be sent as-is
        final byte[] bytes = new byte[bufferInfo.size];
        buffer.get(bytes);
        mWebSocketSendLive.sendData(bytes);
    }
}
```
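mWebSocketSendLive is the author's sender wrapper, whose implementation is not shown. Purely as a sketch of what such a wrapper could look like, here is a minimal version on top of the org.java_websocket library (the class name, the single-client assumption, and the port handling are all assumptions):

```java
// Hypothetical sketch of the sender side, assuming the org.java_websocket library.
import org.java_websocket.WebSocket;
import org.java_websocket.handshake.ClientHandshake;
import org.java_websocket.server.WebSocketServer;

import java.net.InetSocketAddress;

public class WebSocketSendLive extends WebSocketServer {
    private WebSocket mClient;

    public WebSocketSendLive(int port) {
        super(new InetSocketAddress(port));
    }

    @Override
    public void onOpen(WebSocket conn, ClientHandshake handshake) {
        mClient = conn; // remember the connected receiver
    }

    public void sendData(byte[] bytes) {
        if (mClient != null && mClient.isOpen()) {
            mClient.send(bytes); // one encoded access unit per binary message
        }
    }

    @Override public void onClose(WebSocket conn, int code, String reason, boolean remote) { mClient = null; }
    @Override public void onMessage(WebSocket conn, String message) { }
    @Override public void onError(WebSocket conn, Exception ex) { ex.printStackTrace(); }
    @Override public void onStart() { }
}
```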
Next, the receiver has to parse the buffers it gets. The first step is to initialize the decoder:
```kotlin
// Initialize the decoder
private fun initDecoder(surface: Surface?) {
    mMediaCodec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_HEVC)
    val format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_HEVC, mWidth, mHeight)
    format.setInteger(MediaFormat.KEY_BIT_RATE, mWidth * mHeight)
    format.setInteger(MediaFormat.KEY_FRAME_RATE, 20)
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1)
    mMediaCodec.configure(format, surface, null, 0)
    mMediaCodec.start()
}
```
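initDecoder needs a Surface to render into. The original does not show where it comes from; a common pattern, sketched here in Java to match the other added snippets (surfaceView is assumed to be the on-screen view), is to wait for a SurfaceView's surface to be ready:

```java
// Hypothetical sketch: create the decoder once the SurfaceView's surface exists.
surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        initDecoder(holder.getSurface()); // safe to configure the codec now
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) { }
});
```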
The second step is to decode the received data:

```kotlin
override fun callBack(data: ByteArray?) {
    // Callback: one encoded access unit received from the socket
    LogUtils.e("received data length: ${data?.size}")
    if (data == null) return
    // The client hands the data to the hardware (DSP) decoder
    val index = mMediaCodec.dequeueInputBuffer(10000)
    if (index >= 0) {
        val inputBuffer = mMediaCodec.getInputBuffer(index)
        inputBuffer?.clear()
        inputBuffer?.put(data, 0, data.size)
        // Queue the buffer for decoding; the presentation timestamp is in microseconds
        mMediaCodec.queueInputBuffer(index, 0, data.size, System.nanoTime() / 1000, 0)
    }
    // Drain the decoded frames
    val bufferInfo = MediaCodec.BufferInfo()
    var outIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 10000)
    while (outIndex >= 0) { // index 0 is valid, so test >= 0, not > 0
        // true = render this frame onto the surface passed to configure()
        mMediaCodec.releaseOutputBuffer(outIndex, true)
        outIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 10000)
    }
}
```
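Where does callBack get its data? The original does not show the receiving socket layer either; a minimal client, again sketched on the org.java_websocket library (the LiveReceiver class and LiveCallback interface are illustrative, not from the article), could look like this:

```java
// Hypothetical sketch of the receiving side, assuming the org.java_websocket client.
import org.java_websocket.client.WebSocketClient;
import org.java_websocket.handshake.ServerHandshake;

import java.net.URI;
import java.nio.ByteBuffer;

public class LiveReceiver extends WebSocketClient {
    public interface LiveCallback { void callBack(byte[] data); }

    private final LiveCallback mCallback;

    public LiveReceiver(URI serverUri, LiveCallback callback) {
        super(serverUri);
        mCallback = callback;
    }

    @Override
    public void onMessage(ByteBuffer bytes) {
        // Copy the binary frame out of the buffer and hand it to the decode callback
        byte[] data = new byte[bytes.remaining()];
        bytes.get(data);
        mCallback.callBack(data);
    }

    @Override public void onOpen(ServerHandshake handshake) { }
    @Override public void onMessage(String message) { }
    @Override public void onClose(int code, String reason, boolean remote) { }
    @Override public void onError(Exception ex) { ex.printStackTrace(); }
}
```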
Original statement: this article was published on the Tencent Cloud Developer Community with the author's authorization and may not be reproduced without permission. For infringement concerns, contact cloudcommunity@tencent.com for removal.