Here are the two encoders' MediaFormat configurations side by side.

// Encoder 1: ByteBuffer input (raw YUV frames queued by hand)
mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, this.mWidth, this.mHeight);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);

// Encoder 2: Surface input
final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); // API >= 18
format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
The two formats are essentially identical: same width and height, same MIME type, even the same frame rate and capture size. The only difference is MediaFormat.KEY_COLOR_FORMAT. Per the official documentation, COLOR_FormatYUV420SemiPlanar means the encoder expects raw NV12-style YUV frames pushed into its input ByteBuffers, while COLOR_FormatSurface (API 18+) means the encoder pulls its input from a Surface obtained through MediaCodec#createInputSurface().
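A minimal sketch of how each color format maps onto an input path (the surrounding setup is an assumption for illustration, not the author's exact code; mediaFormat, format and MIME_TYPE are the variables defined above):

// Path 1: ByteBuffer input (COLOR_FormatYUV420SemiPlanar) -- illustrative sketch only
MediaCodec bufferCodec = MediaCodec.createEncoderByType(MIME_TYPE); // may throw IOException
bufferCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
bufferCodec.start();
// raw frames are then fed manually via dequeueInputBuffer()/queueInputBuffer(), as in encodeFrame() below

// Path 2: Surface input (COLOR_FormatSurface, API >= 18) -- illustrative sketch only
MediaCodec surfaceCodec = MediaCodec.createEncoderByType(MIME_TYPE);
surfaceCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
Surface inputSurface = surfaceCodec.createInputSurface(); // must be called after configure() and before start()
surfaceCodec.start();
// anything rendered onto inputSurface (camera preview, OpenGL output) is encoded automatically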
Honestly, the encoding code itself is also almost identical. The only real difference is at the start of the first encoder's encode function: each raw camera frame (nv21bytes) is first converted into i420bytes, and it is that converted buffer (mFrameData) which gets queued into the codec:
private void encodeFrame(byte[] input) {
    Log.w(TAG, "VideoEncoderThread.encodeFrame()");

    // convert the raw NV21 data to the semi-planar layout the encoder expects
    NV21toI420SemiPlanar(input, mFrameData, this.mWidth, this.mHeight);

    ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    ByteBuffer[] outputBuffers = mMediaCodec.getOutputBuffers();
    int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
    if (inputBufferIndex >= 0) {
        ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
        inputBuffer.clear();
        inputBuffer.put(mFrameData);
        mMediaCodec.queueInputBuffer(inputBufferIndex, 0, mFrameData.length, System.nanoTime() / 1000, 0);
    } else {
        Log.e(TAG, "input buffer not available");
    }
    // ...... (remainder omitted)
}

protected void encode(final ByteBuffer buffer, final int length, final long presentationTimeUs) {
    if (!mIsCapturing) return;
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    while (mIsCapturing) {
        final int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
        if (inputBufferIndex >= 0) {
            final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
            inputBuffer.clear();
            if (buffer != null) {
                inputBuffer.put(buffer);
            }
            // if (DEBUG) Log.v(TAG, "encode:queueInputBuffer");
            if (length <= 0) {
                // send EOS
                mIsEOS = true;
                if (DEBUG) Log.i(TAG, "send BUFFER_FLAG_END_OF_STREAM");
                mMediaCodec.queueInputBuffer(inputBufferIndex, 0, 0, presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                break;
            } else {
                mMediaCodec.queueInputBuffer(inputBufferIndex, 0, length, presentationTimeUs, 0);
            }
            break;
        } else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // wait until the MediaCodec encoder is ready to encode;
            // nothing to do here because MediaCodec#dequeueInputBuffer(TIMEOUT_USEC)
            // already waits for at most TIMEOUT_USEC (10 msec) on each call
        }
    }
}
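One side note not in the original text: both snippets use getInputBuffers()/getOutputBuffers(), which have been deprecated since API 21. A minimal sketch of the same input step using the per-index accessors introduced in API 21 (mMediaCodec, mFrameData and TIMEOUT_USEC are the fields from the code above):

// API >= 21 variant of the input step -- sketch only
int index = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
if (index >= 0) {
    ByteBuffer inputBuffer = mMediaCodec.getInputBuffer(index); // replaces getInputBuffers()[index]; returned buffer is already cleared
    inputBuffer.put(mFrameData);
    mMediaCodec.queueInputBuffer(index, 0, mFrameData.length, System.nanoTime() / 1000, 0);
}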
The NV21 conversion helper used in encodeFrame() looks like this:

private static void NV21toI420SemiPlanar(byte[] nv21bytes, byte[] i420bytes, int width, int height) {
    // The Y plane (width * height bytes) has the same layout in both formats; copy it straight over.
    System.arraycopy(nv21bytes, 0, i420bytes, 0, width * height);
    // NV21 stores chroma as interleaved V,U pairs; COLOR_FormatYUV420SemiPlanar (NV12-style)
    // wants U,V, so swap the two bytes of every pair.
    for (int i = width * height; i < nv21bytes.length; i += 2) {
        i420bytes[i] = nv21bytes[i + 1];
        i420bytes[i + 1] = nv21bytes[i];
    }
}
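Despite the i420bytes name, the result is not planar I420 but NV12-style semi-planar data, which is exactly what the encoder was configured for. A tiny worked example with a hypothetical 2x2 frame (values made up) shows the effect:

// 2x2 frame: four Y bytes followed by one interleaved V,U chroma pair
byte[] nv21 = { 10, 11, 12, 13, /* V */ 40, /* U */ 50 };
byte[] out = new byte[nv21.length];
NV21toI420SemiPlanar(nv21, out, 2, 2);
// out is now { 10, 11, 12, 13, 50, 40 }: the Y plane is untouched, the chroma pair is reordered to U,V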
Now let's see how the encoded output actually makes it into the muxer. The code below is the stop path: it drains whatever the encoder has already produced, asks the encoder to stop by signalling end of stream, drains once more so the data produced for the EOS also lands in the MP4 file, and finally releases all related objects:
drain();
// request stop recording
signalEndOfInputStream();
// process the output data again for the EOS signal
drain();
// release all related objects
release();
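Note that MediaCodec#signalEndOfInputStream() (API 18+) is only valid for the Surface-input encoder; the ByteBuffer-input encoder reaches the same state by queuing an empty buffer with BUFFER_FLAG_END_OF_STREAM, which is exactly what the length <= 0 branch of encode() above does. A hedged sketch of how a wrapper's signalEndOfInputStream() might dispatch between the two cases (the flag and the method body are assumptions, not the author's code):

protected void signalEndOfInputStream() {
    if (mUsesSurfaceInput) {                       // hypothetical flag marking the COLOR_FormatSurface encoder
        mMediaCodec.signalEndOfInputStream();      // API >= 18, valid only with an input Surface
    } else {
        encode(null, 0, System.nanoTime() / 1000); // empty frame => queues BUFFER_FLAG_END_OF_STREAM
    }
}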
Finally, let's look at what drain() itself does; the full method is below, and with it we're done. There is a lot of code here, so focus on abstracting and understanding the overall flow and the details rather than memorizing it, and write up your own summary.
protected void drain() {
    if (mMediaCodec == null) return;
    ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers();
    int encoderStatus, count = 0;
    final MediaMuxerWrapper muxer = mWeakMuxer.get();
    if (muxer == null) {
        // throw new NullPointerException("muxer is unexpectedly null");
        Log.w(TAG, "muxer is unexpectedly null");
        return;
    }
    LOOP: while (mIsCapturing) {
        // get encoded data with a maximum timeout of TIMEOUT_USEC (=10 msec)
        encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // wait up to 5 counts (=TIMEOUT_USEC x 5 = 50 msec) until data/EOS arrive
            if (!mIsEOS) {
                if (++count > 5)
                    break LOOP; // out of while
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            if (DEBUG) Log.v(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
            // this should not happen while encoding
            encoderOutputBuffers = mMediaCodec.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            if (DEBUG) Log.v(TAG, "INFO_OUTPUT_FORMAT_CHANGED");
            // this status indicates that the output format of the codec has changed;
            // it should come only once, before the actual encoded data,
            // but it never comes on Android 4.3 or earlier;
            // in that case, handle MediaCodec.BUFFER_FLAG_CODEC_CONFIG instead.
            if (mMuxerStarted) { // a second request is an error
                throw new RuntimeException("format changed twice");
            }
            // get the output format from the codec and pass it to the muxer;
            // getOutputFormat must be called after INFO_OUTPUT_FORMAT_CHANGED, otherwise it crashes.
            final MediaFormat format = mMediaCodec.getOutputFormat(); // API >= 16
            mTrackIndex = muxer.addTrack(format);
            mMuxerStarted = true;
            if (!muxer.start()) {
                // we should wait until the muxer is ready
                synchronized (muxer) {
                    while (!muxer.isStarted())
                        try {
                            muxer.wait(100);
                        } catch (final InterruptedException e) {
                            break LOOP;
                        }
                }
            }
        } else if (encoderStatus < 0) {
            // unexpected status
            if (DEBUG) Log.w(TAG, "drain:unexpected result from encoder#dequeueOutputBuffer: " + encoderStatus);
        } else {
            final ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                // this should never happen... may be a MediaCodec internal error
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
            }
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // When targeting Android 4.3 or earlier you should set the output format on the muxer here,
                // but MediaCodec#getOutputFormat cannot be called yet (INFO_OUTPUT_FORMAT_CHANGED has not come),
                // therefore you would have to build the output format from the buffer data yourself.
                // This sample is for API >= 18 (>= Android 4.3), so just ignore this flag here.
                if (DEBUG) Log.d(TAG, "drain:BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }
            if (mBufferInfo.size != 0) {
                // encoded data is ready, clear the waiting counter
                count = 0;
                if (!mMuxerStarted) {
                    // the muxer is not ready... this is a programming failure
                    throw new RuntimeException("drain:muxer hasn't started");
                }
                // write the encoded data to the muxer (presentationTimeUs needs to be adjusted)
                mBufferInfo.presentationTimeUs = getPTSUs();
                muxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                prevOutputPTSUs = mBufferInfo.presentationTimeUs;
            }
            // return the buffer to the encoder
            mMediaCodec.releaseOutputBuffer(encoderStatus, false);
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                // when EOS arrives
                mIsCapturing = false;
                break; // out of while
            }
        }
    }
}
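drain() relies on a getPTSUs() helper that is not shown in the excerpts. Judging from the prevOutputPTSUs bookkeeping, its job is to supply a presentation timestamp in microseconds that never moves backwards, since MediaMuxer#writeSampleData() expects timestamps that do not go backwards within a track. A minimal sketch of such a helper (an assumption, not necessarily the author's exact implementation):

protected long getPTSUs() {
    long result = System.nanoTime() / 1000L;  // current time in microseconds
    if (result < prevOutputPTSUs) {
        result = prevOutputPTSUs;             // never hand the muxer a timestamp older than the last one written
    }
    return result;
}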