Here are examples of the Java API class android.media.MediaCodec.BufferInfo, taken from open-source projects.
1. AACLATMPacketizer#run()
Project: spydroid-ipcamera
File: AACLATMPacketizer.java
@SuppressLint("NewApi") public void run() { Log.d(TAG, "AAC LATM packetizer started !"); int length = 0; long oldts; BufferInfo bufferInfo; try { while (!Thread.interrupted()) { buffer = socket.requestBuffer(); length = is.read(buffer, rtphl + 4, MAXPACKETSIZE - (rtphl + 4)); if (length > 0) { bufferInfo = ((MediaCodecInputStream) is).getLastBufferInfo(); //Log.d(TAG,"length: "+length+" ts: "+bufferInfo.presentationTimeUs); oldts = ts; ts = bufferInfo.presentationTimeUs * 1000; // Seems to happen sometimes if (oldts > ts) { socket.commitBuffer(); continue; } socket.markNextPacket(); socket.updateTimestamp(ts); // AU-headers-length field: contains the size in bits of a AU-header // 13+3 = 16 bits -> 13bits for AU-size and 3bits for AU-Index / AU-Index-delta // 13 bits will be enough because ADTS uses 13 bits for frame length buffer[rtphl] = 0; buffer[rtphl + 1] = 0x10; // AU-size buffer[rtphl + 2] = (byte) (length >> 5); buffer[rtphl + 3] = (byte) (length << 3); // AU-Index buffer[rtphl + 3] &= 0xF8; buffer[rtphl + 3] |= 0x00; send(rtphl + length + 4); } else { socket.commitBuffer(); } } } catch (IOException e) { } catch (ArrayIndexOutOfBoundsException e) { Log.e(TAG, "ArrayIndexOutOfBoundsException: " + (e.getMessage() != null ? e.getMessage() : "unknown error")); e.printStackTrace(); } catch (InterruptedException ignore) { } Log.d(TAG, "AAC LATM packetizer stopped !"); }
2. EncoderDebugger#flushMediaCodec()
Project: spydroid-ipcamera
File: EncoderDebugger.java
/**
 * Drains and releases every pending output buffer of the given codec,
 * returning once the codec reports that no more output is available.
 */
private void flushMediaCodec(MediaCodec mc) {
    final long timeoutUs = 1000000 / FRAMERATE;
    BufferInfo discarded = new BufferInfo();
    for (;;) {
        int bufferIndex = mc.dequeueOutputBuffer(discarded, timeoutUs);
        if (bufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
            break;
        }
        if (bufferIndex >= 0) {
            mc.releaseOutputBuffer(bufferIndex, false);
        }
    }
}
3. EncoderDebugger#decode()
Project: spydroid-ipcamera
File: EncoderDebugger.java
/** * @param withPrefix If set to true, the decoder will be fed with NALs preceeded with 0x00000001. * @return How long it took to decode all the NALs */ private long decode(boolean withPrefix) { int n = 0, i = 0, j = 0; long elapsed = 0, now = timestamp(); int decInputIndex = 0, decOutputIndex = 0; ByteBuffer[] decInputBuffers = mDecoder.getInputBuffers(); ByteBuffer[] decOutputBuffers = mDecoder.getOutputBuffers(); BufferInfo info = new BufferInfo(); while (elapsed < 3000000) { // Feeds the decoder with a NAL unit if (i < NB_ENCODED) { decInputIndex = mDecoder.dequeueInputBuffer(1000000 / FRAMERATE); if (decInputIndex >= 0) { int l1 = decInputBuffers[decInputIndex].capacity(); int l2 = mVideo[i].length; decInputBuffers[decInputIndex].clear(); if ((withPrefix && hasPrefix(mVideo[i])) || (!withPrefix && !hasPrefix(mVideo[i]))) { check(l1 >= l2, "The decoder input buffer is not big enough (nal=" + l2 + ", capacity=" + l1 + ")."); decInputBuffers[decInputIndex].put(mVideo[i], 0, mVideo[i].length); } else if (withPrefix && !hasPrefix(mVideo[i])) { check(l1 >= l2 + 4, "The decoder input buffer is not big enough (nal=" + (l2 + 4) + ", capacity=" + l1 + ")."); decInputBuffers[decInputIndex].put(new byte[] { 0, 0, 0, 1 }); decInputBuffers[decInputIndex].put(mVideo[i], 0, mVideo[i].length); } else if (!withPrefix && hasPrefix(mVideo[i])) { check(l1 >= l2 - 4, "The decoder input buffer is not big enough (nal=" + (l2 - 4) + ", capacity=" + l1 + ")."); decInputBuffers[decInputIndex].put(mVideo[i], 4, mVideo[i].length - 4); } mDecoder.queueInputBuffer(decInputIndex, 0, l2, timestamp(), 0); i++; } else { if (VERBOSE) Log.d(TAG, "No buffer available !"); } } // Tries to get a decoded image decOutputIndex = mDecoder.dequeueOutputBuffer(info, 1000000 / FRAMERATE); if (decOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { decOutputBuffers = mDecoder.getOutputBuffers(); } else if (decOutputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { mDecOutputFormat = 
mDecoder.getOutputFormat(); } else if (decOutputIndex >= 0) { if (n > 2) { // We have successfully encoded and decoded an image ! int length = info.size; mDecodedVideo[j] = new byte[length]; decOutputBuffers[decOutputIndex].clear(); decOutputBuffers[decOutputIndex].get(mDecodedVideo[j], 0, length); // Converts the decoded frame to NV21 convertToNV21(j); if (j >= NB_DECODED - 1) { flushMediaCodec(mDecoder); if (VERBOSE) Log.v(TAG, "Decoding " + n + " frames took " + elapsed / 1000 + " ms"); return elapsed; } j++; } mDecoder.releaseOutputBuffer(decOutputIndex, false); n++; } elapsed = timestamp() - now; } throw new RuntimeException("The decoder did not decode anything."); }
4. EncoderDebugger#encode()
Project: spydroid-ipcamera
File: EncoderDebugger.java
private long encode() { int n = 0; long elapsed = 0, now = timestamp(); int encOutputIndex = 0, encInputIndex = 0; BufferInfo info = new BufferInfo(); ByteBuffer[] encInputBuffers = mEncoder.getInputBuffers(); ByteBuffer[] encOutputBuffers = mEncoder.getOutputBuffers(); while (elapsed < 5000000) { // Feeds the encoder with an image encInputIndex = mEncoder.dequeueInputBuffer(1000000 / FRAMERATE); if (encInputIndex >= 0) { check(encInputBuffers[encInputIndex].capacity() >= mData.length, "The input buffer is not big enough."); encInputBuffers[encInputIndex].clear(); encInputBuffers[encInputIndex].put(mData, 0, mData.length); mEncoder.queueInputBuffer(encInputIndex, 0, mData.length, timestamp(), 0); } else { if (VERBOSE) Log.d(TAG, "No buffer available !"); } // Tries to get a NAL unit encOutputIndex = mEncoder.dequeueOutputBuffer(info, 1000000 / FRAMERATE); if (encOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { encOutputBuffers = mEncoder.getOutputBuffers(); } else if (encOutputIndex >= 0) { mVideo[n] = new byte[info.size]; encOutputBuffers[encOutputIndex].clear(); encOutputBuffers[encOutputIndex].get(mVideo[n++], 0, info.size); mEncoder.releaseOutputBuffer(encOutputIndex, false); if (n >= NB_ENCODED) { flushMediaCodec(mEncoder); return elapsed; } } elapsed = timestamp() - now; } throw new RuntimeException("The encoder is too slow."); }
5. EncoderDebugger#searchSPSandPPS()
Project: spydroid-ipcamera
File: EncoderDebugger.java
/** * Tries to obtain the SPS and the PPS for the encoder. */ private long searchSPSandPPS() { ByteBuffer[] inputBuffers = mEncoder.getInputBuffers(); ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers(); BufferInfo info = new BufferInfo(); byte[] csd = new byte[128]; int len = 0, p = 4, q = 4; long elapsed = 0, now = timestamp(); while (elapsed < 3000000 && (mSPS == null || mPPS == null)) { // Some encoders won't give us the SPS and PPS unless they receive something to encode first... int bufferIndex = mEncoder.dequeueInputBuffer(1000000 / FRAMERATE); if (bufferIndex >= 0) { check(inputBuffers[bufferIndex].capacity() >= mData.length, "The input buffer is not big enough."); inputBuffers[bufferIndex].clear(); inputBuffers[bufferIndex].put(mData, 0, mData.length); mEncoder.queueInputBuffer(bufferIndex, 0, mData.length, timestamp(), 0); } else { if (VERBOSE) Log.e(TAG, "No buffer available !"); } // We are looking for the SPS and the PPS here. As always, Android is very inconsistent, I have observed that some // encoders will give those parameters through the MediaFormat object (that is the normal behaviour). // But some other will not, in that case we try to find a NAL unit of type 7 or 8 in the byte stream outputed by the encoder... 
int index = mEncoder.dequeueOutputBuffer(info, 1000000 / FRAMERATE); if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { // The PPS and PPS shoud be there MediaFormat format = mEncoder.getOutputFormat(); ByteBuffer spsb = format.getByteBuffer("csd-0"); ByteBuffer ppsb = format.getByteBuffer("csd-1"); mSPS = new byte[spsb.capacity() - 4]; spsb.position(4); spsb.get(mSPS, 0, mSPS.length); mPPS = new byte[ppsb.capacity() - 4]; ppsb.position(4); ppsb.get(mPPS, 0, mPPS.length); break; } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { outputBuffers = mEncoder.getOutputBuffers(); } else if (index >= 0) { len = info.size; if (len < 128) { outputBuffers[index].get(csd, 0, len); if (len > 0 && csd[0] == 0 && csd[1] == 0 && csd[2] == 0 && csd[3] == 1) { //depending on the phone so we don't make any assumption about that while (p < len) { while (!(csd[p + 0] == 0 && csd[p + 1] == 0 && csd[p + 2] == 0 && csd[p + 3] == 1) && p + 3 < len) p++; if (p + 3 >= len) p = len; if ((csd[q] & 0x1F) == 7) { mSPS = new byte[p - q]; System.arraycopy(csd, q, mSPS, 0, p - q); } else { mPPS = new byte[p - q]; System.arraycopy(csd, q, mPPS, 0, p - q); } p += 4; q = p; } } } mEncoder.releaseOutputBuffer(index, false); } elapsed = timestamp() - now; } check(mPPS != null & mSPS != null, "Could not determine the SPS & PPS."); mB64PPS = Base64.encodeToString(mPPS, 0, mPPS.length, Base64.NO_WRAP); mB64SPS = Base64.encodeToString(mSPS, 0, mSPS.length, Base64.NO_WRAP); return elapsed; }
6. AACLATMPacketizer#run()
Project: libstreaming
File: AACLATMPacketizer.java
@SuppressLint("NewApi") public void run() { Log.d(TAG, "AAC LATM packetizer started !"); int length = 0; long oldts; BufferInfo bufferInfo; try { while (!Thread.interrupted()) { buffer = socket.requestBuffer(); length = is.read(buffer, rtphl + 4, MAXPACKETSIZE - (rtphl + 4)); if (length > 0) { bufferInfo = ((MediaCodecInputStream) is).getLastBufferInfo(); //Log.d(TAG,"length: "+length+" ts: "+bufferInfo.presentationTimeUs); oldts = ts; ts = bufferInfo.presentationTimeUs * 1000; // Seems to happen sometimes if (oldts > ts) { socket.commitBuffer(); continue; } socket.markNextPacket(); socket.updateTimestamp(ts); // AU-headers-length field: contains the size in bits of a AU-header // 13+3 = 16 bits -> 13bits for AU-size and 3bits for AU-Index / AU-Index-delta // 13 bits will be enough because ADTS uses 13 bits for frame length buffer[rtphl] = 0; buffer[rtphl + 1] = 0x10; // AU-size buffer[rtphl + 2] = (byte) (length >> 5); buffer[rtphl + 3] = (byte) (length << 3); // AU-Index buffer[rtphl + 3] &= 0xF8; buffer[rtphl + 3] |= 0x00; send(rtphl + length + 4); } else { socket.commitBuffer(); } } } catch (IOException e) { } catch (ArrayIndexOutOfBoundsException e) { Log.e(TAG, "ArrayIndexOutOfBoundsException: " + (e.getMessage() != null ? e.getMessage() : "unknown error")); e.printStackTrace(); } catch (InterruptedException ignore) { } Log.d(TAG, "AAC LATM packetizer stopped !"); }
7. EncoderDebugger#flushMediaCodec()
Project: libstreaming
File: EncoderDebugger.java
/**
 * Drains and releases every pending output buffer of the given codec,
 * returning once the codec reports that no more output is available.
 */
private void flushMediaCodec(MediaCodec mc) {
    final long timeoutUs = 1000000 / FRAMERATE;
    BufferInfo discarded = new BufferInfo();
    for (;;) {
        int bufferIndex = mc.dequeueOutputBuffer(discarded, timeoutUs);
        if (bufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
            break;
        }
        if (bufferIndex >= 0) {
            mc.releaseOutputBuffer(bufferIndex, false);
        }
    }
}
8. EncoderDebugger#decode()
Project: libstreaming
File: EncoderDebugger.java
/** * @param withPrefix If set to true, the decoder will be fed with NALs preceeded with 0x00000001. * @return How long it took to decode all the NALs */ private long decode(boolean withPrefix) { int n = 0, i = 0, j = 0; long elapsed = 0, now = timestamp(); int decInputIndex = 0, decOutputIndex = 0; ByteBuffer[] decInputBuffers = mDecoder.getInputBuffers(); ByteBuffer[] decOutputBuffers = mDecoder.getOutputBuffers(); BufferInfo info = new BufferInfo(); while (elapsed < 3000000) { // Feeds the decoder with a NAL unit if (i < NB_ENCODED) { decInputIndex = mDecoder.dequeueInputBuffer(1000000 / FRAMERATE); if (decInputIndex >= 0) { int l1 = decInputBuffers[decInputIndex].capacity(); int l2 = mVideo[i].length; decInputBuffers[decInputIndex].clear(); if ((withPrefix && hasPrefix(mVideo[i])) || (!withPrefix && !hasPrefix(mVideo[i]))) { check(l1 >= l2, "The decoder input buffer is not big enough (nal=" + l2 + ", capacity=" + l1 + ")."); decInputBuffers[decInputIndex].put(mVideo[i], 0, mVideo[i].length); } else if (withPrefix && !hasPrefix(mVideo[i])) { check(l1 >= l2 + 4, "The decoder input buffer is not big enough (nal=" + (l2 + 4) + ", capacity=" + l1 + ")."); decInputBuffers[decInputIndex].put(new byte[] { 0, 0, 0, 1 }); decInputBuffers[decInputIndex].put(mVideo[i], 0, mVideo[i].length); } else if (!withPrefix && hasPrefix(mVideo[i])) { check(l1 >= l2 - 4, "The decoder input buffer is not big enough (nal=" + (l2 - 4) + ", capacity=" + l1 + ")."); decInputBuffers[decInputIndex].put(mVideo[i], 4, mVideo[i].length - 4); } mDecoder.queueInputBuffer(decInputIndex, 0, l2, timestamp(), 0); i++; } else { if (VERBOSE) Log.d(TAG, "No buffer available !"); } } // Tries to get a decoded image decOutputIndex = mDecoder.dequeueOutputBuffer(info, 1000000 / FRAMERATE); if (decOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { decOutputBuffers = mDecoder.getOutputBuffers(); } else if (decOutputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { mDecOutputFormat = 
mDecoder.getOutputFormat(); } else if (decOutputIndex >= 0) { if (n > 2) { // We have successfully encoded and decoded an image ! int length = info.size; mDecodedVideo[j] = new byte[length]; decOutputBuffers[decOutputIndex].clear(); decOutputBuffers[decOutputIndex].get(mDecodedVideo[j], 0, length); // Converts the decoded frame to NV21 convertToNV21(j); if (j >= NB_DECODED - 1) { flushMediaCodec(mDecoder); if (VERBOSE) Log.v(TAG, "Decoding " + n + " frames took " + elapsed / 1000 + " ms"); return elapsed; } j++; } mDecoder.releaseOutputBuffer(decOutputIndex, false); n++; } elapsed = timestamp() - now; } throw new RuntimeException("The decoder did not decode anything."); }
9. EncoderDebugger#encode()
Project: libstreaming
File: EncoderDebugger.java
private long encode() { int n = 0; long elapsed = 0, now = timestamp(); int encOutputIndex = 0, encInputIndex = 0; BufferInfo info = new BufferInfo(); ByteBuffer[] encInputBuffers = mEncoder.getInputBuffers(); ByteBuffer[] encOutputBuffers = mEncoder.getOutputBuffers(); while (elapsed < 5000000) { // Feeds the encoder with an image encInputIndex = mEncoder.dequeueInputBuffer(1000000 / FRAMERATE); if (encInputIndex >= 0) { check(encInputBuffers[encInputIndex].capacity() >= mData.length, "The input buffer is not big enough."); encInputBuffers[encInputIndex].clear(); encInputBuffers[encInputIndex].put(mData, 0, mData.length); mEncoder.queueInputBuffer(encInputIndex, 0, mData.length, timestamp(), 0); } else { if (VERBOSE) Log.d(TAG, "No buffer available !"); } // Tries to get a NAL unit encOutputIndex = mEncoder.dequeueOutputBuffer(info, 1000000 / FRAMERATE); if (encOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { encOutputBuffers = mEncoder.getOutputBuffers(); } else if (encOutputIndex >= 0) { mVideo[n] = new byte[info.size]; encOutputBuffers[encOutputIndex].clear(); encOutputBuffers[encOutputIndex].get(mVideo[n++], 0, info.size); mEncoder.releaseOutputBuffer(encOutputIndex, false); if (n >= NB_ENCODED) { flushMediaCodec(mEncoder); return elapsed; } } elapsed = timestamp() - now; } throw new RuntimeException("The encoder is too slow."); }
10. EncoderDebugger#searchSPSandPPS()
Project: libstreaming
File: EncoderDebugger.java
/** * Tries to obtain the SPS and the PPS for the encoder. */ private long searchSPSandPPS() { ByteBuffer[] inputBuffers = mEncoder.getInputBuffers(); ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers(); BufferInfo info = new BufferInfo(); byte[] csd = new byte[128]; int len = 0, p = 4, q = 4; long elapsed = 0, now = timestamp(); while (elapsed < 3000000 && (mSPS == null || mPPS == null)) { // Some encoders won't give us the SPS and PPS unless they receive something to encode first... int bufferIndex = mEncoder.dequeueInputBuffer(1000000 / FRAMERATE); if (bufferIndex >= 0) { check(inputBuffers[bufferIndex].capacity() >= mData.length, "The input buffer is not big enough."); inputBuffers[bufferIndex].clear(); inputBuffers[bufferIndex].put(mData, 0, mData.length); mEncoder.queueInputBuffer(bufferIndex, 0, mData.length, timestamp(), 0); } else { if (VERBOSE) Log.e(TAG, "No buffer available !"); } // We are looking for the SPS and the PPS here. As always, Android is very inconsistent, I have observed that some // encoders will give those parameters through the MediaFormat object (that is the normal behaviour). // But some other will not, in that case we try to find a NAL unit of type 7 or 8 in the byte stream outputed by the encoder... 
int index = mEncoder.dequeueOutputBuffer(info, 1000000 / FRAMERATE); if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { // The PPS and PPS shoud be there MediaFormat format = mEncoder.getOutputFormat(); ByteBuffer spsb = format.getByteBuffer("csd-0"); ByteBuffer ppsb = format.getByteBuffer("csd-1"); mSPS = new byte[spsb.capacity() - 4]; spsb.position(4); spsb.get(mSPS, 0, mSPS.length); mPPS = new byte[ppsb.capacity() - 4]; ppsb.position(4); ppsb.get(mPPS, 0, mPPS.length); break; } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { outputBuffers = mEncoder.getOutputBuffers(); } else if (index >= 0) { len = info.size; if (len < 128) { outputBuffers[index].get(csd, 0, len); if (len > 0 && csd[0] == 0 && csd[1] == 0 && csd[2] == 0 && csd[3] == 1) { //depending on the phone so we don't make any assumption about that while (p < len) { while (!(csd[p + 0] == 0 && csd[p + 1] == 0 && csd[p + 2] == 0 && csd[p + 3] == 1) && p + 3 < len) p++; if (p + 3 >= len) p = len; if ((csd[q] & 0x1F) == 7) { mSPS = new byte[p - q]; System.arraycopy(csd, q, mSPS, 0, p - q); } else { mPPS = new byte[p - q]; System.arraycopy(csd, q, mPPS, 0, p - q); } p += 4; q = p; } } } mEncoder.releaseOutputBuffer(index, false); } elapsed = timestamp() - now; } check(mPPS != null & mSPS != null, "Could not determine the SPS & PPS."); mB64PPS = Base64.encodeToString(mPPS, 0, mPPS.length, Base64.NO_WRAP); mB64SPS = Base64.encodeToString(mSPS, 0, mSPS.length, Base64.NO_WRAP); return elapsed; }