Here are 21 examples of the Java API class android.media.MediaFormat, taken from open source projects.
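All of the examples build a MediaFormat either through the factory methods (createVideoFormat / createAudioFormat) or by instantiating an empty format and setting MediaFormat.KEY_MIME by hand. A minimal sketch of both styles, with illustrative mime types and values (not taken from any project below):

import android.media.MediaFormat;

public class MediaFormatSketch {
    // Factory style: the mime, width and height keys are filled in for you.
    static MediaFormat video() {
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 2000000);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        return format;
    }

    // Manual style: equivalent to createAudioFormat("audio/mp4a-latm", 44100, 1).
    static MediaFormat audio() {
        MediaFormat format = new MediaFormat();
        format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
        format.setInteger(MediaFormat.KEY_SAMPLE_RATE, 44100);
        format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 128000);
        return format;
    }
}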
1. ServerService#createDisplaySurface()
Project: RemoteDroid
File: ServerService.java
/**
 * Create the display surface out of the encoder. The data to encode will be fed from this
 * Surface itself.
 * @return
 * @throws IOException
 */
@TargetApi(19)
private Surface createDisplaySurface() throws IOException {
    MediaFormat mMediaFormat = MediaFormat.createVideoFormat(CodecUtils.MIME_TYPE, CodecUtils.WIDTH, CodecUtils.HEIGHT);
    mMediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, (int) (1024 * 1024 * 0.5));
    mMediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
    mMediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mMediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    Log.i(TAG, "Starting encoder");
    encoder = MediaCodec.createEncoderByType(CodecUtils.MIME_TYPE);
    encoder.configure(mMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    Surface surface = encoder.createInputSurface();
    return surface;
}
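Because the format uses COLOR_FormatSurface, the encoder's input side is fed entirely through the returned Surface; the caller only has to drain the output side. A minimal drain-loop sketch under that assumption (the encoder field is the one configured above; the timeout value is illustrative):

MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
while (true) {
    int index = encoder.dequeueOutputBuffer(info, 10000 /* us */);
    if (index >= 0) {
        // Pre-API-21 buffer access, matching the style of these examples
        ByteBuffer encoded = encoder.getOutputBuffers()[index];
        // ... hand 'encoded' to a muxer, socket, or file ...
        encoder.releaseOutputBuffer(index, false);
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) break;
    } else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
        break; // no output available yet
    }
}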
2. AndroidTrackInfo#getFormat()
Project: WliveTV
File: AndroidTrackInfo.java
@TargetApi(Build.VERSION_CODES.KITKAT)
@Override
public IMediaFormat getFormat() {
    if (mTrackInfo == null)
        return null;
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT)
        return null;
    MediaFormat mediaFormat = mTrackInfo.getFormat();
    if (mediaFormat == null)
        return null;
    return new AndroidMediaFormat(mediaFormat);
}
3. AndroidTrackInfo#getFormat()
Project: GiraffePlayer
File: AndroidTrackInfo.java
@TargetApi(Build.VERSION_CODES.KITKAT)
@Override
public IMediaFormat getFormat() {
    if (mTrackInfo == null)
        return null;
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT)
        return null;
    MediaFormat mediaFormat = mTrackInfo.getFormat();
    if (mediaFormat == null)
        return null;
    return new AndroidMediaFormat(mediaFormat);
}
4. SrsEncoder#start()
Project: yasea
File: SrsEncoder.java
public int start() {
    // the reference PTS for video and audio encoder.
    mPresentTimeUs = System.nanoTime() / 1000;

    // requires sdk level 16+, Android 4.1, 4.1.1, the JELLY_BEAN
    try {
        aencoder = MediaCodec.createEncoderByType(ACODEC);
    } catch (IOException e) {
        Log.e(TAG, "create aencoder failed.");
        e.printStackTrace();
        return -1;
    }

    // setup the aencoder.
    // @see https://developer.android.com/reference/android/media/MediaCodec.html
    int ach = ACHANNEL == AudioFormat.CHANNEL_IN_STEREO ? 2 : 1;
    MediaFormat audioFormat = MediaFormat.createAudioFormat(ACODEC, ASAMPLERATE, ach);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, ABITRATE);
    audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 0);
    aencoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    // add the audio track to the muxer.
    audioFlvTrack = flvMuxer.addTrack(audioFormat);
    audioMp4Track = mp4Muxer.addTrack(audioFormat);

    // requires sdk level 16+, Android 4.1, 4.1.1, the JELLY_BEAN
    try {
        vencoder = MediaCodec.createByCodecName(vmci.getName());
    } catch (IOException e) {
        Log.e(TAG, "create vencoder failed.");
        e.printStackTrace();
        return -1;
    }

    // setup the vencoder.
    // Note: landscape to portrait, 90 degree rotation, so we need to switch width and height in configuration
    MediaFormat videoFormat = MediaFormat.createVideoFormat(VCODEC, vCropWidth, vCropHeight);
    videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mVideoColorFormat);
    videoFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 0);
    videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, VBITRATE);
    videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, VFPS);
    videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, VGOP / VFPS);
    vencoder.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    // add the video track to the muxer.
    videoFlvTrack = flvMuxer.addTrack(videoFormat);
    videoMp4Track = mp4Muxer.addTrack(videoFormat);

    // start device and encoder.
    vencoder.start();
    aencoder.start();

    // better to process YUV data in a thread
    yuvPreprocessThread = new Thread(new Runnable() {
        @Override
        public void run() {
            while (!Thread.interrupted()) {
                while (!yuvQueue.isEmpty()) {
                    byte[] data = yuvQueue.poll();
                    if (mOrientation == Configuration.ORIENTATION_PORTRAIT) {
                        portraitPreprocessYuvFrame(data);
                    } else {
                        landscapePreprocessYuvFrame(data);
                    }
                    ByteBuffer[] inBuffers = vencoder.getInputBuffers();
                    ByteBuffer[] outBuffers = vencoder.getOutputBuffers();
                    int inBufferIndex = vencoder.dequeueInputBuffer(-1);
                    if (inBufferIndex >= 0) {
                        ByteBuffer bb = inBuffers[inBufferIndex];
                        bb.clear();
                        bb.put(mRotatedFrameBuffer, 0, mRotatedFrameBuffer.length);
                        long pts = System.nanoTime() / 1000 - mPresentTimeUs;
                        vencoder.queueInputBuffer(inBufferIndex, 0, mRotatedFrameBuffer.length, pts, 0);
                    }
                    for (; ; ) {
                        int outBufferIndex = vencoder.dequeueOutputBuffer(vebi, 0);
                        if (outBufferIndex >= 0) {
                            ByteBuffer bb = outBuffers[outBufferIndex];
                            onEncodedAnnexbFrame(bb, vebi);
                            vencoder.releaseOutputBuffer(outBufferIndex, false);
                            yuvCacheNum.getAndDecrement();
                        } else {
                            break;
                        }
                    }
                }
                // Wait for the next yuv frame
                synchronized (yuvLock) {
                    try {
                        // isEmpty() may take some time, so a timeout is set to wait for the next frame.
                        yuvLock.wait(500);
                    } catch (InterruptedException ex) {
                        yuvPreprocessThread.interrupt();
                    }
                }
            }
        }
    });
    yuvPreprocessThread.start();
    return 0;
}
5. AACStream#encodeWithMediaCodec()
Project: spydroid-ipcamera
File: AACStream.java
@Override
@SuppressLint({ "InlinedApi", "NewApi" })
protected void encodeWithMediaCodec() throws IOException {
    final int bufferSize = AudioRecord.getMinBufferSize(mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) * 2;
    ((AACLATMPacketizer) mPacketizer).setSamplingRate(mQuality.samplingRate);
    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
    mMediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
    MediaFormat format = new MediaFormat();
    format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
    format.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitRate);
    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mQuality.samplingRate);
    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
    mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mAudioRecord.startRecording();
    mMediaCodec.start();
    final MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    mThread = new Thread(new Runnable() {
        @Override
        public void run() {
            int len = 0, bufferIndex = 0;
            try {
                while (!Thread.interrupted()) {
                    bufferIndex = mMediaCodec.dequeueInputBuffer(10000);
                    if (bufferIndex >= 0) {
                        inputBuffers[bufferIndex].clear();
                        len = mAudioRecord.read(inputBuffers[bufferIndex], bufferSize);
                        if (len == AudioRecord.ERROR_INVALID_OPERATION || len == AudioRecord.ERROR_BAD_VALUE) {
                            Log.e(TAG, "An error occurred with the AudioRecord API!");
                        } else {
                            //Log.v(TAG, "Pushing raw audio to the decoder: len=" + len + " bs: " + inputBuffers[bufferIndex].capacity());
                            mMediaCodec.queueInputBuffer(bufferIndex, 0, len, System.nanoTime() / 1000, 0);
                        }
                    }
                }
            } catch (RuntimeException e) {
                e.printStackTrace();
            }
        }
    });
    mThread.start();
    // The packetizer encapsulates this stream in an RTP stream and sends it over the network
    mPacketizer.setDestination(mDestination, mRtpPort, mRtcpPort);
    mPacketizer.setInputStream(inputStream);
    mPacketizer.start();
    mStreaming = true;
}
6. AACStream#encodeWithMediaCodec()
Project: libstreaming
File: AACStream.java
@Override
@SuppressLint({ "InlinedApi", "NewApi" })
protected void encodeWithMediaCodec() throws IOException {
    final int bufferSize = AudioRecord.getMinBufferSize(mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) * 2;
    ((AACLATMPacketizer) mPacketizer).setSamplingRate(mQuality.samplingRate);
    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
    mMediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
    MediaFormat format = new MediaFormat();
    format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
    format.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitRate);
    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mQuality.samplingRate);
    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
    mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mAudioRecord.startRecording();
    mMediaCodec.start();
    final MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    mThread = new Thread(new Runnable() {
        @Override
        public void run() {
            int len = 0, bufferIndex = 0;
            try {
                while (!Thread.interrupted()) {
                    bufferIndex = mMediaCodec.dequeueInputBuffer(10000);
                    if (bufferIndex >= 0) {
                        inputBuffers[bufferIndex].clear();
                        len = mAudioRecord.read(inputBuffers[bufferIndex], bufferSize);
                        if (len == AudioRecord.ERROR_INVALID_OPERATION || len == AudioRecord.ERROR_BAD_VALUE) {
                            Log.e(TAG, "An error occurred with the AudioRecord API!");
                        } else {
                            //Log.v(TAG, "Pushing raw audio to the decoder: len=" + len + " bs: " + inputBuffers[bufferIndex].capacity());
                            mMediaCodec.queueInputBuffer(bufferIndex, 0, len, System.nanoTime() / 1000, 0);
                        }
                    }
                }
            } catch (RuntimeException e) {
                e.printStackTrace();
            }
        }
    });
    mThread.start();
    // The packetizer encapsulates this stream in an RTP stream and sends it over the network
    mPacketizer.setInputStream(inputStream);
    mPacketizer.start();
    mStreaming = true;
}
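The two AACStream examples build the format by hand with new MediaFormat() plus setString(KEY_MIME, ...); on the same API level this is interchangeable with the factory method. A sketch of the equivalent construction (not from either project):

MediaFormat format = MediaFormat.createAudioFormat("audio/mp4a-latm", mQuality.samplingRate, 1);
format.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitRate);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);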
7. VideoStream#encodeWithMediaCodecMethod2()
Project: spydroid-ipcamera
File: VideoStream.java
/**
 * Video encoding is done by a MediaCodec.
 * But here we will use the buffer-to-surface method.
 */
@SuppressLint({ "InlinedApi", "NewApi" })
protected void encodeWithMediaCodecMethod2() throws RuntimeException, IOException {
    Log.d(TAG, "Video encoded using the MediaCodec API with a surface");
    // Updates the parameters of the camera if needed
    createCamera();
    updateCamera();
    // Estimates the framerate of the camera
    measureFramerate();
    EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
    mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    Surface surface = mMediaCodec.createInputSurface();
    ((SurfaceView) mSurfaceView).addMediaCodecSurface(surface);
    mMediaCodec.start();
    // The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
    mPacketizer.setDestination(mDestination, mRtpPort, mRtcpPort);
    mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
    mPacketizer.start();
    mStreaming = true;
}
8. VideoStream#encodeWithMediaCodecMethod1()
Project: spydroid-ipcamera
File: VideoStream.java
/**
 * Video encoding is done by a MediaCodec.
 */
@SuppressLint("NewApi")
protected void encodeWithMediaCodecMethod1() throws RuntimeException, IOException {
    Log.d(TAG, "Video encoded using the MediaCodec API with a buffer");
    // Updates the parameters of the camera if needed
    createCamera();
    updateCamera();
    // Estimates the framerate of the camera
    measureFramerate();
    // Starts the preview if needed
    if (!mPreviewStarted) {
        try {
            mCamera.startPreview();
            mPreviewStarted = true;
        } catch (RuntimeException e) {
            destroyCamera();
            throw e;
        }
    }
    EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
    final NV21Convertor convertor = debugger.getNV21Convertor();
    mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, debugger.getEncoderColorFormat());
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mMediaCodec.start();
    Camera.PreviewCallback callback = new Camera.PreviewCallback() {
        long now = System.nanoTime() / 1000, oldnow = now, i = 0;
        ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();

        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            oldnow = now;
            now = System.nanoTime() / 1000;
            if (i++ > 3) {
                i = 0;
                //Log.d(TAG, "Measured: " + 1000000L / (now - oldnow) + " fps.");
            }
            try {
                int bufferIndex = mMediaCodec.dequeueInputBuffer(500000);
                if (bufferIndex >= 0) {
                    inputBuffers[bufferIndex].clear();
                    convertor.convert(data, inputBuffers[bufferIndex]);
                    mMediaCodec.queueInputBuffer(bufferIndex, 0, inputBuffers[bufferIndex].position(), now, 0);
                } else {
                    Log.e(TAG, "No buffer available!");
                }
            } finally {
                mCamera.addCallbackBuffer(data);
            }
        }
    };
    for (int i = 0; i < 10; i++)
        mCamera.addCallbackBuffer(new byte[convertor.getBufferSize()]);
    mCamera.setPreviewCallbackWithBuffer(callback);
    // The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
    mPacketizer.setDestination(mDestination, mRtpPort, mRtcpPort);
    mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
    mPacketizer.start();
    mStreaming = true;
}
9. EncoderDebugger#configureEncoder()
Project: spydroid-ipcamera
File: EncoderDebugger.java
/**
 * Instantiates and starts the encoder.
 */
private void configureEncoder() {
    mEncoder = MediaCodec.createByCodecName(mEncoderName);
    MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BITRATE);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAMERATE);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mEncoderColorFormat);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    mEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mEncoder.start();
}
10. VideoStream#encodeWithMediaCodecMethod2()
Project: libstreaming
File: VideoStream.java
/**
 * Video encoding is done by a MediaCodec.
 * But here we will use the buffer-to-surface method.
 */
@SuppressLint({ "InlinedApi", "NewApi" })
protected void encodeWithMediaCodecMethod2() throws RuntimeException, IOException {
    Log.d(TAG, "Video encoded using the MediaCodec API with a surface");
    // Updates the parameters of the camera if needed
    createCamera();
    updateCamera();
    // Estimates the frame rate of the camera
    measureFramerate();
    EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
    mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    Surface surface = mMediaCodec.createInputSurface();
    ((SurfaceView) mSurfaceView).addMediaCodecSurface(surface);
    mMediaCodec.start();
    // The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
    mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
    mPacketizer.start();
    mStreaming = true;
}
11. VideoStream#encodeWithMediaCodecMethod1()
Project: libstreaming
File: VideoStream.java
/**
 * Video encoding is done by a MediaCodec.
 */
@SuppressLint("NewApi")
protected void encodeWithMediaCodecMethod1() throws RuntimeException, IOException {
    Log.d(TAG, "Video encoded using the MediaCodec API with a buffer");
    // Updates the parameters of the camera if needed
    createCamera();
    updateCamera();
    // Estimates the frame rate of the camera
    measureFramerate();
    // Starts the preview if needed
    if (!mPreviewStarted) {
        try {
            mCamera.startPreview();
            mPreviewStarted = true;
        } catch (RuntimeException e) {
            destroyCamera();
            throw e;
        }
    }
    EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
    final NV21Convertor convertor = debugger.getNV21Convertor();
    mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, debugger.getEncoderColorFormat());
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mMediaCodec.start();
    Camera.PreviewCallback callback = new Camera.PreviewCallback() {
        long now = System.nanoTime() / 1000, oldnow = now, i = 0;
        ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();

        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            oldnow = now;
            now = System.nanoTime() / 1000;
            if (i++ > 3) {
                i = 0;
                //Log.d(TAG, "Measured: " + 1000000L / (now - oldnow) + " fps.");
            }
            try {
                int bufferIndex = mMediaCodec.dequeueInputBuffer(500000);
                if (bufferIndex >= 0) {
                    inputBuffers[bufferIndex].clear();
                    if (data == null)
                        Log.e(TAG, "Symptom of the \"Callback buffer was too small\" problem...");
                    else
                        convertor.convert(data, inputBuffers[bufferIndex]);
                    mMediaCodec.queueInputBuffer(bufferIndex, 0, inputBuffers[bufferIndex].position(), now, 0);
                } else {
                    Log.e(TAG, "No buffer available!");
                }
            } finally {
                mCamera.addCallbackBuffer(data);
            }
        }
    };
    for (int i = 0; i < 10; i++)
        mCamera.addCallbackBuffer(new byte[convertor.getBufferSize()]);
    mCamera.setPreviewCallbackWithBuffer(callback);
    // The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
    mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
    mPacketizer.start();
    mStreaming = true;
}
12. EncoderDebugger#configureEncoder()
Project: libstreaming
File: EncoderDebugger.java
/**
 * Instantiates and starts the encoder.
 * @throws IOException The encoder cannot be configured
 */
private void configureEncoder() throws IOException {
    mEncoder = MediaCodec.createByCodecName(mEncoderName);
    MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BITRATE);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAMERATE);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mEncoderColorFormat);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    mEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mEncoder.start();
}
13. SoftInputSurfaceActivity#prepareEncoder()
Project: grafika
File: SoftInputSurfaceActivity.java
/**
 * Prepares the video encoder, muxer, and an input surface.
 */
private void prepareEncoder(File outputFile) throws IOException {
    mBufferInfo = new MediaCodec.BufferInfo();
    MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT);
    // Set some properties. Failing to specify some of these can cause the MediaCodec
    // configure() call to throw an unhelpful exception.
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAMES_PER_SECOND);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
    if (VERBOSE) Log.d(TAG, "format: " + format);
    // Create a MediaCodec encoder, and configure it with our format. Get a Surface
    // we can use for input and wrap it with a class that handles the EGL work.
    mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
    mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mInputSurface = mEncoder.createInputSurface();
    mEncoder.start();
    // Create a MediaMuxer. We just want to convert the raw H.264 elementary stream
    // we get from MediaCodec into a .mp4 file.
    if (VERBOSE) Log.d(TAG, "output will go to " + outputFile);
    mMuxer = new MediaMuxer(outputFile.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    mTrackIndex = -1;
    mMuxerStarted = false;
}
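Note that mTrackIndex and mMuxerStarted are deliberately left unset here: the muxer cannot be started until the encoder reports its real output format, which carries the codec-specific data. A sketch of the corresponding drain-side handling, using the fields from the example above (the timeout value is illustrative):

int status = mEncoder.dequeueOutputBuffer(mBufferInfo, 10000 /* us */);
if (status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    // Only now does the format contain the csd buffers the muxer needs.
    MediaFormat newFormat = mEncoder.getOutputFormat();
    mTrackIndex = mMuxer.addTrack(newFormat);
    mMuxer.start();
    mMuxerStarted = true;
} else if (status >= 0) {
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        mBufferInfo.size = 0; // config data was already handed to the muxer via the format
    }
    if (mMuxerStarted && mBufferInfo.size > 0) {
        ByteBuffer data = mEncoder.getOutputBuffers()[status];
        mMuxer.writeSampleData(mTrackIndex, data, mBufferInfo);
    }
    mEncoder.releaseOutputBuffer(status, false);
}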
14. GeneratedMovie#prepareEncoder()
Project: grafika
File: GeneratedMovie.java
/**
 * Prepares the video encoder, muxer, and an EGL input surface.
 */
protected void prepareEncoder(String mimeType, int width, int height, int bitRate, int framesPerSecond, File outputFile) throws IOException {
    mBufferInfo = new MediaCodec.BufferInfo();
    MediaFormat format = MediaFormat.createVideoFormat(mimeType, width, height);
    // Set some properties. Failing to specify some of these can cause the MediaCodec
    // configure() call to throw an unhelpful exception.
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, framesPerSecond);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
    if (VERBOSE) Log.d(TAG, "format: " + format);
    // Create a MediaCodec encoder, and configure it with our format. Get a Surface
    // we can use for input and wrap it with a class that handles the EGL work.
    mEncoder = MediaCodec.createEncoderByType(mimeType);
    mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    Log.v(TAG, "encoder is " + mEncoder.getCodecInfo().getName());
    Surface surface;
    try {
        surface = mEncoder.createInputSurface();
    } catch (IllegalStateException ise) {
        if (isSoftwareCodec(mEncoder)) {
            throw new RuntimeException("Can't use input surface with software codec: " + mEncoder.getCodecInfo().getName(), ise);
        } else {
            throw new RuntimeException("Failed to create input surface", ise);
        }
    }
    mEglCore = new EglCore(null, EglCore.FLAG_RECORDABLE);
    mInputSurface = new WindowSurface(mEglCore, surface, true);
    mInputSurface.makeCurrent();
    mEncoder.start();
    // Create a MediaMuxer. We just want to convert the raw H.264 elementary stream
    // we get from MediaCodec into a .mp4 file.
    if (VERBOSE) Log.d(TAG, "output will go to " + outputFile);
    mMuxer = new MediaMuxer(outputFile.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    mTrackIndex = -1;
    mMuxerStarted = false;
}
15. CodecOpenActivity#clickStart()
Project: grafika
File: CodecOpenActivity.java
/**
 * onClick handler for "start" button.
 *
 * We create as many codecs as we can and return without releasing them. The codecs
 * will remain in use until the next GC.
 */
public void clickStart(@SuppressWarnings("unused") View unused) {
    final String MIME_TYPE = "video/avc";
    final int WIDTH = 320;
    final int HEIGHT = 240;
    final int BIT_RATE = 1000000;
    final int FRAME_RATE = 15;
    final int IFRAME_INTERVAL = 1;
    final boolean START_CODEC = true;
    MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
    Log.d(TAG, "format: " + format);
    MediaCodec[] codecs = new MediaCodec[MAX_OPEN];
    int i;
    for (i = 0; i < MAX_OPEN; i++) {
        try {
            codecs[i] = MediaCodec.createEncoderByType(MIME_TYPE);
            codecs[i].configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            if (START_CODEC) {
                codecs[i].createInputSurface();
                codecs[i].start();
            }
        } catch (Exception ex) {
            Log.i(TAG, "Failed on creation of codec #" + i, ex);
            break;
        }
    }
    showCountDialog(i);
}
16. MediaCodecBridge#createVideoEncoderFormat()
Project: chromium_webview
File: MediaCodecBridge.java
@CalledByNative
private static MediaFormat createVideoEncoderFormat(String mime, int width, int height, int bitRate, int frameRate, int iFrameInterval, int colorFormat) {
    MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
    format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
    return format;
}
17. EncoderDebugger#configureDecoder()
Project: spydroid-ipcamera
File: EncoderDebugger.java
/**
 * Instantiates and starts the decoder.
 */
private void configureDecoder() {
    byte[] prefix = new byte[] { 0x00, 0x00, 0x00, 0x01 };
    ByteBuffer csd0 = ByteBuffer.allocate(4 + mSPS.length + 4 + mPPS.length);
    csd0.put(new byte[] { 0x00, 0x00, 0x00, 0x01 });
    csd0.put(mSPS);
    csd0.put(new byte[] { 0x00, 0x00, 0x00, 0x01 });
    csd0.put(mPPS);
    mDecoder = MediaCodec.createByCodecName(mDecoderName);
    MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
    mediaFormat.setByteBuffer("csd-0", csd0);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mDecoderColorFormat);
    mDecoder.configure(mediaFormat, null, null, 0);
    mDecoder.start();
    ByteBuffer[] decInputBuffers = mDecoder.getInputBuffers();
    int decInputIndex = mDecoder.dequeueInputBuffer(1000000 / FRAMERATE);
    if (decInputIndex >= 0) {
        decInputBuffers[decInputIndex].clear();
        decInputBuffers[decInputIndex].put(prefix);
        decInputBuffers[decInputIndex].put(mSPS);
        mDecoder.queueInputBuffer(decInputIndex, 0, decInputBuffers[decInputIndex].position(), timestamp(), 0);
    } else {
        if (VERBOSE) Log.e(TAG, "No buffer available!");
    }
    decInputIndex = mDecoder.dequeueInputBuffer(1000000 / FRAMERATE);
    if (decInputIndex >= 0) {
        decInputBuffers[decInputIndex].clear();
        decInputBuffers[decInputIndex].put(prefix);
        decInputBuffers[decInputIndex].put(mPPS);
        mDecoder.queueInputBuffer(decInputIndex, 0, decInputBuffers[decInputIndex].position(), timestamp(), 0);
    } else {
        if (VERBOSE) Log.e(TAG, "No buffer available!");
    }
}
18. EncoderDebugger#configureDecoder()
Project: libstreaming
File: EncoderDebugger.java
/**
 * Instantiates and starts the decoder.
 * @throws IOException The decoder cannot be configured
 */
private void configureDecoder() throws IOException {
    byte[] prefix = new byte[] { 0x00, 0x00, 0x00, 0x01 };
    ByteBuffer csd0 = ByteBuffer.allocate(4 + mSPS.length + 4 + mPPS.length);
    csd0.put(new byte[] { 0x00, 0x00, 0x00, 0x01 });
    csd0.put(mSPS);
    csd0.put(new byte[] { 0x00, 0x00, 0x00, 0x01 });
    csd0.put(mPPS);
    mDecoder = MediaCodec.createByCodecName(mDecoderName);
    MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
    mediaFormat.setByteBuffer("csd-0", csd0);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mDecoderColorFormat);
    mDecoder.configure(mediaFormat, null, null, 0);
    mDecoder.start();
    ByteBuffer[] decInputBuffers = mDecoder.getInputBuffers();
    int decInputIndex = mDecoder.dequeueInputBuffer(1000000 / FRAMERATE);
    if (decInputIndex >= 0) {
        decInputBuffers[decInputIndex].clear();
        decInputBuffers[decInputIndex].put(prefix);
        decInputBuffers[decInputIndex].put(mSPS);
        mDecoder.queueInputBuffer(decInputIndex, 0, decInputBuffers[decInputIndex].position(), timestamp(), 0);
    } else {
        if (VERBOSE) Log.e(TAG, "No buffer available!");
    }
    decInputIndex = mDecoder.dequeueInputBuffer(1000000 / FRAMERATE);
    if (decInputIndex >= 0) {
        decInputBuffers[decInputIndex].clear();
        decInputBuffers[decInputIndex].put(prefix);
        decInputBuffers[decInputIndex].put(mPPS);
        mDecoder.queueInputBuffer(decInputIndex, 0, decInputBuffers[decInputIndex].position(), timestamp(), 0);
    } else {
        if (VERBOSE) Log.e(TAG, "No buffer available!");
    }
}
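Both configureDecoder() examples pack the SPS and PPS, each with an Annex-B start code, into a single csd-0 buffer. The MediaFormat documentation for "video/avc" describes the SPS in csd-0 and the PPS in a separate csd-1 buffer; a sketch of that variant, reusing mSPS/mPPS from the example above:

ByteBuffer csd0 = ByteBuffer.allocate(4 + mSPS.length);
csd0.put(new byte[] { 0x00, 0x00, 0x00, 0x01 }).put(mSPS);
csd0.flip(); // make the written bytes readable from position 0
ByteBuffer csd1 = ByteBuffer.allocate(4 + mPPS.length);
csd1.put(new byte[] { 0x00, 0x00, 0x00, 0x01 }).put(mPPS);
csd1.flip();
mediaFormat.setByteBuffer("csd-0", csd0);
mediaFormat.setByteBuffer("csd-1", csd1);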
19. MediaUtils#canDecodeVideo()
Project: CtsVerifier
File: MediaUtils.java
public static boolean canDecodeVideo(String mime, int width, int height, float rate) {
    MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
    format.setFloat(MediaFormat.KEY_FRAME_RATE, rate);
    return canDecode(format);
}
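The canDecode() helper is not shown in this excerpt. On API 21+ it can be approximated with MediaCodecList.findDecoderForFormat; a sketch (not the actual CtsVerifier implementation):

private static boolean canDecode(MediaFormat format) {
    MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
    if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP) {
        // On Lollipop, findDecoderForFormat() rejects formats that carry a frame
        // rate; the documented workaround is to clear the key first.
        format.setString(MediaFormat.KEY_FRAME_RATE, null);
    }
    return mcl.findDecoderForFormat(format) != null;
}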
20. WebAudioMediaCodecBridge#decodeAudioFile()
Project: chromium_webview
File: WebAudioMediaCodecBridge.java
@CalledByNative
private static boolean decodeAudioFile(Context ctx, long nativeMediaCodecBridge, int inputFD, long dataSize) {
    if (dataSize < 0 || dataSize > 0x7fffffff)
        return false;
    MediaExtractor extractor = new MediaExtractor();
    ParcelFileDescriptor encodedFD;
    encodedFD = ParcelFileDescriptor.adoptFd(inputFD);
    try {
        extractor.setDataSource(encodedFD.getFileDescriptor(), 0, dataSize);
    } catch (Exception e) {
        e.printStackTrace();
        encodedFD.detachFd();
        return false;
    }
    if (extractor.getTrackCount() <= 0) {
        encodedFD.detachFd();
        return false;
    }
    MediaFormat format = extractor.getTrackFormat(0);
    // Number of channels specified in the file
    int inputChannelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    // Number of channels the decoder will provide. (Not necessarily
    // the same as inputChannelCount. See crbug.com/266006.)
    int outputChannelCount = inputChannelCount;
    int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
    String mime = format.getString(MediaFormat.KEY_MIME);
    long durationMicroseconds = 0;
    if (format.containsKey(MediaFormat.KEY_DURATION)) {
        try {
            durationMicroseconds = format.getLong(MediaFormat.KEY_DURATION);
        } catch (Exception e) {
            Log.d(LOG_TAG, "Cannot get duration");
        }
    }
    // If the duration is too long, treat it as unknown; a 32-bit microsecond
    // count only goes up to about 2148 sec (35.8 min).
    if (durationMicroseconds > 0x7fffffff) {
        durationMicroseconds = 0;
    }
    Log.d(LOG_TAG, "Initial: Tracks: " + extractor.getTrackCount() + " Format: " + format);
    // Create decoder
    MediaCodec codec;
    try {
        codec = MediaCodec.createDecoderByType(mime);
    } catch (Exception e) {
        Log.w(LOG_TAG, "Failed to create MediaCodec for mime type: " + mime);
        encodedFD.detachFd();
        return false;
    }
    codec.configure(format, null /* surface */, null /* crypto */, 0);
    codec.start();
    ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
    ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
    // A track must be selected and will be used to read samples.
    extractor.selectTrack(0);
    boolean sawInputEOS = false;
    boolean sawOutputEOS = false;
    boolean destinationInitialized = false;
    // Keep processing until the output is done.
    while (!sawOutputEOS) {
        if (!sawInputEOS) {
            // Input side
            int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_MICROSECONDS);
            if (inputBufIndex >= 0) {
                ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                int sampleSize = extractor.readSampleData(dstBuf, 0);
                long presentationTimeMicroSec = 0;
                if (sampleSize < 0) {
                    sawInputEOS = true;
                    sampleSize = 0;
                } else {
                    presentationTimeMicroSec = extractor.getSampleTime();
                }
                codec.queueInputBuffer(inputBufIndex, 0 /* offset */, sampleSize, presentationTimeMicroSec, sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
                if (!sawInputEOS) {
                    extractor.advance();
                }
            }
        }
        // Output side
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        final int outputBufIndex = codec.dequeueOutputBuffer(info, TIMEOUT_MICROSECONDS);
        if (outputBufIndex >= 0) {
            ByteBuffer buf = codecOutputBuffers[outputBufIndex];
            if (!destinationInitialized) {
                // Initialize the destination as late as possible to catch any
                // changes in format. But be sure to initialize it BEFORE we send
                // any decoded audio, and only initialize once.
                Log.d(LOG_TAG, "Final: Rate: " + sampleRate + " Channels: " + inputChannelCount + " Mime: " + mime + " Duration: " + durationMicroseconds + " microsec");
                nativeInitializeDestination(nativeMediaCodecBridge, inputChannelCount, sampleRate, durationMicroseconds);
                destinationInitialized = true;
            }
            if (destinationInitialized && info.size > 0) {
                nativeOnChunkDecoded(nativeMediaCodecBridge, buf, info.size, inputChannelCount, outputChannelCount);
            }
            buf.clear();
            codec.releaseOutputBuffer(outputBufIndex, false);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                sawOutputEOS = true;
            }
        } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            codecOutputBuffers = codec.getOutputBuffers();
        } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat newFormat = codec.getOutputFormat();
            outputChannelCount = newFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
            sampleRate = newFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
            Log.d(LOG_TAG, "output format changed to " + newFormat);
        }
    }
    encodedFD.detachFd();
    codec.stop();
    codec.release();
    codec = null;
    return true;
}
21. MediaCodecDecoderRenderer#setup()
Project: moonlight-android
File: MediaCodecDecoderRenderer.java
@Override
public boolean setup(VideoDecoderRenderer.VideoFormat format, int width, int height, int redrawRate, Object renderTarget, int drFlags) {
    this.initialWidth = width;
    this.initialHeight = height;
    this.videoFormat = format;
    String mimeType;
    String selectedDecoderName;
    if (videoFormat == VideoFormat.H264) {
        mimeType = "video/avc";
        selectedDecoderName = avcDecoderName;
        if (avcDecoderName == null) {
            LimeLog.severe("No available AVC decoder!");
            return false;
        }
        // These fixups only apply to H264 decoders
        needsSpsBitstreamFixup = MediaCodecHelper.decoderNeedsSpsBitstreamRestrictions(selectedDecoderName);
        needsBaselineSpsHack = MediaCodecHelper.decoderNeedsBaselineSpsHack(selectedDecoderName);
        constrainedHighProfile = MediaCodecHelper.decoderNeedsConstrainedHighProfile(selectedDecoderName);
        isExynos4 = MediaCodecHelper.isExynos4Device();
        if (needsSpsBitstreamFixup) {
            LimeLog.info("Decoder " + selectedDecoderName + " needs SPS bitstream restrictions fixup");
        }
        if (needsBaselineSpsHack) {
            LimeLog.info("Decoder " + selectedDecoderName + " needs baseline SPS hack");
        }
        if (constrainedHighProfile) {
            LimeLog.info("Decoder " + selectedDecoderName + " needs constrained high profile");
        }
        if (isExynos4) {
            LimeLog.info("Decoder " + selectedDecoderName + " is on Exynos 4");
        }
    } else if (videoFormat == VideoFormat.H265) {
        mimeType = "video/hevc";
        selectedDecoderName = hevcDecoderName;
        if (hevcDecoderName == null) {
            LimeLog.severe("No available HEVC decoder!");
            return false;
        }
    } else {
        // Unknown format
        return false;
    }
    // Codec creation has been known to throw runtime exceptions due to implementation problems
    try {
        videoDecoder = MediaCodec.createByCodecName(selectedDecoderName);
    } catch (Exception e) {
        return false;
    }
    MediaFormat videoFormat = MediaFormat.createVideoFormat(mimeType, width, height);
    // Adaptive playback hints require KitKat, so we don't fill these pre-KitKat
    if (adaptivePlayback && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
        videoFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, width);
        videoFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, height);
    }
    videoDecoder.configure(videoFormat, ((SurfaceHolder) renderTarget).getSurface(), null, 0);
    videoDecoder.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
    LimeLog.info("Using codec " + selectedDecoderName + " for hardware decoding " + mimeType);
    return true;
}
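The adaptivePlayback flag consulted above is not derived in this method. One way such a flag could be computed (a sketch, not moonlight-android's code) is from the decoder's advertised capabilities once the codec has been created:

MediaCodecInfo.CodecCapabilities caps =
        videoDecoder.getCodecInfo().getCapabilitiesForType(mimeType);
boolean adaptive = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT
        && caps.isFeatureSupported(MediaCodecInfo.CodecCapabilities.FEATURE_AdaptivePlayback);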