com.google.android.exoplayer.ext.vp9.LibvpxVideoTrackRenderer

Here are examples of the Java API class com.google.android.exoplayer.ext.vp9.LibvpxVideoTrackRenderer, taken from open-source projects.

1. DashRendererBuilder#onSingleManifest()

Project: ExoPlayer
Source File: DashRendererBuilder.java
View license
@Override
public void onSingleManifest(MediaPresentationDescription manifest) {
    // Components shared by the audio and video renderers.
    LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
    DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(null, null);
    // Walk the first period: remember the first audio representation (noting whether it
    // declares an Opus codec) and collect every representation whose codec string is VP9.
    Representation audioRepresentation = null;
    boolean audioIsOpus = false;
    ArrayList<Representation> vp9Representations = new ArrayList<>();
    Period period = manifest.getPeriod(0);
    for (AdaptationSet adaptationSet : period.adaptationSets) {
        for (Representation representation : adaptationSet.representations) {
            String codecs = representation.format.codecs;
            boolean hasCodecs = !TextUtils.isEmpty(codecs);
            if (adaptationSet.type == AdaptationSet.TYPE_AUDIO && audioRepresentation == null) {
                audioRepresentation = representation;
                audioIsOpus = hasCodecs && codecs.startsWith("opus");
            } else if (adaptationSet.type == AdaptationSet.TYPE_VIDEO
                    && hasCodecs && codecs.startsWith("vp9")) {
                vp9Representations.add(representation);
            }
        }
    }
    Representation[] videoRepresentations = vp9Representations.toArray(new Representation[0]);
    // Build the VP9 video renderer, if any VP9 representations were found.
    LibvpxVideoTrackRenderer videoRenderer = null;
    if (!vp9Representations.isEmpty()) {
        DataSource videoDataSource = new DefaultUriDataSource(player, bandwidthMeter, userAgent);
        ChunkSource videoChunkSource = new DashChunkSource(
                DefaultDashTrackSelector.newVideoInstance(null, false, false),
                videoDataSource,
                new AdaptiveEvaluator(bandwidthMeter),
                manifest.getPeriodDuration(0),
                AdaptationSet.TYPE_VIDEO,
                videoRepresentations);
        ChunkSampleSource videoSampleSource = new ChunkSampleSource(
                videoChunkSource, loadControl, VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE);
        videoRenderer = new LibvpxVideoTrackRenderer(
                videoSampleSource, true, player.getMainHandler(), player, 50);
    }
    // Build the audio renderer: libopus for Opus streams, MediaCodec for everything else.
    TrackRenderer audioRenderer = null;
    if (audioRepresentation != null) {
        DataSource audioDataSource = new DefaultUriDataSource(player, bandwidthMeter, userAgent);
        DashChunkSource audioChunkSource = new DashChunkSource(
                DefaultDashTrackSelector.newAudioInstance(),
                audioDataSource,
                null,
                manifest.getPeriodDuration(0),
                AdaptationSet.TYPE_AUDIO,
                audioRepresentation);
        SampleSource audioSampleSource = new ChunkSampleSource(
                audioChunkSource, loadControl, AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE);
        audioRenderer = audioIsOpus
                ? new LibopusAudioTrackRenderer(audioSampleSource)
                : new MediaCodecAudioTrackRenderer(audioSampleSource, MediaCodecSelector.DEFAULT);
    }
    // NOTE(review): when no VP9 representations exist, the first slot is null — this
    // mirrors the original behavior; the receiver presumably tolerates a null renderer.
    TrackRenderer[] renderers = audioRenderer == null
            ? new TrackRenderer[] {videoRenderer}
            : new TrackRenderer[] {videoRenderer, audioRenderer};
    player.onRenderersBuilt(renderers);
}