com.google.android.exoplayer2.Format

Here are the examples of the java api com.google.android.exoplayer2.Format taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

553 Examples

19 Source : DefaultTrackNameProvider.java
with GNU General Public License v2.0
from warren-bank

/**
 * Builds a human-readable bitrate string for the given format.
 *
 * @param format The format whose bitrate is described.
 * @return A localized bitrate string, or an empty string if the bitrate is unset.
 */
private String buildBitrateString(Format format) {
    int bitrate = format.bitrate;
    if (bitrate == Format.NO_VALUE) {
        return "";
    }
    // Convert bits per second to Mbit/s for display.
    return resources.getString(R.string.exo_track_bitrate, bitrate / 1000000f);
}

19 Source : DefaultTrackNameProvider.java
with GNU General Public License v2.0
from warren-bank

/**
 * Builds a "width x height" resolution string for the given format.
 *
 * @param format The format whose resolution is described.
 * @return A localized resolution string, or an empty string if either dimension is unset.
 */
private String buildResolutionString(Format format) {
    int width = format.width;
    int height = format.height;
    if (width == Format.NO_VALUE || height == Format.NO_VALUE) {
        return "";
    }
    return resources.getString(R.string.exo_track_resolution, width, height);
}

19 Source : DefaultTrackNameProvider.java
with GNU General Public License v2.0
from warren-bank

/**
 * Builds a label for the given format, preferring the explicit label and
 * falling back to a language-derived string.
 *
 * @param format The format whose label is described.
 * @return The label, a language string, or an empty string if neither is available.
 */
private String buildLabelString(Format format) {
    String label = format.label;
    if (!TextUtils.isEmpty(label)) {
        return label;
    }
    // No explicit label: fall back to the language, unless it is missing or undetermined.
    String language = format.language;
    if (TextUtils.isEmpty(language) || C.LANGUAGE_UNDETERMINED.equals(language)) {
        return "";
    }
    return buildLanguageString(language);
}

19 Source : DefaultTrackNameProvider.java
with GNU General Public License v2.0
from warren-bank

/**
 * Infers the primary track type of a format.
 *
 * <p>Preference order: the sample MIME type, then the codecs string, then the
 * presence of video dimensions or audio properties.
 *
 * @param format The format to inspect.
 * @return One of the {@code C.TRACK_TYPE_*} constants.
 */
private static int inferPrimaryTrackType(Format format) {
    // First preference: the sample MIME type.
    int trackType = MimeTypes.getTrackType(format.sampleMimeType);
    if (trackType != C.TRACK_TYPE_UNKNOWN) {
        return trackType;
    }
    // Next: infer from the codecs string.
    if (MimeTypes.getVideoMediaMimeType(format.codecs) != null) {
        return C.TRACK_TYPE_VIDEO;
    }
    if (MimeTypes.getAudioMediaMimeType(format.codecs) != null) {
        return C.TRACK_TYPE_AUDIO;
    }
    // Finally: use whichever media-specific fields are populated.
    boolean hasVideoSize = format.width != Format.NO_VALUE || format.height != Format.NO_VALUE;
    if (hasVideoSize) {
        return C.TRACK_TYPE_VIDEO;
    }
    boolean hasAudioProperties =
            format.channelCount != Format.NO_VALUE || format.sampleRate != Format.NO_VALUE;
    if (hasAudioProperties) {
        return C.TRACK_TYPE_AUDIO;
    }
    return C.TRACK_TYPE_UNKNOWN;
}

19 Source : DefaultTrackNameProvider.java
with GNU General Public License v2.0
from warren-bank

/**
 * Returns a display name for the track, assembled from resolution, bitrate,
 * label and channel information depending on the inferred track type.
 */
@Override
public String getTrackName(Format format) {
    int trackType = inferPrimaryTrackType(format);
    String trackName;
    switch (trackType) {
        case C.TRACK_TYPE_VIDEO:
            trackName = joinWithSeparator(buildResolutionString(format), buildBitrateString(format));
            break;
        case C.TRACK_TYPE_AUDIO:
            trackName = joinWithSeparator(
                    buildLabelString(format), buildAudioChannelString(format), buildBitrateString(format));
            break;
        default:
            trackName = buildLabelString(format);
            break;
    }
    // Fall back to a localized "unknown" string when nothing could be built.
    return trackName.isEmpty() ? resources.getString(R.string.exo_track_unknown) : trackName;
}

19 Source : DefaultTrackNameProvider.java
with GNU General Public License v2.0
from warren-bank

/**
 * Builds a human-readable channel-configuration string for the given format.
 *
 * @param format The format whose channel count is described.
 * @return A localized channel description, or an empty string if the count is unset or invalid.
 */
private String buildAudioChannelString(Format format) {
    int channelCount = format.channelCount;
    if (channelCount == Format.NO_VALUE || channelCount < 1) {
        return "";
    }
    if (channelCount == 1) {
        return resources.getString(R.string.exo_track_mono);
    }
    if (channelCount == 2) {
        return resources.getString(R.string.exo_track_stereo);
    }
    // 6 or 7 channels are both reported as 5.1 surround.
    if (channelCount == 6 || channelCount == 7) {
        return resources.getString(R.string.exo_track_surround_5_point_1);
    }
    if (channelCount == 8) {
        return resources.getString(R.string.exo_track_surround_7_point_1);
    }
    return resources.getString(R.string.exo_track_surround);
}

19 Source : DebugTextViewHelper.java
with GNU General Public License v2.0
from warren-bank

/**
 * Returns a string containing audio debugging information, or an empty string
 * if no audio format is currently active.
 */
protected String getAudioString() {
    Format format = player.getAudioFormat();
    if (format == null) {
        return "";
    }
    StringBuilder info = new StringBuilder("\n");
    info.append(format.sampleMimeType)
            .append("(id:").append(format.id)
            .append(" hz:").append(format.sampleRate)
            .append(" ch:").append(format.channelCount)
            .append(getDecoderCountersBufferCountString(player.getAudioDecoderCounters()))
            .append(")");
    return info.toString();
}

19 Source : RtspSampleStreamWrapper.java
with GNU General Public License v2.0
from warren-bank

// UpstreamFormatChangedListener implementation. Called by the loading thread.
@Override
public void onUpstreamFormatChanged(Format format) {
    // The format argument is not inspected here; we only hop to the main thread
    // to run maybeFinishPrepareRunnable (presumably re-checks whether preparation
    // can complete now that a format is known — confirm against the wrapper's prepare logic).
    mainHandler.post(maybeFinishPrepareRunnable);
}

19 Source : RtpH264PayloadReader.java
with GNU General Public License v2.0
from warren-bank

/**
 * Extracts individual NAL Units from H.264 RTP payload
 * Only supports two of three packetization modes: Single NAL Unit and
 * Non-Interleaved defines in RFC 6184 for H.264 NALU transmission over RTP.
 */
/*package*/
final clreplaced RtpH264PayloadReader implements RtpPayloadReader {

    // Coded slice of a non-IDR picture
    private static final int NAL_UNIT_TYPE_NON_IDR = 1;

    // Coded slice of a IDR picture
    private static final int NAL_UNIT_TYPE_IDR = 5;

    // Supplemental enhancement information
    private static final int NAL_UNIT_TYPE_SEI = 6;

    // Sequence parameter set
    private static final int NAL_UNIT_TYPE_SPS = 7;

    // Picture parameter set
    private static final int NAL_UNIT_TYPE_PPS = 8;

    // Access unit delimiter
    private static final int NAL_UNIT_TYPE_AUD = 9;

    // Single-Time Aggregation Packet A
    private static final int NAL_UNIT_TYPE_STAP_A = 24;

    // Fragmentation Units A
    private static final int NAL_UNIT_TYPE_FU_A = 28;

    private final boolean allowNonIdrKeyframes;

    private final boolean detectAccessUnits;

    // Temporary arrays.
    // Stores size of an nal unit in aggregation mode
    private final ParsableByteArray nalLength;

    // Stores the nal unit start code
    private final ParsableByteArray nalStartCode;

    // To join all fragment units in an only one NAL unit
    private FragmentedNalUnit fragments;

    private SampleReader sampleReader;

    private TrackOutput output;

    private int sampleLength;

    private boolean sampleIsKeyframe;

    private int lastSequenceNumber;

    private int sequenceNumber;

    private boolean nalUnitCompleteIndicator;

    private Format format;

    private final RtpVideoPayload payloadFormat;

    private final RtpTimestampAdjuster timestampAdjuster;

    private boolean hasOutputFormat;

    private int trackId;

    private String formatId;

    public RtpH264PayloadReader(RtpVideoPayload payloadFormat) {
        this(payloadFormat, true, true);
    }

    public RtpH264PayloadReader(RtpVideoPayload payloadFormat, boolean allowNonIdrKeyframes, boolean detectAccessUnits) {
        this.payloadFormat = payloadFormat;
        this.allowNonIdrKeyframes = allowNonIdrKeyframes;
        this.detectAccessUnits = detectAccessUnits;
        timestampAdjuster = new RtpTimestampAdjuster(payloadFormat.clockrate());
        fragments = new FragmentedNalUnit();
        nalLength = new ParsableByteArray(2);
        nalStartCode = new ParsableByteArray(NalUnitUtil.NAL_START_CODE);
        lastSequenceNumber = -1;
    }

    @Override
    public void seek() {
        sampleReader.reset();
    }

    @Override
    public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator trackIdGenerator) {
        trackIdGenerator.generateNewId();
        trackId = trackIdGenerator.getTrackId();
        formatId = trackIdGenerator.getFormatId();
        output = extractorOutput.track(trackId, C.TRACK_TYPE_VIDEO);
        sampleReader = new SampleReader(formatId, output, allowNonIdrKeyframes, detectAccessUnits, false, timestampAdjuster);
        List<byte[]> codecSpecificData = payloadFormat.buildCodecSpecificData();
        if (codecSpecificData != null) {
            format = Format.createVideoSampleFormat(formatId, payloadFormat.sampleMimeType(), payloadFormat.codecs(), payloadFormat.bitrate(), Format.NO_VALUE, payloadFormat.width() > 0 ? payloadFormat.width() : Format.NO_VALUE, payloadFormat.height() > 0 ? payloadFormat.height() : Format.NO_VALUE, payloadFormat.framerate(), codecSpecificData, Format.NO_VALUE, payloadFormat.pixelWidthAspectRatio(), null);
            hasOutputFormat = true;
            output.format(format);
        }
    }

    @Override
    public boolean packetStarted(long sampleTimeStamp, boolean nalUnitCompleteIndicator, int sequenceNumber) {
        this.nalUnitCompleteIndicator = nalUnitCompleteIndicator;
        if (nalUnitCompleteIndicator) {
            timestampAdjuster.adjustSampleTimestamp(sampleTimeStamp);
        }
        if (lastSequenceNumber == -1) {
            lastSequenceNumber = sequenceNumber - 1;
        }
        this.sequenceNumber = sequenceNumber;
        return true;
    }

    @Override
    public void consume(ParsableByteArray packet) throws ParserException {
        int nalUnitType = getNalUnitType(packet);
        if (!hasOutputFormat && sampleReader.hasOutputFormat()) {
            sampleReader.outputSampleMetadata(nalUnitType);
            hasOutputFormat = true;
        }
        // Single NAL Unit Mode and Non-Interleaved Mode are only supports
        if ((nalUnitType > 0) && (nalUnitType < NAL_UNIT_TYPE_STAP_A)) {
            handleSingleNalUnit(packet);
        } else if (nalUnitType == NAL_UNIT_TYPE_STAP_A) {
            handleAggregationNalUnit(packet);
        } else if (nalUnitType == NAL_UNIT_TYPE_FU_A) {
            handleFragmentationNalUnit(packet);
        }
        if (hasOutputFormat) {
            outputSampleMetadata();
        }
    }

    private void outputSampleMetadata() {
        if (nalUnitCompleteIndicator) {
            @C.BufferFlags
            int flags = sampleIsKeyframe ? C.BUFFER_FLAG_KEY_FRAME : 0;
            output.sampleMetadata(timestampAdjuster.getSampleTimeUs(), flags, sampleLength, 0, null);
            sampleLength = 0;
            sampleIsKeyframe = false;
        }
    }

    private int getNalUnitType(ParsableByteArray packet) {
        return getNalUnitType(packet, packet.getPosition());
    }

    private int getNalUnitType(ParsableByteArray packet, int offset) {
        return packet.data[offset] & 0x1F;
    }

    private void handleSingleNalUnit(ParsableByteArray packet) {
        int limit = packet.bytesLeft();
        int nalUnitType = getNalUnitType(packet);
        // Log.v("RtpH264PayloadReader", "[Single] NAL unit type=[" + nalUnitType + "]");
        if (nalUnitType == NAL_UNIT_TYPE_IDR || nalUnitType == NAL_UNIT_TYPE_NON_IDR) {
            sampleIsKeyframe = true;
        }
        if (hasOutputFormat) {
            nalStartCode.setPosition(0);
            output.sampleData(nalStartCode, nalStartCode.limit());
            output.sampleData(packet, limit);
            sampleLength += limit + nalStartCode.limit();
        } else {
            sampleReader.consume(nalUnitType, packet);
        }
    }

    private void handleAggregationNalUnit(ParsableByteArray packet) {
        int nalUnitLength;
        int offset = 1;
        int limit = packet.limit();
        while (offset < limit) {
            packet.setPosition(offset);
            // Read the NAL length so that we know where we find the next one.
            packet.readBytes(nalLength.data, 0, 2);
            nalLength.setPosition(0);
            nalUnitLength = nalLength.readUnsignedShort();
            int nalUnitType = getNalUnitType(packet);
            // Log.v("RtpH264PayloadReader", "[Aggregation] NAL unit type=[" + nalUnitType + "]");
            if (hasOutputFormat) {
                nalStartCode.setPosition(0);
                output.sampleData(nalStartCode, nalStartCode.limit());
                output.sampleData(packet, nalUnitLength);
                sampleLength += nalUnitLength + nalStartCode.limit();
            } else {
                byte[] data = Arrays.copyOfRange(packet.data, offset + 2, offset + 2 + nalUnitLength);
                sampleReader.consume(nalUnitType, new ParsableByteArray(data));
            }
            offset += nalUnitLength + 2;
        }
    }

    private void handleFragmentationNalUnit(ParsableByteArray packet) {
        int limit = packet.bytesLeft();
        int indicatorFU = packet.data[0] & 0xFF;
        int headerFU = packet.data[1] & 0xFF;
        /**
         * The NAL unit type octet of the fragmented NAL unit is not included as such in the
         * fragmentation unit payload, but rather the information of the NAL unit type octet of the
         * fragmented NAL unit is conveyed in the F and NRI fields of the FU indicator octet of the
         * fragmentation unit and in the type field of the FU header.
         */
        int nalUnitType = headerFU & 0x1F;
        byte headerNAL = (byte) ((indicatorFU & 0xE0) | nalUnitType);
        sampleIsKeyframe = false;
        boolean isFirstFragmentUnit = (headerFU & 0x80) > 0;
        // Fragmented NAL unit start flag enabled
        if (isFirstFragmentUnit) {
            fragments.reset();
            fragments.sequence(sequenceNumber);
            // Log.v("RtpH264PayloadReader", "[Fragmented] NAL unit type=[" + nalUnitType + "]");
            byte[] fragmentUnit = Arrays.copyOfRange(packet.data, 1, limit);
            // replaces FU header octet to NAL unit header octet
            fragmentUnit[0] = headerNAL;
            fragments.appendFragmentUnit(fragmentUnit, 0, fragmentUnit.length);
        } else {
            if (((fragments.sequence() + 1) % 65536) != sequenceNumber) {
                fragments.reset();
                return;
            }
            fragments.sequence(sequenceNumber);
            byte[] fragmentUnit = Arrays.copyOfRange(packet.data, 2, limit);
            fragments.appendFragmentUnit(fragmentUnit, 0, fragmentUnit.length);
            boolean isLastFragmentUnit = (headerFU & 0x40) > 0;
            // Fragmented NAL unit end flag enabled
            if (isLastFragmentUnit) {
                // Consume the payload of the NAL unit.
                int length = 4 + fragments.nalLength;
                byte[] data = new byte[length];
                System.arraycopy(NalUnitUtil.NAL_START_CODE, 0, data, 0, 4);
                System.arraycopy(fragments.nalData, 0, data, 4, fragments.nalLength);
                if (hasOutputFormat) {
                    sampleLength += data.length;
                    sampleIsKeyframe = true;
                    output.sampleData(new ParsableByteArray(data), data.length);
                } else {
                    sampleReader.consume(nalUnitType, new ParsableByteArray(fragments.nalData, fragments.nalLength));
                }
                fragments.reset();
            }
        }
    }

    /**
     * Stores the consecutive fragment nal units to reconstruct the fragmented nal unit
     */
    private static final clreplaced FragmentedNalUnit {

        public byte[] nalData;

        public int nalLength;

        private int sequence;

        public FragmentedNalUnit() {
            // Initialize data
            nalData = new byte[128];
            sequence = -1;
        }

        /**
         * Resets the buffer, clearing any data that it holds.
         */
        public void reset() {
            nalLength = 0;
            sequence = -1;
        }

        public void sequence(int sequence) {
            this.sequence = sequence;
        }

        public int sequence() {
            return sequence;
        }

        /**
         * Called to add a fragment unit to fragmented nal unit.
         *
         * @param fragment Holds the data of fragment unit being preplaceded.
         * @param offset The offset of the data in {@code fragment}.
         * @param limit The limit (exclusive) of the data in {@code fragment}.
         */
        public void appendFragmentUnit(byte[] fragment, int offset, int limit) {
            int readLength = limit - offset;
            if (nalData.length < nalLength + readLength) {
                nalData = Arrays.copyOf(nalData, (nalLength + readLength) * 2);
            }
            System.arraycopy(fragment, offset, nalData, nalLength, readLength);
            nalLength += readLength;
        }
    }

    /**
     * Consumes NAL units and outputs samples.
     */
    private static final clreplaced SampleReader {

        // Coded slice of a non-IDR picture
        private static final int NAL_UNIT_TYPE_NON_IDR = 1;

        // Coded slice data parreplacedion A
        private static final int NAL_UNIT_TYPE_PARreplacedION_A = 2;

        // Coded slice of an IDR picture
        private static final int NAL_UNIT_TYPE_IDR = 5;

        // Access unit delimiter
        private static final int NAL_UNIT_TYPE_AUD = 9;

        private final String formatId;

        private final TrackOutput output;

        private final boolean allowNonIdrKeyframes;

        private final boolean detectAccessUnits;

        // Nal start code
        private final ParsableByteArray nalStartCode;

        private final RtpTimestampAdjuster timestampAdjuster;

        private final SparseArray<NalUnitUtil.SpsData> sps;

        private final SparseArray<NalUnitUtil.PpsData> pps;

        private ParsableByteArray spsNalUnit;

        private ParsableByteArray ppsNalUnit;

        private SliceHeaderData previousSliceHeader;

        private SliceHeaderData sliceHeader;

        private int nalUnitType;

        private long sampleTimeUs;

        private int sampleLength;

        private boolean hasOutputFormat;

        private boolean readingSample;

        private boolean sampleIsKeyframe;

        private Format format;

        public SampleReader(String formatId, TrackOutput output, boolean allowNonIdrKeyframes, boolean detectAccessUnits, boolean hasOutputFormat, RtpTimestampAdjuster timestampAdjuster) {
            this.output = output;
            this.formatId = formatId;
            this.allowNonIdrKeyframes = allowNonIdrKeyframes;
            this.detectAccessUnits = detectAccessUnits;
            this.hasOutputFormat = hasOutputFormat;
            this.timestampAdjuster = timestampAdjuster;
            sps = new SparseArray<>();
            pps = new SparseArray<>();
            previousSliceHeader = new SliceHeaderData(sps, pps, detectAccessUnits);
            sliceHeader = new SliceHeaderData(sps, pps, detectAccessUnits);
            nalStartCode = new ParsableByteArray(NalUnitUtil.NAL_START_CODE);
            sampleTimeUs = timestampAdjuster.getSampleTimeUs();
        }

        public void reset() {
            sliceHeader.clear();
        }

        public void outputSampleMetadata(int type) {
            if (type == NAL_UNIT_TYPE_AUD || (detectAccessUnits && sliceHeader.isFirstVclNalUnitOfPicture(previousSliceHeader))) {
                if (readingSample) {
                    @C.BufferFlags
                    int flags = sampleIsKeyframe ? C.BUFFER_FLAG_KEY_FRAME : 0;
                    output.sampleMetadata(sampleTimeUs, flags, sampleLength, 0, null);
                    sampleLength = 0;
                }
                sampleIsKeyframe = false;
                readingSample = true;
            }
        }

        public boolean hasOutputFormat() {
            return hasOutputFormat;
        }

        private void consume(int type, ParsableByteArray nalUnit) {
            outputSampleMetadata(type);
            nalUnitType = type;
            sampleTimeUs = timestampAdjuster.getSampleTimeUs();
            // Write a start code (0, 0, 0, 1) for the current NAL unit.
            nalStartCode.setPosition(0);
            output.sampleData(nalStartCode, nalStartCode.limit());
            // Write the current NAL unit (header + payload).
            output.sampleData(nalUnit, nalUnit.limit());
            switch(nalUnitType) {
                case NAL_UNIT_TYPE_AUD:
                    // Do nothing
                    break;
                case NAL_UNIT_TYPE_SPS:
                    if (!hasOutputFormat || detectAccessUnits) {
                        NalUnitUtil.SpsData spsData = NalUnitUtil.parseSpsNalUnit(nalUnit.data, 0, nalUnit.limit());
                        sps.append(spsData.seqParameterSetId, spsData);
                        if (spsNalUnit == null) {
                            spsNalUnit = new ParsableByteArray(Arrays.copyOf(nalUnit.data, nalUnit.limit()));
                        }
                    }
                    break;
                case NAL_UNIT_TYPE_PPS:
                    if (!hasOutputFormat || detectAccessUnits) {
                        NalUnitUtil.PpsData ppsData = NalUnitUtil.parsePpsNalUnit(nalUnit.data, 0, nalUnit.limit());
                        pps.append(ppsData.picParameterSetId, ppsData);
                        if (ppsNalUnit == null) {
                            ppsNalUnit = new ParsableByteArray(Arrays.copyOf(nalUnit.data, nalUnit.limit()));
                        }
                    }
                    break;
                case NAL_UNIT_TYPE_SEI:
                    // Do nothing
                    break;
                default:
                    if ((allowNonIdrKeyframes && nalUnitType == NAL_UNIT_TYPE_NON_IDR) || (detectAccessUnits && (nalUnitType == NAL_UNIT_TYPE_IDR || nalUnitType == NAL_UNIT_TYPE_NON_IDR || nalUnitType == NAL_UNIT_TYPE_PARreplacedION_A))) {
                        // Store the previous header and prepare to populate the new one.
                        SliceHeaderData newSliceHeader = previousSliceHeader;
                        previousSliceHeader = sliceHeader;
                        sliceHeader = newSliceHeader;
                        sliceHeader.clear();
                        sliceHeader.parseNalUnit(nalUnitType, nalUnit.data, nalUnit.limit());
                    }
            }
            if (!hasOutputFormat) {
                if (spsNalUnit != null && ppsNalUnit != null) {
                    List<byte[]> initializationData = new ArrayList<>();
                    initializationData.add(CodecSpecificDataUtil.buildNalUnit(spsNalUnit.data, 0, spsNalUnit.limit()));
                    initializationData.add(CodecSpecificDataUtil.buildNalUnit(ppsNalUnit.data, 0, ppsNalUnit.limit()));
                    NalUnitUtil.SpsData spsData = NalUnitUtil.parseSpsNalUnit(spsNalUnit.data, 0, spsNalUnit.limit());
                    format = Format.createVideoSampleFormat(formatId, MimeTypes.VIDEO_H264, null, Format.NO_VALUE, Format.NO_VALUE, spsData.width, spsData.height, Format.NO_VALUE, initializationData, Format.NO_VALUE, spsData.pixelWidthAspectRatio, null);
                    output.format(format);
                    hasOutputFormat = true;
                }
            }
            sampleIsKeyframe |= nalUnitType == NAL_UNIT_TYPE_IDR || (allowNonIdrKeyframes && nalUnitType == NAL_UNIT_TYPE_NON_IDR && sliceHeader.isISlice());
            sampleLength += nalStartCode.limit() + nalUnit.limit();
        }
    }

    private static final clreplaced SliceHeaderData {

        private static final int DEFAULT_BUFFER_SIZE = 128;

        // Coded slice of an IDR picture
        private static final int NAL_UNIT_TYPE_IDR = 5;

        private static final int SLICE_TYPE_I = 2;

        private static final int SLICE_TYPE_ALL_I = 7;

        private boolean isComplete;

        private boolean hreplacedliceType;

        private NalUnitUtil.SpsData spsData;

        private int nalRefIdc;

        private int sliceType;

        private int frameNum;

        private int picParameterSetId;

        private boolean fieldPicFlag;

        private boolean bottomFieldFlagPresent;

        private boolean bottomFieldFlag;

        private boolean idrPicFlag;

        private int idrPicId;

        private int picOrderCntLsb;

        private int deltaPicOrderCntBottom;

        private int deltaPicOrderCnt0;

        private int deltaPicOrderCnt1;

        private byte[] buffer;

        private final ParsableNalUnitBitArray bitArray;

        private final boolean detectAccessUnits;

        private final SparseArray<NalUnitUtil.SpsData> sps;

        private final SparseArray<NalUnitUtil.PpsData> pps;

        public SliceHeaderData(SparseArray<NalUnitUtil.SpsData> sps, SparseArray<NalUnitUtil.PpsData> pps, boolean detectAccessUnits) {
            this.sps = sps;
            this.pps = pps;
            this.detectAccessUnits = detectAccessUnits;
            buffer = new byte[DEFAULT_BUFFER_SIZE];
            bitArray = new ParsableNalUnitBitArray(buffer, 0, 0);
        }

        public void clear() {
            nalRefIdc = 0;
            sliceType = 0;
            picParameterSetId = 0;
            frameNum = 0;
            idrPicFlag = false;
            idrPicId = 0;
            fieldPicFlag = false;
            bottomFieldFlagPresent = false;
            bottomFieldFlag = false;
            picOrderCntLsb = 0;
            deltaPicOrderCntBottom = 0;
            deltaPicOrderCnt0 = 0;
            deltaPicOrderCnt1 = 0;
            hreplacedliceType = false;
            isComplete = false;
        }

        public boolean isISlice() {
            return hreplacedliceType && (sliceType == SLICE_TYPE_ALL_I || sliceType == SLICE_TYPE_I);
        }

        private void parseNalUnit(int nalUnitType, byte[] buffer, int length) {
            if (sps.size() == 0 || pps.size() == 0) {
                return;
            }
            bitArray.reset(buffer, 0, length);
            if (!bitArray.canReadBits(8)) {
                return;
            }
            // forbidden_zero_bit
            bitArray.skipBit();
            nalRefIdc = bitArray.readBits(2);
            // nal_unit_type
            bitArray.skipBits(5);
            // Read the slice header using the syntax defined in ITU-T Recommendation H.264 (2013)
            // subsection 7.3.3.
            if (!bitArray.canReadExpGolombCodedNum()) {
                return;
            }
            // first_mb_in_slice
            bitArray.readUnsignedExpGolombCodedInt();
            if (!bitArray.canReadExpGolombCodedNum()) {
                return;
            }
            sliceType = bitArray.readUnsignedExpGolombCodedInt();
            if (!detectAccessUnits) {
                // There are AUDs in the stream so the rest of the header can be ignored.
                setSliceType(sliceType);
                return;
            }
            if (!bitArray.canReadExpGolombCodedNum()) {
                return;
            }
            picParameterSetId = bitArray.readUnsignedExpGolombCodedInt();
            if (pps.indexOfKey(picParameterSetId) < 0) {
                // We have not seen the PPS yet, so don't try to decode the slice header.
                return;
            }
            NalUnitUtil.PpsData ppsData = pps.get(picParameterSetId);
            spsData = sps.get(ppsData.seqParameterSetId);
            if (spsData.separateColorPlaneFlag) {
                if (!bitArray.canReadBits(2)) {
                    return;
                }
                // colour_plane_id
                bitArray.skipBits(2);
            }
            if (!bitArray.canReadBits(spsData.frameNumLength)) {
                return;
            }
            frameNum = bitArray.readBits(spsData.frameNumLength);
            if (!spsData.frameMbsOnlyFlag) {
                if (!bitArray.canReadBits(1)) {
                    return;
                }
                fieldPicFlag = bitArray.readBit();
                if (fieldPicFlag) {
                    if (!bitArray.canReadBits(1)) {
                        return;
                    }
                    bottomFieldFlag = bitArray.readBit();
                    bottomFieldFlagPresent = true;
                }
            }
            idrPicFlag = nalUnitType == NAL_UNIT_TYPE_IDR;
            idrPicId = 0;
            if (idrPicFlag) {
                if (!bitArray.canReadExpGolombCodedNum()) {
                    return;
                }
                idrPicId = bitArray.readUnsignedExpGolombCodedInt();
            }
            if (spsData.picOrderCountType == 0) {
                if (!bitArray.canReadBits(spsData.picOrderCntLsbLength)) {
                    return;
                }
                picOrderCntLsb = bitArray.readBits(spsData.picOrderCntLsbLength);
                if (ppsData.bottomFieldPicOrderInFramePresentFlag && !fieldPicFlag) {
                    if (!bitArray.canReadExpGolombCodedNum()) {
                        return;
                    }
                    deltaPicOrderCntBottom = bitArray.readSignedExpGolombCodedInt();
                }
            } else if (spsData.picOrderCountType == 1 && !spsData.deltaPicOrderAlwaysZeroFlag) {
                if (!bitArray.canReadExpGolombCodedNum()) {
                    return;
                }
                deltaPicOrderCnt0 = bitArray.readSignedExpGolombCodedInt();
                if (ppsData.bottomFieldPicOrderInFramePresentFlag && !fieldPicFlag) {
                    if (!bitArray.canReadExpGolombCodedNum()) {
                        return;
                    }
                    deltaPicOrderCnt1 = bitArray.readSignedExpGolombCodedInt();
                }
            }
            isComplete = true;
            hreplacedliceType = true;
        }

        public boolean isFirstVclNalUnitOfPicture(SliceHeaderData other) {
            // See ISO 14496-10 subsection 7.4.1.2.4.
            return isComplete && (!other.isComplete || frameNum != other.frameNum || picParameterSetId != other.picParameterSetId || fieldPicFlag != other.fieldPicFlag || (bottomFieldFlagPresent && other.bottomFieldFlagPresent && bottomFieldFlag != other.bottomFieldFlag) || (nalRefIdc != other.nalRefIdc && (nalRefIdc == 0 || other.nalRefIdc == 0)) || (spsData.picOrderCountType == 0 && other.spsData.picOrderCountType == 0 && (picOrderCntLsb != other.picOrderCntLsb || deltaPicOrderCntBottom != other.deltaPicOrderCntBottom)) || (spsData.picOrderCountType == 1 && other.spsData.picOrderCountType == 1 && (deltaPicOrderCnt0 != other.deltaPicOrderCnt0 || deltaPicOrderCnt1 != other.deltaPicOrderCnt1)) || idrPicFlag != other.idrPicFlag || (idrPicFlag && other.idrPicFlag && idrPicId != other.idrPicId));
        }

        private void setSliceType(int sliceType) {
            this.sliceType = sliceType;
            hreplacedliceType = true;
        }
    }
}

19 Source : CameraMotionRenderer.java
with GNU General Public License v2.0
from warren-bank

@Override
protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException {
    // Only the stream's timestamp offset is recorded; the formats array is not
    // inspected by this renderer.
    this.offsetUs = offsetUs;
}

19 Source : CameraMotionRenderer.java
with GNU General Public License v2.0
from warren-bank

/**
 * Reports whether this renderer can handle the given format: only the
 * camera-motion metadata MIME type is supported.
 */
@Override
public int supportsFormat(Format format) {
    if (MimeTypes.APPLICATION_CAMERA_MOTION.equals(format.sampleMimeType)) {
        return FORMAT_HANDLED;
    }
    return FORMAT_UNSUPPORTED_TYPE;
}

19 Source : EventLogger.java
with GNU General Public License v2.0
from warren-bank

/**
 * Logs a decoder input format change, including the track type and a compact
 * description of the new format.
 */
@Override
public void onDecoderInputFormatChanged(EventTime eventTime, int trackType, Format format) {
    String description = getTrackTypeString(trackType) + ", " + Format.toLogString(format);
    logd(eventTime, "decoderInputFormatChanged", description);
}

19 Source : BaseTrackSelection.java
with GNU General Public License v2.0
from warren-bank

/**
 * An abstract base clreplaced suitable for most {@link TrackSelection} implementations.
 */
public abstract clreplaced BaseTrackSelection implements TrackSelection {

    /**
     * The selected {@link TrackGroup}.
     */
    protected final TrackGroup group;

    /**
     * The number of selected tracks within the {@link TrackGroup}. Always greater than zero.
     */
    protected final int length;

    /**
     * The indices of the selected tracks in {@link #group}, in order of decreasing bandwidth.
     */
    protected final int[] tracks;

    /**
     * The {@link Format}s of the selected tracks, in order of decreasing bandwidth.
     */
    private final Format[] formats;

    /**
     * Selected track blacklist timestamps, in order of decreasing bandwidth.
     */
    private final long[] blacklistUntilTimes;

    // Lazily initialized hashcode.
    private int hashCode;

    /**
     * @param group The {@link TrackGroup}. Must not be null.
     * @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be
     *     null or empty. May be in any order.
     */
    public BaseTrackSelection(TrackGroup group, int... tracks) {
        replacedertions.checkState(tracks.length > 0);
        this.group = replacedertions.checkNotNull(group);
        this.length = tracks.length;
        // Set the formats, sorted in order of decreasing bandwidth.
        formats = new Format[length];
        for (int i = 0; i < tracks.length; i++) {
            formats[i] = group.getFormat(tracks[i]);
        }
        Arrays.sort(formats, new DecreasingBandwidthComparator());
        // Set the format indices in the same order.
        this.tracks = new int[length];
        for (int i = 0; i < length; i++) {
            this.tracks[i] = group.indexOf(formats[i]);
        }
        blacklistUntilTimes = new long[length];
    }

    @Override
    public void enable() {
    // Do nothing.
    }

    @Override
    public void disable() {
    // Do nothing.
    }

    @Override
    public final TrackGroup getTrackGroup() {
        return group;
    }

    @Override
    public final int length() {
        return tracks.length;
    }

    @Override
    public final Format getFormat(int index) {
        return formats[index];
    }

    @Override
    public final int getIndexInTrackGroup(int index) {
        return tracks[index];
    }

    @Override
    @SuppressWarnings("ReferenceEquality")
    public final int indexOf(Format format) {
        for (int i = 0; i < length; i++) {
            if (formats[i] == format) {
                return i;
            }
        }
        return C.INDEX_UNSET;
    }

    @Override
    public final int indexOf(int indexInTrackGroup) {
        for (int i = 0; i < length; i++) {
            if (tracks[i] == indexInTrackGroup) {
                return i;
            }
        }
        return C.INDEX_UNSET;
    }

    @Override
    public final Format getSelectedFormat() {
        return formats[getSelectedIndex()];
    }

    @Override
    public final int getSelectedIndexInTrackGroup() {
        return tracks[getSelectedIndex()];
    }

    @Override
    public void onPlaybackSpeed(float playbackSpeed) {
    // Do nothing.
    }

    @Override
    public int evaluateQueueSize(long playbackPositionUs, List<? extends MediaChunk> queue) {
        return queue.size();
    }

    @Override
    public final boolean blacklist(int index, long blacklistDurationMs) {
        long nowMs = SystemClock.elapsedRealtime();
        boolean canBlacklist = isBlacklisted(index, nowMs);
        for (int i = 0; i < length && !canBlacklist; i++) {
            canBlacklist = i != index && !isBlacklisted(i, nowMs);
        }
        if (!canBlacklist) {
            return false;
        }
        blacklistUntilTimes[index] = Math.max(blacklistUntilTimes[index], nowMs + blacklistDurationMs);
        return true;
    }

    /**
     * Returns whether the track at the specified index in the selection is blacklisted.
     *
     * @param index The index of the track in the selection.
     * @param nowMs The current time in the timebase of {@link SystemClock#elapsedRealtime()}.
     */
    protected final boolean isBlacklisted(int index, long nowMs) {
        return blacklistUntilTimes[index] > nowMs;
    }

    // Object overrides.
    @Override
    public int hashCode() {
        if (hashCode == 0) {
            hashCode = 31 * System.idenreplacedyHashCode(group) + Arrays.hashCode(tracks);
        }
        return hashCode;
    }

    // Track groups are compared by idenreplacedy not value, as distinct groups may have the same value.
    @Override
    @SuppressWarnings({ "ReferenceEquality", "EqualsGetClreplaced" })
    public boolean equals(@Nullable Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClreplaced() != obj.getClreplaced()) {
            return false;
        }
        BaseTrackSelection other = (BaseTrackSelection) obj;
        return group == other.group && Arrays.equals(tracks, other.tracks);
    }

    /**
     * Sorts {@link Format} objects in order of decreasing bandwidth.
     */
    private static final clreplaced DecreasingBandwidthComparator implements Comparator<Format> {

        @Override
        public int compare(Format a, Format b) {
            return b.bitrate - a.bitrate;
        }
    }
}

19 Source : BaseTrackSelection.java
with GNU General Public License v2.0
from warren-bank

@Override
@SuppressWarnings("ReferenceEquality")
public final int indexOf(Format format) {
    for (int i = 0; i < length; i++) {
        if (formats[i] == format) {
            return i;
        }
    }
    return C.INDEX_UNSET;
}

19 Source : AdaptiveTrackSelection.java
with GNU General Public License v2.0
from warren-bank

/**
 * Computes the ideal selected index ignoring buffer health.
 *
 * @param nowMs The current time in the timebase of {@link Clock#elapsedRealtime()}, or {@link
 *     Long#MIN_VALUE} to ignore blacklisting.
 */
private int determineIdealSelectedIndex(long nowMs) {
    long allowedBitrate = (long) (bandwidthMeter.getBitrateEstimate() * bandwidthFraction);
    // Tracks are ordered by decreasing bandwidth, so the last candidate examined is the cheapest
    // non-blacklisted track; fall back to it if nothing fits within the bandwidth estimate.
    int fallbackIndex = 0;
    for (int trackIndex = 0; trackIndex < length; trackIndex++) {
        boolean ignoreBlacklist = nowMs == Long.MIN_VALUE;
        if (ignoreBlacklist || !isBlacklisted(trackIndex, nowMs)) {
            Format candidate = getFormat(trackIndex);
            if (Math.round(candidate.bitrate * playbackSpeed) <= allowedBitrate) {
                return trackIndex;
            }
            fallbackIndex = trackIndex;
        }
    }
    return fallbackIndex;
}

19 Source : AdaptiveTrackSelection.java
with GNU General Public License v2.0
from warren-bank

@Override
public void updateSelectedTrack(long playbackPositionUs, long bufferedDurationUs, long availableDurationUs, List<? extends MediaChunk> queue, MediaChunkIterators[] mediaChunkIterators) {
    long nowMs = clock.elapsedRealtime();
    // Stash the current selection, then make a new one.
    int currentSelectedIndex = selectedIndex;
    selectedIndex = determineIdealSelectedIndex(nowMs);
    if (selectedIndex == currentSelectedIndex) {
        return;
    }
    if (!isBlacklisted(currentSelectedIndex, nowMs)) {
        // Revert back to the current selection if conditions are not suitable for switching.
        Format currentFormat = getFormat(currentSelectedIndex);
        Format selectedFormat = getFormat(selectedIndex);
        if (selectedFormat.bitrate > currentFormat.bitrate && bufferedDurationUs < minDurationForQualityIncreaseUs(availableDurationUs)) {
            // The selected track is a higher quality, but we have insufficient buffer to safely switch
            // up. Defer switching up for now.
            selectedIndex = currentSelectedIndex;
        } else if (selectedFormat.bitrate < currentFormat.bitrate && bufferedDurationUs >= maxDurationForQualityDecreaseUs) {
            // The selected track is a lower quality, but we have sufficient buffer to defer switching
            // down for now.
            selectedIndex = currentSelectedIndex;
        }
    }
    // If we adapted, update the trigger.
    if (selectedIndex != currentSelectedIndex) {
        reason = C.SELECTION_REASON_ADAPTIVE;
    }
}

19 Source : AdaptiveTrackSelection.java
with GNU General Public License v2.0
from warren-bank

@Override
public int evaluateQueueSize(long playbackPositionUs, List<? extends MediaChunk> queue) {
    long nowMs = clock.elapsedRealtime();
    // Throttle re-evaluation: keep the queue as-is if we evaluated it too recently.
    boolean throttled = lastBufferEvaluationMs != C.TIME_UNSET
            && nowMs - lastBufferEvaluationMs < minTimeBetweenBufferReevaluationMs;
    if (throttled) {
        return queue.size();
    }
    lastBufferEvaluationMs = nowMs;
    if (queue.isEmpty()) {
        return 0;
    }
    int queueSize = queue.size();
    MediaChunk newestChunk = queue.get(queueSize - 1);
    long playoutBeforeNewestUs = Util.getPlayoutDurationForMediaDuration(newestChunk.startTimeUs - playbackPositionUs, playbackSpeed);
    if (playoutBeforeNewestUs < minDurationToRetainAfterDiscardUs) {
        return queueSize;
    }
    Format idealFormat = getFormat(determineIdealSelectedIndex(nowMs));
    // If the chunks contain video, discard from the first SD chunk beyond
    // minDurationToRetainAfterDiscardUs whose resolution and bitrate are both lower than the
    // ideal track's.
    for (int chunkIndex = 0; chunkIndex < queueSize; chunkIndex++) {
        MediaChunk chunk = queue.get(chunkIndex);
        Format chunkFormat = chunk.trackFormat;
        long playoutBeforeChunkUs = Util.getPlayoutDurationForMediaDuration(chunk.startTimeUs - playbackPositionUs, playbackSpeed);
        boolean beyondRetainedDuration = playoutBeforeChunkUs >= minDurationToRetainAfterDiscardUs;
        boolean isSd = chunkFormat.height != Format.NO_VALUE && chunkFormat.height < 720
                && chunkFormat.width != Format.NO_VALUE && chunkFormat.width < 1280;
        boolean lowerQualityThanIdeal = chunkFormat.bitrate < idealFormat.bitrate
                && chunkFormat.height < idealFormat.height;
        if (beyondRetainedDuration && isSd && lowerQualityThanIdeal) {
            return chunkIndex;
        }
    }
    return queueSize;
}

19 Source : TextRenderer.java
with GNU General Public License v2.0
from warren-bank

/**
 * A renderer for text.
 * <p>
 * {@link Subreplacedle}s are decoded from sample data using {@link SubreplacedleDecoder} instances obtained
 * from a {@link SubreplacedleDecoderFactory}. The actual rendering of the subreplacedle {@link Cue}s is
 * delegated to a {@link TextOutput}.
 */
public final clreplaced TextRenderer extends BaseRenderer implements Callback {

    /**
     * @deprecated Use {@link TextOutput}.
     */
    @Deprecated
    public interface Output extends TextOutput {
    }

    @Doreplacedented
    @Retention(RetentionPolicy.SOURCE)
    @IntDef({ REPLACEMENT_STATE_NONE, REPLACEMENT_STATE_SIGNAL_END_OF_STREAM, REPLACEMENT_STATE_WAIT_END_OF_STREAM })
    private @interface ReplacementState {
    }

    /**
     * The decoder does not need to be replaced.
     */
    private static final int REPLACEMENT_STATE_NONE = 0;

    /**
     * The decoder needs to be replaced, but we haven't yet signaled an end of stream to the existing
     * decoder. We need to do so in order to ensure that it outputs any remaining buffers before we
     * release it.
     */
    private static final int REPLACEMENT_STATE_SIGNAL_END_OF_STREAM = 1;

    /**
     * The decoder needs to be replaced, and we've signaled an end of stream to the existing decoder.
     * We're waiting for the decoder to output an end of stream signal to indicate that it has output
     * any remaining buffers before we release it.
     */
    private static final int REPLACEMENT_STATE_WAIT_END_OF_STREAM = 2;

    private static final int MSG_UPDATE_OUTPUT = 0;

    @Nullable
    private final Handler outputHandler;

    private final TextOutput output;

    private final SubreplacedleDecoderFactory decoderFactory;

    private final FormatHolder formatHolder;

    private boolean inputStreamEnded;

    private boolean outputStreamEnded;

    @ReplacementState
    private int decoderReplacementState;

    private Format streamFormat;

    private SubreplacedleDecoder decoder;

    private SubreplacedleInputBuffer nextInputBuffer;

    private SubreplacedleOutputBuffer subreplacedle;

    private SubreplacedleOutputBuffer nextSubreplacedle;

    private int nextSubreplacedleEventIndex;

    /**
     * @param output The output.
     * @param outputLooper The looper replacedociated with the thread on which the output should be called.
     *     If the output makes use of standard Android UI components, then this should normally be the
     *     looper replacedociated with the application's main thread, which can be obtained using {@link
     *     android.app.Activity#getMainLooper()}. Null may be preplaceded if the output should be called
     *     directly on the player's internal rendering thread.
     */
    public TextRenderer(TextOutput output, @Nullable Looper outputLooper) {
        this(output, outputLooper, SubreplacedleDecoderFactory.DEFAULT);
    }

    /**
     * @param output The output.
     * @param outputLooper The looper replacedociated with the thread on which the output should be called.
     *     If the output makes use of standard Android UI components, then this should normally be the
     *     looper replacedociated with the application's main thread, which can be obtained using {@link
     *     android.app.Activity#getMainLooper()}. Null may be preplaceded if the output should be called
     *     directly on the player's internal rendering thread.
     * @param decoderFactory A factory from which to obtain {@link SubreplacedleDecoder} instances.
     */
    public TextRenderer(TextOutput output, @Nullable Looper outputLooper, SubreplacedleDecoderFactory decoderFactory) {
        super(C.TRACK_TYPE_TEXT);
        this.output = replacedertions.checkNotNull(output);
        this.outputHandler = outputLooper == null ? null : Util.createHandler(outputLooper, /* callback= */
        this);
        this.decoderFactory = decoderFactory;
        formatHolder = new FormatHolder();
    }

    @Override
    public int supportsFormat(Format format) {
        if (decoderFactory.supportsFormat(format)) {
            return supportsFormatDrm(null, format.drmInitData) ? FORMAT_HANDLED : FORMAT_UNSUPPORTED_DRM;
        } else if (MimeTypes.isText(format.sampleMimeType)) {
            return FORMAT_UNSUPPORTED_SUBTYPE;
        } else {
            return FORMAT_UNSUPPORTED_TYPE;
        }
    }

    @Override
    protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException {
        streamFormat = formats[0];
        if (decoder != null) {
            decoderReplacementState = REPLACEMENT_STATE_SIGNAL_END_OF_STREAM;
        } else {
            decoder = decoderFactory.createDecoder(streamFormat);
        }
    }

    @Override
    protected void onPositionReset(long positionUs, boolean joining) {
        clearOutput();
        inputStreamEnded = false;
        outputStreamEnded = false;
        if (decoderReplacementState != REPLACEMENT_STATE_NONE) {
            replaceDecoder();
        } else {
            releaseBuffers();
            decoder.flush();
        }
    }

    @Override
    public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
        if (outputStreamEnded) {
            return;
        }
        if (nextSubreplacedle == null) {
            decoder.setPositionUs(positionUs);
            try {
                nextSubreplacedle = decoder.dequeueOutputBuffer();
            } catch (SubreplacedleDecoderException e) {
                throw ExoPlaybackException.createForRenderer(e, getIndex());
            }
        }
        if (getState() != STATE_STARTED) {
            return;
        }
        boolean textRendererNeedsUpdate = false;
        if (subreplacedle != null) {
            // We're iterating through the events in a subreplacedle. Set textRendererNeedsUpdate if we
            // advance to the next event.
            long subreplacedleNextEventTimeUs = getNextEventTime();
            while (subreplacedleNextEventTimeUs <= positionUs) {
                nextSubreplacedleEventIndex++;
                subreplacedleNextEventTimeUs = getNextEventTime();
                textRendererNeedsUpdate = true;
            }
        }
        if (nextSubreplacedle != null) {
            if (nextSubreplacedle.isEndOfStream()) {
                if (!textRendererNeedsUpdate && getNextEventTime() == Long.MAX_VALUE) {
                    if (decoderReplacementState == REPLACEMENT_STATE_WAIT_END_OF_STREAM) {
                        replaceDecoder();
                    } else {
                        releaseBuffers();
                        outputStreamEnded = true;
                    }
                }
            } else if (nextSubreplacedle.timeUs <= positionUs) {
                // Advance to the next subreplacedle. Sync the next event index and trigger an update.
                if (subreplacedle != null) {
                    subreplacedle.release();
                }
                subreplacedle = nextSubreplacedle;
                nextSubreplacedle = null;
                nextSubreplacedleEventIndex = subreplacedle.getNextEventTimeIndex(positionUs);
                textRendererNeedsUpdate = true;
            }
        }
        if (textRendererNeedsUpdate) {
            // textRendererNeedsUpdate is set and we're playing. Update the renderer.
            updateOutput(subreplacedle.getCues(positionUs));
        }
        if (decoderReplacementState == REPLACEMENT_STATE_WAIT_END_OF_STREAM) {
            return;
        }
        try {
            while (!inputStreamEnded) {
                if (nextInputBuffer == null) {
                    nextInputBuffer = decoder.dequeueInputBuffer();
                    if (nextInputBuffer == null) {
                        return;
                    }
                }
                if (decoderReplacementState == REPLACEMENT_STATE_SIGNAL_END_OF_STREAM) {
                    nextInputBuffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM);
                    decoder.queueInputBuffer(nextInputBuffer);
                    nextInputBuffer = null;
                    decoderReplacementState = REPLACEMENT_STATE_WAIT_END_OF_STREAM;
                    return;
                }
                // Try and read the next subreplacedle from the source.
                int result = readSource(formatHolder, nextInputBuffer, false);
                if (result == C.RESULT_BUFFER_READ) {
                    if (nextInputBuffer.isEndOfStream()) {
                        inputStreamEnded = true;
                    } else {
                        nextInputBuffer.subsampleOffsetUs = formatHolder.format.subsampleOffsetUs;
                        nextInputBuffer.flip();
                    }
                    decoder.queueInputBuffer(nextInputBuffer);
                    nextInputBuffer = null;
                } else if (result == C.RESULT_NOTHING_READ) {
                    return;
                }
            }
        } catch (SubreplacedleDecoderException e) {
            throw ExoPlaybackException.createForRenderer(e, getIndex());
        }
    }

    @Override
    protected void onDisabled() {
        streamFormat = null;
        clearOutput();
        releaseDecoder();
    }

    @Override
    public boolean isEnded() {
        return outputStreamEnded;
    }

    @Override
    public boolean isReady() {
        // Don't block playback whilst subreplacedles are loading.
        // Note: To change this behavior, it will be necessary to consider [Internal: b/12949941].
        return true;
    }

    private void releaseBuffers() {
        nextInputBuffer = null;
        nextSubreplacedleEventIndex = C.INDEX_UNSET;
        if (subreplacedle != null) {
            subreplacedle.release();
            subreplacedle = null;
        }
        if (nextSubreplacedle != null) {
            nextSubreplacedle.release();
            nextSubreplacedle = null;
        }
    }

    private void releaseDecoder() {
        releaseBuffers();
        decoder.release();
        decoder = null;
        decoderReplacementState = REPLACEMENT_STATE_NONE;
    }

    private void replaceDecoder() {
        releaseDecoder();
        decoder = decoderFactory.createDecoder(streamFormat);
    }

    private long getNextEventTime() {
        return nextSubreplacedleEventIndex == C.INDEX_UNSET || nextSubreplacedleEventIndex >= subreplacedle.getEventTimeCount() ? Long.MAX_VALUE : subreplacedle.getEventTime(nextSubreplacedleEventIndex);
    }

    private void updateOutput(List<Cue> cues) {
        if (outputHandler != null) {
            outputHandler.obtainMessage(MSG_UPDATE_OUTPUT, cues).sendToTarget();
        } else {
            invokeUpdateOutputInternal(cues);
        }
    }

    private void clearOutput() {
        updateOutput(Collections.emptyList());
    }

    @SuppressWarnings("unchecked")
    @Override
    public boolean handleMessage(Message msg) {
        switch(msg.what) {
            case MSG_UPDATE_OUTPUT:
                invokeUpdateOutputInternal((List<Cue>) msg.obj);
                return true;
            default:
                throw new IllegalStateException();
        }
    }

    private void invokeUpdateOutputInternal(List<Cue> cues) {
        output.onCues(cues);
    }
}

19 Source : TextRenderer.java
with GNU General Public License v2.0
from warren-bank

@Override
protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException {
    streamFormat = formats[0];
    if (decoder != null) {
        decoderReplacementState = REPLACEMENT_STATE_SIGNAL_END_OF_STREAM;
    } else {
        decoder = decoderFactory.createDecoder(streamFormat);
    }
}

19 Source : TextRenderer.java
with GNU General Public License v2.0
from warren-bank

@Override
public int supportsFormat(Format format) {
    if (decoderFactory.supportsFormat(format)) {
        return supportsFormatDrm(null, format.drmInitData) ? FORMAT_HANDLED : FORMAT_UNSUPPORTED_DRM;
    } else if (MimeTypes.isText(format.sampleMimeType)) {
        return FORMAT_UNSUPPORTED_SUBTYPE;
    } else {
        return FORMAT_UNSUPPORTED_TYPE;
    }
}

19 Source : TrackGroup.java
with GNU General Public License v2.0
from warren-bank

// TODO: Add an allowMultipleStreams boolean to indicate where the one stream per group restriction
// does not apply.
/**
 * Defines a group of tracks exposed by a {@link MediaPeriod}.
 *
 * <p>A {@link MediaPeriod} is only able to provide one {@link SampleStream} corresponding to a
 * group at any given time, however this {@link SampleStream} may adapt between multiple tracks
 * within the group.
 */
public final clreplaced TrackGroup implements Parcelable {

    /**
     * The number of tracks in the group.
     */
    public final int length;

    private final Format[] formats;

    // Lazily initialized hashcode.
    private int hashCode;

    /**
     * @param formats The track formats. Must not be null, contain null elements or be of length 0.
     */
    public TrackGroup(Format... formats) {
        replacedertions.checkState(formats.length > 0);
        this.formats = formats;
        this.length = formats.length;
    }

    /* package */
    TrackGroup(Parcel in) {
        length = in.readInt();
        formats = new Format[length];
        for (int i = 0; i < length; i++) {
            formats[i] = in.readParcelable(Format.clreplaced.getClreplacedLoader());
        }
    }

    /**
     * Returns the format of the track at a given index.
     *
     * @param index The index of the track.
     * @return The track's format.
     */
    public Format getFormat(int index) {
        return formats[index];
    }

    /**
     * Returns the index of the track with the given format in the group. The format is located by
     * idenreplacedy so, for example, {@code group.indexOf(group.getFormat(index)) == index} even if
     * multiple tracks have formats that contain the same values.
     *
     * @param format The format.
     * @return The index of the track, or {@link C#INDEX_UNSET} if no such track exists.
     */
    @SuppressWarnings("ReferenceEquality")
    public int indexOf(Format format) {
        for (int i = 0; i < formats.length; i++) {
            if (format == formats[i]) {
                return i;
            }
        }
        return C.INDEX_UNSET;
    }

    @Override
    public int hashCode() {
        if (hashCode == 0) {
            int result = 17;
            result = 31 * result + Arrays.hashCode(formats);
            hashCode = result;
        }
        return hashCode;
    }

    @Override
    public boolean equals(@Nullable Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClreplaced() != obj.getClreplaced()) {
            return false;
        }
        TrackGroup other = (TrackGroup) obj;
        return length == other.length && Arrays.equals(formats, other.formats);
    }

    // Parcelable implementation.
    @Override
    public int describeContents() {
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeInt(length);
        for (int i = 0; i < length; i++) {
            dest.writeParcelable(formats[i], 0);
        }
    }

    public static final Parcelable.Creator<TrackGroup> CREATOR = new Parcelable.Creator<TrackGroup>() {

        @Override
        public TrackGroup createFromParcel(Parcel in) {
            return new TrackGroup(in);
        }

        @Override
        public TrackGroup[] newArray(int size) {
            return new TrackGroup[size];
        }
    };
}

19 Source : TrackGroup.java
with GNU General Public License v2.0
from warren-bank

/**
 * Returns the index of the track with the given format in the group. The format is located by
 * identity so, for example, {@code group.indexOf(group.getFormat(index)) == index} even if
 * multiple tracks have formats that contain the same values.
 *
 * @param format The format.
 * @return The index of the track, or {@link C#INDEX_UNSET} if no such track exists.
 */
@SuppressWarnings("ReferenceEquality")
public int indexOf(Format format) {
    // Reference comparison is intentional: distinct tracks may carry value-equal formats.
    for (int trackIndex = 0; trackIndex < formats.length; trackIndex++) {
        if (formats[trackIndex] == format) {
            return trackIndex;
        }
    }
    return C.INDEX_UNSET;
}

19 Source : SingleSampleMediaSource.java
with GNU General Public License v2.0
from warren-bank

/**
 * Loads data at a given {@link Uri} as a single sample belonging to a single {@link MediaPeriod}.
 */
public final clreplaced SingleSampleMediaSource extends BaseMediaSource {

    /**
     * Listener of {@link SingleSampleMediaSource} events.
     *
     * @deprecated Use {@link MediaSourceEventListener}.
     */
    @Deprecated
    public interface EventListener {

        /**
         * Called when an error occurs loading media data.
         *
         * @param sourceId The id of the reporting {@link SingleSampleMediaSource}.
         * @param e The cause of the failure.
         */
        void onLoadError(int sourceId, IOException e);
    }

    /**
     * Factory for {@link SingleSampleMediaSource}.
     */
    public static final clreplaced Factory {

        private final DataSource.Factory dataSourceFactory;

        private LoadErrorHandlingPolicy loadErrorHandlingPolicy;

        private boolean treatLoadErrorsAsEndOfStream;

        private boolean isCreateCalled;

        @Nullable
        private Object tag;

        /**
         * Creates a factory for {@link SingleSampleMediaSource}s.
         *
         * @param dataSourceFactory The factory from which the {@link DataSource} to read the media will
         *     be obtained.
         */
        public Factory(DataSource.Factory dataSourceFactory) {
            this.dataSourceFactory = replacedertions.checkNotNull(dataSourceFactory);
            loadErrorHandlingPolicy = new DefaultLoadErrorHandlingPolicy();
        }

        /**
         * Sets a tag for the media source which will be published in the {@link Timeline} of the source
         * as {@link Timeline.Window#tag}.
         *
         * @param tag A tag for the media source.
         * @return This factory, for convenience.
         * @throws IllegalStateException If one of the {@code create} methods has already been called.
         */
        public Factory setTag(Object tag) {
            replacedertions.checkState(!isCreateCalled);
            this.tag = tag;
            return this;
        }

        /**
         * Sets the minimum number of times to retry if a loading error occurs. See {@link
         * #setLoadErrorHandlingPolicy} for the default value.
         *
         * <p>Calling this method is equivalent to calling {@link #setLoadErrorHandlingPolicy} with
         * {@link DefaultLoadErrorHandlingPolicy#DefaultLoadErrorHandlingPolicy(int)
         * DefaultLoadErrorHandlingPolicy(minLoadableRetryCount)}
         *
         * @param minLoadableRetryCount The minimum number of times to retry if a loading error occurs.
         * @return This factory, for convenience.
         * @throws IllegalStateException If one of the {@code create} methods has already been called.
         * @deprecated Use {@link #setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy)} instead.
         */
        @Deprecated
        public Factory setMinLoadableRetryCount(int minLoadableRetryCount) {
            return setLoadErrorHandlingPolicy(new DefaultLoadErrorHandlingPolicy(minLoadableRetryCount));
        }

        /**
         * Sets the {@link LoadErrorHandlingPolicy}. The default value is created by calling {@link
         * DefaultLoadErrorHandlingPolicy#DefaultLoadErrorHandlingPolicy()}.
         *
         * <p>Calling this method overrides any calls to {@link #setMinLoadableRetryCount(int)}.
         *
         * @param loadErrorHandlingPolicy A {@link LoadErrorHandlingPolicy}.
         * @return This factory, for convenience.
         * @throws IllegalStateException If one of the {@code create} methods has already been called.
         */
        public Factory setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy loadErrorHandlingPolicy) {
            replacedertions.checkState(!isCreateCalled);
            this.loadErrorHandlingPolicy = loadErrorHandlingPolicy;
            return this;
        }

        /**
         * Sets whether load errors will be treated as end-of-stream signal (load errors will not be
         * propagated). The default value is false.
         *
         * @param treatLoadErrorsAsEndOfStream If true, load errors will not be propagated by sample
         *     streams, treating them as ended instead. If false, load errors will be propagated
         *     normally by {@link SampleStream#maybeThrowError()}.
         * @return This factory, for convenience.
         * @throws IllegalStateException If one of the {@code create} methods has already been called.
         */
        public Factory setTreatLoadErrorsAsEndOfStream(boolean treatLoadErrorsAsEndOfStream) {
            replacedertions.checkState(!isCreateCalled);
            this.treatLoadErrorsAsEndOfStream = treatLoadErrorsAsEndOfStream;
            return this;
        }

        /**
         * Returns a new {@link ExtractorMediaSource} using the current parameters.
         *
         * @param uri The {@link Uri}.
         * @param format The {@link Format} of the media stream.
         * @param durationUs The duration of the media stream in microseconds.
         * @return The new {@link ExtractorMediaSource}.
         */
        public SingleSampleMediaSource createMediaSource(Uri uri, Format format, long durationUs) {
            isCreateCalled = true;
            return new SingleSampleMediaSource(uri, dataSourceFactory, format, durationUs, loadErrorHandlingPolicy, treatLoadErrorsAsEndOfStream, tag);
        }

        /**
         * @deprecated Use {@link #createMediaSource(Uri, Format, long)} and {@link
         *     #addEventListener(Handler, MediaSourceEventListener)} instead.
         */
        @Deprecated
        public SingleSampleMediaSource createMediaSource(Uri uri, Format format, long durationUs, @Nullable Handler eventHandler, @Nullable MediaSourceEventListener eventListener) {
            SingleSampleMediaSource mediaSource = createMediaSource(uri, format, durationUs);
            if (eventHandler != null && eventListener != null) {
                mediaSource.addEventListener(eventHandler, eventListener);
            }
            return mediaSource;
        }
    }

    private final DataSpec dataSpec;

    private final DataSource.Factory dataSourceFactory;

    private final Format format;

    private final long durationUs;

    private final LoadErrorHandlingPolicy loadErrorHandlingPolicy;

    private final boolean treatLoadErrorsAsEndOfStream;

    private final Timeline timeline;

    @Nullable
    private TransferListener transferListener;

    /**
     * @param uri The {@link Uri} of the media stream.
     * @param dataSourceFactory The factory from which the {@link DataSource} to read the media will
     *     be obtained.
     * @param format The {@link Format} replacedociated with the output track.
     * @param durationUs The duration of the media stream in microseconds.
     * @deprecated Use {@link Factory} instead.
     */
    @Deprecated
    @SuppressWarnings("deprecation")
    public SingleSampleMediaSource(Uri uri, DataSource.Factory dataSourceFactory, Format format, long durationUs) {
        this(uri, dataSourceFactory, format, durationUs, DefaultLoadErrorHandlingPolicy.DEFAULT_MIN_LOADABLE_RETRY_COUNT);
    }

    /**
     * @param uri The {@link Uri} of the media stream.
     * @param dataSourceFactory The factory from which the {@link DataSource} to read the media will
     *     be obtained.
     * @param format The {@link Format} replacedociated with the output track.
     * @param durationUs The duration of the media stream in microseconds.
     * @param minLoadableRetryCount The minimum number of times to retry if a loading error occurs.
     * @deprecated Use {@link Factory} instead.
     */
    @Deprecated
    public SingleSampleMediaSource(Uri uri, DataSource.Factory dataSourceFactory, Format format, long durationUs, int minLoadableRetryCount) {
        this(uri, dataSourceFactory, format, durationUs, new DefaultLoadErrorHandlingPolicy(minLoadableRetryCount), /* treatLoadErrorsAsEndOfStream= */
        false, /* tag= */
        null);
    }

    /**
     * @param uri The {@link Uri} of the media stream.
     * @param dataSourceFactory The factory from which the {@link DataSource} to read the media will
     *     be obtained.
     * @param format The {@link Format} replacedociated with the output track.
     * @param durationUs The duration of the media stream in microseconds.
     * @param minLoadableRetryCount The minimum number of times to retry if a loading error occurs.
     * @param eventHandler A handler for events. May be null if delivery of events is not required.
     * @param eventListener A listener of events. May be null if delivery of events is not required.
     * @param eventSourceId An identifier that gets preplaceded to {@code eventListener} methods.
     * @param treatLoadErrorsAsEndOfStream If true, load errors will not be propagated by sample
     *     streams, treating them as ended instead. If false, load errors will be propagated normally
     *     by {@link SampleStream#maybeThrowError()}.
     * @deprecated Use {@link Factory} instead.
     */
    @Deprecated
    @SuppressWarnings("deprecation")
    public SingleSampleMediaSource(Uri uri, DataSource.Factory dataSourceFactory, Format format, long durationUs, int minLoadableRetryCount, Handler eventHandler, EventListener eventListener, int eventSourceId, boolean treatLoadErrorsAsEndOfStream) {
        this(uri, dataSourceFactory, format, durationUs, new DefaultLoadErrorHandlingPolicy(minLoadableRetryCount), treatLoadErrorsAsEndOfStream, /* tag= */
        null);
        if (eventHandler != null && eventListener != null) {
            addEventListener(eventHandler, new EventListenerWrapper(eventListener, eventSourceId));
        }
    }

    private SingleSampleMediaSource(Uri uri, DataSource.Factory dataSourceFactory, Format format, long durationUs, LoadErrorHandlingPolicy loadErrorHandlingPolicy, boolean treatLoadErrorsAsEndOfStream, @Nullable Object tag) {
        this.dataSourceFactory = dataSourceFactory;
        this.format = format;
        this.durationUs = durationUs;
        this.loadErrorHandlingPolicy = loadErrorHandlingPolicy;
        this.treatLoadErrorsAsEndOfStream = treatLoadErrorsAsEndOfStream;
        dataSpec = new DataSpec(uri, DataSpec.FLAG_ALLOW_GZIP | DataSpec.FLAG_ALLOW_CACHING_UNKNOWN_LENGTH);
        timeline = new SinglePeriodTimeline(durationUs, /* isSeekable= */
        true, /* isDynamic= */
        false, tag);
    }

    // MediaSource implementation.
    @Override
    public void prepareSourceInternal(ExoPlayer player, boolean isTopLevelSource, @Nullable TransferListener mediaTransferListener) {
        transferListener = mediaTransferListener;
        refreshSourceInfo(timeline, /* manifest= */
        null);
    }

    @Override
    public void maybeThrowSourceInfoRefreshError() throws IOException {
    // Do nothing.
    }

    @Override
    public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator) {
        return new SingleSampleMediaPeriod(dataSpec, dataSourceFactory, transferListener, format, durationUs, loadErrorHandlingPolicy, createEventDispatcher(id), treatLoadErrorsAsEndOfStream);
    }

    @Override
    public void releasePeriod(MediaPeriod mediaPeriod) {
        ((SingleSampleMediaPeriod) mediaPeriod).release();
    }

    @Override
    public void releaseSourceInternal() {
    // Do nothing.
    }

    /**
     * Wraps a deprecated {@link EventListener}, invoking its callback from the equivalent callback in
     * {@link MediaSourceEventListener}.
     */
    @Deprecated
    @SuppressWarnings("deprecation")
    private static final clreplaced EventListenerWrapper extends DefaultMediaSourceEventListener {

        private final EventListener eventListener;

        private final int eventSourceId;

        public EventListenerWrapper(EventListener eventListener, int eventSourceId) {
            this.eventListener = replacedertions.checkNotNull(eventListener);
            this.eventSourceId = eventSourceId;
        }

        @Override
        public void onLoadError(int windowIndex, @Nullable MediaPeriodId mediaPeriodId, LoadEventInfo loadEventInfo, MediaLoadData mediaLoadData, IOException error, boolean wasCanceled) {
            eventListener.onLoadError(eventSourceId, error);
        }
    }
}

19 Source : SingleSampleMediaPeriod.java
with GNU General Public License v2.0
from warren-bank

/**
 * A {@link MediaPeriod} with a single sample.
 */
/* package */
final clreplaced SingleSampleMediaPeriod implements MediaPeriod, Loader.Callback<SingleSampleMediaPeriod.SourceLoadable> {

    /**
     * The initial size of the allocation used to hold the sample data.
     */
    private static final int INITIAL_SAMPLE_SIZE = 1024;

    private final DataSpec dataSpec;

    private final DataSource.Factory dataSourceFactory;

    @Nullable
    private final TransferListener transferListener;

    private final LoadErrorHandlingPolicy loadErrorHandlingPolicy;

    private final EventDispatcher eventDispatcher;

    private final TrackGroupArray tracks;

    private final ArrayList<SampleStreamImpl> sampleStreams;

    private final long durationUs;

    // Package private to avoid thunk methods.
    /* package */
    final Loader loader;

    /* package */
    final Format format;

    /* package */
    final boolean treatLoadErrorsAsEndOfStream;

    /* package */
    boolean notifiedReadingStarted;

    /* package */
    boolean loadingFinished;

    /* package */
    boolean loadingSucceeded;

    /* package */
    byte[] sampleData;

    /* package */
    int sampleSize;

    public SingleSampleMediaPeriod(DataSpec dataSpec, DataSource.Factory dataSourceFactory, @Nullable TransferListener transferListener, Format format, long durationUs, LoadErrorHandlingPolicy loadErrorHandlingPolicy, EventDispatcher eventDispatcher, boolean treatLoadErrorsAsEndOfStream) {
        this.dataSpec = dataSpec;
        this.dataSourceFactory = dataSourceFactory;
        this.transferListener = transferListener;
        this.format = format;
        this.durationUs = durationUs;
        this.loadErrorHandlingPolicy = loadErrorHandlingPolicy;
        this.eventDispatcher = eventDispatcher;
        this.treatLoadErrorsAsEndOfStream = treatLoadErrorsAsEndOfStream;
        tracks = new TrackGroupArray(new TrackGroup(format));
        sampleStreams = new ArrayList<>();
        loader = new Loader("Loader:SingleSampleMediaPeriod");
        eventDispatcher.mediaPeriodCreated();
    }

    public void release() {
        loader.release();
        eventDispatcher.mediaPeriodReleased();
    }

    @Override
    public void prepare(Callback callback, long positionUs) {
        callback.onPrepared(this);
    }

    @Override
    public void maybeThrowPrepareError() throws IOException {
    // Do nothing.
    }

    @Override
    public TrackGroupArray getTrackGroups() {
        return tracks;
    }

    @Override
    public long selectTracks(TrackSelection[] selections, boolean[] mayRetainStreamFlags, SampleStream[] streams, boolean[] streamResetFlags, long positionUs) {
        for (int i = 0; i < selections.length; i++) {
            if (streams[i] != null && (selections[i] == null || !mayRetainStreamFlags[i])) {
                sampleStreams.remove(streams[i]);
                streams[i] = null;
            }
            if (streams[i] == null && selections[i] != null) {
                SampleStreamImpl stream = new SampleStreamImpl();
                sampleStreams.add(stream);
                streams[i] = stream;
                streamResetFlags[i] = true;
            }
        }
        return positionUs;
    }

    @Override
    public void discardBuffer(long positionUs, boolean toKeyframe) {
    // Do nothing.
    }

    @Override
    public void reevaluateBuffer(long positionUs) {
    // Do nothing.
    }

    @Override
    public boolean continueLoading(long positionUs) {
        if (loadingFinished || loader.isLoading()) {
            return false;
        }
        DataSource dataSource = dataSourceFactory.createDataSource();
        if (transferListener != null) {
            dataSource.addTransferListener(transferListener);
        }
        long elapsedRealtimeMs = loader.startLoading(new SourceLoadable(dataSpec, dataSource), /* callback= */
        this, loadErrorHandlingPolicy.getMinimumLoadableRetryCount(C.DATA_TYPE_MEDIA));
        eventDispatcher.loadStarted(dataSpec, C.DATA_TYPE_MEDIA, C.TRACK_TYPE_UNKNOWN, format, C.SELECTION_REASON_UNKNOWN, /* trackSelectionData= */
        null, /* mediaStartTimeUs= */
        0, durationUs, elapsedRealtimeMs);
        return true;
    }

    @Override
    public long readDiscontinuity() {
        if (!notifiedReadingStarted) {
            eventDispatcher.readingStarted();
            notifiedReadingStarted = true;
        }
        return C.TIME_UNSET;
    }

    @Override
    public long getNextLoadPositionUs() {
        return loadingFinished || loader.isLoading() ? C.TIME_END_OF_SOURCE : 0;
    }

    @Override
    public long getBufferedPositionUs() {
        return loadingFinished ? C.TIME_END_OF_SOURCE : 0;
    }

    @Override
    public void pause() {
    // Do nothing.
    }

    @Override
    public void resume() {
    // Do nothing.
    }

    @Override
    public long seekToUs(long positionUs) {
        for (int i = 0; i < sampleStreams.size(); i++) {
            sampleStreams.get(i).reset();
        }
        return positionUs;
    }

    @Override
    public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParameters) {
        return positionUs;
    }

    // Loader.Callback implementation.
    @Override
    public void onLoadCompleted(SourceLoadable loadable, long elapsedRealtimeMs, long loadDurationMs) {
        sampleSize = (int) loadable.dataSource.getBytesRead();
        sampleData = loadable.sampleData;
        loadingFinished = true;
        loadingSucceeded = true;
        eventDispatcher.loadCompleted(loadable.dataSpec, loadable.dataSource.getLastOpenedUri(), loadable.dataSource.getLastResponseHeaders(), C.DATA_TYPE_MEDIA, C.TRACK_TYPE_UNKNOWN, format, C.SELECTION_REASON_UNKNOWN, /* trackSelectionData= */
        null, /* mediaStartTimeUs= */
        0, durationUs, elapsedRealtimeMs, loadDurationMs, sampleSize);
    }

    @Override
    public void onLoadCanceled(SourceLoadable loadable, long elapsedRealtimeMs, long loadDurationMs, boolean released) {
        eventDispatcher.loadCanceled(loadable.dataSpec, loadable.dataSource.getLastOpenedUri(), loadable.dataSource.getLastResponseHeaders(), C.DATA_TYPE_MEDIA, C.TRACK_TYPE_UNKNOWN, /* trackFormat= */
        null, C.SELECTION_REASON_UNKNOWN, /* trackSelectionData= */
        null, /* mediaStartTimeUs= */
        0, durationUs, elapsedRealtimeMs, loadDurationMs, loadable.dataSource.getBytesRead());
    }

    @Override
    public LoadErrorAction onLoadError(SourceLoadable loadable, long elapsedRealtimeMs, long loadDurationMs, IOException error, int errorCount) {
        long retryDelay = loadErrorHandlingPolicy.getRetryDelayMsFor(C.DATA_TYPE_MEDIA, durationUs, error, errorCount);
        boolean errorCanBePropagated = retryDelay == C.TIME_UNSET || errorCount >= loadErrorHandlingPolicy.getMinimumLoadableRetryCount(C.DATA_TYPE_MEDIA);
        LoadErrorAction action;
        if (treatLoadErrorsAsEndOfStream && errorCanBePropagated) {
            loadingFinished = true;
            action = Loader.DONT_RETRY;
        } else {
            action = retryDelay != C.TIME_UNSET ? Loader.createRetryAction(/* resetErrorCount= */
            false, retryDelay) : Loader.DONT_RETRY_FATAL;
        }
        eventDispatcher.loadError(loadable.dataSpec, loadable.dataSource.getLastOpenedUri(), loadable.dataSource.getLastResponseHeaders(), C.DATA_TYPE_MEDIA, C.TRACK_TYPE_UNKNOWN, format, C.SELECTION_REASON_UNKNOWN, /* trackSelectionData= */
        null, /* mediaStartTimeUs= */
        0, durationUs, elapsedRealtimeMs, loadDurationMs, loadable.dataSource.getBytesRead(), error, /* wasCanceled= */
        !action.isRetry());
        return action;
    }

    private final clreplaced SampleStreamImpl implements SampleStream {

        private static final int STREAM_STATE_SEND_FORMAT = 0;

        private static final int STREAM_STATE_SEND_SAMPLE = 1;

        private static final int STREAM_STATE_END_OF_STREAM = 2;

        private int streamState;

        private boolean notifiedDownstreamFormat;

        public void reset() {
            if (streamState == STREAM_STATE_END_OF_STREAM) {
                streamState = STREAM_STATE_SEND_SAMPLE;
            }
        }

        @Override
        public boolean isReady() {
            return loadingFinished;
        }

        @Override
        public void maybeThrowError() throws IOException {
            if (!treatLoadErrorsAsEndOfStream) {
                loader.maybeThrowError();
            }
        }

        @Override
        public int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, boolean requireFormat) {
            maybeNotifyDownstreamFormat();
            if (streamState == STREAM_STATE_END_OF_STREAM) {
                buffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM);
                return C.RESULT_BUFFER_READ;
            } else if (requireFormat || streamState == STREAM_STATE_SEND_FORMAT) {
                formatHolder.format = format;
                streamState = STREAM_STATE_SEND_SAMPLE;
                return C.RESULT_FORMAT_READ;
            } else if (loadingFinished) {
                if (loadingSucceeded) {
                    buffer.timeUs = 0;
                    buffer.addFlag(C.BUFFER_FLAG_KEY_FRAME);
                    buffer.ensureSpaceForWrite(sampleSize);
                    buffer.data.put(sampleData, 0, sampleSize);
                } else {
                    buffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM);
                }
                streamState = STREAM_STATE_END_OF_STREAM;
                return C.RESULT_BUFFER_READ;
            }
            return C.RESULT_NOTHING_READ;
        }

        @Override
        public int skipData(long positionUs) {
            maybeNotifyDownstreamFormat();
            if (positionUs > 0 && streamState != STREAM_STATE_END_OF_STREAM) {
                streamState = STREAM_STATE_END_OF_STREAM;
                return 1;
            }
            return 0;
        }

        private void maybeNotifyDownstreamFormat() {
            if (!notifiedDownstreamFormat) {
                eventDispatcher.downstreamFormatChanged(MimeTypes.getTrackType(format.sampleMimeType), format, C.SELECTION_REASON_UNKNOWN, /* trackSelectionData= */
                null, /* mediaTimeUs= */
                0);
                notifiedDownstreamFormat = true;
            }
        }
    }

    /* package */
    static final clreplaced SourceLoadable implements Loadable {

        public final DataSpec dataSpec;

        private final StatsDataSource dataSource;

        private byte[] sampleData;

        public SourceLoadable(DataSpec dataSpec, DataSource dataSource) {
            this.dataSpec = dataSpec;
            this.dataSource = new StatsDataSource(dataSource);
        }

        @Override
        public void cancelLoad() {
        // Never happens.
        }

        @Override
        public void load() throws IOException, InterruptedException {
            // We always load from the beginning, so reset bytesRead to 0.
            dataSource.resetBytesRead();
            try {
                // Create and open the input.
                dataSource.open(dataSpec);
                // Load the sample data.
                int result = 0;
                while (result != C.RESULT_END_OF_INPUT) {
                    int sampleSize = (int) dataSource.getBytesRead();
                    if (sampleData == null) {
                        sampleData = new byte[INITIAL_SAMPLE_SIZE];
                    } else if (sampleSize == sampleData.length) {
                        sampleData = Arrays.copyOf(sampleData, sampleData.length * 2);
                    }
                    result = dataSource.read(sampleData, sampleSize, sampleData.length - sampleSize);
                }
            } finally {
                Util.closeQuietly(dataSource);
            }
        }
    }
}

19 Source : SampleQueue.java
with GNU General Public License v2.0
from warren-bank

/**
 * A queue of media samples.
 */
public final clreplaced SampleQueue implements TrackOutput {

    /**
     * A listener for changes to the upstream format.
     */
    public interface UpstreamFormatChangedListener {

        /**
         * Called on the loading thread when an upstream format change occurs.
         *
         * @param format The new upstream format.
         */
        void onUpstreamFormatChanged(Format format);
    }

    /** Returned by advance operations when the requested advance could not be performed. */
    public static final int ADVANCE_FAILED = -1;

    private static final int INITIAL_SCRATCH_SIZE = 32;

    private final Allocator allocator;

    private final int allocationLength;

    private final SampleMetadataQueue metadataQueue;

    private final SampleExtrasHolder extrasHolder;

    private final ParsableByteArray scratch;

    // References into the linked list of allocations.
    private AllocationNode firstAllocationNode;

    private AllocationNode readAllocationNode;

    private AllocationNode writeAllocationNode;

    // Accessed only by the consuming thread.
    private Format downstreamFormat;

    // Accessed only by the loading thread (or the consuming thread when there is no loading thread).
    private boolean pendingFormatAdjustment;

    private Format lastUnadjustedFormat;

    private long sampleOffsetUs;

    private long totalBytesWritten;

    private boolean pendingSplice;

    private UpstreamFormatChangedListener upstreamFormatChangeListener;

    /**
     * @param allocator An {@link Allocator} from which allocations for sample data can be obtained.
     */
    public SampleQueue(Allocator allocator) {
        this.allocator = allocator;
        allocationLength = allocator.getIndividualAllocationLength();
        metadataQueue = new SampleMetadataQueue();
        extrasHolder = new SampleExtrasHolder();
        scratch = new ParsableByteArray(INITIAL_SCRATCH_SIZE);
        // Start with a single uninitialized allocation node; read and write positions coincide.
        firstAllocationNode = new AllocationNode(0, allocationLength);
        readAllocationNode = firstAllocationNode;
        writeAllocationNode = firstAllocationNode;
    }

    // Called by the consuming thread, but only when there is no loading thread.
    /**
     * Resets the output without clearing the upstream format. Equivalent to {@code reset(false)}.
     */
    public void reset() {
        reset(false);
    }

    /**
     * Resets the output.
     *
     * @param resetUpstreamFormat Whether the upstream format should be cleared. If set to false,
     *     samples queued after the reset (and before a subsequent call to {@link #format(Format)})
     *     are assumed to have the current upstream format. If set to true, {@link #format(Format)}
     *     must be called after the reset before any more samples can be queued.
     */
    public void reset(boolean resetUpstreamFormat) {
        metadataQueue.reset(resetUpstreamFormat);
        clearAllocationNodes(firstAllocationNode);
        firstAllocationNode = new AllocationNode(0, allocationLength);
        readAllocationNode = firstAllocationNode;
        writeAllocationNode = firstAllocationNode;
        totalBytesWritten = 0;
        allocator.trim();
    }

    /**
     * Sets a source identifier for subsequent samples.
     *
     * @param sourceId The source identifier.
     */
    public void sourceId(int sourceId) {
        metadataQueue.sourceId(sourceId);
    }

    /**
     * Indicates samples that are subsequently queued should be spliced into those already queued.
     */
    public void splice() {
        pendingSplice = true;
    }

    /**
     * Returns the current absolute write index.
     */
    public int getWriteIndex() {
        return metadataQueue.getWriteIndex();
    }

    /**
     * Discards samples from the write side of the queue.
     *
     * @param discardFromIndex The absolute index of the first sample to be discarded. Must be in the
     *     range [{@link #getReadIndex()}, {@link #getWriteIndex()}].
     */
    public void discardUpstreamSamples(int discardFromIndex) {
        totalBytesWritten = metadataQueue.discardUpstreamSamples(discardFromIndex);
        if (totalBytesWritten == 0 || totalBytesWritten == firstAllocationNode.startPosition) {
            // Everything was discarded: rebuild the chain from a single fresh node.
            clearAllocationNodes(firstAllocationNode);
            firstAllocationNode = new AllocationNode(totalBytesWritten, allocationLength);
            readAllocationNode = firstAllocationNode;
            writeAllocationNode = firstAllocationNode;
        } else {
            // Find the last node containing at least 1 byte of data that we need to keep.
            AllocationNode lastNodeToKeep = firstAllocationNode;
            while (totalBytesWritten > lastNodeToKeep.endPosition) {
                lastNodeToKeep = lastNodeToKeep.next;
            }
            // Discard all subsequent nodes.
            AllocationNode firstNodeToDiscard = lastNodeToKeep.next;
            clearAllocationNodes(firstNodeToDiscard);
            // Reset the successor of the last node to be an uninitialized node.
            lastNodeToKeep.next = new AllocationNode(lastNodeToKeep.endPosition, allocationLength);
            // Update writeAllocationNode and readAllocationNode as necessary.
            writeAllocationNode = totalBytesWritten == lastNodeToKeep.endPosition ? lastNodeToKeep.next : lastNodeToKeep;
            if (readAllocationNode == firstNodeToDiscard) {
                readAllocationNode = lastNodeToKeep.next;
            }
        }
    }

    // Called by the consuming thread.
    /**
     * Returns whether a sample is available to be read.
     */
    public boolean hasNextSample() {
        return metadataQueue.hasNextSample();
    }

    /**
     * Returns the absolute index of the first sample.
     */
    public int getFirstIndex() {
        return metadataQueue.getFirstIndex();
    }

    /**
     * Returns the current absolute read index.
     */
    public int getReadIndex() {
        return metadataQueue.getReadIndex();
    }

    /**
     * Peeks the source id of the next sample to be read, or the current upstream source id if the
     * queue is empty or if the read position is at the end of the queue.
     *
     * @return The source id.
     */
    public int peekSourceId() {
        return metadataQueue.peekSourceId();
    }

    /**
     * Returns the upstream {@link Format} in which samples are being queued.
     */
    public Format getUpstreamFormat() {
        return metadataQueue.getUpstreamFormat();
    }

    /**
     * Returns the largest sample timestamp that has been queued since the last {@link #reset}.
     * <p>
     * Samples that were discarded by calling {@link #discardUpstreamSamples(int)} are not
     * considered as having been queued. Samples that were dequeued from the front of the queue are
     * considered as having been queued.
     *
     * @return The largest sample timestamp that has been queued, or {@link Long#MIN_VALUE} if no
     *     samples have been queued.
     */
    public long getLargestQueuedTimestampUs() {
        return metadataQueue.getLargestQueuedTimestampUs();
    }

    /**
     * Returns the timestamp of the first sample, or {@link Long#MIN_VALUE} if the queue is empty.
     */
    public long getFirstTimestampUs() {
        return metadataQueue.getFirstTimestampUs();
    }

    /**
     * Rewinds the read position to the first sample in the queue.
     */
    public void rewind() {
        metadataQueue.rewind();
        readAllocationNode = firstAllocationNode;
    }

    /**
     * Discards samples up to but not including the sample immediately before or at the given time.
     *
     * @param timeUs The time to discard to.
     * @param toKeyframe Whether to discard only up to the keyframe before or at the given time,
     *     instead of up to any sample before or at it.
     * @param stopAtReadPosition Whether discarding must stop at the read position. When false,
     *     samples at and beyond the read position may also be discarded, and the read position is
     *     then moved to the first remaining sample.
     */
    public void discardTo(long timeUs, boolean toKeyframe, boolean stopAtReadPosition) {
        long discardPosition = this.metadataQueue.discardTo(timeUs, toKeyframe, stopAtReadPosition);
        discardDownstreamTo(discardPosition);
    }

    /** Discards everything up to but not including the read position. */
    public void discardToRead() {
        long discardPosition = this.metadataQueue.discardToRead();
        discardDownstreamTo(discardPosition);
    }

    /** Discards the entire queue, advancing the read position to the end. */
    public void discardToEnd() {
        long discardPosition = this.metadataQueue.discardToEnd();
        discardDownstreamTo(discardPosition);
    }

    /**
     * Moves the read position to the end of the queue.
     *
     * @return How many samples were skipped.
     */
    public int advanceToEnd() {
        return this.metadataQueue.advanceToEnd();
    }

    /**
     * Attempts to move the read position forward to the sample before or at {@code timeUs}.
     *
     * @param timeUs The time to advance to.
     * @param toKeyframe Whether to advance only to the keyframe before or at the given time,
     *     instead of to any sample before or at it.
     * @param allowTimeBeyondBuffer Whether a {@code timeUs} beyond the end of the queue may
     *     succeed by advancing to the last sample (or keyframe).
     * @return The number of skipped samples (possibly 0) on success, or {@link #ADVANCE_FAILED}
     *     otherwise. On success the read position is unchanged or advanced, and now points at a
     *     sample satisfying the given criteria.
     */
    public int advanceTo(long timeUs, boolean toKeyframe, boolean allowTimeBeyondBuffer) {
        return this.metadataQueue.advanceTo(timeUs, toKeyframe, allowTimeBeyondBuffer);
    }

    /**
     * Attempts to move the read position to the given sample index.
     *
     * @param sampleIndex The target sample index.
     * @return True if the read position was updated. False if {@code sampleIndex} is below the
     *     index of the first queued sample or above the index of the next sample to be written.
     */
    public boolean setReadPosition(int sampleIndex) {
        return this.metadataQueue.setReadPosition(sampleIndex);
    }

    /**
     * Attempts to read from the queue.
     *
     * @param formatHolder A {@link FormatHolder} populated when a format is read.
     * @param buffer A {@link DecoderInputBuffer} populated when a sample or the end of the stream
     *     is read. At the end of the stream the {@link C#BUFFER_FLAG_END_OF_STREAM} flag is set on
     *     the buffer.
     * @param formatRequired Whether the stream format must be read even if unchanged. When true a
     *     sample is never read, although end of stream or nothing may still be read.
     * @param loadingFinished Whether an empty queue should be treated as the end of the stream.
     * @param decodeOnlyUntilUs Buffers with timestamps below this value are flagged with
     *     {@link C#BUFFER_FLAG_DECODE_ONLY}.
     * @return {@link C#RESULT_NOTHING_READ}, {@link C#RESULT_FORMAT_READ} or
     *     {@link C#RESULT_BUFFER_READ}.
     */
    public int read(FormatHolder formatHolder, DecoderInputBuffer buffer, boolean formatRequired, boolean loadingFinished, long decodeOnlyUntilUs) {
        int result = metadataQueue.read(formatHolder, buffer, formatRequired, loadingFinished, downstreamFormat, extrasHolder);
        if (result == C.RESULT_FORMAT_READ) {
            // Remember the new downstream format so subsequent reads don't re-report it.
            downstreamFormat = formatHolder.format;
        } else if (result == C.RESULT_BUFFER_READ) {
            if (!buffer.isEndOfStream()) {
                if (buffer.timeUs < decodeOnlyUntilUs) {
                    buffer.addFlag(C.BUFFER_FLAG_DECODE_ONLY);
                }
                // Encrypted samples carry their crypto metadata in-band; read it first.
                if (buffer.isEncrypted()) {
                    readEncryptionData(buffer, extrasHolder);
                }
                // Copy the sample payload into the buffer.
                buffer.ensureSpaceForWrite(extrasHolder.size);
                readData(extrasHolder.offset, buffer.data, extrasHolder.size);
            }
        } else if (result != C.RESULT_NOTHING_READ) {
            throw new IllegalStateException();
        }
        return result;
    }

    /**
     * Reads encryption data for the current sample.
     * <p>
     * The encryption data is written into {@link DecoderInputBuffer#cryptoInfo}, and
     * {@link SampleExtrasHolder#size} is adjusted to subtract the number of bytes that were read. The
     * same value is added to {@link SampleExtrasHolder#offset}.
     *
     * @param buffer The buffer into which the encryption data should be written.
     * @param extrasHolder The extras holder whose offset should be read and subsequently adjusted.
     */
    private void readEncryptionData(DecoderInputBuffer buffer, SampleExtrasHolder extrasHolder) {
        long offset = extrasHolder.offset;
        // Read the signal byte. Bit 7 indicates subsample encryption; bits 0-6 carry the IV size.
        scratch.reset(1);
        readData(offset, scratch.data, 1);
        offset++;
        byte signalByte = scratch.data[0];
        boolean subsampleEncryption = (signalByte & 0x80) != 0;
        int ivSize = signalByte & 0x7F;
        // Read the initialization vector.
        // NOTE(review): the IV array is always allocated with 16 bytes but only ivSize bytes are
        // read into it — assumes ivSize <= 16; confirm against the writer side.
        if (buffer.cryptoInfo.iv == null) {
            buffer.cryptoInfo.iv = new byte[16];
        }
        readData(offset, buffer.cryptoInfo.iv, ivSize);
        offset += ivSize;
        // Read the subsample count, if present.
        int subsampleCount;
        if (subsampleEncryption) {
            scratch.reset(2);
            readData(offset, scratch.data, 2);
            offset += 2;
            subsampleCount = scratch.readUnsignedShort();
        } else {
            subsampleCount = 1;
        }
        // Write the clear and encrypted subsample sizes, reusing existing arrays when big enough.
        int[] clearDataSizes = buffer.cryptoInfo.numBytesOfClearData;
        if (clearDataSizes == null || clearDataSizes.length < subsampleCount) {
            clearDataSizes = new int[subsampleCount];
        }
        int[] encryptedDataSizes = buffer.cryptoInfo.numBytesOfEncryptedData;
        if (encryptedDataSizes == null || encryptedDataSizes.length < subsampleCount) {
            encryptedDataSizes = new int[subsampleCount];
        }
        if (subsampleEncryption) {
            // Each subsample entry is 6 bytes: 2-byte clear size + 4-byte encrypted size.
            int subsampleDataLength = 6 * subsampleCount;
            scratch.reset(subsampleDataLength);
            readData(offset, scratch.data, subsampleDataLength);
            offset += subsampleDataLength;
            scratch.setPosition(0);
            for (int i = 0; i < subsampleCount; i++) {
                clearDataSizes[i] = scratch.readUnsignedShort();
                encryptedDataSizes[i] = scratch.readUnsignedIntToInt();
            }
        } else {
            // Whole payload (after the in-band crypto header) is a single encrypted subsample.
            clearDataSizes[0] = 0;
            encryptedDataSizes[0] = extrasHolder.size - (int) (offset - extrasHolder.offset);
        }
        // Populate the cryptoInfo.
        CryptoData cryptoData = extrasHolder.cryptoData;
        buffer.cryptoInfo.set(subsampleCount, clearDataSizes, encryptedDataSizes, cryptoData.encryptionKey, buffer.cryptoInfo.iv, cryptoData.cryptoMode, cryptoData.encryptedBlocks, cryptoData.clearBlocks);
        // Adjust the offset and size to take into account the bytes read.
        int bytesRead = (int) (offset - extrasHolder.offset);
        extrasHolder.offset += bytesRead;
        extrasHolder.size -= bytesRead;
    }

    /**
     * Reads data from the front of the rolling buffer into a {@link ByteBuffer}.
     *
     * @param absolutePosition The absolute position from which data should be read.
     * @param target The buffer into which data should be written.
     * @param length The number of bytes to read.
     */
    private void readData(long absolutePosition, ByteBuffer target, int length) {
        advanceReadTo(absolutePosition);
        int bytesLeft = length;
        while (bytesLeft > 0) {
            // Copy at most up to the end of the current allocation node.
            int chunk = Math.min(bytesLeft, (int) (readAllocationNode.endPosition - absolutePosition));
            Allocation allocation = readAllocationNode.allocation;
            target.put(allocation.data, readAllocationNode.translateOffset(absolutePosition), chunk);
            bytesLeft -= chunk;
            absolutePosition += chunk;
            if (absolutePosition == readAllocationNode.endPosition) {
                readAllocationNode = readAllocationNode.next;
            }
        }
    }

    /**
     * Reads data from the front of the rolling buffer into a byte array.
     *
     * @param absolutePosition The absolute position from which data should be read.
     * @param target The array into which data should be written.
     * @param length The number of bytes to read.
     */
    private void readData(long absolutePosition, byte[] target, int length) {
        advanceReadTo(absolutePosition);
        int bytesLeft = length;
        while (bytesLeft > 0) {
            // Copy at most up to the end of the current allocation node.
            int chunk = Math.min(bytesLeft, (int) (readAllocationNode.endPosition - absolutePosition));
            Allocation allocation = readAllocationNode.allocation;
            System.arraycopy(allocation.data, readAllocationNode.translateOffset(absolutePosition), target, length - bytesLeft, chunk);
            bytesLeft -= chunk;
            absolutePosition += chunk;
            if (absolutePosition == readAllocationNode.endPosition) {
                readAllocationNode = readAllocationNode.next;
            }
        }
    }

    /**
     * Walks {@link #readAllocationNode} forward until it covers {@code absolutePosition}.
     *
     * @param absolutePosition The position to which {@link #readAllocationNode} should be advanced.
     */
    private void advanceReadTo(long absolutePosition) {
        for (; absolutePosition >= readAllocationNode.endPosition; readAllocationNode = readAllocationNode.next) {
            // Intentionally empty: the loop header performs the advance.
        }
    }

    /**
     * Advances {@link #firstAllocationNode} to {@code absolutePosition}, returning each passed
     * node's allocation to the allocator and clearing the node. {@link #readAllocationNode} is
     * pulled forward too if it would otherwise fall behind the new head.
     *
     * @param absolutePosition The target position for {@link #firstAllocationNode}. A value of
     *     {@link C#POSITION_UNSET} makes this call a no-op.
     */
    private void discardDownstreamTo(long absolutePosition) {
        if (absolutePosition == C.POSITION_UNSET) {
            return;
        }
        while (absolutePosition >= firstAllocationNode.endPosition) {
            // Return the head node's allocation and unlink it.
            allocator.release(firstAllocationNode.allocation);
            firstAllocationNode = firstAllocationNode.clear();
        }
        // Keep the read pointer at or ahead of the head of the list.
        if (readAllocationNode.startPosition < firstAllocationNode.startPosition) {
            readAllocationNode = firstAllocationNode;
        }
    }

    // Called by the loading thread.
    /**
     * Registers a listener that is notified when the upstream format changes.
     *
     * @param listener The listener.
     */
    public void setUpstreamFormatChangeListener(UpstreamFormatChangedListener listener) {
        this.upstreamFormatChangeListener = listener;
    }

    /**
     * Sets an offset added to the timestamps (and sub-sample timestamps) of all samples queued
     * from now on.
     *
     * @param sampleOffsetUs The timestamp offset in microseconds.
     */
    public void setSampleOffsetUs(long sampleOffsetUs) {
        boolean changed = this.sampleOffsetUs != sampleOffsetUs;
        if (changed) {
            this.sampleOffsetUs = sampleOffsetUs;
            // Force the upstream format to be re-adjusted before the next sample is committed.
            pendingFormatAdjustment = true;
        }
    }

    @Override
    public void format(Format format) {
        // Fold the current sample offset into the format's subsample offset before queueing it.
        Format adjusted = getAdjustedSampleFormat(format, sampleOffsetUs);
        boolean changed = metadataQueue.format(adjusted);
        lastUnadjustedFormat = format;
        pendingFormatAdjustment = false;
        if (changed && upstreamFormatChangeListener != null) {
            upstreamFormatChangeListener.onUpstreamFormatChanged(adjusted);
        }
    }

    @Override
    public int sampleData(ExtractorInput input, int length, boolean allowEndOfInput) throws IOException, InterruptedException {
        // Clamp the requested length to what fits in the current write allocation.
        length = preAppend(length);
        int bytesAppended = input.read(writeAllocationNode.allocation.data, writeAllocationNode.translateOffset(totalBytesWritten), length);
        if (bytesAppended != C.RESULT_END_OF_INPUT) {
            postAppend(bytesAppended);
            return bytesAppended;
        }
        if (!allowEndOfInput) {
            throw new EOFException();
        }
        return C.RESULT_END_OF_INPUT;
    }

    @Override
    public void sampleData(ParsableByteArray buffer, int length) {
        // Copy in chunks, each bounded by the space left in the current write allocation.
        int remaining = length;
        while (remaining > 0) {
            int bytesAppended = preAppend(remaining);
            buffer.readBytes(writeAllocationNode.allocation.data, writeAllocationNode.translateOffset(totalBytesWritten), bytesAppended);
            remaining -= bytesAppended;
            postAppend(bytesAppended);
        }
    }

    @Override
    public void sampleMetadata(long timeUs, @C.BufferFlags int flags, int size, int offset, @Nullable CryptoData cryptoData) {
        if (pendingFormatAdjustment) {
            // The sample offset changed since the last format; re-queue the adjusted format first.
            format(lastUnadjustedFormat);
        }
        long adjustedTimeUs = timeUs + sampleOffsetUs;
        if (pendingSplice) {
            // A splice only completes on a keyframe the metadata queue accepts.
            boolean isKeyframe = (flags & C.BUFFER_FLAG_KEY_FRAME) != 0;
            if (!isKeyframe || !metadataQueue.attemptSplice(adjustedTimeUs)) {
                return;
            }
            pendingSplice = false;
        }
        long absoluteOffset = totalBytesWritten - size - offset;
        metadataQueue.commitSample(adjustedTimeUs, flags, absoluteOffset, size, cryptoData);
    }

    // Private methods.
    /**
     * Clears allocation nodes starting from {@code fromNode}, releasing their allocations back to
     * the allocator in a single bulk call.
     *
     * @param fromNode The node from which to clear.
     */
    private void clearAllocationNodes(AllocationNode fromNode) {
        if (!fromNode.wasInitialized) {
            return;
        }
        // Bulk release allocations for performance (it's significantly faster when using
        // DefaultAllocator because the allocator's lock only needs to be acquired and released once)
        // [Internal: See b/29542039].
        int nodeCount = (writeAllocationNode.wasInitialized ? 1 : 0) + ((int) (writeAllocationNode.startPosition - fromNode.startPosition) / allocationLength);
        Allocation[] toRelease = new Allocation[nodeCount];
        AllocationNode node = fromNode;
        for (int i = 0; i < toRelease.length; i++) {
            toRelease[i] = node.allocation;
            node = node.clear();
        }
        allocator.release(toRelease);
    }

    /**
     * Called before writing sample data to {@link #writeAllocationNode}. Lazily initializes the
     * node with a fresh allocation on first use.
     *
     * @param length The number of bytes the caller wishes to write.
     * @return The number of bytes the caller may actually write, possibly less than {@code length}.
     */
    private int preAppend(int length) {
        if (!writeAllocationNode.wasInitialized) {
            AllocationNode successor = new AllocationNode(writeAllocationNode.endPosition, allocationLength);
            writeAllocationNode.initialize(allocator.allocate(), successor);
        }
        // Never allow a write to spill past the end of the current allocation.
        return Math.min(length, (int) (writeAllocationNode.endPosition - totalBytesWritten));
    }

    /**
     * Called after writing sample data. Advances {@link #writeAllocationNode} when the current
     * allocation has been filled exactly.
     *
     * @param length The number of bytes that were written.
     */
    private void postAppend(int length) {
        totalBytesWritten += length;
        boolean allocationFull = totalBytesWritten == writeAllocationNode.endPosition;
        if (allocationFull) {
            writeAllocationNode = writeAllocationNode.next;
        }
    }

    /**
     * Returns a copy of {@code format} with {@code sampleOffsetUs} folded into
     * {@link Format#subsampleOffsetUs}, or the format unchanged when no adjustment is needed.
     *
     * @param format The {@link Format} to adjust. May be null, in which case null is returned.
     * @param sampleOffsetUs The offset to apply.
     * @return The adjusted {@link Format}.
     */
    private static Format getAdjustedSampleFormat(Format format, long sampleOffsetUs) {
        if (format == null) {
            return null;
        }
        boolean needsAdjustment = sampleOffsetUs != 0 && format.subsampleOffsetUs != Format.OFFSET_SAMPLE_RELATIVE;
        return needsAdjustment
                ? format.copyWithSubsampleOffsetUs(format.subsampleOffsetUs + sampleOffsetUs)
                : format;
    }

    /**
     * A node in a linked list of {@link Allocation}s held by the output.
     */
    private static final clreplaced AllocationNode {

        /**
         * The absolute position of the start of the data (inclusive).
         */
        public final long startPosition;

        /**
         * The absolute position of the end of the data (exclusive).
         */
        public final long endPosition;

        /**
         * Whether the node has been initialized. Remains true after {@link #clear()}.
         */
        public boolean wasInitialized;

        /**
         * The {@link Allocation}, or {@code null} if the node is not initialized.
         */
        @Nullable
        public Allocation allocation;

        /**
         * The next {@link AllocationNode} in the list, or {@code null} if the node has not been
         * initialized. Remains set after {@link #clear()}.
         */
        @Nullable
        public AllocationNode next;

        /**
         * @param startPosition See {@link #startPosition}.
         * @param allocationLength The length of the {@link Allocation} with which this node will be
         *     initialized.
         */
        public AllocationNode(long startPosition, int allocationLength) {
            this.startPosition = startPosition;
            this.endPosition = startPosition + allocationLength;
        }

        /**
         * Initializes the node.
         *
         * @param allocation The node's {@link Allocation}.
         * @param next The next {@link AllocationNode}.
         */
        public void initialize(Allocation allocation, AllocationNode next) {
            this.allocation = allocation;
            this.next = next;
            wasInitialized = true;
        }

        /**
         * Gets the offset into the {@link #allocation}'s {@link Allocation#data} that corresponds to
         * the specified absolute position.
         *
         * @param absolutePosition The absolute position.
         * @return The corresponding offset into the allocation's data.
         */
        public int translateOffset(long absolutePosition) {
            return (int) (absolutePosition - startPosition) + allocation.offset;
        }

        /**
         * Clears {@link #allocation} and {@link #next}.
         *
         * @return The cleared next {@link AllocationNode}.
         */
        public AllocationNode clear() {
            allocation = null;
            AllocationNode temp = next;
            next = null;
            return temp;
        }
    }
}

19 Source : SampleQueue.java
with GNU General Public License v2.0
from warren-bank

/**
 * Returns a copy of {@code format} with {@code sampleOffsetUs} folded into
 * {@link Format#subsampleOffsetUs}, or the format unchanged when no adjustment is needed.
 *
 * @param format The {@link Format} to adjust. May be null, in which case null is returned.
 * @param sampleOffsetUs The offset to apply.
 * @return The adjusted {@link Format}.
 */
private static Format getAdjustedSampleFormat(Format format, long sampleOffsetUs) {
    if (format == null) {
        return null;
    }
    boolean needsAdjustment = sampleOffsetUs != 0 && format.subsampleOffsetUs != Format.OFFSET_SAMPLE_RELATIVE;
    return needsAdjustment
            ? format.copyWithSubsampleOffsetUs(format.subsampleOffsetUs + sampleOffsetUs)
            : format;
}

19 Source : SampleQueue.java
with GNU General Public License v2.0
from warren-bank

@Override
public void format(Format format) {
    // Fold the current sample offset into the format's subsample offset before queueing it.
    Format adjusted = getAdjustedSampleFormat(format, sampleOffsetUs);
    boolean changed = metadataQueue.format(adjusted);
    lastUnadjustedFormat = format;
    pendingFormatAdjustment = false;
    if (changed && upstreamFormatChangeListener != null) {
        upstreamFormatChangeListener.onUpstreamFormatChanged(adjusted);
    }
}

19 Source : SampleMetadataQueue.java
with GNU General Public License v2.0
from warren-bank

/**
 * A queue of metadata describing the contents of a media buffer.
 */
/* package */
final class SampleMetadataQueue {

    /**
     * A holder for sample metadata not held by {@link DecoderInputBuffer}.
     */
    public static final clreplaced SampleExtrasHolder {

        public int size;

        public long offset;

        public CryptoData cryptoData;
    }

    // Number of extra slots added each time the parallel metadata arrays grow.
    private static final int SAMPLE_CAPACITY_INCREMENT = 1000;

    // Current length of the parallel metadata arrays below.
    private int capacity;

    // Per-sample source ids (parallel array, indexed by relative index).
    private int[] sourceIds;

    // Per-sample absolute data offsets in the rolling buffer.
    private long[] offsets;

    // Per-sample data sizes in bytes.
    private int[] sizes;

    // Per-sample buffer flags (C.BUFFER_FLAG_*).
    private int[] flags;

    // Per-sample presentation timestamps in microseconds.
    private long[] timesUs;

    // Per-sample crypto metadata; entries may be null for clear samples.
    private CryptoData[] cryptoDatas;

    // Per-sample formats.
    private Format[] formats;

    // Number of samples currently held.
    private int length;

    // Absolute index of the first held sample.
    private int absoluteFirstIndex;

    // Index of the first held sample within the circular arrays.
    private int relativeFirstIndex;

    // Read position, relative to absoluteFirstIndex.
    private int readPosition;

    // Largest timestamp of any sample discarded from the front of the queue.
    private long largestDiscardedTimestampUs;

    // Largest timestamp queued since the last reset.
    private long largestQueuedTimestampUs;

    // Whether the next committed sample must be a keyframe.
    private boolean upstreamKeyframeRequired;

    // Whether a format must be set before any more samples can be queued.
    private boolean upstreamFormatRequired;

    // The format in which samples are currently being queued.
    private Format upstreamFormat;

    // The source id reported for samples queued from now on.
    private int upstreamSourceId;

    /** Creates the queue with an initial capacity of {@code SAMPLE_CAPACITY_INCREMENT} samples. */
    public SampleMetadataQueue() {
        capacity = SAMPLE_CAPACITY_INCREMENT;
        // Parallel arrays, all sized to the shared capacity.
        sourceIds = new int[capacity];
        offsets = new long[capacity];
        timesUs = new long[capacity];
        flags = new int[capacity];
        sizes = new int[capacity];
        cryptoDatas = new CryptoData[capacity];
        formats = new Format[capacity];
        // No samples queued or discarded yet.
        largestDiscardedTimestampUs = Long.MIN_VALUE;
        largestQueuedTimestampUs = Long.MIN_VALUE;
        // A format and an initial keyframe are required before samples can be committed.
        upstreamFormatRequired = true;
        upstreamKeyframeRequired = true;
    }

    /**
     * Clears all sample metadata from the queue.
     *
     * @param resetUpstreamFormat Whether the upstream format should be cleared. If set to false,
     *     samples queued after the reset (and before a subsequent call to {@link #format(Format)})
     *     are assumed to have the current upstream format. If set to true, {@link #format(Format)}
     *     must be called after the reset before any more samples can be queued.
     */
    public void reset(boolean resetUpstreamFormat) {
        length = 0;
        absoluteFirstIndex = 0;
        relativeFirstIndex = 0;
        readPosition = 0;
        // The first sample committed after a reset must be a keyframe.
        upstreamKeyframeRequired = true;
        largestDiscardedTimestampUs = Long.MIN_VALUE;
        largestQueuedTimestampUs = Long.MIN_VALUE;
        if (resetUpstreamFormat) {
            upstreamFormat = null;
            upstreamFormatRequired = true;
        }
    }

    /** Returns the absolute index at which the next sample will be written. */
    public int getWriteIndex() {
        return this.absoluteFirstIndex + this.length;
    }

    /**
     * Discards samples from the write side of the queue.
     *
     * @param discardFromIndex The absolute index of the first sample to be discarded. Must lie
     *     between the read index and the write index (both inclusive).
     * @return The reduced total number of bytes written after the samples have been discarded, or 0
     *     if the queue is now empty.
     * @throws IllegalArgumentException If {@code discardFromIndex} is out of range.
     */
    public long discardUpstreamSamples(int discardFromIndex) {
        int discardCount = getWriteIndex() - discardFromIndex;
        // Fix for scraper-garbled "Assertions.checkArgument": validate the range explicitly with
        // the same failure semantics (IllegalArgumentException on violation).
        if (discardCount < 0 || discardCount > (length - readPosition)) {
            throw new IllegalArgumentException();
        }
        length -= discardCount;
        // The largest queued timestamp may have been discarded; recompute from what remains.
        largestQueuedTimestampUs = Math.max(largestDiscardedTimestampUs, getLargestTimestamp(length));
        if (length == 0) {
            return 0;
        } else {
            int relativeLastWriteIndex = getRelativeIndex(length - 1);
            return offsets[relativeLastWriteIndex] + sizes[relativeLastWriteIndex];
        }
    }

    /** Sets the source id reported for samples queued from now on. */
    public void sourceId(int sourceId) {
        this.upstreamSourceId = sourceId;
    }

    // Called by the consuming thread.
    /** Returns the absolute index of the first sample still held. */
    public int getFirstIndex() {
        return this.absoluteFirstIndex;
    }

    /** Returns the absolute index of the next sample to be read. */
    public int getReadIndex() {
        return this.absoluteFirstIndex + this.readPosition;
    }

    /**
     * Returns the source id of the next sample to be read, or the current upstream source id when
     * the queue is empty or fully read.
     *
     * @return The source id.
     */
    public int peekSourceId() {
        if (!hasNextSample()) {
            return upstreamSourceId;
        }
        return sourceIds[getRelativeIndex(readPosition)];
    }

    /** Returns whether at least one sample is available to be read. */
    public synchronized boolean hasNextSample() {
        return this.readPosition != this.length;
    }

    /** Returns the upstream {@link Format} in which samples are being queued, or null if a format is still required. */
    public synchronized Format getUpstreamFormat() {
        if (upstreamFormatRequired) {
            return null;
        }
        return upstreamFormat;
    }

    /**
     * Returns the largest sample timestamp queued since the last call to {@link #reset(boolean)}.
     * <p>
     * Samples removed via {@link #discardUpstreamSamples(int)} do not count as having been queued,
     * whereas samples already dequeued from the front of the queue do.
     *
     * @return The largest queued sample timestamp, or {@link Long#MIN_VALUE} if nothing has been
     *     queued.
     */
    public synchronized long getLargestQueuedTimestampUs() {
        return this.largestQueuedTimestampUs;
    }

    /** Returns the timestamp of the first sample, or {@link Long#MIN_VALUE} when the queue is empty. */
    public synchronized long getFirstTimestampUs() {
        if (length == 0) {
            return Long.MIN_VALUE;
        }
        return timesUs[relativeFirstIndex];
    }

    /** Moves the read position back to the first sample retained in the queue. */
    public synchronized void rewind() {
        this.readPosition = 0;
    }

    /**
     * Attempts to read from the queue.
     *
     * @param formatHolder A {@link FormatHolder} to populate in the case of reading a format.
     * @param buffer A {@link DecoderInputBuffer} to populate in the case of reading a sample or the
     *     end of the stream. If a sample is read then the buffer is populated with information
     *     about the sample, but not its data. The size and absolute position of the data in the
     *     rolling buffer is stored in {@code extrasHolder}, along with an encryption id if present
     *     and the absolute position of the first byte that may still be required after the current
     *     sample has been read. May be null if the caller requires that the format of the stream be
     *     read even if it's not changing.
     * @param formatRequired Whether the caller requires that the format of the stream be read even
     *     if it's not changing. A sample will never be read if set to true, however it is still
     *     possible for the end of stream or nothing to be read.
     * @param loadingFinished True if an empty queue should be considered the end of the stream.
     * @param downstreamFormat The current downstream {@link Format}. If the format of the next
     *     sample is different to the current downstream format then a format will be read.
     * @param extrasHolder The holder into which extra sample information should be written.
     * @return The result, which can be {@link C#RESULT_NOTHING_READ}, {@link C#RESULT_FORMAT_READ}
     *     or {@link C#RESULT_BUFFER_READ}.
     */
    // Reference (in)equality between the queued format and downstreamFormat is intentional here —
    // formats are compared by identity, not equals() — hence the suppression below.
    @SuppressWarnings("ReferenceEquality")
    public synchronized int read(FormatHolder formatHolder, DecoderInputBuffer buffer, boolean formatRequired, boolean loadingFinished, Format downstreamFormat, SampleExtrasHolder extrasHolder) {
        if (!hasNextSample()) {
            // Empty queue: signal end of stream, a pending format change, or nothing.
            if (loadingFinished) {
                buffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM);
                return C.RESULT_BUFFER_READ;
            } else if (upstreamFormat != null && (formatRequired || upstreamFormat != downstreamFormat)) {
                formatHolder.format = upstreamFormat;
                return C.RESULT_FORMAT_READ;
            } else {
                return C.RESULT_NOTHING_READ;
            }
        }
        int relativeReadIndex = getRelativeIndex(readPosition);
        // A format change (by identity) must be reported before the sample that carries it.
        if (formatRequired || formats[relativeReadIndex] != downstreamFormat) {
            formatHolder.format = formats[relativeReadIndex];
            return C.RESULT_FORMAT_READ;
        }
        if (buffer.isFlagsOnly()) {
            return C.RESULT_NOTHING_READ;
        }
        // Populate sample metadata; the actual data copy is done by the caller using extrasHolder.
        buffer.timeUs = timesUs[relativeReadIndex];
        buffer.setFlags(flags[relativeReadIndex]);
        extrasHolder.size = sizes[relativeReadIndex];
        extrasHolder.offset = offsets[relativeReadIndex];
        extrasHolder.cryptoData = cryptoDatas[relativeReadIndex];
        readPosition++;
        return C.RESULT_BUFFER_READ;
    }

    /**
     * Attempts to advance the read position to the sample before or at {@code timeUs}.
     *
     * @param timeUs The time to advance to.
     * @param toKeyframe Whether to advance only to the keyframe before or at the given time,
     *     instead of to any sample before or at it.
     * @param allowTimeBeyondBuffer Whether a {@code timeUs} beyond the end of the queue may succeed
     *     by advancing to the last sample (or keyframe) in the queue.
     * @return The number of skipped samples (possibly 0) on success, or
     *     {@link SampleQueue#ADVANCE_FAILED} otherwise. On success the read position is unchanged
     *     or advanced, and now points at a sample satisfying the given criteria.
     */
    public synchronized int advanceTo(long timeUs, boolean toKeyframe, boolean allowTimeBeyondBuffer) {
        int relativeReadIndex = getRelativeIndex(readPosition);
        if (!hasNextSample()
                || timeUs < timesUs[relativeReadIndex]
                || (timeUs > largestQueuedTimestampUs && !allowTimeBeyondBuffer)) {
            return SampleQueue.ADVANCE_FAILED;
        }
        int skipCount = findSampleBefore(relativeReadIndex, length - readPosition, timeUs, toKeyframe);
        if (skipCount == -1) {
            return SampleQueue.ADVANCE_FAILED;
        }
        readPosition += skipCount;
        return skipCount;
    }

    /**
     * Moves the read position to the end of the queue.
     *
     * @return How many samples were skipped.
     */
    public synchronized int advanceToEnd() {
        int skipped = length - readPosition;
        readPosition = length;
        return skipped;
    }

    /**
     * Attempts to move the read position to the given sample index.
     *
     * @param sampleIndex The target sample index.
     * @return True if the read position was updated. False if {@code sampleIndex} is below the
     *     index of the first held sample or above the index of the next sample to be written.
     */
    public synchronized boolean setReadPosition(int sampleIndex) {
        boolean inRange = absoluteFirstIndex <= sampleIndex && sampleIndex <= absoluteFirstIndex + length;
        if (!inRange) {
            return false;
        }
        readPosition = sampleIndex - absoluteFirstIndex;
        return true;
    }

    /**
     * Discards up to but not including the sample immediately before or at the specified time.
     *
     * @param timeUs The time to discard up to.
     * @param toKeyframe If true then discards samples up to the keyframe before or at the specified
     *     time, rather than just any sample before or at that time.
     * @param stopAtReadPosition If true then samples are only discarded if they're before the read
     *     position. If false then samples at and beyond the read position may be discarded, in which
     *     case the read position is advanced to the first remaining sample.
     * @return The corresponding offset up to which data should be discarded, or
     *     {@link C#POSITION_UNSET} if no discarding of data is necessary.
     */
    public synchronized long discardTo(long timeUs, boolean toKeyframe, boolean stopAtReadPosition) {
        // Nothing queued, or the target time precedes the first queued sample.
        if (length == 0 || timeUs < timesUs[relativeFirstIndex]) {
            return C.POSITION_UNSET;
        }
        int searchLength;
        if (stopAtReadPosition && readPosition != length) {
            // Include the sample at the read position in the search, but nothing beyond it.
            searchLength = readPosition + 1;
        } else {
            searchLength = length;
        }
        int sampleCount = findSampleBefore(relativeFirstIndex, searchLength, timeUs, toKeyframe);
        return sampleCount == -1 ? C.POSITION_UNSET : discardSamples(sampleCount);
    }

    /**
     * Discards samples up to but not including the read position.
     *
     * @return The corresponding offset up to which data should be discarded, or
     *     {@link C#POSITION_UNSET} if no discarding of data is necessary.
     */
    public synchronized long discardToRead() {
        return readPosition == 0 ? C.POSITION_UNSET : discardSamples(readPosition);
    }

    /**
     * Discards all samples in the queue. The read position is also advanced.
     *
     * @return The corresponding offset up to which data should be discarded, or
     *     {@link C#POSITION_UNSET} if no discarding of data is necessary.
     */
    public synchronized long discardToEnd() {
        return length == 0 ? C.POSITION_UNSET : discardSamples(length);
    }

    // Called by the loading thread.
    /**
     * Sets the upstream {@link Format} that will be attached to subsequently committed samples.
     *
     * @param format The new upstream format, or null if the format is not yet known.
     * @return Whether the stored upstream format was actually changed.
     */
    public synchronized boolean format(Format format) {
        if (format == null) {
            upstreamFormatRequired = true;
            return false;
        }
        upstreamFormatRequired = false;
        if (Util.areEqual(format, upstreamFormat)) {
            // Identical to the current upstream format: keep the existing instance so readers can
            // rely on referential equality when detecting format changes.
            return false;
        }
        upstreamFormat = format;
        return true;
    }

    /**
     * Commits metadata for a sample written to the queue, growing the backing circular buffers if
     * they become full.
     *
     * <p>If a keyframe is still required upstream, non-keyframe samples are dropped until one
     * arrives.
     *
     * @param timeUs The presentation timestamp of the sample, in microseconds.
     * @param sampleFlags Flags that accompany the sample. See {@code C.BUFFER_FLAG_*}.
     * @param offset The offset of the sample data.
     * @param size The size of the sample data, in bytes.
     * @param cryptoData The encryption data for the sample, or null.
     */
    public synchronized void commitSample(long timeUs, @C.BufferFlags int sampleFlags, long offset, int size, CryptoData cryptoData) {
        if (upstreamKeyframeRequired) {
            // Drop samples until the first keyframe arrives.
            if ((sampleFlags & C.BUFFER_FLAG_KEY_FRAME) == 0) {
                return;
            }
            upstreamKeyframeRequired = false;
        }
        // A format must have been committed before any sample metadata.
        // Fixed: the identifier was corrupted to "replacedertions" in the source listing.
        Assertions.checkState(!upstreamFormatRequired);
        commitSampleTimestamp(timeUs);
        int relativeEndIndex = getRelativeIndex(length);
        timesUs[relativeEndIndex] = timeUs;
        offsets[relativeEndIndex] = offset;
        sizes[relativeEndIndex] = size;
        flags[relativeEndIndex] = sampleFlags;
        cryptoDatas[relativeEndIndex] = cryptoData;
        formats[relativeEndIndex] = upstreamFormat;
        sourceIds[relativeEndIndex] = upstreamSourceId;
        length++;
        if (length == capacity) {
            // Increase the capacity, unrolling the circular buffer so that the first sample moves
            // to index 0 of the new arrays.
            int newCapacity = capacity + SAMPLE_CAPACITY_INCREMENT;
            int[] newSourceIds = new int[newCapacity];
            long[] newOffsets = new long[newCapacity];
            long[] newTimesUs = new long[newCapacity];
            int[] newFlags = new int[newCapacity];
            int[] newSizes = new int[newCapacity];
            CryptoData[] newCryptoDatas = new CryptoData[newCapacity];
            Format[] newFormats = new Format[newCapacity];
            int beforeWrap = capacity - relativeFirstIndex;
            System.arraycopy(offsets, relativeFirstIndex, newOffsets, 0, beforeWrap);
            System.arraycopy(timesUs, relativeFirstIndex, newTimesUs, 0, beforeWrap);
            System.arraycopy(flags, relativeFirstIndex, newFlags, 0, beforeWrap);
            System.arraycopy(sizes, relativeFirstIndex, newSizes, 0, beforeWrap);
            System.arraycopy(cryptoDatas, relativeFirstIndex, newCryptoDatas, 0, beforeWrap);
            System.arraycopy(formats, relativeFirstIndex, newFormats, 0, beforeWrap);
            System.arraycopy(sourceIds, relativeFirstIndex, newSourceIds, 0, beforeWrap);
            int afterWrap = relativeFirstIndex;
            System.arraycopy(offsets, 0, newOffsets, beforeWrap, afterWrap);
            System.arraycopy(timesUs, 0, newTimesUs, beforeWrap, afterWrap);
            System.arraycopy(flags, 0, newFlags, beforeWrap, afterWrap);
            System.arraycopy(sizes, 0, newSizes, beforeWrap, afterWrap);
            System.arraycopy(cryptoDatas, 0, newCryptoDatas, beforeWrap, afterWrap);
            System.arraycopy(formats, 0, newFormats, beforeWrap, afterWrap);
            System.arraycopy(sourceIds, 0, newSourceIds, beforeWrap, afterWrap);
            offsets = newOffsets;
            timesUs = newTimesUs;
            flags = newFlags;
            sizes = newSizes;
            cryptoDatas = newCryptoDatas;
            formats = newFormats;
            sourceIds = newSourceIds;
            relativeFirstIndex = 0;
            length = capacity;
            capacity = newCapacity;
        }
    }

    /**
     * Records that a sample with the given timestamp has been committed, tracking the largest
     * queued timestamp.
     *
     * @param timeUs The timestamp of the committed sample, in microseconds.
     */
    public synchronized void commitSampleTimestamp(long timeUs) {
        if (timeUs > largestQueuedTimestampUs) {
            largestQueuedTimestampUs = timeUs;
        }
    }

    /**
     * Attempts to discard samples from the end of the queue to allow samples starting from the
     * specified timestamp to be spliced in. Samples will not be discarded prior to the read position.
     *
     * @param timeUs The timestamp at which the splice occurs.
     * @return Whether the splice was successful.
     */
    public synchronized boolean attemptSplice(long timeUs) {
        if (length == 0) {
            // Empty queue: the splice succeeds iff it starts after everything already discarded.
            return timeUs > largestDiscardedTimestampUs;
        }
        long largestReadTimestampUs = Math.max(largestDiscardedTimestampUs, getLargestTimestamp(readPosition));
        if (largestReadTimestampUs >= timeUs) {
            // A sample at or after the splice point has already been read; splicing would rewind
            // past the read position, so fail.
            return false;
        }
        // Walk backwards from the last sample, dropping samples with timestamps at or after the
        // splice point, but never dropping past the read position. The relative index wraps around
        // the circular buffer.
        int retainCount = length;
        int relativeSampleIndex = getRelativeIndex(length - 1);
        while (retainCount > readPosition && timesUs[relativeSampleIndex] >= timeUs) {
            retainCount--;
            relativeSampleIndex--;
            if (relativeSampleIndex == -1) {
                relativeSampleIndex = capacity - 1;
            }
        }
        discardUpstreamSamples(absoluteFirstIndex + retainCount);
        return true;
    }

    // Internal methods.
    /**
     * Finds the sample in the specified range that's before or at the specified time. If
     * {@code keyframe} is {@code true} then the sample is additionally required to be a keyframe.
     *
     * @param relativeStartIndex The relative index from which to start searching.
     * @param length The length of the range being searched.
     * @param timeUs The specified time.
     * @param keyframe Whether only keyframes should be considered.
     * @return The offset from {@code relativeFirstIndex} to the found sample, or -1 if no matching
     *     sample was found.
     */
    private int findSampleBefore(int relativeStartIndex, int length, long timeUs, boolean keyframe) {
        // This could be optimized to use a binary search, however in practice callers to this method
        // normally pass times near to the start of the search region. Hence it's unclear whether
        // switching to a binary search would yield any real benefit.
        int sampleCountToTarget = -1;
        int searchIndex = relativeStartIndex;
        for (int i = 0; i < length && timesUs[searchIndex] <= timeUs; i++) {
            if (!keyframe || (flags[searchIndex] & C.BUFFER_FLAG_KEY_FRAME) != 0) {
                // We've found a suitable sample.
                sampleCountToTarget = i;
            }
            searchIndex++;
            if (searchIndex == capacity) {
                // Wrap around the circular buffer.
                searchIndex = 0;
            }
        }
        return sampleCountToTarget;
    }

    /**
     * Discards the specified number of samples.
     *
     * @param discardCount The number of samples to discard.
     * @return The corresponding offset up to which data should be discarded.
     */
    private long discardSamples(int discardCount) {
        // Track the largest timestamp seen among discarded samples.
        largestDiscardedTimestampUs = Math.max(largestDiscardedTimestampUs, getLargestTimestamp(discardCount));
        length -= discardCount;
        absoluteFirstIndex += discardCount;
        relativeFirstIndex += discardCount;
        if (relativeFirstIndex >= capacity) {
            // Wrap around the circular buffer.
            relativeFirstIndex -= capacity;
        }
        // The read position is relative to the first sample; clamp it at the new start.
        readPosition -= discardCount;
        if (readPosition < 0) {
            readPosition = 0;
        }
        if (length == 0) {
            // Everything was discarded: the discard offset is the end of the last discarded sample.
            int relativeLastDiscardIndex = (relativeFirstIndex == 0 ? capacity : relativeFirstIndex) - 1;
            return offsets[relativeLastDiscardIndex] + sizes[relativeLastDiscardIndex];
        } else {
            // Discard up to the start of the first remaining sample.
            return offsets[relativeFirstIndex];
        }
    }

    /**
     * Finds the largest timestamp of any sample from the start of the queue up to the specified
     * length, assuming that the timestamps prior to a keyframe are always less than the timestamp of
     * the keyframe itself, and of subsequent frames.
     *
     * @param length The length of the range being searched.
     * @return The largest timestamp, or {@link Long#MIN_VALUE} if {@code length == 0}.
     */
    private long getLargestTimestamp(int length) {
        if (length == 0) {
            return Long.MIN_VALUE;
        }
        long largestTimestampUs = Long.MIN_VALUE;
        // Scan backwards from the end of the range; the keyframe assumption documented above allows
        // the scan to stop at the first keyframe encountered.
        int relativeSampleIndex = getRelativeIndex(length - 1);
        for (int i = 0; i < length; i++) {
            largestTimestampUs = Math.max(largestTimestampUs, timesUs[relativeSampleIndex]);
            if ((flags[relativeSampleIndex] & C.BUFFER_FLAG_KEY_FRAME) != 0) {
                break;
            }
            relativeSampleIndex--;
            if (relativeSampleIndex == -1) {
                // Wrap around the circular buffer.
                relativeSampleIndex = capacity - 1;
            }
        }
        return largestTimestampUs;
    }

    /**
     * Returns the relative (wrapped) index for a given offset from the start of the queue.
     *
     * @param offset The offset, which must be in the range [0, length].
     */
    private int getRelativeIndex(int offset) {
        int index = relativeFirstIndex + offset;
        return index >= capacity ? index - capacity : index;
    }
}

Example 19 — Source: SampleMetadataQueue.java (GNU General Public License v2.0, from warren-bank)

/**
 * Commits metadata for a sample written to the queue, growing the backing circular buffers if
 * they become full.
 *
 * <p>If a keyframe is still required upstream, non-keyframe samples are dropped until one arrives.
 *
 * @param timeUs The presentation timestamp of the sample, in microseconds.
 * @param sampleFlags Flags that accompany the sample. See {@code C.BUFFER_FLAG_*}.
 * @param offset The offset of the sample data.
 * @param size The size of the sample data, in bytes.
 * @param cryptoData The encryption data for the sample, or null.
 */
public synchronized void commitSample(long timeUs, @C.BufferFlags int sampleFlags, long offset, int size, CryptoData cryptoData) {
    if (upstreamKeyframeRequired) {
        // Drop samples until the first keyframe arrives.
        if ((sampleFlags & C.BUFFER_FLAG_KEY_FRAME) == 0) {
            return;
        }
        upstreamKeyframeRequired = false;
    }
    // A format must have been committed before any sample metadata.
    // Fixed: the identifier was corrupted to "replacedertions" in the source listing.
    Assertions.checkState(!upstreamFormatRequired);
    commitSampleTimestamp(timeUs);
    int relativeEndIndex = getRelativeIndex(length);
    timesUs[relativeEndIndex] = timeUs;
    offsets[relativeEndIndex] = offset;
    sizes[relativeEndIndex] = size;
    flags[relativeEndIndex] = sampleFlags;
    cryptoDatas[relativeEndIndex] = cryptoData;
    formats[relativeEndIndex] = upstreamFormat;
    sourceIds[relativeEndIndex] = upstreamSourceId;
    length++;
    if (length == capacity) {
        // Increase the capacity, unrolling the circular buffer so that the first sample moves to
        // index 0 of the new arrays.
        int newCapacity = capacity + SAMPLE_CAPACITY_INCREMENT;
        int[] newSourceIds = new int[newCapacity];
        long[] newOffsets = new long[newCapacity];
        long[] newTimesUs = new long[newCapacity];
        int[] newFlags = new int[newCapacity];
        int[] newSizes = new int[newCapacity];
        CryptoData[] newCryptoDatas = new CryptoData[newCapacity];
        Format[] newFormats = new Format[newCapacity];
        int beforeWrap = capacity - relativeFirstIndex;
        System.arraycopy(offsets, relativeFirstIndex, newOffsets, 0, beforeWrap);
        System.arraycopy(timesUs, relativeFirstIndex, newTimesUs, 0, beforeWrap);
        System.arraycopy(flags, relativeFirstIndex, newFlags, 0, beforeWrap);
        System.arraycopy(sizes, relativeFirstIndex, newSizes, 0, beforeWrap);
        System.arraycopy(cryptoDatas, relativeFirstIndex, newCryptoDatas, 0, beforeWrap);
        System.arraycopy(formats, relativeFirstIndex, newFormats, 0, beforeWrap);
        System.arraycopy(sourceIds, relativeFirstIndex, newSourceIds, 0, beforeWrap);
        int afterWrap = relativeFirstIndex;
        System.arraycopy(offsets, 0, newOffsets, beforeWrap, afterWrap);
        System.arraycopy(timesUs, 0, newTimesUs, beforeWrap, afterWrap);
        System.arraycopy(flags, 0, newFlags, beforeWrap, afterWrap);
        System.arraycopy(sizes, 0, newSizes, beforeWrap, afterWrap);
        System.arraycopy(cryptoDatas, 0, newCryptoDatas, beforeWrap, afterWrap);
        System.arraycopy(formats, 0, newFormats, beforeWrap, afterWrap);
        System.arraycopy(sourceIds, 0, newSourceIds, beforeWrap, afterWrap);
        offsets = newOffsets;
        timesUs = newTimesUs;
        flags = newFlags;
        sizes = newSizes;
        cryptoDatas = newCryptoDatas;
        formats = newFormats;
        sourceIds = newSourceIds;
        relativeFirstIndex = 0;
        length = capacity;
        capacity = newCapacity;
    }
}

Example 19 — Source: ClippingMediaPeriod.java (GNU General Public License v2.0, from warren-bank)

/**
 * Returns whether an initial discontinuity should be signalled for a clipped period.
 *
 * @param startUs The clipping start position, in microseconds.
 * @param selections The track selections for the period; entries may be null.
 * @return True if any non-audio track is selected and the clipping start is non-zero.
 */
private static boolean shouldKeepInitialDiscontinuity(long startUs, TrackSelection[] selections) {
    // If the clipping start position is non-zero, the clipping sample streams will adjust
    // timestamps on buffers they read from the unclipped sample streams. These adjusted buffer
    // timestamps can be negative, because sample streams provide buffers starting at a key-frame,
    // which may be before the clipping start point. When the renderer reads a buffer with a
    // negative timestamp, its offset timestamp can jump backwards compared to the last timestamp
    // read in the previous period. Renderer implementations may not allow this, so we signal a
    // discontinuity which resets the renderers before they read the clipping sample stream.
    // However, for audio-only track selections we assume to have random access seek behaviour and
    // do not need an initial discontinuity to reset the renderer.
    if (startUs != 0) {
        for (TrackSelection trackSelection : selections) {
            if (trackSelection != null) {
                Format selectedFormat = trackSelection.getSelectedFormat();
                if (!MimeTypes.isAudio(selectedFormat.sampleMimeType)) {
                    return true;
                }
            }
        }
    }
    return false;
}

Example 19 — Source: SingleSampleMediaChunk.java (GNU General Public License v2.0, from warren-bank)

/**
 * A {@link BaseMediaChunk} for chunks consisting of a single raw sample.
 */
public final clreplaced SingleSampleMediaChunk extends BaseMediaChunk {

    private final int trackType;

    private final Format sampleFormat;

    private long nextLoadPosition;

    private boolean loadCompleted;

    /**
     * @param dataSource The source from which the data should be loaded.
     * @param dataSpec Defines the data to be loaded.
     * @param trackFormat See {@link #trackFormat}.
     * @param trackSelectionReason See {@link #trackSelectionReason}.
     * @param trackSelectionData See {@link #trackSelectionData}.
     * @param startTimeUs The start time of the media contained by the chunk, in microseconds.
     * @param endTimeUs The end time of the media contained by the chunk, in microseconds.
     * @param chunkIndex The index of the chunk, or {@link C#INDEX_UNSET} if it is not known.
     * @param trackType The type of the chunk. Typically one of the {@link C} {@code TRACK_TYPE_*}
     *     constants.
     * @param sampleFormat The {@link Format} of the sample in the chunk.
     */
    public SingleSampleMediaChunk(DataSource dataSource, DataSpec dataSpec, Format trackFormat, int trackSelectionReason, Object trackSelectionData, long startTimeUs, long endTimeUs, long chunkIndex, int trackType, Format sampleFormat) {
        super(dataSource, dataSpec, trackFormat, trackSelectionReason, trackSelectionData, startTimeUs, endTimeUs, /* clippedStartTimeUs= */
        C.TIME_UNSET, /* clippedEndTimeUs= */
        C.TIME_UNSET, chunkIndex);
        this.trackType = trackType;
        this.sampleFormat = sampleFormat;
    }

    @Override
    public boolean isLoadCompleted() {
        return loadCompleted;
    }

    // Loadable implementation.
    @Override
    public void cancelLoad() {
    // Do nothing.
    }

    @SuppressWarnings("NonAtomicVolatileUpdate")
    @Override
    public void load() throws IOException, InterruptedException {
        DataSpec loadDataSpec = dataSpec.subrange(nextLoadPosition);
        try {
            // Create and open the input.
            long length = dataSource.open(loadDataSpec);
            if (length != C.LENGTH_UNSET) {
                length += nextLoadPosition;
            }
            ExtractorInput extractorInput = new DefaultExtractorInput(dataSource, nextLoadPosition, length);
            BaseMediaChunkOutput output = getOutput();
            output.setSampleOffsetUs(0);
            TrackOutput trackOutput = output.track(0, trackType);
            trackOutput.format(sampleFormat);
            // Load the sample data.
            int result = 0;
            while (result != C.RESULT_END_OF_INPUT) {
                nextLoadPosition += result;
                result = trackOutput.sampleData(extractorInput, Integer.MAX_VALUE, true);
            }
            int sampleSize = (int) nextLoadPosition;
            trackOutput.sampleMetadata(startTimeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null);
        } finally {
            Util.closeQuietly(dataSource);
        }
        loadCompleted = true;
    }
}

Example 19 — Source: ChunkSampleStream.java (GNU General Public License v2.0, from warren-bank)

/**
 * A {@link SampleStream} that loads media in {@link Chunk}s, obtained from a {@link ChunkSource}.
 * May also be configured to expose additional embedded {@link SampleStream}s.
 */
public clreplaced ChunkSampleStream<T extends ChunkSource> implements SampleStream, SequenceableLoader, Loader.Callback<Chunk>, Loader.ReleaseCallback {

    /**
     * A callback to be notified when a sample stream has finished being released. See
     * {@link #release(ReleaseCallback)}.
     */
    public interface ReleaseCallback<T extends ChunkSource> {

        /**
         * Called when the {@link ChunkSampleStream} has finished being released.
         *
         * @param chunkSampleStream The released sample stream.
         */
        void onSampleStreamReleased(ChunkSampleStream<T> chunkSampleStream);
    }

    private static final String TAG = "ChunkSampleStream";

    /** The type of the primary track. One of the {@code C.TRACK_TYPE_*} constants. */
    public final int primaryTrackType;

    // Types and formats of the embedded tracks; parallel arrays with embeddedSampleQueues.
    private final int[] embeddedTrackTypes;

    private final Format[] embeddedTrackFormats;

    // Whether each embedded track currently has a selected EmbeddedSampleStream.
    private final boolean[] embeddedTracksSelected;

    private final T chunkSource;

    private final SequenceableLoader.Callback<ChunkSampleStream<T>> callback;

    private final EventDispatcher eventDispatcher;

    private final LoadErrorHandlingPolicy loadErrorHandlingPolicy;

    private final Loader loader;

    private final ChunkHolder nextChunkHolder;

    // Loaded/loading media chunks; readOnlyMediaChunks is an unmodifiable view of the same list.
    private final ArrayList<BaseMediaChunk> mediaChunks;

    private final List<BaseMediaChunk> readOnlyMediaChunks;

    // The sample queue for the primary track, plus one queue per embedded track.
    private final SampleQueue primarySampleQueue;

    private final SampleQueue[] embeddedSampleQueues;

    private final BaseMediaChunkOutput mediaChunkOutput;

    private Format primaryDownstreamTrackFormat;

    @Nullable
    private ReleaseCallback<T> releaseCallback;

    // Position to reset loading to; checked via isPendingReset().
    private long pendingResetPositionUs;

    private long lastSeekPositionUs;

    private int nextNotifyPrimaryFormatMediaChunkIndex;

    /* package */
    long decodeOnlyUntilPositionUs;

    /* package */
    boolean loadingFinished;

    /**
     * Constructs an instance.
     *
     * @param primaryTrackType The type of the primary track. One of the {@link C} {@code
     *     TRACK_TYPE_*} constants.
     * @param embeddedTrackTypes The types of any embedded tracks, or null.
     * @param embeddedTrackFormats The formats of the embedded tracks, or null.
     * @param chunkSource A {@link ChunkSource} from which chunks to load are obtained.
     * @param callback An {@link Callback} for the stream.
     * @param allocator An {@link Allocator} from which allocations can be obtained.
     * @param positionUs The position from which to start loading media.
     * @param minLoadableRetryCount The minimum number of times that the source should retry a load
     *     before propagating an error.
     * @param eventDispatcher A dispatcher to notify of events.
     * @deprecated Use {@link #ChunkSampleStream(int, int[], Format[], ChunkSource, Callback,
     *     Allocator, long, LoadErrorHandlingPolicy, EventDispatcher)} instead.
     */
    @Deprecated
    public ChunkSampleStream(int primaryTrackType, int[] embeddedTrackTypes, Format[] embeddedTrackFormats, T chunkSource, Callback<ChunkSampleStream<T>> callback, Allocator allocator, long positionUs, int minLoadableRetryCount, EventDispatcher eventDispatcher) {
        // Wraps the retry count in a DefaultLoadErrorHandlingPolicy and delegates to the primary
        // constructor.
        this(primaryTrackType, embeddedTrackTypes, embeddedTrackFormats, chunkSource, callback, allocator, positionUs, new DefaultLoadErrorHandlingPolicy(minLoadableRetryCount), eventDispatcher);
    }

    /**
     * Constructs an instance.
     *
     * @param primaryTrackType The type of the primary track. One of the {@link C} {@code
     *     TRACK_TYPE_*} constants.
     * @param embeddedTrackTypes The types of any embedded tracks, or null.
     * @param embeddedTrackFormats The formats of the embedded tracks, or null.
     * @param chunkSource A {@link ChunkSource} from which chunks to load are obtained.
     * @param callback An {@link Callback} for the stream.
     * @param allocator An {@link Allocator} from which allocations can be obtained.
     * @param positionUs The position from which to start loading media.
     * @param loadErrorHandlingPolicy The {@link LoadErrorHandlingPolicy}.
     * @param eventDispatcher A dispatcher to notify of events.
     */
    public ChunkSampleStream(int primaryTrackType, int[] embeddedTrackTypes, Format[] embeddedTrackFormats, T chunkSource, Callback<ChunkSampleStream<T>> callback, Allocator allocator, long positionUs, LoadErrorHandlingPolicy loadErrorHandlingPolicy, EventDispatcher eventDispatcher) {
        this.primaryTrackType = primaryTrackType;
        this.embeddedTrackTypes = embeddedTrackTypes;
        this.embeddedTrackFormats = embeddedTrackFormats;
        this.chunkSource = chunkSource;
        this.callback = callback;
        this.eventDispatcher = eventDispatcher;
        this.loadErrorHandlingPolicy = loadErrorHandlingPolicy;
        loader = new Loader("Loader:ChunkSampleStream");
        nextChunkHolder = new ChunkHolder();
        mediaChunks = new ArrayList<>();
        readOnlyMediaChunks = Collections.unmodifiableList(mediaChunks);
        int embeddedTrackCount = embeddedTrackTypes == null ? 0 : embeddedTrackTypes.length;
        embeddedSampleQueues = new SampleQueue[embeddedTrackCount];
        embeddedTracksSelected = new boolean[embeddedTrackCount];
        // Index 0 of trackTypes/sampleQueues is the primary track; embedded tracks follow.
        int[] trackTypes = new int[1 + embeddedTrackCount];
        SampleQueue[] sampleQueues = new SampleQueue[1 + embeddedTrackCount];
        primarySampleQueue = new SampleQueue(allocator);
        trackTypes[0] = primaryTrackType;
        sampleQueues[0] = primarySampleQueue;
        for (int i = 0; i < embeddedTrackCount; i++) {
            SampleQueue sampleQueue = new SampleQueue(allocator);
            embeddedSampleQueues[i] = sampleQueue;
            sampleQueues[i + 1] = sampleQueue;
            trackTypes[i + 1] = embeddedTrackTypes[i];
        }
        mediaChunkOutput = new BaseMediaChunkOutput(trackTypes, sampleQueues);
        pendingResetPositionUs = positionUs;
        lastSeekPositionUs = positionUs;
    }

    /**
     * Discards buffered media up to the specified position.
     *
     * @param positionUs The position to discard up to, in microseconds.
     * @param toKeyframe If true then for each track discards samples up to the keyframe before or at
     *     the specified position, rather than any sample before or at that position.
     */
    public void discardBuffer(long positionUs, boolean toKeyframe) {
        if (isPendingReset()) {
            return;
        }
        int firstSampleIndexBefore = primarySampleQueue.getFirstIndex();
        primarySampleQueue.discardTo(positionUs, toKeyframe, true);
        int firstSampleIndexAfter = primarySampleQueue.getFirstIndex();
        if (firstSampleIndexAfter > firstSampleIndexBefore) {
            // Samples were discarded from the primary queue; discard the embedded queues up to the
            // primary queue's new first timestamp so the tracks stay aligned.
            long discardToUs = primarySampleQueue.getFirstTimestampUs();
            for (int i = 0; i < embeddedSampleQueues.length; i++) {
                embeddedSampleQueues[i].discardTo(discardToUs, toKeyframe, embeddedTracksSelected[i]);
            }
        }
        discardDownstreamMediaChunks(firstSampleIndexAfter);
    }

    /**
     * Selects the embedded track, returning a new {@link EmbeddedSampleStream} from which the track's
     * samples can be consumed. {@link EmbeddedSampleStream#release()} must be called on the returned
     * stream when the track is no longer required, and before calling this method again to obtain
     * another stream for the same track.
     *
     * @param positionUs The current playback position in microseconds.
     * @param trackType The type of the embedded track to enable.
     * @return The {@link EmbeddedSampleStream} for the embedded track.
     */
    public EmbeddedSampleStream selectEmbeddedTrack(long positionUs, int trackType) {
        for (int i = 0; i < embeddedSampleQueues.length; i++) {
            if (embeddedTrackTypes[i] == trackType) {
                // The track must not already be selected.
                // Fixed: the identifier was corrupted to "replacedertions" in the source listing.
                Assertions.checkState(!embeddedTracksSelected[i]);
                embeddedTracksSelected[i] = true;
                embeddedSampleQueues[i].rewind();
                embeddedSampleQueues[i].advanceTo(positionUs, true, true);
                return new EmbeddedSampleStream(this, embeddedSampleQueues[i], i);
            }
        }
        // Should never happen.
        throw new IllegalStateException();
    }

    /** Returns the {@link ChunkSource} used by this stream. */
    public T getChunkSource() {
        return chunkSource;
    }

    /**
     * Returns an estimate of the position up to which data is buffered.
     *
     * @return An estimate of the absolute position in microseconds up to which data is buffered, or
     *     {@link C#TIME_END_OF_SOURCE} if the track is fully buffered.
     */
    @Override
    public long getBufferedPositionUs() {
        if (loadingFinished) {
            return C.TIME_END_OF_SOURCE;
        }
        if (isPendingReset()) {
            return pendingResetPositionUs;
        }
        long bufferedPositionUs = lastSeekPositionUs;
        // Prefer the end time of the most recent fully-loaded chunk, if there is one.
        BaseMediaChunk lastMediaChunk = getLastMediaChunk();
        BaseMediaChunk lastCompletedMediaChunk;
        if (lastMediaChunk.isLoadCompleted()) {
            lastCompletedMediaChunk = lastMediaChunk;
        } else if (mediaChunks.size() > 1) {
            lastCompletedMediaChunk = mediaChunks.get(mediaChunks.size() - 2);
        } else {
            lastCompletedMediaChunk = null;
        }
        if (lastCompletedMediaChunk != null) {
            bufferedPositionUs = Math.max(bufferedPositionUs, lastCompletedMediaChunk.endTimeUs);
        }
        return Math.max(bufferedPositionUs, primarySampleQueue.getLargestQueuedTimestampUs());
    }

    /**
     * Adjusts a seek position given the specified {@link SeekParameters}. Chunk boundaries are used
     * as sync points.
     *
     * @param positionUs The seek position in microseconds.
     * @param seekParameters Parameters that control how the seek is performed.
     * @return The adjusted seek position, in microseconds.
     */
    public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParameters) {
        // Delegate to the chunk source, which knows the chunk boundaries.
        return chunkSource.getAdjustedSeekPositionUs(positionUs, seekParameters);
    }

    /**
     * Seeks to the specified position in microseconds.
     *
     * <p>Attempts to seek within the buffered samples where possible; otherwise schedules a reset
     * and restarts loading from the new position.
     *
     * @param positionUs The seek position in microseconds.
     */
    public void seekToUs(long positionUs) {
        lastSeekPositionUs = positionUs;
        if (isPendingReset()) {
            // A reset is already pending. We only need to update its position.
            pendingResetPositionUs = positionUs;
            return;
        }
        // Detect whether the seek is to the start of a chunk that's at least partially buffered.
        BaseMediaChunk seekToMediaChunk = null;
        for (int i = 0; i < mediaChunks.size(); i++) {
            BaseMediaChunk mediaChunk = mediaChunks.get(i);
            long mediaChunkStartTimeUs = mediaChunk.startTimeUs;
            if (mediaChunkStartTimeUs == positionUs && mediaChunk.clippedStartTimeUs == C.TIME_UNSET) {
                seekToMediaChunk = mediaChunk;
                break;
            } else if (mediaChunkStartTimeUs > positionUs) {
                // We're not going to find a chunk with a matching start time.
                break;
            }
        }
        // See if we can seek inside the primary sample queue.
        boolean seekInsideBuffer;
        primarySampleQueue.rewind();
        if (seekToMediaChunk != null) {
            // When seeking to the start of a chunk we use the index of the first sample in the chunk
            // rather than the seek position. This ensures we seek to the keyframe at the start of the
            // chunk even if the sample timestamps are slightly offset from the chunk start times.
            seekInsideBuffer = primarySampleQueue.setReadPosition(seekToMediaChunk.getFirstSampleIndex(0));
            decodeOnlyUntilPositionUs = Long.MIN_VALUE;
        } else {
            seekInsideBuffer = primarySampleQueue.advanceTo(positionUs, /* toKeyframe= */
            true, /* allowTimeBeyondBuffer= */
            positionUs < getNextLoadPositionUs()) != SampleQueue.ADVANCE_FAILED;
            decodeOnlyUntilPositionUs = lastSeekPositionUs;
        }
        if (seekInsideBuffer) {
            // We can seek inside the buffer.
            nextNotifyPrimaryFormatMediaChunkIndex = primarySampleIndexToMediaChunkIndex(primarySampleQueue.getReadIndex(), /* minChunkIndex= */
            0);
            // Advance the embedded sample queues to the seek position.
            for (SampleQueue embeddedSampleQueue : embeddedSampleQueues) {
                embeddedSampleQueue.rewind();
                embeddedSampleQueue.advanceTo(positionUs, true, false);
            }
        } else {
            // We can't seek inside the buffer, and so need to reset.
            pendingResetPositionUs = positionUs;
            loadingFinished = false;
            mediaChunks.clear();
            nextNotifyPrimaryFormatMediaChunkIndex = 0;
            if (loader.isLoading()) {
                // Queues are reset once the in-flight load has been cancelled.
                loader.cancelLoading();
            } else {
                primarySampleQueue.reset();
                for (SampleQueue embeddedSampleQueue : embeddedSampleQueues) {
                    embeddedSampleQueue.reset();
                }
            }
        }
    }

    /**
     * Releases the stream without requesting a release notification.
     *
     * <p>Call this when the stream is no longer required. Equivalent to calling
     * {@link #release(ReleaseCallback)} with a null callback.
     */
    public void release() {
        release(/* callback= */ null);
    }

    /**
     * Releases the stream, optionally notifying a callback once the loader has been released.
     *
     * <p>Call this when the stream is no longer required. Either this method or {@link #release()}
     * can be used to release this stream.
     *
     * @param callback An optional callback to be called on the loading thread once the loader has
     *     been released.
     */
    public void release(@Nullable ReleaseCallback<T> callback) {
        releaseCallback = callback;
        // Synchronously drop as much buffered data as possible before releasing the loader.
        primarySampleQueue.discardToEnd();
        for (SampleQueue queue : embeddedSampleQueues) {
            queue.discardToEnd();
        }
        loader.release(this);
    }

    @Override
    public void onLoaderReleased() {
        // The loader has fully shut down: clear every sample queue, then notify the owner if a
        // release callback was registered.
        primarySampleQueue.reset();
        for (SampleQueue queue : embeddedSampleQueues) {
            queue.reset();
        }
        ReleaseCallback<T> callback = releaseCallback;
        if (callback != null) {
            callback.onSampleStreamReleased(this);
        }
    }

    // SampleStream implementation.
    @Override
    public boolean isReady() {
        // Ready once loading has finished, or when buffered samples exist and no seek reset is
        // pending.
        if (loadingFinished) {
            return true;
        }
        return !isPendingReset() && primarySampleQueue.hasNextSample();
    }

    @Override
    public void maybeThrowError() throws IOException {
        loader.maybeThrowError();
        // Only surface chunk-source errors while the loader is idle; an active load reports its
        // own failures through the loader.
        if (loader.isLoading()) {
            return;
        }
        chunkSource.maybeThrowError();
    }

    @Override
    public int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, boolean formatRequired) {
        // Nothing can be read while a seek reset is still pending.
        if (isPendingReset()) {
            return C.RESULT_NOTHING_READ;
        }
        maybeNotifyPrimaryTrackFormatChanged();
        return primarySampleQueue.read(
            formatHolder, buffer, formatRequired, loadingFinished, decodeOnlyUntilPositionUs);
    }

    @Override
    public int skipData(long positionUs) {
        if (isPendingReset()) {
            return 0;
        }
        // If loading has finished and the target lies beyond everything buffered, jump straight to
        // the end; otherwise advance to the requested position.
        boolean skipToEnd =
            loadingFinished && positionUs > primarySampleQueue.getLargestQueuedTimestampUs();
        int skipped;
        if (skipToEnd) {
            skipped = primarySampleQueue.advanceToEnd();
        } else {
            int advanceResult = primarySampleQueue.advanceTo(positionUs, true, true);
            skipped = advanceResult == SampleQueue.ADVANCE_FAILED ? 0 : advanceResult;
        }
        maybeNotifyPrimaryTrackFormatChanged();
        return skipped;
    }

    // Loader.Callback implementation.
    @Override
    public void onLoadCompleted(Chunk loadable, long elapsedRealtimeMs, long loadDurationMs) {
        chunkSource.onChunkLoadCompleted(loadable);
        // Report the completed load before requesting continuation.
        eventDispatcher.loadCompleted(
            loadable.dataSpec,
            loadable.getUri(),
            loadable.getResponseHeaders(),
            loadable.type,
            primaryTrackType,
            loadable.trackFormat,
            loadable.trackSelectionReason,
            loadable.trackSelectionData,
            loadable.startTimeUs,
            loadable.endTimeUs,
            elapsedRealtimeMs,
            loadDurationMs,
            loadable.bytesLoaded());
        callback.onContinueLoadingRequested(this);
    }

    @Override
    public void onLoadCanceled(Chunk loadable, long elapsedRealtimeMs, long loadDurationMs, boolean released) {
        eventDispatcher.loadCanceled(
            loadable.dataSpec,
            loadable.getUri(),
            loadable.getResponseHeaders(),
            loadable.type,
            primaryTrackType,
            loadable.trackFormat,
            loadable.trackSelectionReason,
            loadable.trackSelectionData,
            loadable.startTimeUs,
            loadable.endTimeUs,
            elapsedRealtimeMs,
            loadDurationMs,
            loadable.bytesLoaded());
        if (released) {
            return;
        }
        // The cancellation was not part of a release: clear the buffered data and ask the owner to
        // continue loading.
        primarySampleQueue.reset();
        for (SampleQueue queue : embeddedSampleQueues) {
            queue.reset();
        }
        callback.onContinueLoadingRequested(this);
    }

    @Override
    public LoadErrorAction onLoadError(Chunk loadable, long elapsedRealtimeMs, long loadDurationMs, IOException error, int errorCount) {
        long bytesLoaded = loadable.bytesLoaded();
        boolean isMediaChunk = isMediaChunk(loadable);
        int lastChunkIndex = mediaChunks.size() - 1;
        // The load can only be canceled if no data has been loaded yet, or if the failed chunk is
        // not a media chunk, or if nothing has been read from the chunk being loaded.
        boolean cancelable = bytesLoaded == 0 || !isMediaChunk || !haveReadFromMediaChunk(lastChunkIndex);
        long blacklistDurationMs = cancelable ? loadErrorHandlingPolicy.getBlacklistDurationMsFor(loadable.type, loadDurationMs, error, errorCount) : C.TIME_UNSET;
        LoadErrorAction loadErrorAction = null;
        if (chunkSource.onChunkLoadError(loadable, cancelable, error, blacklistDurationMs)) {
            if (cancelable) {
                loadErrorAction = Loader.DONT_RETRY;
                if (isMediaChunk) {
                    BaseMediaChunk removed = discardUpstreamMediaChunksFromIndex(lastChunkIndex);
                    // The failed chunk must be the last one in the queue. (Identifier restored from
                    // scrape corruption: "replacedertions" -> Assertions.)
                    Assertions.checkState(removed == loadable);
                    if (mediaChunks.isEmpty()) {
                        pendingResetPositionUs = lastSeekPositionUs;
                    }
                }
            } else {
                Log.w(TAG, "Ignoring attempt to cancel non-cancelable load.");
            }
        }
        if (loadErrorAction == null) {
            // The load was not cancelled. Either the load must be retried or the error propagated.
            long retryDelayMs = loadErrorHandlingPolicy.getRetryDelayMsFor(loadable.type, loadDurationMs, error, errorCount);
            loadErrorAction = retryDelayMs != C.TIME_UNSET
                ? Loader.createRetryAction(/* resetErrorCount= */ false, retryDelayMs)
                : Loader.DONT_RETRY_FATAL;
        }
        boolean canceled = !loadErrorAction.isRetry();
        eventDispatcher.loadError(loadable.dataSpec, loadable.getUri(), loadable.getResponseHeaders(), loadable.type, primaryTrackType, loadable.trackFormat, loadable.trackSelectionReason, loadable.trackSelectionData, loadable.startTimeUs, loadable.endTimeUs, elapsedRealtimeMs, loadDurationMs, bytesLoaded, error, canceled);
        if (canceled) {
            callback.onContinueLoadingRequested(this);
        }
        return loadErrorAction;
    }

    // SequenceableLoader implementation
    @Override
    public boolean continueLoading(long positionUs) {
        if (loadingFinished || loader.isLoading()) {
            return false;
        }
        boolean pendingReset = isPendingReset();
        // When a reset is pending there's no usable queue and loading restarts from the reset
        // position; otherwise continue from the end of the last queued chunk.
        List<BaseMediaChunk> chunkQueue =
            pendingReset ? Collections.<BaseMediaChunk>emptyList() : readOnlyMediaChunks;
        long loadPositionUs = pendingReset ? pendingResetPositionUs : getLastMediaChunk().endTimeUs;
        chunkSource.getNextChunk(positionUs, loadPositionUs, chunkQueue, nextChunkHolder);
        boolean endOfStream = nextChunkHolder.endOfStream;
        Chunk loadable = nextChunkHolder.chunk;
        nextChunkHolder.clear();
        if (endOfStream) {
            pendingResetPositionUs = C.TIME_UNSET;
            loadingFinished = true;
            return true;
        }
        if (loadable == null) {
            return false;
        }
        if (isMediaChunk(loadable)) {
            BaseMediaChunk mediaChunk = (BaseMediaChunk) loadable;
            if (pendingReset) {
                // Only enable setting of the decode only flag if we're not resetting to a chunk
                // boundary.
                boolean resetToChunkStart = mediaChunk.startTimeUs == pendingResetPositionUs;
                decodeOnlyUntilPositionUs = resetToChunkStart ? Long.MIN_VALUE : pendingResetPositionUs;
                pendingResetPositionUs = C.TIME_UNSET;
            }
            mediaChunk.init(mediaChunkOutput);
            mediaChunks.add(mediaChunk);
        }
        long elapsedRealtimeMs =
            loader.startLoading(loadable, this, loadErrorHandlingPolicy.getMinimumLoadableRetryCount(loadable.type));
        eventDispatcher.loadStarted(
            loadable.dataSpec,
            loadable.type,
            primaryTrackType,
            loadable.trackFormat,
            loadable.trackSelectionReason,
            loadable.trackSelectionData,
            loadable.startTimeUs,
            loadable.endTimeUs,
            elapsedRealtimeMs);
        return true;
    }

    @Override
    public long getNextLoadPositionUs() {
        // A pending reset wins; otherwise report end-of-source when done, or the end of the last
        // queued chunk.
        if (isPendingReset()) {
            return pendingResetPositionUs;
        }
        return loadingFinished ? C.TIME_END_OF_SOURCE : getLastMediaChunk().endTimeUs;
    }

    @Override
    public void reevaluateBuffer(long positionUs) {
        if (loader.isLoading() || isPendingReset()) {
            return;
        }
        int currentQueueSize = mediaChunks.size();
        int preferredQueueSize = chunkSource.getPreferredQueueSize(positionUs, readOnlyMediaChunks);
        if (currentQueueSize <= preferredQueueSize) {
            return;
        }
        // Find the first chunk at or beyond the preferred size from which nothing has been read.
        int newQueueSize = currentQueueSize;
        for (int index = preferredQueueSize; index < currentQueueSize; index++) {
            if (!haveReadFromMediaChunk(index)) {
                newQueueSize = index;
                break;
            }
        }
        if (newQueueSize == currentQueueSize) {
            // Every candidate chunk has been read from, so nothing can be discarded.
            return;
        }
        long endTimeUs = getLastMediaChunk().endTimeUs;
        BaseMediaChunk firstRemovedChunk = discardUpstreamMediaChunksFromIndex(newQueueSize);
        if (mediaChunks.isEmpty()) {
            pendingResetPositionUs = lastSeekPositionUs;
        }
        loadingFinished = false;
        eventDispatcher.upstreamDiscarded(primaryTrackType, firstRemovedChunk.startTimeUs, endTimeUs);
    }

    // Internal methods
    /** Returns whether {@code chunk} carries media samples, i.e. is a {@link BaseMediaChunk}. */
    private boolean isMediaChunk(Chunk chunk) {
        return (chunk instanceof BaseMediaChunk);
    }

    /**
     * Returns whether samples have been read from the media chunk at the given index, from either
     * the primary sample queue or any of the embedded sample queues.
     */
    private boolean haveReadFromMediaChunk(int mediaChunkIndex) {
        BaseMediaChunk mediaChunk = mediaChunks.get(mediaChunkIndex);
        if (primarySampleQueue.getReadIndex() > mediaChunk.getFirstSampleIndex(0)) {
            return true;
        }
        for (int queueIndex = 0; queueIndex < embeddedSampleQueues.length; queueIndex++) {
            // Embedded queue i corresponds to track index i + 1 of the chunk.
            if (embeddedSampleQueues[queueIndex].getReadIndex() > mediaChunk.getFirstSampleIndex(queueIndex + 1)) {
                return true;
            }
        }
        return false;
    }

    /* package */
    boolean isPendingReset() {
        // A pending reset is signaled by the reset position holding a real (non-unset) value.
        long resetPositionUs = pendingResetPositionUs;
        return resetPositionUs != C.TIME_UNSET;
    }

    /** Discards media chunks whose samples have all been read, up to the given sample index. */
    private void discardDownstreamMediaChunks(int discardToSampleIndex) {
        int discardToChunkIndex =
            primarySampleIndexToMediaChunkIndex(discardToSampleIndex, /* minChunkIndex= */ 0);
        // Never discard a chunk whose primary format change hasn't been reported yet.
        discardToChunkIndex = Math.min(discardToChunkIndex, nextNotifyPrimaryFormatMediaChunkIndex);
        if (discardToChunkIndex <= 0) {
            return;
        }
        Util.removeRange(mediaChunks, /* fromIndex= */ 0, /* toIndex= */ discardToChunkIndex);
        nextNotifyPrimaryFormatMediaChunkIndex -= discardToChunkIndex;
    }

    /** Reports format changes for every chunk the read position has reached but not yet reported. */
    private void maybeNotifyPrimaryTrackFormatChanged() {
        int readSampleIndex = primarySampleQueue.getReadIndex();
        int notifyToChunkIndex = primarySampleIndexToMediaChunkIndex(
            readSampleIndex, /* minChunkIndex= */ nextNotifyPrimaryFormatMediaChunkIndex - 1);
        while (nextNotifyPrimaryFormatMediaChunkIndex <= notifyToChunkIndex) {
            maybeNotifyPrimaryTrackFormatChanged(nextNotifyPrimaryFormatMediaChunkIndex++);
        }
    }

    /** Reports a downstream format change for the chunk at {@code mediaChunkReadIndex}, if any. */
    private void maybeNotifyPrimaryTrackFormatChanged(int mediaChunkReadIndex) {
        BaseMediaChunk chunk = mediaChunks.get(mediaChunkReadIndex);
        Format trackFormat = chunk.trackFormat;
        // Only dispatch an event when the format differs from the last one reported downstream.
        if (!trackFormat.equals(primaryDownstreamTrackFormat)) {
            eventDispatcher.downstreamFormatChanged(
                primaryTrackType,
                trackFormat,
                chunk.trackSelectionReason,
                chunk.trackSelectionData,
                chunk.startTimeUs);
        }
        primaryDownstreamTrackFormat = trackFormat;
    }

    /**
     * Maps a primary sample index to the index of the media chunk that contains it.
     *
     * @param primarySampleIndex The primary sample index to look up.
     * @param minChunkIndex A minimum chunk index from which to start searching, or -1 if no hint
     *     can be provided.
     * @return The index of the media chunk containing the sample, or -1 if the list of media
     *     chunks is empty, or {@code minChunkIndex} if the sample precedes the first chunk in the
     *     search (i.e. the chunk at {@code minChunkIndex}, or at index 0 if {@code minChunkIndex}
     *     is -1).
     */
    private int primarySampleIndexToMediaChunkIndex(int primarySampleIndex, int minChunkIndex) {
        int chunkIndex = minChunkIndex + 1;
        while (chunkIndex < mediaChunks.size()) {
            if (mediaChunks.get(chunkIndex).getFirstSampleIndex(0) > primarySampleIndex) {
                // The previous chunk is the last one starting at or before the sample.
                return chunkIndex - 1;
            }
            chunkIndex++;
        }
        return mediaChunks.size() - 1;
    }

    /** Returns the most recently appended media chunk. Must not be called when the list is empty. */
    private BaseMediaChunk getLastMediaChunk() {
        int lastIndex = mediaChunks.size() - 1;
        return mediaChunks.get(lastIndex);
    }

    /**
     * Discards upstream media chunks from {@code chunkIndex} onwards, together with the
     * corresponding samples in the primary and embedded sample queues.
     *
     * @param chunkIndex The index of the first chunk to discard.
     * @return The chunk that was at the given index.
     */
    private BaseMediaChunk discardUpstreamMediaChunksFromIndex(int chunkIndex) {
        BaseMediaChunk firstRemovedChunk = mediaChunks.get(chunkIndex);
        Util.removeRange(mediaChunks, /* fromIndex= */ chunkIndex, /* toIndex= */ mediaChunks.size());
        // Don't allow the notify index to point beyond the shortened list.
        nextNotifyPrimaryFormatMediaChunkIndex =
            Math.max(nextNotifyPrimaryFormatMediaChunkIndex, mediaChunks.size());
        primarySampleQueue.discardUpstreamSamples(firstRemovedChunk.getFirstSampleIndex(0));
        for (int queueIndex = 0; queueIndex < embeddedSampleQueues.length; queueIndex++) {
            // Embedded queue i corresponds to track index i + 1 of the chunk.
            embeddedSampleQueues[queueIndex]
                .discardUpstreamSamples(firstRemovedChunk.getFirstSampleIndex(queueIndex + 1));
        }
        return firstRemovedChunk;
    }

    /**
     * A {@link SampleStream} embedded in a {@link ChunkSampleStream}.
     */
    public final clreplaced EmbeddedSampleStream implements SampleStream {

        public final ChunkSampleStream<T> parent;

        private final SampleQueue sampleQueue;

        private final int index;

        private boolean notifiedDownstreamFormat;

        public EmbeddedSampleStream(ChunkSampleStream<T> parent, SampleQueue sampleQueue, int index) {
            this.parent = parent;
            this.sampleQueue = sampleQueue;
            this.index = index;
        }

        @Override
        public boolean isReady() {
            return loadingFinished || (!isPendingReset() && sampleQueue.hasNextSample());
        }

        @Override
        public int skipData(long positionUs) {
            if (isPendingReset()) {
                return 0;
            }
            maybeNotifyDownstreamFormat();
            int skipCount;
            if (loadingFinished && positionUs > sampleQueue.getLargestQueuedTimestampUs()) {
                skipCount = sampleQueue.advanceToEnd();
            } else {
                skipCount = sampleQueue.advanceTo(positionUs, true, true);
                if (skipCount == SampleQueue.ADVANCE_FAILED) {
                    skipCount = 0;
                }
            }
            return skipCount;
        }

        @Override
        public void maybeThrowError() throws IOException {
        // Do nothing. Errors will be thrown from the primary stream.
        }

        @Override
        public int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, boolean formatRequired) {
            if (isPendingReset()) {
                return C.RESULT_NOTHING_READ;
            }
            maybeNotifyDownstreamFormat();
            return sampleQueue.read(formatHolder, buffer, formatRequired, loadingFinished, decodeOnlyUntilPositionUs);
        }

        public void release() {
            replacedertions.checkState(embeddedTracksSelected[index]);
            embeddedTracksSelected[index] = false;
        }

        private void maybeNotifyDownstreamFormat() {
            if (!notifiedDownstreamFormat) {
                eventDispatcher.downstreamFormatChanged(embeddedTrackTypes[index], embeddedTrackFormats[index], C.SELECTION_REASON_UNKNOWN, /* trackSelectionData= */
                null, lastSeekPositionUs);
                notifiedDownstreamFormat = true;
            }
        }
    }
}

19 Source : ChunkExtractorWrapper.java
with GNU General Public License v2.0
from warren-bank

/**
 * An {@link Extractor} wrapper for loading chunks that contain a single primary track, and possibly
 * additional embedded tracks.
 * <p>
 * The wrapper allows switching of the {@link TrackOutput}s that receive parsed data.
 */
public final clreplaced ChunkExtractorWrapper implements ExtractorOutput {

    /**
     * Provides {@link TrackOutput} instances to be written to by the wrapper.
     */
    public interface TrackOutputProvider {

        /**
         * Called to get the {@link TrackOutput} for a specific track.
         * <p>
         * The same {@link TrackOutput} is returned if multiple calls are made with the same {@code id}.
         *
         * @param id A track identifier.
         * @param type The type of the track. Typically one of the
         *     {@link com.google.android.exoplayer2.C} {@code TRACK_TYPE_*} constants.
         * @return The {@link TrackOutput} for the given track identifier.
         */
        TrackOutput track(int id, int type);
    }

    public final Extractor extractor;

    private final int primaryTrackType;

    private final Format primaryTrackManifestFormat;

    private final SparseArray<BindingTrackOutput> bindingTrackOutputs;

    private boolean extractorInitialized;

    private TrackOutputProvider trackOutputProvider;

    private long endTimeUs;

    private SeekMap seekMap;

    private Format[] sampleFormats;

    /**
     * @param extractor The extractor to wrap.
     * @param primaryTrackType The type of the primary track. Typically one of the
     *     {@link com.google.android.exoplayer2.C} {@code TRACK_TYPE_*} constants.
     * @param primaryTrackManifestFormat A manifest defined {@link Format} whose data should be merged
     *     into any sample {@link Format} output from the {@link Extractor} for the primary track.
     */
    public ChunkExtractorWrapper(Extractor extractor, int primaryTrackType, Format primaryTrackManifestFormat) {
        this.extractor = extractor;
        this.primaryTrackType = primaryTrackType;
        this.primaryTrackManifestFormat = primaryTrackManifestFormat;
        bindingTrackOutputs = new SparseArray<>();
    }

    /**
     * Returns the {@link SeekMap} most recently output by the extractor, or null.
     */
    public SeekMap getSeekMap() {
        return seekMap;
    }

    /**
     * Returns the sample {@link Format}s most recently output by the extractor, or null.
     */
    public Format[] getSampleFormats() {
        return sampleFormats;
    }

    /**
     * Initializes the wrapper to output to {@link TrackOutput}s provided by the specified {@link
     * TrackOutputProvider}, and configures the extractor to receive data from a new chunk.
     *
     * @param trackOutputProvider The provider of {@link TrackOutput}s that will receive sample data.
     * @param startTimeUs The start position in the new chunk, or {@link C#TIME_UNSET} to output
     *     samples from the start of the chunk.
     * @param endTimeUs The end position in the new chunk, or {@link C#TIME_UNSET} to output samples
     *     to the end of the chunk.
     */
    public void init(@Nullable TrackOutputProvider trackOutputProvider, long startTimeUs, long endTimeUs) {
        this.trackOutputProvider = trackOutputProvider;
        this.endTimeUs = endTimeUs;
        if (!extractorInitialized) {
            extractor.init(this);
            if (startTimeUs != C.TIME_UNSET) {
                extractor.seek(/* position= */
                0, startTimeUs);
            }
            extractorInitialized = true;
        } else {
            extractor.seek(/* position= */
            0, startTimeUs == C.TIME_UNSET ? 0 : startTimeUs);
            for (int i = 0; i < bindingTrackOutputs.size(); i++) {
                bindingTrackOutputs.valueAt(i).bind(trackOutputProvider, endTimeUs);
            }
        }
    }

    // ExtractorOutput implementation.
    @Override
    public TrackOutput track(int id, int type) {
        BindingTrackOutput bindingTrackOutput = bindingTrackOutputs.get(id);
        if (bindingTrackOutput == null) {
            // replacedert that if we're seeing a new track we have not seen endTracks.
            replacedertions.checkState(sampleFormats == null);
            // TODO: Manifest formats for embedded tracks should also be preplaceded here.
            bindingTrackOutput = new BindingTrackOutput(id, type, type == primaryTrackType ? primaryTrackManifestFormat : null);
            bindingTrackOutput.bind(trackOutputProvider, endTimeUs);
            bindingTrackOutputs.put(id, bindingTrackOutput);
        }
        return bindingTrackOutput;
    }

    @Override
    public void endTracks() {
        Format[] sampleFormats = new Format[bindingTrackOutputs.size()];
        for (int i = 0; i < bindingTrackOutputs.size(); i++) {
            sampleFormats[i] = bindingTrackOutputs.valueAt(i).sampleFormat;
        }
        this.sampleFormats = sampleFormats;
    }

    @Override
    public void seekMap(SeekMap seekMap) {
        this.seekMap = seekMap;
    }

    // Internal logic.
    private static final clreplaced BindingTrackOutput implements TrackOutput {

        private final int id;

        private final int type;

        private final Format manifestFormat;

        private final DummyTrackOutput dummyTrackOutput;

        public Format sampleFormat;

        private TrackOutput trackOutput;

        private long endTimeUs;

        public BindingTrackOutput(int id, int type, Format manifestFormat) {
            this.id = id;
            this.type = type;
            this.manifestFormat = manifestFormat;
            dummyTrackOutput = new DummyTrackOutput();
        }

        public void bind(TrackOutputProvider trackOutputProvider, long endTimeUs) {
            if (trackOutputProvider == null) {
                trackOutput = dummyTrackOutput;
                return;
            }
            this.endTimeUs = endTimeUs;
            trackOutput = trackOutputProvider.track(id, type);
            if (sampleFormat != null) {
                trackOutput.format(sampleFormat);
            }
        }

        @Override
        public void format(Format format) {
            sampleFormat = manifestFormat != null ? format.copyWithManifestFormatInfo(manifestFormat) : format;
            trackOutput.format(sampleFormat);
        }

        @Override
        public int sampleData(ExtractorInput input, int length, boolean allowEndOfInput) throws IOException, InterruptedException {
            return trackOutput.sampleData(input, length, allowEndOfInput);
        }

        @Override
        public void sampleData(ParsableByteArray data, int length) {
            trackOutput.sampleData(data, length);
        }

        @Override
        public void sampleMetadata(long timeUs, @C.BufferFlags int flags, int size, int offset, CryptoData cryptoData) {
            if (endTimeUs != C.TIME_UNSET && timeUs >= endTimeUs) {
                trackOutput = dummyTrackOutput;
            }
            trackOutput.sampleMetadata(timeUs, flags, size, offset, cryptoData);
        }
    }
}

19 Source : ChunkExtractorWrapper.java
with GNU General Public License v2.0
from warren-bank

@Override
public void endTracks() {
    // Snapshot the most recent sample format from every bound track output.
    int trackCount = bindingTrackOutputs.size();
    Format[] formats = new Format[trackCount];
    for (int trackIndex = 0; trackIndex < trackCount; trackIndex++) {
        formats[trackIndex] = bindingTrackOutputs.valueAt(trackIndex).sampleFormat;
    }
    this.sampleFormats = formats;
}

19 Source : Chunk.java
with GNU General Public License v2.0
from warren-bank

/**
 * An abstract base clreplaced for {@link Loadable} implementations that load chunks of data required
 * for the playback of streams.
 */
public abstract clreplaced Chunk implements Loadable {

    /**
     * The {@link DataSpec} that defines the data to be loaded.
     */
    public final DataSpec dataSpec;

    /**
     * The type of the chunk. One of the {@code DATA_TYPE_*} constants defined in {@link C}. For
     * reporting only.
     */
    public final int type;

    /**
     * The format of the track to which this chunk belongs, or null if the chunk does not belong to
     * a track.
     */
    public final Format trackFormat;

    /**
     * One of the {@link C} {@code SELECTION_REASON_*} constants if the chunk belongs to a track.
     * {@link C#SELECTION_REASON_UNKNOWN} if the chunk does not belong to a track.
     */
    public final int trackSelectionReason;

    /**
     * Optional data replacedociated with the selection of the track to which this chunk belongs. Null if
     * the chunk does not belong to a track.
     */
    @Nullable
    public final Object trackSelectionData;

    /**
     * The start time of the media contained by the chunk, or {@link C#TIME_UNSET} if the data
     * being loaded does not contain media samples.
     */
    public final long startTimeUs;

    /**
     * The end time of the media contained by the chunk, or {@link C#TIME_UNSET} if the data being
     * loaded does not contain media samples.
     */
    public final long endTimeUs;

    protected final StatsDataSource dataSource;

    /**
     * @param dataSource The source from which the data should be loaded.
     * @param dataSpec Defines the data to be loaded.
     * @param type See {@link #type}.
     * @param trackFormat See {@link #trackFormat}.
     * @param trackSelectionReason See {@link #trackSelectionReason}.
     * @param trackSelectionData See {@link #trackSelectionData}.
     * @param startTimeUs See {@link #startTimeUs}.
     * @param endTimeUs See {@link #endTimeUs}.
     */
    public Chunk(DataSource dataSource, DataSpec dataSpec, int type, Format trackFormat, int trackSelectionReason, @Nullable Object trackSelectionData, long startTimeUs, long endTimeUs) {
        this.dataSource = new StatsDataSource(dataSource);
        this.dataSpec = replacedertions.checkNotNull(dataSpec);
        this.type = type;
        this.trackFormat = trackFormat;
        this.trackSelectionReason = trackSelectionReason;
        this.trackSelectionData = trackSelectionData;
        this.startTimeUs = startTimeUs;
        this.endTimeUs = endTimeUs;
    }

    /**
     * Returns the duration of the chunk in microseconds.
     */
    public final long getDurationUs() {
        return endTimeUs - startTimeUs;
    }

    /**
     * Returns the number of bytes that have been loaded. Must only be called after the load
     * completed, failed, or was canceled.
     */
    public final long bytesLoaded() {
        return dataSource.getBytesRead();
    }

    /**
     * Returns the {@link Uri} replacedociated with the last {@link DataSource#open} call. If redirection
     * occurred, this is the redirected uri. Must only be called after the load completed, failed, or
     * was canceled.
     *
     * @see DataSource#getUri()
     */
    public final Uri getUri() {
        return dataSource.getLastOpenedUri();
    }

    /**
     * Returns the response headers replacedociated with the last {@link DataSource#open} call. Must only
     * be called after the load completed, failed, or was canceled.
     *
     * @see DataSource#getResponseHeaders()
     */
    public final Map<String, List<String>> getResponseHeaders() {
        return dataSource.getLastResponseHeaders();
    }
}

19 Source : MetadataRenderer.java
with GNU General Public License v2.0
from warren-bank

@Override
public int supportsFormat(Format format) {
    // An unsupported sample type takes precedence over DRM support.
    if (!decoderFactory.supportsFormat(format)) {
        return FORMAT_UNSUPPORTED_TYPE;
    }
    return supportsFormatDrm(null, format.drmInitData) ? FORMAT_HANDLED : FORMAT_UNSUPPORTED_DRM;
}

19 Source : MetadataRenderer.java
with GNU General Public License v2.0
from warren-bank

@Override
protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException {
    // A metadata stream carries a single format; build a decoder for it.
    Format streamFormat = formats[0];
    decoder = decoderFactory.createDecoder(streamFormat);
}

19 Source : MediaCodecInfo.java
with GNU General Public License v2.0
from warren-bank

/**
 * Returns whether the decoder may support decoding the given {@code format}.
 *
 * @param format The input media format.
 * @return Whether the decoder may support decoding the given {@code format}.
 * @throws MediaCodecUtil.DecoderQueryException Thrown if an error occurs while querying decoders.
 */
public boolean isFormatSupported(Format format) throws MediaCodecUtil.DecoderQueryException {
    if (!isCodecSupported(format.codecs)) {
        return false;
    }
    if (!isVideo) {
        // Audio: below API 21 capability queries are unavailable, so assume support. From API 21
        // check the sample rate and channel count where they are set.
        if (Util.SDK_INT < 21) {
            return true;
        }
        boolean sampleRateOk =
            format.sampleRate == Format.NO_VALUE || isAudioSampleRateSupportedV21(format.sampleRate);
        boolean channelCountOk =
            format.channelCount == Format.NO_VALUE || isAudioChannelCountSupportedV21(format.channelCount);
        return sampleRateOk && channelCountOk;
    }
    // Video: assume support when the size is unknown.
    if (format.width <= 0 || format.height <= 0) {
        return true;
    }
    if (Util.SDK_INT >= 21) {
        return isVideoSizeAndRateSupportedV21(format.width, format.height, format.frameRate);
    }
    // Legacy path: compare against the maximum decodable H.264 frame size.
    boolean supported = format.width * format.height <= MediaCodecUtil.maxH264DecodableFrameSize();
    if (!supported) {
        logNoSupport("legacyFrameSize, " + format.width + "x" + format.height);
    }
    return supported;
}

19 Source : DtsReader.java
with GNU General Public License v2.0
from warren-bank

/**
 * Parses a continuous DTS byte stream and extracts individual samples.
 */
public final clreplaced DtsReader implements ElementaryStreamReader {

    private static final int STATE_FINDING_SYNC = 0;

    private static final int STATE_READING_HEADER = 1;

    private static final int STATE_READING_SAMPLE = 2;

    private static final int HEADER_SIZE = 18;

    private final ParsableByteArray headerScratchBytes;

    private final String language;

    private String formatId;

    private TrackOutput output;

    private int state;

    private int bytesRead;

    // Used to find the header.
    private int syncBytes;

    // Used when parsing the header.
    private long sampleDurationUs;

    private Format format;

    private int sampleSize;

    // Used when reading the samples.
    private long timeUs;

    /**
     * Constructs a new reader for DTS elementary streams.
     *
     * @param language Track language.
     */
    public DtsReader(String language) {
        headerScratchBytes = new ParsableByteArray(new byte[HEADER_SIZE]);
        state = STATE_FINDING_SYNC;
        this.language = language;
    }

    @Override
    public void seek() {
        state = STATE_FINDING_SYNC;
        bytesRead = 0;
        syncBytes = 0;
    }

    @Override
    public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGenerator) {
        idGenerator.generateNewId();
        formatId = idGenerator.getFormatId();
        output = extractorOutput.track(idGenerator.getTrackId(), C.TRACK_TYPE_AUDIO);
    }

    @Override
    public void packetStarted(long pesTimeUs, boolean dataAlignmentIndicator) {
        timeUs = pesTimeUs;
    }

    @Override
    public void consume(ParsableByteArray data) {
        while (data.bytesLeft() > 0) {
            switch(state) {
                case STATE_FINDING_SYNC:
                    if (skipToNextSync(data)) {
                        state = STATE_READING_HEADER;
                    }
                    break;
                case STATE_READING_HEADER:
                    if (continueRead(data, headerScratchBytes.data, HEADER_SIZE)) {
                        parseHeader();
                        headerScratchBytes.setPosition(0);
                        output.sampleData(headerScratchBytes, HEADER_SIZE);
                        state = STATE_READING_SAMPLE;
                    }
                    break;
                case STATE_READING_SAMPLE:
                    int bytesToRead = Math.min(data.bytesLeft(), sampleSize - bytesRead);
                    output.sampleData(data, bytesToRead);
                    bytesRead += bytesToRead;
                    if (bytesRead == sampleSize) {
                        output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null);
                        timeUs += sampleDurationUs;
                        state = STATE_FINDING_SYNC;
                    }
                    break;
                default:
                    throw new IllegalStateException();
            }
        }
    }

    @Override
    public void packetFinished() {
    // Do nothing.
    }

    /**
     * Continues a read from the provided {@code source} into a given {@code target}. It's replacedumed
     * that the data should be written into {@code target} starting from an offset of zero.
     *
     * @param source The source from which to read.
     * @param target The target into which data is to be read.
     * @param targetLength The target length of the read.
     * @return Whether the target length was reached.
     */
    private boolean continueRead(ParsableByteArray source, byte[] target, int targetLength) {
        int bytesToRead = Math.min(source.bytesLeft(), targetLength - bytesRead);
        source.readBytes(target, bytesRead, bytesToRead);
        bytesRead += bytesToRead;
        return bytesRead == targetLength;
    }

    /**
     * Locates the next SYNC value in the buffer, advancing the position to the byte that immediately
     * follows it. If SYNC was not located, the position is advanced to the limit.
     *
     * @param pesBuffer The buffer whose position should be advanced.
     * @return Whether SYNC was found.
     */
    private boolean skipToNextSync(ParsableByteArray pesBuffer) {
        while (pesBuffer.bytesLeft() > 0) {
            syncBytes <<= 8;
            syncBytes |= pesBuffer.readUnsignedByte();
            if (DtsUtil.isSyncWord(syncBytes)) {
                headerScratchBytes.data[0] = (byte) ((syncBytes >> 24) & 0xFF);
                headerScratchBytes.data[1] = (byte) ((syncBytes >> 16) & 0xFF);
                headerScratchBytes.data[2] = (byte) ((syncBytes >> 8) & 0xFF);
                headerScratchBytes.data[3] = (byte) (syncBytes & 0xFF);
                bytesRead = 4;
                syncBytes = 0;
                return true;
            }
        }
        return false;
    }

    /**
     * Parses the sample header.
     */
    private void parseHeader() {
        byte[] frameData = headerScratchBytes.data;
        if (format == null) {
            format = DtsUtil.parseDtsFormat(frameData, formatId, language, null);
            output.format(format);
        }
        sampleSize = DtsUtil.getDtsFrameSize(frameData);
        // In this clreplaced a sample is an access unit (frame in DTS), but the format's sample rate
        // specifies the number of PCM audio samples per second.
        sampleDurationUs = (int) (C.MICROS_PER_SECOND * DtsUtil.parseDtsAudioSampleCount(frameData) / format.sampleRate);
    }
}

19 Source : RawCcExtractor.java
with GNU General Public License v2.0
from warren-bank

/**
 * Extracts data from the RawCC container format.
 */
public final clreplaced RawCcExtractor implements Extractor {

    private static final int SCRATCH_SIZE = 9;

    private static final int HEADER_SIZE = 8;

    private static final int HEADER_ID = Util.getIntegerCodeForString("RCC\u0001");

    private static final int TIMESTAMP_SIZE_V0 = 4;

    private static final int TIMESTAMP_SIZE_V1 = 8;

    // Parser states.
    private static final int STATE_READING_HEADER = 0;

    private static final int STATE_READING_TIMESTAMP_AND_COUNT = 1;

    private static final int STATE_READING_SAMPLES = 2;

    private final Format format;

    private final ParsableByteArray dataScratch;

    private TrackOutput trackOutput;

    private int parserState;

    private int version;

    private long timestampUs;

    private int remainingSampleCount;

    private int sampleBytesWritten;

    public RawCcExtractor(Format format) {
        this.format = format;
        dataScratch = new ParsableByteArray(SCRATCH_SIZE);
        parserState = STATE_READING_HEADER;
    }

    @Override
    public void init(ExtractorOutput output) {
        output.seekMap(new SeekMap.Unseekable(C.TIME_UNSET));
        trackOutput = output.track(0, C.TRACK_TYPE_TEXT);
        output.endTracks();
        trackOutput.format(format);
    }

    @Override
    public boolean sniff(ExtractorInput input) throws IOException, InterruptedException {
        dataScratch.reset();
        input.peekFully(dataScratch.data, 0, HEADER_SIZE);
        return dataScratch.readInt() == HEADER_ID;
    }

    @Override
    public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException, InterruptedException {
        while (true) {
            switch(parserState) {
                case STATE_READING_HEADER:
                    if (parseHeader(input)) {
                        parserState = STATE_READING_TIMESTAMP_AND_COUNT;
                    } else {
                        return RESULT_END_OF_INPUT;
                    }
                    break;
                case STATE_READING_TIMESTAMP_AND_COUNT:
                    if (parseTimestampAndSampleCount(input)) {
                        parserState = STATE_READING_SAMPLES;
                    } else {
                        parserState = STATE_READING_HEADER;
                        return RESULT_END_OF_INPUT;
                    }
                    break;
                case STATE_READING_SAMPLES:
                    parseSamples(input);
                    parserState = STATE_READING_TIMESTAMP_AND_COUNT;
                    return RESULT_CONTINUE;
                default:
                    throw new IllegalStateException();
            }
        }
    }

    @Override
    public void seek(long position, long timeUs) {
        parserState = STATE_READING_HEADER;
    }

    @Override
    public void release() {
    // Do nothing
    }

    private boolean parseHeader(ExtractorInput input) throws IOException, InterruptedException {
        dataScratch.reset();
        if (input.readFully(dataScratch.data, 0, HEADER_SIZE, true)) {
            if (dataScratch.readInt() != HEADER_ID) {
                throw new IOException("Input not RawCC");
            }
            version = dataScratch.readUnsignedByte();
            // no versions use the flag fields yet
            return true;
        } else {
            return false;
        }
    }

    private boolean parseTimestampAndSampleCount(ExtractorInput input) throws IOException, InterruptedException {
        dataScratch.reset();
        if (version == 0) {
            if (!input.readFully(dataScratch.data, 0, TIMESTAMP_SIZE_V0 + 1, true)) {
                return false;
            }
            // version 0 timestamps are 45kHz, so we need to convert them into us
            timestampUs = dataScratch.readUnsignedInt() * 1000 / 45;
        } else if (version == 1) {
            if (!input.readFully(dataScratch.data, 0, TIMESTAMP_SIZE_V1 + 1, true)) {
                return false;
            }
            timestampUs = dataScratch.readLong();
        } else {
            throw new ParserException("Unsupported version number: " + version);
        }
        remainingSampleCount = dataScratch.readUnsignedByte();
        sampleBytesWritten = 0;
        return true;
    }

    private void parseSamples(ExtractorInput input) throws IOException, InterruptedException {
        for (; remainingSampleCount > 0; remainingSampleCount--) {
            dataScratch.reset();
            input.readFully(dataScratch.data, 0, 3);
            trackOutput.sampleData(dataScratch, 3);
            sampleBytesWritten += 3;
        }
        if (sampleBytesWritten > 0) {
            trackOutput.sampleMetadata(timestampUs, C.BUFFER_FLAG_KEY_FRAME, sampleBytesWritten, 0, null);
        }
    }
}

19 Source : Track.java
with GNU General Public License v2.0
from warren-bank

/**
 * Encapsulates information describing an MP4 track.
 */
public final clreplaced Track {

    /**
     * The transformation to apply to samples in the track, if any. One of {@link
     * #TRANSFORMATION_NONE} or {@link #TRANSFORMATION_CEA608_CDAT}.
     */
    @Doreplacedented
    @Retention(RetentionPolicy.SOURCE)
    @IntDef({ TRANSFORMATION_NONE, TRANSFORMATION_CEA608_CDAT })
    public @interface Transformation {
    }

    /**
     * A no-op sample transformation.
     */
    public static final int TRANSFORMATION_NONE = 0;

    /**
     * A transformation for caption samples in cdat atoms.
     */
    public static final int TRANSFORMATION_CEA608_CDAT = 1;

    /**
     * The track identifier.
     */
    public final int id;

    /**
     * One of {@link C#TRACK_TYPE_AUDIO}, {@link C#TRACK_TYPE_VIDEO} and {@link C#TRACK_TYPE_TEXT}.
     */
    public final int type;

    /**
     * The track timescale, defined as the number of time units that preplaced in one second.
     */
    public final long timescale;

    /**
     * The movie timescale.
     */
    public final long movieTimescale;

    /**
     * The duration of the track in microseconds, or {@link C#TIME_UNSET} if unknown.
     */
    public final long durationUs;

    /**
     * The format.
     */
    public final Format format;

    /**
     * One of {@code TRANSFORMATION_*}. Defines the transformation to apply before outputting each
     * sample.
     */
    @Transformation
    public final int sampleTransformation;

    /**
     * Durations of edit list segments in the movie timescale. Null if there is no edit list.
     */
    @Nullable
    public final long[] editListDurations;

    /**
     * Media times for edit list segments in the track timescale. Null if there is no edit list.
     */
    @Nullable
    public final long[] editListMediaTimes;

    /**
     * For H264 video tracks, the length in bytes of the NALUnitLength field in each sample. 0 for
     * other track types.
     */
    public final int nalUnitLengthFieldLength;

    @Nullable
    private final TrackEncryptionBox[] sampleDescriptionEncryptionBoxes;

    public Track(int id, int type, long timescale, long movieTimescale, long durationUs, Format format, @Transformation int sampleTransformation, @Nullable TrackEncryptionBox[] sampleDescriptionEncryptionBoxes, int nalUnitLengthFieldLength, @Nullable long[] editListDurations, @Nullable long[] editListMediaTimes) {
        this.id = id;
        this.type = type;
        this.timescale = timescale;
        this.movieTimescale = movieTimescale;
        this.durationUs = durationUs;
        this.format = format;
        this.sampleTransformation = sampleTransformation;
        this.sampleDescriptionEncryptionBoxes = sampleDescriptionEncryptionBoxes;
        this.nalUnitLengthFieldLength = nalUnitLengthFieldLength;
        this.editListDurations = editListDurations;
        this.editListMediaTimes = editListMediaTimes;
    }

    /**
     * Returns the {@link TrackEncryptionBox} for the given sample description index.
     *
     * @param sampleDescriptionIndex The given sample description index
     * @return The {@link TrackEncryptionBox} for the given sample description index. Maybe null if no
     *     such entry exists.
     */
    public TrackEncryptionBox getSampleDescriptionEncryptionBox(int sampleDescriptionIndex) {
        return sampleDescriptionEncryptionBoxes == null ? null : sampleDescriptionEncryptionBoxes[sampleDescriptionIndex];
    }
}

19 Source : DummyTrackOutput.java
with GNU General Public License v2.0
from warren-bank

@Override
public void format(Format format) {
// Do nothing: this dummy output intentionally discards the track format.
}

19 Source : SimpleDecoderAudioRenderer.java
with GNU General Public License v2.0
from warren-bank

/**
 * Decodes and renders audio using a {@link SimpleDecoder}.
 *
 * <p>This renderer accepts the following messages sent via {@link ExoPlayer#createMessage(Target)}
 * on the playback thread:
 *
 * <ul>
 *   <li>Message with type {@link C#MSG_SET_VOLUME} to set the volume. The message payload should be
 *       a {@link Float} with 0 being silence and 1 being unity gain.
 *   <li>Message with type {@link C#MSG_SET_AUDIO_ATTRIBUTES} to set the audio attributes. The
 *       message payload should be an {@link com.google.android.exoplayer2.audio.AudioAttributes}
 *       instance that will configure the underlying audio track.
 *   <li>Message with type {@link C#MSG_SET_AUX_EFFECT_INFO} to set the auxiliary effect. The
 *       message payload should be an {@link AuxEffectInfo} instance that will configure the
 *       underlying audio track.
 * </ul>
 */
// NOTE(review): "clreplaced" and "@Doreplacedented" below are scrape artifacts of a global
// "ass" -> "replaced" substitution; in the original source they read "class" / "@Documented".
public abstract clreplaced SimpleDecoderAudioRenderer extends BaseRenderer implements MediaClock {

    @Doreplacedented
    @Retention(RetentionPolicy.SOURCE)
    @IntDef({ REINITIALIZATION_STATE_NONE, REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM, REINITIALIZATION_STATE_WAIT_END_OF_STREAM })
    private @interface ReinitializationState {
    }

    /**
     * The decoder does not need to be re-initialized.
     */
    private static final int REINITIALIZATION_STATE_NONE = 0;

    /**
     * The input format has changed in a way that requires the decoder to be re-initialized, but we
     * haven't yet signaled an end of stream to the existing decoder. We need to do so in order to
     * ensure that it outputs any remaining buffers before we release it.
     */
    private static final int REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM = 1;

    /**
     * The input format has changed in a way that requires the decoder to be re-initialized, and we've
     * signaled an end of stream to the existing decoder. We're waiting for the decoder to output an
     * end of stream signal to indicate that it has output any remaining buffers before we release it.
     */
    private static final int REINITIALIZATION_STATE_WAIT_END_OF_STREAM = 2;

    // DRM session manager for encrypted content; may be null if DRM is not required.
    private final DrmSessionManager<ExoMediaCrypto> drmSessionManager;

    // Whether clear regions of encrypted media may play before keys are acquired.
    private final boolean playClearSamplesWithoutKeys;

    // Dispatches audio renderer events to the registered listener/handler.
    private final EventDispatcher eventDispatcher;

    // Sink that receives decoded audio for output.
    private final AudioSink audioSink;

    // Holder reused for formats read from the source.
    private final FormatHolder formatHolder;

    // Flags-only buffer used to poll for a format / end-of-stream without reading sample data.
    private final DecoderInputBuffer flagsOnlyBuffer;

    private DecoderCounters decoderCounters;

    // Most recent format read from the source; null until the first format is read.
    private Format inputFormat;

    private int encoderDelay;

    private int encoderPadding;

    // The active decoder; null until created in response to the first format.
    private SimpleDecoder<DecoderInputBuffer, ? extends SimpleOutputBuffer, ? extends AudioDecoderException> decoder;

    // Input buffer currently being filled; null when none is dequeued.
    private DecoderInputBuffer inputBuffer;

    // Output buffer currently being drained; null when none is dequeued.
    private SimpleOutputBuffer outputBuffer;

    private DrmSession<ExoMediaCrypto> drmSession;

    private DrmSession<ExoMediaCrypto> pendingDrmSession;

    // One of the REINITIALIZATION_STATE_* constants above.
    @ReinitializationState
    private int decoderReinitializationState;

    // Whether the current decoder has received any input buffers.
    private boolean decoderReceivedBuffers;

    // Whether the audio sink must be (re)configured before the next buffer is written.
    private boolean audioTrackNeedsConfigure;

    private long currentPositionUs;

    private boolean allowFirstBufferPositionDiscontinuity;

    private boolean allowPositionDiscontinuity;

    // Whether the end of the input stream has been read.
    private boolean inputStreamEnded;

    // Whether all output has been rendered to the sink.
    private boolean outputStreamEnded;

    // Whether rendering is blocked waiting for DRM keys.
    private boolean waitingForKeys;

    /**
     * Creates an instance that delivers no events and uses default audio capabilities.
     */
    public SimpleDecoderAudioRenderer() {
        this(/* eventHandler= */
        null, /* eventListener= */
        null);
    }

    /**
     * Creates an instance without DRM support that outputs through a {@link DefaultAudioSink}.
     *
     * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
     *     null if delivery of events is not required.
     * @param eventListener A listener of events. May be null if delivery of events is not required.
     * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
     */
    public SimpleDecoderAudioRenderer(@Nullable Handler eventHandler, @Nullable AudioRendererEventListener eventListener, AudioProcessor... audioProcessors) {
        this(eventHandler, eventListener, /* audioCapabilities= */
        null, /* drmSessionManager= */
        null, /* playClearSamplesWithoutKeys= */
        false, audioProcessors);
    }

    /**
     * Creates an instance without DRM support using the given audio capabilities.
     *
     * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
     *     null if delivery of events is not required.
     * @param eventListener A listener of events. May be null if delivery of events is not required.
     * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
     *     default capabilities (no encoded audio passthrough support) should be assumed.
     */
    public SimpleDecoderAudioRenderer(@Nullable Handler eventHandler, @Nullable AudioRendererEventListener eventListener, @Nullable AudioCapabilities audioCapabilities) {
        this(eventHandler, eventListener, audioCapabilities, /* drmSessionManager= */
        null, /* playClearSamplesWithoutKeys= */
        false);
    }

    /**
     * Creates an instance that outputs through a {@link DefaultAudioSink} built from the given
     * capabilities and processors.
     *
     * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
     *     null if delivery of events is not required.
     * @param eventListener A listener of events. May be null if delivery of events is not required.
     * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
     *     default capabilities (no encoded audio passthrough support) should be assumed.
     * @param drmSessionManager For use with encrypted media. May be null if support for encrypted
     *     media is not required.
     * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
     *     For example a media file may start with a short clear region so as to allow playback to
     *     begin in parallel with key acquisition. This parameter specifies whether the renderer is
     *     permitted to play clear regions of encrypted media files before {@code drmSessionManager}
     *     has obtained the keys necessary to decrypt encrypted regions of the media.
     * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
     */
    public SimpleDecoderAudioRenderer(@Nullable Handler eventHandler, @Nullable AudioRendererEventListener eventListener, @Nullable AudioCapabilities audioCapabilities, @Nullable DrmSessionManager<ExoMediaCrypto> drmSessionManager, boolean playClearSamplesWithoutKeys, AudioProcessor... audioProcessors) {
        this(eventHandler, eventListener, drmSessionManager, playClearSamplesWithoutKeys, new DefaultAudioSink(audioCapabilities, audioProcessors));
    }

    /**
     * Base constructor: all other constructors delegate here.
     *
     * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
     *     null if delivery of events is not required.
     * @param eventListener A listener of events. May be null if delivery of events is not required.
     * @param drmSessionManager For use with encrypted media. May be null if support for encrypted
     *     media is not required.
     * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
     *     For example a media file may start with a short clear region so as to allow playback to
     *     begin in parallel with key acquisition. This parameter specifies whether the renderer is
     *     permitted to play clear regions of encrypted media files before {@code drmSessionManager}
     *     has obtained the keys necessary to decrypt encrypted regions of the media.
     * @param audioSink The sink to which audio will be output.
     */
    public SimpleDecoderAudioRenderer(@Nullable Handler eventHandler, @Nullable AudioRendererEventListener eventListener, @Nullable DrmSessionManager<ExoMediaCrypto> drmSessionManager, boolean playClearSamplesWithoutKeys, AudioSink audioSink) {
        super(C.TRACK_TYPE_AUDIO);
        this.drmSessionManager = drmSessionManager;
        this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
        eventDispatcher = new EventDispatcher(eventHandler, eventListener);
        this.audioSink = audioSink;
        // Observe sink callbacks (session id, discontinuities, underruns).
        audioSink.setListener(new AudioSinkListener());
        formatHolder = new FormatHolder();
        flagsOnlyBuffer = DecoderInputBuffer.newFlagsOnlyInstance();
        decoderReinitializationState = REINITIALIZATION_STATE_NONE;
        // The sink must be configured before the first buffer is written.
        audioTrackNeedsConfigure = true;
    }

    /**
     * Returns this renderer as the media clock: playback position is driven by the audio sink.
     */
    @Override
    public MediaClock getMediaClock() {
        return this;
    }

    /**
     * Reports the level of support for the given format, combining the subclass's format support
     * with adaptive and tunneling capability flags.
     */
    @Override
    public final int supportsFormat(Format format) {
        // Non-audio MIME types are rejected outright.
        if (!MimeTypes.isAudio(format.sampleMimeType)) {
            return FORMAT_UNSUPPORTED_TYPE;
        }
        int support = supportsFormatInternal(drmSessionManager, format);
        if (support <= FORMAT_UNSUPPORTED_DRM) {
            // Unsupported type/subtype/DRM: report the bare support value.
            return support;
        }
        // Tunneled playback requires API 21.
        int tunneling;
        if (Util.SDK_INT >= 21) {
            tunneling = TUNNELING_SUPPORTED;
        } else {
            tunneling = TUNNELING_NOT_SUPPORTED;
        }
        return ADAPTIVE_NOT_SEAMLESS | tunneling | support;
    }

    /**
     * Returns the {@link #FORMAT_SUPPORT_MASK} component of the return value for {@link
     * #supportsFormat(Format)}. Implemented by subclasses for their specific decoder.
     *
     * @param drmSessionManager The renderer's {@link DrmSessionManager}.
     * @param format The format, which has an audio {@link Format#sampleMimeType}.
     * @return The extent to which the renderer supports the format itself.
     */
    protected abstract int supportsFormatInternal(DrmSessionManager<ExoMediaCrypto> drmSessionManager, Format format);

    /**
     * Returns whether the audio sink can accept audio in the specified encoding.
     *
     * @param encoding The audio encoding.
     * @return Whether the audio sink can accept audio in the specified encoding.
     */
    protected final boolean supportsOutputEncoding(@C.Encoding int encoding) {
        // Delegates directly to the configured sink.
        return audioSink.isEncodingSupported(encoding);
    }

    /**
     * Main render loop: reads the initial format if needed, (re)creates the decoder, then drains
     * decoded output to the sink and feeds new input until no further progress can be made.
     */
    @Override
    public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
        if (outputStreamEnded) {
            // Everything is decoded; just let the sink finish playing out.
            try {
                audioSink.playToEndOfStream();
            } catch (AudioSink.WriteException e) {
                throw ExoPlaybackException.createForRenderer(e, getIndex());
            }
            return;
        }
        // Try and read a format if we don't have one already.
        if (inputFormat == null) {
            // We don't have a format yet, so try and read one.
            flagsOnlyBuffer.clear();
            int result = readSource(formatHolder, flagsOnlyBuffer, true);
            if (result == C.RESULT_FORMAT_READ) {
                onInputFormatChanged(formatHolder.format);
            } else if (result == C.RESULT_BUFFER_READ) {
                // End of stream read having not read a format.
                replacedertions.checkState(flagsOnlyBuffer.isEndOfStream());
                inputStreamEnded = true;
                processEndOfStream();
                return;
            } else {
                // We still don't have a format and can't make progress without one.
                return;
            }
        }
        // If we don't have a decoder yet, we need to instantiate one.
        maybeInitDecoder();
        if (decoder != null) {
            try {
                // Rendering loop: drain all available output first, then feed all available input.
                TraceUtil.beginSection("drainAndFeed");
                while (drainOutputBuffer()) {
                }
                while (feedInputBuffer()) {
                }
                TraceUtil.endSection();
            } catch (AudioDecoderException | AudioSink.ConfigurationException | AudioSink.InitializationException | AudioSink.WriteException e) {
                throw ExoPlaybackException.createForRenderer(e, getIndex());
            }
            decoderCounters.ensureUpdated();
        }
    }

    /**
     * Called when the audio session id becomes known. The default implementation is a no-op. One
     * reason for overriding this method would be to instantiate and enable a {@link Virtualizer} in
     * order to spatialize the audio channels. For this use case, any {@link Virtualizer} instances
     * should be released in {@link #onDisabled()} (if not before).
     *
     * @see AudioSink.Listener#onAudioSessionId(int)
     */
    protected void onAudioSessionId(int audioSessionId) {
    // Do nothing by default; subclasses may override.
    }

    /**
     * Called when the audio track reports a position discontinuity. No-op by default.
     *
     * @see AudioSink.Listener#onPositionDiscontinuity()
     */
    protected void onAudioTrackPositionDiscontinuity() {
    // Do nothing by default; subclasses may override.
    }

    /**
     * Called when the audio track underruns. No-op by default.
     *
     * @see AudioSink.Listener#onUnderrun(int, long, long)
     */
    protected void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
    // Do nothing by default; subclasses may override.
    }

    /**
     * Creates a decoder for the given format.
     *
     * @param format The format for which a decoder is required.
     * @param mediaCrypto The {@link ExoMediaCrypto} object required for decoding encrypted content.
     *     May be null and can be ignored if decoder does not handle encrypted content.
     * @return The decoder.
     * @throws AudioDecoderException If an error occurred creating a suitable decoder.
     */
    protected abstract SimpleDecoder<DecoderInputBuffer, ? extends SimpleOutputBuffer, ? extends AudioDecoderException> createDecoder(Format format, ExoMediaCrypto mediaCrypto) throws AudioDecoderException;

    /**
     * Returns the format of audio buffers output by the decoder. Will not be called until the first
     * output buffer has been dequeued, so the decoder may use input data to determine the format.
     * <p>
     * The default implementation returns a 16-bit PCM format with the same channel count and sample
     * rate as the input.
     */
    protected Format getOutputFormat() {
        return Format.createAudioSampleFormat(null, MimeTypes.AUDIO_RAW, null, Format.NO_VALUE, Format.NO_VALUE, inputFormat.channelCount, inputFormat.sampleRate, C.ENCODING_PCM_16BIT, null, null, 0, null);
    }

    /**
     * Dequeues one decoded output buffer (if needed) and attempts to write it to the audio sink,
     * handling end-of-stream and pending decoder re-initialization along the way.
     *
     * @return Whether it may be possible to drain further output immediately.
     */
    private boolean drainOutputBuffer() throws ExoPlaybackException, AudioDecoderException, AudioSink.ConfigurationException, AudioSink.InitializationException, AudioSink.WriteException {
        if (outputBuffer == null) {
            outputBuffer = decoder.dequeueOutputBuffer();
            if (outputBuffer == null) {
                // No output available yet.
                return false;
            }
            decoderCounters.skippedOutputBufferCount += outputBuffer.skippedOutputBufferCount;
        }
        if (outputBuffer.isEndOfStream()) {
            if (decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM) {
                // We're waiting to re-initialize the decoder, and have now processed all final buffers.
                releaseDecoder();
                maybeInitDecoder();
                // The audio track may need to be recreated once the new output format is known.
                audioTrackNeedsConfigure = true;
            } else {
                outputBuffer.release();
                outputBuffer = null;
                processEndOfStream();
            }
            return false;
        }
        if (audioTrackNeedsConfigure) {
            // Configure the sink lazily, once the decoder's output format is known.
            Format outputFormat = getOutputFormat();
            audioSink.configure(outputFormat.pcmEncoding, outputFormat.channelCount, outputFormat.sampleRate, 0, null, encoderDelay, encoderPadding);
            audioTrackNeedsConfigure = false;
        }
        if (audioSink.handleBuffer(outputBuffer.data, outputBuffer.timeUs)) {
            // Buffer fully consumed by the sink; release it and try for more.
            decoderCounters.renderedOutputBufferCount++;
            outputBuffer.release();
            outputBuffer = null;
            return true;
        }
        // Sink is full; keep the buffer and retry on a later render call.
        return false;
    }

    /**
     * Attempts to read one sample (or format change) from the source and queue it to the decoder,
     * handling end-of-stream signaling, DRM key waits and decoder re-initialization.
     *
     * @return Whether it may be possible to feed further input immediately.
     */
    private boolean feedInputBuffer() throws AudioDecoderException, ExoPlaybackException {
        if (decoder == null || decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM || inputStreamEnded) {
            // We need to reinitialize the decoder or the input stream has ended.
            return false;
        }
        if (inputBuffer == null) {
            inputBuffer = decoder.dequeueInputBuffer();
            if (inputBuffer == null) {
                // No free input buffer available.
                return false;
            }
        }
        if (decoderReinitializationState == REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM) {
            // Signal end of stream so the old decoder flushes out its remaining output.
            inputBuffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM);
            decoder.queueInputBuffer(inputBuffer);
            inputBuffer = null;
            decoderReinitializationState = REINITIALIZATION_STATE_WAIT_END_OF_STREAM;
            return false;
        }
        int result;
        if (waitingForKeys) {
            // We've already read an encrypted sample into buffer, and are waiting for keys.
            result = C.RESULT_BUFFER_READ;
        } else {
            result = readSource(formatHolder, inputBuffer, false);
        }
        if (result == C.RESULT_NOTHING_READ) {
            return false;
        }
        if (result == C.RESULT_FORMAT_READ) {
            onInputFormatChanged(formatHolder.format);
            return true;
        }
        if (inputBuffer.isEndOfStream()) {
            inputStreamEnded = true;
            decoder.queueInputBuffer(inputBuffer);
            inputBuffer = null;
            return false;
        }
        boolean bufferEncrypted = inputBuffer.isEncrypted();
        waitingForKeys = shouldWaitForKeys(bufferEncrypted);
        if (waitingForKeys) {
            // Keep the buffer; it will be queued once keys are available.
            return false;
        }
        inputBuffer.flip();
        onQueueInputBuffer(inputBuffer);
        decoder.queueInputBuffer(inputBuffer);
        decoderReceivedBuffers = true;
        decoderCounters.inputBufferCount++;
        inputBuffer = null;
        return true;
    }

    /**
     * Returns whether queueing the current buffer must wait for DRM keys, throwing if the DRM
     * session is in an error state.
     */
    private boolean shouldWaitForKeys(boolean bufferEncrypted) throws ExoPlaybackException {
        // No DRM session: nothing to wait for.
        if (drmSession == null) {
            return false;
        }
        // Clear samples may play eagerly if the renderer is configured to allow it.
        if (!bufferEncrypted && playClearSamplesWithoutKeys) {
            return false;
        }
        @DrmSession.State
        int sessionState = drmSession.getState();
        if (sessionState == DrmSession.STATE_ERROR) {
            throw ExoPlaybackException.createForRenderer(drmSession.getError(), getIndex());
        }
        // Wait unless the session already holds its keys.
        return sessionState != DrmSession.STATE_OPENED_WITH_KEYS;
    }

    /**
     * Marks the output stream as ended and asks the sink to play out any remaining audio.
     *
     * @throws ExoPlaybackException If the sink fails while playing to the end of the stream.
     */
    private void processEndOfStream() throws ExoPlaybackException {
        outputStreamEnded = true;
        try {
            audioSink.playToEndOfStream();
        } catch (AudioSink.WriteException e) {
            // Re-wrap the sink failure so the player handles it as a renderer error.
            throw ExoPlaybackException.createForRenderer(e, getIndex());
        }
    }

    /**
     * Discards in-flight decoder state, e.g. after a position reset.
     *
     * <p>If a decoder re-initialization was already pending, the old decoder is released and a new
     * one created instead of flushing, since its buffered state belongs to the previous format.
     */
    private void flushDecoder() throws ExoPlaybackException {
        waitingForKeys = false;
        if (decoderReinitializationState == REINITIALIZATION_STATE_NONE) {
            // No re-init pending: drop in-flight buffers and flush the decoder itself.
            inputBuffer = null;
            if (outputBuffer != null) {
                outputBuffer.release();
                outputBuffer = null;
            }
            decoder.flush();
            decoderReceivedBuffers = false;
        } else {
            // A re-init was pending; swap to a fresh decoder rather than flushing the old one.
            releaseDecoder();
            maybeInitDecoder();
        }
    }

    /** Ended once all input has been consumed and the sink has played out its final data. */
    @Override
    public boolean isEnded() {
        return outputStreamEnded && audioSink.isEnded();
    }

    /**
     * Ready while the sink still has queued audio, or while more data can be produced: a format is
     * known, keys aren't being waited on, and either the source is ready or an output buffer is
     * pending.
     */
    @Override
    public boolean isReady() {
        if (audioSink.hasPendingData()) {
            return true;
        }
        if (inputFormat == null || waitingForKeys) {
            return false;
        }
        return isSourceReady() || outputBuffer != null;
    }

    /** Returns the current media position, refreshing it from the sink while started. */
    @Override
    public long getPositionUs() {
        boolean started = getState() == STATE_STARTED;
        if (started) {
            // Pull the freshest position from the sink before reporting.
            updateCurrentPosition();
        }
        return currentPositionUs;
    }

    /** Delegates speed/pitch to the sink, returning the parameters it actually applied. */
    @Override
    public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
        PlaybackParameters applied = audioSink.setPlaybackParameters(playbackParameters);
        return applied;
    }

    /** Returns the playback parameters currently in effect at the audio sink. */
    @Override
    public PlaybackParameters getPlaybackParameters() {
        return audioSink.getPlaybackParameters();
    }

    /**
     * Called when the renderer is enabled: starts a fresh counters object, announces it, and
     * propagates the tunneling configuration to the sink.
     */
    @Override
    protected void onEnabled(boolean joining) throws ExoPlaybackException {
        decoderCounters = new DecoderCounters();
        eventDispatcher.enabled(decoderCounters);
        int tunnelingSessionId = getConfiguration().tunnelingAudioSessionId;
        if (tunnelingSessionId == C.AUDIO_SESSION_ID_UNSET) {
            audioSink.disableTunneling();
        } else {
            audioSink.enableTunnelingV21(tunnelingSessionId);
        }
    }

    /**
     * Resets playback to {@code positionUs}: drops audio buffered in the sink, re-arms the
     * discontinuity flags, clears end-of-stream state, and flushes any existing decoder.
     */
    @Override
    protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
        audioSink.reset();
        currentPositionUs = positionUs;
        // Permit the position to jump on the next input buffer / sink position report.
        allowFirstBufferPositionDiscontinuity = true;
        allowPositionDiscontinuity = true;
        inputStreamEnded = false;
        outputStreamEnded = false;
        if (decoder != null) {
            flushDecoder();
        }
    }

    /** Begins/resumes audio playback in the sink when the renderer is started. */
    @Override
    protected void onStarted() {
        audioSink.play();
    }

    /** Pauses the sink, snapshotting the position first while the sink can still report it. */
    @Override
    protected void onStopped() {
        updateCurrentPosition();
        audioSink.pause();
    }

    /**
     * Releases all resources when the renderer is disabled.
     *
     * <p>The nested try/finally chain guarantees every release step runs even if an earlier one
     * throws: decoder and sink first, then the active DRM session, then any distinct pending DRM
     * session, and finally the counters update and disabled notification.
     */
    @Override
    protected void onDisabled() {
        inputFormat = null;
        audioTrackNeedsConfigure = true;
        waitingForKeys = false;
        try {
            releaseDecoder();
            audioSink.release();
        } finally {
            try {
                if (drmSession != null) {
                    drmSessionManager.releaseSession(drmSession);
                }
            } finally {
                try {
                    // Only release the pending session when it differs from the active one,
                    // avoiding a double release of the same session.
                    if (pendingDrmSession != null && pendingDrmSession != drmSession) {
                        drmSessionManager.releaseSession(pendingDrmSession);
                    }
                } finally {
                    drmSession = null;
                    pendingDrmSession = null;
                    decoderCounters.ensureUpdated();
                    eventDispatcher.disabled(decoderCounters);
                }
            }
        }
    }

    /**
     * Handles player messages addressed to this renderer, forwarding audio-related settings to the
     * sink and delegating everything else to the superclass.
     */
    @Override
    public void handleMessage(int messageType, @Nullable Object message) throws ExoPlaybackException {
        switch(messageType) {
            case C.MSG_SET_VOLUME:
                audioSink.setVolume((Float) message);
                break;
            case C.MSG_SET_AUDIO_ATTRIBUTES:
                audioSink.setAudioAttributes((AudioAttributes) message);
                break;
            case C.MSG_SET_AUX_EFFECT_INFO:
                audioSink.setAuxEffectInfo((AuxEffectInfo) message);
                break;
            default:
                super.handleMessage(messageType, message);
                break;
        }
    }

    /**
     * Creates the audio decoder if one is needed and the DRM state allows it.
     *
     * <p>No-ops if a decoder already exists. If a DRM session is set but not yet open (and not in
     * error), initialization is deferred until a later call.
     *
     * @throws ExoPlaybackException If decoder creation fails.
     */
    private void maybeInitDecoder() throws ExoPlaybackException {
        if (decoder != null) {
            return;
        }
        // Promote the pending session to active before querying its crypto object.
        drmSession = pendingDrmSession;
        ExoMediaCrypto mediaCrypto = null;
        if (drmSession != null) {
            mediaCrypto = drmSession.getMediaCrypto();
            if (mediaCrypto == null) {
                DrmSessionException drmError = drmSession.getError();
                if (drmError != null) {
                // Continue for now. We may be able to avoid failure if the session recovers, or if a new
                // input format causes the session to be replaced before it's used.
                } else {
                    // The drm session isn't open yet.
                    return;
                }
            }
        }
        try {
            // Time decoder creation for the decoderInitialized event, bracketed by trace markers.
            long codecInitializingTimestamp = SystemClock.elapsedRealtime();
            TraceUtil.beginSection("createAudioDecoder");
            decoder = createDecoder(inputFormat, mediaCrypto);
            TraceUtil.endSection();
            long codecInitializedTimestamp = SystemClock.elapsedRealtime();
            eventDispatcher.decoderInitialized(decoder.getName(), codecInitializedTimestamp, codecInitializedTimestamp - codecInitializingTimestamp);
            decoderCounters.decoderInitCount++;
        } catch (AudioDecoderException e) {
            throw ExoPlaybackException.createForRenderer(e, getIndex());
        }
    }

    /**
     * Releases the current decoder, if any, and resets the associated buffer and
     * re-initialization state.
     */
    private void releaseDecoder() {
        if (decoder != null) {
            // Drop buffer references owned by the decoder before releasing it.
            inputBuffer = null;
            outputBuffer = null;
            decoder.release();
            decoder = null;
            decoderCounters.decoderReleaseCount++;
            decoderReinitializationState = REINITIALIZATION_STATE_NONE;
            decoderReceivedBuffers = false;
        }
    }

    /**
     * Handles a change of input {@link Format}: acquires/releases DRM sessions if the DRM init
     * data changed, schedules or performs decoder re-initialization, and records gapless-playback
     * trim values.
     *
     * @param newFormat The new input format.
     * @throws ExoPlaybackException If DRM is required but no manager is set, or re-init fails.
     */
    private void onInputFormatChanged(Format newFormat) throws ExoPlaybackException {
        Format oldFormat = inputFormat;
        inputFormat = newFormat;
        boolean drmInitDataChanged = !Util.areEqual(inputFormat.drmInitData, oldFormat == null ? null : oldFormat.drmInitData);
        if (drmInitDataChanged) {
            if (inputFormat.drmInitData != null) {
                if (drmSessionManager == null) {
                    throw ExoPlaybackException.createForRenderer(new IllegalStateException("Media requires a DrmSessionManager"), getIndex());
                }
                pendingDrmSession = drmSessionManager.acquireSession(Looper.myLooper(), inputFormat.drmInitData);
                if (pendingDrmSession == drmSession) {
                    // Same session returned; release the extra reference just acquired.
                    drmSessionManager.releaseSession(pendingDrmSession);
                }
            } else {
                pendingDrmSession = null;
            }
        }
        if (decoderReceivedBuffers) {
            // Signal end of stream and wait for any final output buffers before re-initialization.
            decoderReinitializationState = REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM;
        } else {
            // There aren't any final output buffers, so release the decoder immediately.
            releaseDecoder();
            maybeInitDecoder();
            audioTrackNeedsConfigure = true;
        }
        // Gapless-playback trim values used when configuring the sink.
        encoderDelay = newFormat.encoderDelay;
        encoderPadding = newFormat.encoderPadding;
        eventDispatcher.inputFormatChanged(newFormat);
    }

    /**
     * Called just before an input buffer is queued to the decoder. Lets the reported position
     * snap to the first presentable buffer's timestamp when it differs significantly from the
     * expected position.
     */
    private void onQueueInputBuffer(DecoderInputBuffer buffer) {
        if (!allowFirstBufferPositionDiscontinuity || buffer.isDecodeOnly()) {
            return;
        }
        // TODO: Remove this hack once we have a proper fix for [Internal: b/71876314].
        // Snap to the buffer timestamp when the deviation exceeds half a second.
        long deviationUs = Math.abs(buffer.timeUs - currentPositionUs);
        if (deviationUs > 500000) {
            currentPositionUs = buffer.timeUs;
        }
        allowFirstBufferPositionDiscontinuity = false;
    }

    /**
     * Pulls the latest playback position from the sink, never moving the reported position
     * backwards unless a discontinuity has been explicitly allowed.
     */
    private void updateCurrentPosition() {
        long sinkPositionUs = audioSink.getCurrentPositionUs(isEnded());
        if (sinkPositionUs == AudioSink.CURRENT_POSITION_NOT_SET) {
            return;
        }
        if (allowPositionDiscontinuity) {
            currentPositionUs = sinkPositionUs;
        } else {
            // Monotonic: never report an earlier position than one already reported.
            currentPositionUs = Math.max(currentPositionUs, sinkPositionUs);
        }
        allowPositionDiscontinuity = false;
    }

    private final clreplaced AudioSinkListener implements AudioSink.Listener {

        @Override
        public void onAudioSessionId(int audioSessionId) {
            eventDispatcher.audioSessionId(audioSessionId);
            SimpleDecoderAudioRenderer.this.onAudioSessionId(audioSessionId);
        }

        @Override
        public void onPositionDiscontinuity() {
            onAudioTrackPositionDiscontinuity();
            // We are out of sync so allow currentPositionUs to jump backwards.
            SimpleDecoderAudioRenderer.this.allowPositionDiscontinuity = true;
        }

        @Override
        public void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
            eventDispatcher.audioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
            onAudioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
        }
    }
}

19 Source : SimpleDecoderAudioRenderer.java
with GNU General Public License v2.0
from warren-bank

/**
 * Attempts to move one decoded buffer from the decoder to the audio sink.
 *
 * <p>Handles end-of-stream buffers (either completing a pending decoder re-initialization or
 * signalling end of stream to the sink) and lazily (re)configures the sink when the output
 * format may have changed.
 *
 * @return True if a buffer was fully handled and another drain attempt may make progress.
 */
private boolean drainOutputBuffer() throws ExoPlaybackException, AudioDecoderException, AudioSink.ConfigurationException, AudioSink.InitializationException, AudioSink.WriteException {
    if (outputBuffer == null) {
        outputBuffer = decoder.dequeueOutputBuffer();
        if (outputBuffer == null) {
            return false;
        }
        decoderCounters.skippedOutputBufferCount += outputBuffer.skippedOutputBufferCount;
    }
    if (outputBuffer.isEndOfStream()) {
        if (decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM) {
            // We're waiting to re-initialize the decoder, and have now processed all final buffers.
            releaseDecoder();
            maybeInitDecoder();
            // The audio track may need to be recreated once the new output format is known.
            audioTrackNeedsConfigure = true;
        } else {
            outputBuffer.release();
            outputBuffer = null;
            processEndOfStream();
        }
        return false;
    }
    if (audioTrackNeedsConfigure) {
        // (Re)configure the sink with the decoder's current output format.
        Format outputFormat = getOutputFormat();
        audioSink.configure(outputFormat.pcmEncoding, outputFormat.channelCount, outputFormat.sampleRate, 0, null, encoderDelay, encoderPadding);
        audioTrackNeedsConfigure = false;
    }
    // handleBuffer returns false if the sink couldn't accept the data yet; keep the buffer
    // and retry on the next drain.
    if (audioSink.handleBuffer(outputBuffer.data, outputBuffer.timeUs)) {
        decoderCounters.renderedOutputBufferCount++;
        outputBuffer.release();
        outputBuffer = null;
        return true;
    }
    return false;
}

19 Source : SimpleDecoderAudioRenderer.java
with GNU General Public License v2.0
from warren-bank

/**
 * Handles a change of input {@link Format}: acquires/releases DRM sessions if the DRM init data
 * changed, schedules or performs decoder re-initialization, and records gapless-playback trim
 * values.
 *
 * @param newFormat The new input format.
 * @throws ExoPlaybackException If DRM is required but no manager is set, or re-init fails.
 */
private void onInputFormatChanged(Format newFormat) throws ExoPlaybackException {
    Format oldFormat = inputFormat;
    inputFormat = newFormat;
    boolean drmInitDataChanged = !Util.areEqual(inputFormat.drmInitData, oldFormat == null ? null : oldFormat.drmInitData);
    if (drmInitDataChanged) {
        if (inputFormat.drmInitData != null) {
            if (drmSessionManager == null) {
                throw ExoPlaybackException.createForRenderer(new IllegalStateException("Media requires a DrmSessionManager"), getIndex());
            }
            pendingDrmSession = drmSessionManager.acquireSession(Looper.myLooper(), inputFormat.drmInitData);
            if (pendingDrmSession == drmSession) {
                // Same session returned; release the extra reference just acquired.
                drmSessionManager.releaseSession(pendingDrmSession);
            }
        } else {
            pendingDrmSession = null;
        }
    }
    if (decoderReceivedBuffers) {
        // Signal end of stream and wait for any final output buffers before re-initialization.
        decoderReinitializationState = REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM;
    } else {
        // There aren't any final output buffers, so release the decoder immediately.
        releaseDecoder();
        maybeInitDecoder();
        audioTrackNeedsConfigure = true;
    }
    // Gapless-playback trim values used when configuring the sink.
    encoderDelay = newFormat.encoderDelay;
    encoderPadding = newFormat.encoderPadding;
    eventDispatcher.inputFormatChanged(newFormat);
}

19 Source : SimpleDecoderAudioRenderer.java
with GNU General Public License v2.0
from warren-bank

/**
 * Reports the level of support for the given format: unsupported for non-audio MIME types,
 * otherwise the subclass-determined support combined with tunneling and (non-seamless)
 * adaptation flags.
 */
@Override
public final int supportsFormat(Format format) {
    if (!MimeTypes.isAudio(format.sampleMimeType)) {
        return FORMAT_UNSUPPORTED_TYPE;
    }
    int formatSupport = supportsFormatInternal(drmSessionManager, format);
    if (formatSupport <= FORMAT_UNSUPPORTED_DRM) {
        // No point advertising adaptive/tunneling capabilities for an unsupported format.
        return formatSupport;
    }
    int tunnelingSupport = TUNNELING_NOT_SUPPORTED;
    if (Util.SDK_INT >= 21) {
        // Tunneled playback requires platform support added in API 21.
        tunnelingSupport = TUNNELING_SUPPORTED;
    }
    return ADAPTIVE_NOT_SEAMLESS | tunnelingSupport | formatSupport;
}

19 Source : MediaCodecAudioRenderer.java
with GNU General Public License v2.0
from warren-bank

/**
 * Captures audio-specific properties of the new input format after the superclass has handled
 * the change, and notifies the event dispatcher.
 */
@Override
protected void onInputFormatChanged(Format newFormat) throws ExoPlaybackException {
    super.onInputFormatChanged(newFormat);
    eventDispatcher.inputFormatChanged(newFormat);
    // If the input format is anything other than PCM then we assume that the audio decoder will
    // output 16-bit PCM.
    pcmEncoding = MimeTypes.AUDIO_RAW.equals(newFormat.sampleMimeType) ? newFormat.pcmEncoding : C.ENCODING_PCM_16BIT;
    channelCount = newFormat.channelCount;
    // Gapless-playback trim values used when configuring the audio sink.
    encoderDelay = newFormat.encoderDelay;
    encoderPadding = newFormat.encoderPadding;
}

19 Source : MediaCodecAudioRenderer.java
with GNU General Public License v2.0
from warren-bank

/**
 * Returns candidate decoders for the format, preferring the dedicated passthrough decoder when
 * the format's MIME type can be played via passthrough.
 *
 * <p>NOTE(review): the identifiers below look scraper-corrupted ("Preplacedthrough" appears to
 * stand for "Passthrough"); left byte-identical here because the definitions elsewhere in this
 * file are presumably garbled the same way — verify against the upstream ExoPlayer source.
 */
@Override
protected List<MediaCodecInfo> getDecoderInfos(MediaCodecSelector mediaCodecSelector, Format format, boolean requiresSecureDecoder) throws DecoderQueryException {
    if (allowPreplacedthrough(format.sampleMimeType)) {
        MediaCodecInfo preplacedthroughDecoderInfo = mediaCodecSelector.getPreplacedthroughDecoderInfo();
        if (preplacedthroughDecoderInfo != null) {
            return Collections.singletonList(preplacedthroughDecoderInfo);
        }
    }
    // No passthrough decoder available (or passthrough not allowed): fall back to the default
    // decoder selection.
    return super.getDecoderInfos(mediaCodecSelector, format, requiresSecureDecoder);
}

19 Source : MediaCodecAudioRenderer.java
with GNU General Public License v2.0
from warren-bank

/**
 * Returns whether two {@link Format}s will cause the same codec to be configured in an identical
 * way, excluding {@link MediaFormat#KEY_MAX_INPUT_SIZE} and configuration that does not come from
 * the {@link Format}.
 *
 * @param oldFormat The first format.
 * @param newFormat The second format.
 * @return Whether the two formats will cause a codec to be configured in an identical way,
 *     excluding {@link MediaFormat#KEY_MAX_INPUT_SIZE} and configuration that does not come from
 *     the {@link Format}.
 */
protected boolean areCodecConfigurationCompatible(Format oldFormat, Format newFormat) {
    // Compare only the properties that feed codec configuration.
    boolean sameMimeType = Util.areEqual(oldFormat.sampleMimeType, newFormat.sampleMimeType);
    boolean sameChannelCount = oldFormat.channelCount == newFormat.channelCount;
    boolean sameSampleRate = oldFormat.sampleRate == newFormat.sampleRate;
    return sameMimeType && sameChannelCount && sameSampleRate && oldFormat.initializationDataEquals(newFormat);
}

19 Source : MediaCodecAudioRenderer.java
with GNU General Public License v2.0
from warren-bank

/**
 * Records the time of a stream change so it can be applied when later input buffers are
 * processed.
 *
 * <p>When the pending-change array is full, the most recent pending change is deliberately
 * overwritten (after logging a warning) rather than growing the array.
 */
@Override
protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException {
    super.onStreamChanged(formats, offsetUs);
    if (lastInputTimeUs != C.TIME_UNSET) {
        if (pendingStreamChangeCount == pendingStreamChangeTimesUs.length) {
            // Array full: the write below intentionally replaces the last pending entry.
            Log.w(TAG, "Too many stream changes, so dropping change at " + pendingStreamChangeTimesUs[pendingStreamChangeCount - 1]);
        } else {
            pendingStreamChangeCount++;
        }
        pendingStreamChangeTimesUs[pendingStreamChangeCount - 1] = lastInputTimeUs;
    }
}

See More Examples