Here are examples of the Java API class org.apache.lucene.store.ChecksumIndexInput, taken from open-source projects.
1. TestCodecUtil#testCheckFooterValidPastFooter()
Project: lucene-solr
File: TestCodecUtil.java
File: TestCodecUtil.java
public void testCheckFooterValidPastFooter() throws Exception { RAMFile file = new RAMFile(); IndexOutput output = new RAMOutputStream(file, true); CodecUtil.writeHeader(output, "FooBar", 5); output.writeString("this is the data"); CodecUtil.writeFooter(output); output.close(); ChecksumIndexInput input = new BufferedChecksumIndexInput(new RAMInputStream("file", file)); CodecUtil.checkHeader(input, "FooBar", 5, 5); assertEquals("this is the data", input.readString()); // bogusly read a byte too far (can happen) input.readByte(); Exception mine = new RuntimeException("fake exception"); RuntimeException expected = expectThrows(RuntimeException.class, () -> { CodecUtil.checkFooter(input, mine); }); assertEquals("fake exception", expected.getMessage()); Throwable suppressed[] = expected.getSuppressed(); assertEquals(1, suppressed.length); assertTrue(suppressed[0].getMessage().contains("checksum status indeterminate")); input.close(); }
2. TestCodecUtil#testCheckFooterInvalid()
Project: lucene-solr
File: TestCodecUtil.java
File: TestCodecUtil.java
public void testCheckFooterInvalid() throws Exception { RAMFile file = new RAMFile(); IndexOutput output = new RAMOutputStream(file, true); CodecUtil.writeHeader(output, "FooBar", 5); output.writeString("this is the data"); output.writeInt(CodecUtil.FOOTER_MAGIC); output.writeInt(0); // write a bogus checksum output.writeLong(1234567); output.close(); ChecksumIndexInput input = new BufferedChecksumIndexInput(new RAMInputStream("file", file)); CodecUtil.checkHeader(input, "FooBar", 5, 5); assertEquals("this is the data", input.readString()); Exception mine = new RuntimeException("fake exception"); RuntimeException expected = expectThrows(RuntimeException.class, () -> { CodecUtil.checkFooter(input, mine); }); assertEquals("fake exception", expected.getMessage()); Throwable suppressed[] = expected.getSuppressed(); assertEquals(1, suppressed.length); assertTrue(suppressed[0].getMessage().contains("checksum failed")); input.close(); }
3. TestCodecUtil#testCheckFooterValidAtFooter()
Project: lucene-solr
File: TestCodecUtil.java
File: TestCodecUtil.java
/** Verifies that checkFooter attaches a "checksum passed" suppressed exception
 *  when the input is positioned exactly at the footer. */
public void testCheckFooterValidAtFooter() throws Exception {
  RAMFile ramFile = new RAMFile();
  IndexOutput out = new RAMOutputStream(ramFile, true);
  CodecUtil.writeHeader(out, "FooBar", 5);
  out.writeString("this is the data");
  CodecUtil.writeFooter(out);
  out.close();

  ChecksumIndexInput in = new BufferedChecksumIndexInput(new RAMInputStream("file", ramFile));
  CodecUtil.checkHeader(in, "FooBar", 5, 5);
  assertEquals("this is the data", in.readString());

  Exception cause = new RuntimeException("fake exception");
  RuntimeException thrown = expectThrows(RuntimeException.class, () -> {
    CodecUtil.checkFooter(in, cause);
  });
  assertEquals("fake exception", thrown.getMessage());
  Throwable[] suppressed = thrown.getSuppressed();
  assertEquals(1, suppressed.length);
  assertTrue(suppressed[0].getMessage().contains("checksum passed"));
  in.close();
}
4. TestCodecUtil#testCheckFooterValid()
Project: lucene-solr
File: TestCodecUtil.java
File: TestCodecUtil.java
/** Verifies that checkFooter attaches a "checksum passed" suppressed exception
 *  on a freshly opened, well-formed file. */
public void testCheckFooterValid() throws Exception {
  RAMFile ramFile = new RAMFile();
  IndexOutput out = new RAMOutputStream(ramFile, true);
  CodecUtil.writeHeader(out, "FooBar", 5);
  out.writeString("this is the data");
  CodecUtil.writeFooter(out);
  out.close();

  ChecksumIndexInput in = new BufferedChecksumIndexInput(new RAMInputStream("file", ramFile));
  Exception cause = new RuntimeException("fake exception");
  RuntimeException thrown = expectThrows(RuntimeException.class, () -> {
    CodecUtil.checkFooter(in, cause);
  });
  assertEquals("fake exception", thrown.getMessage());
  Throwable[] suppressed = thrown.getSuppressed();
  assertEquals(1, suppressed.length);
  assertTrue(suppressed[0].getMessage().contains("checksum passed"));
  in.close();
}
5. SimpleTextTermVectorsReader#readIndex()
Project: lucene-solr
File: SimpleTextTermVectorsReader.java
File: SimpleTextTermVectorsReader.java
// we don't actually write a .tvx-like index, instead we read the // vectors file in entirety up-front and save the offsets // so we can seek to the data later. private void readIndex(int maxDoc) throws IOException { ChecksumIndexInput input = new BufferedChecksumIndexInput(in); offsets = new long[maxDoc]; int upto = 0; while (!scratch.get().equals(END)) { SimpleTextUtil.readLine(input, scratch); if (StringHelper.startsWith(scratch.get(), DOC)) { offsets[upto] = input.getFilePointer(); upto++; } } SimpleTextUtil.checkFooter(input); assert upto == offsets.length; }
6. SimpleTextStoredFieldsReader#readIndex()
Project: lucene-solr
File: SimpleTextStoredFieldsReader.java
File: SimpleTextStoredFieldsReader.java
// we don't actually write a .fdx-like index, instead we read the // stored fields file in entirety up-front and save the offsets // so we can seek to the documents later. private void readIndex(int size) throws IOException { ChecksumIndexInput input = new BufferedChecksumIndexInput(in); offsets = new long[size]; int upto = 0; while (!scratch.get().equals(END)) { SimpleTextUtil.readLine(input, scratch); if (StringHelper.startsWith(scratch.get(), DOC)) { offsets[upto] = input.getFilePointer(); upto++; } } SimpleTextUtil.checkFooter(input); assert upto == offsets.length; }
7. SimpleTextPointsReader#checkIntegrity()
Project: lucene-solr
File: SimpleTextPointsReader.java
File: SimpleTextPointsReader.java
@Override public void checkIntegrity() throws IOException { BytesRefBuilder scratch = new BytesRefBuilder(); IndexInput clone = dataIn.clone(); clone.seek(0); // checksum is fixed-width encoded with 20 bytes, plus 1 byte for newline (the space is included in SimpleTextUtil.CHECKSUM): long footerStartPos = dataIn.length() - (SimpleTextUtil.CHECKSUM.length + 21); ChecksumIndexInput input = new BufferedChecksumIndexInput(clone); while (true) { SimpleTextUtil.readLine(input, scratch); if (input.getFilePointer() >= footerStartPos) { // Make sure we landed at precisely the right location: if (input.getFilePointer() != footerStartPos) { throw new CorruptIndexException("SimpleText failure: footer does not start at expected position current=" + input.getFilePointer() + " vs expected=" + footerStartPos, input); } SimpleTextUtil.checkFooter(input); break; } } }
8. SimpleTextFieldsReader#readFields()
Project: lucene-solr
File: SimpleTextFieldsReader.java
File: SimpleTextFieldsReader.java
/** Scans the terms file line by line, recording the file pointer just after
 *  each FIELD line, keyed by field name; stops and verifies the footer at END. */
private TreeMap<String, Long> readFields(IndexInput in) throws IOException {
  ChecksumIndexInput checksumIn = new BufferedChecksumIndexInput(in);
  BytesRefBuilder lineBuffer = new BytesRefBuilder();
  TreeMap<String, Long> result = new TreeMap<>();
  for (;;) {
    SimpleTextUtil.readLine(checksumIn, lineBuffer);
    if (lineBuffer.get().equals(END)) {
      SimpleTextUtil.checkFooter(checksumIn);
      return result;
    }
    if (StringHelper.startsWith(lineBuffer.get(), FIELD)) {
      String fieldName = new String(lineBuffer.bytes(), FIELD.length, lineBuffer.length() - FIELD.length, StandardCharsets.UTF_8);
      result.put(fieldName, checksumIn.getFilePointer());
    }
  }
}
9. SimpleTextDocValuesReader#checkIntegrity()
Project: lucene-solr
File: SimpleTextDocValuesReader.java
File: SimpleTextDocValuesReader.java
@Override public void checkIntegrity() throws IOException { BytesRefBuilder scratch = new BytesRefBuilder(); IndexInput clone = data.clone(); clone.seek(0); // checksum is fixed-width encoded with 20 bytes, plus 1 byte for newline (the space is included in SimpleTextUtil.CHECKSUM): long footerStartPos = data.length() - (SimpleTextUtil.CHECKSUM.length + 21); ChecksumIndexInput input = new BufferedChecksumIndexInput(clone); while (true) { SimpleTextUtil.readLine(input, scratch); if (input.getFilePointer() >= footerStartPos) { // Make sure we landed at precisely the right location: if (input.getFilePointer() != footerStartPos) { throw new CorruptIndexException("SimpleText failure: footer does not start at expected position current=" + input.getFilePointer() + " vs expected=" + footerStartPos, input); } SimpleTextUtil.checkFooter(input); break; } } }
10. MockRandomPostingsFormat#fieldsProducer()
Project: lucene-solr
File: MockRandomPostingsFormat.java
File: MockRandomPostingsFormat.java
/**
 * Opens the terms dictionary for a segment. Reads the per-segment random seed,
 * then uses it to deterministically pick one of five terms-dictionary
 * implementations. Every construction site closes {@code postingsReader}
 * (and, where applicable, {@code indexReader}) on failure so resources are
 * not leaked.
 */
@Override
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
  // Read the seed that drives all random choices below; the footer checksum is
  // verified before the seed is trusted.
  final String seedFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, SEED_EXT);
  final ChecksumIndexInput in = state.directory.openChecksumInput(seedFileName, state.context);
  CodecUtil.checkIndexHeader(in, "MockRandomSeed", 0, 0, state.segmentInfo.getId(), state.segmentSuffix);
  final long seed = in.readLong();
  CodecUtil.checkFooter(in);
  if (LuceneTestCase.VERBOSE) {
    System.out.println("MockRandomCodec: reading from seg=" + state.segmentInfo.name + " formatID=" + state.segmentSuffix + " seed=" + seed);
  }
  in.close();

  final Random random = new Random(seed);

  int readBufferSize = TestUtil.nextInt(random, 1, 4096);
  if (LuceneTestCase.VERBOSE) {
    System.out.println("MockRandomCodec: readBufferSize=" + readBufferSize);
  }

  PostingsReaderBase postingsReader = new Lucene50PostingsReader(state);

  final FieldsProducer fields;
  // Pick one of five terms-dict implementations; the draw order here must stay
  // in sync with the corresponding writer-side draws.
  final int t1 = random.nextInt(5);
  if (t1 == 0) {
    boolean success = false;
    try {
      fields = new FSTTermsReader(state, postingsReader);
      success = true;
    } finally {
      if (!success) {
        postingsReader.close();
      }
    }
  } else if (t1 == 1) {
    boolean success = false;
    try {
      fields = new FSTOrdTermsReader(state, postingsReader);
      success = true;
    } finally {
      if (!success) {
        postingsReader.close();
      }
    }
  } else if (t1 == 2) {
    // Use BlockTree terms dict
    if (LuceneTestCase.VERBOSE) {
      System.out.println("MockRandomCodec: reading BlockTree terms dict");
    }
    boolean success = false;
    try {
      fields = new BlockTreeTermsReader(postingsReader, state);
      success = true;
    } finally {
      if (!success) {
        postingsReader.close();
      }
    }
  } else if (t1 == 3) {
    if (LuceneTestCase.VERBOSE) {
      System.out.println("MockRandomCodec: reading Block terms dict");
    }
    final TermsIndexReaderBase indexReader;
    boolean success = false;
    try {
      final boolean doFixedGap = random.nextBoolean();
      if (doFixedGap) {
        if (LuceneTestCase.VERBOSE) {
          System.out.println("MockRandomCodec: fixed-gap terms index");
        }
        indexReader = new FixedGapTermsIndexReader(state);
      } else {
        final int n2 = random.nextInt(3);
        // NOTE(review): values drawn and discarded here, presumably to keep
        // this Random's sequence aligned with the writer's draws — confirm.
        if (n2 == 1) {
          random.nextInt();
        } else if (n2 == 2) {
          random.nextLong();
        }
        if (LuceneTestCase.VERBOSE) {
          System.out.println("MockRandomCodec: variable-gap terms index");
        }
        indexReader = new VariableGapTermsIndexReader(state);
      }
      success = true;
    } finally {
      if (!success) {
        postingsReader.close();
      }
    }
    // Second stage: wrap the index reader; on failure close BOTH resources.
    success = false;
    try {
      fields = new BlockTermsReader(indexReader, postingsReader, state);
      success = true;
    } finally {
      if (!success) {
        try {
          postingsReader.close();
        } finally {
          indexReader.close();
        }
      }
    }
  } else if (t1 == 4) {
    // Use OrdsBlockTree terms dict
    if (LuceneTestCase.VERBOSE) {
      System.out.println("MockRandomCodec: reading OrdsBlockTree terms dict");
    }
    boolean success = false;
    try {
      fields = new OrdsBlockTreeTermsReader(postingsReader, state);
      success = true;
    } finally {
      if (!success) {
        postingsReader.close();
      }
    }
  } else {
    // BUG!
    throw new AssertionError();
  }

  return fields;
}
11. CodecUtil#checksumEntireFile()
Project: lucene-solr
File: CodecUtil.java
File: CodecUtil.java
/**
 * Clones the provided input, reads all bytes from the file, and calls {@link #checkFooter}.
 * <p>
 * Note that this method may be slow, as it must process the entire file.
 * If you just need to extract the checksum value, call {@link #retrieveChecksum}.
 */
public static long checksumEntireFile(IndexInput input) throws IOException {
  IndexInput clone = input.clone();
  clone.seek(0);
  ChecksumIndexInput checksumIn = new BufferedChecksumIndexInput(clone);
  assert checksumIn.getFilePointer() == 0;
  // seeking forward on a ChecksumIndexInput consumes (and checksums) every
  // byte up to the footer
  checksumIn.seek(checksumIn.length() - footerLength());
  return checkFooter(checksumIn);
}
12. Lucene60FieldInfosFormat#read()
Project: lucene-solr
File: Lucene60FieldInfosFormat.java
File: Lucene60FieldInfosFormat.java
/**
 * Reads the .fnm field-infos file for a segment. Decode errors are captured in
 * {@code priorE} and handed to {@link CodecUtil#checkFooter} so checksum
 * information can be attached before the error propagates.
 */
@Override
public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, IOContext context) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, EXTENSION);
  try (ChecksumIndexInput input = directory.openChecksumInput(fileName, context)) {
    Throwable priorE = null;
    FieldInfo infos[] = null;
    try {
      CodecUtil.checkIndexHeader(input, Lucene60FieldInfosFormat.CODEC_NAME, Lucene60FieldInfosFormat.FORMAT_START, Lucene60FieldInfosFormat.FORMAT_CURRENT, segmentInfo.getId(), segmentSuffix);
      //read in the size
      final int size = input.readVInt();
      infos = new FieldInfo[size];
      // previous field's attribute map, we share when possible:
      Map<String, String> lastAttributes = Collections.emptyMap();
      for (int i = 0; i < size; i++) {
        String name = input.readString();
        final int fieldNumber = input.readVInt();
        if (fieldNumber < 0) {
          throw new CorruptIndexException("invalid field number for field: " + name + ", fieldNumber=" + fieldNumber, input);
        }
        // boolean flags are packed into a single byte
        byte bits = input.readByte();
        boolean storeTermVector = (bits & STORE_TERMVECTOR) != 0;
        boolean omitNorms = (bits & OMIT_NORMS) != 0;
        boolean storePayloads = (bits & STORE_PAYLOADS) != 0;
        final IndexOptions indexOptions = getIndexOptions(input, input.readByte());
        // DV Types are packed in one byte
        final DocValuesType docValuesType = getDocValuesType(input, input.readByte());
        final long dvGen = input.readLong();
        Map<String, String> attributes = input.readMapOfStrings();
        // just use the last field's map if its the same
        if (attributes.equals(lastAttributes)) {
          attributes = lastAttributes;
        }
        lastAttributes = attributes;
        // points metadata: byte width is only written when dimension count != 0
        int pointDimensionCount = input.readVInt();
        int pointNumBytes;
        if (pointDimensionCount != 0) {
          pointNumBytes = input.readVInt();
        } else {
          pointNumBytes = 0;
        }
        try {
          infos[i] = new FieldInfo(name, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValuesType, dvGen, attributes, pointDimensionCount, pointNumBytes);
          infos[i].checkConsistency();
        } catch (IllegalStateException e) {
          throw new CorruptIndexException("invalid fieldinfo for field: " + name + ", fieldNumber=" + fieldNumber, input, e);
        }
      }
    } catch (Throwable exception) {
      priorE = exception;
    } finally {
      // rethrows priorE (if any) with checksum status attached
      CodecUtil.checkFooter(input, priorE);
    }
    return new FieldInfos(infos);
  }
}
13. Lucene50LiveDocsFormat#readLiveDocs()
Project: lucene-solr
File: Lucene50LiveDocsFormat.java
File: Lucene50LiveDocsFormat.java
/**
 * Reads the live-docs bitset for a segment commit. The bitset is stored as raw
 * longs; the set-bit deficit is cross-checked against the recorded delete count.
 */
@Override
public Bits readLiveDocs(Directory dir, SegmentCommitInfo info, IOContext context) throws IOException {
  long gen = info.getDelGen();
  String name = IndexFileNames.fileNameFromGeneration(info.info.name, EXTENSION, gen);
  final int length = info.info.maxDoc();
  try (ChecksumIndexInput input = dir.openChecksumInput(name, context)) {
    Throwable priorE = null;
    try {
      CodecUtil.checkIndexHeader(input, CODEC_NAME, VERSION_START, VERSION_CURRENT, info.info.getId(), Long.toString(gen, Character.MAX_RADIX));
      // read the fixed-width long words backing the bitset
      long data[] = new long[FixedBitSet.bits2words(length)];
      for (int i = 0; i < data.length; i++) {
        data[i] = input.readLong();
      }
      FixedBitSet fbs = new FixedBitSet(data, length);
      // sanity check: number of cleared bits must equal the recorded delete count
      if (fbs.length() - fbs.cardinality() != info.getDelCount()) {
        throw new CorruptIndexException("bits.deleted=" + (fbs.length() - fbs.cardinality()) + " info.delcount=" + info.getDelCount(), input);
      }
      return fbs;
    } catch (Throwable exception) {
      priorE = exception;
    } finally {
      // rethrows priorE (if any) with checksum status attached
      CodecUtil.checkFooter(input, priorE);
    }
  }
  // unreachable: the try either returns or checkFooter throws
  throw new AssertionError();
}
14. Lucene50FieldInfosFormat#read()
Project: lucene-solr
File: Lucene50FieldInfosFormat.java
File: Lucene50FieldInfosFormat.java
/**
 * Reads the .fnm field-infos file for a segment (Lucene50 layout). Decode
 * errors are captured in {@code priorE} and handed to
 * {@link CodecUtil#checkFooter} so checksum information can be attached.
 */
@Override
public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, IOContext context) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, EXTENSION);
  try (ChecksumIndexInput input = directory.openChecksumInput(fileName, context)) {
    Throwable priorE = null;
    FieldInfo infos[] = null;
    try {
      // the header also yields the on-disk format version, used below to pick
      // the attribute-map encoding
      int format = CodecUtil.checkIndexHeader(input, Lucene50FieldInfosFormat.CODEC_NAME, Lucene50FieldInfosFormat.FORMAT_START, Lucene50FieldInfosFormat.FORMAT_CURRENT, segmentInfo.getId(), segmentSuffix);
      //read in the size
      final int size = input.readVInt();
      infos = new FieldInfo[size];
      // previous field's attribute map, we share when possible:
      Map<String, String> lastAttributes = Collections.emptyMap();
      for (int i = 0; i < size; i++) {
        String name = input.readString();
        final int fieldNumber = input.readVInt();
        if (fieldNumber < 0) {
          throw new CorruptIndexException("invalid field number for field: " + name + ", fieldNumber=" + fieldNumber, input);
        }
        // boolean flags are packed into a single byte
        byte bits = input.readByte();
        boolean storeTermVector = (bits & STORE_TERMVECTOR) != 0;
        boolean omitNorms = (bits & OMIT_NORMS) != 0;
        boolean storePayloads = (bits & STORE_PAYLOADS) != 0;
        final IndexOptions indexOptions = getIndexOptions(input, input.readByte());
        // DV Types are packed in one byte
        final DocValuesType docValuesType = getDocValuesType(input, input.readByte());
        final long dvGen = input.readLong();
        Map<String, String> attributes;
        if (format >= FORMAT_SAFE_MAPS) {
          attributes = input.readMapOfStrings();
        } else {
          attributes = Collections.unmodifiableMap(input.readStringStringMap());
        }
        // just use the last field's map if its the same
        if (attributes.equals(lastAttributes)) {
          attributes = lastAttributes;
        }
        lastAttributes = attributes;
        try {
          infos[i] = new FieldInfo(name, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValuesType, dvGen, attributes, 0, 0);
          infos[i].checkConsistency();
        } catch (IllegalStateException e) {
          throw new CorruptIndexException("invalid fieldinfo for field: " + name + ", fieldNumber=" + fieldNumber, input, e);
        }
      }
    } catch (Throwable exception) {
      priorE = exception;
    } finally {
      // rethrows priorE (if any) with checksum status attached
      CodecUtil.checkFooter(input, priorE);
    }
    return new FieldInfos(infos);
  }
}
15. Lucene50CompoundReader#readEntries()
Project: lucene-solr
File: Lucene50CompoundReader.java
File: Lucene50CompoundReader.java
/** Helper method that reads CFS entries from an input stream.
 *  Each entry records a file id plus its offset and length; duplicate ids are
 *  treated as corruption. Decode errors are routed through
 *  {@link CodecUtil#checkFooter} so checksum information is attached. */
private Map<String, FileEntry> readEntries(byte[] segmentID, Directory dir, String entriesFileName) throws IOException {
  Map<String, FileEntry> mapping = null;
  try (ChecksumIndexInput entriesStream = dir.openChecksumInput(entriesFileName, IOContext.READONCE)) {
    Throwable priorE = null;
    try {
      // side effect: records the on-disk format version in the enclosing field
      version = CodecUtil.checkIndexHeader(entriesStream, Lucene50CompoundFormat.ENTRY_CODEC, Lucene50CompoundFormat.VERSION_START, Lucene50CompoundFormat.VERSION_CURRENT, segmentID, "");
      final int numEntries = entriesStream.readVInt();
      mapping = new HashMap<>(numEntries);
      for (int i = 0; i < numEntries; i++) {
        final FileEntry fileEntry = new FileEntry();
        final String id = entriesStream.readString();
        FileEntry previous = mapping.put(id, fileEntry);
        if (previous != null) {
          throw new CorruptIndexException("Duplicate cfs entry id=" + id + " in CFS ", entriesStream);
        }
        fileEntry.offset = entriesStream.readLong();
        fileEntry.length = entriesStream.readLong();
      }
    } catch (Throwable exception) {
      priorE = exception;
    } finally {
      // rethrows priorE (if any) with checksum status attached
      CodecUtil.checkFooter(entriesStream, priorE);
    }
  }
  return Collections.unmodifiableMap(mapping);
}
16. SimpleTextLiveDocsFormat#readLiveDocs()
Project: lucene-solr
File: SimpleTextLiveDocsFormat.java
File: SimpleTextLiveDocsFormat.java
/**
 * Reads the SimpleText live-docs file: a SIZE line, then one DOC line per
 * live document, terminated by END and a checksummed footer. Uses the
 * success-flag pattern so the input is closed quietly on failure.
 */
@Override
public Bits readLiveDocs(Directory dir, SegmentCommitInfo info, IOContext context) throws IOException {
  assert info.hasDeletions();
  BytesRefBuilder scratch = new BytesRefBuilder();
  CharsRefBuilder scratchUTF16 = new CharsRefBuilder();
  String fileName = IndexFileNames.fileNameFromGeneration(info.info.name, LIVEDOCS_EXTENSION, info.getDelGen());
  ChecksumIndexInput in = null;
  boolean success = false;
  try {
    in = dir.openChecksumInput(fileName, context);
    SimpleTextUtil.readLine(in, scratch);
    assert StringHelper.startsWith(scratch.get(), SIZE);
    int size = parseIntAt(scratch.get(), SIZE.length, scratchUTF16);
    BitSet bits = new BitSet(size);
    SimpleTextUtil.readLine(in, scratch);
    // each DOC line names one live docid; set its bit
    while (!scratch.get().equals(END)) {
      assert StringHelper.startsWith(scratch.get(), DOC);
      int docid = parseIntAt(scratch.get(), DOC.length, scratchUTF16);
      bits.set(docid);
      SimpleTextUtil.readLine(in, scratch);
    }
    SimpleTextUtil.checkFooter(in);
    success = true;
    return new SimpleTextBits(bits, size);
  } finally {
    // close normally on success; suppress secondary exceptions on failure
    if (success) {
      IOUtils.close(in);
    } else {
      IOUtils.closeWhileHandlingException(in);
    }
  }
}
17. SimpleTextFieldInfosFormat#read()
Project: lucene-solr
File: SimpleTextFieldInfosFormat.java
File: SimpleTextFieldInfosFormat.java
/**
 * Reads the SimpleText field-infos file: a NUMFIELDS line followed by a fixed
 * sequence of key/value lines per field. The line order is part of the format;
 * each assert guards the expected prefix. Uses the success-flag pattern so the
 * input is closed quietly on failure.
 */
@Override
public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, IOContext iocontext) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, FIELD_INFOS_EXTENSION);
  ChecksumIndexInput input = directory.openChecksumInput(fileName, iocontext);
  BytesRefBuilder scratch = new BytesRefBuilder();
  boolean success = false;
  try {
    SimpleTextUtil.readLine(input, scratch);
    assert StringHelper.startsWith(scratch.get(), NUMFIELDS);
    final int size = Integer.parseInt(readString(NUMFIELDS.length, scratch));
    FieldInfo infos[] = new FieldInfo[size];
    for (int i = 0; i < size; i++) {
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), NAME);
      String name = readString(NAME.length, scratch);
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), NUMBER);
      int fieldNumber = Integer.parseInt(readString(NUMBER.length, scratch));
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), INDEXOPTIONS);
      String s = readString(INDEXOPTIONS.length, scratch);
      final IndexOptions indexOptions = IndexOptions.valueOf(s);
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), STORETV);
      boolean storeTermVector = Boolean.parseBoolean(readString(STORETV.length, scratch));
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), PAYLOADS);
      boolean storePayloads = Boolean.parseBoolean(readString(PAYLOADS.length, scratch));
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), NORMS);
      // the file stores "has norms"; FieldInfo wants the inverse
      boolean omitNorms = !Boolean.parseBoolean(readString(NORMS.length, scratch));
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), DOCVALUES);
      String dvType = readString(DOCVALUES.length, scratch);
      final DocValuesType docValuesType = docValuesType(dvType);
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), DOCVALUES_GEN);
      final long dvGen = Long.parseLong(readString(DOCVALUES_GEN.length, scratch));
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), NUM_ATTS);
      int numAtts = Integer.parseInt(readString(NUM_ATTS.length, scratch));
      Map<String, String> atts = new HashMap<>();
      // attributes are stored as alternating key/value lines
      for (int j = 0; j < numAtts; j++) {
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch.get(), ATT_KEY);
        String key = readString(ATT_KEY.length, scratch);
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch.get(), ATT_VALUE);
        String value = readString(ATT_VALUE.length, scratch);
        atts.put(key, value);
      }
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), DIM_COUNT);
      int dimensionalCount = Integer.parseInt(readString(DIM_COUNT.length, scratch));
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch.get(), DIM_NUM_BYTES);
      int dimensionalNumBytes = Integer.parseInt(readString(DIM_NUM_BYTES.length, scratch));
      infos[i] = new FieldInfo(name, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValuesType, dvGen, Collections.unmodifiableMap(atts), dimensionalCount, dimensionalNumBytes);
    }
    SimpleTextUtil.checkFooter(input);
    FieldInfos fieldInfos = new FieldInfos(infos);
    success = true;
    return fieldInfos;
  } finally {
    // close normally on success; suppress secondary exceptions on failure
    if (success) {
      input.close();
    } else {
      IOUtils.closeWhileHandlingException(input);
    }
  }
}
18. MemoryPostingsFormat#fieldsProducer()
Project: lucene-solr
File: MemoryPostingsFormat.java
File: MemoryPostingsFormat.java
/**
 * Loads all per-field terms data into RAM up front (a termCount of 0 marks the
 * end of the field list), then returns a {@link FieldsProducer} view over the
 * in-memory map. The file is fully checksummed during this load.
 */
@Override
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, EXTENSION);
  final SortedMap<String, TermsReader> fields = new TreeMap<>();

  try (ChecksumIndexInput in = state.directory.openChecksumInput(fileName, IOContext.READONCE)) {
    Throwable priorE = null;
    try {
      CodecUtil.checkIndexHeader(in, CODEC_NAME, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
      while (true) {
        final int termCount = in.readVInt();
        if (termCount == 0) {
          // sentinel: no more fields
          break;
        }
        final TermsReader termsReader = new TermsReader(state.fieldInfos, in, termCount);
        // System.out.println("load field=" + termsReader.field.name);
        fields.put(termsReader.field.name, termsReader);
      }
    } catch (Throwable exception) {
      priorE = exception;
    } finally {
      // rethrows priorE (if any) with checksum status attached
      CodecUtil.checkFooter(in, priorE);
    }
  }

  return new FieldsProducer() {
    @Override
    public Iterator<String> iterator() {
      return Collections.unmodifiableSet(fields.keySet()).iterator();
    }

    @Override
    public Terms terms(String field) {
      return fields.get(field);
    }

    @Override
    public int size() {
      return fields.size();
    }

    @Override
    public void close() {
      // Drop ref to FST:
      for (TermsReader termsReader : fields.values()) {
        termsReader.fst = null;
      }
    }

    @Override
    public long ramBytesUsed() {
      // sum of key characters plus each reader's own accounting
      long sizeInBytes = 0;
      for (Map.Entry<String, TermsReader> entry : fields.entrySet()) {
        sizeInBytes += (entry.getKey().length() * Character.BYTES);
        sizeInBytes += entry.getValue().ramBytesUsed();
      }
      return sizeInBytes;
    }

    @Override
    public Collection<Accountable> getChildResources() {
      return Accountables.namedAccountables("field", fields);
    }

    @Override
    public String toString() {
      return "MemoryPostings(fields=" + fields.size() + ")";
    }

    @Override
    public void checkIntegrity() throws IOException {
      // no-op: the whole file was checksummed when it was loaded above
    }
  };
}
19. Lucene50RWSegmentInfoFormat#read()
Project: lucene-solr
File: Lucene50RWSegmentInfoFormat.java
File: Lucene50RWSegmentInfoFormat.java
/**
 * Reads the .si segment-info file (Lucene50 layout). Decode errors are
 * captured in {@code priorE} and handed to {@link CodecUtil#checkFooter} so
 * checksum information can be attached before the error propagates.
 */
@Override
public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOContext context) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segment, "", Lucene50SegmentInfoFormat.SI_EXTENSION);
  try (ChecksumIndexInput input = dir.openChecksumInput(fileName, context)) {
    Throwable priorE = null;
    SegmentInfo si = null;
    try {
      // the header yields the on-disk format version, used below to pick
      // the map/set encoding
      int format = CodecUtil.checkIndexHeader(input, Lucene50SegmentInfoFormat.CODEC_NAME, Lucene50SegmentInfoFormat.VERSION_START, Lucene50SegmentInfoFormat.VERSION_CURRENT, segmentID, "");
      final Version version = Version.fromBits(input.readInt(), input.readInt(), input.readInt());
      final int docCount = input.readInt();
      if (docCount < 0) {
        throw new CorruptIndexException("invalid docCount: " + docCount, input);
      }
      final boolean isCompoundFile = input.readByte() == SegmentInfo.YES;
      final Map<String, String> diagnostics;
      final Set<String> files;
      final Map<String, String> attributes;
      if (format >= VERSION_SAFE_MAPS) {
        diagnostics = input.readMapOfStrings();
        files = input.readSetOfStrings();
        attributes = input.readMapOfStrings();
      } else {
        // older format: wrap the mutable legacy collections
        diagnostics = Collections.unmodifiableMap(input.readStringStringMap());
        files = Collections.unmodifiableSet(input.readStringSet());
        attributes = Collections.unmodifiableMap(input.readStringStringMap());
      }
      si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, null);
      si.setFiles(files);
    } catch (Throwable exception) {
      priorE = exception;
    } finally {
      // rethrows priorE (if any) with checksum status attached
      CodecUtil.checkFooter(input, priorE);
    }
    return si;
  }
}
20. Lucene50SegmentInfoFormat#read()
Project: lucene-solr
File: Lucene50SegmentInfoFormat.java
File: Lucene50SegmentInfoFormat.java
/**
 * Reads the .si segment-info file (Lucene50 layout). Decode errors are
 * captured in {@code priorE} and handed to {@link CodecUtil#checkFooter} so
 * checksum information can be attached before the error propagates.
 */
@Override
public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOContext context) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segment, "", Lucene50SegmentInfoFormat.SI_EXTENSION);
  try (ChecksumIndexInput input = dir.openChecksumInput(fileName, context)) {
    Throwable priorE = null;
    SegmentInfo si = null;
    try {
      // the header yields the on-disk format version, used below to pick
      // the map/set encoding
      int format = CodecUtil.checkIndexHeader(input, Lucene50SegmentInfoFormat.CODEC_NAME, Lucene50SegmentInfoFormat.VERSION_START, Lucene50SegmentInfoFormat.VERSION_CURRENT, segmentID, "");
      final Version version = Version.fromBits(input.readInt(), input.readInt(), input.readInt());
      final int docCount = input.readInt();
      if (docCount < 0) {
        throw new CorruptIndexException("invalid docCount: " + docCount, input);
      }
      final boolean isCompoundFile = input.readByte() == SegmentInfo.YES;
      final Map<String, String> diagnostics;
      final Set<String> files;
      final Map<String, String> attributes;
      if (format >= VERSION_SAFE_MAPS) {
        diagnostics = input.readMapOfStrings();
        files = input.readSetOfStrings();
        attributes = input.readMapOfStrings();
      } else {
        // older format: wrap the mutable legacy collections
        diagnostics = Collections.unmodifiableMap(input.readStringStringMap());
        files = Collections.unmodifiableSet(input.readStringSet());
        attributes = Collections.unmodifiableMap(input.readStringStringMap());
      }
      si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, null);
      si.setFiles(files);
    } catch (Throwable exception) {
      priorE = exception;
    } finally {
      // rethrows priorE (if any) with checksum status attached
      CodecUtil.checkFooter(input, priorE);
    }
    return si;
  }
}
21. OfflineSorter#verifyChecksum()
Project: lucene-solr
File: OfflineSorter.java
File: OfflineSorter.java
/** Called on exception, to check whether the checksum is also corrupt in this source, and add that
 *  information (checksum matched or didn't) as a suppressed exception. */
private void verifyChecksum(Throwable priorException, ByteSequencesReader reader) throws IOException {
  // try-with-resources guarantees the input is closed even if checkFooter throws
  try (ChecksumIndexInput input = dir.openChecksumInput(reader.name, IOContext.READONCE)) {
    CodecUtil.checkFooter(input, priorException);
  }
}
22. SegmentInfos#readCommit()
Project: lucene-solr
File: SegmentInfos.java
File: SegmentInfos.java
/** * Read a particular segmentFileName. Note that this may * throw an IOException if a commit is in process. * * @param directory -- directory containing the segments file * @param segmentFileName -- segment file to load * @throws CorruptIndexException if the index is corrupt * @throws IOException if there is a low-level IO error */ public static final SegmentInfos readCommit(Directory directory, String segmentFileName) throws IOException { long generation = generationFromSegmentsFileName(segmentFileName); //System.out.println(Thread.currentThread() + ": SegmentInfos.readCommit " + segmentFileName); try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName, IOContext.READ)) { return readCommit(directory, input, generation); } }
23. Lucene62SegmentInfoFormat#read()
Project: lucene-solr
File: Lucene62SegmentInfoFormat.java
File: Lucene62SegmentInfoFormat.java
/**
 * Reads the .si segment-info file (Lucene62 layout), including the optional
 * index-sort specification. Decode errors are captured in {@code priorE} and
 * handed to {@link CodecUtil#checkFooter} so checksum information is attached.
 */
@Override
public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOContext context) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segment, "", Lucene62SegmentInfoFormat.SI_EXTENSION);
  try (ChecksumIndexInput input = dir.openChecksumInput(fileName, context)) {
    Throwable priorE = null;
    SegmentInfo si = null;
    try {
      int format = CodecUtil.checkIndexHeader(input, Lucene62SegmentInfoFormat.CODEC_NAME, Lucene62SegmentInfoFormat.VERSION_START, Lucene62SegmentInfoFormat.VERSION_CURRENT, segmentID, "");
      final Version version = Version.fromBits(input.readInt(), input.readInt(), input.readInt());
      final int docCount = input.readInt();
      if (docCount < 0) {
        throw new CorruptIndexException("invalid docCount: " + docCount, input);
      }
      final boolean isCompoundFile = input.readByte() == SegmentInfo.YES;
      final Map<String, String> diagnostics = input.readMapOfStrings();
      final Set<String> files = input.readSetOfStrings();
      final Map<String, String> attributes = input.readMapOfStrings();
      // optional index sort: 0 fields means "no sort"; negative is corruption
      int numSortFields = input.readVInt();
      Sort indexSort;
      if (numSortFields > 0) {
        SortField[] sortFields = new SortField[numSortFields];
        for (int i = 0; i < numSortFields; i++) {
          String fieldName = input.readString();
          // sort type is encoded as a small integer id
          int sortTypeID = input.readVInt();
          SortField.Type sortType;
          switch (sortTypeID) {
            case 0:
              sortType = SortField.Type.STRING;
              break;
            case 1:
              sortType = SortField.Type.LONG;
              break;
            case 2:
              sortType = SortField.Type.INT;
              break;
            case 3:
              sortType = SortField.Type.DOUBLE;
              break;
            case 4:
              sortType = SortField.Type.FLOAT;
              break;
            default:
              throw new CorruptIndexException("invalid index sort field type ID: " + sortTypeID, input);
          }
          // reverse flag: 0 = reverse, 1 = forward, anything else is corruption
          byte b = input.readByte();
          boolean reverse;
          if (b == 0) {
            reverse = true;
          } else if (b == 1) {
            reverse = false;
          } else {
            throw new CorruptIndexException("invalid index sort reverse: " + b, input);
          }
          sortFields[i] = new SortField(fieldName, sortType, reverse);
          // optional missing value: flag byte 0 means "none"; otherwise the
          // encoding depends on the sort type
          Object missingValue;
          b = input.readByte();
          if (b == 0) {
            missingValue = null;
          } else {
            switch (sortType) {
              case STRING:
                if (b == 1) {
                  missingValue = SortField.STRING_LAST;
                } else if (b == 2) {
                  missingValue = SortField.STRING_FIRST;
                } else {
                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
                }
                break;
              case LONG:
                if (b != 1) {
                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
                }
                missingValue = input.readLong();
                break;
              case INT:
                if (b != 1) {
                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
                }
                missingValue = input.readInt();
                break;
              case DOUBLE:
                if (b != 1) {
                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
                }
                missingValue = Double.longBitsToDouble(input.readLong());
                break;
              case FLOAT:
                if (b != 1) {
                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
                }
                missingValue = Float.intBitsToFloat(input.readInt());
                break;
              default:
                throw new AssertionError("unhandled sortType=" + sortType);
            }
          }
          if (missingValue != null) {
            sortFields[i].setMissingValue(missingValue);
          }
        }
        indexSort = new Sort(sortFields);
      } else if (numSortFields < 0) {
        throw new CorruptIndexException("invalid index sort field count: " + numSortFields, input);
      } else {
        indexSort = null;
      }
      si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, indexSort);
      si.setFiles(files);
    } catch (Throwable exception) {
      priorE = exception;
    } finally {
      // rethrows priorE (if any) with checksum status attached
      CodecUtil.checkFooter(input, priorE);
    }
    return si;
  }
}