Here are examples of the Java API class org.apache.lucene.store.IndexOutput, taken from open-source projects.
1. TestCodecUtil#testReadBogusCRC()
Project: lucene-solr
File: TestCodecUtil.java
public void testReadBogusCRC() throws Exception { RAMFile file = new RAMFile(); IndexOutput output = new RAMOutputStream(file, false); // bad output.writeLong(-1L); // bad output.writeLong(1L << 32); // bad output.writeLong(-(1L << 32)); // ok output.writeLong((1L << 32) - 1); output.close(); IndexInput input = new RAMInputStream("file", file); // read 3 bogus values for (int i = 0; i < 3; i++) { expectThrows(CorruptIndexException.class, () -> { CodecUtil.readCRC(input); }); } // good value CodecUtil.readCRC(input); }
2. TestCodecUtil#testCheckFooterInvalid()
Project: lucene-solr
File: TestCodecUtil.java
public void testCheckFooterInvalid() throws Exception { RAMFile file = new RAMFile(); IndexOutput output = new RAMOutputStream(file, true); CodecUtil.writeHeader(output, "FooBar", 5); output.writeString("this is the data"); output.writeInt(CodecUtil.FOOTER_MAGIC); output.writeInt(0); // write a bogus checksum output.writeLong(1234567); output.close(); ChecksumIndexInput input = new BufferedChecksumIndexInput(new RAMInputStream("file", file)); CodecUtil.checkHeader(input, "FooBar", 5, 5); assertEquals("this is the data", input.readString()); Exception mine = new RuntimeException("fake exception"); RuntimeException expected = expectThrows(RuntimeException.class, () -> { CodecUtil.checkFooter(input, mine); }); assertEquals("fake exception", expected.getMessage()); Throwable suppressed[] = expected.getSuppressed(); assertEquals(1, suppressed.length); assertTrue(suppressed[0].getMessage().contains("checksum failed")); input.close(); }
3. HdfsDirectoryTest#createFile()
Project: lucene-solr
File: HdfsDirectoryTest.java
// Writes an identical sequence of random byte chunks to both directories
// so the resulting files can later be compared byte-for-byte.
private void createFile(String name, Directory fsDir, HdfsDirectory hdfs) throws IOException {
  int writes = random.nextInt(MAX_NUMBER_OF_WRITES);
  int fileLength = random.nextInt(MAX_FILE_SIZE - MIN_FILE_SIZE) + MIN_FILE_SIZE;
  IndexOutput fsOutput = fsDir.createOutput(name, new IOContext());
  IndexOutput hdfsOutput = hdfs.createOutput(name, new IOContext());
  for (int w = 0; w < writes; w++) {
    int bufSize = random.nextInt(Math.min(MAX_BUFFER_SIZE - MIN_BUFFER_SIZE, fileLength)) + MIN_BUFFER_SIZE;
    byte[] buf = new byte[bufSize];
    random.nextBytes(buf);
    // write a random slice of the buffer to both outputs
    int offset = random.nextInt(buf.length);
    int length = random.nextInt(buf.length - offset);
    fsOutput.writeBytes(buf, offset, length);
    hdfsOutput.writeBytes(buf, offset, length);
  }
  fsOutput.close();
  hdfsOutput.close();
}
4. BlockDirectoryTest#createFile()
Project: lucene-solr
File: BlockDirectoryTest.java
// Writes an identical sequence of random byte chunks to both directories
// so the resulting files can later be compared byte-for-byte.
private void createFile(String name, Directory fsDir, Directory hdfs) throws IOException {
  int writes = random.nextInt(MAX_NUMBER_OF_WRITES);
  int fileLength = random.nextInt(MAX_FILE_SIZE - MIN_FILE_SIZE) + MIN_FILE_SIZE;
  IndexOutput fsOutput = fsDir.createOutput(name, IOContext.DEFAULT);
  IndexOutput hdfsOutput = hdfs.createOutput(name, IOContext.DEFAULT);
  for (int w = 0; w < writes; w++) {
    int bufSize = random.nextInt(Math.min(MAX_BUFFER_SIZE - MIN_BUFFER_SIZE, fileLength)) + MIN_BUFFER_SIZE;
    byte[] buf = new byte[bufSize];
    random.nextBytes(buf);
    // write a random slice of the buffer to both outputs
    int offset = random.nextInt(buf.length);
    int length = random.nextInt(buf.length - offset);
    fsOutput.writeBytes(buf, offset, length);
    hdfsOutput.writeBytes(buf, offset, length);
  }
  fsOutput.close();
  hdfsOutput.close();
}
5. HdfsDirectoryTest#testRename()
Project: lucene-solr
File: HdfsDirectoryTest.java
// Round-trips a small file through rename: write, rename, read back,
// then delete, asserting existence at each step.
public void testRename() throws IOException {
  // start from an empty directory
  for (String existing : directory.listAll()) {
    directory.deleteFile(existing);
  }
  IndexOutput output = directory.createOutput("testing.test", new IOContext());
  output.writeInt(12345);
  output.close();
  directory.rename("testing.test", "testing.test.renamed");
  assertFalse(slowFileExists(directory, "testing.test"));
  assertTrue(slowFileExists(directory, "testing.test.renamed"));
  IndexInput input = directory.openInput("testing.test.renamed", new IOContext());
  assertEquals(12345, input.readInt());
  // the int should be the entire file content
  assertEquals(input.getFilePointer(), input.length());
  input.close();
  directory.deleteFile("testing.test.renamed");
  assertFalse(slowFileExists(directory, "testing.test.renamed"));
}
6. BaseCompoundFormatTestCase#testMakeLockDisabled()
Project: lucene-solr
File: BaseCompoundFormatTestCase.java
// test that cfs reader is read-only public void testMakeLockDisabled() throws IOException { final String testfile = "_123.test"; Directory dir = newDirectory(); IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT); out.writeInt(3); out.close(); SegmentInfo si = newSegmentInfo(dir, "_123"); si.setFiles(Collections.emptyList()); si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT); Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT); expectThrows(UnsupportedOperationException.class, () -> { cfs.obtainLock("foobar"); }); cfs.close(); dir.close(); }
7. BaseCompoundFormatTestCase#testSyncDisabled()
Project: lucene-solr
File: BaseCompoundFormatTestCase.java
// test that cfs reader is read-only public void testSyncDisabled() throws IOException { final String testfile = "_123.test"; Directory dir = newDirectory(); IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT); out.writeInt(3); out.close(); SegmentInfo si = newSegmentInfo(dir, "_123"); si.setFiles(Collections.emptyList()); si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT); Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT); expectThrows(UnsupportedOperationException.class, () -> { cfs.sync(Collections.singleton(testfile)); }); cfs.close(); dir.close(); }
8. BaseCompoundFormatTestCase#testRenameFileDisabled()
Project: lucene-solr
File: BaseCompoundFormatTestCase.java
// test that cfs reader is read-only public void testRenameFileDisabled() throws IOException { final String testfile = "_123.test"; Directory dir = newDirectory(); IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT); out.writeInt(3); out.close(); SegmentInfo si = newSegmentInfo(dir, "_123"); si.setFiles(Collections.emptyList()); si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT); Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT); expectThrows(UnsupportedOperationException.class, () -> { cfs.rename(testfile, "bogus"); }); cfs.close(); dir.close(); }
9. BaseCompoundFormatTestCase#testDeleteFileDisabled()
Project: lucene-solr
File: BaseCompoundFormatTestCase.java
// test that cfs reader is read-only public void testDeleteFileDisabled() throws IOException { final String testfile = "_123.test"; Directory dir = newDirectory(); IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT); out.writeInt(3); out.close(); SegmentInfo si = newSegmentInfo(dir, "_123"); si.setFiles(Collections.emptyList()); si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT); Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT); expectThrows(UnsupportedOperationException.class, () -> { cfs.deleteFile(testfile); }); cfs.close(); dir.close(); }
10. TestCodecUtil#testSegmentHeaderLength()
Project: lucene-solr
File: TestCodecUtil.java
// indexHeaderLength() must match the number of bytes writeIndexHeader() wrote:
// seeking past the header lands exactly on the payload.
public void testSegmentHeaderLength() throws Exception {
  RAMFile file = new RAMFile();
  IndexOutput out = new RAMOutputStream(file, true);
  CodecUtil.writeIndexHeader(out, "FooBar", 5, StringHelper.randomId(), "xyz");
  out.writeString("this is the data");
  out.close();
  IndexInput in = new RAMInputStream("file", file);
  in.seek(CodecUtil.indexHeaderLength("FooBar", "xyz"));
  assertEquals("this is the data", in.readString());
  in.close();
}
11. TestCodecUtil#testCheckFooterValidPastFooter()
Project: lucene-solr
File: TestCodecUtil.java
public void testCheckFooterValidPastFooter() throws Exception { RAMFile file = new RAMFile(); IndexOutput output = new RAMOutputStream(file, true); CodecUtil.writeHeader(output, "FooBar", 5); output.writeString("this is the data"); CodecUtil.writeFooter(output); output.close(); ChecksumIndexInput input = new BufferedChecksumIndexInput(new RAMInputStream("file", file)); CodecUtil.checkHeader(input, "FooBar", 5, 5); assertEquals("this is the data", input.readString()); // bogusly read a byte too far (can happen) input.readByte(); Exception mine = new RuntimeException("fake exception"); RuntimeException expected = expectThrows(RuntimeException.class, () -> { CodecUtil.checkFooter(input, mine); }); assertEquals("fake exception", expected.getMessage()); Throwable suppressed[] = expected.getSuppressed(); assertEquals(1, suppressed.length); assertTrue(suppressed[0].getMessage().contains("checksum status indeterminate")); input.close(); }
12. TestCodecUtil#testCheckFooterValidAtFooter()
Project: lucene-solr
File: TestCodecUtil.java
// checkFooter() with a pending exception while positioned exactly at the
// footer: the checksum verifies and is reported as passed (suppressed).
public void testCheckFooterValidAtFooter() throws Exception {
  RAMFile file = new RAMFile();
  IndexOutput out = new RAMOutputStream(file, true);
  CodecUtil.writeHeader(out, "FooBar", 5);
  out.writeString("this is the data");
  CodecUtil.writeFooter(out);
  out.close();
  ChecksumIndexInput in = new BufferedChecksumIndexInput(new RAMInputStream("file", file));
  CodecUtil.checkHeader(in, "FooBar", 5, 5);
  assertEquals("this is the data", in.readString());
  Exception pending = new RuntimeException("fake exception");
  RuntimeException thrown = expectThrows(RuntimeException.class, () -> {
    CodecUtil.checkFooter(in, pending);
  });
  assertEquals("fake exception", thrown.getMessage());
  Throwable[] suppressed = thrown.getSuppressed();
  assertEquals(1, suppressed.length);
  assertTrue(suppressed[0].getMessage().contains("checksum passed"));
  in.close();
}
13. TestCodecUtil#testCheckFooterValid()
Project: lucene-solr
File: TestCodecUtil.java
// checkFooter() with a pending exception on a fresh input: the remainder of
// the file is checksummed and reported as passed (suppressed).
public void testCheckFooterValid() throws Exception {
  RAMFile file = new RAMFile();
  IndexOutput out = new RAMOutputStream(file, true);
  CodecUtil.writeHeader(out, "FooBar", 5);
  out.writeString("this is the data");
  CodecUtil.writeFooter(out);
  out.close();
  ChecksumIndexInput in = new BufferedChecksumIndexInput(new RAMInputStream("file", file));
  Exception pending = new RuntimeException("fake exception");
  RuntimeException thrown = expectThrows(RuntimeException.class, () -> {
    CodecUtil.checkFooter(in, pending);
  });
  assertEquals("fake exception", thrown.getMessage());
  Throwable[] suppressed = thrown.getSuppressed();
  assertEquals(1, suppressed.length);
  assertTrue(suppressed[0].getMessage().contains("checksum passed"));
  in.close();
}
14. TestCodecUtil#testChecksumEntireFile()
Project: lucene-solr
File: TestCodecUtil.java
// checksumEntireFile() succeeds on a well-formed header/data/footer file.
public void testChecksumEntireFile() throws Exception {
  RAMFile file = new RAMFile();
  IndexOutput out = new RAMOutputStream(file, true);
  CodecUtil.writeHeader(out, "FooBar", 5);
  out.writeString("this is the data");
  CodecUtil.writeFooter(out);
  out.close();
  IndexInput in = new RAMInputStream("file", file);
  CodecUtil.checksumEntireFile(in);
  in.close();
}
15. TestCodecUtil#testHeaderLength()
Project: lucene-solr
File: TestCodecUtil.java
// headerLength() must match the number of bytes writeHeader() wrote:
// seeking past the header lands exactly on the payload.
public void testHeaderLength() throws Exception {
  RAMFile file = new RAMFile();
  IndexOutput out = new RAMOutputStream(file, true);
  CodecUtil.writeHeader(out, "FooBar", 5);
  out.writeString("this is the data");
  out.close();
  IndexInput in = new RAMInputStream("file", file);
  in.seek(CodecUtil.headerLength("FooBar"));
  assertEquals("this is the data", in.readString());
  in.close();
}
16. OakDirectoryTest#assertWrites()
Project: jackrabbit-oak
File: OakDirectoryTest.java
// Writes fileSize random bytes, reads them back, and checks the node
// structure records the expected blob size and blob count.
byte[] assertWrites(Directory dir, int blobSize) throws IOException {
  byte[] data = randomBytes(fileSize);
  IndexOutput out = dir.createOutput("test", IOContext.DEFAULT);
  out.writeBytes(data, data.length);
  out.close();
  assertTrue(dir.fileExists("test"));
  assertEquals(fileSize, dir.fileLength("test"));
  IndexInput in = dir.openInput("test", IOContext.DEFAULT);
  assertEquals(fileSize, in.length());
  byte[] result = new byte[fileSize];
  in.readBytes(result, 0, result.length);
  assertTrue(Arrays.equals(data, result));
  // verify the persisted node metadata matches what was written
  NodeBuilder testNode = builder.child(INDEX_DATA_CHILD_NAME).child("test");
  assertEquals(blobSize, testNode.getProperty(PROP_BLOB_SIZE).getValue(Type.LONG).longValue());
  List<Blob> blobs = newArrayList(testNode.getProperty(JCR_DATA).getValue(BINARIES));
  assertEquals(blobSize + OakDirectory.UNIQUE_KEY_SIZE, blobs.get(0).length());
  return data;
}
17. LuceneBlobCacheTest#assertWrites()
Project: jackrabbit-oak
File: LuceneBlobCacheTest.java
byte[] assertWrites(Directory dir, int blobSize) throws IOException { byte[] data = randomBytes(blobSize); IndexOutput o = dir.createOutput("test", IOContext.DEFAULT); o.writeBytes(data, data.length); o.close(); IndexInput i = dir.openInput("test", IOContext.DEFAULT); assertEquals(blobSize, i.length()); byte[] result = new byte[blobSize]; i.readBytes(result, 0, result.length); assertTrue(Arrays.equals(data, result)); // Load agagin to see if cached i = dir.openInput("test", IOContext.DEFAULT); assertEquals(blobSize, i.length()); result = new byte[blobSize]; i.readBytes(result, 0, result.length); assertTrue(Arrays.equals(data, result)); assertEquals(1, fileDataStore.count); return data; }
18. IndexInputStreamTest#checkStream()
Project: jackrabbit
File: IndexInputStreamTest.java
private void checkStream(int size, int buffer) throws IOException { Random rand = new Random(); byte[] data = new byte[size]; rand.nextBytes(data); Directory dir = new RAMDirectory(); IndexOutput out = dir.createOutput("test"); out.writeBytes(data, data.length); out.close(); InputStream in = new IndexInputStream(dir.openInput("test")); if (buffer != 0) { in = new BufferedInputStream(in, buffer); } byte[] buf = new byte[3]; int len; int pos = 0; while ((len = in.read(buf)) > -1) { for (int i = 0; i < len; i++, pos++) { assertEquals(data[pos], buf[i]); } } in.close(); // assert length assertEquals(data.length, pos); }
19. TestMixedDirectory#testMixedDirectoryAndPolicy()
Project: hadoop-mapreduce
File: TestMixedDirectory.java
// Updates through a MixedDirectory layered over a read and a write
// directory; each view must report the expected document count.
public void testMixedDirectoryAndPolicy() throws IOException {
  Directory readDir = new RAMDirectory();
  updateIndex(readDir, 0, numDocsPerUpdate, new KeepOnlyLastCommitDeletionPolicy());
  verify(readDir, numDocsPerUpdate);
  // drop a dummy cfs file into the read directory
  String cfsName = "_" + (numDocsPerUpdate / maxBufferedDocs + 2) + ".cfs";
  IndexOutput out = readDir.createOutput(cfsName);
  out.writeInt(0);
  out.close();
  Directory writeDir = new RAMDirectory();
  Directory mixedDir = new MixedDirectory(readDir, writeDir);
  updateIndex(mixedDir, numDocsPerUpdate, numDocsPerUpdate, new MixedDeletionPolicy());
  verify(readDir, numDocsPerUpdate);
  verify(mixedDir, 2 * numDocsPerUpdate);
}
20. TestMixedDirectory#testMixedDirectoryAndPolicy()
Project: hadoop-common
File: TestMixedDirectory.java
// Updates through a MixedDirectory layered over a read and a write
// directory; each view must report the expected document count.
public void testMixedDirectoryAndPolicy() throws IOException {
  Directory readDir = new RAMDirectory();
  updateIndex(readDir, 0, numDocsPerUpdate, new KeepOnlyLastCommitDeletionPolicy());
  verify(readDir, numDocsPerUpdate);
  // drop a dummy cfs file into the read directory
  String cfsName = "_" + (numDocsPerUpdate / maxBufferedDocs + 2) + ".cfs";
  IndexOutput out = readDir.createOutput(cfsName);
  out.writeInt(0);
  out.close();
  Directory writeDir = new RAMDirectory();
  Directory mixedDir = new MixedDirectory(readDir, writeDir);
  updateIndex(mixedDir, numDocsPerUpdate, numDocsPerUpdate, new MixedDeletionPolicy());
  verify(readDir, numDocsPerUpdate);
  verify(mixedDir, 2 * numDocsPerUpdate);
}
21. TestMixedDirectory#testMixedDirectoryAndPolicy()
Project: hadoop-20
File: TestMixedDirectory.java
// Updates through a MixedDirectory layered over a read and a write
// directory; each view must report the expected document count.
public void testMixedDirectoryAndPolicy() throws IOException {
  Directory readDir = new RAMDirectory();
  updateIndex(readDir, 0, numDocsPerUpdate, new KeepOnlyLastCommitDeletionPolicy());
  verify(readDir, numDocsPerUpdate);
  // drop a dummy cfs file into the read directory
  String cfsName = "_" + (numDocsPerUpdate / maxBufferedDocs + 2) + ".cfs";
  IndexOutput out = readDir.createOutput(cfsName);
  out.writeInt(0);
  out.close();
  Directory writeDir = new RAMDirectory();
  Directory mixedDir = new MixedDirectory(readDir, writeDir);
  updateIndex(mixedDir, numDocsPerUpdate, numDocsPerUpdate, new MixedDeletionPolicy());
  verify(readDir, numDocsPerUpdate);
  verify(mixedDir, 2 * numDocsPerUpdate);
}
22. TestOfflineSorter#checkSort()
Project: lucene-solr
File: TestOfflineSorter.java
/** * Check sorting data on an instance of {@link OfflineSorter}. */ private SortInfo checkSort(Directory dir, OfflineSorter sorter, byte[][] data) throws IOException { IndexOutput unsorted = dir.createTempOutput("unsorted", "tmp", IOContext.DEFAULT); writeAll(unsorted, data); IndexOutput golden = dir.createTempOutput("golden", "tmp", IOContext.DEFAULT); Arrays.sort(data, unsignedByteOrderComparator); writeAll(golden, data); String sorted = sorter.sort(unsorted.getName()); //System.out.println("Input size [MB]: " + unsorted.length() / (1024 * 1024)); //System.out.println(sortInfo); assertFilesIdentical(dir, golden.getName(), sorted); return sorter.sortInfo; }
23. TestDirectPacked#testNotEnoughValues()
Project: lucene-solr
File: TestDirectPacked.java
/** test exception is delivered if you add the wrong number of values */
public void testNotEnoughValues() throws Exception {
  Directory dir = newDirectory();
  int bitsPerValue = DirectWriter.bitsRequired(2);
  IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT);
  DirectWriter writer = DirectWriter.getInstance(out, 5, bitsPerValue);
  // only four of the five promised values are added
  writer.add(1);
  writer.add(0);
  writer.add(2);
  writer.add(1);
  IllegalStateException expected = expectThrows(IllegalStateException.class, () -> {
    writer.finish();
  });
  assertTrue(expected.getMessage().startsWith("Wrong number of values added"));
  out.close();
  dir.close();
}
24. TestDirectPacked#testSimple()
Project: lucene-solr
File: TestDirectPacked.java
/** simple encode/decode */
public void testSimple() throws Exception {
  Directory dir = newDirectory();
  int bitsPerValue = DirectWriter.bitsRequired(2);
  IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT);
  DirectWriter writer = DirectWriter.getInstance(out, 5, bitsPerValue);
  writer.add(1);
  writer.add(0);
  writer.add(2);
  writer.add(1);
  writer.add(2);
  writer.finish();
  out.close();
  // read the five values back through a random-access slice
  IndexInput in = dir.openInput("foo", IOContext.DEFAULT);
  NumericDocValues reader = DirectReader.getInstance(in.randomAccessSlice(0, in.length()), bitsPerValue, 0);
  assertEquals(1, reader.get(0));
  assertEquals(0, reader.get(1));
  assertEquals(2, reader.get(2));
  assertEquals(1, reader.get(3));
  assertEquals(2, reader.get(4));
  in.close();
  dir.close();
}
25. TestDirectMonotonic#testEmpty()
Project: lucene-solr
File: TestDirectMonotonic.java
public void testEmpty() throws IOException { Directory dir = newDirectory(); final int blockShift = TestUtil.nextInt(random(), DirectMonotonicWriter.MIN_BLOCK_SHIFT, DirectMonotonicWriter.MAX_BLOCK_SHIFT); final long dataLength; try (IndexOutput metaOut = dir.createOutput("meta", IOContext.DEFAULT); IndexOutput dataOut = dir.createOutput("data", IOContext.DEFAULT)) { DirectMonotonicWriter w = DirectMonotonicWriter.getInstance(metaOut, dataOut, 0, blockShift); w.finish(); dataLength = dataOut.getFilePointer(); } try (IndexInput metaIn = dir.openInput("meta", IOContext.READONCE); IndexInput dataIn = dir.openInput("data", IOContext.DEFAULT)) { DirectMonotonicReader.Meta meta = DirectMonotonicReader.loadMeta(metaIn, 0, blockShift); DirectMonotonicReader.getInstance(meta, dataIn.randomAccessSlice(0, dataLength)); // no exception } dir.close(); }
26. TestIndexWriter#testLeftoverTempFiles()
Project: lucene-solr
File: TestIndexWriter.java
public void testLeftoverTempFiles() throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random())); IndexWriter w = new IndexWriter(dir, iwc); w.close(); IndexOutput out = dir.createTempOutput("_0", "bkd", IOContext.DEFAULT); String tempName = out.getName(); out.close(); iwc = new IndexWriterConfig(new MockAnalyzer(random())); w = new IndexWriter(dir, iwc); // Make sure IW deleted the unref'd file: try { dir.openInput(tempName, IOContext.DEFAULT); fail("did not hit exception"); } catch (FileNotFoundExceptionNoSuchFileException | e) { } w.close(); dir.close(); }
27. TestIndexFileDeleter#copyFile()
Project: lucene-solr
File: TestIndexFileDeleter.java
// Byte-for-byte copy of one directory file to another name, in 1 KB chunks.
public void copyFile(Directory dir, String src, String dest) throws IOException {
  IndexInput in = dir.openInput(src, newIOContext(random()));
  IndexOutput out = dir.createOutput(dest, newIOContext(random()));
  byte[] chunk = new byte[1024];
  for (long remaining = in.length(); remaining > 0; ) {
    int len = (int) Math.min(chunk.length, remaining);
    in.readBytes(chunk, 0, len);
    out.writeBytes(chunk, len);
    remaining -= len;
  }
  in.close();
  out.close();
}
28. TestCodecUtil#testWriteVeryLongSuffix()
Project: lucene-solr
File: TestCodecUtil.java
// A 255-character suffix round-trips through writeIndexHeader /
// checkIndexHeader, and indexHeaderLength() matches the bytes written.
public void testWriteVeryLongSuffix() throws Exception {
  StringBuilder justLongEnough = new StringBuilder();
  for (int i = 0; i < 255; i++) {
    justLongEnough.append('a');
  }
  RAMFile file = new RAMFile();
  IndexOutput out = new RAMOutputStream(file, true);
  byte[] id = StringHelper.randomId();
  CodecUtil.writeIndexHeader(out, "foobar", 5, id, justLongEnough.toString());
  out.close();
  IndexInput in = new RAMInputStream("file", file);
  CodecUtil.checkIndexHeader(in, "foobar", 5, 5, id, justLongEnough.toString());
  assertEquals(in.getFilePointer(), in.length());
  assertEquals(in.getFilePointer(), CodecUtil.indexHeaderLength("foobar", justLongEnough.toString()));
  in.close();
}
29. InputStreamIndexInputTests#testMarkRest()
Project: elasticsearch
File: InputStreamIndexInputTests.java
// mark()/reset() on InputStreamIndexInput must re-deliver the bytes read
// after the mark position.
public void testMarkRest() throws Exception {
  RAMDirectory dir = new RAMDirectory();
  IndexOutput out = dir.createOutput("test", IOContext.DEFAULT);
  for (int i = 0; i < 3; i++) {
    out.writeByte((byte) 1);
  }
  for (int i = 0; i < 3; i++) {
    out.writeByte((byte) 2);
  }
  out.close();
  IndexInput in = dir.openInput("test", IOContext.DEFAULT);
  InputStreamIndexInput is = new InputStreamIndexInput(in, 4);
  assertThat(is.markSupported(), equalTo(true));
  assertThat(is.read(), equalTo(1));
  assertThat(is.read(), equalTo(1));
  is.mark(0);
  assertThat(is.read(), equalTo(1));
  assertThat(is.read(), equalTo(2));
  is.reset();
  // same two bytes again after reset
  assertThat(is.read(), equalTo(1));
  assertThat(is.read(), equalTo(2));
}
30. BaseCompoundFormatTestCase#testCorruptFilesAreCaught()
Project: lucene-solr
File: BaseCompoundFormatTestCase.java
public void testCorruptFilesAreCaught() throws Exception { Directory dir = newDirectory(); String subFile = "_123.xyz"; // wrong checksum SegmentInfo si = newSegmentInfo(dir, "_123"); try (IndexOutput os = dir.createOutput(subFile, newIOContext(random()))) { CodecUtil.writeIndexHeader(os, "Foo", 0, si.getId(), "suffix"); for (int i = 0; i < 1024; i++) { os.writeByte((byte) i); } // write footer w/ wrong checksum os.writeInt(CodecUtil.FOOTER_MAGIC); os.writeInt(0); long checksum = os.getChecksum(); os.writeLong(checksum + 1); } si.setFiles(Collections.singletonList(subFile)); Exception e = expectThrows(CorruptIndexException.class, () -> si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT)); assertTrue(e.getMessage().contains("checksum failed (hardware problem?)")); dir.close(); }
31. BaseCompoundFormatTestCase#testMissingCodecHeadersAreCaught()
Project: lucene-solr
File: BaseCompoundFormatTestCase.java
public void testMissingCodecHeadersAreCaught() throws Exception { Directory dir = newDirectory(); String subFile = "_123.xyz"; // missing codec header try (IndexOutput os = dir.createOutput(subFile, newIOContext(random()))) { for (int i = 0; i < 1024; i++) { os.writeByte((byte) i); } } SegmentInfo si = newSegmentInfo(dir, "_123"); si.setFiles(Collections.singletonList(subFile)); Exception e = expectThrows(CorruptIndexException.class, () -> si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT)); assertTrue(e.getMessage().contains("codec header mismatch")); dir.close(); }
32. BaseCompoundFormatTestCase#testLargeCFS()
Project: lucene-solr
File: BaseCompoundFormatTestCase.java
// LUCENE-5724: actually test we play nice with NRTCachingDir and massive file public void testLargeCFS() throws IOException { final String testfile = "_123.test"; IOContext context = new IOContext(new FlushInfo(0, 512 * 1024 * 1024)); Directory dir = new NRTCachingDirectory(newFSDirectory(createTempDir()), 2.0, 25.0); SegmentInfo si = newSegmentInfo(dir, "_123"); try (IndexOutput out = dir.createOutput(testfile, context)) { CodecUtil.writeIndexHeader(out, "Foo", 0, si.getId(), "suffix"); byte[] bytes = new byte[512]; for (int i = 0; i < 1024 * 1024; i++) { out.writeBytes(bytes, 0, bytes.length); } CodecUtil.writeFooter(out); } si.setFiles(Collections.singleton(testfile)); si.getCodec().compoundFormat().write(dir, si, context); dir.close(); }
33. BaseCompoundFormatTestCase#testPassIOContext()
Project: lucene-solr
File: BaseCompoundFormatTestCase.java
// LUCENE-5724: things like NRTCachingDir rely upon IOContext being properly passed down public void testPassIOContext() throws IOException { final String testfile = "_123.test"; final IOContext myContext = new IOContext(); Directory dir = new FilterDirectory(newDirectory()) { @Override public IndexOutput createOutput(String name, IOContext context) throws IOException { assertSame(myContext, context); return super.createOutput(name, context); } }; SegmentInfo si = newSegmentInfo(dir, "_123"); try (IndexOutput out = dir.createOutput(testfile, myContext)) { CodecUtil.writeIndexHeader(out, "Foo", 0, si.getId(), "suffix"); out.writeInt(3); CodecUtil.writeFooter(out); } si.setFiles(Collections.singleton(testfile)); si.getCodec().compoundFormat().write(dir, si, myContext); dir.close(); }
34. BaseCompoundFormatTestCase#testDoubleClose()
Project: lucene-solr
File: BaseCompoundFormatTestCase.java
// test that a second call to close() behaves according to Closeable public void testDoubleClose() throws IOException { final String testfile = "_123.test"; Directory dir = newDirectory(); SegmentInfo si = newSegmentInfo(dir, "_123"); try (IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT)) { CodecUtil.writeIndexHeader(out, "Foo", 0, si.getId(), "suffix"); out.writeInt(3); CodecUtil.writeFooter(out); } si.setFiles(Collections.singleton(testfile)); si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT); Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT); assertEquals(1, cfs.listAll().length); cfs.close(); // second close should not throw exception cfs.close(); dir.close(); }
35. TestOfflineSorter#testFixedLengthLiesLiesLies()
Project: lucene-solr
File: TestOfflineSorter.java
public void testFixedLengthLiesLiesLies() throws Exception { // Make sure OfflineSorter catches me if I lie about the fixed value length: Directory dir = newDirectory(); IndexOutput out = dir.createTempOutput("unsorted", "tmp", IOContext.DEFAULT); try (ByteSequencesWriter w = new OfflineSorter.ByteSequencesWriter(out)) { byte[] bytes = new byte[Integer.BYTES]; random().nextBytes(bytes); w.write(bytes); CodecUtil.writeFooter(out); } OfflineSorter sorter = new OfflineSorter(dir, "foo", OfflineSorter.DEFAULT_COMPARATOR, BufferSize.megabytes(4), OfflineSorter.MAX_TEMPFILES, Long.BYTES); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { sorter.sort(out.getName()); }); assertEquals("value length is 4 but is supposed to always be 8", e.getMessage()); dir.close(); }
36. TestOfflineSorter#testFixedLengthHeap()
Project: lucene-solr
File: TestOfflineSorter.java
public void testFixedLengthHeap() throws Exception { // Make sure the RAM accounting is correct, i.e. if we are sorting fixed width // ints (4 bytes) then the heap used is really only 4 bytes per value: Directory dir = newDirectory(); IndexOutput out = dir.createTempOutput("unsorted", "tmp", IOContext.DEFAULT); try (ByteSequencesWriter w = new OfflineSorter.ByteSequencesWriter(out)) { byte[] bytes = new byte[Integer.BYTES]; for (int i = 0; i < 1024 * 1024; i++) { random().nextBytes(bytes); w.write(bytes); } CodecUtil.writeFooter(out); } OfflineSorter sorter = new OfflineSorter(dir, "foo", OfflineSorter.DEFAULT_COMPARATOR, BufferSize.megabytes(4), OfflineSorter.MAX_TEMPFILES, Integer.BYTES); sorter.sort(out.getName()); // 1 MB of ints with 4 MH heap allowed should have been sorted in a single heap partition: assertEquals(0, sorter.sortInfo.mergeRounds); dir.close(); }
37. OfflineSorter#sortPartition()
Project: lucene-solr
File: OfflineSorter.java
/** Sort a single partition in-memory. */ protected String sortPartition(TrackingDirectoryWrapper trackingDir) throws IOException { try (IndexOutput tempFile = trackingDir.createTempOutput(tempFileNamePrefix, "sort", IOContext.DEFAULT); ByteSequencesWriter out = getWriter(tempFile)) { BytesRef spare; long start = System.currentTimeMillis(); BytesRefIterator iter = buffer.iterator(comparator); sortInfo.sortTime += System.currentTimeMillis() - start; while ((spare = iter.next()) != null) { assert spare.length <= Short.MAX_VALUE; out.write(spare); } // Clean up the buffer for the next partition. buffer.clear(); CodecUtil.writeFooter(out.out); return tempFile.getName(); } }
38. Lucene50LiveDocsFormat#writeLiveDocs()
Project: lucene-solr
File: Lucene50LiveDocsFormat.java
// Writes the live-docs bitset for a segment generation, failing fast when
// the bitset's deleted count disagrees with the recorded delete counts.
@Override
public void writeLiveDocs(MutableBits bits, Directory dir, SegmentCommitInfo info, int newDelCount, IOContext context) throws IOException {
  long gen = info.getNextDelGen();
  String name = IndexFileNames.fileNameFromGeneration(info.info.name, EXTENSION, gen);
  FixedBitSet fbs = (FixedBitSet) bits;
  if (fbs.length() - fbs.cardinality() != info.getDelCount() + newDelCount) {
    throw new CorruptIndexException("bits.deleted=" + (fbs.length() - fbs.cardinality()) + " info.delcount=" + info.getDelCount() + " newdelcount=" + newDelCount, name);
  }
  long data[] = fbs.getBits();
  try (IndexOutput output = dir.createOutput(name, context)) {
    CodecUtil.writeIndexHeader(output, CODEC_NAME, VERSION_CURRENT, info.info.getId(), Long.toString(gen, Character.MAX_RADIX));
    for (long word : data) {
      output.writeLong(word);
    }
    CodecUtil.writeFooter(output);
  }
}
39. StoreTests#corruptFile()
Project: elasticsearch
File: StoreTests.java
private void corruptFile(Directory dir, String fileIn, String fileOut) throws IOException { IndexInput input = dir.openInput(fileIn, IOContext.READONCE); IndexOutput output = dir.createOutput(fileOut, IOContext.DEFAULT); long len = input.length(); byte[] b = new byte[1024]; long broken = randomInt((int) len - 1); long pos = 0; while (pos < len) { int min = (int) Math.min(input.length() - pos, b.length); input.readBytes(b, 0, min); if (broken >= pos && broken < pos + min) { // Flip one byte int flipPos = (int) (broken - pos); b[flipPos] = (byte) (b[flipPos] ^ 42); } output.writeBytes(b, min); pos += min; } IOUtils.close(input, output); }
40. StoreTests#testCheckIntegrity()
Project: elasticsearch
File: StoreTests.java
File: StoreTests.java
public void testCheckIntegrity() throws IOException { Directory dir = newDirectory(); long luceneFileLength = 0; try (IndexOutput output = dir.createOutput("lucene_checksum.bin", IOContext.DEFAULT)) { int iters = scaledRandomIntBetween(10, 100); for (int i = 0; i < iters; i++) { BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024)); output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length); luceneFileLength += bytesRef.length; } CodecUtil.writeFooter(output); luceneFileLength += CodecUtil.footerLength(); } final long luceneChecksum; try (IndexInput indexInput = dir.openInput("lucene_checksum.bin", IOContext.DEFAULT)) { assertEquals(luceneFileLength, indexInput.length()); luceneChecksum = CodecUtil.retrieveChecksum(indexInput); } dir.close(); }
41. StoreTests#testVerifyingIndexOutputWithBogusInput()
Project: elasticsearch
File: StoreTests.java
File: StoreTests.java
public void testVerifyingIndexOutputWithBogusInput() throws IOException { Directory dir = newDirectory(); int length = scaledRandomIntBetween(10, 1024); IndexOutput verifyingOutput = new Store.LuceneVerifyingIndexOutput(new StoreFileMetaData("foo1.bar", length, ""), dir.createOutput("foo1.bar", IOContext.DEFAULT)); try { while (length > 0) { verifyingOutput.writeByte((byte) random().nextInt()); length--; } fail("should be a corrupted index"); } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) { } IOUtils.close(verifyingOutput, dir); }
42. StoreTests#testVerifyingIndexOutputOnEmptyFile()
Project: elasticsearch
File: StoreTests.java
File: StoreTests.java
public void testVerifyingIndexOutputOnEmptyFile() throws IOException { Directory dir = newDirectory(); IndexOutput verifyingOutput = new Store.LuceneVerifyingIndexOutput(new StoreFileMetaData("foo.bar", 0, Store.digestToString(0)), dir.createOutput("foo1.bar", IOContext.DEFAULT)); try { Store.verify(verifyingOutput); fail("should be a corrupted index"); } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) { } IOUtils.close(verifyingOutput, dir); }
43. StoreRecoveryTests#testStatsDirWrapper()
Project: elasticsearch
File: StoreRecoveryTests.java
File: StoreRecoveryTests.java
public void testStatsDirWrapper() throws IOException { Directory dir = newDirectory(); Directory target = newDirectory(); RecoveryState.Index indexStats = new RecoveryState.Index(); StoreRecovery.StatsDirectoryWrapper wrapper = new StoreRecovery.StatsDirectoryWrapper(target, indexStats); try (IndexOutput output = dir.createOutput("foo.bar", IOContext.DEFAULT)) { CodecUtil.writeHeader(output, "foo", 0); int numBytes = randomIntBetween(100, 20000); for (int i = 0; i < numBytes; i++) { output.writeByte((byte) i); } CodecUtil.writeFooter(output); } wrapper.copyFrom(dir, "foo.bar", "bar.foo", IOContext.DEFAULT); assertNotNull(indexStats.getFileDetails("bar.foo")); assertNull(indexStats.getFileDetails("foo.bar")); assertEquals(dir.fileLength("foo.bar"), indexStats.getFileDetails("bar.foo").length()); assertEquals(dir.fileLength("foo.bar"), indexStats.getFileDetails("bar.foo").recovered()); assertFalse(indexStats.getFileDetails("bar.foo").reused()); IOUtils.close(dir, target); }
44. RecoveryTarget#openAndPutIndexOutput()
Project: elasticsearch
File: RecoveryTarget.java
File: RecoveryTarget.java
/** * Creates an {@link org.apache.lucene.store.IndexOutput} for the given file name. Note that the * IndexOutput actually point at a temporary file. * <p> * Note: You can use {@link #getOpenIndexOutput(String)} with the same filename to retrieve the same IndexOutput * at a later stage */ public IndexOutput openAndPutIndexOutput(String fileName, StoreFileMetaData metaData, Store store) throws IOException { ensureRefCount(); String tempFileName = getTempNameForFile(fileName); if (tempFileNames.containsKey(tempFileName)) { throw new IllegalStateException("output for file [" + fileName + "] has already been created"); } // add first, before it's created tempFileNames.put(tempFileName, fileName); IndexOutput indexOutput = store.createVerifyingOutput(tempFileName, metaData, IOContext.DEFAULT); openIndexOutputs.put(fileName, indexOutput); return indexOutput; }
45. Store#createVerifyingOutput()
Project: elasticsearch
File: Store.java
File: Store.java
/** * The returned IndexOutput validates the files checksum. * <p> * Note: Checksums are calculated by default since version 4.8.0. This method only adds the * verification against the checksum in the given metadata and does not add any significant overhead. */ public IndexOutput createVerifyingOutput(String fileName, final StoreFileMetaData metadata, final IOContext context) throws IOException { IndexOutput output = directory().createOutput(fileName, context); boolean success = false; try { assert metadata.writtenBy() != null; assert metadata.writtenBy().onOrAfter(StoreFileMetaData.FIRST_LUCENE_CHECKSUM_VERSION); output = new LuceneVerifyingIndexOutput(metadata, output); success = true; } finally { if (success == false) { IOUtils.closeWhileHandlingException(output); } } return output; }
46. RecoveryStatus#openAndPutIndexOutput()
Project: elassandra
File: RecoveryStatus.java
File: RecoveryStatus.java
/** * Creates an {@link org.apache.lucene.store.IndexOutput} for the given file name. Note that the * IndexOutput actually point at a temporary file. * <p/> * Note: You can use {@link #getOpenIndexOutput(String)} with the same filename to retrieve the same IndexOutput * at a later stage */ public IndexOutput openAndPutIndexOutput(String fileName, StoreFileMetaData metaData, Store store) throws IOException { ensureRefCount(); String tempFileName = getTempNameForFile(fileName); if (tempFileNames.containsKey(tempFileName)) { throw new IllegalStateException("output for file [" + fileName + "] has already been created"); } // add first, before it's created tempFileNames.put(tempFileName, fileName); IndexOutput indexOutput = store.createVerifyingOutput(tempFileName, metaData, IOContext.DEFAULT); openIndexOutputs.put(fileName, indexOutput); return indexOutput; }
47. StoreTests#testChecksumCorrupted()
Project: elasticsearch
File: StoreTests.java
File: StoreTests.java
public void testChecksumCorrupted() throws IOException { Directory dir = newDirectory(); IndexOutput output = dir.createOutput("foo.bar", IOContext.DEFAULT); int iters = scaledRandomIntBetween(10, 100); for (int i = 0; i < iters; i++) { BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024)); output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length); } output.writeInt(CodecUtil.FOOTER_MAGIC); output.writeInt(0); String checksum = Store.digestToString(output.getChecksum()); // write a wrong checksum to the file output.writeLong(output.getChecksum() + 1); output.close(); IndexInput indexInput = dir.openInput("foo.bar", IOContext.DEFAULT); indexInput.seek(0); BytesRef ref = new BytesRef(scaledRandomIntBetween(1, 1024)); long length = indexInput.length(); IndexOutput verifyingOutput = new Store.LuceneVerifyingIndexOutput(new StoreFileMetaData("foo1.bar", length, checksum), dir.createOutput("foo1.bar", IOContext.DEFAULT)); // we write the checksum in the try / catch block below length -= 8; while (length > 0) { if (random().nextInt(10) == 0) { verifyingOutput.writeByte(indexInput.readByte()); length--; } else { int min = (int) Math.min(length, ref.bytes.length); indexInput.readBytes(ref.bytes, ref.offset, min); verifyingOutput.writeBytes(ref.bytes, ref.offset, min); length -= min; } } try { BytesRef checksumBytes = new BytesRef(8); checksumBytes.length = 8; indexInput.readBytes(checksumBytes.bytes, checksumBytes.offset, checksumBytes.length); if (randomBoolean()) { verifyingOutput.writeBytes(checksumBytes.bytes, checksumBytes.offset, checksumBytes.length); } else { for (int i = 0; i < checksumBytes.length; i++) { verifyingOutput.writeByte(checksumBytes.bytes[i]); } } fail("should be a corrupted index"); } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) { } IOUtils.close(indexInput, verifyingOutput, dir); }
48. TestIndexWriterExceptions#testSegmentsChecksumError()
Project: lucene-solr
File: TestIndexWriterExceptions.java
File: TestIndexWriterExceptions.java
// LUCENE-1044: Simulate checksum error in segments_N public void testSegmentsChecksumError() throws IOException { BaseDirectoryWrapper dir = newDirectory(); // we corrupt the index dir.setCheckIndexOnClose(false); IndexWriter writer = null; writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); // add 100 documents for (int i = 0; i < 100; i++) { addDoc(writer); } // close writer.close(); long gen = SegmentInfos.getLastCommitGeneration(dir); assertTrue("segment generation should be > 0 but got " + gen, gen > 0); final String segmentsFileName = SegmentInfos.getLastCommitSegmentsFileName(dir); IndexInput in = dir.openInput(segmentsFileName, newIOContext(random())); IndexOutput out = dir.createOutput(IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", 1 + gen), newIOContext(random())); out.copyBytes(in, in.length() - 1); byte b = in.readByte(); out.writeByte((byte) (1 + b)); out.close(); in.close(); expectThrows(CorruptIndexException.class, () -> { DirectoryReader.open(dir); }); dir.close(); }
49. StoreTests#testCanReadOldCorruptionMarker()
Project: elasticsearch
File: StoreTests.java
File: StoreTests.java
public void testCanReadOldCorruptionMarker() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); // I use ram dir to prevent that virusscanner being a PITA final Directory dir = new RAMDirectory(); DirectoryService directoryService = new DirectoryService(shardId, INDEX_SETTINGS) { @Override public long throttleTimeInNanos() { return 0; } @Override public Directory newDirectory() throws IOException { return dir; } }; Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); CorruptIndexException exception = new CorruptIndexException("foo", "bar"); String uuid = Store.CORRUPTED + UUIDs.randomBase64UUID(); try (IndexOutput output = dir.createOutput(uuid, IOContext.DEFAULT)) { CodecUtil.writeHeader(output, Store.CODEC, Store.VERSION_STACK_TRACE); output.writeString(ExceptionsHelper.detailedMessage(exception)); output.writeString(ExceptionsHelper.stackTrace(exception)); CodecUtil.writeFooter(output); } try { store.failIfCorrupted(); fail("should be corrupted"); } catch (CorruptIndexException e) { assertTrue(e.getMessage().startsWith("[index][1] Preexisting corrupted index [" + uuid + "] caused by: CorruptIndexException[foo (resource=bar)]")); assertTrue(e.getMessage().contains(ExceptionsHelper.stackTrace(exception))); } store.removeCorruptionMarker(); try (IndexOutput output = dir.createOutput(uuid, IOContext.DEFAULT)) { CodecUtil.writeHeader(output, Store.CODEC, Store.VERSION_START); output.writeString(ExceptionsHelper.detailedMessage(exception)); CodecUtil.writeFooter(output); } try { store.failIfCorrupted(); fail("should be corrupted"); } catch (CorruptIndexException e) { assertTrue(e.getMessage().startsWith("[index][1] Preexisting corrupted index [" + uuid + "] caused by: CorruptIndexException[foo (resource=bar)]")); assertFalse(e.getMessage().contains(ExceptionsHelper.stackTrace(exception))); } store.removeCorruptionMarker(); try (IndexOutput output = dir.createOutput(uuid, IOContext.DEFAULT)) { // corrupted header CodecUtil.writeHeader(output, Store.CODEC, Store.VERSION_START - 1); CodecUtil.writeFooter(output); } try { store.failIfCorrupted(); fail("should be too old"); } catch (IndexFormatTooOldException e) { } store.removeCorruptionMarker(); try (IndexOutput output = dir.createOutput(uuid, IOContext.DEFAULT)) { // corrupted header CodecUtil.writeHeader(output, Store.CODEC, Store.VERSION + 1); CodecUtil.writeFooter(output); } try { store.failIfCorrupted(); fail("should be too new"); } catch (IndexFormatTooNewException e) { } store.close(); }
50. HdfsDirectoryTest#testWritingAndReadingAFile()
Project: lucene-solr
File: HdfsDirectoryTest.java
File: HdfsDirectoryTest.java
@Test public void testWritingAndReadingAFile() throws IOException { String[] listAll = directory.listAll(); for (String file : listAll) { directory.deleteFile(file); } IndexOutput output = directory.createOutput("testing.test", new IOContext()); output.writeInt(12345); output.close(); IndexInput input = directory.openInput("testing.test", new IOContext()); assertEquals(12345, input.readInt()); input.close(); listAll = directory.listAll(); assertEquals(1, listAll.length); assertEquals("testing.test", listAll[0]); assertEquals(4, directory.fileLength("testing.test")); IndexInput input1 = directory.openInput("testing.test", new IOContext()); IndexInput input2 = (IndexInput) input1.clone(); assertEquals(12345, input2.readInt()); input2.close(); assertEquals(12345, input1.readInt()); input1.close(); assertFalse(slowFileExists(directory, "testing.test.other")); assertTrue(slowFileExists(directory, "testing.test")); directory.deleteFile("testing.test"); assertFalse(slowFileExists(directory, "testing.test")); }
51. TestCodecUtil#testReadHeaderWrongMagic()
Project: lucene-solr
File: TestCodecUtil.java
File: TestCodecUtil.java
public void testReadHeaderWrongMagic() throws Exception { RAMFile file = new RAMFile(); IndexOutput output = new RAMOutputStream(file, true); output.writeInt(1234); output.close(); IndexInput input = new RAMInputStream("file", file); expectThrows(CorruptIndexException.class, () -> { CodecUtil.checkHeader(input, "bogus", 1, 1); }); }
52. OakDirectoryTest#writeFile()
Project: jackrabbit-oak
File: OakDirectoryTest.java
File: OakDirectoryTest.java
private static void writeFile(Directory directory, String fileName, long size) throws Exception { IndexOutput o = directory.createOutput(fileName, IOContext.DEFAULT); o.copyBytes(new InputStreamDataInput(new NullInputStream(size)), size); o.close(); }
53. OakDirectoryTest#dirNameInException_Writes()
Project: jackrabbit-oak
File: OakDirectoryTest.java
File: OakDirectoryTest.java
@Test public void dirNameInException_Writes() throws Exception { FailOnDemandBlobStore blobStore = new FailOnDemandBlobStore(); FileStore store = FileStore.builder(tempFolder.getRoot()).withMemoryMapping(false).withBlobStore(blobStore).build(); SegmentNodeStore nodeStore = SegmentNodeStore.builder(store).build(); String indexPath = "/foo/bar"; int minFileSize = Segment.MEDIUM_LIMIT; int blobSize = minFileSize + 1000; builder = nodeStore.getRoot().builder(); builder.setProperty(IndexConstants.INDEX_PATH, indexPath); builder.setProperty(LuceneIndexConstants.BLOB_SIZE, blobSize); Directory dir = createDir(builder, false); blobStore.startFailing(); IndexOutput o = dir.createOutput("test1.txt", IOContext.DEFAULT); try { o.writeBytes(randomBytes(blobSize + 10), blobSize + 10); fail(); } catch (IOException e) { assertThat(e.getMessage(), containsString(indexPath)); assertThat(e.getMessage(), containsString("test1.txt")); } blobStore.reset(); IndexOutput o3 = dir.createOutput("test3.txt", IOContext.DEFAULT); o3.writeBytes(randomBytes(minFileSize), minFileSize); blobStore.startFailing(); try { o3.flush(); fail(); } catch (IOException e) { assertThat(e.getMessage(), containsString(indexPath)); assertThat(e.getMessage(), containsString("test3.txt")); } store.close(); }
54. OakDirectoryTest#createFile()
Project: jackrabbit-oak
File: OakDirectoryTest.java
File: OakDirectoryTest.java
private int createFile(Directory dir, String fileName) throws IOException { int size = rnd.nextInt(1000) + 1; byte[] data = randomBytes(size); IndexOutput o = dir.createOutput(fileName, IOContext.DEFAULT); o.writeBytes(data, data.length); o.close(); return size; }
55. IndexCopierTest#writeFile()
Project: jackrabbit-oak
File: IndexCopierTest.java
File: IndexCopierTest.java
private byte[] writeFile(Directory dir, String name) throws IOException { byte[] data = randomBytes(rnd.nextInt(maxFileSize) + 1); IndexOutput o = dir.createOutput(name, IOContext.DEFAULT); o.writeBytes(data, data.length); o.close(); return data; }
56. StoreTests#testVerifyingIndexOutput()
Project: elasticsearch
File: StoreTests.java
File: StoreTests.java
public void testVerifyingIndexOutput() throws IOException { Directory dir = newDirectory(); IndexOutput output = dir.createOutput("foo.bar", IOContext.DEFAULT); int iters = scaledRandomIntBetween(10, 100); for (int i = 0; i < iters; i++) { BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024)); output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length); } CodecUtil.writeFooter(output); output.close(); IndexInput indexInput = dir.openInput("foo.bar", IOContext.DEFAULT); String checksum = Store.digestToString(CodecUtil.retrieveChecksum(indexInput)); indexInput.seek(0); BytesRef ref = new BytesRef(scaledRandomIntBetween(1, 1024)); long length = indexInput.length(); IndexOutput verifyingOutput = new Store.LuceneVerifyingIndexOutput(new StoreFileMetaData("foo1.bar", length, checksum), dir.createOutput("foo1.bar", IOContext.DEFAULT)); while (length > 0) { if (random().nextInt(10) == 0) { verifyingOutput.writeByte(indexInput.readByte()); length--; } else { int min = (int) Math.min(length, ref.bytes.length); indexInput.readBytes(ref.bytes, ref.offset, min); verifyingOutput.writeBytes(ref.bytes, ref.offset, min); length -= min; } } Store.verify(verifyingOutput); try { appendRandomData(verifyingOutput); fail("should be a corrupted index"); } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) { } try { Store.verify(verifyingOutput); fail("should be a corrupted index"); } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) { } IOUtils.close(indexInput, verifyingOutput, dir); }
57. TestPagedBytes#testOverflow()
Project: lucene-solr
File: TestPagedBytes.java
File: TestPagedBytes.java
// memory hole @Ignore public void testOverflow() throws IOException { BaseDirectoryWrapper dir = newFSDirectory(createTempDir("testOverflow")); if (dir instanceof MockDirectoryWrapper) { ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER); } final int blockBits = TestUtil.nextInt(random(), 14, 28); final int blockSize = 1 << blockBits; byte[] arr = new byte[TestUtil.nextInt(random(), blockSize / 2, blockSize * 2)]; for (int i = 0; i < arr.length; ++i) { arr[i] = (byte) i; } final long numBytes = (1L << 31) + TestUtil.nextInt(random(), 1, blockSize * 3); final PagedBytes p = new PagedBytes(blockBits); final IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT); for (long i = 0; i < numBytes; ) { assertEquals(i, out.getFilePointer()); final int len = (int) Math.min(arr.length, numBytes - i); out.writeBytes(arr, len); i += len; } assertEquals(numBytes, out.getFilePointer()); out.close(); final IndexInput in = dir.openInput("foo", IOContext.DEFAULT); p.copy(in, numBytes); final PagedBytes.Reader reader = p.freeze(random().nextBoolean()); for (long offset : new long[] { 0L, Integer.MAX_VALUE, numBytes - 1, TestUtil.nextLong(random(), 1, numBytes - 2) }) { BytesRef b = new BytesRef(); reader.fillSlice(b, offset, 1); assertEquals(arr[(int) (offset % arr.length)], b.bytes[b.offset]); } in.close(); dir.close(); }
58. Test2BPagedBytes#test()
Project: lucene-solr
File: Test2BPagedBytes.java
File: Test2BPagedBytes.java
public void test() throws Exception { BaseDirectoryWrapper dir = newFSDirectory(createTempDir("test2BPagedBytes")); if (dir instanceof MockDirectoryWrapper) { ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER); } PagedBytes pb = new PagedBytes(15); IndexOutput dataOutput = dir.createOutput("foo", IOContext.DEFAULT); long netBytes = 0; long seed = random().nextLong(); long lastFP = 0; Random r2 = new Random(seed); while (netBytes < 1.1 * Integer.MAX_VALUE) { int numBytes = TestUtil.nextInt(r2, 1, 32768); byte[] bytes = new byte[numBytes]; r2.nextBytes(bytes); dataOutput.writeBytes(bytes, bytes.length); long fp = dataOutput.getFilePointer(); assert fp == lastFP + numBytes; lastFP = fp; netBytes += numBytes; } dataOutput.close(); IndexInput input = dir.openInput("foo", IOContext.DEFAULT); pb.copy(input, input.length()); input.close(); PagedBytes.Reader reader = pb.freeze(true); r2 = new Random(seed); netBytes = 0; while (netBytes < 1.1 * Integer.MAX_VALUE) { int numBytes = TestUtil.nextInt(r2, 1, 32768); byte[] bytes = new byte[numBytes]; r2.nextBytes(bytes); BytesRef expected = new BytesRef(bytes); BytesRef actual = new BytesRef(); reader.fillSlice(actual, netBytes, numBytes); assertEquals(expected, actual); netBytes += numBytes; } dir.close(); }
59. TestPackedInts#testBlockReaderOverflow()
Project: lucene-solr
File: TestPackedInts.java
File: TestPackedInts.java
@Nightly public void testBlockReaderOverflow() throws IOException { final long valueCount = TestUtil.nextLong(random(), 1L + Integer.MAX_VALUE, (long) Integer.MAX_VALUE * 2); final int blockSize = 1 << TestUtil.nextInt(random(), 20, 22); final Directory dir = newDirectory(); final IndexOutput out = dir.createOutput("out.bin", IOContext.DEFAULT); final BlockPackedWriter writer = new BlockPackedWriter(out, blockSize); long value = random().nextInt() & 0xFFFFFFFFL; long valueOffset = TestUtil.nextLong(random(), 0, valueCount - 1); for (long i = 0; i < valueCount; ) { assertEquals(i, writer.ord()); if ((i & (blockSize - 1)) == 0 && (i + blockSize < valueOffset || i > valueOffset && i + blockSize < valueCount)) { writer.addBlockOfZeros(); i += blockSize; } else if (i == valueOffset) { writer.add(value); ++i; } else { writer.add(0); ++i; } } writer.finish(); out.close(); final IndexInput in = dir.openInput("out.bin", IOContext.DEFAULT); final BlockPackedReaderIterator it = new BlockPackedReaderIterator(in, PackedInts.VERSION_CURRENT, blockSize, valueCount); it.skip(valueOffset); assertEquals(value, it.next()); in.seek(0L); final BlockPackedReader reader = new BlockPackedReader(in, PackedInts.VERSION_CURRENT, blockSize, valueCount, random().nextBoolean()); assertEquals(value, reader.get(valueOffset)); for (int i = 0; i < 5; ++i) { final long offset = TestUtil.nextLong(random(), 0, valueCount - 1); if (offset == valueOffset) { assertEquals(value, reader.get(offset)); } else { assertEquals(0, reader.get(offset)); } } in.close(); dir.close(); }
60. TestPackedInts#testPackedInputOutput()
Project: lucene-solr
File: TestPackedInts.java
File: TestPackedInts.java
public void testPackedInputOutput() throws IOException { final long[] longs = new long[random().nextInt(8192)]; final int[] bitsPerValues = new int[longs.length]; final boolean[] skip = new boolean[longs.length]; for (int i = 0; i < longs.length; ++i) { final int bpv = RandomInts.randomIntBetween(random(), 1, 64); bitsPerValues[i] = random().nextBoolean() ? bpv : TestUtil.nextInt(random(), bpv, 64); if (bpv == 64) { longs[i] = random().nextLong(); } else { longs[i] = TestUtil.nextLong(random(), 0, PackedInts.maxValue(bpv)); } skip[i] = rarely(); } final Directory dir = newDirectory(); final IndexOutput out = dir.createOutput("out.bin", IOContext.DEFAULT); PackedDataOutput pout = new PackedDataOutput(out); long totalBits = 0; for (int i = 0; i < longs.length; ++i) { pout.writeLong(longs[i], bitsPerValues[i]); totalBits += bitsPerValues[i]; if (skip[i]) { pout.flush(); totalBits = 8 * (long) Math.ceil((double) totalBits / 8); } } pout.flush(); assertEquals((long) Math.ceil((double) totalBits / 8), out.getFilePointer()); out.close(); final IndexInput in = dir.openInput("out.bin", IOContext.READONCE); final PackedDataInput pin = new PackedDataInput(in); for (int i = 0; i < longs.length; ++i) { assertEquals("" + i, longs[i], pin.readLong(bitsPerValues[i])); if (skip[i]) { pin.skipToNextByte(); } } assertEquals((long) Math.ceil((double) totalBits / 8), in.getFilePointer()); in.close(); dir.close(); }
61. TestPackedInts#testEndPointer()
Project: lucene-solr
File: TestPackedInts.java
File: TestPackedInts.java
public void testEndPointer() throws IOException { final Directory dir = newDirectory(); final int valueCount = RandomInts.randomIntBetween(random(), 1, 1000); final IndexOutput out = dir.createOutput("tests.bin", newIOContext(random())); for (int i = 0; i < valueCount; ++i) { out.writeLong(0); } out.close(); final IndexInput in = dir.openInput("tests.bin", newIOContext(random())); for (int version = PackedInts.VERSION_START; version <= PackedInts.VERSION_CURRENT; ++version) { for (int bpv = 1; bpv <= 64; ++bpv) { for (PackedInts.Format format : PackedInts.Format.values()) { if (!format.isSupported(bpv)) { continue; } final long byteCount = format.byteCount(version, valueCount, bpv); String msg = "format=" + format + ",version=" + version + ",valueCount=" + valueCount + ",bpv=" + bpv; // test iterator in.seek(0L); final PackedInts.ReaderIterator it = PackedInts.getReaderIteratorNoHeader(in, format, version, valueCount, bpv, RandomInts.randomIntBetween(random(), 1, 1 << 16)); for (int i = 0; i < valueCount; ++i) { it.next(); } assertEquals(msg, byteCount, in.getFilePointer()); // test direct reader in.seek(0L); final PackedInts.Reader directReader = PackedInts.getDirectReaderNoHeader(in, format, version, valueCount, bpv); directReader.get(valueCount - 1); assertEquals(msg, byteCount, in.getFilePointer()); // test reader in.seek(0L); PackedInts.getReaderNoHeader(in, format, version, valueCount, bpv); assertEquals(msg, byteCount, in.getFilePointer()); } } } in.close(); dir.close(); }
62. TestDirectMonotonic#testConstantSlope()
Project: lucene-solr
File: TestDirectMonotonic.java
File: TestDirectMonotonic.java
public void testConstantSlope() throws IOException { Directory dir = newDirectory(); final int blockShift = TestUtil.nextInt(random(), DirectMonotonicWriter.MIN_BLOCK_SHIFT, DirectMonotonicWriter.MAX_BLOCK_SHIFT); final int numValues = TestUtil.nextInt(random(), 1, 1 << 20); final long min = random().nextLong(); final long inc = random().nextInt(1 << random().nextInt(20)); List<Long> actualValues = new ArrayList<>(); for (int i = 0; i < numValues; ++i) { actualValues.add(min + inc * i); } final long dataLength; try (IndexOutput metaOut = dir.createOutput("meta", IOContext.DEFAULT); IndexOutput dataOut = dir.createOutput("data", IOContext.DEFAULT)) { DirectMonotonicWriter w = DirectMonotonicWriter.getInstance(metaOut, dataOut, numValues, blockShift); for (long v : actualValues) { w.add(v); } w.finish(); dataLength = dataOut.getFilePointer(); } try (IndexInput metaIn = dir.openInput("meta", IOContext.READONCE); IndexInput dataIn = dir.openInput("data", IOContext.DEFAULT)) { DirectMonotonicReader.Meta meta = DirectMonotonicReader.loadMeta(metaIn, numValues, blockShift); LongValues values = DirectMonotonicReader.getInstance(meta, dataIn.randomAccessSlice(0, dataLength)); for (int i = 0; i < numValues; ++i) { assertEquals(actualValues.get(i).longValue(), values.get(i)); } assertEquals(0, dataIn.getFilePointer()); } dir.close(); }
63. TestDirectMonotonic#testSimple()
Project: lucene-solr
File: TestDirectMonotonic.java
File: TestDirectMonotonic.java
public void testSimple() throws IOException { Directory dir = newDirectory(); final int blockShift = 2; List<Long> actualValues = Arrays.asList(1L, 2L, 5L, 7L, 8L, 100L); final int numValues = actualValues.size(); final long dataLength; try (IndexOutput metaOut = dir.createOutput("meta", IOContext.DEFAULT); IndexOutput dataOut = dir.createOutput("data", IOContext.DEFAULT)) { DirectMonotonicWriter w = DirectMonotonicWriter.getInstance(metaOut, dataOut, numValues, blockShift); for (long v : actualValues) { w.add(v); } w.finish(); dataLength = dataOut.getFilePointer(); } try (IndexInput metaIn = dir.openInput("meta", IOContext.READONCE); IndexInput dataIn = dir.openInput("data", IOContext.DEFAULT)) { DirectMonotonicReader.Meta meta = DirectMonotonicReader.loadMeta(metaIn, numValues, blockShift); LongValues values = DirectMonotonicReader.getInstance(meta, dataIn.randomAccessSlice(0, dataLength)); for (int i = 0; i < numValues; ++i) { final long v = values.get(i); assertEquals(actualValues.get(i).longValue(), v); } } dir.close(); }
64. TestFSTs#testNonFinalStopNode()
Project: lucene-solr
File: TestFSTs.java
File: TestFSTs.java
// Make sure raw FST can differentiate between final vs // non-final end nodes public void testNonFinalStopNode() throws Exception { final PositiveIntOutputs outputs = PositiveIntOutputs.getSingleton(); final Long nothing = outputs.getNoOutput(); final Builder<Long> b = new Builder<>(FST.INPUT_TYPE.BYTE1, outputs); //final FST<Long> fst = new FST<>(FST.INPUT_TYPE.BYTE1, outputs, false, PackedInts.COMPACT, 15); final FST<Long> fst = b.fst; final Builder.UnCompiledNode<Long> rootNode = new Builder.UnCompiledNode<>(b, 0); // Add final stop node { final Builder.UnCompiledNode<Long> node = new Builder.UnCompiledNode<>(b, 0); node.isFinal = true; rootNode.addArc('a', node); final Builder.CompiledNode frozen = new Builder.CompiledNode(); frozen.node = fst.addNode(b, node); rootNode.arcs[0].nextFinalOutput = 17L; rootNode.arcs[0].isFinal = true; rootNode.arcs[0].output = nothing; rootNode.arcs[0].target = frozen; } // Add non-final stop node { final Builder.UnCompiledNode<Long> node = new Builder.UnCompiledNode<>(b, 0); rootNode.addArc('b', node); final Builder.CompiledNode frozen = new Builder.CompiledNode(); frozen.node = fst.addNode(b, node); rootNode.arcs[1].nextFinalOutput = nothing; rootNode.arcs[1].output = 42L; rootNode.arcs[1].target = frozen; } fst.finish(fst.addNode(b, rootNode)); StringWriter w = new StringWriter(); //Writer w = new OutputStreamWriter(new FileOutputStream("/x/tmp3/out.dot")); Util.toDot(fst, w, false, false); w.close(); checkStopNodes(fst, outputs); // Make sure it still works after save/load: Directory dir = newDirectory(); IndexOutput out = dir.createOutput("fst", IOContext.DEFAULT); fst.save(out); out.close(); IndexInput in = dir.openInput("fst", IOContext.DEFAULT); final FST<Long> fst2 = new FST<>(in, outputs); checkStopNodes(fst2, outputs); in.close(); dir.close(); }
65. Test2BBKDPoints#test2D()
Project: lucene-solr
File: Test2BBKDPoints.java
File: Test2BBKDPoints.java
public void test2D() throws Exception { Directory dir = FSDirectory.open(createTempDir("2BBKDPoints2D")); final int numDocs = (Integer.MAX_VALUE / 26) + 100; BKDWriter w = new BKDWriter(numDocs, dir, "_0", 2, Long.BYTES, BKDWriter.DEFAULT_MAX_POINTS_IN_LEAF_NODE, BKDWriter.DEFAULT_MAX_MB_SORT_IN_HEAP, 26L * numDocs, false); int counter = 0; byte[] packedBytes = new byte[2 * Long.BYTES]; for (int docID = 0; docID < numDocs; docID++) { for (int j = 0; j < 26; j++) { // first a random int: NumericUtils.intToSortableBytes(random().nextInt(), packedBytes, 0); // then our counter, which will overflow a bit in the end: NumericUtils.intToSortableBytes(counter, packedBytes, Integer.BYTES); // then two random ints for the 2nd dimension: NumericUtils.intToSortableBytes(random().nextInt(), packedBytes, Long.BYTES); NumericUtils.intToSortableBytes(random().nextInt(), packedBytes, Long.BYTES + Integer.BYTES); w.add(packedBytes, docID); counter++; } if (VERBOSE && docID % 100000 == 0) { System.out.println(docID + " of " + numDocs + "..."); } } IndexOutput out = dir.createOutput("2d.bkd", IOContext.DEFAULT); long indexFP = w.finish(out); out.close(); IndexInput in = dir.openInput("2d.bkd", IOContext.DEFAULT); in.seek(indexFP); BKDReader r = new BKDReader(in); r.verify(numDocs); in.close(); dir.close(); }
66. Test2BBKDPoints#test1D()
Project: lucene-solr
File: Test2BBKDPoints.java
File: Test2BBKDPoints.java
public void test1D() throws Exception { Directory dir = FSDirectory.open(createTempDir("2BBKDPoints1D")); final int numDocs = (Integer.MAX_VALUE / 26) + 100; BKDWriter w = new BKDWriter(numDocs, dir, "_0", 1, Long.BYTES, BKDWriter.DEFAULT_MAX_POINTS_IN_LEAF_NODE, BKDWriter.DEFAULT_MAX_MB_SORT_IN_HEAP, 26L * numDocs, false); int counter = 0; byte[] packedBytes = new byte[Long.BYTES]; for (int docID = 0; docID < numDocs; docID++) { for (int j = 0; j < 26; j++) { // first a random int: NumericUtils.intToSortableBytes(random().nextInt(), packedBytes, 0); // then our counter, which will overflow a bit in the end: NumericUtils.intToSortableBytes(counter, packedBytes, Integer.BYTES); w.add(packedBytes, docID); counter++; } if (VERBOSE && docID % 100000 == 0) { System.out.println(docID + " of " + numDocs + "..."); } } IndexOutput out = dir.createOutput("1d.bkd", IOContext.DEFAULT); long indexFP = w.finish(out); out.close(); IndexInput in = dir.openInput("1d.bkd", IOContext.DEFAULT); in.seek(indexFP); BKDReader r = new BKDReader(in); r.verify(numDocs); in.close(); dir.close(); }
67. TestIndexWriterExceptions#testSimulatedCorruptIndex1()
Project: lucene-solr
File: TestIndexWriterExceptions.java
File: TestIndexWriterExceptions.java
// Simulate a corrupt index by removing last byte of // latest segments file and make sure we get an // IOException trying to open the index: public void testSimulatedCorruptIndex1() throws IOException { BaseDirectoryWrapper dir = newDirectory(); // we are corrupting it! dir.setCheckIndexOnClose(false); IndexWriter writer = null; writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); // add 100 documents for (int i = 0; i < 100; i++) { addDoc(writer); } // close writer.close(); long gen = SegmentInfos.getLastCommitGeneration(dir); assertTrue("segment generation should be > 0 but got " + gen, gen > 0); String fileNameIn = SegmentInfos.getLastCommitSegmentsFileName(dir); String fileNameOut = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", 1 + gen); IndexInput in = dir.openInput(fileNameIn, newIOContext(random())); IndexOutput out = dir.createOutput(fileNameOut, newIOContext(random())); long length = in.length(); for (int i = 0; i < length - 1; i++) { out.writeByte(in.readByte()); } in.close(); out.close(); dir.deleteFile(fileNameIn); expectThrows(Exception.class, () -> { DirectoryReader.open(dir); }); dir.close(); }
68. TestCodecUtil#testWriteBogusCRC()
Project: lucene-solr
File: TestCodecUtil.java
File: TestCodecUtil.java
public void testWriteBogusCRC() throws Exception { RAMFile file = new RAMFile(); final IndexOutput output = new RAMOutputStream(file, false); AtomicLong fakeChecksum = new AtomicLong(); // wrap the index input where we control the checksum for mocking IndexOutput fakeOutput = new IndexOutput("fake", "fake") { @Override public void close() throws IOException { output.close(); } @Override public long getFilePointer() { return output.getFilePointer(); } @Override public long getChecksum() throws IOException { return fakeChecksum.get(); } @Override public void writeByte(byte b) throws IOException { output.writeByte(b); } @Override public void writeBytes(byte[] b, int offset, int length) throws IOException { output.writeBytes(b, offset, length); } }; // bad fakeChecksum.set(-1L); expectThrows(IllegalStateException.class, () -> { CodecUtil.writeCRC(fakeOutput); }); // bad fakeChecksum.set(1L << 32); expectThrows(IllegalStateException.class, () -> { CodecUtil.writeCRC(fakeOutput); }); // bad fakeChecksum.set(-(1L << 32)); expectThrows(IllegalStateException.class, () -> { CodecUtil.writeCRC(fakeOutput); }); // ok fakeChecksum.set((1L << 32) - 1); CodecUtil.writeCRC(fakeOutput); }
69. OfflinePointReader#split()
Project: lucene-solr
File: OfflinePointReader.java
File: OfflinePointReader.java
@Override public long split(long count, LongBitSet rightTree, PointWriter left, PointWriter right, boolean doClearBits) throws IOException { if (left instanceof OfflinePointWriter == false || right instanceof OfflinePointWriter == false) { return super.split(count, rightTree, left, right, doClearBits); } // We specialize the offline -> offline split since the default impl // is somewhat wasteful otherwise (e.g. decoding docID when we don't // need to) int packedBytesLength = packedValue.length; int bytesPerDoc = packedBytesLength + Integer.BYTES; if (singleValuePerDoc == false) { if (longOrds) { bytesPerDoc += Long.BYTES; } else { bytesPerDoc += Integer.BYTES; } } long rightCount = 0; IndexOutput rightOut = ((OfflinePointWriter) right).out; IndexOutput leftOut = ((OfflinePointWriter) left).out; assert count <= countLeft : "count=" + count + " countLeft=" + countLeft; countLeft -= count; long countStart = count; byte[] buffer = new byte[bytesPerDoc]; while (count > 0) { in.readBytes(buffer, 0, buffer.length); long ord; if (longOrds) { // A long ord, after the docID: ord = readLong(buffer, packedBytesLength + Integer.BYTES); } else if (singleValuePerDoc) { // docID is the ord: ord = readInt(buffer, packedBytesLength); } else { // An int ord, after the docID: ord = readInt(buffer, packedBytesLength + Integer.BYTES); } if (rightTree.get(ord)) { rightOut.writeBytes(buffer, 0, bytesPerDoc); if (doClearBits) { rightTree.clear(ord); } rightCount++; } else { leftOut.writeBytes(buffer, 0, bytesPerDoc); } count--; } ((OfflinePointWriter) right).count = rightCount; ((OfflinePointWriter) left).count = countStart - rightCount; return rightCount; }
70. Lucene50CompoundFormat#write()
Project: lucene-solr
File: Lucene50CompoundFormat.java
File: Lucene50CompoundFormat.java
@Override public void write(Directory dir, SegmentInfo si, IOContext context) throws IOException { String dataFile = IndexFileNames.segmentFileName(si.name, "", DATA_EXTENSION); String entriesFile = IndexFileNames.segmentFileName(si.name, "", ENTRIES_EXTENSION); byte[] expectedID = si.getId(); byte[] id = new byte[StringHelper.ID_LENGTH]; try (IndexOutput data = dir.createOutput(dataFile, context); IndexOutput entries = dir.createOutput(entriesFile, context)) { CodecUtil.writeIndexHeader(data, DATA_CODEC, VERSION_CURRENT, si.getId(), ""); CodecUtil.writeIndexHeader(entries, ENTRY_CODEC, VERSION_CURRENT, si.getId(), ""); // write number of files entries.writeVInt(si.files().size()); for (String file : si.files()) { // write bytes for file long startOffset = data.getFilePointer(); try (ChecksumIndexInput in = dir.openChecksumInput(file, IOContext.READONCE)) { // just copies the index header, verifying that its id matches what we expect CodecUtil.verifyAndCopyIndexHeader(in, data, si.getId()); // copy all bytes except the footer long numBytesToCopy = in.length() - CodecUtil.footerLength() - in.getFilePointer(); data.copyBytes(in, numBytesToCopy); // verify footer (checksum) matches for the incoming file we are copying long checksum = CodecUtil.checkFooter(in); // this is poached from CodecUtil.writeFooter, but we need to use our own checksum, not data.getChecksum(), but I think // adding a public method to CodecUtil to do that is somewhat dangerous: data.writeInt(CodecUtil.FOOTER_MAGIC); data.writeInt(0); data.writeLong(checksum); } long endOffset = data.getFilePointer(); long length = endOffset - startOffset; // write entry for file entries.writeString(IndexFileNames.stripSegmentName(file)); entries.writeLong(startOffset); entries.writeLong(length); } CodecUtil.writeFooter(data); CodecUtil.writeFooter(entries); } }
71. StoreTests#testVerifyingIndexInput()
Project: elasticsearch
File: StoreTests.java
File: StoreTests.java
public void testVerifyingIndexInput() throws IOException { Directory dir = newDirectory(); IndexOutput output = dir.createOutput("foo.bar", IOContext.DEFAULT); int iters = scaledRandomIntBetween(10, 100); for (int i = 0; i < iters; i++) { BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024)); output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length); } CodecUtil.writeFooter(output); output.close(); // Check file IndexInput indexInput = dir.openInput("foo.bar", IOContext.DEFAULT); long checksum = CodecUtil.retrieveChecksum(indexInput); indexInput.seek(0); IndexInput verifyingIndexInput = new Store.VerifyingIndexInput(dir.openInput("foo.bar", IOContext.DEFAULT)); readIndexInputFullyWithRandomSeeks(verifyingIndexInput); Store.verify(verifyingIndexInput); assertThat(checksum, equalTo(((ChecksumIndexInput) verifyingIndexInput).getChecksum())); IOUtils.close(indexInput, verifyingIndexInput); // Corrupt file and check again corruptFile(dir, "foo.bar", "foo1.bar"); verifyingIndexInput = new Store.VerifyingIndexInput(dir.openInput("foo1.bar", IOContext.DEFAULT)); readIndexInputFullyWithRandomSeeks(verifyingIndexInput); try { Store.verify(verifyingIndexInput); fail("should be a corrupted index"); } catch (CorruptIndexExceptionIndexFormatTooOldException | IndexFormatTooNewException | ex) { } IOUtils.close(verifyingIndexInput); IOUtils.close(dir); }
72. InputStreamIndexInputTests#testReadMultiFourBytesLimit()
Project: elasticsearch
File: InputStreamIndexInputTests.java
File: InputStreamIndexInputTests.java
public void testReadMultiFourBytesLimit() throws IOException { RAMDirectory dir = new RAMDirectory(); IndexOutput output = dir.createOutput("test", IOContext.DEFAULT); for (int i = 0; i < 3; i++) { output.writeByte((byte) 1); } for (int i = 0; i < 3; i++) { output.writeByte((byte) 2); } output.close(); IndexInput input = dir.openInput("test", IOContext.DEFAULT); byte[] read = new byte[4]; assertThat(input.getFilePointer(), lessThan(input.length())); InputStreamIndexInput is = new InputStreamIndexInput(input, 4); assertThat(is.actualSizeToRead(), equalTo(4L)); assertThat(is.read(read), equalTo(4)); assertThat(read[0], equalTo((byte) 1)); assertThat(read[1], equalTo((byte) 1)); assertThat(read[2], equalTo((byte) 1)); assertThat(read[3], equalTo((byte) 2)); assertThat(input.getFilePointer(), lessThan(input.length())); is = new InputStreamIndexInput(input, 4); assertThat(is.actualSizeToRead(), equalTo(2L)); assertThat(is.read(read), equalTo(2)); assertThat(read[0], equalTo((byte) 2)); assertThat(read[1], equalTo((byte) 2)); assertThat(input.getFilePointer(), equalTo(input.length())); is = new InputStreamIndexInput(input, 4); assertThat(is.actualSizeToRead(), equalTo(0L)); assertThat(is.read(read), equalTo(-1)); }
73. InputStreamIndexInputTests#testReadMultiTwoBytesLimit1()
Project: elasticsearch
File: InputStreamIndexInputTests.java
File: InputStreamIndexInputTests.java
public void testReadMultiTwoBytesLimit1() throws IOException { RAMDirectory dir = new RAMDirectory(); IndexOutput output = dir.createOutput("test", IOContext.DEFAULT); for (int i = 0; i < 3; i++) { output.writeByte((byte) 1); } for (int i = 0; i < 3; i++) { output.writeByte((byte) 2); } output.close(); IndexInput input = dir.openInput("test", IOContext.DEFAULT); byte[] read = new byte[2]; assertThat(input.getFilePointer(), lessThan(input.length())); InputStreamIndexInput is = new InputStreamIndexInput(input, 2); assertThat(is.actualSizeToRead(), equalTo(2L)); assertThat(is.read(read), equalTo(2)); assertThat(read[0], equalTo((byte) 1)); assertThat(read[1], equalTo((byte) 1)); assertThat(input.getFilePointer(), lessThan(input.length())); is = new InputStreamIndexInput(input, 2); assertThat(is.actualSizeToRead(), equalTo(2L)); assertThat(is.read(read), equalTo(2)); assertThat(read[0], equalTo((byte) 1)); assertThat(read[1], equalTo((byte) 2)); assertThat(input.getFilePointer(), lessThan(input.length())); is = new InputStreamIndexInput(input, 2); assertThat(is.actualSizeToRead(), equalTo(2L)); assertThat(is.read(read), equalTo(2)); assertThat(read[0], equalTo((byte) 2)); assertThat(read[1], equalTo((byte) 2)); assertThat(input.getFilePointer(), equalTo(input.length())); is = new InputStreamIndexInput(input, 2); assertThat(is.actualSizeToRead(), equalTo(0L)); assertThat(is.read(read), equalTo(-1)); }
74. InputStreamIndexInputTests#testSingleReadTwoBytesLimit()
Project: elasticsearch
File: InputStreamIndexInputTests.java
File: InputStreamIndexInputTests.java
public void testSingleReadTwoBytesLimit() throws IOException { RAMDirectory dir = new RAMDirectory(); IndexOutput output = dir.createOutput("test", IOContext.DEFAULT); for (int i = 0; i < 3; i++) { output.writeByte((byte) 1); } for (int i = 0; i < 3; i++) { output.writeByte((byte) 2); } output.close(); IndexInput input = dir.openInput("test", IOContext.DEFAULT); assertThat(input.getFilePointer(), lessThan(input.length())); InputStreamIndexInput is = new InputStreamIndexInput(input, 2); assertThat(is.actualSizeToRead(), equalTo(2L)); assertThat(is.read(), equalTo(1)); assertThat(is.read(), equalTo(1)); assertThat(is.read(), equalTo(-1)); assertThat(input.getFilePointer(), lessThan(input.length())); is = new InputStreamIndexInput(input, 2); assertThat(is.actualSizeToRead(), equalTo(2L)); assertThat(is.read(), equalTo(1)); assertThat(is.read(), equalTo(2)); assertThat(is.read(), equalTo(-1)); assertThat(input.getFilePointer(), lessThan(input.length())); is = new InputStreamIndexInput(input, 2); assertThat(is.actualSizeToRead(), equalTo(2L)); assertThat(is.read(), equalTo(2)); assertThat(is.read(), equalTo(2)); assertThat(is.read(), equalTo(-1)); assertThat(input.getFilePointer(), equalTo(input.length())); is = new InputStreamIndexInput(input, 2); assertThat(is.actualSizeToRead(), equalTo(0L)); assertThat(is.read(), equalTo(-1)); }
75. InputStreamIndexInputTests#testReadMultiSingleByteLimit1()
Project: elasticsearch
File: InputStreamIndexInputTests.java
File: InputStreamIndexInputTests.java
public void testReadMultiSingleByteLimit1() throws IOException { RAMDirectory dir = new RAMDirectory(); IndexOutput output = dir.createOutput("test", IOContext.DEFAULT); for (int i = 0; i < 3; i++) { output.writeByte((byte) 1); } for (int i = 0; i < 3; i++) { output.writeByte((byte) 2); } output.close(); IndexInput input = dir.openInput("test", IOContext.DEFAULT); byte[] read = new byte[2]; for (int i = 0; i < 3; i++) { assertThat(input.getFilePointer(), lessThan(input.length())); InputStreamIndexInput is = new InputStreamIndexInput(input, 1); assertThat(is.actualSizeToRead(), equalTo(1L)); assertThat(is.read(read), equalTo(1)); assertThat(read[0], equalTo((byte) 1)); } for (int i = 0; i < 3; i++) { assertThat(input.getFilePointer(), lessThan(input.length())); InputStreamIndexInput is = new InputStreamIndexInput(input, 1); assertThat(is.actualSizeToRead(), equalTo(1L)); assertThat(is.read(read), equalTo(1)); assertThat(read[0], equalTo((byte) 2)); } assertThat(input.getFilePointer(), equalTo(input.length())); InputStreamIndexInput is = new InputStreamIndexInput(input, 1); assertThat(is.actualSizeToRead(), equalTo(0L)); assertThat(is.read(read), equalTo(-1)); }
76. InputStreamIndexInputTests#testSingleReadSingleByteLimit()
Project: elasticsearch
File: InputStreamIndexInputTests.java
File: InputStreamIndexInputTests.java
public void testSingleReadSingleByteLimit() throws IOException { RAMDirectory dir = new RAMDirectory(); IndexOutput output = dir.createOutput("test", IOContext.DEFAULT); for (int i = 0; i < 3; i++) { output.writeByte((byte) 1); } for (int i = 0; i < 3; i++) { output.writeByte((byte) 2); } output.close(); IndexInput input = dir.openInput("test", IOContext.DEFAULT); for (int i = 0; i < 3; i++) { InputStreamIndexInput is = new InputStreamIndexInput(input, 1); assertThat(input.getFilePointer(), lessThan(input.length())); assertThat(is.actualSizeToRead(), equalTo(1L)); assertThat(is.read(), equalTo(1)); assertThat(is.read(), equalTo(-1)); } for (int i = 0; i < 3; i++) { InputStreamIndexInput is = new InputStreamIndexInput(input, 1); assertThat(input.getFilePointer(), lessThan(input.length())); assertThat(is.actualSizeToRead(), equalTo(1L)); assertThat(is.read(), equalTo(2)); assertThat(is.read(), equalTo(-1)); } assertThat(input.getFilePointer(), equalTo(input.length())); InputStreamIndexInput is = new InputStreamIndexInput(input, 1); assertThat(is.actualSizeToRead(), equalTo(0L)); assertThat(is.read(), equalTo(-1)); }
77. RecoveryTarget#writeFileChunk()
Project: elasticsearch
File: RecoveryTarget.java
File: RecoveryTarget.java
@Override public void writeFileChunk(StoreFileMetaData fileMetaData, long position, BytesReference content, boolean lastChunk, int totalTranslogOps) throws IOException { final Store store = store(); final String name = fileMetaData.name(); state().getTranslog().totalOperations(totalTranslogOps); final RecoveryState.Index indexState = state().getIndex(); IndexOutput indexOutput; if (position == 0) { indexOutput = openAndPutIndexOutput(name, fileMetaData, store); } else { indexOutput = getOpenIndexOutput(name); } if (content.hasArray() == false) { content = content.toBytesArray(); } indexOutput.writeBytes(content.array(), content.arrayOffset(), content.length()); indexState.addRecoveredBytesToFile(name, content.length()); if (indexOutput.getFilePointer() >= fileMetaData.length() || lastChunk) { try { Store.verify(indexOutput); } finally { // we are done indexOutput.close(); } final String temporaryFileName = getTempNameForFile(name); assert Arrays.asList(store.directory().listAll()).contains(temporaryFileName); store.directory().sync(Collections.singleton(temporaryFileName)); IndexOutput remove = removeOpenIndexOutputs(name); // remove maybe null if we got finished assert remove == null || remove == indexOutput; } }
78. HdfsDirectoryFactoryTest#testLocalityReporter()
Project: lucene-solr
File: HdfsDirectoryFactoryTest.java
File: HdfsDirectoryFactoryTest.java
@Test public void testLocalityReporter() throws Exception { Configuration conf = HdfsTestUtil.getClientConfiguration(dfsCluster); conf.set("dfs.permissions.enabled", "false"); HdfsDirectoryFactory factory = new HdfsDirectoryFactory(); Map<String, String> props = new HashMap<String, String>(); props.put(HdfsDirectoryFactory.HDFS_HOME, HdfsTestUtil.getURI(dfsCluster) + "/solr"); props.put(HdfsDirectoryFactory.BLOCKCACHE_ENABLED, "false"); props.put(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_ENABLE, "false"); props.put(HdfsDirectoryFactory.LOCALITYMETRICS_ENABLED, "true"); factory.init(new NamedList<>(props)); Iterator<SolrInfoMBean> it = factory.offerMBeans().iterator(); // skip it.next(); // brittle, but it's ok SolrInfoMBean localityBean = it.next(); // Make sure we have the right bean. assertEquals("Got the wrong bean: " + localityBean.getName(), "hdfs-locality", localityBean.getName()); // We haven't done anything, so there should be no data NamedList<?> statistics = localityBean.getStatistics(); assertEquals("Saw bytes that were not written: " + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL), 0l, statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL)); assertEquals("Counted bytes as local when none written: " + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_RATIO), 0, statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_RATIO)); // create a directory and a file String path = HdfsTestUtil.getURI(dfsCluster) + "/solr3/"; Directory dir = factory.create(path, NoLockFactory.INSTANCE, DirContext.DEFAULT); try (IndexOutput writer = dir.createOutput("output", null)) { writer.writeLong(42l); } final long long_bytes = Long.SIZE / Byte.SIZE; // no locality because hostname not set factory.setHost("bogus"); statistics = localityBean.getStatistics(); assertEquals("Wrong number of total bytes counted: " + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL), long_bytes, statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL)); assertEquals("Wrong number 
of total blocks counted: " + statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_TOTAL), 1, statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_TOTAL)); assertEquals("Counted block as local when bad hostname set: " + statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_LOCAL), 0, statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_LOCAL)); // set hostname and check again factory.setHost("127.0.0.1"); statistics = localityBean.getStatistics(); assertEquals("Did not count block as local after setting hostname: " + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_LOCAL), long_bytes, statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_LOCAL)); factory.close(); }
79. BlockDirectory#createOutput()
Project: lucene-solr
File: BlockDirectory.java
File: BlockDirectory.java
@Override
public IndexOutput createOutput(String name, IOContext context) throws IOException {
  // Create the underlying output first; wrap it in the write cache only when enabled.
  final IndexOutput delegate = super.createOutput(name, context);
  if (!useWriteCache(name, context)) {
    return delegate;
  }
  return new CachedIndexOutput(this, delegate, blockSize, name, cache, blockSize);
}
80. BaseCompoundFormatTestCase#createSequenceFile()
Project: lucene-solr
File: BaseCompoundFormatTestCase.java
File: BaseCompoundFormatTestCase.java
/** Creates a file of the specified size with sequential data. The first
 * byte is written as the start byte provided. All subsequent bytes are
 * computed as start + offset where offset is the number of the byte. */
protected static void createSequenceFile(Directory dir, String name, byte start, int size, byte[] segID, String segSuffix) throws IOException {
  try (IndexOutput out = dir.createOutput(name, newIOContext(random()))) {
    CodecUtil.writeIndexHeader(out, "Foo", 0, segID, segSuffix);
    byte next = start;
    for (int written = 0; written < size; written++) {
      out.writeByte(next);
      next++;
    }
    CodecUtil.writeFooter(out);
  }
}
81. BaseCompoundFormatTestCase#createRandomFile()
Project: lucene-solr
File: BaseCompoundFormatTestCase.java
File: BaseCompoundFormatTestCase.java
/** Creates a file of the specified size with random data. */
protected static void createRandomFile(Directory dir, String name, int size, byte[] segId) throws IOException {
  Random rnd = random();
  try (IndexOutput out = dir.createOutput(name, newIOContext(random()))) {
    CodecUtil.writeIndexHeader(out, "Foo", 0, segId, "suffix");
    for (int remaining = size; remaining > 0; remaining--) {
      out.writeByte((byte) rnd.nextInt(256));
    }
    CodecUtil.writeFooter(out);
  }
}
82. RAMOnlyPostingsFormat#fieldsConsumer()
Project: lucene-solr
File: RAMOnlyPostingsFormat.java
File: RAMOnlyPostingsFormat.java
@Override public FieldsConsumer fieldsConsumer(SegmentWriteState writeState) throws IOException { final int id = nextID.getAndIncrement(); // TODO -- ok to do this up front instead of // on close....? should be ok? // Write our ID: final String idFileName = IndexFileNames.segmentFileName(writeState.segmentInfo.name, writeState.segmentSuffix, ID_EXTENSION); IndexOutput out = writeState.directory.createOutput(idFileName, writeState.context); boolean success = false; try { CodecUtil.writeHeader(out, RAM_ONLY_NAME, VERSION_LATEST); out.writeVInt(id); success = true; } finally { if (!success) { IOUtils.closeWhileHandlingException(out); } else { IOUtils.close(out); } } final RAMPostings postings = new RAMPostings(); final RAMFieldsConsumer consumer = new RAMFieldsConsumer(writeState, postings); synchronized (state) { state.put(id, postings); } return consumer; }
83. FSTCompletionLookup#build()
Project: lucene-solr
File: FSTCompletionLookup.java
File: FSTCompletionLookup.java
// Builds the suggester: writes weighted entries to a temp file, offline-sorts them,
// then feeds bucketed entries into an FSTCompletionBuilder.
@Override
public void build(InputIterator iterator) throws IOException {
  if (iterator.hasPayloads()) {
    throw new IllegalArgumentException("this suggester doesn't support payloads");
  }
  if (iterator.hasContexts()) {
    throw new IllegalArgumentException("this suggester doesn't support contexts");
  }
  OfflineSorter sorter = new OfflineSorter(tempDir, tempFileNamePrefix);
  ExternalRefSorter externalSorter = new ExternalRefSorter(sorter);
  IndexOutput tempInput = tempDir.createTempOutput(tempFileNamePrefix, "input", IOContext.DEFAULT);
  String tempSortedFileName = null;
  OfflineSorter.ByteSequencesWriter writer = new OfflineSorter.ByteSequencesWriter(tempInput);
  OfflineSorter.ByteSequencesReader reader = null;
  // Push floats up front before sequences to sort them. For now, assume they are non-negative.
  // If negative floats are allowed some trickery needs to be done to find their byte order.
  count = 0;
  try {
    byte[] buffer = new byte[0];
    ByteArrayDataOutput output = new ByteArrayDataOutput(buffer);
    BytesRef spare;
    int inputLineCount = 0;
    while ((spare = iterator.next()) != null) {
      if (spare.length + 4 >= buffer.length) {
        buffer = ArrayUtil.grow(buffer, spare.length + 4);
      }
      output.reset(buffer);
      // 4-byte encoded weight precedes each entry so the offline sort orders by weight first
      output.writeInt(encodeWeight(iterator.weight()));
      output.writeBytes(spare.bytes, spare.offset, spare.length);
      writer.write(buffer, 0, output.getPosition());
      inputLineCount++;
    }
    CodecUtil.writeFooter(tempInput);
    writer.close();
    // We don't know the distribution of scores and we need to bucket them, so we'll sort
    // and divide into equal buckets.
    tempSortedFileName = sorter.sort(tempInput.getName());
    tempDir.deleteFile(tempInput.getName());
    FSTCompletionBuilder builder = new FSTCompletionBuilder(buckets, externalSorter, sharedTailLength);
    reader = new OfflineSorter.ByteSequencesReader(tempDir.openChecksumInput(tempSortedFileName, IOContext.READONCE), tempSortedFileName);
    long line = 0;
    int previousBucket = 0;
    int previousScore = 0;
    ByteArrayDataInput input = new ByteArrayDataInput();
    BytesRef tmp2 = new BytesRef();
    while (true) {
      BytesRef scratch = reader.next();
      if (scratch == null) {
        break;
      }
      input.reset(scratch.bytes, scratch.offset, scratch.length);
      int currentScore = input.readInt();
      int bucket;
      // entries with equal score stay in the same bucket; otherwise bucket by position
      if (line > 0 && currentScore == previousScore) {
        bucket = previousBucket;
      } else {
        bucket = (int) (line * buckets / inputLineCount);
      }
      previousScore = currentScore;
      previousBucket = bucket;
      // Only append the input, discard the weight.
      tmp2.bytes = scratch.bytes;
      tmp2.offset = scratch.offset + input.getPosition();
      tmp2.length = scratch.length - input.getPosition();
      builder.add(tmp2, bucket);
      line++;
      count++;
    }
    // The two FSTCompletions share the same automaton.
    this.higherWeightsCompletion = builder.build();
    this.normalCompletion = new FSTCompletion(higherWeightsCompletion.getFST(), false, exactMatchFirst);
  } finally {
    IOUtils.closeWhileHandlingException(reader, writer, externalSorter);
    IOUtils.deleteFilesIgnoringExceptions(tempDir, tempInput.getName(), tempSortedFileName);
  }
}
84. CompletionFieldsConsumer#close()
Project: lucene-solr
File: CompletionFieldsConsumer.java
File: CompletionFieldsConsumer.java
@Override public void close() throws IOException { if (closed) { return; } closed = true; String indexFile = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, INDEX_EXTENSION); boolean success = false; try (IndexOutput indexOut = state.directory.createOutput(indexFile, state.context)) { delegateFieldsConsumer.close(); CodecUtil.writeIndexHeader(indexOut, CODEC_NAME, COMPLETION_VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix); /* * we write the delegate postings format name so we can load it * without getting an instance in the ctor */ indexOut.writeString(delegatePostingsFormatName); // write # of seen fields indexOut.writeVInt(seenFields.size()); // write field numbers and dictOut offsets for (Map.Entry<String, CompletionMetaData> seenField : seenFields.entrySet()) { FieldInfo fieldInfo = state.fieldInfos.fieldInfo(seenField.getKey()); indexOut.writeVInt(fieldInfo.number); CompletionMetaData metaData = seenField.getValue(); indexOut.writeVLong(metaData.filePointer); indexOut.writeVLong(metaData.minWeight); indexOut.writeVLong(metaData.maxWeight); indexOut.writeByte(metaData.type); } CodecUtil.writeFooter(indexOut); CodecUtil.writeFooter(dictOut); IOUtils.close(dictOut); success = true; } finally { if (success == false) { IOUtils.closeWhileHandlingException(dictOut, delegateFieldsConsumer); } } }
85. TestDocIdsWriter#test()
Project: lucene-solr
File: TestDocIdsWriter.java
File: TestDocIdsWriter.java
// Round-trips an int[] of docIDs through DocIdsWriter and checks both read paths
// (bulk array read and IntersectVisitor callback) recover the same values.
private void test(Directory dir, int[] ints) throws Exception {
  final long len;
  try (IndexOutput out = dir.createOutput("tmp", IOContext.DEFAULT)) {
    DocIdsWriter.writeDocIds(ints, 0, ints.length, out);
    len = out.getFilePointer();
    if (random().nextBoolean()) {
      // garbage
      out.writeLong(0);
    }
  }
  try (IndexInput in = dir.openInput("tmp", IOContext.READONCE)) {
    int[] read = new int[ints.length];
    DocIdsWriter.readInts(in, ints.length, read);
    assertArrayEquals(ints, read);
    // reader must stop exactly where the writer finished, even with trailing garbage
    assertEquals(len, in.getFilePointer());
  }
  try (IndexInput in = dir.openInput("tmp", IOContext.READONCE)) {
    int[] read = new int[ints.length];
    DocIdsWriter.readInts(in, ints.length, new IntersectVisitor() {
      int i = 0;

      @Override
      public void visit(int docID) throws IOException {
        read[i++] = docID;
      }

      @Override
      public void visit(int docID, byte[] packedValue) throws IOException {
        throw new UnsupportedOperationException();
      }

      @Override
      public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
        throw new UnsupportedOperationException();
      }
    });
    assertArrayEquals(ints, read);
    assertEquals(len, in.getFilePointer());
  }
  dir.deleteFile("tmp");
}
86. TestCodecUtil#testWriteNonAsciiSuffix()
Project: lucene-solr
File: TestCodecUtil.java
File: TestCodecUtil.java
public void testWriteNonAsciiSuffix() throws Exception {
  RAMFile file = new RAMFile();
  IndexOutput output = new RAMOutputStream(file, true);
  // NOTE(review): the "?" literal looks like a mangled non-ASCII character from the
  // original source — verify against upstream TestCodecUtil.
  expectThrows(IllegalArgumentException.class,
      () -> CodecUtil.writeIndexHeader(output, "foobar", 5, StringHelper.randomId(), "?"));
}
87. TestCodecUtil#testWriteTooLongSuffix()
Project: lucene-solr
File: TestCodecUtil.java
File: TestCodecUtil.java
public void testWriteTooLongSuffix() throws Exception {
  // Build a 256-char suffix, which writeIndexHeader must reject.
  StringBuilder tooLong = new StringBuilder();
  while (tooLong.length() < 256) {
    tooLong.append('a');
  }
  RAMFile file = new RAMFile();
  IndexOutput output = new RAMOutputStream(file, true);
  expectThrows(IllegalArgumentException.class,
      () -> CodecUtil.writeIndexHeader(output, "foobar", 5, StringHelper.randomId(), tooLong.toString()));
}
88. TestCodecUtil#testWriteNonAsciiHeader()
Project: lucene-solr
File: TestCodecUtil.java
File: TestCodecUtil.java
public void testWriteNonAsciiHeader() throws Exception {
  RAMFile file = new RAMFile();
  IndexOutput output = new RAMOutputStream(file, true);
  // NOTE(review): the "?" literal looks like a mangled non-ASCII character from the
  // original source — verify against upstream TestCodecUtil.
  expectThrows(IllegalArgumentException.class,
      () -> CodecUtil.writeHeader(output, "?", 5));
}
89. TestCodecUtil#testWriteTooLongHeader()
Project: lucene-solr
File: TestCodecUtil.java
File: TestCodecUtil.java
public void testWriteTooLongHeader() throws Exception {
  // Build a 128-char codec name, which writeHeader must reject.
  StringBuilder tooLong = new StringBuilder();
  while (tooLong.length() < 128) {
    tooLong.append('a');
  }
  RAMFile file = new RAMFile();
  IndexOutput output = new RAMOutputStream(file, true);
  expectThrows(IllegalArgumentException.class,
      () -> CodecUtil.writeHeader(output, tooLong.toString(), 5));
}
90. SegmentInfos#write()
Project: lucene-solr
File: SegmentInfos.java
File: SegmentInfos.java
private void write(Directory directory) throws IOException { long nextGeneration = getNextPendingGeneration(); String segmentFileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.PENDING_SEGMENTS, "", nextGeneration); // Always advance the generation on write: generation = nextGeneration; IndexOutput segnOutput = null; boolean success = false; try { segnOutput = directory.createOutput(segmentFileName, IOContext.DEFAULT); write(directory, segnOutput); segnOutput.close(); directory.sync(Collections.singleton(segmentFileName)); success = true; } finally { if (success) { pendingCommit = true; } else { // We hit an exception above; try to close the file // but suppress any exception: IOUtils.closeWhileHandlingException(segnOutput); // Try not to leave a truncated segments_N file in // the index: IOUtils.deleteFilesIgnoringExceptions(directory, segmentFileName); } } }
91. PersistentSnapshotDeletionPolicy#persist()
Project: lucene-solr
File: PersistentSnapshotDeletionPolicy.java
private synchronized void persist() throws IOException { String fileName = SNAPSHOTS_PREFIX + nextWriteGen; IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT); boolean success = false; try { CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT); out.writeVInt(refCounts.size()); for (Entry<Long, Integer> ent : refCounts.entrySet()) { out.writeVLong(ent.getKey()); out.writeVInt(ent.getValue()); } success = true; } finally { if (!success) { IOUtils.closeWhileHandlingException(out); IOUtils.deleteFilesIgnoringExceptions(dir, fileName); } else { IOUtils.close(out); } } dir.sync(Collections.singletonList(fileName)); if (nextWriteGen > 0) { String lastSaveFile = SNAPSHOTS_PREFIX + (nextWriteGen - 1); // exception OK: likely it didn't exist IOUtils.deleteFilesIgnoringExceptions(dir, lastSaveFile); } nextWriteGen++; }
92. Lucene62SegmentInfoFormat#write()
Project: lucene-solr
File: Lucene62SegmentInfoFormat.java
@Override public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException { final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene62SegmentInfoFormat.SI_EXTENSION); try (IndexOutput output = dir.createOutput(fileName, ioContext)) { // Only add the file once we've successfully created it, else IFD assert can trip: si.addFile(fileName); CodecUtil.writeIndexHeader(output, Lucene62SegmentInfoFormat.CODEC_NAME, Lucene62SegmentInfoFormat.VERSION_CURRENT, si.getId(), ""); Version version = si.getVersion(); if (version.major < 5) { throw new IllegalArgumentException("invalid major version: should be >= 5 but got: " + version.major + " segment=" + si); } // Write the Lucene version that created this segment, since 3.1 output.writeInt(version.major); output.writeInt(version.minor); output.writeInt(version.bugfix); assert version.prerelease == 0; output.writeInt(si.maxDoc()); output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO)); output.writeMapOfStrings(si.getDiagnostics()); Set<String> files = si.files(); for (String file : files) { if (!IndexFileNames.parseSegmentName(file).equals(si.name)) { throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files); } } output.writeSetOfStrings(files); output.writeMapOfStrings(si.getAttributes()); Sort indexSort = si.getIndexSort(); int numSortFields = indexSort == null ? 
0 : indexSort.getSort().length; output.writeVInt(numSortFields); for (int i = 0; i < numSortFields; ++i) { SortField sortField = indexSort.getSort()[i]; output.writeString(sortField.getField()); int sortTypeID; switch(sortField.getType()) { case STRING: sortTypeID = 0; break; case LONG: sortTypeID = 1; break; case INT: sortTypeID = 2; break; case DOUBLE: sortTypeID = 3; break; case FLOAT: sortTypeID = 4; break; default: throw new IllegalStateException("Unexpected sort type: " + sortField.getType()); } output.writeVInt(sortTypeID); output.writeByte((byte) (sortField.getReverse() ? 0 : 1)); // write missing value Object missingValue = sortField.getMissingValue(); if (missingValue == null) { output.writeByte((byte) 0); } else { switch(sortField.getType()) { case STRING: if (missingValue == SortField.STRING_LAST) { output.writeByte((byte) 1); } else if (missingValue == SortField.STRING_FIRST) { output.writeByte((byte) 2); } else { throw new AssertionError("unrecognized missing value for STRING field \"" + sortField.getField() + "\": " + missingValue); } break; case LONG: output.writeByte((byte) 1); output.writeLong(((Long) missingValue).longValue()); break; case INT: output.writeByte((byte) 1); output.writeInt(((Integer) missingValue).intValue()); break; case DOUBLE: output.writeByte((byte) 1); output.writeLong(Double.doubleToLongBits(((Double) missingValue).doubleValue())); break; case FLOAT: output.writeByte((byte) 1); output.writeInt(Float.floatToIntBits(((Float) missingValue).floatValue())); break; default: throw new IllegalStateException("Unexpected sort type: " + sortField.getType()); } } } CodecUtil.writeFooter(output); } }
93. Lucene60PointsWriter#finish()
Project: lucene-solr
File: Lucene60PointsWriter.java
@Override public void finish() throws IOException { if (finished) { throw new IllegalStateException("already finished"); } finished = true; CodecUtil.writeFooter(dataOut); String indexFileName = IndexFileNames.segmentFileName(writeState.segmentInfo.name, writeState.segmentSuffix, Lucene60PointsFormat.INDEX_EXTENSION); // Write index file try (IndexOutput indexOut = writeState.directory.createOutput(indexFileName, writeState.context)) { CodecUtil.writeIndexHeader(indexOut, Lucene60PointsFormat.META_CODEC_NAME, Lucene60PointsFormat.INDEX_VERSION_CURRENT, writeState.segmentInfo.getId(), writeState.segmentSuffix); int count = indexFPs.size(); indexOut.writeVInt(count); for (Map.Entry<String, Long> ent : indexFPs.entrySet()) { FieldInfo fieldInfo = writeState.fieldInfos.fieldInfo(ent.getKey()); if (fieldInfo == null) { throw new IllegalStateException("wrote field=\"" + ent.getKey() + "\" but that field doesn't exist in FieldInfos"); } indexOut.writeVInt(fieldInfo.number); indexOut.writeVLong(ent.getValue()); } CodecUtil.writeFooter(indexOut); } }
94. Lucene60FieldInfosFormat#write()
Project: lucene-solr
File: Lucene60FieldInfosFormat.java
@Override public void write(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException { final String fileName = IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, EXTENSION); try (IndexOutput output = directory.createOutput(fileName, context)) { CodecUtil.writeIndexHeader(output, Lucene60FieldInfosFormat.CODEC_NAME, Lucene60FieldInfosFormat.FORMAT_CURRENT, segmentInfo.getId(), segmentSuffix); output.writeVInt(infos.size()); for (FieldInfo fi : infos) { fi.checkConsistency(); output.writeString(fi.name); output.writeVInt(fi.number); byte bits = 0x0; if (fi.hasVectors()) bits |= STORE_TERMVECTOR; if (fi.omitsNorms()) bits |= OMIT_NORMS; if (fi.hasPayloads()) bits |= STORE_PAYLOADS; output.writeByte(bits); output.writeByte(indexOptionsByte(fi.getIndexOptions())); // pack the DV type and hasNorms in one byte output.writeByte(docValuesByte(fi.getDocValuesType())); output.writeLong(fi.getDocValuesGen()); output.writeMapOfStrings(fi.attributes()); int pointDimensionCount = fi.getPointDimensionCount(); output.writeVInt(pointDimensionCount); if (pointDimensionCount != 0) { output.writeVInt(fi.getPointNumBytes()); } } CodecUtil.writeFooter(output); } }
95. Lucene50FieldInfosFormat#write()
Project: lucene-solr
File: Lucene50FieldInfosFormat.java
@Override public void write(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException { final String fileName = IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, EXTENSION); try (IndexOutput output = directory.createOutput(fileName, context)) { CodecUtil.writeIndexHeader(output, Lucene50FieldInfosFormat.CODEC_NAME, Lucene50FieldInfosFormat.FORMAT_CURRENT, segmentInfo.getId(), segmentSuffix); output.writeVInt(infos.size()); for (FieldInfo fi : infos) { fi.checkConsistency(); output.writeString(fi.name); output.writeVInt(fi.number); byte bits = 0x0; if (fi.hasVectors()) bits |= STORE_TERMVECTOR; if (fi.omitsNorms()) bits |= OMIT_NORMS; if (fi.hasPayloads()) bits |= STORE_PAYLOADS; output.writeByte(bits); output.writeByte(indexOptionsByte(fi.getIndexOptions())); // pack the DV type and hasNorms in one byte output.writeByte(docValuesByte(fi.getDocValuesType())); output.writeLong(fi.getDocValuesGen()); output.writeMapOfStrings(fi.attributes()); } CodecUtil.writeFooter(output); } }
96. SimpleTextSegmentInfoFormat#write()
Project: lucene-solr
File: SimpleTextSegmentInfoFormat.java
@Override public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException { String segFileName = IndexFileNames.segmentFileName(si.name, "", SimpleTextSegmentInfoFormat.SI_EXTENSION); try (IndexOutput output = dir.createOutput(segFileName, ioContext)) { // Only add the file once we've successfully created it, else IFD assert can trip: si.addFile(segFileName); BytesRefBuilder scratch = new BytesRefBuilder(); SimpleTextUtil.write(output, SI_VERSION); SimpleTextUtil.write(output, si.getVersion().toString(), scratch); SimpleTextUtil.writeNewline(output); SimpleTextUtil.write(output, SI_DOCCOUNT); SimpleTextUtil.write(output, Integer.toString(si.maxDoc()), scratch); SimpleTextUtil.writeNewline(output); SimpleTextUtil.write(output, SI_USECOMPOUND); SimpleTextUtil.write(output, Boolean.toString(si.getUseCompoundFile()), scratch); SimpleTextUtil.writeNewline(output); Map<String, String> diagnostics = si.getDiagnostics(); int numDiagnostics = diagnostics == null ? 0 : diagnostics.size(); SimpleTextUtil.write(output, SI_NUM_DIAG); SimpleTextUtil.write(output, Integer.toString(numDiagnostics), scratch); SimpleTextUtil.writeNewline(output); if (numDiagnostics > 0) { for (Map.Entry<String, String> diagEntry : diagnostics.entrySet()) { SimpleTextUtil.write(output, SI_DIAG_KEY); SimpleTextUtil.write(output, diagEntry.getKey(), scratch); SimpleTextUtil.writeNewline(output); SimpleTextUtil.write(output, SI_DIAG_VALUE); SimpleTextUtil.write(output, diagEntry.getValue(), scratch); SimpleTextUtil.writeNewline(output); } } Map<String, String> attributes = si.getAttributes(); SimpleTextUtil.write(output, SI_NUM_ATT); SimpleTextUtil.write(output, Integer.toString(attributes.size()), scratch); SimpleTextUtil.writeNewline(output); for (Map.Entry<String, String> attEntry : attributes.entrySet()) { SimpleTextUtil.write(output, SI_ATT_KEY); SimpleTextUtil.write(output, attEntry.getKey(), scratch); SimpleTextUtil.writeNewline(output); SimpleTextUtil.write(output, 
SI_ATT_VALUE); SimpleTextUtil.write(output, attEntry.getValue(), scratch); SimpleTextUtil.writeNewline(output); } Set<String> files = si.files(); int numFiles = files == null ? 0 : files.size(); SimpleTextUtil.write(output, SI_NUM_FILES); SimpleTextUtil.write(output, Integer.toString(numFiles), scratch); SimpleTextUtil.writeNewline(output); if (numFiles > 0) { for (String fileName : files) { SimpleTextUtil.write(output, SI_FILE); SimpleTextUtil.write(output, fileName, scratch); SimpleTextUtil.writeNewline(output); } } SimpleTextUtil.write(output, SI_ID); SimpleTextUtil.write(output, new BytesRef(si.getId())); SimpleTextUtil.writeNewline(output); Sort indexSort = si.getIndexSort(); SimpleTextUtil.write(output, SI_SORT); final int numSortFields = indexSort == null ? 0 : indexSort.getSort().length; SimpleTextUtil.write(output, Integer.toString(numSortFields), scratch); SimpleTextUtil.writeNewline(output); for (int i = 0; i < numSortFields; ++i) { final SortField sortField = indexSort.getSort()[i]; SimpleTextUtil.write(output, SI_SORT_FIELD); SimpleTextUtil.write(output, sortField.getField(), scratch); SimpleTextUtil.writeNewline(output); SimpleTextUtil.write(output, SI_SORT_TYPE); final String sortType; switch(sortField.getType()) { case STRING: sortType = "string"; break; case LONG: sortType = "long"; break; case INT: sortType = "int"; break; case DOUBLE: sortType = "double"; break; case FLOAT: sortType = "float"; break; default: throw new IllegalStateException("Unexpected sort type: " + sortField.getType()); } SimpleTextUtil.write(output, sortType, scratch); SimpleTextUtil.writeNewline(output); SimpleTextUtil.write(output, SI_SORT_REVERSE); SimpleTextUtil.write(output, Boolean.toString(sortField.getReverse()), scratch); SimpleTextUtil.writeNewline(output); SimpleTextUtil.write(output, SI_SORT_MISSING); final Object missingValue = sortField.getMissingValue(); final String missing; if (missingValue == null) { missing = "null"; } else if (missingValue == 
SortField.STRING_FIRST) { missing = "first"; } else if (missingValue == SortField.STRING_LAST) { missing = "last"; } else { missing = missingValue.toString(); } SimpleTextUtil.write(output, missing, scratch); SimpleTextUtil.writeNewline(output); } SimpleTextUtil.writeChecksum(output, scratch); } }
97. SimpleTextLiveDocsFormat#writeLiveDocs()
Project: lucene-solr
File: SimpleTextLiveDocsFormat.java
/**
 * Writes the live-docs bitset as a SimpleText file for the segment's next
 * delete generation: a SIZE record, one DOC record per live document, an END
 * marker, and a checksum. On failure the partial output is closed with
 * exceptions suppressed.
 */
@Override
public void writeLiveDocs(
    MutableBits bits, Directory dir, SegmentCommitInfo info, int newDelCount, IOContext context)
    throws IOException {
  BitSet set = ((SimpleTextBits) bits).bits;
  int size = bits.length();
  BytesRefBuilder scratch = new BytesRefBuilder();

  String fileName =
      IndexFileNames.fileNameFromGeneration(
          info.info.name, LIVEDOCS_EXTENSION, info.getNextDelGen());

  IndexOutput out = null;
  boolean success = false;
  try {
    out = dir.createOutput(fileName, context);
    SimpleTextUtil.write(out, SIZE);
    SimpleTextUtil.write(out, Integer.toString(size), scratch);
    SimpleTextUtil.writeNewline(out);

    // One DOC record per set bit (i.e. per live document).
    for (int i = set.nextSetBit(0); i >= 0; i = set.nextSetBit(i + 1)) {
      SimpleTextUtil.write(out, DOC);
      SimpleTextUtil.write(out, Integer.toString(i), scratch);
      SimpleTextUtil.writeNewline(out);
    }

    SimpleTextUtil.write(out, END);
    SimpleTextUtil.writeNewline(out);
    SimpleTextUtil.writeChecksum(out, scratch);
    success = true;
  } finally {
    if (success) {
      IOUtils.close(out);
    } else {
      IOUtils.closeWhileHandlingException(out);
    }
  }
}
98. SimpleTextFieldInfosFormat#write()
Project: lucene-solr
File: SimpleTextFieldInfosFormat.java
/**
 * Writes FieldInfos as a human-readable SimpleText file: a NUMFIELDS record,
 * then per field its name, number, index options, flags, doc-values info,
 * attributes, and point-dimension info, closed with a checksum. On failure the
 * partial output is closed with exceptions suppressed.
 */
@Override
public void write(
    Directory directory,
    SegmentInfo segmentInfo,
    String segmentSuffix,
    FieldInfos infos,
    IOContext context)
    throws IOException {
  final String fileName =
      IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, FIELD_INFOS_EXTENSION);
  IndexOutput out = directory.createOutput(fileName, context);
  BytesRefBuilder scratch = new BytesRefBuilder();
  boolean success = false;
  try {
    SimpleTextUtil.write(out, NUMFIELDS);
    SimpleTextUtil.write(out, Integer.toString(infos.size()), scratch);
    SimpleTextUtil.writeNewline(out);

    for (FieldInfo fi : infos) {
      SimpleTextUtil.write(out, NAME);
      SimpleTextUtil.write(out, fi.name, scratch);
      SimpleTextUtil.writeNewline(out);

      SimpleTextUtil.write(out, NUMBER);
      SimpleTextUtil.write(out, Integer.toString(fi.number), scratch);
      SimpleTextUtil.writeNewline(out);

      SimpleTextUtil.write(out, INDEXOPTIONS);
      IndexOptions indexOptions = fi.getIndexOptions();
      // Payloads are only legal when positions are indexed.
      assert indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0
          || !fi.hasPayloads();
      SimpleTextUtil.write(out, indexOptions.toString(), scratch);
      SimpleTextUtil.writeNewline(out);

      SimpleTextUtil.write(out, STORETV);
      SimpleTextUtil.write(out, Boolean.toString(fi.hasVectors()), scratch);
      SimpleTextUtil.writeNewline(out);

      SimpleTextUtil.write(out, PAYLOADS);
      SimpleTextUtil.write(out, Boolean.toString(fi.hasPayloads()), scratch);
      SimpleTextUtil.writeNewline(out);

      SimpleTextUtil.write(out, NORMS);
      SimpleTextUtil.write(out, Boolean.toString(!fi.omitsNorms()), scratch);
      SimpleTextUtil.writeNewline(out);

      SimpleTextUtil.write(out, DOCVALUES);
      SimpleTextUtil.write(out, getDocValuesType(fi.getDocValuesType()), scratch);
      SimpleTextUtil.writeNewline(out);

      SimpleTextUtil.write(out, DOCVALUES_GEN);
      SimpleTextUtil.write(out, Long.toString(fi.getDocValuesGen()), scratch);
      SimpleTextUtil.writeNewline(out);

      // Attributes: count first, then key/value pairs.
      Map<String, String> atts = fi.attributes();
      int numAtts = atts == null ? 0 : atts.size();
      SimpleTextUtil.write(out, NUM_ATTS);
      SimpleTextUtil.write(out, Integer.toString(numAtts), scratch);
      SimpleTextUtil.writeNewline(out);
      if (numAtts > 0) {
        for (Map.Entry<String, String> entry : atts.entrySet()) {
          SimpleTextUtil.write(out, ATT_KEY);
          SimpleTextUtil.write(out, entry.getKey(), scratch);
          SimpleTextUtil.writeNewline(out);
          SimpleTextUtil.write(out, ATT_VALUE);
          SimpleTextUtil.write(out, entry.getValue(), scratch);
          SimpleTextUtil.writeNewline(out);
        }
      }

      SimpleTextUtil.write(out, DIM_COUNT);
      SimpleTextUtil.write(out, Integer.toString(fi.getPointDimensionCount()), scratch);
      SimpleTextUtil.writeNewline(out);

      SimpleTextUtil.write(out, DIM_NUM_BYTES);
      SimpleTextUtil.write(out, Integer.toString(fi.getPointNumBytes()), scratch);
      SimpleTextUtil.writeNewline(out);
    }

    SimpleTextUtil.writeChecksum(out, scratch);
    success = true;
  } finally {
    if (success) {
      out.close();
    } else {
      IOUtils.closeWhileHandlingException(out);
    }
  }
}
99. SimpleTextCompoundFormat#write()
Project: lucene-solr
File: SimpleTextCompoundFormat.java
@Override public void write(Directory dir, SegmentInfo si, IOContext context) throws IOException { String dataFile = IndexFileNames.segmentFileName(si.name, "", DATA_EXTENSION); int numFiles = si.files().size(); String names[] = si.files().toArray(new String[numFiles]); Arrays.sort(names); long startOffsets[] = new long[numFiles]; long endOffsets[] = new long[numFiles]; BytesRefBuilder scratch = new BytesRefBuilder(); try (IndexOutput out = dir.createOutput(dataFile, context)) { for (int i = 0; i < names.length; i++) { // write header for file SimpleTextUtil.write(out, HEADER); SimpleTextUtil.write(out, names[i], scratch); SimpleTextUtil.writeNewline(out); // write bytes for file startOffsets[i] = out.getFilePointer(); try (IndexInput in = dir.openInput(names[i], IOContext.READONCE)) { out.copyBytes(in, in.length()); } endOffsets[i] = out.getFilePointer(); } long tocPos = out.getFilePointer(); // write CFS table SimpleTextUtil.write(out, TABLE); SimpleTextUtil.write(out, Integer.toString(numFiles), scratch); SimpleTextUtil.writeNewline(out); for (int i = 0; i < names.length; i++) { SimpleTextUtil.write(out, TABLENAME); SimpleTextUtil.write(out, names[i], scratch); SimpleTextUtil.writeNewline(out); SimpleTextUtil.write(out, TABLESTART); SimpleTextUtil.write(out, Long.toString(startOffsets[i]), scratch); SimpleTextUtil.writeNewline(out); SimpleTextUtil.write(out, TABLEEND); SimpleTextUtil.write(out, Long.toString(endOffsets[i]), scratch); SimpleTextUtil.writeNewline(out); } DecimalFormat df = new DecimalFormat(OFFSETPATTERN, DecimalFormatSymbols.getInstance(Locale.ROOT)); SimpleTextUtil.write(out, TABLEPOS); SimpleTextUtil.write(out, df.format(tocPos), scratch); SimpleTextUtil.writeNewline(out); } }
100. Lucene50RWSegmentInfoFormat#write()
Project: lucene-solr
File: Lucene50RWSegmentInfoFormat.java
@Override public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException { final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene50SegmentInfoFormat.SI_EXTENSION); assert si.getIndexSort() == null; try (IndexOutput output = dir.createOutput(fileName, ioContext)) { // Only add the file once we've successfully created it, else IFD assert can trip: si.addFile(fileName); CodecUtil.writeIndexHeader(output, Lucene50SegmentInfoFormat.CODEC_NAME, Lucene50SegmentInfoFormat.VERSION_CURRENT, si.getId(), ""); Version version = si.getVersion(); if (version.major < 5) { throw new IllegalArgumentException("invalid major version: should be >= 5 but got: " + version.major + " segment=" + si); } // Write the Lucene version that created this segment, since 3.1 output.writeInt(version.major); output.writeInt(version.minor); output.writeInt(version.bugfix); assert version.prerelease == 0; output.writeInt(si.maxDoc()); output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO)); output.writeMapOfStrings(si.getDiagnostics()); Set<String> files = si.files(); for (String file : files) { if (!IndexFileNames.parseSegmentName(file).equals(si.name)) { throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files); } } output.writeSetOfStrings(files); output.writeMapOfStrings(si.getAttributes()); CodecUtil.writeFooter(output); } }