org.apache.lucene.store.Directory

The following are examples of the Java API class org.apache.lucene.store.Directory, taken from open-source projects.
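
Before diving in, a minimal sketch of the core Directory contract may be useful: a Directory is a flat namespace of write-once files, written through IndexOutput and read back through IndexInput. This is only a sketch, assuming a Lucene 6.x-era API consistent with the tests below; the path and file names are illustrative placeholders.

import java.nio.file.Paths;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;

public class DirectorySketch {
    public static void main(String[] args) throws Exception {
        // Open a filesystem-backed Directory (the path is a placeholder).
        try (Directory dir = FSDirectory.open(Paths.get("/tmp/lucene-demo"))) {
            // Files in a Directory are created once and never overwritten in place.
            try (IndexOutput out = dir.createOutput("demo.bin", IOContext.DEFAULT)) {
                out.writeInt(42);
            }
            // Read the file back through IndexInput.
            try (IndexInput in = dir.openInput("demo.bin", IOContext.DEFAULT)) {
                System.out.println("read: " + in.readInt());
            }
            // List all files and report their lengths.
            for (String name : dir.listAll()) {
                System.out.println(name + " (" + dir.fileLength(name) + " bytes)");
            }
        }
    }
}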

1. BaseCompoundFormatTestCase#testDoubleClose()

// test that a second call to close() behaves according to Closeable
public void testDoubleClose() throws IOException {
    final String testfile = "_123.test";
    Directory dir = newDirectory();
    SegmentInfo si = newSegmentInfo(dir, "_123");
    try (IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT)) {
        CodecUtil.writeIndexHeader(out, "Foo", 0, si.getId(), "suffix");
        out.writeInt(3);
        CodecUtil.writeFooter(out);
    }
    si.setFiles(Collections.singleton(testfile));
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
    assertEquals(1, cfs.listAll().length);
    cfs.close();
    // second close should not throw exception
    cfs.close();
    dir.close();
}

2. TestDocValuesIndexing#testTypeChangeViaAddIndexes()

public void testTypeChangeViaAddIndexes() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);
    writer.close();
    Directory dir2 = newDirectory();
    conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer2 = new IndexWriter(dir2, conf);
    doc = new Document();
    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    writer2.addDocument(doc);
    expectThrows(IllegalArgumentException.class, () -> {
        writer2.addIndexes(dir);
    });
    writer2.close();
    dir.close();
    dir2.close();
}

3. TestDocValuesIndexing#testTypeChangeViaAddIndexes2()

public void testTypeChangeViaAddIndexes2() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);
    writer.close();
    Directory dir2 = newDirectory();
    conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer2 = new IndexWriter(dir2, conf);
    writer2.addIndexes(dir);
    Document doc2 = new Document();
    doc2.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    expectThrows(IllegalArgumentException.class, () -> {
        writer2.addDocument(doc2);
    });
    writer2.close();
    dir2.close();
    dir.close();
}

4. TestDocValuesIndexing#testTypeChangeViaAddIndexesIR2()

public void testTypeChangeViaAddIndexesIR2() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);
    writer.close();
    Directory dir2 = newDirectory();
    conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer2 = new IndexWriter(dir2, conf);
    DirectoryReader reader = DirectoryReader.open(dir);
    TestUtil.addIndexesSlowly(writer2, reader);
    reader.close();
    Document doc2 = new Document();
    doc2.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    expectThrows(IllegalArgumentException.class, () -> {
        writer2.addDocument(doc2);
    });
    writer2.close();
    dir2.close();
    dir.close();
}

5. TestParallelCompositeReader#testRefCounts1()

public void testRefCounts1() throws IOException {
    Directory dir1 = getDir1(random());
    Directory dir2 = getDir2(random());
    DirectoryReader ir1, ir2;
    // close subreaders, ParallelReader will not change refCounts, but close on its own close
    ParallelCompositeReader pr = new ParallelCompositeReader(ir1 = DirectoryReader.open(dir1), ir2 = DirectoryReader.open(dir2));
    IndexReader psub1 = pr.getSequentialSubReaders().get(0);
    // check RefCounts
    assertEquals(1, ir1.getRefCount());
    assertEquals(1, ir2.getRefCount());
    assertEquals(1, psub1.getRefCount());
    pr.close();
    assertEquals(0, ir1.getRefCount());
    assertEquals(0, ir2.getRefCount());
    assertEquals(0, psub1.getRefCount());
    dir1.close();
    dir2.close();
}

6. TestParallelCompositeReader#testRefCounts2()

public void testRefCounts2() throws IOException {
    Directory dir1 = getDir1(random());
    Directory dir2 = getDir2(random());
    DirectoryReader ir1 = DirectoryReader.open(dir1);
    DirectoryReader ir2 = DirectoryReader.open(dir2);
    // don't close subreaders, so ParallelReader will increment refcounts
    ParallelCompositeReader pr = new ParallelCompositeReader(false, ir1, ir2);
    IndexReader psub1 = pr.getSequentialSubReaders().get(0);
    // check RefCounts
    assertEquals(2, ir1.getRefCount());
    assertEquals(2, ir2.getRefCount());
    assertEquals("refCount must be 1, as the synthetic reader was created by ParallelCompositeReader", 1, psub1.getRefCount());
    pr.close();
    assertEquals(1, ir1.getRefCount());
    assertEquals(1, ir2.getRefCount());
    assertEquals("refcount must be 0 because parent was closed", 0, psub1.getRefCount());
    ir1.close();
    ir2.close();
    assertEquals(0, ir1.getRefCount());
    assertEquals(0, ir2.getRefCount());
    assertEquals("refcount should not change anymore", 0, psub1.getRefCount());
    dir1.close();
    dir2.close();
}

7. TestParallelCompositeReader#testIncompatibleIndexes2()

public void testIncompatibleIndexes2() throws IOException {
    Directory dir1 = getDir1(random());
    Directory dir2 = getInvalidStructuredDir2(random());
    DirectoryReader ir1 = DirectoryReader.open(dir1), ir2 = DirectoryReader.open(dir2);
    CompositeReader[] readers = new CompositeReader[] { ir1, ir2 };
    expectThrows(IllegalArgumentException.class, () -> {
        new ParallelCompositeReader(readers);
    });
    expectThrows(IllegalArgumentException.class, () -> {
        new ParallelCompositeReader(random().nextBoolean(), readers, readers);
    });
    assertEquals(1, ir1.getRefCount());
    assertEquals(1, ir2.getRefCount());
    ir1.close();
    ir2.close();
    assertEquals(0, ir1.getRefCount());
    assertEquals(0, ir2.getRefCount());
    dir1.close();
    dir2.close();
}

8. TestParallelLeafReader#testFieldNames()

public void testFieldNames() throws Exception {
    Directory dir1 = getDir1(random());
    Directory dir2 = getDir2(random());
    ParallelLeafReader pr = new ParallelLeafReader(getOnlyLeafReader(DirectoryReader.open(dir1)), getOnlyLeafReader(DirectoryReader.open(dir2)));
    FieldInfos fieldInfos = pr.getFieldInfos();
    assertEquals(4, fieldInfos.size());
    assertNotNull(fieldInfos.fieldInfo("f1"));
    assertNotNull(fieldInfos.fieldInfo("f2"));
    assertNotNull(fieldInfos.fieldInfo("f3"));
    assertNotNull(fieldInfos.fieldInfo("f4"));
    pr.close();
    dir1.close();
    dir2.close();
}

9. TestParallelLeafReader#testRefCounts1()

public void testRefCounts1() throws IOException {
    Directory dir1 = getDir1(random());
    Directory dir2 = getDir2(random());
    LeafReader ir1, ir2;
    // close subreaders, ParallelReader will not change refCounts, but close on its own close
    ParallelLeafReader pr = new ParallelLeafReader(ir1 = getOnlyLeafReader(DirectoryReader.open(dir1)), ir2 = getOnlyLeafReader(DirectoryReader.open(dir2)));
    // check RefCounts
    assertEquals(1, ir1.getRefCount());
    assertEquals(1, ir2.getRefCount());
    pr.close();
    assertEquals(0, ir1.getRefCount());
    assertEquals(0, ir2.getRefCount());
    dir1.close();
    dir2.close();
}

10. TestParallelLeafReader#testRefCounts2()

public void testRefCounts2() throws IOException {
    Directory dir1 = getDir1(random());
    Directory dir2 = getDir2(random());
    LeafReader ir1 = getOnlyLeafReader(DirectoryReader.open(dir1));
    LeafReader ir2 = getOnlyLeafReader(DirectoryReader.open(dir2));
    // don't close subreaders, so ParallelReader will increment refcounts
    ParallelLeafReader pr = new ParallelLeafReader(false, ir1, ir2);
    // check RefCounts
    assertEquals(2, ir1.getRefCount());
    assertEquals(2, ir2.getRefCount());
    pr.close();
    assertEquals(1, ir1.getRefCount());
    assertEquals(1, ir2.getRefCount());
    ir1.close();
    ir2.close();
    assertEquals(0, ir1.getRefCount());
    assertEquals(0, ir2.getRefCount());
    dir1.close();
    dir2.close();
}

11. TestStressIndexing2#testRandom()

public void testRandom() throws Throwable {
    Directory dir1 = newMaybeVirusCheckingDirectory();
    Directory dir2 = newMaybeVirusCheckingDirectory();
    // mergeFactor=2; maxBufferedDocs=2; Map docs = indexRandom(1, 3, 2, dir1);
    boolean doReaderPooling = random().nextBoolean();
    Map<String, Document> docs = indexRandom(5, 3, 100, dir1, doReaderPooling);
    indexSerial(random(), docs, dir2);
    // sanity checks (disabled): verify each index against itself
    // verifyEquals(dir1, dir1, "id");
    // verifyEquals(dir2, dir2, "id");
    verifyEquals(dir1, dir2, "id");
    dir1.close();
    dir2.close();
}

12. TestSwappedIndexFiles#test()

public void test() throws Exception {
    Directory dir1 = newDirectory();
    Directory dir2 = newDirectory();
    // Disable CFS 80% of the time so we can truncate individual files, but the other 20% of the time we test truncation of .cfs/.cfe too:
    boolean useCFS = random().nextInt(5) == 1;
    // Use LineFileDocs so we (hopefully) get most Lucene features
    // tested, e.g. IntPoint was recently added to it:
    LineFileDocs docs = new LineFileDocs(random());
    Document doc = docs.nextDoc();
    long seed = random().nextLong();
    indexOneDoc(seed, dir1, doc, useCFS);
    indexOneDoc(seed, dir2, doc, useCFS);
    swapFiles(dir1, dir2);
    dir1.close();
    dir2.close();
}

13. TestTermVectors#testFullMergeAddIndexesReader()

Project: lucene-solr
Source File: TestTermVectors.java
public void testFullMergeAddIndexesReader() throws Exception {
    Directory[] input = new Directory[] { newDirectory(), newDirectory() };
    Directory target = newDirectory();
    for (Directory dir : input) {
        createDir(dir);
    }
    IndexWriter writer = createWriter(target);
    for (Directory dir : input) {
        DirectoryReader r = DirectoryReader.open(dir);
        TestUtil.addIndexesSlowly(writer, r);
        r.close();
    }
    writer.forceMerge(1);
    writer.close();
    verifyIndex(target);
    IOUtils.close(target, input[0], input[1]);
}

14. TestOrdinalMappingLeafReader#testTaxonomyMergeUtils()

@Test
public void testTaxonomyMergeUtils() throws Exception {
    Directory srcIndexDir = newDirectory();
    Directory srcTaxoDir = newDirectory();
    buildIndexWithFacets(srcIndexDir, srcTaxoDir, true);
    Directory targetIndexDir = newDirectory();
    Directory targetTaxoDir = newDirectory();
    buildIndexWithFacets(targetIndexDir, targetTaxoDir, false);
    IndexWriter destIndexWriter = new IndexWriter(targetIndexDir, newIndexWriterConfig(null));
    DirectoryTaxonomyWriter destTaxoWriter = new DirectoryTaxonomyWriter(targetTaxoDir);
    try {
        TaxonomyMergeUtils.merge(srcIndexDir, srcTaxoDir, new MemoryOrdinalMap(), destIndexWriter, destTaxoWriter, facetConfig);
    } finally {
        IOUtils.close(destIndexWriter, destTaxoWriter);
    }
    verifyResults(targetIndexDir, targetTaxoDir);
    IOUtils.close(targetIndexDir, targetTaxoDir, srcIndexDir, srcTaxoDir);
}

15. TestSearcherTaxonomyManager#testReplaceTaxonomyNRT()

public void testReplaceTaxonomyNRT() throws Exception {
    Directory dir = newDirectory();
    Directory taxoDir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    DirectoryTaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir);
    Directory taxoDir2 = newDirectory();
    DirectoryTaxonomyWriter tw2 = new DirectoryTaxonomyWriter(taxoDir2);
    tw2.close();
    SearcherTaxonomyManager mgr = new SearcherTaxonomyManager(w, true, null, tw);
    w.addDocument(new Document());
    tw.replaceTaxonomy(taxoDir2);
    taxoDir2.close();
    expectThrows(IllegalStateException.class, () -> {
        mgr.maybeRefresh();
    });
    w.close();
    IOUtils.close(mgr, tw, taxoDir, dir);
}

16. TestPKIndexSplitter#checkSplitting()

private void checkSplitting(Directory dir, Term splitTerm, int leftCount, int rightCount) throws Exception {
    Directory dir1 = newDirectory();
    Directory dir2 = newDirectory();
    PKIndexSplitter splitter = new PKIndexSplitter(dir, dir1, dir2, splitTerm, newIndexWriterConfig(new MockAnalyzer(random())), newIndexWriterConfig(new MockAnalyzer(random())));
    splitter.split();
    IndexReader ir1 = DirectoryReader.open(dir1);
    IndexReader ir2 = DirectoryReader.open(dir2);
    assertEquals(leftCount, ir1.numDocs());
    assertEquals(rightCount, ir2.numDocs());
    checkContents(ir1, "1");
    checkContents(ir2, "2");
    ir1.close();
    ir2.close();
    dir1.close();
    dir2.close();
}

17. BaseCompoundFormatTestCase#testTwoFiles()

/**
   * This test creates a compound file based on two files.
   */
public void testTwoFiles() throws IOException {
    String files[] = { "_123.d1", "_123.d2" };
    Directory dir = newDirectory();
    SegmentInfo si = newSegmentInfo(dir, "_123");
    createSequenceFile(dir, files[0], (byte) 0, 15, si.getId(), "suffix");
    createSequenceFile(dir, files[1], (byte) 0, 114, si.getId(), "suffix");
    si.setFiles(Arrays.asList(files));
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
    for (String file : files) {
        IndexInput expected = dir.openInput(file, newIOContext(random()));
        IndexInput actual = cfs.openInput(file, newIOContext(random()));
        assertSameStreams(file, expected, actual);
        assertSameSeekBehavior(file, expected, actual);
        expected.close();
        actual.close();
    }
    cfs.close();
    dir.close();
}

18. BaseCompoundFormatTestCase#testCreateOutputDisabled()

// test that cfs reader is read-only
public void testCreateOutputDisabled() throws IOException {
    Directory dir = newDirectory();
    SegmentInfo si = newSegmentInfo(dir, "_123");
    si.setFiles(Collections.emptyList());
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
    expectThrows(UnsupportedOperationException.class, () -> {
        cfs.createOutput("bogus", IOContext.DEFAULT);
    });
    cfs.close();
    dir.close();
}

19. BaseCompoundFormatTestCase#testDeleteFileDisabled()

// test that cfs reader is read-only
public void testDeleteFileDisabled() throws IOException {
    final String testfile = "_123.test";
    Directory dir = newDirectory();
    IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT);
    out.writeInt(3);
    out.close();
    SegmentInfo si = newSegmentInfo(dir, "_123");
    si.setFiles(Collections.emptyList());
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
    expectThrows(UnsupportedOperationException.class, () -> {
        cfs.deleteFile(testfile);
    });
    cfs.close();
    dir.close();
}

20. BaseCompoundFormatTestCase#testRenameFileDisabled()

// test that cfs reader is read-only
public void testRenameFileDisabled() throws IOException {
    final String testfile = "_123.test";
    Directory dir = newDirectory();
    IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT);
    out.writeInt(3);
    out.close();
    SegmentInfo si = newSegmentInfo(dir, "_123");
    si.setFiles(Collections.emptyList());
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
    expectThrows(UnsupportedOperationException.class, () -> {
        cfs.rename(testfile, "bogus");
    });
    cfs.close();
    dir.close();
}

21. BaseCompoundFormatTestCase#testSyncDisabled()

// test that cfs reader is read-only
public void testSyncDisabled() throws IOException {
    final String testfile = "_123.test";
    Directory dir = newDirectory();
    IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT);
    out.writeInt(3);
    out.close();
    SegmentInfo si = newSegmentInfo(dir, "_123");
    si.setFiles(Collections.emptyList());
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
    expectThrows(UnsupportedOperationException.class, () -> {
        cfs.sync(Collections.singleton(testfile));
    });
    cfs.close();
    dir.close();
}

22. BaseCompoundFormatTestCase#testMakeLockDisabled()

// test that cfs reader is read-only
public void testMakeLockDisabled() throws IOException {
    final String testfile = "_123.test";
    Directory dir = newDirectory();
    IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT);
    out.writeInt(3);
    out.close();
    SegmentInfo si = newSegmentInfo(dir, "_123");
    si.setFiles(Collections.emptyList());
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
    expectThrows(UnsupportedOperationException.class, () -> {
        cfs.obtainLock("foobar");
    });
    cfs.close();
    dir.close();
}

23. BaseCompoundFormatTestCase#testClonedStreamsClosing()

public void testClonedStreamsClosing() throws IOException {
    Directory dir = newDirectory();
    Directory cr = createLargeCFS(dir);
    // basic clone
    IndexInput expected = dir.openInput("_123.f11", newIOContext(random()));
    IndexInput one = cr.openInput("_123.f11", newIOContext(random()));
    IndexInput two = one.clone();
    assertSameStreams("basic clone one", expected, one);
    expected.seek(0);
    assertSameStreams("basic clone two", expected, two);
    // Now close the compound reader
    cr.close();
    expected.close();
    dir.close();
}

24. BaseCompoundFormatTestCase#testFileNotFound()

public void testFileNotFound() throws IOException {
    Directory dir = newDirectory();
    Directory cr = createLargeCFS(dir);
    // Open bogus file
    expectThrows(IOException.class, () -> {
        cr.openInput("bogus", newIOContext(random()));
    });
    cr.close();
    dir.close();
}

25. BaseCompoundFormatTestCase#testReadPastEOF()

public void testReadPastEOF() throws IOException {
    Directory dir = newDirectory();
    Directory cr = createLargeCFS(dir);
    IndexInput is = cr.openInput("_123.f2", newIOContext(random()));
    is.seek(is.length() - 10);
    byte b[] = new byte[100];
    is.readBytes(b, 0, 10);
    // Single byte read past end of file
    expectThrows(IOException.class, () -> {
        is.readByte();
    });
    is.seek(is.length() - 10);
    // Block read past end of file
    expectThrows(IOException.class, () -> {
        is.readBytes(b, 0, 50);
    });
    is.close();
    cr.close();
    dir.close();
}

26. BaseCompoundFormatTestCase#testResourceNameInsideCompoundFile()

// LUCENE-6311: make sure the resource name inside a compound file confesses that it's inside a compound file
public void testResourceNameInsideCompoundFile() throws Exception {
    Directory dir = newDirectory();
    String subFile = "_123.xyz";
    SegmentInfo si = newSegmentInfo(dir, "_123");
    createSequenceFile(dir, subFile, (byte) 0, 10, si.getId(), "suffix");
    si.setFiles(Collections.singletonList(subFile));
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
    IndexInput in = cfs.openInput(subFile, IOContext.DEFAULT);
    String desc = in.toString();
    assertTrue("resource description hides that it's inside a compound file: " + desc, desc.contains("[slice=" + subFile + "]"));
    cfs.close();
    dir.close();
}

27. TestMixedDirectory#testMixedDirectoryAndPolicy()

Project: hadoop-20
Source File: TestMixedDirectory.java
public void testMixedDirectoryAndPolicy() throws IOException {
    Directory readDir = new RAMDirectory();
    updateIndex(readDir, 0, numDocsPerUpdate, new KeepOnlyLastCommitDeletionPolicy());
    verify(readDir, numDocsPerUpdate);
    IndexOutput out = readDir.createOutput("_" + (numDocsPerUpdate / maxBufferedDocs + 2) + ".cfs");
    out.writeInt(0);
    out.close();
    Directory writeDir = new RAMDirectory();
    Directory mixedDir = new MixedDirectory(readDir, writeDir);
    updateIndex(mixedDir, numDocsPerUpdate, numDocsPerUpdate, new MixedDeletionPolicy());
    verify(readDir, numDocsPerUpdate);
    verify(mixedDir, 2 * numDocsPerUpdate);
}

30. IndexCopierTest#basicTest()

Project: jackrabbit-oak
Source File: IndexCopierTest.java
@Test
public void basicTest() throws Exception {
    Directory baseDir = new RAMDirectory();
    IndexDefinition defn = new IndexDefinition(root, builder.getNodeState());
    IndexCopier c1 = new RAMIndexCopier(baseDir, sameThreadExecutor(), getWorkDir());
    Directory remote = new RAMDirectory();
    Directory wrapped = c1.wrapForRead("/foo", defn, remote);
    byte[] t1 = writeFile(remote, "t1");
    byte[] t2 = writeFile(remote, "t2");
    assertEquals(2, wrapped.listAll().length);
    assertTrue(wrapped.fileExists("t1"));
    assertTrue(wrapped.fileExists("t2"));
    assertEquals(t1.length, wrapped.fileLength("t1"));
    assertEquals(t2.length, wrapped.fileLength("t2"));
    readAndAssert(wrapped, "t1", t1);
    //t1 should now be added to testDir
    assertTrue(baseDir.fileExists("t1"));
}

31. IndexCopierTest#nonExistentFile()

Project: jackrabbit-oak
Source File: IndexCopierTest.java
@Test
public void nonExistentFile() throws Exception {
    Directory baseDir = new RAMDirectory();
    IndexDefinition defn = new IndexDefinition(root, builder.getNodeState());
    CollectingExecutor executor = new CollectingExecutor();
    IndexCopier c1 = new RAMIndexCopier(baseDir, executor, getWorkDir(), true);
    Directory remote = new RAMDirectory();
    Directory wrapped = c1.wrapForRead("/foo", defn, remote);
    try {
        wrapped.openInput("foo.txt", IOContext.DEFAULT);
        fail();
    } catch (FileNotFoundException ignore) {
    }
    assertEquals(0, executor.commands.size());
}

32. IndexCopierTest#wrapForWriteWithoutIndexPath()

Project: jackrabbit-oak
Source File: IndexCopierTest.java
@Test
public void wrapForWriteWithoutIndexPath() throws Exception {
    assumeNotWindows();
    Directory remote = new CloseSafeDir();
    IndexCopier copier = new IndexCopier(sameThreadExecutor(), getWorkDir());
    IndexDefinition defn = new IndexDefinition(root, builder.getNodeState());
    Directory dir = copier.wrapForWrite(defn, remote, false);
    byte[] t1 = writeFile(dir, "t1");
    dir.close();
    readAndAssert(remote, "t1", t1);
    //Work dir must be empty post close
    assertArrayEquals(FileUtils.EMPTY_FILE_ARRAY, copier.getIndexWorkDir().listFiles());
}

33. IndexCopierTest#wrapForWriteWithIndexPath()

Project: jackrabbit-oak
Source File: IndexCopierTest.java
@Test
public void wrapForWriteWithIndexPath() throws Exception {
    assumeNotWindows();
    Directory remote = new CloseSafeDir();
    IndexCopier copier = new IndexCopier(sameThreadExecutor(), getWorkDir());
    builder.setProperty(IndexConstants.INDEX_PATH, "foo");
    IndexDefinition defn = new IndexDefinition(root, builder.getNodeState());
    Directory dir = copier.wrapForWrite(defn, remote, false);
    byte[] t1 = writeFile(dir, "t1");
    dir.close();
    readAndAssert(remote, "t1", t1);
    //Work dir must be empty post close
    File indexDir = copier.getIndexDir(defn, "foo");
    List<File> files = new ArrayList<File>(FileUtils.listFiles(indexDir, null, true));
    Set<String> fileNames = Sets.newHashSet();
    for (File f : files) {
        fileNames.add(f.getName());
    }
    assertThat(fileNames, contains("t1"));
}

34. TestTermVectors#testFullMergeAddIndexesDir()

Project: lucene-solr
Source File: TestTermVectors.java
public void testFullMergeAddIndexesDir() throws Exception {
    Directory[] input = new Directory[] { newDirectory(), newDirectory() };
    Directory target = newDirectory();
    for (Directory dir : input) {
        createDir(dir);
    }
    IndexWriter writer = createWriter(target);
    writer.addIndexes(input);
    writer.forceMerge(1);
    writer.close();
    verifyIndex(target);
    IOUtils.close(target, input[0], input[1]);
}

35. RAMDirectoryFactoryTest#dotestOpenReturnsTheSameForSamePath()

private void dotestOpenReturnsTheSameForSamePath() throws IOException {
    final Directory directory = new RAMDirectory();
    RAMDirectoryFactory factory = new RAMDirectoryFactory() {

        @Override
        protected Directory create(String path, LockFactory lockFactory, DirContext dirContext) {
            return directory;
        }
    };
    String path = "/fake/path";
    Directory dir1 = factory.get(path, DirContext.DEFAULT, DirectoryFactory.LOCK_TYPE_SINGLE);
    Directory dir2 = factory.get(path, DirContext.DEFAULT, DirectoryFactory.LOCK_TYPE_SINGLE);
    assertEquals("RAMDirectoryFactory should not create new instance of RefCntRamDirectory " + "every time open() is called for the same path", dir1, dir2);
    factory.release(dir1);
    factory.release(dir2);
    factory.close();
}

36. MinDocQueryTests#testRandom()

Project: elasticsearch
Source File: MinDocQueryTests.java
public void testRandom() throws IOException {
    final int numDocs = randomIntBetween(10, 200);
    final Document doc = new Document();
    final Directory dir = newDirectory();
    final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    for (int i = 0; i < numDocs; ++i) {
        w.addDocument(doc);
    }
    final IndexReader reader = w.getReader();
    final IndexSearcher searcher = newSearcher(reader);
    for (int i = 0; i <= numDocs; ++i) {
        assertEquals(numDocs - i, searcher.count(new MinDocQuery(i)));
    }
    w.close();
    reader.close();
    dir.close();
}

37. VersionLookupTests#testSimple()

/**
     * Test that version lookup actually works.
     */
public void testSimple() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
    Document doc = new Document();
    doc.add(new Field(UidFieldMapper.NAME, "6", UidFieldMapper.Defaults.FIELD_TYPE));
    doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
    writer.addDocument(doc);
    DirectoryReader reader = DirectoryReader.open(writer);
    LeafReaderContext segment = reader.leaves().get(0);
    PerThreadIDAndVersionLookup lookup = new PerThreadIDAndVersionLookup(segment.reader());
    // found doc
    DocIdAndVersion result = lookup.lookup(new BytesRef("6"), null, segment);
    assertNotNull(result);
    assertEquals(87, result.version);
    assertEquals(0, result.docId);
    // not found doc
    assertNull(lookup.lookup(new BytesRef("7"), null, segment));
    // deleted doc
    assertNull(lookup.lookup(new BytesRef("6"), new Bits.MatchNoBits(1), segment));
    reader.close();
    writer.close();
    dir.close();
}

38. VersionsTests#testCache()

Project: elasticsearch
Source File: VersionsTests.java
/** Test that version map cache works, is evicted on close, etc */
public void testCache() throws Exception {
    int size = Versions.lookupStates.size();
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
    Document doc = new Document();
    doc.add(new Field(UidFieldMapper.NAME, "6", UidFieldMapper.Defaults.FIELD_TYPE));
    doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
    writer.addDocument(doc);
    DirectoryReader reader = DirectoryReader.open(writer);
    // should increase cache size by 1
    assertEquals(87, Versions.loadVersion(reader, new Term(UidFieldMapper.NAME, "6")));
    assertEquals(size + 1, Versions.lookupStates.size());
    // should be cache hit
    assertEquals(87, Versions.loadVersion(reader, new Term(UidFieldMapper.NAME, "6")));
    assertEquals(size + 1, Versions.lookupStates.size());
    reader.close();
    writer.close();
    // core should be evicted from the map
    assertEquals(size, Versions.lookupStates.size());
    dir.close();
}

39. VersionsTests#testCacheFilterReader()

Project: elasticsearch
Source File: VersionsTests.java
/** Test that version map cache behaves properly with a filtered reader */
public void testCacheFilterReader() throws Exception {
    int size = Versions.lookupStates.size();
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
    Document doc = new Document();
    doc.add(new Field(UidFieldMapper.NAME, "6", UidFieldMapper.Defaults.FIELD_TYPE));
    doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
    writer.addDocument(doc);
    DirectoryReader reader = DirectoryReader.open(writer);
    assertEquals(87, Versions.loadVersion(reader, new Term(UidFieldMapper.NAME, "6")));
    assertEquals(size + 1, Versions.lookupStates.size());
    // now wrap the reader
    DirectoryReader wrapped = ElasticsearchDirectoryReader.wrap(reader, new ShardId("bogus", "_na_", 5));
    assertEquals(87, Versions.loadVersion(wrapped, new Term(UidFieldMapper.NAME, "6")));
    // same size map: core cache key is shared
    assertEquals(size + 1, Versions.lookupStates.size());
    reader.close();
    writer.close();
    // core should be evicted from the map
    assertEquals(size, Versions.lookupStates.size());
    dir.close();
}

40. CodecTests#assertCompressionEquals()

Project: elasticsearch
Source File: CodecTests.java
// write some docs with it, then inspect the .si file to confirm which compression mode was used
private void assertCompressionEquals(Mode expected, Codec actual) throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(null);
    iwc.setCodec(actual);
    IndexWriter iw = new IndexWriter(dir, iwc);
    iw.addDocument(new Document());
    iw.commit();
    iw.close();
    DirectoryReader ir = DirectoryReader.open(dir);
    SegmentReader sr = (SegmentReader) ir.leaves().get(0).reader();
    String v = sr.getSegmentInfo().info.getAttribute(Lucene50StoredFieldsFormat.MODE_KEY);
    assertNotNull(v);
    assertEquals(expected, Mode.valueOf(v));
    ir.close();
    dir.close();
}

41. StoreRecoveryTests#testStatsDirWrapper()

public void testStatsDirWrapper() throws IOException {
    Directory dir = newDirectory();
    Directory target = newDirectory();
    RecoveryState.Index indexStats = new RecoveryState.Index();
    StoreRecovery.StatsDirectoryWrapper wrapper = new StoreRecovery.StatsDirectoryWrapper(target, indexStats);
    try (IndexOutput output = dir.createOutput("foo.bar", IOContext.DEFAULT)) {
        CodecUtil.writeHeader(output, "foo", 0);
        int numBytes = randomIntBetween(100, 20000);
        for (int i = 0; i < numBytes; i++) {
            output.writeByte((byte) i);
        }
        CodecUtil.writeFooter(output);
    }
    wrapper.copyFrom(dir, "foo.bar", "bar.foo", IOContext.DEFAULT);
    assertNotNull(indexStats.getFileDetails("bar.foo"));
    assertNull(indexStats.getFileDetails("foo.bar"));
    assertEquals(dir.fileLength("foo.bar"), indexStats.getFileDetails("bar.foo").length());
    assertEquals(dir.fileLength("foo.bar"), indexStats.getFileDetails("bar.foo").recovered());
    assertFalse(indexStats.getFileDetails("bar.foo").reused());
    IOUtils.close(dir, target);
}

42. FsDirectoryServiceTests#testHasSleepWrapperOnSharedFS()

public void testHasSleepWrapperOnSharedFS() throws IOException {
    Settings build = randomBoolean()
        ? Settings.builder().put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true).build()
        : Settings.builder().put(IndexMetaData.SETTING_SHADOW_REPLICAS, true).build();
    IndexSettings settings = IndexSettingsModule.newIndexSettings("foo", build);
    IndexStoreConfig config = new IndexStoreConfig(build);
    IndexStore store = new IndexStore(settings, config);
    Path tempDir = createTempDir().resolve(settings.getUUID()).resolve("0");
    Files.createDirectories(tempDir);
    ShardPath path = new ShardPath(false, tempDir, tempDir, new ShardId(settings.getIndex(), 0));
    FsDirectoryService fsDirectoryService = new FsDirectoryService(settings, store, path);
    Directory directory = fsDirectoryService.newDirectory();
    assertTrue(directory instanceof RateLimitedFSDirectory);
    RateLimitedFSDirectory rateLimitingDirectory = (RateLimitedFSDirectory) directory;
    Directory delegate = rateLimitingDirectory.getDelegate();
    assertTrue(delegate.getClass().toString(), delegate instanceof SleepingLockWrapper);
}

43. FsDirectoryServiceTests#testHasNoSleepWrapperOnNormalFS()

public void testHasNoSleepWrapperOnNormalFS() throws IOException {
    Settings build = Settings.builder().put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "simplefs").build();
    IndexSettings settings = IndexSettingsModule.newIndexSettings("foo", build);
    IndexStoreConfig config = new IndexStoreConfig(build);
    IndexStore store = new IndexStore(settings, config);
    Path tempDir = createTempDir().resolve(settings.getUUID()).resolve("0");
    Files.createDirectories(tempDir);
    ShardPath path = new ShardPath(false, tempDir, tempDir, new ShardId(settings.getIndex(), 0));
    FsDirectoryService fsDirectoryService = new FsDirectoryService(settings, store, path);
    Directory directory = fsDirectoryService.newDirectory();
    assertTrue(directory instanceof RateLimitedFSDirectory);
    RateLimitedFSDirectory rateLimitingDirectory = (RateLimitedFSDirectory) directory;
    Directory delegate = rateLimitingDirectory.getDelegate();
    assertFalse(delegate instanceof SleepingLockWrapper);
    assertTrue(delegate instanceof SimpleFSDirectory);
}

44. StoreTests#testCheckIntegrity()

Project: elasticsearch
Source File: StoreTests.java
public void testCheckIntegrity() throws IOException {
    Directory dir = newDirectory();
    long luceneFileLength = 0;
    try (IndexOutput output = dir.createOutput("lucene_checksum.bin", IOContext.DEFAULT)) {
        int iters = scaledRandomIntBetween(10, 100);
        for (int i = 0; i < iters; i++) {
            BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024));
            output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
            luceneFileLength += bytesRef.length;
        }
        CodecUtil.writeFooter(output);
        luceneFileLength += CodecUtil.footerLength();
    }
    final long luceneChecksum;
    try (IndexInput indexInput = dir.openInput("lucene_checksum.bin", IOContext.DEFAULT)) {
        assertEquals(luceneFileLength, indexInput.length());
        luceneChecksum = CodecUtil.retrieveChecksum(indexInput);
    }
    dir.close();
}

45. OakDirectoryTest#saveListing()

@Test
public void saveListing() throws Exception {
    builder.setProperty(LuceneIndexConstants.SAVE_DIR_LISTING, true);
    Directory dir = createDir(builder, false);
    Set<String> fileNames = newHashSet();
    for (int i = 0; i < 10; i++) {
        String fileName = "foo" + i;
        createFile(dir, fileName);
        fileNames.add(fileName);
    }
    dir.close();
    dir = createDir(builder, true);
    assertEquals(fileNames, newHashSet(dir.listAll()));
}

46. TestDictionary#testCompressedDictionary()

Project: lucene-solr
Source File: TestDictionary.java
public void testCompressedDictionary() throws Exception {
    InputStream affixStream = getClass().getResourceAsStream("compressed.aff");
    InputStream dictStream = getClass().getResourceAsStream("compressed.dic");
    Directory tempDir = getDirectory();
    Dictionary dictionary = new Dictionary(tempDir, "dictionary", affixStream, dictStream);
    assertEquals(3, dictionary.lookupSuffix(new char[] { 'e' }, 0, 1).length);
    assertEquals(1, dictionary.lookupPrefix(new char[] { 's' }, 0, 1).length);
    IntsRef ordList = dictionary.lookupWord(new char[] { 'o', 'l', 'r' }, 0, 3);
    BytesRef ref = new BytesRef();
    dictionary.flagLookup.get(ordList.ints[0], ref);
    char flags[] = Dictionary.decodeFlags(ref);
    assertEquals(1, flags.length);
    affixStream.close();
    dictStream.close();
    tempDir.close();
}

47. TestDictionary#testCompressedBeforeSetDictionary()

Project: lucene-solr
Source File: TestDictionary.java
public void testCompressedBeforeSetDictionary() throws Exception {
    InputStream affixStream = getClass().getResourceAsStream("compressed-before-set.aff");
    InputStream dictStream = getClass().getResourceAsStream("compressed.dic");
    Directory tempDir = getDirectory();
    Dictionary dictionary = new Dictionary(tempDir, "dictionary", affixStream, dictStream);
    assertEquals(3, dictionary.lookupSuffix(new char[] { 'e' }, 0, 1).length);
    assertEquals(1, dictionary.lookupPrefix(new char[] { 's' }, 0, 1).length);
    IntsRef ordList = dictionary.lookupWord(new char[] { 'o', 'l', 'r' }, 0, 3);
    BytesRef ref = new BytesRef();
    dictionary.flagLookup.get(ordList.ints[0], ref);
    char flags[] = Dictionary.decodeFlags(ref);
    assertEquals(1, flags.length);
    affixStream.close();
    dictStream.close();
    tempDir.close();
}

48. TestDictionary#testCompressedEmptyAliasDictionary()

Project: lucene-solr
Source File: TestDictionary.java
public void testCompressedEmptyAliasDictionary() throws Exception {
    InputStream affixStream = getClass().getResourceAsStream("compressed-empty-alias.aff");
    InputStream dictStream = getClass().getResourceAsStream("compressed.dic");
    Directory tempDir = getDirectory();
    Dictionary dictionary = new Dictionary(tempDir, "dictionary", affixStream, dictStream);
    assertEquals(3, dictionary.lookupSuffix(new char[] { 'e' }, 0, 1).length);
    assertEquals(1, dictionary.lookupPrefix(new char[] { 's' }, 0, 1).length);
    IntsRef ordList = dictionary.lookupWord(new char[] { 'o', 'l', 'r' }, 0, 3);
    BytesRef ref = new BytesRef();
    dictionary.flagLookup.get(ordList.ints[0], ref);
    char flags[] = Dictionary.decodeFlags(ref);
    assertEquals(1, flags.length);
    affixStream.close();
    dictStream.close();
    tempDir.close();
}

49. TestDictionary#testInvalidData()

Project: lucene-solr
Source File: TestDictionary.java
// malformed rule causes ParseException
public void testInvalidData() throws Exception {
    InputStream affixStream = getClass().getResourceAsStream("broken.aff");
    InputStream dictStream = getClass().getResourceAsStream("simple.dic");
    Directory tempDir = getDirectory();
    ParseException expected = expectThrows(ParseException.class, () -> {
        new Dictionary(tempDir, "dictionary", affixStream, dictStream);
    });
    assertTrue(expected.getMessage().startsWith("The affix file contains a rule with less than four elements"));
    assertEquals(24, expected.getErrorOffset());
    affixStream.close();
    dictStream.close();
    tempDir.close();
}

50. TestDictionary#testInvalidFlags()

Project: lucene-solr
Source File: TestDictionary.java
// malformed flags causes ParseException
public void testInvalidFlags() throws Exception {
    InputStream affixStream = getClass().getResourceAsStream("broken-flags.aff");
    InputStream dictStream = getClass().getResourceAsStream("simple.dic");
    Directory tempDir = getDirectory();
    Exception expected = expectThrows(Exception.class, () -> {
        new Dictionary(tempDir, "dictionary", affixStream, dictStream);
    });
    assertTrue(expected.getMessage().startsWith("expected only one flag"));
    affixStream.close();
    dictStream.close();
    tempDir.close();
}

51. TestDictionary#testResourceCleanup()

Project: lucene-solr
Source File: TestDictionary.java
public void testResourceCleanup() throws Exception {
    CloseCheckInputStream affixStream = new CloseCheckInputStream(getClass().getResourceAsStream("compressed.aff"));
    CloseCheckInputStream dictStream = new CloseCheckInputStream(getClass().getResourceAsStream("compressed.dic"));
    Directory tempDir = getDirectory();
    new Dictionary(tempDir, "dictionary", affixStream, dictStream);
    assertFalse(affixStream.isClosed());
    assertFalse(dictStream.isClosed());
    affixStream.close();
    dictStream.close();
    tempDir.close();
    assertTrue(affixStream.isClosed());
    assertTrue(dictStream.isClosed());
}

52. TestHunspellStemFilter#beforeClass()

@BeforeClass
public static void beforeClass() throws Exception {
    // no multi-resource try-with, to work around a bogus VerifyError
    InputStream affixStream = TestStemmer.class.getResourceAsStream("simple.aff");
    InputStream dictStream = TestStemmer.class.getResourceAsStream("simple.dic");
    Directory tempDir = getDirectory();
    try {
        dictionary = new Dictionary(tempDir, "dictionary", affixStream, dictStream);
    } finally {
        IOUtils.closeWhileHandlingException(affixStream, dictStream);
    }
    tempDir.close();
}

53. TestHunspellStemFilter#testIgnoreCaseNoSideEffects()

public void testIgnoreCaseNoSideEffects() throws Exception {
    final Dictionary d;
    // no multi-resource try-with, to work around a bogus VerifyError
    InputStream affixStream = TestStemmer.class.getResourceAsStream("simple.aff");
    InputStream dictStream = TestStemmer.class.getResourceAsStream("simple.dic");
    Directory tempDir = getDirectory();
    try {
        d = new Dictionary(tempDir, "dictionary", affixStream, Collections.singletonList(dictStream), true);
    } finally {
        IOUtils.closeWhileHandlingException(affixStream, dictStream);
    }
    Analyzer a = new Analyzer() {

        @Override
        protected TokenStreamComponents createComponents(String fieldName) {
            Tokenizer tokenizer = new KeywordTokenizer();
            return new TokenStreamComponents(tokenizer, new HunspellStemFilter(tokenizer, d));
        }
    };
    checkOneTerm(a, "NoChAnGy", "NoChAnGy");
    a.close();
    tempDir.close();
}

54. TestEmptyTokenStream#testIndexWriter_LUCENE4656()

public void testIndexWriter_LUCENE4656() throws IOException {
    Directory directory = newDirectory();
    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(null));
    TokenStream ts = new EmptyTokenStream();
    assertFalse(ts.hasAttribute(TermToBytesRefAttribute.class));
    Document doc = new Document();
    doc.add(new StringField("id", "0", Field.Store.YES));
    doc.add(new TextField("description", ts));
    // this should not fail because we have no TermToBytesRefAttribute
    writer.addDocument(doc);
    assertEquals(1, writer.numDocs());
    writer.close();
    directory.close();
}

55. TestBlockPostingsFormat#testFinalBlock()

/** Make sure the final sub-block(s) are not skipped. */
public void testFinalBlock() throws Exception {
    Directory d = newDirectory();
    IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random())));
    for (int i = 0; i < 25; i++) {
        Document doc = new Document();
        doc.add(newStringField("field", Character.toString((char) (97 + i)), Field.Store.NO));
        doc.add(newStringField("field", "z" + Character.toString((char) (97 + i)), Field.Store.NO));
        w.addDocument(doc);
    }
    w.forceMerge(1);
    DirectoryReader r = DirectoryReader.open(w);
    assertEquals(1, r.leaves().size());
    FieldReader field = (FieldReader) r.leaves().get(0).reader().fields().terms("field");
    // We should see exactly two blocks: one root block (prefix empty string) and one block for z* terms (prefix z):
    Stats stats = field.getStats();
    assertEquals(0, stats.floorBlockCount);
    assertEquals(2, stats.nonFloorBlockCount);
    r.close();
    w.close();
    d.close();
}

56. TestPerFieldPostingsFormat2#testMergeUnusedPerFieldCodec()

/*
   * Test that heterogeneous index segments are merged successfully.
   */
@Test
public void testMergeUnusedPerFieldCodec() throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig iwconf = newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setCodec(new MockCodec());
    IndexWriter writer = newWriter(dir, iwconf);
    addDocs(writer, 10);
    writer.commit();
    addDocs3(writer, 10);
    writer.commit();
    addDocs2(writer, 10);
    writer.commit();
    assertEquals(30, writer.maxDoc());
    TestUtil.checkIndex(dir);
    writer.forceMerge(1);
    assertEquals(30, writer.maxDoc());
    writer.close();
    dir.close();
}

57. TestPerFieldPostingsFormat2#doTestMixedPostings()

private void doTestMixedPostings(Codec codec) throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
    iwc.setCodec(codec);
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    Document doc = new Document();
    FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
    // turn on vectors for the checkindex cross-check
    ft.setStoreTermVectors(true);
    ft.setStoreTermVectorOffsets(true);
    ft.setStoreTermVectorPositions(true);
    Field idField = new Field("id", "", ft);
    Field dateField = new Field("date", "", ft);
    doc.add(idField);
    doc.add(dateField);
    for (int i = 0; i < 100; i++) {
        idField.setStringValue(Integer.toString(random().nextInt(50)));
        dateField.setStringValue(Integer.toString(random().nextInt(100)));
        iw.addDocument(doc);
    }
    iw.close();
    // checkindex
    dir.close();
}

58. TestDocument#testConstructorExceptions()

Project: lucene-solr
Source File: TestDocument.java
public void testConstructorExceptions() throws Exception {
    FieldType ft = new FieldType();
    ft.setStored(true);
    // okay
    new Field("name", "value", ft);
    // okay
    new StringField("name", "value", Field.Store.NO);
    expectThrows(IllegalArgumentException.class, () -> {
        new Field("name", "value", new FieldType());
    });
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    // okay
    new Field("name", "value", ft);
    Document doc = new Document();
    FieldType ft2 = new FieldType();
    ft2.setStored(true);
    ft2.setStoreTermVectors(true);
    doc.add(new Field("name", "value", ft2));
    expectThrows(IllegalArgumentException.class, () -> {
        w.addDocument(doc);
    });
    w.close();
    dir.close();
}

59. TestDocument#testGetValuesForIndexedDocument()

Project: lucene-solr
Source File: TestDocument.java
/**
   * Tests {@link Document#getValues(String)} method for a Document retrieved
   * from an index.
   * 
   * @throws Exception on error
   */
public void testGetValuesForIndexedDocument() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    writer.addDocument(makeDocumentWithFields());
    IndexReader reader = writer.getReader();
    IndexSearcher searcher = newSearcher(reader);
    // search for something that does exist
    Query query = new TermQuery(new Term("keyword", "test1"));
    // ensure that queries return expected results without DateFilter first
    ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
    assertEquals(1, hits.length);
    doAssert(searcher.doc(hits[0].doc), true);
    writer.close();
    reader.close();
    dir.close();
}

60. TestDocument#testPositionIncrementMultiFields()

Project: lucene-solr
Source File: TestDocument.java
public void testPositionIncrementMultiFields() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    writer.addDocument(makeDocumentWithFields());
    IndexReader reader = writer.getReader();
    IndexSearcher searcher = newSearcher(reader);
    PhraseQuery query = new PhraseQuery("indexed_not_tokenized", "test1", "test2");
    ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
    assertEquals(1, hits.length);
    doAssert(searcher.doc(hits[0].doc), true);
    writer.close();
    reader.close();
    dir.close();
}

61. TestDocument#testNumericFieldAsString()

Project: lucene-solr
Source File: TestDocument.java
public void testNumericFieldAsString() throws Exception {
    Document doc = new Document();
    doc.add(new StoredField("int", 5));
    assertEquals("5", doc.get("int"));
    assertNull(doc.get("somethingElse"));
    doc.add(new StoredField("int", 4));
    assertArrayEquals(new String[] { "5", "4" }, doc.getValues("int"));
    Directory dir = newDirectory();
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
    iw.addDocument(doc);
    DirectoryReader ir = iw.getReader();
    Document sdoc = ir.document(0);
    assertEquals("5", sdoc.get("int"));
    assertNull(sdoc.get("somethingElse"));
    assertArrayEquals(new String[] { "5", "4" }, sdoc.getValues("int"));
    ir.close();
    iw.close();
    dir.close();
}

62. TestField#testIndexedBinaryField()

Project: lucene-solr
Source File: TestField.java
public void testIndexedBinaryField() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    BytesRef br = new BytesRef(new byte[5]);
    Field field = new StringField("binary", br, Field.Store.YES);
    assertEquals(br, field.binaryValue());
    doc.add(field);
    w.addDocument(doc);
    IndexReader r = w.getReader();
    IndexSearcher s = newSearcher(r);
    TopDocs hits = s.search(new TermQuery(new Term("binary", br)), 1);
    assertEquals(1, hits.totalHits);
    Document storedDoc = s.doc(hits.scoreDocs[0].doc);
    assertEquals(br, storedDoc.getField("binary").binaryValue());
    r.close();
    w.close();
    dir.close();
}

63. TestAddIndexes#testAddEmpty()

Project: lucene-solr
Source File: TestAddIndexes.java
public void testAddEmpty() throws Exception {
    Directory d1 = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), d1);
    w.addIndexes(new CodecReader[0]);
    w.close();
    DirectoryReader dr = DirectoryReader.open(d1);
    for (LeafReaderContext ctx : dr.leaves()) {
        assertTrue("empty segments should be dropped by addIndexes", ctx.reader().maxDoc() > 0);
    }
    dr.close();
    d1.close();
}

64. TestAddIndexes#testLocksBlock()

Project: lucene-solr
Source File: TestAddIndexes.java
/** Make sure an open IndexWriter on an incoming Directory
   *  causes a LockObtainFailedException */
public void testLocksBlock() throws Exception {
    Directory src = newDirectory();
    RandomIndexWriter w1 = new RandomIndexWriter(random(), src);
    w1.addDocument(new Document());
    w1.commit();
    Directory dest = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
    RandomIndexWriter w2 = new RandomIndexWriter(random(), dest, iwc);
    expectThrows(LockObtainFailedException.class, () -> {
        w2.addIndexes(src);
    });
    w1.close();
    w2.close();
    IOUtils.close(src, dest);
}

65. TestAddIndexes#testIllegalIndexSortChange1()

Project: lucene-solr
Source File: TestAddIndexes.java
public void testIllegalIndexSortChange1() throws Exception {
    Directory dir1 = newDirectory();
    IndexWriterConfig iwc1 = newIndexWriterConfig(new MockAnalyzer(random()));
    iwc1.setIndexSort(new Sort(new SortField("foo", SortField.Type.INT)));
    RandomIndexWriter w1 = new RandomIndexWriter(random(), dir1, iwc1);
    w1.addDocument(new Document());
    w1.commit();
    w1.addDocument(new Document());
    w1.commit();
    // so the index sort is in fact burned into the index:
    w1.forceMerge(1);
    w1.close();
    Directory dir2 = newDirectory();
    IndexWriterConfig iwc2 = newIndexWriterConfig(new MockAnalyzer(random()));
    iwc2.setIndexSort(new Sort(new SortField("foo", SortField.Type.STRING)));
    RandomIndexWriter w2 = new RandomIndexWriter(random(), dir2, iwc2);
    String message = expectThrows(IllegalArgumentException.class, () -> {
        w2.addIndexes(dir1);
    }).getMessage();
    assertEquals("cannot change index sort from <int: \"foo\"> to <string: \"foo\">", message);
    IOUtils.close(dir1, w2, dir2);
}

66. TestAddIndexes#testIllegalIndexSortChange2()

Project: lucene-solr
Source File: TestAddIndexes.java
View license
public void testIllegalIndexSortChange2() throws Exception {
    Directory dir1 = newDirectory();
    IndexWriterConfig iwc1 = newIndexWriterConfig(new MockAnalyzer(random()));
    iwc1.setIndexSort(new Sort(new SortField("foo", SortField.Type.INT)));
    RandomIndexWriter w1 = new RandomIndexWriter(random(), dir1, iwc1);
    w1.addDocument(new Document());
    w1.commit();
    w1.addDocument(new Document());
    w1.commit();
    // so the index sort is in fact burned into the index:
    w1.forceMerge(1);
    w1.close();
    Directory dir2 = newDirectory();
    IndexWriterConfig iwc2 = newIndexWriterConfig(new MockAnalyzer(random()));
    iwc2.setIndexSort(new Sort(new SortField("foo", SortField.Type.STRING)));
    RandomIndexWriter w2 = new RandomIndexWriter(random(), dir2, iwc2);
    IndexReader r1 = DirectoryReader.open(dir1);
    String message = expectThrows(IllegalArgumentException.class, () -> {
        w2.addIndexes((SegmentReader) getOnlyLeafReader(r1));
    }).getMessage();
    assertEquals("cannot change index sort from <int: \"foo\"> to <string: \"foo\">", message);
    IOUtils.close(r1, dir1, w2, dir2);
}

67. TestAllFilesHaveChecksumFooter#test()

public void test() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    conf.setCodec(TestUtil.getDefaultCodec());
    RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf);
    // Use LineFileDocs so we (hopefully) get most Lucene features
    // tested, e.g. IntPoint was recently added to it:
    LineFileDocs docs = new LineFileDocs(random());
    for (int i = 0; i < 100; i++) {
        riw.addDocument(docs.nextDoc());
        if (random().nextInt(7) == 0) {
            riw.commit();
        }
        if (random().nextInt(20) == 0) {
            riw.deleteDocuments(new Term("docid", Integer.toString(i)));
        }
        if (random().nextInt(15) == 0) {
            riw.updateNumericDocValue(new Term("docid", Integer.toString(i)), "docid_intDV", Long.valueOf(i));
        }
    }
    riw.close();
    checkFooters(dir);
    dir.close();
}

68. TestAllFilesHaveCodecHeader#test()

View license
public void test() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    conf.setCodec(TestUtil.getDefaultCodec());
    RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf);
    // Use LineFileDocs so we (hopefully) get most Lucene features
    // tested, e.g. IntPoint was recently added to it:
    LineFileDocs docs = new LineFileDocs(random());
    for (int i = 0; i < 100; i++) {
        riw.addDocument(docs.nextDoc());
        if (random().nextInt(7) == 0) {
            riw.commit();
        }
        if (random().nextInt(20) == 0) {
            riw.deleteDocuments(new Term("docid", Integer.toString(i)));
        }
        if (random().nextInt(15) == 0) {
            riw.updateNumericDocValue(new Term("docid", Integer.toString(i)), "docid_intDV", Long.valueOf(i));
        }
    }
    riw.close();
    checkHeaders(dir, new HashMap<String, String>());
    dir.close();
}
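
Entries 67 and 68 delegate their real assertions to checkFooters and checkHeaders, which the excerpts elide. A plausible shape for checkFooters (an assumption, not the test's actual helper) walks every index file and lets CodecUtil verify the footer:

private void checkFooters(Directory dir) throws IOException {
    for (String file : dir.listAll()) {
        if (file.equals(IndexWriter.WRITE_LOCK_NAME)) {
            continue; // the lock file carries no codec footer
        }
        try (IndexInput in = dir.openInput(file, IOContext.READONCE)) {
            // validates the checksum footer at the end of the file
            CodecUtil.checksumEntireFile(in);
        }
    }
}

checkHeaders presumably performs the symmetric walk against each file's codec header.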

69. TestBinaryDocValuesUpdates#testUpdatesAreFlushed()

View license
public void testUpdatesAreFlushed() throws IOException {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setRAMBufferSizeMB(0.00000001));
    writer.addDocument(doc(0)); // val=1
    writer.addDocument(doc(1)); // val=2
    writer.addDocument(doc(3)); // val=2
    writer.commit();
    assertEquals(1, writer.getFlushDeletesCount());
    writer.updateBinaryDocValue(new Term("id", "doc-0"), "val", toBytes(5));
    assertEquals(2, writer.getFlushDeletesCount());
    writer.updateBinaryDocValue(new Term("id", "doc-1"), "val", toBytes(6));
    assertEquals(3, writer.getFlushDeletesCount());
    writer.updateBinaryDocValue(new Term("id", "doc-2"), "val", toBytes(7));
    assertEquals(4, writer.getFlushDeletesCount());
    writer.getConfig().setRAMBufferSizeMB(1000d);
    writer.updateBinaryDocValue(new Term("id", "doc-2"), "val", toBytes(7));
    assertEquals(4, writer.getFlushDeletesCount());
    writer.close();
    dir.close();
}
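
Entries 69 through 75 lean on three private helpers the excerpts elide: doc(int), toBytes(long), and getValue(BinaryDocValues, int). The sketch below is an assumed shape (fixed-width encoding rather than whatever the test actually uses), but it shows the round trip the assertions rely on:

static Document doc(int id) {
    Document d = new Document();
    d.add(new StringField("id", "doc-" + id, Store.NO));
    // assumed mapping: store id + 1 so a present value is never 0
    d.add(new BinaryDocValuesField("val", toBytes(id + 1)));
    return d;
}

static BytesRef toBytes(long value) {
    // assumed fixed-width little-endian encoding of the long
    byte[] bytes = new byte[Long.BYTES];
    for (int i = 0; i < Long.BYTES; i++) {
        bytes[i] = (byte) (value >>> (8 * i));
    }
    return new BytesRef(bytes);
}

static long getValue(BinaryDocValues bdv, int docID) {
    // BinaryDocValues in this API generation is random access
    BytesRef term = bdv.get(docID);
    long value = 0;
    for (int i = 0; i < Long.BYTES; i++) {
        value |= (term.bytes[term.offset + i] & 0xFFL) << (8 * i);
    }
    return value;
}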

70. TestBinaryDocValuesUpdates#testUpdateNonBinaryDocValuesField()

View license
public void testUpdateNonBinaryDocValuesField() throws Exception {
    // we don't support adding new fields or updating existing non-binary-dv
    // fields through binary updates
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new StringField("key", "doc", Store.NO));
    doc.add(new StringField("foo", "bar", Store.NO));
    // flushed document
    writer.addDocument(doc);
    writer.commit();
    // in-memory document
    writer.addDocument(doc);
    expectThrows(IllegalArgumentException.class, () -> {
        writer.updateBinaryDocValue(new Term("key", "doc"), "bdv", toBytes(17L));
    });
    expectThrows(IllegalArgumentException.class, () -> {
        writer.updateBinaryDocValue(new Term("key", "doc"), "foo", toBytes(17L));
    });
    writer.close();
    dir.close();
}

71. TestBinaryDocValuesUpdates#testUpdateSameDocMultipleTimes()

View license
public void testUpdateSameDocMultipleTimes() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new StringField("key", "doc", Store.NO));
    doc.add(new BinaryDocValuesField("bdv", toBytes(5L)));
    // flushed document
    writer.addDocument(doc);
    writer.commit();
    // in-memory document
    writer.addDocument(doc);
    // update existing field
    writer.updateBinaryDocValue(new Term("key", "doc"), "bdv", toBytes(17L));
    // update existing field 2nd time in this commit
    writer.updateBinaryDocValue(new Term("key", "doc"), "bdv", toBytes(3L));
    writer.close();
    final DirectoryReader reader = DirectoryReader.open(dir);
    BinaryDocValues bdv = MultiDocValues.getBinaryValues(reader, "bdv");
    for (int i = 0; i < reader.maxDoc(); i++) {
        assertEquals(3, getValue(bdv, i));
    }
    reader.close();
    dir.close();
}

72. TestBinaryDocValuesUpdates#testUpdateBinaryDVFieldWithSameNameAsPostingField()

View license
public void testUpdateBinaryDVFieldWithSameNameAsPostingField() throws Exception {
    // this used to fail because FieldInfos.Builder neglected to update
    // globalFieldMaps.docValuesTypes map
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new StringField("f", "mock-value", Store.NO));
    doc.add(new BinaryDocValuesField("f", toBytes(5L)));
    writer.addDocument(doc);
    writer.commit();
    writer.updateBinaryDocValue(new Term("f", "mock-value"), "f", toBytes(17L));
    writer.close();
    DirectoryReader r = DirectoryReader.open(dir);
    BinaryDocValues bdv = r.leaves().get(0).reader().getBinaryDocValues("f");
    assertEquals(17, getValue(bdv, 0));
    r.close();
    dir.close();
}

73. TestBinaryDocValuesUpdates#testUpdatesOrder()

View license
public void testUpdatesOrder() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new StringField("upd", "t1", Store.NO));
    doc.add(new StringField("upd", "t2", Store.NO));
    doc.add(new BinaryDocValuesField("f1", toBytes(1L)));
    doc.add(new BinaryDocValuesField("f2", toBytes(1L)));
    writer.addDocument(doc);
    // update f1 to 2
    writer.updateBinaryDocValue(new Term("upd", "t1"), "f1", toBytes(2L));
    // update f2 to 2
    writer.updateBinaryDocValue(new Term("upd", "t1"), "f2", toBytes(2L));
    // update f1 to 3
    writer.updateBinaryDocValue(new Term("upd", "t2"), "f1", toBytes(3L));
    // update f2 to 3
    writer.updateBinaryDocValue(new Term("upd", "t2"), "f2", toBytes(3L));
    // update f1 to 4 (but not f2)
    writer.updateBinaryDocValue(new Term("upd", "t1"), "f1", toBytes(4L));
    writer.close();
    DirectoryReader reader = DirectoryReader.open(dir);
    assertEquals(4, getValue(reader.leaves().get(0).reader().getBinaryDocValues("f1"), 0));
    assertEquals(3, getValue(reader.leaves().get(0).reader().getBinaryDocValues("f2"), 0));
    reader.close();
    dir.close();
}

74. TestBinaryDocValuesUpdates#testUpdateAllDeletedSegment()

View license
public void testUpdateAllDeletedSegment() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new StringField("id", "doc", Store.NO));
    doc.add(new BinaryDocValuesField("f1", toBytes(1L)));
    writer.addDocument(doc);
    writer.addDocument(doc);
    writer.commit();
    // delete all docs in the first segment
    writer.deleteDocuments(new Term("id", "doc"));
    writer.addDocument(doc);
    writer.updateBinaryDocValue(new Term("id", "doc"), "f1", toBytes(2L));
    writer.close();
    DirectoryReader reader = DirectoryReader.open(dir);
    assertEquals(1, reader.leaves().size());
    assertEquals(2L, getValue(reader.leaves().get(0).reader().getBinaryDocValues("f1"), 0));
    reader.close();
    dir.close();
}

75. TestBinaryDocValuesUpdates#testUpdateTwoNonexistingTerms()

View license
public void testUpdateTwoNonexistingTerms() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new StringField("id", "doc", Store.NO));
    doc.add(new BinaryDocValuesField("f1", toBytes(1L)));
    writer.addDocument(doc);
    // update w/ multiple nonexisting terms in same field
    writer.updateBinaryDocValue(new Term("c", "foo"), "f1", toBytes(2L));
    writer.updateBinaryDocValue(new Term("c", "bar"), "f1", toBytes(2L));
    writer.close();
    DirectoryReader reader = DirectoryReader.open(dir);
    assertEquals(1, reader.leaves().size());
    assertEquals(1L, getValue(reader.leaves().get(0).reader().getBinaryDocValues("f1"), 0));
    reader.close();
    dir.close();
}

76. TestConcurrentMergeScheduler#testMaybeStallCalled()

View license
// LUCENE-6063
public void testMaybeStallCalled() throws Exception {
    final AtomicBoolean wasCalled = new AtomicBoolean();
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
    iwc.setMergeScheduler(new ConcurrentMergeScheduler() {

        @Override
        protected boolean maybeStall(IndexWriter writer) {
            wasCalled.set(true);
            return true;
        }
    });
    IndexWriter w = new IndexWriter(dir, iwc);
    w.addDocument(new Document());
    w.forceMerge(1);
    assertTrue(wasCalled.get());
    w.close();
    dir.close();
}

77. TestCrashCausesCorruptIndex#indexAfterRestart()

View license
/**
   * Attempts to index one more document.
   */
private void indexAfterRestart() throws IOException {
    Directory realDirectory = newFSDirectory(path);
    // LUCENE-3627 (before the fix): this line fails because
    // it doesn't know what to do with the created but empty
    // segments_2 file
    IndexWriter indexWriter = new IndexWriter(realDirectory, newIndexWriterConfig(new MockAnalyzer(random())));
    // currently the test fails above.
    // however, to test the fix, the following lines should pass as well.
    indexWriter.addDocument(getDocument());
    indexWriter.close();
    assertFalse(slowFileExists(realDirectory, "segments_2"));
    realDirectory.close();
}

78. TestDirectoryReader#testIsCurrent()

View license
public void testIsCurrent() throws Exception {
    Directory d = newDirectory();
    IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random())));
    addDocumentWithFields(writer);
    writer.close();
    // set up reader:
    DirectoryReader reader = DirectoryReader.open(d);
    assertTrue(reader.isCurrent());
    // modify index by adding another document:
    writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
    addDocumentWithFields(writer);
    writer.close();
    assertFalse(reader.isCurrent());
    // re-create index:
    writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
    addDocumentWithFields(writer);
    writer.close();
    assertFalse(reader.isCurrent());
    reader.close();
    d.close();
}
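
Entry 78 polls isCurrent() by hand; in application code the same staleness check usually goes through openIfChanged, which returns null when the reader is already current. A brief sketch:

DirectoryReader newReader = DirectoryReader.openIfChanged(reader);
if (newReader != null) {
    // the index changed; swap readers and release the old one
    reader.close();
    reader = newReader;
}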

79. TestDirectoryReader#testOpenReaderAfterDelete()

View license
public void testOpenReaderAfterDelete() throws IOException {
    Path dirFile = createTempDir("deletetest");
    Directory dir = newFSDirectory(dirFile);
    if (dir instanceof BaseDirectoryWrapper) {
        // we will hit NoSuchFileException in MDW since we nuked it!
        ((BaseDirectoryWrapper) dir).setCheckIndexOnClose(false);
    }
    try {
        DirectoryReader.open(dir);
        fail("expected FileNotFoundException/NoSuchFileException");
    } catch (FileNotFoundException | NoSuchFileException e) {
        // expected
    }
    IOUtils.rm(dirFile);
    // Make sure the second open still throws cleanly (not an NPE):
    try {
        DirectoryReader.open(dir);
        fail("expected FileNotFoundException/NoSuchFileException");
    } catch (FileNotFoundException | NoSuchFileException e) {
        // expected
    }
    dir.close();
}

80. TestDirectoryReader#testNoDupCommitFileNames()

View license
// LUCENE-1509
public void testNoDupCommitFileNames() throws Throwable {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(2));
    writer.addDocument(createDocument("a"));
    writer.addDocument(createDocument("a"));
    writer.addDocument(createDocument("a"));
    writer.close();
    Collection<IndexCommit> commits = DirectoryReader.listCommits(dir);
    for (final IndexCommit commit : commits) {
        Collection<String> files = commit.getFileNames();
        HashSet<String> seen = new HashSet<>();
        for (final String fileName : files) {
            assertTrue("file " + fileName + " was duplicated", !seen.contains(fileName));
            seen.add(fileName);
        }
    }
    dir.close();
}

81. TestDirectoryReader#testPrepareCommitIsCurrent()

View license
// LUCENE-2046
public void testPrepareCommitIsCurrent() throws Throwable {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    writer.commit();
    Document doc = new Document();
    writer.addDocument(doc);
    DirectoryReader r = DirectoryReader.open(dir);
    assertTrue(r.isCurrent());
    writer.addDocument(doc);
    writer.prepareCommit();
    assertTrue(r.isCurrent());
    DirectoryReader r2 = DirectoryReader.openIfChanged(r);
    assertNull(r2);
    writer.commit();
    assertFalse(r.isCurrent());
    writer.close();
    r.close();
    dir.close();
}
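
Entry 81 exercises Lucene's two-phase commit: prepareCommit flushes and fsyncs the new segments but publishes nothing, so the reader stays current until commit finishes the transaction. A sketch of the usual pattern around it:

writer.prepareCommit();
try {
    // ... commit any other resources participating in the transaction ...
    writer.commit();
} catch (Exception e) {
    // abandons the prepared commit; note that rollback also closes the writer
    writer.rollback();
    throw e;
}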

82. TestDirectoryReader#testListCommits()

View license
// LUCENE-2753
public void testListCommits() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(null).setIndexDeletionPolicy(new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy())));
    SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
    writer.addDocument(new Document());
    writer.commit();
    sdp.snapshot();
    writer.addDocument(new Document());
    writer.commit();
    sdp.snapshot();
    writer.addDocument(new Document());
    writer.commit();
    sdp.snapshot();
    writer.close();
    long currentGen = 0;
    for (IndexCommit ic : DirectoryReader.listCommits(dir)) {
        assertTrue("currentGen=" + currentGen + " commitGen=" + ic.getGeneration(), currentGen < ic.getGeneration());
        currentGen = ic.getGeneration();
    }
    dir.close();
}
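
Entry 82 takes snapshots but never releases them, which is fine in a throwaway test; a long-lived application pairs each snapshot with a release so the deletion policy can reclaim the pinned files. A sketch, assuming the writer is still open:

IndexCommit snapshot = sdp.snapshot();
try {
    // ... copy the files named by snapshot.getFileNames() to backup ...
} finally {
    sdp.release(snapshot);
    // ask the writer to delete files freed by the release:
    writer.deleteUnusedFiles();
}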

83. TestDirectoryReader#testOOBDocID()

View license
public void testOOBDocID() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    writer.addDocument(new Document());
    DirectoryReader r = writer.getReader();
    writer.close();
    r.document(0);
    expectThrows(IllegalArgumentException.class, () -> {
        r.document(1);
    });
    r.close();
    dir.close();
}

84. TestDirectoryReader#testTryIncRef()

View license
public void testTryIncRef() throws IOException {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    writer.addDocument(new Document());
    writer.commit();
    DirectoryReader r = DirectoryReader.open(dir);
    assertTrue(r.tryIncRef());
    r.decRef();
    r.close();
    assertFalse(r.tryIncRef());
    writer.close();
    dir.close();
}
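
Entry 84's tryIncRef/decRef calls are the reader-sharing idiom: a consumer pins the reader before using it, and the pin fails once the reader has been closed. Sketch:

if (reader.tryIncRef()) {
    try {
        // ... search against reader ...
    } finally {
        reader.decRef();
    }
} else {
    // reader was closed under us; obtain a fresh one and retry
}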

85. TestDirectoryReader#testLoadCertainFields()

View license
public void testLoadCertainFields() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(newStringField("field1", "foobar", Field.Store.YES));
    doc.add(newStringField("field2", "foobaz", Field.Store.YES));
    writer.addDocument(doc);
    DirectoryReader r = writer.getReader();
    writer.close();
    Set<String> fieldsToLoad = new HashSet<>();
    assertEquals(0, r.document(0, fieldsToLoad).getFields().size());
    fieldsToLoad.add("field1");
    Document doc2 = r.document(0, fieldsToLoad);
    assertEquals(1, doc2.getFields().size());
    assertEquals("foobar", doc2.get("field1"));
    r.close();
    dir.close();
}

86. TestDirectoryReaderReopen#testOpenIfChangedNRTToCommit()

View license
public void testOpenIfChangedNRTToCommit() throws Exception {
    Directory dir = newDirectory();
    // Can't use RIW because it randomly commits:
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    Document doc = new Document();
    doc.add(newStringField("field", "value", Field.Store.NO));
    w.addDocument(doc);
    w.commit();
    List<IndexCommit> commits = DirectoryReader.listCommits(dir);
    assertEquals(1, commits.size());
    w.addDocument(doc);
    DirectoryReader r = DirectoryReader.open(w);
    assertEquals(2, r.numDocs());
    IndexReader r2 = DirectoryReader.openIfChanged(r, commits.get(0));
    assertNotNull(r2);
    r.close();
    assertEquals(1, r2.numDocs());
    w.close();
    r2.close();
    dir.close();
}

87. TestDoc#setUp()

Project: lucene-solr
Source File: TestDoc.java
View license
/** Sets up the test case. This test case needs
   *  a few text files created in the current working directory.
   */
@Override
public void setUp() throws Exception {
    super.setUp();
    if (VERBOSE) {
        System.out.println("TEST: setUp");
    }
    workDir = createTempDir("TestDoc");
    indexDir = createTempDir("testIndex");
    Directory directory = newFSDirectory(indexDir);
    directory.close();
    files = new LinkedList<>();
    files.add(createOutput("test.txt", "This is the first test file"));
    files.add(createOutput("test2.txt", "This is the second test file"));
}
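
The setUp above calls a createOutput helper the excerpt elides. A plausible shape (assumed, using java.nio, not the test's actual code) writes the text under workDir and returns the path:

private Path createOutput(String name, String text) throws IOException {
    Path path = workDir.resolve(name);
    try (BufferedWriter out = Files.newBufferedWriter(path, StandardCharsets.UTF_8)) {
        out.write(text);
    }
    return path;
}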

88. TestDocCount#testSimple()

Project: lucene-solr
Source File: TestDocCount.java
View license
public void testSimple() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
    int numDocs = atLeast(100);
    for (int i = 0; i < numDocs; i++) {
        iw.addDocument(doc());
    }
    IndexReader ir = iw.getReader();
    verifyCount(ir);
    ir.close();
    iw.forceMerge(1);
    ir = iw.getReader();
    verifyCount(ir);
    ir.close();
    iw.close();
    dir.close();
}

89. TestDocInverterPerFieldErrorInfo#testInfoStreamGetsFieldName()

View license
@Test
public void testInfoStreamGetsFieldName() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer;
    IndexWriterConfig c = new IndexWriterConfig(new ThrowingAnalyzer());
    final ByteArrayOutputStream infoBytes = new ByteArrayOutputStream();
    PrintStream infoPrintStream = new PrintStream(infoBytes, true, IOUtils.UTF_8);
    PrintStreamInfoStream printStreamInfoStream = new PrintStreamInfoStream(infoPrintStream);
    c.setInfoStream(printStreamInfoStream);
    writer = new IndexWriter(dir, c);
    Document doc = new Document();
    doc.add(newField("distinctiveFieldName", "aaa ", storedTextType));
    expectThrows(BadNews.class, () -> {
        writer.addDocument(doc);
    });
    infoPrintStream.flush();
    String infoStream = new String(infoBytes.toByteArray(), IOUtils.UTF_8);
    assertTrue(infoStream.contains("distinctiveFieldName"));
    writer.close();
    dir.close();
}

90. TestDocInverterPerFieldErrorInfo#testNoExtraNoise()

View license
@Test
public void testNoExtraNoise() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer;
    IndexWriterConfig c = new IndexWriterConfig(new ThrowingAnalyzer());
    final ByteArrayOutputStream infoBytes = new ByteArrayOutputStream();
    PrintStream infoPrintStream = new PrintStream(infoBytes, true, IOUtils.UTF_8);
    PrintStreamInfoStream printStreamInfoStream = new PrintStreamInfoStream(infoPrintStream);
    c.setInfoStream(printStreamInfoStream);
    writer = new IndexWriter(dir, c);
    Document doc = new Document();
    doc.add(newField("boringFieldName", "aaa ", storedTextType));
    try {
        writer.addDocument(doc);
    } catch (BadNews badNews) {
        fail("Unwanted exception");
    }
    infoPrintStream.flush();
    String infoStream = new String(infoBytes.toByteArray(), IOUtils.UTF_8);
    assertFalse(infoStream.contains("boringFieldName"));
    writer.close();
    dir.close();
}

91. TestDocsAndPositions#testDocsEnumStart()

View license
public void testDocsEnumStart() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(newStringField("foo", "bar", Field.Store.NO));
    writer.addDocument(doc);
    DirectoryReader reader = writer.getReader();
    LeafReader r = getOnlyLeafReader(reader);
    PostingsEnum disi = TestUtil.docs(random(), r, "foo", new BytesRef("bar"), null, PostingsEnum.NONE);
    int docid = disi.docID();
    assertEquals(-1, docid);
    assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
    // now reuse and check again
    TermsEnum te = r.terms("foo").iterator();
    assertTrue(te.seekExact(new BytesRef("bar")));
    disi = TestUtil.docs(random(), te, disi, PostingsEnum.NONE);
    docid = disi.docID();
    assertEquals(-1, docid);
    assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
    writer.close();
    r.close();
    dir.close();
}

92. TestDocsAndPositions#testDocsAndPositionsEnumStart()

View license
public void testDocsAndPositionsEnumStart() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(newTextField("foo", "bar", Field.Store.NO));
    writer.addDocument(doc);
    DirectoryReader reader = writer.getReader();
    LeafReader r = getOnlyLeafReader(reader);
    PostingsEnum disi = r.postings(new Term("foo", "bar"), PostingsEnum.ALL);
    int docid = disi.docID();
    assertEquals(-1, docid);
    assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
    // now reuse and check again
    TermsEnum te = r.terms("foo").iterator();
    assertTrue(te.seekExact(new BytesRef("bar")));
    disi = te.postings(disi, PostingsEnum.ALL);
    docid = disi.docID();
    assertEquals(-1, docid);
    assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
    writer.close();
    r.close();
    dir.close();
}
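
Entries 91 and 92 only verify the enum's starting state; consuming one of these (possibly reused) enums follows the standard iterator loop, sketched here with the positions that PostingsEnum.ALL makes available:

int doc;
while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
    int freq = disi.freq();
    for (int i = 0; i < freq; i++) {
        // valid because the enum was opened with PostingsEnum.ALL
        int position = disi.nextPosition();
    }
}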

93. TestDocValues#testEmptyIndex()

Project: lucene-solr
Source File: TestDocValues.java
View license
/** 
   * If the field doesn't exist, we return empty instances:
   * it can easily happen that a segment just doesn't have any docs with the field.
   */
public void testEmptyIndex() throws Exception {
    Directory dir = newDirectory();
    IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null));
    iw.addDocument(new Document());
    DirectoryReader dr = DirectoryReader.open(iw);
    LeafReader r = getOnlyLeafReader(dr);
    // ok
    assertNotNull(DocValues.getBinary(r, "bogus"));
    assertNotNull(DocValues.getNumeric(r, "bogus"));
    assertNotNull(DocValues.getSorted(r, "bogus"));
    assertNotNull(DocValues.getSortedSet(r, "bogus"));
    assertNotNull(DocValues.getSortedNumeric(r, "bogus"));
    assertNotNull(DocValues.getDocsWithField(r, "bogus"));
    dr.close();
    iw.close();
    dir.close();
}
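
The point of entry 93 is that these getters never return null for a missing field; the empty instances just read as the type's defaults, so callers need no null checks. A sketch against this API generation:

NumericDocValues dv = DocValues.getNumeric(r, "bogus");
// the empty instance reads as 0 for every document:
long value = dv.get(0);
Bits docsWith = DocValues.getDocsWithField(r, "bogus");
// and no document claims to have a value:
boolean has = docsWith.get(0);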

94. TestDocValuesIndexing#testMultiValuedDocValuesField()

View license
public void testMultiValuedDocValuesField() throws Exception {
    Directory d = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), d);
    Document doc = new Document();
    Field f = new NumericDocValuesField("field", 17);
    doc.add(f);
    // add the doc
    w.addDocument(doc);
    // Index doc values are single-valued so we should not
    // be able to add same field more than once:
    doc.add(f);
    expectThrows(IllegalArgumentException.class, () -> {
        w.addDocument(doc);
    });
    DirectoryReader r = w.getReader();
    w.close();
    assertEquals(17, DocValues.getNumeric(getOnlyLeafReader(r), "field").get(0));
    r.close();
    d.close();
}

95. TestDocValuesIndexing#testDifferentTypedDocValuesField()

View license
public void testDifferentTypedDocValuesField() throws Exception {
    Directory d = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), d);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("field", 17));
    w.addDocument(doc);
    // Index doc values are single-valued so we should not
    // be able to add same field more than once:
    doc.add(new BinaryDocValuesField("field", new BytesRef("blah")));
    expectThrows(IllegalArgumentException.class, () -> {
        w.addDocument(doc);
    });
    DirectoryReader r = w.getReader();
    w.close();
    assertEquals(17, DocValues.getNumeric(getOnlyLeafReader(r), "field").get(0));
    r.close();
    d.close();
}

96. TestDocValuesIndexing#testDifferentTypedDocValuesField2()

View license
public void testDifferentTypedDocValuesField2() throws Exception {
    Directory d = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), d);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("field", 17));
    w.addDocument(doc);
    // Index doc values are single-valued so we should not
    // be able to add same field more than once:
    doc.add(new SortedDocValuesField("field", new BytesRef("hello")));
    expectThrows(IllegalArgumentException.class, () -> {
        w.addDocument(doc);
    });
    DirectoryReader r = w.getReader();
    assertEquals(17, getOnlyLeafReader(r).getNumericDocValues("field").get(0));
    r.close();
    w.close();
    d.close();
}

97. TestDocValuesIndexing#testMixedTypesSameDocument()

View license
// Same field in one document as different types:
public void testMixedTypesSameDocument() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    w.addDocument(new Document());
    Document doc = new Document();
    doc.add(new NumericDocValuesField("foo", 0));
    doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
    expectThrows(IllegalArgumentException.class, () -> {
        w.addDocument(doc);
    });
    IndexReader ir = w.getReader();
    assertEquals(1, ir.numDocs());
    ir.close();
    w.close();
    dir.close();
}

98. TestDocValuesIndexing#testMixedTypesDifferentDocuments()

View license
// Two documents with same field as different types:
public void testMixedTypesDifferentDocuments() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    Document doc = new Document();
    doc.add(new NumericDocValuesField("foo", 0));
    w.addDocument(doc);
    Document doc2 = new Document();
    doc2.add(new SortedDocValuesField("foo", new BytesRef("hello")));
    expectThrows(IllegalArgumentException.class, () -> {
        w.addDocument(doc2);
    });
    IndexReader ir = w.getReader();
    assertEquals(1, ir.numDocs());
    ir.close();
    w.close();
    dir.close();
}

99. TestDocValuesIndexing#testAddSortedTwice()

View license
public void testAddSortedTwice() throws IOException {
    Analyzer analyzer = new MockAnalyzer(random());
    Directory directory = newDirectory();
    // we don't use RandomIndexWriter because it might add more docvalues than we expect
    IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
    iwc.setMergePolicy(newLogMergePolicy());
    IndexWriter iwriter = new IndexWriter(directory, iwc);
    Document doc = new Document();
    doc.add(new SortedDocValuesField("dv", new BytesRef("foo!")));
    iwriter.addDocument(doc);
    doc.add(new SortedDocValuesField("dv", new BytesRef("bar!")));
    expectThrows(IllegalArgumentException.class, () -> {
        iwriter.addDocument(doc);
    });
    IndexReader ir = iwriter.getReader();
    assertEquals(1, ir.numDocs());
    ir.close();
    iwriter.close();
    directory.close();
}

100. TestDocValuesIndexing#testAddBinaryTwice()

View license
public void testAddBinaryTwice() throws IOException {
    Analyzer analyzer = new MockAnalyzer(random());
    Directory directory = newDirectory();
    // we don't use RandomIndexWriter because it might add more docvalues than we expect
    IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
    iwc.setMergePolicy(newLogMergePolicy());
    IndexWriter iwriter = new IndexWriter(directory, iwc);
    Document doc = new Document();
    doc.add(new BinaryDocValuesField("dv", new BytesRef("foo!")));
    iwriter.addDocument(doc);
    doc.add(new BinaryDocValuesField("dv", new BytesRef("bar!")));
    expectThrows(IllegalArgumentException.class, () -> {
        iwriter.addDocument(doc);
    });
    IndexReader ir = iwriter.getReader();
    assertEquals(1, ir.numDocs());
    ir.close();
    iwriter.close();
    directory.close();
}
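
Entries 94 through 100 all provoke the same guard: a field's doc-values type is fixed the first time the field is seen. A hedged sketch of checking the recorded type up front, via the merged FieldInfos, instead of catching the IllegalArgumentException:

DirectoryReader r = DirectoryReader.open(directory);
FieldInfo fi = MultiFields.getMergedFieldInfos(r).fieldInfo("dv");
DocValuesType existing = fi == null ? DocValuesType.NONE : fi.getDocValuesType();
r.close();
// only add a binary doc-values field if the name is unused or already binary:
if (existing == DocValuesType.NONE || existing == DocValuesType.BINARY) {
    // safe to add another BinaryDocValuesField("dv", ...)
}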