org.apache.lucene.store.Directory

Below are usage examples of the Java API class org.apache.lucene.store.Directory, taken from open source projects.
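
Before the per-project examples, here is a minimal, self-contained sketch of the contract they all exercise: a Directory is a flat namespace of files, written once through IndexOutput and read back through IndexInput. This sketch is not drawn from any project below; the path and file name are placeholders.

import java.nio.file.Paths;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;

public class DirectoryRoundTrip {
    public static void main(String[] args) throws Exception {
        // FSDirectory is the stock filesystem-backed implementation.
        try (Directory dir = FSDirectory.open(Paths.get("/tmp/lucene-demo"))) {
            // Write a file through an IndexOutput.
            try (IndexOutput out = dir.createOutput("demo.bin", IOContext.DEFAULT)) {
                out.writeInt(42);
            }
            // Read it back through an IndexInput.
            try (IndexInput in = dir.openInput("demo.bin", IOContext.DEFAULT)) {
                System.out.println(in.readInt()); // prints 42
            }
            // List the directory's files and their lengths.
            for (String name : dir.listAll()) {
                System.out.println(name + " -> " + dir.fileLength(name) + " bytes");
            }
        }
    }
}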

1. BaseCompoundFormatTestCase#testDoubleClose()

// test that a second call to close() behaves according to Closeable
public void testDoubleClose() throws IOException {
    final String testfile = "_123.test";
    Directory dir = newDirectory();
    SegmentInfo si = newSegmentInfo(dir, "_123");
    try (IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT)) {
        CodecUtil.writeIndexHeader(out, "Foo", 0, si.getId(), "suffix");
        out.writeInt(3);
        CodecUtil.writeFooter(out);
    }
    si.setFiles(Collections.singleton(testfile));
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
    assertEquals(1, cfs.listAll().length);
    cfs.close();
    // second close should not throw exception
    cfs.close();
    dir.close();
}
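
Several of the compound-format examples call a newSegmentInfo helper that this page does not reproduce. A plausible sketch, modeled on the SegmentInfo constructor visible in the BaseSegmentInfoFormatTestCase examples further down; the version, doc count, and codec choice here are assumptions:

static SegmentInfo newSegmentInfo(Directory dir, String name) {
    // A throwaway segment description: default codec, random id,
    // no diagnostics, attributes, or index sort.
    return new SegmentInfo(dir, Version.LATEST, name, 10000, false,
        Codec.getDefault(), Collections.<String, String>emptyMap(),
        StringHelper.randomId(), new HashMap<String, String>(), null);
}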

2. BaseCompoundFormatTestCase#testTwoFiles()

/** 
   * This test creates a compound file based on two files.
   */
public void testTwoFiles() throws IOException {
    String files[] = { "_123.d1", "_123.d2" };
    Directory dir = newDirectory();
    SegmentInfo si = newSegmentInfo(dir, "_123");
    createSequenceFile(dir, files[0], (byte) 0, 15, si.getId(), "suffix");
    createSequenceFile(dir, files[1], (byte) 0, 114, si.getId(), "suffix");
    si.setFiles(Arrays.asList(files));
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
    for (String file : files) {
        IndexInput expected = dir.openInput(file, newIOContext(random()));
        IndexInput actual = cfs.openInput(file, newIOContext(random()));
        assertSameStreams(file, expected, actual);
        assertSameSeekBehavior(file, expected, actual);
        expected.close();
        actual.close();
    }
    cfs.close();
    dir.close();
}
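
The createSequenceFile helper above is likewise not shown on this page. A sketch of its assumed behavior, writing size incrementing bytes starting at start between the same codec header and footer seen in example 1:

static void createSequenceFile(Directory dir, String name, byte start,
                               int size, byte[] segId, String segSuffix) throws IOException {
    try (IndexOutput os = dir.createOutput(name, IOContext.DEFAULT)) {
        CodecUtil.writeIndexHeader(os, "Foo", 0, segId, segSuffix);
        for (int i = 0; i < size; i++) {
            os.writeByte(start);
            start++;
        }
        CodecUtil.writeFooter(os);
    }
}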

3. BaseCompoundFormatTestCase#testCreateOutputDisabled()

// test that cfs reader is read-only
public void testCreateOutputDisabled() throws IOException {
    Directory dir = newDirectory();
    SegmentInfo si = newSegmentInfo(dir, "_123");
    si.setFiles(Collections.emptyList());
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
    expectThrows(UnsupportedOperationException.class, () -> {
        cfs.createOutput("bogus", IOContext.DEFAULT);
    });
    cfs.close();
    dir.close();
}

4. BaseCompoundFormatTestCase#testDeleteFileDisabled()

// test that cfs reader is read-only
public void testDeleteFileDisabled() throws IOException {
    final String testfile = "_123.test";
    Directory dir = newDirectory();
    IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT);
    out.writeInt(3);
    out.close();
    SegmentInfo si = newSegmentInfo(dir, "_123");
    si.setFiles(Collections.emptyList());
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
    expectThrows(UnsupportedOperationException.class, () -> {
        cfs.deleteFile(testfile);
    });
    cfs.close();
    dir.close();
}

5. BaseCompoundFormatTestCase#testRenameFileDisabled()

// test that cfs reader is read-only
public void testRenameFileDisabled() throws IOException {
    final String testfile = "_123.test";
    Directory dir = newDirectory();
    IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT);
    out.writeInt(3);
    out.close();
    SegmentInfo si = newSegmentInfo(dir, "_123");
    si.setFiles(Collections.emptyList());
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
    expectThrows(UnsupportedOperationException.class, () -> {
        cfs.rename(testfile, "bogus");
    });
    cfs.close();
    dir.close();
}

6. BaseCompoundFormatTestCase#testSyncDisabled()

// test that cfs reader is read-only
public void testSyncDisabled() throws IOException {
    final String testfile = "_123.test";
    Directory dir = newDirectory();
    IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT);
    out.writeInt(3);
    out.close();
    SegmentInfo si = newSegmentInfo(dir, "_123");
    si.setFiles(Collections.emptyList());
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
    expectThrows(UnsupportedOperationException.class, () -> {
        cfs.sync(Collections.singleton(testfile));
    });
    cfs.close();
    dir.close();
}

7. BaseCompoundFormatTestCase#testMakeLockDisabled()

// test that cfs reader is read-only
public void testMakeLockDisabled() throws IOException {
    final String testfile = "_123.test";
    Directory dir = newDirectory();
    IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT);
    out.writeInt(3);
    out.close();
    SegmentInfo si = newSegmentInfo(dir, "_123");
    si.setFiles(Collections.emptyList());
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
    expectThrows(UnsupportedOperationException.class, () -> {
        cfs.obtainLock("foobar");
    });
    cfs.close();
    dir.close();
}

8. BaseCompoundFormatTestCase#testClonedStreamsClosing()

public void testClonedStreamsClosing() throws IOException {
    Directory dir = newDirectory();
    Directory cr = createLargeCFS(dir);
    // basic clone
    IndexInput expected = dir.openInput("_123.f11", newIOContext(random()));
    IndexInput one = cr.openInput("_123.f11", newIOContext(random()));
    IndexInput two = one.clone();
    assertSameStreams("basic clone one", expected, one);
    expected.seek(0);
    assertSameStreams("basic clone two", expected, two);
    // Now close the compound reader
    cr.close();
    expected.close();
    dir.close();
}
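
Examples 8 through 10 depend on a createLargeCFS helper. A sketch under the assumption that it packs a batch of sequence files into one compound file and returns the compound reader; twenty 2,000-byte files is a guess that at least covers the _123.f2 and _123.f11 names the tests open:

static Directory createLargeCFS(Directory dir) throws IOException {
    SegmentInfo si = newSegmentInfo(dir, "_123");
    List<String> files = new ArrayList<>();
    for (int i = 0; i < 20; i++) {
        String name = "_123.f" + i;
        createSequenceFile(dir, name, (byte) 0, 2000, si.getId(), "suffix");
        files.add(name);
    }
    si.setFiles(files);
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    return si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
}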

9. BaseCompoundFormatTestCase#testFileNotFound()

public void testFileNotFound() throws IOException {
    Directory dir = newDirectory();
    Directory cr = createLargeCFS(dir);
    // Open bogus file
    expectThrows(IOException.class, () -> {
        cr.openInput("bogus", newIOContext(random()));
    });
    cr.close();
    dir.close();
}

10. BaseCompoundFormatTestCase#testReadPastEOF()

public void testReadPastEOF() throws IOException {
    Directory dir = newDirectory();
    Directory cr = createLargeCFS(dir);
    IndexInput is = cr.openInput("_123.f2", newIOContext(random()));
    is.seek(is.length() - 10);
    byte b[] = new byte[100];
    is.readBytes(b, 0, 10);
    // Single byte read past end of file
    expectThrows(IOException.class, () -> {
        is.readByte();
    });
    is.seek(is.length() - 10);
    // Block read past end of file
    expectThrows(IOException.class, () -> {
        is.readBytes(b, 0, 50);
    });
    is.close();
    cr.close();
    dir.close();
}

11. BaseCompoundFormatTestCase#testResourceNameInsideCompoundFile()

// LUCENE-6311: make sure the resource name inside a compound file confesses that it's inside a compound file
public void testResourceNameInsideCompoundFile() throws Exception {
    Directory dir = newDirectory();
    String subFile = "_123.xyz";
    SegmentInfo si = newSegmentInfo(dir, "_123");
    createSequenceFile(dir, subFile, (byte) 0, 10, si.getId(), "suffix");
    si.setFiles(Collections.singletonList(subFile));
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
    IndexInput in = cfs.openInput(subFile, IOContext.DEFAULT);
    String desc = in.toString();
    assertTrue("resource description hides that it's inside a compound file: " + desc, desc.contains("[slice=" + subFile + "]"));
    cfs.close();
    dir.close();
}

12. TestStressIndexing2#testRandom()

public void testRandom() throws Throwable {
    Directory dir1 = newMaybeVirusCheckingDirectory();
    Directory dir2 = newMaybeVirusCheckingDirectory();
    // mergeFactor=2; maxBufferedDocs=2; Map docs = indexRandom(1, 3, 2, dir1);
    boolean doReaderPooling = random().nextBoolean();
    Map<String, Document> docs = indexRandom(5, 3, 100, dir1, doReaderPooling);
    indexSerial(random(), docs, dir2);
    // verifying verify
    // verifyEquals(dir1, dir1, "id");
    // verifyEquals(dir2, dir2, "id");
    verifyEquals(dir1, dir2, "id");
    dir1.close();
    dir2.close();
}

13. TestSwappedIndexFiles#test()

public void test() throws Exception {
    Directory dir1 = newDirectory();
    Directory dir2 = newDirectory();
    // Disable CFS 80% of the time so we can truncate individual files, but the other 20% of the time we test truncation of .cfs/.cfe too:
    boolean useCFS = random().nextInt(5) == 1;
    // Use LineFileDocs so we (hopefully) get most Lucene features
    // tested, e.g. IntPoint was recently added to it:
    LineFileDocs docs = new LineFileDocs(random());
    Document doc = docs.nextDoc();
    long seed = random().nextLong();
    indexOneDoc(seed, dir1, doc, useCFS);
    indexOneDoc(seed, dir2, doc, useCFS);
    swapFiles(dir1, dir2);
    dir1.close();
    dir2.close();
}

14. TestTermVectors#testFullMergeAddIndexesReader()

Project: lucene-solr
Source File: TestTermVectors.java
public void testFullMergeAddIndexesReader() throws Exception {
    Directory[] input = new Directory[] { newDirectory(), newDirectory() };
    Directory target = newDirectory();
    for (Directory dir : input) {
        createDir(dir);
    }
    IndexWriter writer = createWriter(target);
    for (Directory dir : input) {
        DirectoryReader r = DirectoryReader.open(dir);
        TestUtil.addIndexesSlowly(writer, r);
        r.close();
    }
    writer.forceMerge(1);
    writer.close();
    verifyIndex(target);
    IOUtils.close(target, input[0], input[1]);
}

15. TestParallelCompositeReader#testRefCounts1()

public void testRefCounts1() throws IOException {
    Directory dir1 = getDir1(random());
    Directory dir2 = getDir2(random());
    DirectoryReader ir1, ir2;
    // close subreaders, ParallelReader will not change refCounts, but close on its own close
    ParallelCompositeReader pr = new ParallelCompositeReader(ir1 = DirectoryReader.open(dir1), ir2 = DirectoryReader.open(dir2));
    IndexReader psub1 = pr.getSequentialSubReaders().get(0);
    // check RefCounts
    assertEquals(1, ir1.getRefCount());
    assertEquals(1, ir2.getRefCount());
    assertEquals(1, psub1.getRefCount());
    pr.close();
    assertEquals(0, ir1.getRefCount());
    assertEquals(0, ir2.getRefCount());
    assertEquals(0, psub1.getRefCount());
    dir1.close();
    dir2.close();
}

16. TestParallelCompositeReader#testRefCounts2()

public void testRefCounts2() throws IOException {
    Directory dir1 = getDir1(random());
    Directory dir2 = getDir2(random());
    DirectoryReader ir1 = DirectoryReader.open(dir1);
    DirectoryReader ir2 = DirectoryReader.open(dir2);
    // don't close subreaders, so ParallelReader will increment refcounts
    ParallelCompositeReader pr = new ParallelCompositeReader(false, ir1, ir2);
    IndexReader psub1 = pr.getSequentialSubReaders().get(0);
    // check RefCounts
    assertEquals(2, ir1.getRefCount());
    assertEquals(2, ir2.getRefCount());
    assertEquals("refCount must be 1, as the synthetic reader was created by ParallelCompositeReader", 1, psub1.getRefCount());
    pr.close();
    assertEquals(1, ir1.getRefCount());
    assertEquals(1, ir2.getRefCount());
    assertEquals("refcount must be 0 because parent was closed", 0, psub1.getRefCount());
    ir1.close();
    ir2.close();
    assertEquals(0, ir1.getRefCount());
    assertEquals(0, ir2.getRefCount());
    assertEquals("refcount should not change anymore", 0, psub1.getRefCount());
    dir1.close();
    dir2.close();
}

17. TestParallelCompositeReader#testIncompatibleIndexes2()

public void testIncompatibleIndexes2() throws IOException {
    Directory dir1 = getDir1(random());
    Directory dir2 = getInvalidStructuredDir2(random());
    DirectoryReader ir1 = DirectoryReader.open(dir1), ir2 = DirectoryReader.open(dir2);
    CompositeReader[] readers = new CompositeReader[] { ir1, ir2 };
    expectThrows(IllegalArgumentException.class, () -> {
        new ParallelCompositeReader(readers);
    });
    expectThrows(IllegalArgumentException.class, () -> {
        new ParallelCompositeReader(random().nextBoolean(), readers, readers);
    });
    assertEquals(1, ir1.getRefCount());
    assertEquals(1, ir2.getRefCount());
    ir1.close();
    ir2.close();
    assertEquals(0, ir1.getRefCount());
    assertEquals(0, ir2.getRefCount());
    dir1.close();
    dir2.close();
}

18. TestParallelLeafReader#testFieldNames()

public void testFieldNames() throws Exception {
    Directory dir1 = getDir1(random());
    Directory dir2 = getDir2(random());
    ParallelLeafReader pr = new ParallelLeafReader(getOnlyLeafReader(DirectoryReader.open(dir1)), getOnlyLeafReader(DirectoryReader.open(dir2)));
    FieldInfos fieldInfos = pr.getFieldInfos();
    assertEquals(4, fieldInfos.size());
    assertNotNull(fieldInfos.fieldInfo("f1"));
    assertNotNull(fieldInfos.fieldInfo("f2"));
    assertNotNull(fieldInfos.fieldInfo("f3"));
    assertNotNull(fieldInfos.fieldInfo("f4"));
    pr.close();
    dir1.close();
    dir2.close();
}

19. TestParallelLeafReader#testRefCounts1()

public void testRefCounts1() throws IOException {
    Directory dir1 = getDir1(random());
    Directory dir2 = getDir2(random());
    LeafReader ir1, ir2;
    // close subreaders, ParallelReader will not change refCounts, but close on its own close
    ParallelLeafReader pr = new ParallelLeafReader(ir1 = getOnlyLeafReader(DirectoryReader.open(dir1)), ir2 = getOnlyLeafReader(DirectoryReader.open(dir2)));
    // check RefCounts
    assertEquals(1, ir1.getRefCount());
    assertEquals(1, ir2.getRefCount());
    pr.close();
    assertEquals(0, ir1.getRefCount());
    assertEquals(0, ir2.getRefCount());
    dir1.close();
    dir2.close();
}

20. TestParallelLeafReader#testRefCounts2()

public void testRefCounts2() throws IOException {
    Directory dir1 = getDir1(random());
    Directory dir2 = getDir2(random());
    LeafReader ir1 = getOnlyLeafReader(DirectoryReader.open(dir1));
    LeafReader ir2 = getOnlyLeafReader(DirectoryReader.open(dir2));
    // don't close subreaders, so ParallelReader will increment refcounts
    ParallelLeafReader pr = new ParallelLeafReader(false, ir1, ir2);
    // check RefCounts
    assertEquals(2, ir1.getRefCount());
    assertEquals(2, ir2.getRefCount());
    pr.close();
    assertEquals(1, ir1.getRefCount());
    assertEquals(1, ir2.getRefCount());
    ir1.close();
    ir2.close();
    assertEquals(0, ir1.getRefCount());
    assertEquals(0, ir2.getRefCount());
    dir1.close();
    dir2.close();
}

21. TestPKIndexSplitter#checkSplitting()

private void checkSplitting(Directory dir, Term splitTerm, int leftCount, int rightCount) throws Exception {
    Directory dir1 = newDirectory();
    Directory dir2 = newDirectory();
    PKIndexSplitter splitter = new PKIndexSplitter(dir, dir1, dir2, splitTerm, newIndexWriterConfig(new MockAnalyzer(random())), newIndexWriterConfig(new MockAnalyzer(random())));
    splitter.split();
    IndexReader ir1 = DirectoryReader.open(dir1);
    IndexReader ir2 = DirectoryReader.open(dir2);
    assertEquals(leftCount, ir1.numDocs());
    assertEquals(rightCount, ir2.numDocs());
    checkContents(ir1, "1");
    checkContents(ir2, "2");
    ir1.close();
    ir2.close();
    dir1.close();
    dir2.close();
}

22. TestOrdinalMappingLeafReader#testTaxonomyMergeUtils()

@Test
public void testTaxonomyMergeUtils() throws Exception {
    Directory srcIndexDir = newDirectory();
    Directory srcTaxoDir = newDirectory();
    buildIndexWithFacets(srcIndexDir, srcTaxoDir, true);
    Directory targetIndexDir = newDirectory();
    Directory targetTaxoDir = newDirectory();
    buildIndexWithFacets(targetIndexDir, targetTaxoDir, false);
    IndexWriter destIndexWriter = new IndexWriter(targetIndexDir, newIndexWriterConfig(null));
    DirectoryTaxonomyWriter destTaxoWriter = new DirectoryTaxonomyWriter(targetTaxoDir);
    try {
        TaxonomyMergeUtils.merge(srcIndexDir, srcTaxoDir, new MemoryOrdinalMap(), destIndexWriter, destTaxoWriter, facetConfig);
    } finally {
        IOUtils.close(destIndexWriter, destTaxoWriter);
    }
    verifyResults(targetIndexDir, targetTaxoDir);
    IOUtils.close(targetIndexDir, targetTaxoDir, srcIndexDir, srcTaxoDir);
}

23. TestSearcherTaxonomyManager#testReplaceTaxonomyNRT()

public void testReplaceTaxonomyNRT() throws Exception {
    Directory dir = newDirectory();
    Directory taxoDir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    DirectoryTaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir);
    Directory taxoDir2 = newDirectory();
    DirectoryTaxonomyWriter tw2 = new DirectoryTaxonomyWriter(taxoDir2);
    tw2.close();
    SearcherTaxonomyManager mgr = new SearcherTaxonomyManager(w, true, null, tw);
    w.addDocument(new Document());
    tw.replaceTaxonomy(taxoDir2);
    taxoDir2.close();
    expectThrows(IllegalStateException.class, () -> {
        mgr.maybeRefresh();
    });
    w.close();
    IOUtils.close(mgr, tw, taxoDir, dir);
}

24. TestDocValuesIndexing#testTypeChangeViaAddIndexes()

public void testTypeChangeViaAddIndexes() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);
    writer.close();
    Directory dir2 = newDirectory();
    conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer2 = new IndexWriter(dir2, conf);
    doc = new Document();
    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    writer2.addDocument(doc);
    expectThrows(IllegalArgumentException.class, () -> {
        writer2.addIndexes(dir);
    });
    writer2.close();
    dir.close();
    dir2.close();
}

25. TestDocValuesIndexing#testTypeChangeViaAddIndexes2()

public void testTypeChangeViaAddIndexes2() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);
    writer.close();
    Directory dir2 = newDirectory();
    conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer2 = new IndexWriter(dir2, conf);
    writer2.addIndexes(dir);
    Document doc2 = new Document();
    doc2.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    expectThrows(IllegalArgumentException.class, () -> {
        writer2.addDocument(doc2);
    });
    writer2.close();
    dir2.close();
    dir.close();
}

26. TestDocValuesIndexing#testTypeChangeViaAddIndexesIR2()

public void testTypeChangeViaAddIndexesIR2() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    writer.addDocument(doc);
    writer.close();
    Directory dir2 = newDirectory();
    conf = newIndexWriterConfig(new MockAnalyzer(random()));
    IndexWriter writer2 = new IndexWriter(dir2, conf);
    DirectoryReader reader = DirectoryReader.open(dir);
    TestUtil.addIndexesSlowly(writer2, reader);
    reader.close();
    Document doc2 = new Document();
    doc2.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    expectThrows(IllegalArgumentException.class, () -> {
        writer2.addDocument(doc2);
    });
    writer2.close();
    dir2.close();
    dir.close();
}

27. TestMixedDirectory#testMixedDirectoryAndPolicy()

Project: hadoop-20
Source File: TestMixedDirectory.java
public void testMixedDirectoryAndPolicy() throws IOException {
    Directory readDir = new RAMDirectory();
    updateIndex(readDir, 0, numDocsPerUpdate, new KeepOnlyLastCommitDeletionPolicy());
    verify(readDir, numDocsPerUpdate);
    IndexOutput out = readDir.createOutput("_" + (numDocsPerUpdate / maxBufferedDocs + 2) + ".cfs");
    out.writeInt(0);
    out.close();
    Directory writeDir = new RAMDirectory();
    Directory mixedDir = new MixedDirectory(readDir, writeDir);
    updateIndex(mixedDir, numDocsPerUpdate, numDocsPerUpdate, new MixedDeletionPolicy());
    verify(readDir, numDocsPerUpdate);
    verify(mixedDir, 2 * numDocsPerUpdate);
}

28. IndexCopierTest#basicTest()

Project: jackrabbit-oak
Source File: IndexCopierTest.java
@Test
public void basicTest() throws Exception {
    Directory baseDir = new RAMDirectory();
    IndexDefinition defn = new IndexDefinition(root, builder.getNodeState());
    IndexCopier c1 = new RAMIndexCopier(baseDir, sameThreadExecutor(), getWorkDir());
    Directory remote = new RAMDirectory();
    Directory wrapped = c1.wrapForRead("/foo", defn, remote);
    byte[] t1 = writeFile(remote, "t1");
    byte[] t2 = writeFile(remote, "t2");
    assertEquals(2, wrapped.listAll().length);
    assertTrue(wrapped.fileExists("t1"));
    assertTrue(wrapped.fileExists("t2"));
    assertEquals(t1.length, wrapped.fileLength("t1"));
    assertEquals(t2.length, wrapped.fileLength("t2"));
    readAndAssert(wrapped, "t1", t1);
    //t1 should now be copied to the local baseDir
    assertTrue(baseDir.fileExists("t1"));
}

29. IndexCopierTest#nonExistentFile()

Project: jackrabbit-oak
Source File: IndexCopierTest.java
@Test
public void nonExistentFile() throws Exception {
    Directory baseDir = new RAMDirectory();
    IndexDefinition defn = new IndexDefinition(root, builder.getNodeState());
    CollectingExecutor executor = new CollectingExecutor();
    IndexCopier c1 = new RAMIndexCopier(baseDir, executor, getWorkDir(), true);
    Directory remote = new RAMDirectory();
    Directory wrapped = c1.wrapForRead("/foo", defn, remote);
    try {
        wrapped.openInput("foo.txt", IOContext.DEFAULT);
        fail();
    } catch (FileNotFoundException ignore) {
    }
    assertEquals(0, executor.commands.size());
}

30. IndexCopierTest#wrapForWriteWithoutIndexPath()

Project: jackrabbit-oak
Source File: IndexCopierTest.java
@Test
public void wrapForWriteWithoutIndexPath() throws Exception {
    assumeNotWindows();
    Directory remote = new CloseSafeDir();
    IndexCopier copier = new IndexCopier(sameThreadExecutor(), getWorkDir());
    IndexDefinition defn = new IndexDefinition(root, builder.getNodeState());
    Directory dir = copier.wrapForWrite(defn, remote, false);
    byte[] t1 = writeFile(dir, "t1");
    dir.close();
    readAndAssert(remote, "t1", t1);
    //Work dir must be empty post close
    assertArrayEquals(FileUtils.EMPTY_FILE_ARRAY, copier.getIndexWorkDir().listFiles());
}

31. IndexCopierTest#wrapForWriteWithIndexPath()

Project: jackrabbit-oak
Source File: IndexCopierTest.java
@Test
public void wrapForWriteWithIndexPath() throws Exception {
    assumeNotWindows();
    Directory remote = new CloseSafeDir();
    IndexCopier copier = new IndexCopier(sameThreadExecutor(), getWorkDir());
    builder.setProperty(IndexConstants.INDEX_PATH, "foo");
    IndexDefinition defn = new IndexDefinition(root, builder.getNodeState());
    Directory dir = copier.wrapForWrite(defn, remote, false);
    byte[] t1 = writeFile(dir, "t1");
    dir.close();
    readAndAssert(remote, "t1", t1);
    //Copied file must be present in the local index dir
    File indexDir = copier.getIndexDir(defn, "foo");
    List<File> files = new ArrayList<File>(FileUtils.listFiles(indexDir, null, true));
    Set<String> fileNames = Sets.newHashSet();
    for (File f : files) {
        fileNames.add(f.getName());
    }
    assertThat(fileNames, contains("t1"));
}

32. TestTermVectors#testFullMergeAddIndexesDir()

Project: lucene-solr
Source File: TestTermVectors.java
public void testFullMergeAddIndexesDir() throws Exception {
    Directory[] input = new Directory[] { newDirectory(), newDirectory() };
    Directory target = newDirectory();
    for (Directory dir : input) {
        createDir(dir);
    }
    IndexWriter writer = createWriter(target);
    writer.addIndexes(input);
    writer.forceMerge(1);
    writer.close();
    verifyIndex(target);
    IOUtils.close(target, input[0], input[1]);
}

33. RAMDirectoryFactoryTest#dotestOpenReturnsTheSameForSamePath()

private void dotestOpenReturnsTheSameForSamePath() throws IOException {
    final Directory directory = new RAMDirectory();
    RAMDirectoryFactory factory = new RAMDirectoryFactory() {

        @Override
        protected Directory create(String path, LockFactory lockFactory, DirContext dirContext) {
            return directory;
        }
    };
    String path = "/fake/path";
    Directory dir1 = factory.get(path, DirContext.DEFAULT, DirectoryFactory.LOCK_TYPE_SINGLE);
    Directory dir2 = factory.get(path, DirContext.DEFAULT, DirectoryFactory.LOCK_TYPE_SINGLE);
    assertEquals("RAMDirectoryFactory should not create new instance of RefCntRamDirectory " + "every time open() is called for the same path", dir1, dir2);
    factory.release(dir1);
    factory.release(dir2);
    factory.close();
}

34. TestIndexWriterExceptions#testExceptionDocumentsWriterInit()

public void testExceptionDocumentsWriterInit() throws IOException {
    Directory dir = newDirectory();
    TestPoint2 testPoint = new TestPoint2();
    IndexWriter w = RandomIndexWriter.mockIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random())), testPoint);
    Document doc = new Document();
    doc.add(newTextField("field", "a field", Field.Store.YES));
    w.addDocument(doc);
    testPoint.doFail = true;
    expectThrows(RuntimeException.class, () -> {
        w.addDocument(doc);
    });
    w.close();
    dir.close();
}

35. TestTaxonomyCombined#testWriterTwice2()

/**  testWriterTwice2 is similar to testWriterTwice, except that the index
    is closed and reopened before attempting to write the same categories
    to it again. While testWriterTwice can get along with writing and
    reading correctly just to the cache, testWriterTwice2 also checks the
    actual disk-read part of the writer:
   */
@Test
public void testWriterTwice2() throws Exception {
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    tw.close();
    tw = new DirectoryTaxonomyWriter(indexDir);
    // run fillTaxonomy again - this will try to add the same categories
    // again, and check that we see the same ordinals again, not different
    // ones, and that the number of categories hasn't grown by the new
    // additions
    fillTaxonomy(tw);
    assertEquals(expectedCategories.length, tw.getSize());
    tw.close();
    indexDir.close();
}

36. TestIndexWriterExceptions#testRollbackExceptionHang()

// LUCENE-1347
public void testRollbackExceptionHang() throws Throwable {
    Directory dir = newDirectory();
    TestPoint4 testPoint = new TestPoint4();
    IndexWriter w = RandomIndexWriter.mockIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random())), testPoint);
    addDoc(w);
    testPoint.doFail = true;
    expectThrows(RuntimeException.class, () -> {
        w.rollback();
    });
    testPoint.doFail = false;
    w.rollback();
    dir.close();
}

37. TestIndexWriterExceptions#testNullStoredField()

/** test a null string value doesn't abort the entire segment */
public void testNullStoredField() throws Exception {
    Directory dir = newDirectory();
    Analyzer analyzer = new MockAnalyzer(random());
    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(analyzer));
    // add good document
    Document doc = new Document();
    iw.addDocument(doc);
    expectThrows(IllegalArgumentException.class, () -> {
        // set to null value
        String value = null;
        doc.add(new StoredField("foo", value));
        iw.addDocument(doc);
    });
    assertNull(iw.getTragicException());
    iw.close();
    // make sure we see our good doc
    DirectoryReader r = DirectoryReader.open(dir);
    assertEquals(1, r.numDocs());
    r.close();
    dir.close();
}

38. TestIndexWriterExceptions#testNullStoredFieldReuse()

/** test a null string value doesn't abort the entire segment */
public void testNullStoredFieldReuse() throws Exception {
    Directory dir = newDirectory();
    Analyzer analyzer = new MockAnalyzer(random());
    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(analyzer));
    // add good document
    Document doc = new Document();
    Field theField = new StoredField("foo", "hello", StoredField.TYPE);
    doc.add(theField);
    iw.addDocument(doc);
    expectThrows(IllegalArgumentException.class, () -> {
        // set to null value
        theField.setStringValue(null);
        iw.addDocument(doc);
    });
    assertNull(iw.getTragicException());
    iw.close();
    // make sure we see our good doc
    DirectoryReader r = DirectoryReader.open(dir);
    assertEquals(1, r.numDocs());
    r.close();
    dir.close();
}

39. TestIndexWriterExceptions#testNullStoredBytesField()

/** test a null byte[] value doesn't abort the entire segment */
public void testNullStoredBytesField() throws Exception {
    Directory dir = newDirectory();
    Analyzer analyzer = new MockAnalyzer(random());
    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(analyzer));
    // add good document
    Document doc = new Document();
    iw.addDocument(doc);
    expectThrows(NullPointerException.class, () -> {
        // set to null value
        byte v[] = null;
        Field theField = new StoredField("foo", v);
        doc.add(theField);
        iw.addDocument(doc);
    });
    assertNull(iw.getTragicException());
    iw.close();
    // make sure we see our good doc
    DirectoryReader r = DirectoryReader.open(dir);
    assertEquals(1, r.numDocs());
    r.close();
    dir.close();
}

40. TestIndexWriterExceptions#testNullStoredBytesFieldReuse()

/** test a null byte[] value doesn't abort the entire segment */
public void testNullStoredBytesFieldReuse() throws Exception {
    Directory dir = newDirectory();
    Analyzer analyzer = new MockAnalyzer(random());
    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(analyzer));
    // add good document
    Document doc = new Document();
    Field theField = new StoredField("foo", new BytesRef("hello").bytes);
    doc.add(theField);
    iw.addDocument(doc);
    expectThrows(NullPointerException.class, () -> {
        // set to null value
        byte v[] = null;
        theField.setBytesValue(v);
        iw.addDocument(doc);
    });
    assertNull(iw.getTragicException());
    iw.close();
    // make sure we see our good doc
    DirectoryReader r = DirectoryReader.open(dir);
    assertEquals(1, r.numDocs());
    r.close();
    dir.close();
}

41. TestIndexWriterExceptions#testNullStoredBytesRefField()

/** test a null bytesref value doesn't abort the entire segment */
public void testNullStoredBytesRefField() throws Exception {
    Directory dir = newDirectory();
    Analyzer analyzer = new MockAnalyzer(random());
    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(analyzer));
    // add good document
    Document doc = new Document();
    iw.addDocument(doc);
    expectThrows(IllegalArgumentException.class, () -> {
        // set to null value
        BytesRef v = null;
        Field theField = new StoredField("foo", v);
        doc.add(theField);
        iw.addDocument(doc);
        fail("didn't get expected exception");
    });
    assertNull(iw.getTragicException());
    iw.close();
    // make sure we see our good doc
    DirectoryReader r = DirectoryReader.open(dir);
    assertEquals(1, r.numDocs());
    r.close();
    dir.close();
}

42. TestIndexWriterExceptions#testNullStoredBytesRefFieldReuse()

/** test a null bytesref value doesn't abort the entire segment */
public void testNullStoredBytesRefFieldReuse() throws Exception {
    Directory dir = newDirectory();
    Analyzer analyzer = new MockAnalyzer(random());
    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(analyzer));
    // add good document
    Document doc = new Document();
    Field theField = new StoredField("foo", new BytesRef("hello"));
    doc.add(theField);
    iw.addDocument(doc);
    expectThrows(IllegalArgumentException.class, () -> {
        // set to null value
        BytesRef v = null;
        theField.setBytesValue(v);
        iw.addDocument(doc);
        fail("didn't get expected exception");
    });
    assertNull(iw.getTragicException());
    iw.close();
    // make sure we see our good doc
    DirectoryReader r = DirectoryReader.open(dir);
    assertEquals(1, r.numDocs());
    r.close();
    dir.close();
}

43. TestIndexWriterExceptions#testExceptionOnCtor()

public void testExceptionOnCtor() throws Exception {
    UOEDirectory uoe = new UOEDirectory();
    Directory d = new MockDirectoryWrapper(random(), uoe);
    IndexWriter iw = new IndexWriter(d, newIndexWriterConfig(null));
    iw.addDocument(new Document());
    iw.close();
    uoe.doFail = true;
    expectThrows(UnsupportedOperationException.class, () -> {
        new IndexWriter(d, newIndexWriterConfig(null));
    });
    uoe.doFail = false;
    d.close();
}

44. TestIndexWriterFromReader#testFromNonNRTReader()

// Open from non-NRT reader
public void testFromNonNRTReader() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
    w.addDocument(new Document());
    w.close();
    DirectoryReader r = DirectoryReader.open(dir);
    assertEquals(1, r.maxDoc());
    IndexWriterConfig iwc = newIndexWriterConfig();
    iwc.setIndexCommit(r.getIndexCommit());
    IndexWriter w2 = new IndexWriter(dir, iwc);
    assertEquals(1, r.maxDoc());
    r.close();
    assertEquals(1, w2.maxDoc());
    w2.addDocument(new Document());
    assertEquals(2, w2.maxDoc());
    w2.close();
    IndexReader r2 = DirectoryReader.open(dir);
    assertEquals(2, r2.maxDoc());
    r2.close();
    dir.close();
}

45. TestIndexWriterFromReader#testWithNoFirstCommit()

// Pull NRT reader from a writer on a new index with no commit:
public void testWithNoFirstCommit() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
    w.addDocument(new Document());
    DirectoryReader r = DirectoryReader.open(w);
    w.rollback();
    IndexWriterConfig iwc = newIndexWriterConfig();
    iwc.setIndexCommit(r.getIndexCommit());
    IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
        new IndexWriter(dir, iwc);
    });
    assertEquals("cannot use IndexWriterConfig.setIndexCommit() when index has no commit", expected.getMessage());
    r.close();
    dir.close();
}

46. TestIndexWriterFromReader#testAfterCommitThenIndex()

// Pull NRT reader after writer has committed and then indexed another doc:
public void testAfterCommitThenIndex() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
    w.addDocument(new Document());
    w.commit();
    w.addDocument(new Document());
    DirectoryReader r = DirectoryReader.open(w);
    assertEquals(2, r.maxDoc());
    w.close();
    IndexWriterConfig iwc = newIndexWriterConfig();
    iwc.setIndexCommit(r.getIndexCommit());
    IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
        new IndexWriter(dir, iwc);
    });
    assertTrue(expected.getMessage().contains("the provided reader is stale: its prior commit file"));
    r.close();
    dir.close();
}

47. TestIndexWriterFromReader#testNRTRollback()

// NRT rollback: pull NRT reader after writer has committed and then before indexing another doc
public void testNRTRollback() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
    w.addDocument(new Document());
    w.commit();
    DirectoryReader r = DirectoryReader.open(w);
    assertEquals(1, r.maxDoc());
    // Add another doc
    w.addDocument(new Document());
    assertEquals(2, w.maxDoc());
    w.close();
    IndexWriterConfig iwc = newIndexWriterConfig();
    iwc.setIndexCommit(r.getIndexCommit());
    IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
        new IndexWriter(dir, iwc);
    });
    assertTrue(expected.getMessage().contains("the provided reader is stale: its prior commit file"));
    r.close();
    dir.close();
}

48. TestIndexWriterMaxDocs#testCorruptIndexExceptionTooLarge()

// LUCENE-6299
public void testCorruptIndexExceptionTooLarge() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
    w.addDocument(new Document());
    w.addDocument(new Document());
    w.close();
    setIndexWriterMaxDocs(1);
    try {
        expectThrows(CorruptIndexException.class, () -> {
            DirectoryReader.open(dir);
        });
    } finally {
        restoreIndexWriterMaxDocs();
    }
    dir.close();
}

49. TestIndexWriterMaxDocs#testCorruptIndexExceptionTooLargeWriter()

// LUCENE-6299
public void testCorruptIndexExceptionTooLargeWriter() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
    w.addDocument(new Document());
    w.addDocument(new Document());
    w.close();
    setIndexWriterMaxDocs(1);
    try {
        expectThrows(CorruptIndexException.class, () -> {
            new IndexWriter(dir, new IndexWriterConfig(null));
        });
    } finally {
        restoreIndexWriterMaxDocs();
    }
    dir.close();
}

50. TestIndexWriterMergePolicy#testNormalCase()

// Test the normal case
public void testNormalCase() throws IOException {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(10).setMergePolicy(new LogDocMergePolicy()));
    for (int i = 0; i < 100; i++) {
        addDoc(writer);
        checkInvariants(writer);
    }
    writer.close();
    dir.close();
}

51. TestIndexWriterMergePolicy#testNoOverMerge()

// Test to see if there is over merge
public void testNoOverMerge() throws IOException {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(10).setMergePolicy(new LogDocMergePolicy()));
    boolean noOverMerge = false;
    for (int i = 0; i < 100; i++) {
        addDoc(writer);
        checkInvariants(writer);
        if (writer.getNumBufferedDocuments() + writer.getSegmentCount() >= 18) {
            noOverMerge = true;
        }
    }
    assertTrue(noOverMerge);
    writer.close();
    dir.close();
}

52. TestIndexWriterMerging#testSetMaxMergeDocs()

// LUCENE-1013
public void testSetMaxMergeDocs() throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())).setMergeScheduler(new MyMergeScheduler()).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
    LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
    lmp.setMaxMergeDocs(20);
    lmp.setMergeFactor(2);
    IndexWriter iw = new IndexWriter(dir, conf);
    Document document = new Document();
    FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
    customType.setStoreTermVectors(true);
    document.add(newField("tvtest", "a b c", customType));
    for (int i = 0; i < 177; i++) iw.addDocument(document);
    iw.close();
    dir.close();
}

53. BaseDocValuesFormatTestCase#testOneSortedNumber()

public void testOneSortedNumber() throws IOException {
    assumeTrue("Codec does not support SORTED_NUMERIC", codecSupportsSortedNumeric());
    Directory directory = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), directory);
    Document doc = new Document();
    doc.add(new SortedNumericDocValuesField("dv", 5));
    writer.addDocument(doc);
    writer.close();
    // Now search the index:
    IndexReader reader = DirectoryReader.open(directory);
    assert reader.leaves().size() == 1;
    SortedNumericDocValues dv = reader.leaves().get(0).reader().getSortedNumericDocValues("dv");
    dv.setDocument(0);
    assertEquals(1, dv.count());
    assertEquals(5, dv.valueAt(0));
    reader.close();
    directory.close();
}

54. BaseDocValuesFormatTestCase#testTwoSortedNumber()

public void testTwoSortedNumber() throws IOException {
    assumeTrue("Codec does not support SORTED_NUMERIC", codecSupportsSortedNumeric());
    Directory directory = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), directory);
    Document doc = new Document();
    doc.add(new SortedNumericDocValuesField("dv", 11));
    doc.add(new SortedNumericDocValuesField("dv", -5));
    writer.addDocument(doc);
    writer.close();
    // Now search the index:
    IndexReader reader = DirectoryReader.open(directory);
    assert reader.leaves().size() == 1;
    SortedNumericDocValues dv = reader.leaves().get(0).reader().getSortedNumericDocValues("dv");
    dv.setDocument(0);
    assertEquals(2, dv.count());
    assertEquals(-5, dv.valueAt(0));
    assertEquals(11, dv.valueAt(1));
    reader.close();
    directory.close();
}

55. BaseDocValuesFormatTestCase#testTwoSortedNumberSameValue()

public void testTwoSortedNumberSameValue() throws IOException {
    assumeTrue("Codec does not support SORTED_NUMERIC", codecSupportsSortedNumeric());
    Directory directory = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), directory);
    Document doc = new Document();
    doc.add(new SortedNumericDocValuesField("dv", 11));
    doc.add(new SortedNumericDocValuesField("dv", 11));
    writer.addDocument(doc);
    writer.close();
    // Now search the index:
    IndexReader reader = DirectoryReader.open(directory);
    assert reader.leaves().size() == 1;
    SortedNumericDocValues dv = reader.leaves().get(0).reader().getSortedNumericDocValues("dv");
    dv.setDocument(0);
    assertEquals(2, dv.count());
    assertEquals(11, dv.valueAt(0));
    assertEquals(11, dv.valueAt(1));
    reader.close();
    directory.close();
}

56. BaseFieldInfoFormatTestCase#testOneField()

/** Test field infos read/write with a single field */
public void testOneField() throws Exception {
    Directory dir = newDirectory();
    Codec codec = getCodec();
    SegmentInfo segmentInfo = newSegmentInfo(dir, "_123");
    FieldInfos.Builder builder = new FieldInfos.Builder();
    FieldInfo fi = builder.getOrAdd("field");
    fi.setIndexOptions(TextField.TYPE_STORED.indexOptions());
    addAttributes(fi);
    FieldInfos infos = builder.finish();
    codec.fieldInfosFormat().write(dir, segmentInfo, "", infos, IOContext.DEFAULT);
    FieldInfos infos2 = codec.fieldInfosFormat().read(dir, segmentInfo, "", IOContext.DEFAULT);
    assertEquals(1, infos2.size());
    assertNotNull(infos2.fieldInfo("field"));
    assertTrue(infos2.fieldInfo("field").getIndexOptions() != IndexOptions.NONE);
    assertEquals(DocValuesType.NONE, infos2.fieldInfo("field").getDocValuesType());
    assertFalse(infos2.fieldInfo("field").omitsNorms());
    assertFalse(infos2.fieldInfo("field").hasPayloads());
    assertFalse(infos2.fieldInfo("field").hasVectors());
    dir.close();
}

57. BaseFieldInfoFormatTestCase#testImmutableAttributes()

/** Test field infos attributes coming back are not mutable */
public void testImmutableAttributes() throws Exception {
    Directory dir = newDirectory();
    Codec codec = getCodec();
    SegmentInfo segmentInfo = newSegmentInfo(dir, "_123");
    FieldInfos.Builder builder = new FieldInfos.Builder();
    FieldInfo fi = builder.getOrAdd("field");
    fi.setIndexOptions(TextField.TYPE_STORED.indexOptions());
    addAttributes(fi);
    fi.putAttribute("foo", "bar");
    fi.putAttribute("bar", "baz");
    FieldInfos infos = builder.finish();
    codec.fieldInfosFormat().write(dir, segmentInfo, "", infos, IOContext.DEFAULT);
    FieldInfos infos2 = codec.fieldInfosFormat().read(dir, segmentInfo, "", IOContext.DEFAULT);
    assertEquals(1, infos2.size());
    assertNotNull(infos2.fieldInfo("field"));
    Map<String, String> attributes = infos2.fieldInfo("field").attributes();
    // shouldn't be able to modify attributes
    expectThrows(UnsupportedOperationException.class, () -> {
        attributes.put("bogus", "bogus");
    });
    dir.close();
}

58. BasePostingsFormatTestCase#testPostingsEnumReuse()

public void testPostingsEnumReuse() throws Exception {
    Path path = createTempDir("testPostingsEnumReuse");
    Directory dir = newFSDirectory(path);
    FieldsProducer fieldsProducer = postingsTester.buildIndex(getCodec(), dir, IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, random().nextBoolean(), true);
    Collections.shuffle(postingsTester.allTerms, random());
    RandomPostingsTester.FieldAndTerm fieldAndTerm = postingsTester.allTerms.get(0);
    Terms terms = fieldsProducer.terms(fieldAndTerm.field);
    TermsEnum te = terms.iterator();
    te.seekExact(fieldAndTerm.term);
    checkReuse(te, PostingsEnum.FREQS, PostingsEnum.ALL, false);
    if (isPostingsEnumReuseImplemented()) {
        checkReuse(te, PostingsEnum.ALL, PostingsEnum.ALL, true);
    }
    fieldsProducer.close();
    dir.close();
}

59. BasePostingsFormatTestCase#testJustEmptyField()

public void testJustEmptyField() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(null);
    iwc.setCodec(getCodec());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    Document doc = new Document();
    doc.add(newStringField("", "something", Field.Store.NO));
    iw.addDocument(doc);
    DirectoryReader ir = iw.getReader();
    LeafReader ar = getOnlyLeafReader(ir);
    Fields fields = ar.fields();
    int fieldCount = fields.size();
    // -1 is allowed, if the codec doesn't implement fields.size():
    assertTrue(fieldCount == 1 || fieldCount == -1);
    Terms terms = ar.terms("");
    assertNotNull(terms);
    TermsEnum termsEnum = terms.iterator();
    assertNotNull(termsEnum.next());
    assertEquals(termsEnum.term(), new BytesRef("something"));
    assertNull(termsEnum.next());
    ir.close();
    iw.close();
    dir.close();
}

60. BasePostingsFormatTestCase#testEmptyFieldAndEmptyTerm()

public void testEmptyFieldAndEmptyTerm() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(null);
    iwc.setCodec(getCodec());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    Document doc = new Document();
    doc.add(newStringField("", "", Field.Store.NO));
    iw.addDocument(doc);
    DirectoryReader ir = iw.getReader();
    LeafReader ar = getOnlyLeafReader(ir);
    Fields fields = ar.fields();
    int fieldCount = fields.size();
    // -1 is allowed, if the codec doesn't implement fields.size():
    assertTrue(fieldCount == 1 || fieldCount == -1);
    Terms terms = ar.terms("");
    assertNotNull(terms);
    TermsEnum termsEnum = terms.iterator();
    assertNotNull(termsEnum.next());
    assertEquals(termsEnum.term(), new BytesRef(""));
    assertNull(termsEnum.next());
    ir.close();
    iw.close();
    dir.close();
}

61. BasePostingsFormatTestCase#testDidntWantFreqsButAskedAnyway()

public void testDidntWantFreqsButAskedAnyway() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
    iwc.setCodec(getCodec());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    Document doc = new Document();
    doc.add(newTextField("field", "value", Field.Store.NO));
    iw.addDocument(doc);
    iw.addDocument(doc);
    DirectoryReader ir = iw.getReader();
    LeafReader ar = getOnlyLeafReader(ir);
    TermsEnum termsEnum = ar.terms("field").iterator();
    assertTrue(termsEnum.seekExact(new BytesRef("value")));
    PostingsEnum docsEnum = termsEnum.postings(null, PostingsEnum.NONE);
    assertEquals(0, docsEnum.nextDoc());
    assertEquals(1, docsEnum.freq());
    assertEquals(1, docsEnum.nextDoc());
    assertEquals(1, docsEnum.freq());
    ir.close();
    iw.close();
    dir.close();
}

62. BasePostingsFormatTestCase#testAskForPositionsWhenNotThere()

public void testAskForPositionsWhenNotThere() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
    iwc.setCodec(getCodec());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    Document doc = new Document();
    doc.add(newStringField("field", "value", Field.Store.NO));
    iw.addDocument(doc);
    iw.addDocument(doc);
    DirectoryReader ir = iw.getReader();
    LeafReader ar = getOnlyLeafReader(ir);
    TermsEnum termsEnum = ar.terms("field").iterator();
    assertTrue(termsEnum.seekExact(new BytesRef("value")));
    PostingsEnum docsEnum = termsEnum.postings(null, PostingsEnum.POSITIONS);
    assertEquals(0, docsEnum.nextDoc());
    assertEquals(1, docsEnum.freq());
    assertEquals(1, docsEnum.nextDoc());
    assertEquals(1, docsEnum.freq());
    ir.close();
    iw.close();
    dir.close();
}

63. BaseSegmentInfoFormatTestCase#testFiles()

/** Test files map */
public void testFiles() throws Exception {
    Directory dir = newDirectory();
    Codec codec = getCodec();
    byte id[] = StringHelper.randomId();
    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec, Collections.<String, String>emptyMap(), id, new HashMap<>(), null);
    info.setFiles(Collections.<String>emptySet());
    codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
    SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
    assertEquals(info.files(), info2.files());
    dir.close();
}

64. BaseSegmentInfoFormatTestCase#testAddsSelfToFiles()

/** Tests SI writer adds itself to files... */
public void testAddsSelfToFiles() throws Exception {
    Directory dir = newDirectory();
    Codec codec = getCodec();
    byte id[] = StringHelper.randomId();
    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec, Collections.<String, String>emptyMap(), id, new HashMap<>(), null);
    Set<String> originalFiles = Collections.singleton("_123.a");
    info.setFiles(originalFiles);
    codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
    Set<String> modifiedFiles = info.files();
    assertTrue(modifiedFiles.containsAll(originalFiles));
    assertTrue("did you forget to add yourself to files()", modifiedFiles.size() > originalFiles.size());
    SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
    assertEquals(info.files(), info2.files());
    // files set should be immutable
    expectThrows(UnsupportedOperationException.class, () -> {
        info2.files().add("bogus");
    });
    dir.close();
}

65. BaseSegmentInfoFormatTestCase#testDiagnostics()

/** Test diagnostics map */
public void testDiagnostics() throws Exception {
    Directory dir = newDirectory();
    Codec codec = getCodec();
    byte id[] = StringHelper.randomId();
    Map<String, String> diagnostics = new HashMap<>();
    diagnostics.put("key1", "value1");
    diagnostics.put("key2", "value2");
    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec, diagnostics, id, new HashMap<>(), null);
    info.setFiles(Collections.<String>emptySet());
    codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
    SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
    assertEquals(diagnostics, info2.getDiagnostics());
    // diagnostics map should be immutable
    expectThrows(UnsupportedOperationException.class, () -> {
        info2.getDiagnostics().put("bogus", "bogus");
    });
    dir.close();
}

66. BaseSegmentInfoFormatTestCase#testAttributes()

/** Test attributes map */
public void testAttributes() throws Exception {
    Directory dir = newDirectory();
    Codec codec = getCodec();
    byte id[] = StringHelper.randomId();
    Map<String, String> attributes = new HashMap<>();
    attributes.put("key1", "value1");
    attributes.put("key2", "value2");
    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec, Collections.emptyMap(), id, attributes, null);
    info.setFiles(Collections.<String>emptySet());
    codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
    SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
    assertEquals(attributes, info2.getAttributes());
    // attributes map should be immutable
    expectThrows(UnsupportedOperationException.class, () -> {
        info2.getAttributes().put("bogus", "bogus");
    });
    dir.close();
}

67. BaseSegmentInfoFormatTestCase#testUniqueID()

/** Test unique ID */
public void testUniqueID() throws Exception {
    Codec codec = getCodec();
    Directory dir = newDirectory();
    byte id[] = StringHelper.randomId();
    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec, Collections.<String, String>emptyMap(), id, new HashMap<>(), null);
    info.setFiles(Collections.<String>emptySet());
    codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
    SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
    assertIDEquals(id, info2.getId());
    dir.close();
}

68. BaseStoredFieldsFormatTestCase#testIndexedBit()

public void testIndexedBit() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    FieldType onlyStored = new FieldType();
    onlyStored.setStored(true);
    doc.add(new Field("field", "value", onlyStored));
    doc.add(new StringField("field2", "value", Field.Store.YES));
    w.addDocument(doc);
    IndexReader r = w.getReader();
    w.close();
    assertEquals(IndexOptions.NONE, r.document(0).getField("field").fieldType().indexOptions());
    assertNotNull(r.document(0).getField("field2").fieldType().indexOptions());
    r.close();
    dir.close();
}

71. BaseStoredFieldsFormatTestCase#testEmptyDocs()

View license
public void testEmptyDocs() throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
    iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
    // make sure that the fact that documents might be empty is not a problem
    final Document emptyDoc = new Document();
    final int numDocs = random().nextBoolean() ? 1 : atLeast(1000);
    for (int i = 0; i < numDocs; ++i) {
        iw.addDocument(emptyDoc);
    }
    iw.commit();
    final DirectoryReader rd = DirectoryReader.open(dir);
    for (int i = 0; i < numDocs; ++i) {
        final Document doc = rd.document(i);
        assertNotNull(doc);
        assertTrue(doc.getFields().isEmpty());
    }
    rd.close();
    iw.close();
    dir.close();
}

72. BaseStoredFieldsFormatTestCase#testBulkMergeWithDeletes()

View license
public void testBulkMergeWithDeletes() throws IOException {
    final int numDocs = atLeast(200);
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
    for (int i = 0; i < numDocs; ++i) {
        Document doc = new Document();
        doc.add(new StringField("id", Integer.toString(i), Store.YES));
        doc.add(new StoredField("f", TestUtil.randomSimpleString(random())));
        w.addDocument(doc);
    }
    final int deleteCount = TestUtil.nextInt(random(), 5, numDocs);
    for (int i = 0; i < deleteCount; ++i) {
        final int id = random().nextInt(numDocs);
        w.deleteDocuments(new Term("id", Integer.toString(id)));
    }
    w.commit();
    w.close();
    w = new RandomIndexWriter(random(), dir);
    w.forceMerge(TestUtil.nextInt(random(), 1, 3));
    w.commit();
    w.close();
    TestUtil.checkIndex(dir);
    dir.close();
}

73. BaseTermVectorsFormatTestCase#testRandom()

View license
public void testRandom() throws IOException {
    final RandomDocumentFactory docFactory = new RandomDocumentFactory(5, 20);
    final int numDocs = atLeast(100);
    final RandomDocument[] docs = new RandomDocument[numDocs];
    for (int i = 0; i < numDocs; ++i) {
        docs[i] = docFactory.newDocument(TestUtil.nextInt(random(), 1, 3), TestUtil.nextInt(random(), 10, 50), randomOptions());
    }
    final Directory dir = newDirectory();
    final RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    for (int i = 0; i < numDocs; ++i) {
        writer.addDocument(addId(docs[i].toDocument(), "" + i));
    }
    final IndexReader reader = writer.getReader();
    for (int i = 0; i < numDocs; ++i) {
        final int docID = docID(reader, "" + i);
        assertEquals(docs[i], reader.getTermVectors(docID));
    }
    reader.close();
    writer.close();
    dir.close();
}

74. RandomPostingsTester#testFull()

View license
/** Indexes all fields/terms at the specified
   *  IndexOptions, and fully tests at that IndexOptions. */
public void testFull(Codec codec, Path path, IndexOptions options, boolean withPayloads) throws Exception {
    Directory dir = LuceneTestCase.newFSDirectory(path);
    // TODO test thread safety of buildIndex too
    FieldsProducer fieldsProducer = buildIndex(codec, dir, options, withPayloads, true);
    testFields(fieldsProducer);
    IndexOptions[] allOptions = IndexOptions.values();
    int maxIndexOption = Arrays.asList(allOptions).indexOf(options);
    for (int i = 0; i <= maxIndexOption; i++) {
        testTerms(fieldsProducer, EnumSet.allOf(Option.class), allOptions[i], options, true);
        if (withPayloads) {
            // If we indexed w/ payloads, also test enums w/o accessing payloads:
            testTerms(fieldsProducer, EnumSet.complementOf(EnumSet.of(Option.PAYLOADS)), allOptions[i], options, true);
        }
    }
    fieldsProducer.close();
    dir.close();
}

75. StandardDirectoryFactory#move()

View license
/**
   * Override for more efficient moves.
   * 
   * Intended for use with replication. Use carefully:
   * some Directory wrappers cache files, for example.
   * 
   * This implementation works with NRTCachingDirectory.
   * 
   * You should first {@link Directory#sync(java.util.Collection)} any file that will be
   * moved, or avoid cached files through settings.
   * 
   * @throws IOException
   *           If there is a low-level I/O error.
   */
@Override
public void move(Directory fromDir, Directory toDir, String fileName, IOContext ioContext) throws IOException {
    Directory baseFromDir = getBaseDir(fromDir);
    Directory baseToDir = getBaseDir(toDir);
    if (baseFromDir instanceof FSDirectory && baseToDir instanceof FSDirectory) {
        File dir1 = ((FSDirectory) baseFromDir).getDirectory().toFile();
        File dir2 = ((FSDirectory) baseToDir).getDirectory().toFile();
        File indexFileInTmpDir = new File(dir1, fileName);
        File indexFileInIndex = new File(dir2, fileName);
        boolean success = indexFileInTmpDir.renameTo(indexFileInIndex);
        if (success) {
            return;
        }
    }
    super.move(fromDir, toDir, fileName, ioContext);
}
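
The sync-before-move contract in the javadoc is easy to get wrong from the caller's side. Below is a minimal caller sketch, not taken from the project: the method name and all parameter names are assumptions, and it only illustrates syncing the file on the source Directory before invoking move():

// Hypothetical caller sketch (names are assumptions, not project code):
// sync the file on the source Directory first, then ask the factory to move it.
void syncThenMove(StandardDirectoryFactory factory, Directory fromDir,
                  Directory toDir, String fileName) throws IOException {
    // flush any cached/buffered bytes for this file to stable storage
    fromDir.sync(Collections.singleton(fileName));
    // the rename (or the byte-copy fallback in super.move) now sees the complete file
    factory.move(fromDir, toDir, fileName, IOContext.DEFAULT);
}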

76. TestQueryWrapperFilter#testThousandDocuments()

View license
public void testThousandDocuments() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    for (int i = 0; i < 1000; i++) {
        Document doc = new Document();
        doc.add(newStringField("field", English.intToEnglish(i), Field.Store.NO));
        writer.addDocument(doc);
    }
    IndexReader reader = writer.getReader();
    writer.close();
    IndexSearcher searcher = newSearcher(reader);
    for (int i = 0; i < 1000; i++) {
        TermQuery termQuery = new TermQuery(new Term("field", English.intToEnglish(i)));
        QueryWrapperFilter qwf = new QueryWrapperFilter(termQuery);
        TopDocs td = searcher.search(qwf, 10);
        assertEquals(1, td.totalHits);
    }
    reader.close();
    dir.close();
}

77. TestQueryWrapperFilter#testScore()

View license
public void testScore() throws IOException {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(new StringField("foo", "bar", Store.NO));
    writer.addDocument(doc);
    writer.commit();
    final IndexReader reader = writer.getReader();
    writer.close();
    final IndexSearcher searcher = new IndexSearcher(reader);
    final Query query = new QueryWrapperFilter(new TermQuery(new Term("foo", "bar")));
    final TopDocs topDocs = searcher.search(query, 1);
    assertEquals(1, topDocs.totalHits);
    assertEquals(0f, topDocs.scoreDocs[0].score, 0f);
    reader.close();
    dir.close();
}

78. TestQueryWrapperFilter#testQueryWrapperFilterPropagatesApproximations()

View license
public void testQueryWrapperFilterPropagatesApproximations() throws IOException {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(new StringField("foo", "bar", Store.NO));
    writer.addDocument(doc);
    writer.commit();
    final IndexReader reader = writer.getReader();
    writer.close();
    final IndexSearcher searcher = new IndexSearcher(reader);
    // to still have approximations
    searcher.setQueryCache(null);
    final Query query = new QueryWrapperFilter(new RandomApproximationQuery(new TermQuery(new Term("foo", "bar")), random()));
    final Weight weight = searcher.createNormalizedWeight(query, random().nextBoolean());
    final Scorer scorer = weight.scorer(reader.leaves().get(0));
    assertNotNull(scorer.twoPhaseIterator());
    reader.close();
    dir.close();
}

79. BlockDirectoryTest#testEOF()

Project: lucene-solr
Source File: BlockDirectoryTest.java
View license
@Test
public void testEOF() throws IOException {
    Directory fsDir = FSDirectory.open(new File(file, "normal").toPath());
    String name = "test.eof";
    createFile(name, fsDir, directory);
    long fsLength = fsDir.fileLength(name);
    long hdfsLength = directory.fileLength(name);
    assertEquals(fsLength, hdfsLength);
    testEof(name, fsDir, fsLength);
    testEof(name, directory, hdfsLength);
    fsDir.close();
}

80. TestFieldCache#testEmptyIndex()

Project: lucene-solr
Source File: TestFieldCache.java
View license
public void testEmptyIndex() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(500));
    writer.close();
    IndexReader r = DirectoryReader.open(dir);
    LeafReader reader = SlowCompositeReaderWrapper.wrap(r);
    TestUtil.checkReader(reader);
    FieldCache.DEFAULT.getTerms(reader, "foobar", true);
    FieldCache.DEFAULT.getTermsIndex(reader, "foobar");
    FieldCache.DEFAULT.purgeByCacheKey(reader.getCoreCacheKey());
    r.close();
    dir.close();
}

81. TestFieldCacheSort#testFieldDoc()

Project: lucene-solr
Source File: TestFieldCacheSort.java
View license
/** Tests sorting on internal docid order */
public void testFieldDoc() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(newStringField("value", "foo", Field.Store.NO));
    writer.addDocument(doc);
    doc = new Document();
    doc.add(newStringField("value", "bar", Field.Store.NO));
    writer.addDocument(doc);
    IndexReader ir = writer.getReader();
    writer.close();
    IndexSearcher searcher = newSearcher(ir);
    Sort sort = new Sort(SortField.FIELD_DOC);
    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
    assertEquals(2, td.totalHits);
    // docid 0, then docid 1
    assertEquals(0, td.scoreDocs[0].doc);
    assertEquals(1, td.scoreDocs[1].doc);
    TestUtil.checkReader(ir);
    ir.close();
    dir.close();
}

82. TestFieldCacheSort#testFieldDocReverse()

Project: lucene-solr
Source File: TestFieldCacheSort.java
View license
/** Tests sorting on reverse internal docid order */
public void testFieldDocReverse() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(newStringField("value", "foo", Field.Store.NO));
    writer.addDocument(doc);
    doc = new Document();
    doc.add(newStringField("value", "bar", Field.Store.NO));
    writer.addDocument(doc);
    IndexReader ir = writer.getReader();
    writer.close();
    IndexSearcher searcher = newSearcher(ir);
    Sort sort = new Sort(new SortField(null, SortField.Type.DOC, true));
    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
    assertEquals(2, td.totalHits);
    // docid 1, then docid 0
    assertEquals(1, td.scoreDocs[0].doc);
    assertEquals(0, td.scoreDocs[1].doc);
    TestUtil.checkReader(ir);
    ir.close();
    dir.close();
}

83. TestFieldCacheSort#testEmptyStringVsNullStringSort()

Project: lucene-solr
Source File: TestFieldCacheSort.java
View license
public void testEmptyStringVsNullStringSort() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    Document doc = new Document();
    doc.add(newStringField("f", "", Field.Store.NO));
    doc.add(newStringField("t", "1", Field.Store.NO));
    w.addDocument(doc);
    w.commit();
    doc = new Document();
    doc.add(newStringField("t", "1", Field.Store.NO));
    w.addDocument(doc);
    IndexReader r = UninvertingReader.wrap(DirectoryReader.open(w), Collections.singletonMap("f", Type.SORTED));
    w.close();
    IndexSearcher s = newSearcher(r);
    TopDocs hits = s.search(new TermQuery(new Term("t", "1")), 10, new Sort(new SortField("f", SortField.Type.STRING)));
    assertEquals(2, hits.totalHits);
    // null sorts first
    assertEquals(1, hits.scoreDocs[0].doc);
    assertEquals(0, hits.scoreDocs[1].doc);
    TestUtil.checkReader(r);
    r.close();
    dir.close();
}

84. TestFieldCacheSort#testMultiValuedField()

Project: lucene-solr
Source File: TestFieldCacheSort.java
View license
/** Test that we throw an exception on a multi-valued field; this creates a corrupt reader, so use SORTED_SET instead. */
public void testMultiValuedField() throws IOException {
    Directory indexStore = newDirectory();
    IndexWriter writer = new IndexWriter(indexStore, newIndexWriterConfig(new MockAnalyzer(random())));
    for (int i = 0; i < 5; i++) {
        Document doc = new Document();
        doc.add(new StringField("string", "a" + i, Field.Store.NO));
        doc.add(new StringField("string", "b" + i, Field.Store.NO));
        writer.addDocument(doc);
    }
    // enforce one segment to have a higher unique term count in all cases
    writer.forceMerge(1);
    writer.close();
    Sort sort = new Sort(new SortField("string", SortField.Type.STRING), SortField.FIELD_DOC);
    IndexReader reader = UninvertingReader.wrap(DirectoryReader.open(indexStore), Collections.singletonMap("string", Type.SORTED));
    IndexSearcher searcher = new IndexSearcher(reader);
    expectThrows(IllegalStateException.class, () -> {
        searcher.search(new MatchAllDocsQuery(), 500, sort);
    });
    reader.close();
    indexStore.close();
}

85. TestFieldCacheSort#testSortOneDocument()

Project: lucene-solr
Source File: TestFieldCacheSort.java
View license
/** Tests sorting a single document */
public void testSortOneDocument() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(newStringField("value", "foo", Field.Store.YES));
    writer.addDocument(doc);
    IndexReader ir = UninvertingReader.wrap(writer.getReader(), Collections.singletonMap("value", Type.SORTED));
    writer.close();
    IndexSearcher searcher = newSearcher(ir);
    Sort sort = new Sort(new SortField("value", SortField.Type.STRING));
    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
    assertEquals(1, td.totalHits);
    assertEquals("foo", searcher.doc(td.scoreDocs[0].doc).get("value"));
    TestUtil.checkReader(ir);
    ir.close();
    dir.close();
}

86. TestFieldCacheSort#testSortOneDocumentWithScores()

Project: lucene-solr
Source File: TestFieldCacheSort.java
View license
/** Tests sorting a single document with scores */
public void testSortOneDocumentWithScores() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(newStringField("value", "foo", Field.Store.YES));
    writer.addDocument(doc);
    IndexReader ir = UninvertingReader.wrap(writer.getReader(), Collections.singletonMap("value", Type.SORTED));
    writer.close();
    IndexSearcher searcher = newSearcher(ir);
    Sort sort = new Sort(new SortField("value", SortField.Type.STRING));
    TopDocs expected = searcher.search(new TermQuery(new Term("value", "foo")), 10);
    assertEquals(1, expected.totalHits);
    TopDocs actual = searcher.search(new TermQuery(new Term("value", "foo")), 10, sort, true, true);
    assertEquals(expected.totalHits, actual.totalHits);
    assertEquals(expected.scoreDocs[0].score, actual.scoreDocs[0].score, 0F);
    TestUtil.checkReader(ir);
    ir.close();
    dir.close();
}

87. TestLegacyFieldCache#testEmptyIndex()

View license
public void testEmptyIndex() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(500));
    writer.close();
    IndexReader r = DirectoryReader.open(dir);
    LeafReader reader = SlowCompositeReaderWrapper.wrap(r);
    TestUtil.checkReader(reader);
    FieldCache.DEFAULT.getTerms(reader, "foobar", true);
    FieldCache.DEFAULT.getTermsIndex(reader, "foobar");
    FieldCache.DEFAULT.purgeByCacheKey(reader.getCoreCacheKey());
    r.close();
    dir.close();
}

88. MockFSDirectoryFactory#create()

View license
@Override
public Directory create(String path, LockFactory lockFactory, DirContext dirContext) throws IOException {
    // we pass NoLockFactory, because the real lock factory is set later by injectLockFactory:
    Directory dir = LuceneTestCase.newFSDirectory(new File(path).toPath(), lockFactory);
    // We can't currently do this check because of how Solr sometimes
    // has to reboot a new Directory when replicating or rolling back:
    // the old directory is closed, and the test that follows assumes it
    // can still open an IndexWriter when that happens - so at this point
    // we have a new Directory for the same path and still an open IW.
    Directory cdir = reduce(dir);
    cdir = reduce(cdir);
    cdir = reduce(cdir);
    if (cdir instanceof MockDirectoryWrapper) {
        ((MockDirectoryWrapper) cdir).setAssertNoUnrefencedFilesOnClose(false);
    }
    return dir;
}

89. TestPostingsOffsets#testAddFieldTwice()

View license
public void testAddFieldTwice() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    FieldType customType3 = new FieldType(TextField.TYPE_STORED);
    customType3.setStoreTermVectors(true);
    customType3.setStoreTermVectorPositions(true);
    customType3.setStoreTermVectorOffsets(true);
    customType3.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    doc.add(new Field("content3", "here is more content with aaa aaa aaa", customType3));
    doc.add(new Field("content3", "here is more content with aaa aaa aaa", customType3));
    iw.addDocument(doc);
    iw.close();
    // CheckIndex runs automatically when the test directory is closed
    dir.close();
}

90. TestPostingsOffsets#testLegalbutVeryLargeOffsets()

View license
public void testLegalbutVeryLargeOffsets() throws Exception {
    Directory dir = newDirectory();
    IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null));
    Document doc = new Document();
    Token t1 = new Token("foo", 0, Integer.MAX_VALUE - 500);
    if (random().nextBoolean()) {
        t1.setPayload(new BytesRef("test"));
    }
    Token t2 = new Token("foo", Integer.MAX_VALUE - 500, Integer.MAX_VALUE);
    TokenStream tokenStream = new CannedTokenStream(new Token[] { t1, t2 });
    FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
    ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    // store some term vectors for the checkindex cross-check
    ft.setStoreTermVectors(true);
    ft.setStoreTermVectorPositions(true);
    ft.setStoreTermVectorOffsets(true);
    Field field = new Field("foo", tokenStream, ft);
    doc.add(field);
    iw.addDocument(doc);
    iw.close();
    dir.close();
}

91. TestReadOnlyIndex#buildIndex()

Project: lucene-solr
Source File: TestReadOnlyIndex.java
View license
@BeforeClass
public static void buildIndex() throws Exception {
    indexPath = Files.createTempDirectory("readonlyindex");
    // borrows from TestDemo, but not important to keep in sync with demo
    Analyzer analyzer = new MockAnalyzer(random());
    Directory directory = newFSDirectory(indexPath);
    RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, analyzer);
    Document doc = new Document();
    doc.add(newTextField("fieldname", text, Field.Store.YES));
    iwriter.addDocument(doc);
    iwriter.close();
    directory.close();
    analyzer.close();
}

92. TestSnapshotDeletionPolicy#testRollbackToOldSnapshot()

View license
@Test
public void testRollbackToOldSnapshot() throws Exception {
    int numSnapshots = 2;
    Directory dir = newDirectory();
    SnapshotDeletionPolicy sdp = getDeletionPolicy();
    IndexWriter writer = new IndexWriter(dir, getConfig(random(), sdp));
    prepareIndexAndSnapshots(sdp, writer, numSnapshots);
    writer.close();
    // now open the writer on "snapshot0" - make sure it succeeds
    writer = new IndexWriter(dir, getConfig(random(), sdp).setIndexCommit(snapshots.get(0)));
    // this does the actual rollback
    writer.commit();
    writer.deleteUnusedFiles();
    assertSnapshotExists(dir, sdp, numSnapshots - 1, false);
    writer.close();
    // but 'snapshot1' files will still exist (need to release snapshot before they can be deleted).
    String segFileName = snapshots.get(1).getSegmentsFileName();
    assertTrue("snapshot files should exist in the directory: " + segFileName, slowFileExists(dir, segFileName));
    dir.close();
}

93. TestSnapshotDeletionPolicy#testReleaseSnapshot()

View license
@Test
public void testReleaseSnapshot() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, getConfig(random(), getDeletionPolicy()));
    SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
    prepareIndexAndSnapshots(sdp, writer, 1);
    // Create another commit - we must do that, because otherwise the "snapshot"
    // files will still remain in the index, since it's the last commit.
    writer.addDocument(new Document());
    writer.commit();
    // Release
    String segFileName = snapshots.get(0).getSegmentsFileName();
    sdp.release(snapshots.get(0));
    writer.deleteUnusedFiles();
    writer.close();
    assertFalse("segments file should not be found in dirctory: " + segFileName, slowFileExists(dir, segFileName));
    dir.close();
}

94. TestOfflineSorter#testFixedLengthHeap()

Project: lucene-solr
Source File: TestOfflineSorter.java
View license
public void testFixedLengthHeap() throws Exception {
    // Make sure the RAM accounting is correct, i.e. if we are sorting fixed width
    // ints (4 bytes) then the heap used is really only 4 bytes per value:
    Directory dir = newDirectory();
    IndexOutput out = dir.createTempOutput("unsorted", "tmp", IOContext.DEFAULT);
    try (ByteSequencesWriter w = new OfflineSorter.ByteSequencesWriter(out)) {
        byte[] bytes = new byte[Integer.BYTES];
        for (int i = 0; i < 1024 * 1024; i++) {
            random().nextBytes(bytes);
            w.write(bytes);
        }
        CodecUtil.writeFooter(out);
    }
    OfflineSorter sorter = new OfflineSorter(dir, "foo", OfflineSorter.DEFAULT_COMPARATOR, BufferSize.megabytes(4), OfflineSorter.MAX_TEMPFILES, Integer.BYTES);
    sorter.sort(out.getName());
    // 1M ints (4 MB) with a 4 MB heap allowed should have been sorted in a single heap partition:
    assertEquals(0, sorter.sortInfo.mergeRounds);
    dir.close();
}

95. TestDirectMonotonic#testEmpty()

View license
public void testEmpty() throws IOException {
    Directory dir = newDirectory();
    final int blockShift = TestUtil.nextInt(random(), DirectMonotonicWriter.MIN_BLOCK_SHIFT, DirectMonotonicWriter.MAX_BLOCK_SHIFT);
    final long dataLength;
    try (IndexOutput metaOut = dir.createOutput("meta", IOContext.DEFAULT);
        IndexOutput dataOut = dir.createOutput("data", IOContext.DEFAULT)) {
        DirectMonotonicWriter w = DirectMonotonicWriter.getInstance(metaOut, dataOut, 0, blockShift);
        w.finish();
        dataLength = dataOut.getFilePointer();
    }
    try (IndexInput metaIn = dir.openInput("meta", IOContext.READONCE);
        IndexInput dataIn = dir.openInput("data", IOContext.DEFAULT)) {
        DirectMonotonicReader.Meta meta = DirectMonotonicReader.loadMeta(metaIn, 0, blockShift);
        DirectMonotonicReader.getInstance(meta, dataIn.randomAccessSlice(0, dataLength));
        // no exception
    }
    dir.close();
}

96. TestDirectPacked#testSimple()

Project: lucene-solr
Source File: TestDirectPacked.java
View license
/** simple encode/decode */
public void testSimple() throws Exception {
    Directory dir = newDirectory();
    int bitsPerValue = DirectWriter.bitsRequired(2);
    IndexOutput output = dir.createOutput("foo", IOContext.DEFAULT);
    DirectWriter writer = DirectWriter.getInstance(output, 5, bitsPerValue);
    writer.add(1);
    writer.add(0);
    writer.add(2);
    writer.add(1);
    writer.add(2);
    writer.finish();
    output.close();
    IndexInput input = dir.openInput("foo", IOContext.DEFAULT);
    NumericDocValues reader = DirectReader.getInstance(input.randomAccessSlice(0, input.length()), bitsPerValue, 0);
    assertEquals(1, reader.get(0));
    assertEquals(0, reader.get(1));
    assertEquals(2, reader.get(2));
    assertEquals(1, reader.get(3));
    assertEquals(2, reader.get(4));
    input.close();
    dir.close();
}

97. TestDirectPacked#testNotEnoughValues()

Project: lucene-solr
Source File: TestDirectPacked.java
View license
/** test exception is delivered if you add the wrong number of values */
public void testNotEnoughValues() throws Exception {
    Directory dir = newDirectory();
    int bitsPerValue = DirectWriter.bitsRequired(2);
    IndexOutput output = dir.createOutput("foo", IOContext.DEFAULT);
    DirectWriter writer = DirectWriter.getInstance(output, 5, bitsPerValue);
    writer.add(1);
    writer.add(0);
    writer.add(2);
    writer.add(1);
    IllegalStateException expected = expectThrows(IllegalStateException.class, () -> {
        writer.finish();
    });
    assertTrue(expected.getMessage().startsWith("Wrong number of values added"));
    output.close();
    dir.close();
}

98. PrintTaxonomyStats#main()

Project: lucene-solr
Source File: PrintTaxonomyStats.java
View license
/** Command-line tool. */
@SuppressForbidden(reason = "System.out required: command line tool")
public static void main(String[] args) throws IOException {
    boolean printTree = false;
    String path = null;
    for (int i = 0; i < args.length; i++) {
        if (args[i].equals("-printTree")) {
            printTree = true;
        } else {
            path = args[i];
        }
    }
    if (args.length != (printTree ? 2 : 1)) {
        System.out.println("\nUsage: java -classpath ... org.apache.lucene.facet.util.PrintTaxonomyStats [-printTree] /path/to/taxononmy/index\n");
        System.exit(1);
    }
    Directory dir = FSDirectory.open(Paths.get(path));
    TaxonomyReader r = new DirectoryTaxonomyReader(dir);
    printStats(r, System.out, printTree);
    r.close();
    dir.close();
}
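
The same output can be produced programmatically by calling printStats directly, as main() does above. A minimal sketch follows; the index path is a placeholder assumption, not from the source:

// Hypothetical usage sketch; "/path/to/taxonomy/index" is a placeholder path.
Directory dir = FSDirectory.open(Paths.get("/path/to/taxonomy/index"));
TaxonomyReader r = new DirectoryTaxonomyReader(dir);
// passing true for printTree corresponds to the -printTree flag
PrintTaxonomyStats.printStats(r, System.out, true);
r.close();
dir.close();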

99. TestAddTaxonomy#testAddEmpty()

Project: lucene-solr
Source File: TestAddTaxonomy.java
View license
public void testAddEmpty() throws Exception {
    Directory dest = newDirectory();
    DirectoryTaxonomyWriter destTW = new DirectoryTaxonomyWriter(dest);
    destTW.addCategory(new FacetLabel("Author", "Rob Pike"));
    destTW.addCategory(new FacetLabel("Aardvarks", "Bob"));
    destTW.commit();
    Directory src = newDirectory();
    // create an empty taxonomy
    new DirectoryTaxonomyWriter(src).close();
    OrdinalMap map = randomOrdinalMap();
    destTW.addTaxonomy(src, map);
    destTW.close();
    validate(dest, src, map);
    IOUtils.close(dest, src);
}

100. TestLRUQueryCache#testRefuseToCacheTooLargeEntries()

Project: lucene-solr
Source File: TestLRUQueryCache.java
View license
public void testRefuseToCacheTooLargeEntries() throws IOException {
    Directory dir = newDirectory();
    final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    for (int i = 0; i < 100; ++i) {
        w.addDocument(new Document());
    }
    IndexReader reader = w.getReader();
    // a cache with a maximum size of 1 byte, so no entry can ever fit
    final LRUQueryCache queryCache = new LRUQueryCache(1, 1,  context -> random().nextBoolean());
    final IndexSearcher searcher = newSearcher(reader);
    searcher.setQueryCache(queryCache);
    searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
    searcher.count(new MatchAllDocsQuery());
    assertEquals(0, queryCache.getCacheCount());
    assertEquals(0, queryCache.getEvictionCount());
    reader.close();
    w.close();
    dir.close();
}