Here are examples of the Java API class org.apache.lucene.store.BaseDirectoryWrapper, taken from open source projects.
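Nearly all of the examples share one pattern: obtain a BaseDirectoryWrapper from the test framework, disable its automatic CheckIndex-on-close when the test intentionally corrupts or deletes index files, do the work, and close the directory. Below is a minimal sketch of that pattern; it is illustrative only and assumes a test class extending Lucene's LuceneTestCase (from the lucene-test-framework jar), which supplies newDirectory(), newIndexWriterConfig() and random():

import java.io.IOException;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;

// Illustrative sketch, not taken from any one project.
public class BaseDirectoryWrapperSketch extends LuceneTestCase {
  public void testPattern() throws IOException {
    // newDirectory() returns a randomized BaseDirectoryWrapper with
    // CheckIndex-on-close enabled by default:
    BaseDirectoryWrapper dir = newDirectory();
    // Tests that deliberately corrupt the index turn the automatic check off,
    // otherwise closing the directory would fail the test:
    dir.setCheckIndexOnClose(false);
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    w.addDocument(new Document());
    w.close();
    dir.close(); // with the check enabled, this is where CheckIndex would run
  }
}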
1. TestSegmentInfos#testVersionsOneSegment()
Project: lucene-solr
File: TestSegmentInfos.java
// LUCENE-5954
public void testVersionsOneSegment() throws IOException {
  BaseDirectoryWrapper dir = newDirectory();
  dir.setCheckIndexOnClose(false);
  byte id[] = StringHelper.randomId();
  Codec codec = Codec.getDefault();
  SegmentInfos sis = new SegmentInfos();
  SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_6_0_0, "_0", 1, false, Codec.getDefault(),
      Collections.<String, String>emptyMap(), id, Collections.<String, String>emptyMap(), null);
  info.setFiles(Collections.<String>emptySet());
  codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
  SegmentCommitInfo commitInfo = new SegmentCommitInfo(info, 0, -1, -1, -1);
  sis.add(commitInfo);
  sis.commit(dir);
  sis = SegmentInfos.readLatestCommit(dir);
  assertEquals(Version.LUCENE_6_0_0, sis.getMinSegmentLuceneVersion());
  assertEquals(Version.LATEST, sis.getCommitLuceneVersion());
  dir.close();
}
2. Test2BPostingsBytes#test()
Project: lucene-solr
File: Test2BPostingsBytes.java
public void test() throws Exception {
  IndexWriterConfig defaultConfig = new IndexWriterConfig(null);
  Codec defaultCodec = defaultConfig.getCodec();
  if ((new IndexWriterConfig(null)).getCodec() instanceof CompressingCodec) {
    Pattern regex = Pattern.compile("maxDocsPerChunk=(\\d+), blockSize=(\\d+)");
    Matcher matcher = regex.matcher(defaultCodec.toString());
    assertTrue("Unexpected CompressingCodec toString() output: " + defaultCodec.toString(), matcher.find());
    int maxDocsPerChunk = Integer.parseInt(matcher.group(1));
    int blockSize = Integer.parseInt(matcher.group(2));
    int product = maxDocsPerChunk * blockSize;
    assumeTrue(defaultCodec.getName() + " maxDocsPerChunk (" + maxDocsPerChunk + ") * blockSize (" + blockSize
        + ") < 16 - this can trigger OOM with -Dtests.heapsize=30g", product >= 16);
  }
  BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BPostingsBytes1"));
  if (dir instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
  }
  IndexWriter w = new IndexWriter(dir,
      new IndexWriterConfig(new MockAnalyzer(random()))
          .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
          .setRAMBufferSizeMB(256.0)
          .setMergeScheduler(new ConcurrentMergeScheduler())
          .setMergePolicy(newLogMergePolicy(false, 10))
          .setOpenMode(IndexWriterConfig.OpenMode.CREATE)
          .setCodec(TestUtil.getDefaultCodec()));
  MergePolicy mp = w.getConfig().getMergePolicy();
  if (mp instanceof LogByteSizeMergePolicy) {
    // 1 petabyte:
    ((LogByteSizeMergePolicy) mp).setMaxMergeMB(1024 * 1024 * 1024);
  }
  Document doc = new Document();
  FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
  ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
  ft.setOmitNorms(true);
  MyTokenStream tokenStream = new MyTokenStream();
  Field field = new Field("field", tokenStream, ft);
  doc.add(field);
  final int numDocs = 1000;
  for (int i = 0; i < numDocs; i++) {
    if (i % 2 == 1) {
      // trick blockPF's little optimization
      tokenStream.n = 65536;
    } else {
      tokenStream.n = 65537;
    }
    w.addDocument(doc);
  }
  w.forceMerge(1);
  w.close();
  DirectoryReader oneThousand = DirectoryReader.open(dir);
  DirectoryReader subReaders[] = new DirectoryReader[1000];
  Arrays.fill(subReaders, oneThousand);
  BaseDirectoryWrapper dir2 = newFSDirectory(createTempDir("2BPostingsBytes2"));
  if (dir2 instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper) dir2).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
  }
  IndexWriter w2 = new IndexWriter(dir2, new IndexWriterConfig(null));
  TestUtil.addIndexesSlowly(w2, subReaders);
  w2.forceMerge(1);
  w2.close();
  oneThousand.close();
  DirectoryReader oneMillion = DirectoryReader.open(dir2);
  subReaders = new DirectoryReader[2000];
  Arrays.fill(subReaders, oneMillion);
  BaseDirectoryWrapper dir3 = newFSDirectory(createTempDir("2BPostingsBytes3"));
  if (dir3 instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper) dir3).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
  }
  IndexWriter w3 = new IndexWriter(dir3, new IndexWriterConfig(null));
  TestUtil.addIndexesSlowly(w3, subReaders);
  w3.forceMerge(1);
  w3.close();
  oneMillion.close();
  dir.close();
  dir2.close();
  dir3.close();
}
3. TestIndexWriterExceptions#testSimulatedCorruptIndex1()
Project: lucene-solr
File: TestIndexWriterExceptions.java
// Simulate a corrupt index by removing last byte of
// latest segments file and make sure we get an
// IOException trying to open the index:
public void testSimulatedCorruptIndex1() throws IOException {
  BaseDirectoryWrapper dir = newDirectory();
  // we are corrupting it!
  dir.setCheckIndexOnClose(false);
  IndexWriter writer = null;
  writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
  // add 100 documents
  for (int i = 0; i < 100; i++) {
    addDoc(writer);
  }
  // close
  writer.close();
  long gen = SegmentInfos.getLastCommitGeneration(dir);
  assertTrue("segment generation should be > 0 but got " + gen, gen > 0);
  String fileNameIn = SegmentInfos.getLastCommitSegmentsFileName(dir);
  String fileNameOut = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", 1 + gen);
  IndexInput in = dir.openInput(fileNameIn, newIOContext(random()));
  IndexOutput out = dir.createOutput(fileNameOut, newIOContext(random()));
  long length = in.length();
  for (int i = 0; i < length - 1; i++) {
    out.writeByte(in.readByte());
  }
  in.close();
  out.close();
  dir.deleteFile(fileNameIn);
  expectThrows(Exception.class, () -> {
    DirectoryReader.open(dir);
  });
  dir.close();
}
4. TestCodecHoldsOpenFiles#test()
Project: lucene-solr
File: TestCodecHoldsOpenFiles.java
public void test() throws Exception {
  BaseDirectoryWrapper d = newDirectory();
  d.setCheckIndexOnClose(false); // we nuke files, but verify the reader still works
  RandomIndexWriter w = new RandomIndexWriter(random(), d);
  int numDocs = atLeast(100);
  for (int i = 0; i < numDocs; i++) {
    Document doc = new Document();
    doc.add(newField("foo", "bar", TextField.TYPE_NOT_STORED));
    doc.add(new IntPoint("doc", i));
    doc.add(new IntPoint("doc2d", i, i));
    doc.add(new NumericDocValuesField("dv", i));
    w.addDocument(doc);
  }
  IndexReader r = w.getReader();
  w.commit();
  w.close();
  for (String name : d.listAll()) {
    d.deleteFile(name);
  }
  for (LeafReaderContext cxt : r.leaves()) {
    TestUtil.checkReader(cxt.reader());
  }
  r.close();
  d.close();
}
5. TestExternalCodecs#testPerFieldCodec()
Project: lucene-solr
File: TestExternalCodecs.java
// tests storing "id" and "field2" fields as pulsing codec,
// whose term sort is backwards unicode code point, and
// storing "field1" as a custom entirely-in-RAM codec
public void testPerFieldCodec() throws Exception {
  final int NUM_DOCS = atLeast(173);
  if (VERBOSE) {
    System.out.println("TEST: NUM_DOCS=" + NUM_DOCS);
  }
  BaseDirectoryWrapper dir = newDirectory();
  // we use a custom codec provider
  dir.setCheckIndexOnClose(false);
  IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
      .setCodec(new CustomPerFieldCodec())
      .setMergePolicy(newLogMergePolicy(3)));
  Document doc = new Document();
  // uses default codec:
  doc.add(newTextField("field1", "this field uses the standard codec as the test", Field.Store.NO));
  // uses memory codec:
  Field field2 = newTextField("field2", "this field uses the memory codec as the test", Field.Store.NO);
  doc.add(field2);
  Field idField = newStringField("id", "", Field.Store.NO);
  doc.add(idField);
  for (int i = 0; i < NUM_DOCS; i++) {
    idField.setStringValue("" + i);
    w.addDocument(doc);
    if ((i + 1) % 10 == 0) {
      w.commit();
    }
  }
  if (VERBOSE) {
    System.out.println("TEST: now delete id=77");
  }
  w.deleteDocuments(new Term("id", "77"));
  IndexReader r = DirectoryReader.open(w);
  assertEquals(NUM_DOCS - 1, r.numDocs());
  IndexSearcher s = newSearcher(r);
  assertEquals(NUM_DOCS - 1, s.search(new TermQuery(new Term("field1", "standard")), 1).totalHits);
  assertEquals(NUM_DOCS - 1, s.search(new TermQuery(new Term("field2", "memory")), 1).totalHits);
  r.close();
  if (VERBOSE) {
    System.out.println("\nTEST: now delete 2nd doc");
  }
  w.deleteDocuments(new Term("id", "44"));
  if (VERBOSE) {
    System.out.println("\nTEST: now force merge");
  }
  w.forceMerge(1);
  if (VERBOSE) {
    System.out.println("\nTEST: now open reader");
  }
  r = DirectoryReader.open(w);
  assertEquals(NUM_DOCS - 2, r.maxDoc());
  assertEquals(NUM_DOCS - 2, r.numDocs());
  s = newSearcher(r);
  assertEquals(NUM_DOCS - 2, s.search(new TermQuery(new Term("field1", "standard")), 1).totalHits);
  assertEquals(NUM_DOCS - 2, s.search(new TermQuery(new Term("field2", "memory")), 1).totalHits);
  assertEquals(1, s.search(new TermQuery(new Term("id", "76")), 1).totalHits);
  assertEquals(0, s.search(new TermQuery(new Term("id", "77")), 1).totalHits);
  assertEquals(0, s.search(new TermQuery(new Term("id", "44")), 1).totalHits);
  if (VERBOSE) {
    System.out.println("\nTEST: now close NRT reader");
  }
  r.close();
  w.close();
  dir.close();
}
6. TestSegmentInfos#testVersionsTwoSegments()
Project: lucene-solr
File: TestSegmentInfos.java
// LUCENE-5954
public void testVersionsTwoSegments() throws IOException {
  BaseDirectoryWrapper dir = newDirectory();
  dir.setCheckIndexOnClose(false);
  byte id[] = StringHelper.randomId();
  Codec codec = Codec.getDefault();
  SegmentInfos sis = new SegmentInfos();
  SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_6_0_0, "_0", 1, false, Codec.getDefault(),
      Collections.<String, String>emptyMap(), id, Collections.<String, String>emptyMap(), null);
  info.setFiles(Collections.<String>emptySet());
  codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
  SegmentCommitInfo commitInfo = new SegmentCommitInfo(info, 0, -1, -1, -1);
  sis.add(commitInfo);
  info = new SegmentInfo(dir, Version.LUCENE_6_0_0, "_1", 1, false, Codec.getDefault(),
      Collections.<String, String>emptyMap(), id, Collections.<String, String>emptyMap(), null);
  info.setFiles(Collections.<String>emptySet());
  codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
  commitInfo = new SegmentCommitInfo(info, 0, -1, -1, -1);
  sis.add(commitInfo);
  sis.commit(dir);
  sis = SegmentInfos.readLatestCommit(dir);
  assertEquals(Version.LUCENE_6_0_0, sis.getMinSegmentLuceneVersion());
  assertEquals(Version.LATEST, sis.getCommitLuceneVersion());
  dir.close();
}
7. TestSegmentInfos#testVersionsNoSegments()
Project: lucene-solr
File: TestSegmentInfos.java
// LUCENE-5954
public void testVersionsNoSegments() throws IOException {
  SegmentInfos sis = new SegmentInfos();
  BaseDirectoryWrapper dir = newDirectory();
  dir.setCheckIndexOnClose(false);
  sis.commit(dir);
  sis = SegmentInfos.readLatestCommit(dir);
  assertNull(sis.getMinSegmentLuceneVersion());
  assertEquals(Version.LATEST, sis.getCommitLuceneVersion());
  dir.close();
}
8. TestIndexWriterExceptions#testSimulatedCorruptIndex2()
Project: lucene-solr
File: TestIndexWriterExceptions.java
// Simulate a corrupt index by removing one of the
// files and make sure we get an IOException trying to
// open the index:
public void testSimulatedCorruptIndex2() throws IOException {
  BaseDirectoryWrapper dir = newDirectory();
  // we are corrupting it!
  dir.setCheckIndexOnClose(false);
  IndexWriter writer = null;
  writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
      .setMergePolicy(newLogMergePolicy(true))
      .setUseCompoundFile(true));
  MergePolicy lmp = writer.getConfig().getMergePolicy();
  // Force creation of CFS:
  lmp.setNoCFSRatio(1.0);
  lmp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY);
  // add 100 documents
  for (int i = 0; i < 100; i++) {
    addDoc(writer);
  }
  // close
  writer.close();
  long gen = SegmentInfos.getLastCommitGeneration(dir);
  assertTrue("segment generation should be > 0 but got " + gen, gen > 0);
  boolean corrupted = false;
  SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
  for (SegmentCommitInfo si : sis) {
    assertTrue(si.info.getUseCompoundFile());
    List<String> victims = new ArrayList<String>(si.info.files());
    Collections.shuffle(victims, random());
    dir.deleteFile(victims.get(0));
    corrupted = true;
    break;
  }
  assertTrue("failed to find cfs file to remove: ", corrupted);
  expectThrows(Exception.class, () -> {
    DirectoryReader.open(dir);
  });
  dir.close();
}
9. TestIndexWriterExceptions#testSegmentsChecksumError()
Project: lucene-solr
File: TestIndexWriterExceptions.java
// LUCENE-1044: Simulate checksum error in segments_N
public void testSegmentsChecksumError() throws IOException {
  BaseDirectoryWrapper dir = newDirectory();
  // we corrupt the index
  dir.setCheckIndexOnClose(false);
  IndexWriter writer = null;
  writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
  // add 100 documents
  for (int i = 0; i < 100; i++) {
    addDoc(writer);
  }
  // close
  writer.close();
  long gen = SegmentInfos.getLastCommitGeneration(dir);
  assertTrue("segment generation should be > 0 but got " + gen, gen > 0);
  final String segmentsFileName = SegmentInfos.getLastCommitSegmentsFileName(dir);
  IndexInput in = dir.openInput(segmentsFileName, newIOContext(random()));
  IndexOutput out = dir.createOutput(IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", 1 + gen), newIOContext(random()));
  out.copyBytes(in, in.length() - 1);
  byte b = in.readByte();
  out.writeByte((byte) (1 + b));
  out.close();
  in.close();
  expectThrows(CorruptIndexException.class, () -> {
    DirectoryReader.open(dir);
  });
  dir.close();
}
10. TestAddIndexes#testAddIndexMissingCodec()
Project: lucene-solr
File: TestAddIndexes.java
/* simple test that ensures we get expected exceptions */
public void testAddIndexMissingCodec() throws IOException {
  BaseDirectoryWrapper toAdd = newDirectory();
  // Disable checkIndex, else we get an exception because
  // of the unregistered codec:
  toAdd.setCheckIndexOnClose(false);
  {
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    conf.setCodec(new UnRegisteredCodec());
    IndexWriter w = new IndexWriter(toAdd, conf);
    Document doc = new Document();
    FieldType customType = new FieldType();
    customType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
    doc.add(newField("foo", "bar", customType));
    w.addDocument(doc);
    w.close();
  }
  {
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    conf.setCodec(TestUtil.alwaysPostingsFormat(new MemoryPostingsFormat()));
    IndexWriter w = new IndexWriter(dir, conf);
    expectThrows(IllegalArgumentException.class, () -> {
      w.addIndexes(toAdd);
    });
    w.close();
    IndexReader open = DirectoryReader.open(dir);
    assertEquals(0, open.numDocs());
    open.close();
    dir.close();
  }
  expectThrows(IllegalArgumentException.class, () -> {
    DirectoryReader.open(toAdd);
  });
  toAdd.close();
}
11. Test2BTerms#test2BTerms()
Project: lucene-solr
File: Test2BTerms.java
public void test2BTerms() throws IOException {
  System.out.println("Starting Test2B");
  final long TERM_COUNT = ((long) Integer.MAX_VALUE) + 100000000;
  final int TERMS_PER_DOC = TestUtil.nextInt(random(), 100000, 1000000);
  List<BytesRef> savedTerms = null;
  BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BTerms"));
  //MockDirectoryWrapper dir = newFSDirectory(new File("/p/lucene/indices/2bindex"));
  if (dir instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
  }
  // don't double-checkindex
  dir.setCheckIndexOnClose(false);
  if (true) {
    IndexWriter w = new IndexWriter(dir,
        new IndexWriterConfig(new MockAnalyzer(random()))
            .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
            .setRAMBufferSizeMB(256.0)
            .setMergeScheduler(new ConcurrentMergeScheduler())
            .setMergePolicy(newLogMergePolicy(false, 10))
            .setOpenMode(IndexWriterConfig.OpenMode.CREATE)
            .setCodec(TestUtil.getDefaultCodec()));
    MergePolicy mp = w.getConfig().getMergePolicy();
    if (mp instanceof LogByteSizeMergePolicy) {
      // 1 petabyte:
      ((LogByteSizeMergePolicy) mp).setMaxMergeMB(1024 * 1024 * 1024);
    }
    Document doc = new Document();
    final MyTokenStream ts = new MyTokenStream(random(), TERMS_PER_DOC);
    FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
    customType.setIndexOptions(IndexOptions.DOCS);
    customType.setOmitNorms(true);
    Field field = new Field("field", ts, customType);
    doc.add(field);
    //w.setInfoStream(System.out);
    final int numDocs = (int) (TERM_COUNT / TERMS_PER_DOC);
    System.out.println("TERMS_PER_DOC=" + TERMS_PER_DOC);
    System.out.println("numDocs=" + numDocs);
    for (int i = 0; i < numDocs; i++) {
      final long t0 = System.currentTimeMillis();
      w.addDocument(doc);
      System.out.println(i + " of " + numDocs + " " + (System.currentTimeMillis() - t0) + " msec");
    }
    savedTerms = ts.savedTerms;
    System.out.println("TEST: full merge");
    w.forceMerge(1);
    System.out.println("TEST: close writer");
    w.close();
  }
  System.out.println("TEST: open reader");
  final IndexReader r = DirectoryReader.open(dir);
  if (savedTerms == null) {
    savedTerms = findTerms(r);
  }
  final int numSavedTerms = savedTerms.size();
  final List<BytesRef> bigOrdTerms = new ArrayList<>(savedTerms.subList(numSavedTerms - 10, numSavedTerms));
  System.out.println("TEST: test big ord terms...");
  testSavedTerms(r, bigOrdTerms);
  System.out.println("TEST: test all saved terms...");
  testSavedTerms(r, savedTerms);
  r.close();
  System.out.println("TEST: now CheckIndex...");
  CheckIndex.Status status = TestUtil.checkIndex(dir);
  final long tc = status.segmentInfos.get(0).termIndexStatus.termCount;
  assertTrue("count " + tc + " is not > " + Integer.MAX_VALUE, tc > Integer.MAX_VALUE);
  dir.close();
  System.out.println("TEST: done!");
}
12. DataSplitterTest#assertSplit()
Project: lucene-solr
File: DataSplitterTest.java
public static void assertSplit(LeafReader originalIndex, double testRatio, double crossValidationRatio, String... fieldNames) throws Exception {
  BaseDirectoryWrapper trainingIndex = newDirectory();
  BaseDirectoryWrapper testIndex = newDirectory();
  BaseDirectoryWrapper crossValidationIndex = newDirectory();
  try {
    DatasetSplitter datasetSplitter = new DatasetSplitter(testRatio, crossValidationRatio);
    datasetSplitter.split(originalIndex, trainingIndex, testIndex, crossValidationIndex, new MockAnalyzer(random()), true, classFieldName, fieldNames);
    assertNotNull(trainingIndex);
    assertNotNull(testIndex);
    assertNotNull(crossValidationIndex);
    DirectoryReader trainingReader = DirectoryReader.open(trainingIndex);
    assertEquals((int) (originalIndex.maxDoc() * (1d - testRatio - crossValidationRatio)), trainingReader.maxDoc(), 20);
    DirectoryReader testReader = DirectoryReader.open(testIndex);
    assertEquals((int) (originalIndex.maxDoc() * testRatio), testReader.maxDoc(), 20);
    DirectoryReader cvReader = DirectoryReader.open(crossValidationIndex);
    assertEquals((int) (originalIndex.maxDoc() * crossValidationRatio), cvReader.maxDoc(), 20);
    trainingReader.close();
    testReader.close();
    cvReader.close();
    closeQuietly(trainingReader);
    closeQuietly(testReader);
    closeQuietly(cvReader);
  } finally {
    if (trainingIndex != null) {
      trainingIndex.close();
    }
    if (testIndex != null) {
      testIndex.close();
    }
    if (crossValidationIndex != null) {
      crossValidationIndex.close();
    }
  }
}
13. TestPagedBytes#testOverflow()
Project: lucene-solr
File: TestPagedBytes.java
// memory hole
@Ignore
public void testOverflow() throws IOException {
  BaseDirectoryWrapper dir = newFSDirectory(createTempDir("testOverflow"));
  if (dir instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
  }
  final int blockBits = TestUtil.nextInt(random(), 14, 28);
  final int blockSize = 1 << blockBits;
  byte[] arr = new byte[TestUtil.nextInt(random(), blockSize / 2, blockSize * 2)];
  for (int i = 0; i < arr.length; ++i) {
    arr[i] = (byte) i;
  }
  final long numBytes = (1L << 31) + TestUtil.nextInt(random(), 1, blockSize * 3);
  final PagedBytes p = new PagedBytes(blockBits);
  final IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT);
  for (long i = 0; i < numBytes; ) {
    assertEquals(i, out.getFilePointer());
    final int len = (int) Math.min(arr.length, numBytes - i);
    out.writeBytes(arr, len);
    i += len;
  }
  assertEquals(numBytes, out.getFilePointer());
  out.close();
  final IndexInput in = dir.openInput("foo", IOContext.DEFAULT);
  p.copy(in, numBytes);
  final PagedBytes.Reader reader = p.freeze(random().nextBoolean());
  for (long offset : new long[] { 0L, Integer.MAX_VALUE, numBytes - 1, TestUtil.nextLong(random(), 1, numBytes - 2) }) {
    BytesRef b = new BytesRef();
    reader.fillSlice(b, offset, 1);
    assertEquals(arr[(int) (offset % arr.length)], b.bytes[b.offset]);
  }
  in.close();
  dir.close();
}
14. Test2BPagedBytes#test()
Project: lucene-solr
File: Test2BPagedBytes.java
public void test() throws Exception { BaseDirectoryWrapper dir = newFSDirectory(createTempDir("test2BPagedBytes")); if (dir instanceof MockDirectoryWrapper) { ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER); } PagedBytes pb = new PagedBytes(15); IndexOutput dataOutput = dir.createOutput("foo", IOContext.DEFAULT); long netBytes = 0; long seed = random().nextLong(); long lastFP = 0; Random r2 = new Random(seed); while (netBytes < 1.1 * Integer.MAX_VALUE) { int numBytes = TestUtil.nextInt(r2, 1, 32768); byte[] bytes = new byte[numBytes]; r2.nextBytes(bytes); dataOutput.writeBytes(bytes, bytes.length); long fp = dataOutput.getFilePointer(); assert fp == lastFP + numBytes; lastFP = fp; netBytes += numBytes; } dataOutput.close(); IndexInput input = dir.openInput("foo", IOContext.DEFAULT); pb.copy(input, input.length()); input.close(); PagedBytes.Reader reader = pb.freeze(true); r2 = new Random(seed); netBytes = 0; while (netBytes < 1.1 * Integer.MAX_VALUE) { int numBytes = TestUtil.nextInt(r2, 1, 32768); byte[] bytes = new byte[numBytes]; r2.nextBytes(bytes); BytesRef expected = new BytesRef(bytes); BytesRef actual = new BytesRef(); reader.fillSlice(actual, netBytes, numBytes); assertEquals(expected, actual); netBytes += numBytes; } dir.close(); }
15. TestNeverDelete#testIndexing()
Project: lucene-solr
File: TestNeverDelete.java
public void testIndexing() throws Exception {
  final Path tmpDir = createTempDir("TestNeverDelete");
  final BaseDirectoryWrapper d = newFSDirectory(tmpDir);
  final RandomIndexWriter w = new RandomIndexWriter(random(), d,
      newIndexWriterConfig(new MockAnalyzer(random())).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
  w.w.getConfig().setMaxBufferedDocs(TestUtil.nextInt(random(), 5, 30));
  w.commit();
  Thread[] indexThreads = new Thread[random().nextInt(4)];
  final long stopTime = System.currentTimeMillis() + atLeast(1000);
  for (int x = 0; x < indexThreads.length; x++) {
    indexThreads[x] = new Thread() {
      @Override
      public void run() {
        try {
          int docCount = 0;
          while (System.currentTimeMillis() < stopTime) {
            final Document doc = new Document();
            doc.add(newStringField("dc", "" + docCount, Field.Store.YES));
            doc.add(newTextField("field", "here is some text", Field.Store.YES));
            w.addDocument(doc);
            if (docCount % 13 == 0) {
              w.commit();
            }
            docCount++;
          }
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
      }
    };
    indexThreads[x].setName("Thread " + x);
    indexThreads[x].start();
  }
  final Set<String> allFiles = new HashSet<>();
  DirectoryReader r = DirectoryReader.open(d);
  while (System.currentTimeMillis() < stopTime) {
    final IndexCommit ic = r.getIndexCommit();
    if (VERBOSE) {
      System.out.println("TEST: check files: " + ic.getFileNames());
    }
    allFiles.addAll(ic.getFileNames());
    // Make sure no old files were removed
    for (String fileName : allFiles) {
      assertTrue("file " + fileName + " does not exist", slowFileExists(d, fileName));
    }
    DirectoryReader r2 = DirectoryReader.openIfChanged(r);
    if (r2 != null) {
      r.close();
      r = r2;
    }
    Thread.sleep(1);
  }
  r.close();
  for (Thread t : indexThreads) {
    t.join();
  }
  w.close();
  d.close();
}
16. TestIndexWriterWithThreads#testRollbackAndCommitWithThreads()
Project: lucene-solr
File: TestIndexWriterWithThreads.java
// LUCENE-4147
public void testRollbackAndCommitWithThreads() throws Exception {
  final BaseDirectoryWrapper d = newDirectory();
  final int threadCount = TestUtil.nextInt(random(), 2, 6);
  final AtomicReference<IndexWriter> writerRef = new AtomicReference<>();
  MockAnalyzer analyzer = new MockAnalyzer(random());
  analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
  writerRef.set(new IndexWriter(d, newIndexWriterConfig(analyzer)));
  // Make initial commit so the test doesn't trip "corrupt first commit" when virus checker refuses to delete partial segments_N file:
  writerRef.get().commit();
  final LineFileDocs docs = new LineFileDocs(random());
  final Thread[] threads = new Thread[threadCount];
  final int iters = atLeast(100);
  final AtomicBoolean failed = new AtomicBoolean();
  final Lock rollbackLock = new ReentrantLock();
  final Lock commitLock = new ReentrantLock();
  for (int threadID = 0; threadID < threadCount; threadID++) {
    threads[threadID] = new Thread() {
      @Override
      public void run() {
        for (int iter = 0; iter < iters && !failed.get(); iter++) {
          //final int x = random().nextInt(5);
          final int x = random().nextInt(3);
          try {
            switch (x) {
              case 0:
                rollbackLock.lock();
                if (VERBOSE) {
                  System.out.println("\nTEST: " + Thread.currentThread().getName() + ": now rollback");
                }
                try {
                  writerRef.get().rollback();
                  if (VERBOSE) {
                    System.out.println("TEST: " + Thread.currentThread().getName() + ": rollback done; now open new writer");
                  }
                  writerRef.set(new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random()))));
                } finally {
                  rollbackLock.unlock();
                }
                break;
              case 1:
                commitLock.lock();
                if (VERBOSE) {
                  System.out.println("\nTEST: " + Thread.currentThread().getName() + ": now commit");
                }
                try {
                  if (random().nextBoolean()) {
                    writerRef.get().prepareCommit();
                  }
                  writerRef.get().commit();
                } catch (AlreadyClosedException | NullPointerException ace) {
                } finally {
                  commitLock.unlock();
                }
                break;
              case 2:
                if (VERBOSE) {
                  System.out.println("\nTEST: " + Thread.currentThread().getName() + ": now add");
                }
                try {
                  writerRef.get().addDocument(docs.nextDoc());
                } catch (AlreadyClosedException | NullPointerException | AssertionError ace) {
                }
                break;
            }
          } catch (Throwable t) {
            failed.set(true);
            throw new RuntimeException(t);
          }
        }
      }
    };
    threads[threadID].start();
  }
  for (int threadID = 0; threadID < threadCount; threadID++) {
    threads[threadID].join();
  }
  assertTrue(!failed.get());
  writerRef.get().close();
  d.close();
}
17. Test2BSortedDocValuesOrds#test2BOrds()
Project: lucene-solr
File: Test2BSortedDocValuesOrds.java
// indexes Integer.MAX_VALUE docs with a fixed binary field
public void test2BOrds() throws Exception {
  BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BOrds"));
  if (dir instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
  }
  IndexWriter w = new IndexWriter(dir,
      new IndexWriterConfig(new MockAnalyzer(random()))
          .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
          .setRAMBufferSizeMB(256.0)
          .setMergeScheduler(new ConcurrentMergeScheduler())
          .setMergePolicy(newLogMergePolicy(false, 10))
          .setOpenMode(IndexWriterConfig.OpenMode.CREATE)
          .setCodec(TestUtil.getDefaultCodec()));
  Document doc = new Document();
  byte bytes[] = new byte[4];
  BytesRef data = new BytesRef(bytes);
  SortedDocValuesField dvField = new SortedDocValuesField("dv", data);
  doc.add(dvField);
  for (int i = 0; i < IndexWriter.MAX_DOCS; i++) {
    bytes[0] = (byte) (i >> 24);
    bytes[1] = (byte) (i >> 16);
    bytes[2] = (byte) (i >> 8);
    bytes[3] = (byte) i;
    w.addDocument(doc);
    if (i % 100000 == 0) {
      System.out.println("indexed: " + i);
      System.out.flush();
    }
  }
  w.forceMerge(1);
  w.close();
  System.out.println("verifying...");
  System.out.flush();
  DirectoryReader r = DirectoryReader.open(dir);
  int counter = 0;
  for (LeafReaderContext context : r.leaves()) {
    LeafReader reader = context.reader();
    BytesRef scratch = new BytesRef();
    BinaryDocValues dv = reader.getSortedDocValues("dv");
    for (int i = 0; i < reader.maxDoc(); i++) {
      bytes[0] = (byte) (counter >> 24);
      bytes[1] = (byte) (counter >> 16);
      bytes[2] = (byte) (counter >> 8);
      bytes[3] = (byte) counter;
      counter++;
      final BytesRef term = dv.get(i);
      assertEquals(data, term);
    }
  }
  r.close();
  dir.close();
}
18. Test2BSortedDocValuesFixedSorted#testFixedSorted()
Project: lucene-solr
File: Test2BSortedDocValuesFixedSorted.java
// indexes Integer.MAX_VALUE docs with a fixed binary field
public void testFixedSorted() throws Exception {
  BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BFixedSorted"));
  if (dir instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
  }
  IndexWriter w = new IndexWriter(dir,
      new IndexWriterConfig(new MockAnalyzer(random()))
          .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
          .setRAMBufferSizeMB(256.0)
          .setMergeScheduler(new ConcurrentMergeScheduler())
          .setMergePolicy(newLogMergePolicy(false, 10))
          .setOpenMode(IndexWriterConfig.OpenMode.CREATE)
          .setCodec(TestUtil.getDefaultCodec()));
  Document doc = new Document();
  byte bytes[] = new byte[2];
  BytesRef data = new BytesRef(bytes);
  SortedDocValuesField dvField = new SortedDocValuesField("dv", data);
  doc.add(dvField);
  for (int i = 0; i < IndexWriter.MAX_DOCS; i++) {
    bytes[0] = (byte) (i >> 8);
    bytes[1] = (byte) i;
    w.addDocument(doc);
    if (i % 100000 == 0) {
      System.out.println("indexed: " + i);
      System.out.flush();
    }
  }
  w.forceMerge(1);
  w.close();
  System.out.println("verifying...");
  System.out.flush();
  DirectoryReader r = DirectoryReader.open(dir);
  int expectedValue = 0;
  for (LeafReaderContext context : r.leaves()) {
    LeafReader reader = context.reader();
    BinaryDocValues dv = reader.getSortedDocValues("dv");
    for (int i = 0; i < reader.maxDoc(); i++) {
      bytes[0] = (byte) (expectedValue >> 8);
      bytes[1] = (byte) expectedValue;
      final BytesRef term = dv.get(i);
      assertEquals(data, term);
      expectedValue++;
    }
  }
  r.close();
  dir.close();
}
19. Test2BPostings#test()
Project: lucene-solr
File: Test2BPostings.java
@Nightly
public void test() throws Exception {
  BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BPostings"));
  if (dir instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
  }
  IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()))
      .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
      .setRAMBufferSizeMB(256.0)
      .setMergeScheduler(new ConcurrentMergeScheduler())
      .setMergePolicy(newLogMergePolicy(false, 10))
      .setOpenMode(IndexWriterConfig.OpenMode.CREATE);
  IndexWriter w = new IndexWriter(dir, iwc);
  MergePolicy mp = w.getConfig().getMergePolicy();
  if (mp instanceof LogByteSizeMergePolicy) {
    // 1 petabyte:
    ((LogByteSizeMergePolicy) mp).setMaxMergeMB(1024 * 1024 * 1024);
  }
  Document doc = new Document();
  FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
  ft.setOmitNorms(true);
  ft.setIndexOptions(IndexOptions.DOCS);
  Field field = new Field("field", new MyTokenStream(), ft);
  doc.add(field);
  final int numDocs = (Integer.MAX_VALUE / 26) + 1;
  for (int i = 0; i < numDocs; i++) {
    w.addDocument(doc);
    if (VERBOSE && i % 100000 == 0) {
      System.out.println(i + " of " + numDocs + "...");
    }
  }
  w.forceMerge(1);
  w.close();
  dir.close();
}
20. Test2BPositions#test()
Project: lucene-solr
File: Test2BPositions.java
public void test() throws Exception { BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BPositions")); if (dir instanceof MockDirectoryWrapper) { ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER); } IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH).setRAMBufferSizeMB(256.0).setMergeScheduler(new ConcurrentMergeScheduler()).setMergePolicy(newLogMergePolicy(false, 10)).setOpenMode(IndexWriterConfig.OpenMode.CREATE).setCodec(TestUtil.getDefaultCodec())); MergePolicy mp = w.getConfig().getMergePolicy(); if (mp instanceof LogByteSizeMergePolicy) { // 1 petabyte: ((LogByteSizeMergePolicy) mp).setMaxMergeMB(1024 * 1024 * 1024); } Document doc = new Document(); FieldType ft = new FieldType(TextField.TYPE_NOT_STORED); ft.setOmitNorms(true); Field field = new Field("field", new MyTokenStream(), ft); doc.add(field); final int numDocs = (Integer.MAX_VALUE / 26) + 1; for (int i = 0; i < numDocs; i++) { w.addDocument(doc); if (VERBOSE && i % 100000 == 0) { System.out.println(i + " of " + numDocs + "..."); } } w.forceMerge(1); w.close(); dir.close(); }
21. Test2BNumericDocValues#testNumerics()
Project: lucene-solr
File: Test2BNumericDocValues.java
// indexes IndexWriter.MAX_DOCS docs with an increasing dv field
public void testNumerics() throws Exception {
  BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BNumerics"));
  if (dir instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
  }
  IndexWriter w = new IndexWriter(dir,
      new IndexWriterConfig(new MockAnalyzer(random()))
          .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
          .setRAMBufferSizeMB(256.0)
          .setMergeScheduler(new ConcurrentMergeScheduler())
          .setMergePolicy(newLogMergePolicy(false, 10))
          .setOpenMode(IndexWriterConfig.OpenMode.CREATE)
          .setCodec(TestUtil.getDefaultCodec()));
  Document doc = new Document();
  NumericDocValuesField dvField = new NumericDocValuesField("dv", 0);
  doc.add(dvField);
  for (int i = 0; i < IndexWriter.MAX_DOCS; i++) {
    dvField.setLongValue(i);
    w.addDocument(doc);
    if (i % 100000 == 0) {
      System.out.println("indexed: " + i);
      System.out.flush();
    }
  }
  w.forceMerge(1);
  w.close();
  System.out.println("verifying...");
  System.out.flush();
  DirectoryReader r = DirectoryReader.open(dir);
  long expectedValue = 0;
  for (LeafReaderContext context : r.leaves()) {
    LeafReader reader = context.reader();
    NumericDocValues dv = reader.getNumericDocValues("dv");
    for (int i = 0; i < reader.maxDoc(); i++) {
      assertEquals(expectedValue, dv.get(i));
      expectedValue++;
    }
  }
  r.close();
  dir.close();
}
22. Test2BBinaryDocValues#testVariableBinary()
Project: lucene-solr
File: Test2BBinaryDocValues.java
// indexes IndexWriter.MAX_DOCS docs with a variable binary field
public void testVariableBinary() throws Exception {
  BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BVariableBinary"));
  if (dir instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
  }
  IndexWriter w = new IndexWriter(dir,
      new IndexWriterConfig(new MockAnalyzer(random()))
          .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
          .setRAMBufferSizeMB(256.0)
          .setMergeScheduler(new ConcurrentMergeScheduler())
          .setMergePolicy(newLogMergePolicy(false, 10))
          .setOpenMode(IndexWriterConfig.OpenMode.CREATE)
          .setCodec(TestUtil.getDefaultCodec()));
  Document doc = new Document();
  byte bytes[] = new byte[4];
  ByteArrayDataOutput encoder = new ByteArrayDataOutput(bytes);
  BytesRef data = new BytesRef(bytes);
  BinaryDocValuesField dvField = new BinaryDocValuesField("dv", data);
  doc.add(dvField);
  for (int i = 0; i < IndexWriter.MAX_DOCS; i++) {
    encoder.reset(bytes);
    // 1, 2, or 3 bytes
    encoder.writeVInt(i % 65535);
    data.length = encoder.getPosition();
    w.addDocument(doc);
    if (i % 100000 == 0) {
      System.out.println("indexed: " + i);
      System.out.flush();
    }
  }
  w.forceMerge(1);
  w.close();
  System.out.println("verifying...");
  System.out.flush();
  DirectoryReader r = DirectoryReader.open(dir);
  int expectedValue = 0;
  ByteArrayDataInput input = new ByteArrayDataInput();
  for (LeafReaderContext context : r.leaves()) {
    LeafReader reader = context.reader();
    BinaryDocValues dv = reader.getBinaryDocValues("dv");
    for (int i = 0; i < reader.maxDoc(); i++) {
      final BytesRef term = dv.get(i);
      input.reset(term.bytes, term.offset, term.length);
      assertEquals(expectedValue % 65535, input.readVInt());
      assertTrue(input.eof());
      expectedValue++;
    }
  }
  r.close();
  dir.close();
}
23. Test2BBinaryDocValues#testFixedBinary()
Project: lucene-solr
File: Test2BBinaryDocValues.java
// indexes IndexWriter.MAX_DOCS docs with a fixed binary field
public void testFixedBinary() throws Exception {
  BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BFixedBinary"));
  if (dir instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
  }
  IndexWriter w = new IndexWriter(dir,
      new IndexWriterConfig(new MockAnalyzer(random()))
          .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
          .setRAMBufferSizeMB(256.0)
          .setMergeScheduler(new ConcurrentMergeScheduler())
          .setMergePolicy(newLogMergePolicy(false, 10))
          .setOpenMode(IndexWriterConfig.OpenMode.CREATE)
          .setCodec(TestUtil.getDefaultCodec()));
  Document doc = new Document();
  byte bytes[] = new byte[4];
  BytesRef data = new BytesRef(bytes);
  BinaryDocValuesField dvField = new BinaryDocValuesField("dv", data);
  doc.add(dvField);
  for (int i = 0; i < IndexWriter.MAX_DOCS; i++) {
    bytes[0] = (byte) (i >> 24);
    bytes[1] = (byte) (i >> 16);
    bytes[2] = (byte) (i >> 8);
    bytes[3] = (byte) i;
    w.addDocument(doc);
    if (i % 100000 == 0) {
      System.out.println("indexed: " + i);
      System.out.flush();
    }
  }
  w.forceMerge(1);
  w.close();
  System.out.println("verifying...");
  System.out.flush();
  DirectoryReader r = DirectoryReader.open(dir);
  int expectedValue = 0;
  for (LeafReaderContext context : r.leaves()) {
    LeafReader reader = context.reader();
    BinaryDocValues dv = reader.getBinaryDocValues("dv");
    for (int i = 0; i < reader.maxDoc(); i++) {
      bytes[0] = (byte) (expectedValue >> 24);
      bytes[1] = (byte) (expectedValue >> 16);
      bytes[2] = (byte) (expectedValue >> 8);
      bytes[3] = (byte) expectedValue;
      final BytesRef term = dv.get(i);
      assertEquals(data, term);
      expectedValue++;
    }
  }
  r.close();
  dir.close();
}
24. TestSwappedIndexFiles#swapOneFile()
Project: lucene-solr
File: TestSwappedIndexFiles.java
private void swapOneFile(Directory dir1, Directory dir2, String victim) throws IOException {
  if (VERBOSE) {
    System.out.println("TEST: swap file " + victim);
  }
  try (BaseDirectoryWrapper dirCopy = newDirectory()) {
    dirCopy.setCheckIndexOnClose(false);
    // Copy all files from dir1 to dirCopy, except victim which we copy from dir2:
    for (String name : dir1.listAll()) {
      if (name.equals(victim) == false) {
        dirCopy.copyFrom(dir1, name, name, IOContext.DEFAULT);
      } else {
        dirCopy.copyFrom(dir2, name, name, IOContext.DEFAULT);
      }
      dirCopy.sync(Collections.singleton(name));
    }
    try {
      // NOTE: we .close so that if the test fails (truncation not detected) we don't also get all these confusing errors about open files:
      DirectoryReader.open(dirCopy).close();
      fail("wrong file " + victim + " not detected");
    } catch (CorruptIndexException | EOFException | IndexFormatTooOldException e) {
      // expected
    }
    // CheckIndex should also fail:
    try {
      TestUtil.checkIndex(dirCopy, true, true, null);
      fail("wrong file " + victim + " not detected");
    } catch (CorruptIndexException | EOFException | IndexFormatTooOldException e) {
      // expected
    }
  }
}
25. TestAllFilesDetectTruncation#truncateOneFile()
Project: lucene-solr
File: TestAllFilesDetectTruncation.java
private void truncateOneFile(Directory dir, String victim) throws IOException {
  try (BaseDirectoryWrapper dirCopy = newDirectory()) {
    dirCopy.setCheckIndexOnClose(false);
    long victimLength = dir.fileLength(victim);
    int lostBytes = TestUtil.nextInt(random(), 1, (int) Math.min(100, victimLength));
    assert victimLength > 0;
    if (VERBOSE) {
      System.out.println("TEST: now truncate file " + victim + " by removing " + lostBytes + " of " + victimLength + " bytes");
    }
    for (String name : dir.listAll()) {
      if (name.equals(victim) == false) {
        dirCopy.copyFrom(dir, name, name, IOContext.DEFAULT);
      } else {
        try (IndexOutput out = dirCopy.createOutput(name, IOContext.DEFAULT);
             IndexInput in = dir.openInput(name, IOContext.DEFAULT)) {
          out.copyBytes(in, victimLength - lostBytes);
        }
      }
      dirCopy.sync(Collections.singleton(name));
    }
    try {
      // NOTE: we .close so that if the test fails (truncation not detected) we don't also get all these confusing errors about open files:
      DirectoryReader.open(dirCopy).close();
      fail("truncation not detected after removing " + lostBytes + " bytes out of " + victimLength + " for file " + victim);
    } catch (CorruptIndexException | EOFException e) {
      // expected
    }
    // CheckIndex should also fail:
    try {
      TestUtil.checkIndex(dirCopy, true, true, null);
      fail("truncation not detected after removing " + lostBytes + " bytes out of " + victimLength + " for file " + victim);
    } catch (CorruptIndexException | EOFException e) {
      // expected
    }
  }
}
26. TestAllFilesCheckIndexHeader#checkOneFile()
Project: lucene-solr
File: TestAllFilesCheckIndexHeader.java
private void checkOneFile(Directory dir, String victim) throws IOException {
  try (BaseDirectoryWrapper dirCopy = newDirectory()) {
    dirCopy.setCheckIndexOnClose(false);
    long victimLength = dir.fileLength(victim);
    int wrongBytes = TestUtil.nextInt(random(), 1, (int) Math.min(100, victimLength));
    assert victimLength > 0;
    if (VERBOSE) {
      System.out.println("TEST: now break file " + victim + " by randomizing first " + wrongBytes + " of " + victimLength);
    }
    for (String name : dir.listAll()) {
      if (name.equals(victim) == false) {
        dirCopy.copyFrom(dir, name, name, IOContext.DEFAULT);
      } else {
        // loop until the random bytes actually differ from the file's first bytes;
        // nearly all of the time this will only require one iteration!
        while (true) {
          try (IndexOutput out = dirCopy.createOutput(name, IOContext.DEFAULT);
               IndexInput in = dir.openInput(name, IOContext.DEFAULT)) {
            // keeps same file length, but replaces the first wrongBytes with random bytes:
            byte[] bytes = new byte[wrongBytes];
            random().nextBytes(bytes);
            out.writeBytes(bytes, 0, bytes.length);
            byte[] bytes2 = new byte[wrongBytes];
            in.readBytes(bytes2, 0, bytes2.length);
            if (Arrays.equals(bytes, bytes2) == false) {
              // We successfully randomly generated bytes that differ from the bytes in the file:
              out.copyBytes(in, victimLength - wrongBytes);
              break;
            }
          }
        }
      }
      dirCopy.sync(Collections.singleton(name));
    }
    try {
      // NOTE: we .close so that if the test fails (truncation not detected) we don't also get all these confusing errors about open files:
      DirectoryReader.open(dirCopy).close();
      fail("wrong bytes not detected after randomizing first " + wrongBytes + " bytes out of " + victimLength + " for file " + victim);
    } catch (CorruptIndexException | EOFException | IndexFormatTooOldException e) {
      // expected
    }
    // CheckIndex should also fail:
    try {
      TestUtil.checkIndex(dirCopy, true, true, null);
      fail("wrong bytes not detected after randomizing first " + wrongBytes + " bytes out of " + victimLength + " for file " + victim);
    } catch (CorruptIndexException | EOFException | IndexFormatTooOldException e) {
      // expected
    }
  }
}
27. StoreTests#testCanOpenIndex()
Project: elasticsearch
File: StoreTests.java
public void testCanOpenIndex() throws IOException {
  final ShardId shardId = new ShardId("index", "_na_", 1);
  IndexWriterConfig iwc = newIndexWriterConfig();
  Path tempDir = createTempDir();
  final BaseDirectoryWrapper dir = newFSDirectory(tempDir);
  assertFalse(Store.canOpenIndex(logger, tempDir, shardId));
  IndexWriter writer = new IndexWriter(dir, iwc);
  Document doc = new Document();
  doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
  writer.addDocument(doc);
  writer.commit();
  writer.close();
  assertTrue(Store.canOpenIndex(logger, tempDir, shardId));
  DirectoryService directoryService = new DirectoryService(shardId, INDEX_SETTINGS) {
    @Override
    public long throttleTimeInNanos() {
      return 0;
    }

    @Override
    public Directory newDirectory() throws IOException {
      return dir;
    }
  };
  Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId));
  store.markStoreCorrupted(new CorruptIndexException("foo", "bar"));
  assertFalse(Store.canOpenIndex(logger, tempDir, shardId));
  store.close();
}
28. ShardUtilsTests#testExtractShardId()
Project: elasticsearch
File: ShardUtilsTests.java
public void testExtractShardId() throws IOException {
  BaseDirectoryWrapper dir = newDirectory();
  IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
  writer.commit();
  ShardId id = new ShardId("foo", "_na_", random().nextInt());
  try (DirectoryReader reader = DirectoryReader.open(writer)) {
    ElasticsearchDirectoryReader wrap = ElasticsearchDirectoryReader.wrap(reader, id);
    assertEquals(id, ShardUtils.extractShardId(wrap));
  }
  final int numDocs = 1 + random().nextInt(5);
  for (int i = 0; i < numDocs; i++) {
    Document d = new Document();
    d.add(newField("name", "foobar", StringField.TYPE_STORED));
    writer.addDocument(d);
    if (random().nextBoolean()) {
      writer.commit();
    }
  }
  try (DirectoryReader reader = DirectoryReader.open(writer)) {
    ElasticsearchDirectoryReader wrap = ElasticsearchDirectoryReader.wrap(reader, id);
    assertEquals(id, ShardUtils.extractShardId(wrap));
    CompositeReaderContext context = wrap.getContext();
    for (LeafReaderContext leaf : context.leaves()) {
      assertEquals(id, ShardUtils.extractShardId(leaf.reader()));
    }
  }
  IOUtils.close(writer, dir);
}