org.apache.activemq.artemis.core.io.nio.NIOSequentialFileFactory

Here are examples of the Java API class org.apache.activemq.artemis.core.io.nio.NIOSequentialFileFactory, taken from open source projects.
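Before the project examples, here is a minimal sketch of the basic usage pattern: construct the factory against a directory, create a SequentialFile, and write a small buffer through it. The directory /tmp/nio-example and the file name example.bin are illustrative placeholders, not taken from any of the projects below.

import java.io.File;
import java.nio.ByteBuffer;

import org.apache.activemq.artemis.core.io.SequentialFile;
import org.apache.activemq.artemis.core.io.nio.NIOSequentialFileFactory;

public class NIOSequentialFileFactorySketch {

    public static void main(String[] args) throws Exception {
        // Placeholder directory; maxIO of 1 is enough for a single, non-concurrent writer
        File dir = new File("/tmp/nio-example");
        dir.mkdirs();
        NIOSequentialFileFactory factory = new NIOSequentialFileFactory(dir, 1);
        SequentialFile file = factory.createSequentialFile("example.bin");
        file.open();
        ByteBuffer buffer = ByteBuffer.allocate(10);
        buffer.put(new byte[10]);
        // The second argument requests a synced write, as in the NIOSequentialFileFactoryTest example below
        file.writeDirect(buffer, true);
        file.close();
    }
}

The examples below also show the (File, IOCriticalErrorListener, int) constructor used by the journal tools and the buffered variant used by JournalStorageManager for the message journal.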

1. JournalCrashTest#printJournal()

Project: activemq-artemis
File: JournalCrashTest.java
/**
 * Loads the journal from the test's journal directory so its records can be inspected.
 *
 * @throws Exception
 */
private void printJournal() throws Exception {
    NIOSequentialFileFactory factory = new NIOSequentialFileFactory(new File(getJournalDir()), 100);
    JournalImpl journal = new JournalImpl(ActiveMQDefaultConfiguration.getDefaultJournalFileSize(), 2, 2, 0, 0, factory, "activemq-data", "amq", 100);
    ArrayList<RecordInfo> records = new ArrayList<>();
    ArrayList<PreparedTransactionInfo> transactions = new ArrayList<>();
    journal.start();
    journal.load(records, transactions, null);
    //      System.out.println("===============================================");
    //      System.out.println("Journal records at the end:");
    //
    //      for (RecordInfo record : records)
    //      {
    //         System.out.println(record.id + ", update = " + record.isUpdate);
    //      }
    journal.stop();
}

2. EncodeJournal#exportJournal()

Project: activemq-artemis
File: EncodeJournal.java
public static void exportJournal(final String directory, final String journalPrefix, final String journalSuffix, final int minFiles, final int fileSize, final PrintStream out) throws Exception {
    NIOSequentialFileFactory nio = new NIOSequentialFileFactory(new File(directory), null, 1);
    JournalImpl journal = new JournalImpl(fileSize, minFiles, minFiles, 0, 0, nio, journalPrefix, journalSuffix, 1);
    List<JournalFile> files = journal.orderFiles();
    for (JournalFile file : files) {
        out.println("#File," + file);
        exportJournalFile(out, nio, file);
    }
}

3. CompactJournal#compactJournal()

Project: activemq-artemis
File: CompactJournal.java
void compactJournal(final File directory, final String journalPrefix, final String journalSuffix, final int minFiles, final int fileSize, final IOCriticalErrorListener listener) throws Exception {
    NIOSequentialFileFactory nio = new NIOSequentialFileFactory(directory, listener, 1);
    JournalImpl journal = new JournalImpl(fileSize, minFiles, minFiles, 0, 0, nio, journalPrefix, journalSuffix, 1);
    journal.start();
    journal.loadInternalOnly();
    journal.compact();
    journal.stop();
}

4. BatchIDGeneratorUnitTest#testSequence()

Project: activemq-artemis
File: BatchIDGeneratorUnitTest.java
@Test
public void testSequence() throws Exception {
    NIOSequentialFileFactory factory = new NIOSequentialFileFactory(new File(getTestDir()), 1);
    Journal journal = new JournalImpl(10 * 1024, 2, 2, 0, 0, factory, "activemq-bindings", "bindings", 1);
    journal.start();
    journal.load(new ArrayList<RecordInfo>(), new ArrayList<PreparedTransactionInfo>(), null);
    BatchingIDGenerator batch = new BatchingIDGenerator(0, 1000, getJournalStorageManager(journal));
    long id1 = batch.generateID();
    long id2 = batch.generateID();
    Assert.assertTrue(id2 > id1);
    journal.stop();
    batch = new BatchingIDGenerator(0, 1000, getJournalStorageManager(journal));
    loadIDs(journal, batch);
    long id3 = batch.generateID();
    Assert.assertEquals(1000, id3);
    long id4 = batch.generateID();
    Assert.assertTrue(id4 > id3 && id4 < 2000);
    batch.persistCurrentID();
    journal.stop();
    batch = new BatchingIDGenerator(0, 1000, getJournalStorageManager(journal));
    loadIDs(journal, batch);
    long id5 = batch.generateID();
    Assert.assertTrue(id5 > id4 && id5 < 2000);
    long lastId = id5;
    boolean close = true;
    for (int i = 0; i < 100000; i++) {
        if (i % 1000 == 0) {
            // alternate between clean closes and simulated crashes
            if (close) {
                batch.persistCurrentID();
            }
            close = !close;
            journal.stop();
            batch = new BatchingIDGenerator(0, 1000, getJournalStorageManager(journal));
            loadIDs(journal, batch);
        }
        long id = batch.generateID();
        Assert.assertTrue(id > lastId);
        lastId = id;
    }
    batch.persistCurrentID();
    journal.stop();
    batch = new BatchingIDGenerator(0, 1000, getJournalStorageManager(journal));
    loadIDs(journal, batch);
    lastId = batch.getCurrentID();
    journal.stop();
    batch = new BatchingIDGenerator(0, 1000, getJournalStorageManager(journal));
    loadIDs(journal, batch);
    Assert.assertEquals("No Ids were generated, so the currentID was supposed to stay the same", lastId, batch.getCurrentID());
    journal.stop();
}

5. NIOSequentialFileFactoryTest#testInterrupts()

Project: activemq-artemis
File: NIOSequentialFileFactoryTest.java
@Test
public void testInterrupts() throws Throwable {
    final EncodingSupport fakeEncoding = new EncodingSupport() {

        @Override
        public int getEncodeSize() {
            return 10;
        }

        @Override
        public void encode(ActiveMQBuffer buffer) {
            buffer.writeBytes(new byte[10]);
        }

        @Override
        public void decode(ActiveMQBuffer buffer) {
        }
    };
    final AtomicInteger calls = new AtomicInteger(0);
    final NIOSequentialFileFactory factory = new NIOSequentialFileFactory(new File(getTestDir()), new IOCriticalErrorListener() {

        @Override
        public void onIOException(Throwable code, String message, SequentialFile file) {
            new Exception("shutdown").printStackTrace();
            calls.incrementAndGet();
        }
    }, 1);
    Thread threadOpen = new Thread() {

        @Override
        public void run() {
            try {
                Thread.currentThread().interrupt();
                SequentialFile file = factory.createSequentialFile("file.txt");
                file.open();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    };
    threadOpen.start();
    threadOpen.join();
    Thread threadClose = new Thread() {

        @Override
        public void run() {
            try {
                SequentialFile file = factory.createSequentialFile("file.txt");
                file.open();
                file.write(fakeEncoding, true);
                Thread.currentThread().interrupt();
                file.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    };
    threadClose.start();
    threadClose.join();
    Thread threadWrite = new Thread() {

        @Override
        public void run() {
            try {
                SequentialFile file = factory.createSequentialFile("file.txt");
                file.open();
                Thread.currentThread().interrupt();
                file.write(fakeEncoding, true);
                file.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    };
    threadWrite.start();
    threadWrite.join();
    Thread threadFill = new Thread() {

        @Override
        public void run() {
            try {
                SequentialFile file = factory.createSequentialFile("file.txt");
                file.open();
                Thread.currentThread().interrupt();
                file.fill(1024);
                file.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    };
    threadFill.start();
    threadFill.join();
    Thread threadWriteDirect = new Thread() {

        @Override
        public void run() {
            try {
                SequentialFile file = factory.createSequentialFile("file.txt");
                file.open();
                ByteBuffer buffer = ByteBuffer.allocate(10);
                buffer.put(new byte[10]);
                Thread.currentThread().interrupt();
                file.writeDirect(buffer, true);
                file.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    };
    threadWriteDirect.start();
    threadWriteDirect.join();
    Thread threadRead = new Thread() {

        @Override
        public void run() {
            try {
                SequentialFile file = factory.createSequentialFile("file.txt");
                file.open();
                file.write(fakeEncoding, true);
                file.position(0);
                ByteBuffer readBytes = ByteBuffer.allocate(fakeEncoding.getEncodeSize());
                Thread.currentThread().interrupt();
                file.read(readBytes);
                file.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    };
    threadRead.start();
    threadRead.join();
    // An interrupt exception shouldn't issue a shutdown
    Assert.assertEquals(0, calls.get());
}

6. JournalStorageManager#init()

Project: activemq-artemis
File: JournalStorageManager.java
@Override
protected void init(Configuration config, IOCriticalErrorListener criticalErrorListener) {
    if (config.getJournalType() != JournalType.NIO && config.getJournalType() != JournalType.ASYNCIO) {
        throw ActiveMQMessageBundle.BUNDLE.invalidJournal();
    }
    // The bindings journal always uses an NIO factory, regardless of the configured journal type
    SequentialFileFactory bindingsFF = new NIOSequentialFileFactory(config.getBindingsLocation(), criticalErrorListener, config.getJournalMaxIO_NIO());
    Journal localBindings = new JournalImpl(1024 * 1024, 2, config.getJournalCompactMinFiles(), config.getJournalPoolFiles(), config.getJournalCompactPercentage(), bindingsFF, "activemq-bindings", "bindings", 1);
    bindingsJournal = localBindings;
    originalBindingsJournal = localBindings;
    if (config.getJournalType() == JournalType.ASYNCIO) {
        ActiveMQServerLogger.LOGGER.journalUseAIO();
        journalFF = new AIOSequentialFileFactory(config.getJournalLocation(), config.getJournalBufferSize_AIO(), config.getJournalBufferTimeout_AIO(), config.getJournalMaxIO_AIO(), config.isLogJournalWriteRate(), criticalErrorListener);
    } else if (config.getJournalType() == JournalType.NIO) {
        ActiveMQServerLogger.LOGGER.journalUseNIO();
        journalFF = new NIOSequentialFileFactory(config.getJournalLocation(), true, config.getJournalBufferSize_NIO(), config.getJournalBufferTimeout_NIO(), config.getJournalMaxIO_NIO(), config.isLogJournalWriteRate(), criticalErrorListener);
    } else {
        throw ActiveMQMessageBundle.BUNDLE.invalidJournalType2(config.getJournalType());
    }
    Journal localMessage = new JournalImpl(config.getJournalFileSize(), config.getJournalMinFiles(), config.getJournalPoolFiles(), config.getJournalCompactMinFiles(), config.getJournalCompactPercentage(), journalFF, "activemq-data", "amq", config.getJournalType() == JournalType.ASYNCIO ? config.getJournalMaxIO_AIO() : config.getJournalMaxIO_NIO());
    messageJournal = localMessage;
    originalMessageJournal = localMessage;
    largeMessagesDirectory = config.getLargeMessagesDirectory();
    // Large messages get a dedicated NIO factory
    largeMessagesFactory = new NIOSequentialFileFactory(config.getLargeMessagesLocation(), false, criticalErrorListener, 1);
    perfBlastPages = config.getJournalPerfBlastPages();
    if (config.getPageMaxConcurrentIO() != 1) {
        pageMaxConcurrentIO = new Semaphore(config.getPageMaxConcurrentIO());
    } else {
        pageMaxConcurrentIO = null;
    }
}

7. DecodeJournal#importJournal()

Project: activemq-artemis
File: DecodeJournal.java
public static void importJournal(final String directory, final String journalPrefix, final String journalSuffix, final int minFiles, final int fileSize, final Reader reader) throws Exception {
    File journalDir = new File(directory);
    if (!journalDir.exists()) {
        if (!journalDir.mkdirs())
            System.err.println("Could not create directory " + directory);
    }
    NIOSequentialFileFactory nio = new NIOSequentialFileFactory(new File(directory), null, 1);
    JournalImpl journal = new JournalImpl(fileSize, minFiles, minFiles, 0, 0, nio, journalPrefix, journalSuffix, 1);
    if (journal.orderFiles().size() != 0) {
        throw new IllegalStateException("Import needs to create a brand new journal");
    }
    journal.start();
    // The journal is empty, as we checked already. Calling load just to initialize the internal data
    journal.loadInternalOnly();
    BufferedReader buffReader = new BufferedReader(reader);
    String line;
    HashMap<Long, AtomicInteger> txCounters = new HashMap<>();
    long lineNumber = 0;
    Map<Long, JournalRecord> journalRecords = journal.getRecords();
    while ((line = buffReader.readLine()) != null) {
        lineNumber++;
        String[] splitLine = line.split(",");
        if (splitLine[0].equals("#File")) {
            txCounters.clear();
            continue;
        }
        Properties lineProperties = parseLine(splitLine);
        String operation = null;
        try {
            operation = lineProperties.getProperty("operation");
            if (operation.equals("AddRecord")) {
                RecordInfo info = parseRecord(lineProperties);
                journal.appendAddRecord(info.id, info.userRecordType, info.data, false);
            } else if (operation.equals("AddRecordTX")) {
                long txID = parseLong("txID", lineProperties);
                AtomicInteger counter = getCounter(txID, txCounters);
                counter.incrementAndGet();
                RecordInfo info = parseRecord(lineProperties);
                journal.appendAddRecordTransactional(txID, info.id, info.userRecordType, info.data);
            } else if (operation.equals("AddRecordTX")) {
                long txID = parseLong("txID", lineProperties);
                AtomicInteger counter = getCounter(txID, txCounters);
                counter.incrementAndGet();
                RecordInfo info = parseRecord(lineProperties);
                journal.appendAddRecordTransactional(txID, info.id, info.userRecordType, info.data);
            } else if (operation.equals("UpdateTX")) {
                long txID = parseLong("txID", lineProperties);
                AtomicInteger counter = getCounter(txID, txCounters);
                counter.incrementAndGet();
                RecordInfo info = parseRecord(lineProperties);
                journal.appendUpdateRecordTransactional(txID, info.id, info.userRecordType, info.data);
            } else if (operation.equals("Update")) {
                RecordInfo info = parseRecord(lineProperties);
                journal.appendUpdateRecord(info.id, info.userRecordType, info.data, false);
            } else if (operation.equals("DeleteRecord")) {
                long id = parseLong("id", lineProperties);
                // If not found it means the append/update records were reclaimed already
                if (journalRecords.get(id) != null) {
                    journal.appendDeleteRecord(id, false);
                }
            } else if (operation.equals("DeleteRecordTX")) {
                long txID = parseLong("txID", lineProperties);
                long id = parseLong("id", lineProperties);
                AtomicInteger counter = getCounter(txID, txCounters);
                counter.incrementAndGet();
                // If not found it means the append/update records were reclaimed already
                if (journalRecords.get(id) != null) {
                    journal.appendDeleteRecordTransactional(txID, id);
                }
            } else if (operation.equals("Prepare")) {
                long txID = parseLong("txID", lineProperties);
                int numberOfRecords = parseInt("numberOfRecords", lineProperties);
                AtomicInteger counter = getCounter(txID, txCounters);
                byte[] data = parseEncoding("extraData", lineProperties);
                if (counter.get() == numberOfRecords) {
                    journal.appendPrepareRecord(txID, data, false);
                } else {
                    System.err.println("Transaction " + txID + " at line " + lineNumber + " is incomplete. The prepare record expected " + numberOfRecords + " while the import only had " + counter);
                }
            } else if (operation.equals("Commit")) {
                long txID = parseLong("txID", lineProperties);
                int numberOfRecords = parseInt("numberOfRecords", lineProperties);
                AtomicInteger counter = getCounter(txID, txCounters);
                if (counter.get() == numberOfRecords) {
                    journal.appendCommitRecord(txID, false);
                } else {
                    System.err.println("Transaction " + txID + " at line " + lineNumber + " is incomplete. The commit record expected " + numberOfRecords + " while the import only had " + counter);
                }
            } else if (operation.equals("Rollback")) {
                long txID = parseLong("txID", lineProperties);
                journal.appendRollbackRecord(txID, false);
            } else {
                System.err.println("Invalid operation " + operation + " at line " + lineNumber);
            }
        } catch (Exception ex) {
            System.err.println("Error at line " + lineNumber + ", operation=" + operation + " msg = " + ex.getMessage());
        }
    }
    journal.stop();
}