Here are examples of the Java API org.springframework.shell.core.CommandResult taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.
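Before the examples, a minimal usage sketch (an assumption based on the Spring Shell 1.x API that the examples below rely on: Bootstrap exposes a JLineShellComponent whose executeCommand returns a CommandResult; the "help" command is only an illustrative choice):
import org.springframework.shell.Bootstrap;
import org.springframework.shell.core.CommandResult;
import org.springframework.shell.core.JLineShellComponent;

public class CommandResultSketch {
    public static void main(String[] args) throws Exception {
        // Boot a shell and run a single command programmatically.
        Bootstrap bootstrap = new Bootstrap();
        JLineShellComponent shell = bootstrap.getJLineShellComponent();
        CommandResult cr = shell.executeCommand("help");
        if (cr.isSuccess()) {
            // getResult() may be null for commands that do not return a value.
            if (cr.getResult() != null) {
                System.out.println(cr.getResult().toString());
            }
        } else if (cr.getException() != null) {
            // On failure, the exception (when present) describes what went wrong.
            System.err.println(cr.getException().getMessage());
        }
    }
}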
53 Examples
18
Source : TestTempViewCommand.java
with Apache License 2.0
from apache
@Test
public void testDelete() {
CommandResult cr = getShell().executeCommand(String.format("temp delete --view %s", tableName));
assertTrue(cr.getResult().toString().endsWith("successfully!"));
// after delete, we can no longer access the table.
assertThrows(HoodieException.class, () -> HoodieCLI.getTempViewProvider().runQuery("select * from " + tableName));
}
18
Source : TestTableCommand.java
with Apache License 2.0
from apache
/**
* Method to create a table for connect or desc.
*/
private boolean prepareTable() {
CommandResult cr = getShell().executeCommand("create --path " + tablePath + " --tableName " + tableName);
return cr.isSuccess();
}
17
Source : TestUtilsCommand.java
with Apache License 2.0
from apache
/**
* Test case for load null class.
*/
@Test
public void testLoadClassNull() {
String name = "";
CommandResult cr = getShell().executeCommand(String.format("utils loadClass --class %s", name));
assertAll("Command runs success", () -> assertTrue(cr.isSuccess()), () -> assertNotNull(cr.getResult().toString()), () -> assertEquals("Class to be loaded can not be null!", cr.getResult().toString()));
}
17
Source : TestUtilsCommand.java
with Apache License 2.0
from apache
/**
* Test case for class not found.
*/
@Test
public void testLoadClassNotFound() {
String name = "test.class.NotFound";
CommandResult cr = getShell().executeCommand(String.format("utils loadClass --class %s", name));
assertAll("Command runs success", () -> assertTrue(cr.isSuccess()), () -> assertNotNull(cr.getResult().toString()), () -> assertEquals(cr.getResult().toString(), String.format("Class %s not found!", name)));
}
17
Source : TestUtilsCommand.java
with Apache License 2.0
from apache
/**
* Test case for success load class.
*/
@Test
public void testLoadClass() {
String name = HoodieTable.class.getName();
CommandResult cr = getShell().executeCommand(String.format("utils loadClass --class %s", name));
assertAll("Command runs success", () -> assertTrue(cr.isSuccess()), () -> assertNotNull(cr.getResult().toString()), () -> assertTrue(cr.getResult().toString().startsWith("file:")));
}
17
Source : TestTempViewCommand.java
with Apache License 2.0
from apache
@Test
public void testQuery() {
CommandResult cr = getShell().executeCommand(String.format("temp query --sql 'select * from %s'", tableName));
assertEquals(TempViewCommand.QUERY_SUCCESS, cr.getResult().toString());
}
17
Source : TestTempViewCommand.java
with Apache License 2.0
from apache
@Test
public void testShowAll() {
CommandResult cr = getShell().executeCommand("temps show");
assertEquals(TempViewCommand.SHOW_SUCCESS, cr.getResult().toString());
}
17
Source : TestTempViewCommand.java
with Apache License 2.0
from apache
@Test
public void testQueryWithException() {
CommandResult cr = getShell().executeCommand(String.format("temp query --sql 'select * from %s'", "table_1"));
assertEquals(TempViewCommand.QUERY_FAIL, cr.getResult().toString());
}
17
Source : TestSparkEnvCommand.java
with Apache License 2.0
from apache
/**
* Test Cases for set and get spark env.
*/
@Test
public void testSetAndGetSparkEnv() {
// First, be empty
CommandResult cr = getShell().executeCommand("show envs all");
String nullResult = HoodiePrintHelper.print(new String[] { "key", "value" }, new String[0][2]);
nullResult = removeNonWordAndStripSpace(nullResult);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(nullResult, got);
// Set SPARK_HOME
cr = getShell().executeCommand("set --conf SPARK_HOME=/usr/etc/spark");
assertTrue(cr.isSuccess());
// Get
cr = getShell().executeCommand("show env --key SPARK_HOME");
String result = HoodiePrintHelper.print(new String[] { "key", "value" }, new String[][] { new String[] { "SPARK_HOME", "/usr/etc/spark" } });
result = removeNonWordAndStripSpace(result);
got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(result, got);
}
16
Source : ITTestBootstrapCommand.java
with Apache License 2.0
from apache
/**
* Test case for command 'bootstrap'.
*/
@Test
public void testBootstrapRunCommand() throws IOException {
// test bootstrap run command
String cmdStr = String.format("bootstrap run --targetPath %s --tableName %s --tableType %s --srcPath %s --rowKeyField %s --partitionPathField %s --sparkMaster %s", tablePath, tableName, HoodieTableType.COPY_ON_WRITE.name(), sourcePath, RECORD_KEY_FIELD, PARTITION_FIELD, "local");
CommandResult cr = getShell().executeCommand(cmdStr);
assertTrue(cr.isSuccess());
// Connect & check Hudi table exist
new TableCommand().connect(tablePath, TimelineLayoutVersion.VERSION_1, false, 2000, 300000, 7);
metaClient = HoodieCLI.getTableMetaClient();
assertEquals(1, metaClient.getActiveTimeline().getCommitsTimeline().countInstants(), "Should have 1 commit.");
// test "bootstrap index showpartitions"
CommandResult crForIndexedPartitions = getShell().executeCommand("bootstrap index showpartitions");
assertTrue(crForIndexedPartitions.isSuccess());
String[] header = new String[] { "Indexed partitions" };
String[][] rows = new String[partitions.size()][1];
for (int i = 0; i < partitions.size(); i++) {
rows[i][0] = PARTITION_FIELD + "=" + partitions.get(i);
}
String expect = HoodiePrintHelper.print(header, rows);
expect = removeNonWordAndStripSpace(expect);
String got = removeNonWordAndStripSpace(crForIndexedPartitions.getResult().toString());
assertEquals(expect, got);
// test "bootstrap index showMapping"
CommandResult crForIndexedMapping = getShell().executeCommand("bootstrap index showmapping");
assertTrue(crForIndexedMapping.isSuccess());
CommandResult crForIndexedMappingWithPartition = getShell().executeCommand(String.format("bootstrap index showmapping --partitionPath %s=%s", PARTITION_FIELD, partitions.get(0)));
assertTrue(crForIndexedMappingWithPartition.isSuccess());
}
16
Source : TestTableCommand.java
with Apache License 2.0
from apache
/**
* Test Case for desc table.
*/
@Test
public void testDescTable() {
// Prepare table
assertTrue(prepareTable());
// Test desc table
CommandResult cr = getShell().executeCommand("desc");
assertTrue(cr.isSuccess());
// check table's basePath metaPath and type
assertTrue(cr.getResult().toString().contains(tablePath));
assertTrue(cr.getResult().toString().contains(metaPath));
assertTrue(cr.getResult().toString().contains("COPY_ON_WRITE"));
}
15
Source : ContextCommandsTest.java
with Apache License 2.0
from avast
public void exists() throws Exception {
Bootstrap bootstrap = new Bootstrap();
JLineShellComponent shell = bootstrap.getJLineShellComponent();
CommandResult cr = shell.executeCommand("exists /analytics");
assertEquals(true, cr.isSuccess());
}
15
Source : TestSavepointsCommand.java
with Apache License 2.0
from apache
/**
* Test case of command 'savepoints show'.
*/
@Test
public void testShowSavepoints() throws IOException {
// generate four savepoints
for (int i = 100; i < 104; i++) {
String instantTime = String.valueOf(i);
HoodieTestDataGenerator.createSavepointFile(tablePath, instantTime, jsc.hadoopConfiguration());
}
CommandResult cr = getShell().executeCommand("savepoints show");
assertTrue(cr.isSuccess());
// generate expect result
String[][] rows = Arrays.asList("100", "101", "102", "103").stream().sorted(Comparator.reverseOrder()).map(instant -> new String[] { instant }).toArray(String[][]::new);
String expected = HoodiePrintHelper.print(new String[] { HoodieTableHeaderFields.HEADER_SAVEPOINT_TIME }, rows);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
}
15
Source : TestCommitsCommand.java
with Apache License 2.0
from apache
/**
* Test case of 'commits show' command.
*/
@Test
public void testShowCommits() throws Exception {
Map<String, Integer[]> data = generateData();
CommandResult cr = getShell().executeCommand("commits show");
assertTrue(cr.isSuccess());
String expected = generateExpectData(1, data);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
}
15
Source : TestArchivedCommitsCommand.java
with Apache License 2.0
from apache
/**
* Test for command: show archived commit stats.
*/
@Test
public void testShowArchivedCommits() {
CommandResult cr = getShell().executeCommand("show archived commit stats");
assertTrue(cr.isSuccess());
TableHeader header = new TableHeader().addTableHeaderField("action").addTableHeaderField("instant").addTableHeaderField("partition").addTableHeaderField("file_id").addTableHeaderField("prev_instant").addTableHeaderField("num_writes").addTableHeaderField("num_inserts").addTableHeaderField("num_deletes").addTableHeaderField("num_update_writes").addTableHeaderField("total_log_files").addTableHeaderField("total_log_blocks").addTableHeaderField("total_corrupt_log_blocks").addTableHeaderField("total_rollback_blocks").addTableHeaderField("total_log_records").addTableHeaderField("total_updated_records_compacted").addTableHeaderField("total_write_bytes").addTableHeaderField("total_write_errors");
// Generate expected data
final List<Comparable[]> rows = new ArrayList<>();
for (int i = 100; i < 104; i++) {
String instant = String.valueOf(i);
for (int j = 0; j < 3; j++) {
Comparable[] defaultComp = new Comparable[] { "commit", instant, HoodieTestCommitMetadataGenerator.DEFAULT_SECOND_PARTITION_PATH, HoodieTestCommitMetadataGenerator.DEFAULT_FILEID, HoodieTestCommitMetadataGenerator.DEFAULT_PRE_COMMIT, HoodieTestCommitMetadataGenerator.DEFAULT_NUM_WRITES, HoodieTestCommitMetadataGenerator.DEFAULT_OTHER_VALUE, HoodieTestCommitMetadataGenerator.DEFAULT_OTHER_VALUE, HoodieTestCommitMetadataGenerator.DEFAULT_NUM_UPDATE_WRITES, HoodieTestCommitMetadataGenerator.DEFAULT_NULL_VALUE, HoodieTestCommitMetadataGenerator.DEFAULT_TOTAL_LOG_BLOCKS, HoodieTestCommitMetadataGenerator.DEFAULT_OTHER_VALUE, HoodieTestCommitMetadataGenerator.DEFAULT_OTHER_VALUE, HoodieTestCommitMetadataGenerator.DEFAULT_TOTAL_LOG_RECORDS, HoodieTestCommitMetadataGenerator.DEFAULT_OTHER_VALUE, HoodieTestCommitMetadataGenerator.DEFAULT_TOTAL_WRITE_BYTES, HoodieTestCommitMetadataGenerator.DEFAULT_OTHER_VALUE };
rows.add(defaultComp.clone());
defaultComp[2] = HoodieTestCommitMetadataGenerator.DEFAULT_FIRST_PARTITION_PATH;
rows.add(defaultComp);
}
}
String expectedResult = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows);
expectedResult = removeNonWordAndStripSpace(expectedResult);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expectedResult, got);
}
14
Source : ITTestSavepointsCommand.java
with Apache License 2.0
from apache
/**
* Test case of command 'savepoint create'.
*/
@Test
public void testSavepoint() {
// generate four savepoints
for (int i = 100; i < 104; i++) {
String instantTime = String.valueOf(i);
HoodieTestDataGenerator.createCommitFile(tablePath, instantTime, jsc.hadoopConfiguration());
}
String savepoint = "102";
CommandResult cr = getShell().executeCommand(String.format("savepoint create --commit %s --sparkMaster %s", savepoint, "local"));
assertAll("Command run failed", () -> assertTrue(cr.isSuccess()), () -> assertEquals(String.format("The commit \"%s\" has been savepointed.", savepoint), cr.getResult().toString()));
// there is 1 savepoint instant
HoodieActiveTimeline timeline = HoodieCLI.getTableMetaClient().getActiveTimeline();
assertEquals(1, timeline.getSavePointTimeline().countInstants());
}
14
Source : ITTestSavepointsCommand.java
with Apache License 2.0
from apache
/**
* Test case of command 'savepoint rollback'.
*/
@Test
public void testRollbackToSavepoint() throws IOException {
// generate four savepoints
for (int i = 100; i < 104; i++) {
String instantTime = String.valueOf(i);
HoodieTestDataGenerator.createCommitFile(tablePath, instantTime, jsc.hadoopConfiguration());
}
// generate one savepoint
String savepoint = "102";
HoodieTestDataGenerator.createSavepointFile(tablePath, savepoint, jsc.hadoopConfiguration());
CommandResult cr = getShell().executeCommand(String.format("savepoint rollback --savepoint %s --sparkMaster %s", savepoint, "local"));
assertAll("Command run failed", () -> assertTrue(cr.isSuccess()), () -> assertEquals(String.format("Savepoint \"%s\" rolled back", savepoint), cr.getResult().toString()));
// there is 1 restore instant
HoodieActiveTimeline timeline = HoodieCLI.getTableMetaClient().getActiveTimeline();
assertEquals(1, timeline.getRestoreTimeline().countInstants());
// 103 instant had rollback
assertFalse(timeline.getCommitTimeline().containsInstant(new HoodieInstant(HoodieInstant.State.COMPLETED, "commit", "103")));
}
14
Source : ITTestCommitsCommand.java
with Apache License 2.0
from apache
/**
* Test case of 'commit rollback' command.
*/
@Test
public void testRollbackCommit() throws Exception {
// Create some commits files and parquet files
Map<String, String> partitionAndFileId = new HashMap<String, String>() {
{
put(DEFAULT_FIRST_PARTITION_PATH, "file-1");
put(DEFAULT_SECOND_PARTITION_PATH, "file-2");
put(DEFAULT_THIRD_PARTITION_PATH, "file-3");
}
};
final String rollbackCommit = "102";
HoodieTestTable.of(metaClient).withPartitionMetaFiles(DEFAULT_PARTITION_PATHS).addCommit("100").withBaseFilesInPartitions(partitionAndFileId).addCommit("101").withBaseFilesInPartitions(partitionAndFileId).addCommit(rollbackCommit).withBaseFilesInPartitions(partitionAndFileId);
CommandResult cr = getShell().executeCommand(String.format("commit rollback --commit %s --sparkMaster %s --sparkMemory %s", rollbackCommit, "local", "4G"));
assertTrue(cr.isSuccess());
metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient());
HoodieActiveTimeline rollbackTimeline = new RollbacksCommand.RollbackTimeline(metaClient);
assertEquals(1, rollbackTimeline.getRollbackTimeline().countInstants(), "There should have 1 rollback instant.");
HoodieActiveTimeline timeline = metaClient.reloadActiveTimeline();
assertEquals(2, timeline.getCommitsTimeline().countInstants(), "There should have 2 instants.");
}
14
Source : TestSavepointsCommand.java
with Apache License 2.0
from apache
/**
* Test case of command 'savepoints refresh'.
*/
@Test
public void testRefreshMetaClient() throws IOException {
HoodieTimeline timeline = HoodieCLI.getTableMetaClient().getActiveTimeline().getSavePointTimeline().filterCompletedInstants();
assertEquals(0, timeline.countInstants(), "There should have no instant at first");
// generate four savepoints
for (int i = 100; i < 104; i++) {
String instantTime = String.valueOf(i);
HoodieTestDataGenerator.createSavepointFile(tablePath, instantTime, jsc.hadoopConfiguration());
}
// Before refresh, no instant
timeline = HoodieCLI.getTableMetaClient().getActiveTimeline().getSavePointTimeline().filterCompletedInstants();
assertEquals(0, timeline.countInstants(), "there should have no instant");
CommandResult cr = getShell().executeCommand("savepoints refresh");
assertTrue(cr.isSuccess());
timeline = HoodieCLI.getTableMetaClient().getActiveTimeline().getSavePointTimeline().filterCompletedInstants();
// After refresh, there are 4 instants
assertEquals(4, timeline.countInstants(), "there should have 4 instants");
}
14
Source : TestRepairsCommand.java
with Apache License 2.0
from apache
/**
* Test case for 'repair corrupted clean files'.
*/
@Test
public void testRemoveCorruptedPendingCleanAction() throws IOException {
HoodieCLI.conf = jsc.hadoopConfiguration();
Configuration conf = HoodieCLI.conf;
metaClient = HoodieCLI.getTableMetaClient();
// Create four requested files
for (int i = 100; i < 104; i++) {
String timestamp = String.valueOf(i);
// Write corrupted requested Clean File
HoodieTestCommitMetadataGenerator.createEmptyCleanRequestedFile(tablePath, timestamp, conf);
}
// reload meta client
metaClient = HoodieTableMetaClient.reload(metaClient);
// first, there are four instants
assertEquals(4, metaClient.getActiveTimeline().filterInflightsAndRequested().getInstants().count());
CommandResult cr = getShell().executeCommand("repair corrupted clean files");
assertTrue(cr.isSuccess());
// reload meta client
metaClient = HoodieTableMetaClient.reload(metaClient);
assertEquals(0, metaClient.getActiveTimeline().filterInflightsAndRequested().getInstants().count());
}
13
Source : ITTestSavepointsCommand.java
with Apache License 2.0
from apache
/**
* Test case of command 'savepoint delete'.
*/
@Test
public void testDeleteSavepoint() throws IOException {
// generate four savepoints
for (int i = 100; i < 104; i++) {
String instantTime = String.valueOf(i);
HoodieTestDataGenerator.createCommitFile(tablePath, instantTime, jsc.hadoopConfiguration());
}
// generate two savepoint
String savepoint1 = "100";
String savepoint2 = "102";
HoodieTestDataGenerator.createSavepointFile(tablePath, savepoint1, jsc.hadoopConfiguration());
HoodieTestDataGenerator.createSavepointFile(tablePath, savepoint2, jsc.hadoopConfiguration());
HoodieActiveTimeline timeline = HoodieCLI.getTableMetaClient().getActiveTimeline();
assertEquals(2, timeline.getSavePointTimeline().countInstants(), "There should 2 instants.");
CommandResult cr = getShell().executeCommand(String.format("savepoint delete --commit %s --sparkMaster %s", savepoint1, "local"));
assertAll("Command run failed", () -> assertTrue(cr.isSuccess()), () -> assertEquals(String.format("Savepoint \"%s\" deleted.", savepoint1), cr.getResult().toString()));
// reload timeline
timeline = timeline.reload();
assertEquals(1, timeline.getSavePointTimeline().countInstants(), "There should 1 instants.");
// after delete, 100 instant should not exist.
assertFalse(timeline.containsInstant(new HoodieInstant(false, HoodieTimeline.SAVEPOINT_ACTION, savepoint1)));
}
13
Source : TestHoodieLogFileCommand.java
with Apache License 2.0
from apache
/**
* Test case for 'show logfile records'.
*/
@Test
public void testShowLogFileRecords() throws IOException, URISyntaxException {
CommandResult cr = getShell().executeCommand("show logfile records --logFilePathPattern " + partitionPath + "/*");
assertTrue(cr.isSuccess());
// construct expect result, get 10 records.
List<IndexedRecord> records = SchemaTestUtil.generateTestRecords(0, 10);
String[][] rows = records.stream().map(r -> new String[] { r.toString() }).toArray(String[][]::new);
String expected = HoodiePrintHelper.print(new String[] { HoodieTableHeaderFields.HEADER_RECORDS }, rows);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
}
12
Source : TestRepairsCommand.java
with Apache License 2.0
from apache
/**
* Test case for dry run 'repair addpartitionmeta'.
*/
@Test
public void testAddPartitionMetaWithDryRun() throws IOException {
// create commit instant
Files.createFile(Paths.get(tablePath + "/.hoodie/100.commit"));
// create partition path
String partition1 = tablePath + File.separator + HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH;
String partition2 = tablePath + File.separator + HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH;
String partition3 = tablePath + File.separator + HoodieTestDataGenerator.DEFAULT_THIRD_PARTITION_PATH;
assertTrue(fs.mkdirs(new Path(partition1)));
assertTrue(fs.mkdirs(new Path(partition2)));
assertTrue(fs.mkdirs(new Path(partition3)));
// default is dry run.
CommandResult cr = getShell().executeCommand("repair addpartitionmeta");
assertTrue(cr.isSuccess());
// expected all 'No'.
String[][] rows = FSUtils.getAllPartitionFoldersThreeLevelsDown(fs, tablePath).stream().map(partition -> new String[] { partition, "No", "None" }).toArray(String[][]::new);
String expected = HoodiePrintHelper.print(new String[] { HoodieTableHeaderFields.HEADER_PARTITION_PATH, HoodieTableHeaderFields.HEADER_METADATA_PRESENT, HoodieTableHeaderFields.HEADER_ACTION }, rows);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
}
12
Source : TestArchivedCommitsCommand.java
with Apache License 2.0
from apache
/**
* Test for command: show archived commits.
*/
@Test
public void testShowCommits() throws Exception {
CommandResult cr = getShell().executeCommand("show archived commits");
assertTrue(cr.isSuccess());
final List<Comparable[]> rows = new ArrayList<>();
// Test default skipMetadata and limit 10
TableHeader header = new TableHeader().addTableHeaderField("CommitTime").addTableHeaderField("CommitType");
for (int i = 100; i < 103; i++) {
String instant = String.valueOf(i);
Comparable[] result = new Comparable[] { instant, "commit" };
rows.add(result);
rows.add(result);
rows.add(result);
}
rows.add(new Comparable[] { "103", "commit" });
String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, 10, false, rows);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
// Test with Metadata and no limit
cr = getShell().executeCommand("show archived commits --skipMetadata false --limit -1");
assertTrue(cr.isSuccess());
rows.clear();
for (int i = 100; i < 104; i++) {
String instant = String.valueOf(i);
// Since HoodiePrintHelper order data by default, need to order commitMetadata
HoodieCommitMetadata metadata = HoodieTestCommitMetadataGenerator.generateCommitMetadata(tablePath, instant);
Comparable[] result = new Comparable[] { instant, "commit", HoodieTestCommitUtilities.convertAndOrderCommitMetadata(metadata) };
rows.add(result);
rows.add(result);
rows.add(result);
}
header = header.addTableHeaderField("CommitDetails");
expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows);
expected = removeNonWordAndStripSpace(expected);
got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
}
11
Source : ITTestHDFSParquetImportCommand.java
with Apache License 2.0
from apache
/**
* Test case for 'hdfsparquetimport' with insert.
*/
@Test
public void testConvertWithInsert() throws IOException {
String command = String.format("hdfsparquetimport --srcPath %s --targetPath %s --tableName %s " + "--tableType %s --rowKeyField %s" + " --partitionPathField %s --parallelism %s " + "--schemaFilePath %s --format %s --sparkMemory %s --retry %s --sparkMaster %s", sourcePath.toString(), targetPath.toString(), tableName, HoodieTableType.COPY_ON_WRITE.name(), "_row_key", "timestamp", "1", schemaFile, "parquet", "2G", "1", "local");
CommandResult cr = getShell().executeCommand(command);
assertAll("Command run success", () -> assertTrue(cr.isSuccess()), () -> assertEquals("Table imported to hoodie format", cr.getResult().toString()));
// Check hudi table exist
String metaPath = targetPath + File.separator + HoodieTableMetaClient.METAFOLDER_NAME;
assertTrue(Files.exists(Paths.get(metaPath)), "Hoodie table not exist.");
// Load meta data
new TableCommand().connect(targetPath.toString(), TimelineLayoutVersion.VERSION_1, false, 2000, 300000, 7);
metaClient = HoodieCLI.getTableMetaClient();
assertEquals(1, metaClient.getActiveTimeline().getCommitsTimeline().countInstants(), "Should only 1 commit.");
verifyResultData(insertData);
}
11
Source : TestTableCommand.java
with Apache License 2.0
from apache
private void testRefreshCommand(String command) throws IOException {
// clean table metadata
FileSystem fs = FileSystem.get(jsc.hadoopConfiguration());
fs.delete(new Path(tablePath + File.separator + HoodieTableMetaClient.METAFOLDER_NAME), true);
// Create table
assertTrue(prepareTable());
HoodieTimeline timeline = HoodieCLI.getTableMetaClient().getActiveTimeline().getCommitTimeline().filterCompletedInstants();
assertEquals(0, timeline.countInstants(), "There should have no instant at first");
// generate four savepoints
for (int i = 100; i < 104; i++) {
String instantTime = String.valueOf(i);
HoodieTestDataGenerator.createCommitFile(tablePath, instantTime, jsc.hadoopConfiguration());
}
// Before refresh, no instant
timeline = HoodieCLI.getTableMetaClient().getActiveTimeline().getCommitTimeline().filterCompletedInstants();
assertEquals(0, timeline.countInstants(), "there should have no instant");
CommandResult cr = getShell().executeCommand(command);
assertTrue(cr.isSuccess());
timeline = HoodieCLI.getTableMetaClient().getActiveTimeline().getCommitTimeline().filterCompletedInstants();
// After refresh, there are 4 instants
assertEquals(4, timeline.countInstants(), "there should have 4 instants");
}
11
Source : TestTableCommand.java
with Apache License 2.0
from apache
/**
* Test Case for connect table.
*/
@Test
public void testConnectTable() {
// Prepare table
assertTrue(prepareTable());
// Test connect with specified values
CommandResult cr = getShell().executeCommand("connect --path " + tablePath + " --initialCheckIntervalMs 3000 " + "--maxWaitIntervalMs 40000 --maxCheckIntervalMs 8");
assertTrue(cr.isSuccess());
// Check specified values
ConsistencyGuardConfig conf = HoodieCLI.consistencyGuardConfig;
assertEquals(3000, conf.getInitialConsistencyCheckIntervalMs());
assertEquals(40000, conf.getMaxConsistencyCheckIntervalMs());
assertEquals(8, conf.getMaxConsistencyChecks());
// Check default values
assertFalse(conf.isConsistencyCheckEnabled());
assertEquals(new Integer(1), HoodieCLI.layoutVersion.getVersion());
}
10
Source : TestCommitsCommand.java
with Apache License 2.0
from apache
/**
* Test case of 'commits sync' command.
*/
@Test
public void testSyncCommits() throws Exception {
Map<String, Integer[]> data = generateData();
String tableName2 = "test_table2";
String tablePath2 = basePath + File.separator + tableName2;
HoodieTestUtils.init(jsc.hadoopConfiguration(), tablePath2, getTableType(), tableName2);
data.remove("102");
for (Map.Entry<String, Integer[]> entry : data.entrySet()) {
String key = entry.getKey();
Integer[] value = entry.getValue();
HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath2, key, jsc.hadoopConfiguration(), Option.of(value[0]), Option.of(value[1]));
}
CommandResult cr = getShell().executeCommand(String.format("commits sync --path %s", tablePath2));
assertTrue(cr.isSuccess());
String expected = String.format("Load sync state between %s and %s", tableName, tableName2);
assertEquals(expected, cr.getResult().toString());
}
9
Source : TestTableCommand.java
with Apache License 2.0
from apache
/**
* Test Cases for create table with specified values.
*/
@Test
public void testCreateWithSpecifiedValues() {
// Test create with specified values
CommandResult cr = getShell().executeCommand("create --path " + tablePath + " --tableName " + tableName + " --tableType MERGE_ON_READ --archiveLogFolder archive");
assertTrue(cr.isSuccess());
assertEquals("Metadata for table " + tableName + " loaded", cr.getResult().toString());
HoodieTableMetaClient client = HoodieCLI.getTableMetaClient();
assertEquals(metaPath + File.separator + "archive", client.getArchivePath());
assertEquals(tablePath, client.getBasePath());
assertEquals(metaPath, client.getMetaPath());
assertEquals(HoodieTableType.MERGE_ON_READ, client.getTableType());
}
9
Source : TestRepairsCommand.java
with Apache License 2.0
from apache
/**
* Test case for real run 'repair addpartitionmeta'.
*/
@Test
public void testAddPartitionMetaWithRealRun() throws IOException {
// create commit instant
Files.createFile(Paths.get(tablePath + "/.hoodie/100.commit"));
// create partition path
String partition1 = tablePath + File.separator + HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH;
String partition2 = tablePath + File.separator + HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH;
String partition3 = tablePath + File.separator + HoodieTestDataGenerator.DEFAULT_THIRD_PARTITION_PATH;
assertTrue(fs.mkdirs(new Path(partition1)));
assertTrue(fs.mkdirs(new Path(partition2)));
assertTrue(fs.mkdirs(new Path(partition3)));
CommandResult cr = getShell().executeCommand("repair addpartitionmeta --dryrun false");
assertTrue(cr.isSuccess());
List<String> paths = FSUtils.getAllPartitionFoldersThreeLevelsDown(fs, tablePath);
// after dry run, the action will be 'Repaired'
String[][] rows = paths.stream().map(partition -> new String[] { partition, "No", "Repaired" }).toArray(String[][]::new);
String expected = HoodiePrintHelper.print(new String[] { HoodieTableHeaderFields.HEADER_PARTITION_PATH, HoodieTableHeaderFields.HEADER_METADATA_PRESENT, HoodieTableHeaderFields.HEADER_ACTION }, rows);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
cr = getShell().executeCommand("repair addpartitionmeta");
// after real run, Metadata is present now.
rows = paths.stream().map(partition -> new String[] { partition, "Yes", "None" }).toArray(String[][]::new);
expected = HoodiePrintHelper.print(new String[] { HoodieTableHeaderFields.HEADER_PARTITION_PATH, HoodieTableHeaderFields.HEADER_METADATA_PRESENT, HoodieTableHeaderFields.HEADER_ACTION }, rows);
expected = removeNonWordAndStripSpace(expected);
got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
}
8
Source : ClientConnection.java
with Apache License 2.0
from avast
@Override
public void run() {
context.remove();
try (Scanner is = new Scanner(sock.getInputStream());
OutputStream os = sock.getOutputStream()) {
System.out.println("Connected: " + sock);
try (PrintStream writer = new PrintStream(os, true)) {
context.set(writer);
String command;
while (is.hasNextLine() && (command = is.nextLine()) != null) {
final CommandResult commandResult = shell.executeCommand(command);
if (!commandResult.isSuccess()) {
System.err.println(commandResult.getException().getMessage());
writer.println(commandResult.getException().getMessage());
} else {
if (commandResult.getResult() != null) {
writer.println(commandResult.getResult().toString());
}
}
writer.flush();
}
}
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (sock != null) {
sock.close();
}
} catch (IOException e) {
e.printStackTrace();
}
context.remove();
}
}
7
Source : ITTestRepairsCommand.java
with Apache License 2.0
from apache
@Test
public void testDeduplicateWithUpserts() throws IOException {
HoodieTableFileSystemView fsView = new HoodieTableFileSystemView(metaClient, metaClient.getActiveTimeline().getCommitTimeline().filterCompletedInstants(), fs.listStatus(new Path(duplicatedPartitionPathWithUpserts)));
List<String> filteredStatuses = fsView.getLatestBaseFiles().map(HoodieBaseFile::getPath).collect(Collectors.toList());
assertEquals(3, filteredStatuses.size(), "There should be 3 files.");
// Before deduplicate, all files contain 120 records
String[] files = filteredStatuses.toArray(new String[0]);
Dataset df = sqlContext.read().parquet(files);
assertEquals(120, df.count());
String partitionPath = HoodieTestDataGenerator.DEFAULT_THIRD_PARTITION_PATH;
String cmdStr = String.format("repair deduplicate --duplicatedPartitionPath %s --repairedOutputPath %s --sparkMaster %s --dedupeType %s", partitionPath, repairedOutputPath, "local", "upsert_type");
CommandResult cr = getShell().executeCommand(cmdStr);
assertTrue(cr.isSuccess());
assertEquals(RepairsCommand.DEDUPLICATE_RETURN_PREFIX + repairedOutputPath, cr.getResult().toString());
// After deduplicate, there are 100 records
FileStatus[] fileStatus = fs.listStatus(new Path(repairedOutputPath));
files = Arrays.stream(fileStatus).map(status -> status.getPath().toString()).toArray(String[]::new);
Dataset result = sqlContext.read().parquet(files);
assertEquals(100, result.count());
}
7
Source : ITTestHDFSParquetImportCommand.java
with Apache License 2.0
from apache
/**
* Test case for 'hdfsparquetimport' with upsert.
*/
@Test
public void testConvertWithUpsert() throws IOException, ParseException {
Path upsertFolder = new Path(basePath, "testUpsertSrc");
List<GenericRecord> upsertData = importer.createUpsertRecords(upsertFolder);
// first insert records
HDFSParquetImporter.Config cfg = importer.getHDFSParquetImporterConfig(sourcePath.toString(), tablePath, tableName, HoodieTableType.COPY_ON_WRITE.name(), "_row_key", "timestamp", 1, schemaFile);
HDFSParquetImporter dataImporter = new HDFSParquetImporter(cfg);
dataImporter.dataImport(jsc, 0);
// Load meta data
new TableCommand().connect(targetPath.toString(), TimelineLayoutVersion.VERSION_1, false, 2000, 300000, 7);
metaClient = HoodieCLI.getTableMetaClient();
// check if insert instant exist
assertEquals(1, metaClient.getActiveTimeline().getCommitsTimeline().countInstants(), "Should only 1 commit.");
String command = String.format("hdfsparquetimport --srcPath %s --targetPath %s --tableName %s " + "--tableType %s --rowKeyField %s" + " --partitionPathField %s --parallelism %s " + "--schemaFilePath %s --format %s --sparkMemory %s --retry %s --sparkMaster %s --upsert %s", upsertFolder.toString(), targetPath.toString(), tableName, HoodieTableType.COPY_ON_WRITE.name(), "_row_key", "timestamp", "1", schemaFile, "parquet", "2G", "1", "local", "true");
CommandResult cr = getShell().executeCommand(command);
assertAll("Command run success", () -> assertTrue(cr.isSuccess()), () -> assertEquals("Table imported to hoodie format", cr.getResult().toString()));
// reload meta client
metaClient = HoodieTableMetaClient.reload(metaClient);
assertEquals(2, metaClient.getActiveTimeline().getCommitsTimeline().countInstants(), "Should have 2 commit.");
// construct result, remove top 10 and add upsert data.
List<GenericRecord> expectData = insertData.subList(11, 96);
expectData.addAll(upsertData);
verifyResultData(expectData);
}
6
Source : ITTestRepairsCommand.java
with Apache License 2.0
from apache
/**
* Test case for dry run deduplicate.
*/
@Test
public void testDeduplicateWithInserts() throws IOException {
// get fs and check number of latest files
HoodieTableFileSystemView fsView = new HoodieTableFileSystemView(metaClient, metaClient.getActiveTimeline().getCommitTimeline().filterCompletedInstants(), fs.listStatus(new Path(duplicatedPartitionPath)));
List<String> filteredStatuses = fsView.getLatestBaseFiles().map(HoodieBaseFile::getPath).collect(Collectors.toList());
assertEquals(3, filteredStatuses.size(), "There should be 3 files.");
// Before deduplicate, all files contain 210 records
String[] files = filteredStatuses.toArray(new String[0]);
Dataset df = sqlContext.read().parquet(files);
assertEquals(210, df.count());
String partitionPath = HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH;
String cmdStr = String.format("repair deduplicate --duplicatedPartitionPath %s --repairedOutputPath %s --sparkMaster %s", partitionPath, repairedOutputPath, "local");
CommandResult cr = getShell().executeCommand(cmdStr);
assertTrue(cr.isSuccess());
assertEquals(RepairsCommand.DEDUPLICATE_RETURN_PREFIX + repairedOutputPath, cr.getResult().toString());
// After deduplicate, there are 200 records
FileStatus[] fileStatus = fs.listStatus(new Path(repairedOutputPath));
files = Arrays.stream(fileStatus).map(status -> status.getPath().toString()).toArray(String[]::new);
Dataset result = sqlContext.read().parquet(files);
assertEquals(200, result.count());
}
6
Source : ITTestRepairsCommand.java
with Apache License 2.0
from apache
/**
* Test case for real run deduplicate.
*/
@Test
public void testDeduplicateWithReal() throws IOException {
// get fs and check number of latest files
HoodieTableFileSystemView fsView = new HoodieTableFileSystemView(metaClient, metaClient.getActiveTimeline().getCommitTimeline().filterCompletedInstants(), fs.listStatus(new Path(duplicatedPartitionPath)));
List<String> filteredStatuses = fsView.getLatestBaseFiles().map(HoodieBaseFile::getPath).collect(Collectors.toList());
assertEquals(3, filteredStatuses.size(), "There should be 3 files.");
// Before deduplicate, all files contain 210 records
String[] files = filteredStatuses.toArray(new String[0]);
Dataset df = sqlContext.read().parquet(files);
assertEquals(210, df.count());
String partitionPath = HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH;
String cmdStr = String.format("repair deduplicate --duplicatedPartitionPath %s --repairedOutputPath %s" + " --sparkMaster %s --dryrun %s", partitionPath, repairedOutputPath, "local", false);
CommandResult cr = getShell().executeCommand(cmdStr);
assertTrue(cr.isSuccess());
assertEquals(RepairsCommand.DEDUPLICATE_RETURN_PREFIX + partitionPath, cr.getResult().toString());
// After deduplicate, there are 200 records under partition path
FileStatus[] fileStatus = fs.listStatus(new Path(duplicatedPartitionPath));
files = Arrays.stream(fileStatus).map(status -> status.getPath().toString()).toArray(String[]::new);
Dataset result = sqlContext.read().parquet(files);
assertEquals(200, result.count());
}
6
Source : ITTestRepairsCommand.java
with Apache License 2.0
from apache
@Test
public void testDeduplicateWithUpdates() throws IOException {
HoodieTableFileSystemView fsView = new HoodieTableFileSystemView(metaClient, metaClient.getActiveTimeline().getCommitTimeline().filterCompletedInstants(), fs.listStatus(new Path(duplicatedPartitionPathWithUpdates)));
List<String> filteredStatuses = fsView.getLatestBaseFiles().map(HoodieBaseFile::getPath).collect(Collectors.toList());
assertEquals(2, filteredStatuses.size(), "There should be 2 files.");
// Before deduplicate, all files contain 110 records
String[] files = filteredStatuses.toArray(new String[0]);
Dataset df = sqlContext.read().parquet(files);
assertEquals(110, df.count());
String partitionPath = HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH;
String cmdStr = String.format("repair deduplicate --duplicatedPartitionPath %s --repairedOutputPath %s --sparkMaster %s --dedupeType %s", partitionPath, repairedOutputPath, "local", "update_type");
CommandResult cr = getShell().executeCommand(cmdStr);
assertTrue(cr.isSuccess());
assertEquals(RepairsCommand.DEDUPLICATE_RETURN_PREFIX + repairedOutputPath, cr.getResult().toString());
// After deduplicate, there are 100 records
FileStatus[] fileStatus = fs.listStatus(new Path(repairedOutputPath));
files = Arrays.stream(fileStatus).map(status -> status.getPath().toString()).toArray(String[]::new);
Dataset result = sqlContext.read().parquet(files);
assertEquals(100, result.count());
}
6
Source : TestCommitsCommand.java
with Apache License 2.0
from apache
/**
* Test case of 'commits compare' command.
*/
@Test
public void testCompareCommits() throws Exception {
Map<String, Integer[]> data = generateData();
String tableName2 = "test_table2";
String tablePath2 = basePath + File.separator + tableName2;
HoodieTestUtils.init(jsc.hadoopConfiguration(), tablePath2, getTableType());
data.remove("102");
for (Map.Entry<String, Integer[]> entry : data.entrySet()) {
String key = entry.getKey();
Integer[] value = entry.getValue();
HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath2, key, jsc.hadoopConfiguration(), Option.of(value[0]), Option.of(value[1]));
}
CommandResult cr = getShell().executeCommand(String.format("commits compare --path %s", tablePath2));
assertTrue(cr.isSuccess());
// the latest instant of test_table2 is 101
List<String> commitsToCatchup = metaClient.getActiveTimeline().findInstantsAfter("101", Integer.MAX_VALUE).getInstants().map(HoodieInstant::getTimestamp).collect(Collectors.toList());
String expected = String.format("Source %s is ahead by %d commits. Commits to catch up - %s", tableName, commitsToCatchup.size(), commitsToCatchup);
assertEquals(expected, cr.getResult().toString());
}
6
Source : TestCleansCommand.java
with Apache License 2.0
from apache
/**
* Test case for show all cleans.
*/
@Test
public void testShowCleans() throws Exception {
// Check properties file exists.
assertNotNull(propsFilePath, "Not found properties file");
// First, run clean
SparkMain.clean(jsc, HoodieCLI.basePath, propsFilePath.getPath(), new ArrayList<>());
assertEquals(1, metaClient.getActiveTimeline().reload().getCleanerTimeline().getInstants().count(), "Loaded 1 clean and the count should match");
CommandResult cr = getShell().executeCommand("cleans show");
assertTrue(cr.isSuccess());
HoodieInstant clean = metaClient.getActiveTimeline().reload().getCleanerTimeline().getInstants().findFirst().orElse(null);
assertNotNull(clean);
TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_CLEAN_TIME).addTableHeaderField(HoodieTableHeaderFields.HEADER_EARLIEST_COMMAND_RETAINED).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_FILES_DELETED).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_TIME_TAKEN);
List<Comparable[]> rows = new ArrayList<>();
// EarliestCommandRetained should be 102, since hoodie.cleaner.commits.retained=2
// Total Time Taken need read from metadata
rows.add(new Comparable[] { clean.getTimestamp(), "102", "2", getLatestCleanTimeTakenInMillis().toString() });
String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
}
5
Source : SpringShellApplication.java
with Apache License 2.0
from avast
public static int run(Object[] sources, String[] args) {
final SpringApplication springApplication = new SpringApplication(sources);
if (args.length > 0 || System.getenv("HDFS_SHELL_NO_BANNER") != null) {
springApplication.setBannerMode(Banner.Mode.OFF);
}
// disable hardcoded loggers to FINE level
ConfigurableApplicationContext ctx = springApplication.run(args);
Handler[] handlers = Logger.getLogger("").getHandlers();
for (Handler handler : handlers) {
handler.setLevel(Level.OFF);
}
try {
final BootShim bootShim = new BootShim(args, ctx);
if (args.length > 0) {
JLineShellComponent shell = bootShim.getJLineShellComponent();
final String command = Arrays.stream(args).collect(Collectors.joining(" "));
if (args[0].equals("script")) {
final ContextCommands context = ctx.getBean(ContextCommands.class);
context.setFailOnError(true);
}
final CommandResult commandResult;
try {
commandResult = shell.executeCommand(command);
} catch (Exception e) {
System.err.println(e.getMessage());
return SpringApplication.exit(ctx, (ExitCodeGenerator) () -> -1);
}
if (!commandResult.isSuccess()) {
return SpringApplication.exit(ctx, (ExitCodeGenerator) () -> -1);
} else {
if (commandResult.getResult() != null) {
System.out.println(commandResult.getResult());
}
}
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
System.out.print(AnsiOutput.toString(AnsiColor.DEFAULT, " ", AnsiColor.DEFAULT));
}));
// another new line on exit from interactive mode
} else {
if (System.getProperty("daemon") != null) {
final Environment env = ctx.getBean(Environment.class);
new UnixServer(bootShim, env.getProperty("socket.filepath", "/var/tmp/hdfs-shell.sock")).run();
} else {
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
System.out.print(AnsiOutput.toString(AnsiColor.DEFAULT, System.lineSeparator(), AnsiColor.DEFAULT));
}));
// another new line on exit from interactive mode
// hack
new Timer().schedule(new InitCompletionTimerTask(bootShim), 5000);
bootShim.run();
}
}
} catch (IllegalAccessException | IOException e) {
e.printStackTrace();
return -1;
} finally {
System.out.print(AnsiOutput.toString(AnsiColor.DEFAULT, " ", AnsiColor.DEFAULT));
HandlerUtils.flushAllHandlers(Logger.getLogger(""));
}
return 0;
}
5
Source : TestCleansCommand.java
with Apache License 2.0
from apache
/**
* Test case for show partitions of a clean instant.
*/
@Test
public void testShowCleanPartitions() throws IOException {
// Check properties file exists.
assertNotNull(propsFilePath, "Not found properties file");
// First, run clean with two partitions
SparkMain.clean(jsc, HoodieCLI.basePath, propsFilePath.toString(), new ArrayList<>());
assertEquals(1, metaClient.getActiveTimeline().reload().getCleanerTimeline().getInstants().count(), "Loaded 1 clean and the count should match");
HoodieInstant clean = metaClient.getActiveTimeline().reload().getCleanerTimeline().getInstants().findFirst().get();
CommandResult cr = getShell().executeCommand("clean showpartitions --clean " + clean.getTimestamp());
assertTrue(cr.isSuccess());
TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_PARTITION_PATH).addTableHeaderField(HoodieTableHeaderFields.HEADER_CLEANING_POLICY).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_FILES_SUCCESSFULLY_DELETED).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_FAILED_DELETIONS);
// There should be two partition paths
List<Comparable[]> rows = new ArrayList<>();
rows.add(new Comparable[] { HoodieTestCommitMetadataGenerator.DEFAULT_SECOND_PARTITION_PATH, HoodieCleaningPolicy.KEEP_LATEST_COMMITS, "1", "0" });
rows.add(new Comparable[] { HoodieTestCommitMetadataGenerator.DEFAULT_THIRD_PARTITION_PATH, HoodieCleaningPolicy.KEEP_LATEST_COMMITS, "0", "0" });
rows.add(new Comparable[] { HoodieTestCommitMetadataGenerator.DEFAULT_FIRST_PARTITION_PATH, HoodieCleaningPolicy.KEEP_LATEST_COMMITS, "1", "0" });
String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
}
4
Source : TestRepairsCommand.java
with Apache License 2.0
from apache
/**
* Test case for 'repair overwrite-hoodie-props'.
*/
@Test
public void testOverwriteHoodieProperties() throws IOException {
URL newProps = this.getClass().getClassLoader().getResource("table-config.properties");
assertNotNull(newProps, "New property file must exist");
CommandResult cr = getShell().executeCommand("repair overwrite-hoodie-props --new-props-file " + newProps.getPath());
assertTrue(cr.isSuccess());
Map<String, String> oldProps = HoodieCLI.getTableMetaClient().getTableConfig().getProps();
// after overwrite, the stored value in .hoodie is equals to which read from properties.
Map<String, String> result = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient()).getTableConfig().getProps();
Properties expectProps = new Properties();
expectProps.load(new FileInputStream(new File(newProps.getPath())));
Map<String, String> expected = expectProps.entrySet().stream().collect(Collectors.toMap(e -> String.valueOf(e.getKey()), e -> String.valueOf(e.getValue())));
assertEquals(expected, result);
// check result
List<String> allPropsStr = Arrays.asList("hoodie.table.name", "hoodie.table.type", "hoodie.table.version", "hoodie.archivelog.folder", "hoodie.timeline.layout.version");
String[][] rows = allPropsStr.stream().sorted().map(key -> new String[] { key, oldProps.getOrDefault(key, "null"), result.getOrDefault(key, "null") }).toArray(String[][]::new);
String expect = HoodiePrintHelper.print(new String[] { HoodieTableHeaderFields.HEADER_HOODIE_PROPERTY, HoodieTableHeaderFields.HEADER_OLD_VALUE, HoodieTableHeaderFields.HEADER_NEW_VALUE }, rows);
expect = removeNonWordAndStripSpace(expect);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expect, got);
}
4
Source : TestHoodieLogFileCommand.java
with Apache License 2.0
from apache
/**
* Test case for 'show logfile metadata'.
*/
@Test
public void testShowLogFileCommits() throws JsonProcessingException {
CommandResult cr = getShell().executeCommand("show logfile metadata --logFilePathPattern " + partitionPath + "/*");
assertTrue(cr.isSuccess());
TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_INSTANT_TIME).addTableHeaderField(HoodieTableHeaderFields.HEADER_RECORD_COUNT).addTableHeaderField(HoodieTableHeaderFields.HEADER_BLOCK_TYPE).addTableHeaderField(HoodieTableHeaderFields.HEADER_HEADER_METADATA).addTableHeaderField(HoodieTableHeaderFields.HEADER_FOOTER_METADATA);
// construct expect result, there is only 1 line.
List<Comparable[]> rows = new ArrayList<>();
ObjectMapper objectMapper = new ObjectMapper();
String headerStr = objectMapper.writeValueAsString(dataBlock.getLogBlockHeader());
String footerStr = objectMapper.writeValueAsString(dataBlock.getLogBlockFooter());
Comparable[] output = new Comparable[] { INSTANT_TIME, 100, dataBlock.getBlockType(), headerStr, footerStr };
rows.add(output);
String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
}
2
Source : TestCommitsCommand.java
with Apache License 2.0
from apache
/**
* Test case of 'commits showarchived' command.
*/
@Test
public void testShowArchivedCommits() throws Exception {
// Generate archive
HoodieWriteConfig cfg = HoodieWriteConfig.newBuilder().withPath(tablePath).withSchema(HoodieTestCommitMetadataGenerator.TRIP_EXAMPLE_SCHEMA).withParallelism(2, 2).withCompactionConfig(HoodieCompactionConfig.newBuilder().retainCommits(1).archiveCommitsWith(2, 3).build()).forTable("test-trip-table").build();
// generate data and metadata
Map<String, Integer[]> data = new LinkedHashMap<>();
data.put("104", new Integer[] { 20, 10 });
data.put("103", new Integer[] { 15, 15 });
data.put("102", new Integer[] { 25, 45 });
data.put("101", new Integer[] { 35, 15 });
for (Map.Entry<String, Integer[]> entry : data.entrySet()) {
String key = entry.getKey();
Integer[] value = entry.getValue();
HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath, key, jsc.hadoopConfiguration(), Option.of(value[0]), Option.of(value[1]));
}
// archive
metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient());
HoodieSparkTable table = HoodieSparkTable.create(cfg, context, metaClient);
HoodieTimelineArchiveLog archiveLog = new HoodieTimelineArchiveLog(cfg, table);
archiveLog.archiveIfRequired(context);
CommandResult cr = getShell().executeCommand(String.format("commits showarchived --startTs %s --endTs %s", "100", "104"));
assertTrue(cr.isSuccess());
// archived 101 and 102 instant, generate expect data
assertEquals(2, metaClient.reloadActiveTimeline().getCommitsTimeline().countInstants(), "There should 2 instants not be archived!");
// archived 101 and 102 instants, remove 103 and 104 instant
data.remove("103");
data.remove("104");
String expected = generateExpectData(3, data);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
replacedertEquals(expected, got);
}
0
Source : TestStatsCommand.java
with Apache License 2.0
from apache
/**
* Test case for command 'stats wa'.
*/
@Test
public void testWriteAmplificationStats() throws Exception {
// generate data and metadata
Map<String, Integer[]> data = new LinkedHashMap<>();
data.put("100", new Integer[] { 15, 10 });
data.put("101", new Integer[] { 20, 10 });
data.put("102", new Integer[] { 15, 15 });
for (Map.Entry<String, Integer[]> entry : data.entrySet()) {
String k = entry.getKey();
Integer[] v = entry.getValue();
HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath, k, jsc.hadoopConfiguration(), Option.of(v[0]), Option.of(v[1]));
}
CommandResult cr = getShell().executeCommand("stats wa");
assertTrue(cr.isSuccess());
// generate expect
List<Comparable[]> rows = new ArrayList<>();
DecimalFormat df = new DecimalFormat("#.00");
data.forEach((key, value) -> {
// there are two partitions, so need to *2
rows.add(new Comparable[] { key, value[1] * 2, value[0] * 2, df.format((float) value[0] / value[1]) });
});
int totalWrite = data.values().stream().map(integers -> integers[0] * 2).mapToInt(s -> s).sum();
int totalUpdate = data.values().stream().map(integers -> integers[1] * 2).mapToInt(s -> s).sum();
rows.add(new Comparable[] { "Total", totalUpdate, totalWrite, df.format((float) totalWrite / totalUpdate) });
TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_COMMIT_TIME).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_UPSERTED).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_WRITTEN).addTableHeaderField(HoodieTableHeaderFields.HEADER_WRITE_AMPLIFICATION_FACTOR);
String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
}
0
Source : TestFileSystemViewCommand.java
with Apache License 2.0
from apache
/**
* Test case for 'show fsview all' with specified values.
*/
@Test
public void testShowCommitsWithSpecifiedValues() {
// Test command with options, baseFileOnly and maxInstant is 2
CommandResult cr = getShell().executeCommand("show fsview all --baseFileOnly true --maxInstant 2");
assertTrue(cr.isSuccess());
List<Comparable[]> rows = new ArrayList<>();
Stream<HoodieFileGroup> fileGroups = fsView.getAllFileGroups(partitionPath);
// Only get instant 1, since maxInstant was specified 2
fileGroups.forEach(fg -> fg.getAllFileSlices().filter(fs -> fs.getBaseInstantTime().equals("1")).forEach(fs -> {
int idx = 0;
// For base file only Views, do not display any delta-file related columns.
Comparable[] row = new Comparable[5];
row[idx++] = fg.getPartitionPath();
row[idx++] = fg.getFileGroupId().getFileId();
row[idx++] = fs.getBaseInstantTime();
row[idx++] = fs.getBaseFile().isPresent() ? fs.getBaseFile().get().getPath() : "";
row[idx++] = fs.getBaseFile().isPresent() ? fs.getBaseFile().get().getFileSize() : -1;
rows.add(row);
}));
Function<Object, String> converterFunction = entry -> NumericUtils.humanReadableByteCount((Double.parseDouble(entry.toString())));
Map<String, Function<Object, String>> fieldNameToConverterMap = new HashMap<>();
fieldNameToConverterMap.put(HoodieTableHeaderFields.HEADER_TOTAL_DELTA_FILE_SIZE, converterFunction);
fieldNameToConverterMap.put(HoodieTableHeaderFields.HEADER_DATA_FILE_SIZE, converterFunction);
TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_PARTITION).addTableHeaderField(HoodieTableHeaderFields.HEADER_FILE_ID).addTableHeaderField(HoodieTableHeaderFields.HEADER_BASE_INSTANT).addTableHeaderField(HoodieTableHeaderFields.HEADER_DATA_FILE).addTableHeaderField(HoodieTableHeaderFields.HEADER_DATA_FILE_SIZE);
String expected = HoodiePrintHelper.print(header, fieldNameToConverterMap, "", false, -1, false, rows);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
}
0
Source : TestRollbacksCommand.java
with Apache License 2.0
from apache
/**
* Test case for command 'show rollbacks'.
*/
@Test
public void testShowRollbacks() {
CommandResult cr = getShell().executeCommand("show rollbacks");
assertTrue(cr.isSuccess());
// get rollback instants
HoodieActiveTimeline activeTimeline = new RollbacksCommand.RollbackTimeline(HoodieCLI.getTableMetaClient());
Stream<HoodieInstant> rollback = activeTimeline.getRollbackTimeline().filterCompletedInstants().getInstants();
List<Comparable[]> rows = new ArrayList<>();
rollback.sorted().forEach(instant -> {
try {
// get pair of rollback time and instant time
HoodieRollbackMetadata metadata = TimelineMetadataUtils.deserializeAvroMetadata(activeTimeline.getInstantDetails(instant).get(), HoodieRollbackMetadata.class);
metadata.getCommitsRollback().forEach(c -> {
Comparable[] row = new Comparable[5];
row[0] = metadata.getStartRollbackTime();
row[1] = c;
// expected values: 3 files deleted and 3 partitions
row[2] = 3;
row[3] = metadata.getTimeTakenInMillis();
row[4] = 3;
rows.add(row);
});
} catch (IOException e) {
e.printStackTrace();
}
});
TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_INSTANT).addTableHeaderField(HoodieTableHeaderFields.HEADER_ROLLBACK_INSTANT).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_FILES_DELETED).addTableHeaderField(HoodieTableHeaderFields.HEADER_TIME_TOKEN_MILLIS).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_PARTITIONS);
String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
}
0
Source : TestCommitsCommand.java
with Apache License 2.0
from apache
/**
* Test case for 'commit showfiles' command.
*/
@Test
public void testShowCommitFiles() throws Exception {
Map<String, Integer[]> data = generateData();
String commitInstant = "101";
CommandResult cr = getShell().executeCommand(String.format("commit showfiles --commit %s", commitInstant));
assertTrue(cr.isSuccess());
Integer[] value = data.get(commitInstant);
List<Comparable[]> rows = new ArrayList<>();
Arrays.asList(HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH, HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH).stream().forEach(partition -> rows.add(new Comparable[] { partition, HoodieTestCommitMetadataGenerator.DEFAULT_FILEID, HoodieTestCommitMetadataGenerator.DEFAULT_PRE_COMMIT, value[1], value[0], HoodieTestCommitMetadataGenerator.DEFAULT_TOTAL_WRITE_BYTES, // default 0 errors and blank file with 0 size
0, 0 }));
TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_PARTITION_PATH).addTableHeaderField(HoodieTableHeaderFields.HEADER_FILE_ID).addTableHeaderField(HoodieTableHeaderFields.HEADER_PREVIOUS_COMMIT).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_RECORDS_UPDATED).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_RECORDS_WRITTEN).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_BYTES_WRITTEN).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_ERRORS).addTableHeaderField(HoodieTableHeaderFields.HEADER_FILE_SIZE);
String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
}
0
Source : TestFileSystemViewCommand.java
with Apache License 2.0
from apache
/**
* Test case for 'show fsview all'.
*/
@Test
public void testShowCommits() {
// Test default show fsview all
CommandResult cr = getShell().executeCommand("show fsview all");
assertTrue(cr.isSuccess());
// Get all file groups
Stream<HoodieFileGroup> fileGroups = fsView.getAllFileGroups(partitionPath);
List<Comparable[]> rows = new ArrayList<>();
fileGroups.forEach(fg -> fg.getAllFileSlices().forEach(fs -> {
int idx = 0;
// Full view: include both base-file and delta-file related columns
Comparable[] row = new Comparable[8];
row[idx++] = fg.getPartitionPath();
row[idx++] = fg.getFileGroupId().getFileId();
row[idx++] = fs.getBaseInstantTime();
row[idx++] = fs.getBaseFile().isPresent() ? fs.getBaseFile().get().getPath() : "";
row[idx++] = fs.getBaseFile().isPresent() ? fs.getBaseFile().get().getFileSize() : -1;
row[idx++] = fs.getLogFiles().count();
row[idx++] = fs.getLogFiles().mapToLong(HoodieLogFile::getFileSize).sum();
row[idx++] = fs.getLogFiles().collect(Collectors.toList()).toString();
rows.add(row);
}));
Function<Object, String> converterFunction = entry -> NumericUtils.humanReadableByteCount((Double.parseDouble(entry.toString())));
Map<String, Function<Object, String>> fieldNameToConverterMap = new HashMap<>();
fieldNameToConverterMap.put(HoodieTableHeaderFields.HEADER_TOTAL_DELTA_FILE_SIZE, converterFunction);
fieldNameToConverterMap.put(HoodieTableHeaderFields.HEADER_DATA_FILE_SIZE, converterFunction);
TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_PARTITION).addTableHeaderField(HoodieTableHeaderFields.HEADER_FILE_ID).addTableHeaderField(HoodieTableHeaderFields.HEADER_BASE_INSTANT).addTableHeaderField(HoodieTableHeaderFields.HEADER_DATA_FILE).addTableHeaderField(HoodieTableHeaderFields.HEADER_DATA_FILE_SIZE).addTableHeaderField(HoodieTableHeaderFields.HEADER_NUM_DELTA_FILES).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_DELTA_FILE_SIZE).addTableHeaderField(HoodieTableHeaderFields.HEADER_DELTA_FILES);
String expected = HoodiePrintHelper.print(header, fieldNameToConverterMap, "", false, -1, false, rows);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
}
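Note: all of these assertions share one shape: execute the CLI command, render the expected table with HoodiePrintHelper, strip non-word characters from both strings, and compare. Below is a minimal sketch of that normalize-and-compare step, assuming a regex-based helper; the real removeNonWordAndStripSpace lives in the tests' shared base class and may differ in detail:
import org.springframework.shell.core.CommandResult;
final class TableAssertions {
  // Drop everything that is not a word character so table borders, padding
  // and line breaks cannot affect the comparison.
  static String removeNonWordAndStripSpace(String s) {
    return s.replaceAll("\\W+", "");
  }
  static void assertTableEquals(String expected, CommandResult cr) {
    if (!cr.isSuccess()) {
      throw new AssertionError("command failed", cr.getException());
    }
    String got = removeNonWordAndStripSpace(cr.getResult().toString());
    if (!removeNonWordAndStripSpace(expected).equals(got)) {
      throw new AssertionError("rendered table did not match the expected output");
    }
  }
}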
0
Source : TestCommitsCommand.java
with Apache License 2.0
from apache
/**
* Test case for 'commit showpartitions' command.
*/
@Test
public void testShowCommitPartitions() throws Exception {
Map<String, Integer[]> data = generateData();
String commitInstant = "101";
CommandResult cr = getShell().executeCommand(String.format("commit showpartitions --commit %s", commitInstant));
assertTrue(cr.isSuccess());
Integer[] value = data.get(commitInstant);
List<Comparable[]> rows = new ArrayList<>();
// prevCommit is not null, so expect 0 files added and 1 file updated per partition
Arrays.asList(HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH, HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH).stream().forEach(partition -> rows.add(new Comparable[] { partition, 0, 1, 0, value[1], HoodieTestCommitMetadataGenerator.DEFAULT_TOTAL_WRITE_BYTES, 0 }));
Map<String, Function<Object, String>> fieldNameToConverterMap = new HashMap<>();
fieldNameToConverterMap.put(HoodieTableHeaderFields.HEADER_TOTAL_BYTES_WRITTEN, entry -> NumericUtils.humanReadableByteCount((Long.parseLong(entry.toString()))));
TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_PARTITION_PATH).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_FILES_ADDED).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_FILES_UPDATED).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_RECORDS_INSERTED).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_RECORDS_UPDATED).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_BYTES_WRITTEN).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_ERRORS);
String expected = HoodiePrintHelper.print(header, fieldNameToConverterMap, "", false, -1, false, rows);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
}
0
Source : TestRollbacksCommand.java
with Apache License 2.0
from apache
/**
* Test case for command 'show rollback'.
*/
@Test
public void testShowRollback() throws IOException {
// get instant
HoodieActiveTimeline activeTimeline = new RollbacksCommand.RollbackTimeline(HoodieCLI.getTableMetaClient());
Stream<HoodieInstant> rollback = activeTimeline.getRollbackTimeline().filterCompletedInstants().getInstants();
HoodieInstant instant = rollback.findFirst().orElse(null);
assertNotNull(instant, "The instant can not be null.");
CommandResult cr = getShell().executeCommand("show rollback --instant " + instant.getTimestamp());
assertTrue(cr.isSuccess());
List<Comparable[]> rows = new ArrayList<>();
// get metadata of instant
HoodieRollbackMetadata metadata = TimelineMetadataUtils.deserializeAvroMetadata(activeTimeline.getInstantDetails(instant).get(), HoodieRollbackMetadata.class);
// generate expect result
metadata.getPartitionMetadata().forEach((key, value) -> Stream.concat(value.getSuccessDeleteFiles().stream().map(f -> Pair.of(f, true)), value.getFailedDeleteFiles().stream().map(f -> Pair.of(f, false))).forEach(fileWithDeleteStatus -> {
Comparable[] row = new Comparable[5];
row[0] = metadata.getStartRollbackTime();
row[1] = metadata.getCommitsRollback().toString();
row[2] = key;
row[3] = fileWithDeleteStatus.getLeft();
row[4] = fileWithDeleteStatus.getRight();
rows.add(row);
}));
TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_INSTANT).addTableHeaderField(HoodieTableHeaderFields.HEADER_ROLLBACK_INSTANT).addTableHeaderField(HoodieTableHeaderFields.HEADER_PARTITION).addTableHeaderField(HoodieTableHeaderFields.HEADER_DELETED_FILE).addTableHeaderField(HoodieTableHeaderFields.HEADER_SUCCEEDED);
String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows);
expected = removeNonWordAndStripSpace(expected);
String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got);
}
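Note: for context, the CommandResult these tests inspect is the Spring Shell 1.x result object. Below is a minimal sketch of driving an embedded shell and reading the result; the command string and lifecycle handling are illustrative only:
import org.springframework.shell.Bootstrap;
import org.springframework.shell.core.CommandResult;
import org.springframework.shell.core.JLineShellComponent;
public class ShellSketch {
  public static void main(String[] args) throws Exception {
    Bootstrap bootstrap = new Bootstrap();                      // boots the Spring Shell application context
    JLineShellComponent shell = bootstrap.getJLineShellComponent();
    CommandResult cr = shell.executeCommand("help");            // any registered command line
    if (cr.isSuccess()) {
      System.out.println(cr.getResult());                       // the command method's return value
    } else if (cr.getException() != null) {
      cr.getException().printStackTrace();                      // failure cause, when present
    }
    shell.stop();
  }
}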