org.apache.hadoop.fs.CopyFilesBase.MyFile

Here are examples of the Java API class org.apache.hadoop.fs.CopyFilesBase.MyFile, taken from open-source projects.

1. TestHadoopArchives#testCopy()

Project: hadoop-20
Source File: TestHadoopArchives.java
View license
/**
 * Round-trip archive test: archives a local source tree into HDFS with
 * {@code -copyFromLocal}, validates the archive contents through the
 * {@code har://} filesystem view, extracts it back to the local filesystem
 * with {@code -copyToLocal}, re-validates, then cleans up all artifacts.
 */
@Test
public void testCopy() throws Exception {
    final String srcDir = CopyFilesBase.TEST_ROOT_DIR + "/srcdat";
    final String dstDir = CopyFilesBase.TEST_ROOT_DIR + "/srcdat2";
    final Configuration conf = new Configuration();
    final FileSystem localFs = FileSystem.getLocal(conf);
    final MyFile[] expected = CopyFilesBase.createFiles(localFs, srcDir);
    final FileSystem dfs = dfscluster.getFileSystem();
    final Path archivePath = new Path(dfs.getHomeDirectory(), "srcdat.har");

    // Archive local files into HDFS, then verify them through the har:// scheme.
    final String[] copyFromArgs = { "-copyFromLocal", srcDir, archivePath.toString() };
    assertTrue("failed test", ToolRunner.run(new HadoopArchives(conf), copyFromArgs) == 0);
    final URI uri = archivePath.toUri();
    // Build the har:// path equivalent of the archive we just wrote.
    final Path harPath =
        new Path("har://" + uri.getScheme() + "-" + uri.getAuthority() + uri.getPath());
    final FileSystem harFs = harPath.getFileSystem(conf);
    CopyFilesBase.checkFiles(harFs, archivePath.toString(), expected);

    // Extract the archive back to the local filesystem and verify the files.
    localFs.mkdirs(new Path(dstDir));
    final String[] copyToArgs = { "-copyToLocal", archivePath.toString(), dstDir };
    assertTrue("failed test", ToolRunner.run(new HadoopArchives(conf), copyToArgs) == 0);
    CopyFilesBase.checkFiles(localFs, dstDir, expected);

    // Clean up: remove both local directories and the HDFS archive.
    CopyFilesBase.deldir(localFs, srcDir);
    CopyFilesBase.deldir(localFs, dstDir);
    dfs.delete(archivePath, true);
}