org.apache.hadoop.fs

Here are examples of the Java API package org.apache.hadoop.fs, taken from open source projects.
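
Before the individual examples, here is a minimal, self-contained sketch of the basic org.apache.hadoop.fs workflow the examples below build on. The class name and paths are hypothetical and purely illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FsQuickStart {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Obtain the FileSystem for the configured default filesystem (HDFS, local, etc.)
        FileSystem fs = FileSystem.get(conf);
        // Hypothetical working directory
        Path dir = new Path("/tmp/fs-quickstart");
        fs.mkdirs(dir);
        // Write a small file
        try (FSDataOutputStream out = fs.create(new Path(dir, "hello.txt"))) {
            out.writeUTF("hello");
        }
        // Check existence, then clean up recursively
        System.out.println("exists: " + fs.exists(dir));
        fs.delete(dir, true);
        fs.close();
    }
}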

1. TestDFSPermission#testOwnership()

Project: hadoop-20
Source File: TestDFSPermission.java
/* check ownership is set correctly for a file or directory */
private void testOwnership(OpType op) throws Exception {
    // case 1: superuser create a file/directory
    fs = FileSystem.get(conf);
    create(op, FILE_DIR_PATH, DEFAULT_UMASK, new FsPermission(DEFAULT_PERMISSION));
    checkOwnership(FILE_DIR_PATH, SUPERUSER.getUserName(), getGroup(FILE_DIR_PATH.getParent()));
    // case 2: superuser changes FILE_DIR_PATH's owner to be <user1, group3>
    setOwner(FILE_DIR_PATH, USER1.getUserName(), GROUP3_NAME, false);
    // case 3: user1 changes FILE_DIR_PATH's owner to be user2
    login(USER1);
    setOwner(FILE_DIR_PATH, USER2.getUserName(), null, true);
    // case 4: user1 changes FILE_DIR_PATH's group to be group1 which it belongs
    // to
    setOwner(FILE_DIR_PATH, null, GROUP1_NAME, false);
    // case 5: user1 changes FILE_DIR_PATH's group to be group3
    // which it does not belong to
    setOwner(FILE_DIR_PATH, null, GROUP3_NAME, true);
    // case 6: user2 (non-owner) changes FILE_DIR_PATH's group to be group3
    login(USER2);
    setOwner(FILE_DIR_PATH, null, GROUP3_NAME, true);
    // case 7: user2 (non-owner) changes FILE_DIR_PATH's user to be user2
    setOwner(FILE_DIR_PATH, USER2.getUserName(), null, true);
    // delete the file/directory
    login(SUPERUSER);
    fs.delete(FILE_DIR_PATH, true);
}
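
The test above drives ownership changes through its own create()/setOwner() helpers; the underlying org.apache.hadoop.fs calls are FileSystem#setOwner and FileSystem#setPermission. A minimal sketch, with hypothetical path, user, and group names:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public class OwnershipSketch {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // Hypothetical file
        Path p = new Path("/tmp/owned-file");
        fs.create(p).close();
        fs.setPermission(p, new FsPermission((short) 0644));
        // On HDFS only the superuser may change the owner; the owner may change the
        // group to another group it belongs to (the cases the test above exercises).
        fs.setOwner(p, "user1", "group1");
        fs.delete(p, true);
    }
}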

2. TestDFSPermission#login()

Project: hadoop-20
Source File: TestDFSPermission.java
/* log into dfs as the given user */
private void login(UnixUserGroupInformation ugi) throws IOException {
    if (fs != null) {
        fs.close();
    }
    UnixUserGroupInformation.saveToConf(conf, UnixUserGroupInformation.UGI_PROPERTY_NAME, ugi);
    // login as ugi
    fs = FileSystem.get(conf);
}
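
UnixUserGroupInformation belongs to the old, pre-security Hadoop API used by hadoop-20. On current Hadoop versions the same "act as another user" effect is typically achieved with UserGroupInformation.doAs; a rough sketch, assuming simple (non-Kerberos) authentication and a hypothetical user name:

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.UserGroupInformation;

public class LoginSketch {
    // Obtain a FileSystem handle that performs operations as the given user.
    static FileSystem loginAs(String user, final Configuration conf) throws Exception {
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
        return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
            public FileSystem run() throws Exception {
                return FileSystem.get(conf);
            }
        });
    }
}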

3. TestDFSPermission#testOwnership()

Project: hadoop-hdfs
Source File: TestDFSPermission.java
/* check ownership is set correctly for a file or directory */
private void testOwnership(OpType op) throws Exception {
    // case 1: superuser create a file/directory
    fs = FileSystem.get(conf);
    create(op, FILE_DIR_PATH, DEFAULT_UMASK, new FsPermission(DEFAULT_PERMISSION));
    checkOwnership(FILE_DIR_PATH, SUPERUSER.getUserName(), getGroup(FILE_DIR_PATH.getParent()));
    // case 2: superuser changes FILE_DIR_PATH's owner to be <user1, group3>
    setOwner(FILE_DIR_PATH, USER1.getUserName(), GROUP3_NAME, false);
    // case 3: user1 changes FILE_DIR_PATH's owner to be user2
    login(USER1);
    setOwner(FILE_DIR_PATH, USER2.getUserName(), null, true);
    // case 4: user1 changes FILE_DIR_PATH's group to be group1 which it belongs
    // to
    setOwner(FILE_DIR_PATH, null, GROUP1_NAME, false);
    // case 5: user1 changes FILE_DIR_PATH's group to be group3
    // which it does not belong to
    setOwner(FILE_DIR_PATH, null, GROUP3_NAME, true);
    // case 6: user2 (non-owner) changes FILE_DIR_PATH's group to be group3
    login(USER2);
    setOwner(FILE_DIR_PATH, null, GROUP3_NAME, true);
    // case 7: user2 (non-owner) changes FILE_DIR_PATH's user to be user2
    setOwner(FILE_DIR_PATH, USER2.getUserName(), null, true);
    // delete the file/directory
    login(SUPERUSER);
    fs.delete(FILE_DIR_PATH, true);
}

4. TestDFSPermission#login()

Project: hadoop-hdfs
Source File: TestDFSPermission.java
/* log into dfs as the given user */
private void login(UnixUserGroupInformation ugi) throws IOException {
    if (fs != null) {
        fs.close();
    }
    UnixUserGroupInformation.saveToConf(conf, UnixUserGroupInformation.UGI_PROPERTY_NAME, ugi);
    // login as ugi
    fs = FileSystem.get(conf);
}

5. ConCmptIVGen#run()

Project: HiBench
Source File: ConCmptIVGen.java
// submit the map/reduce job.
public int run(final String[] args) throws Exception {
    if (args.length != 3) {
        return printUsage();
    }
    input_path = new Path("cc_ivcmd");
    output_path = new Path(args[0]);
    number_nodes = Integer.parseInt(args[1]);
    number_reducers = Integer.parseInt(args[2]);
    System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n");
    System.out.println("[PEGASUS] Generating initial vector. Output path = " + args[0] + ", Number of nodes = " + number_nodes + ", Number of machines =" + number_reducers + "\n");
    // Generate command file and copy to HDFS "input_ConCmptIVGen"
    gen_cmd_file(number_nodes, number_reducers, input_path);
    // run job
    JobClient.runJob(configStage1());
    fs = FileSystem.get(getConf());
    fs.delete(input_path);
    System.out.println("\n[PEGASUS] Initial connected component vector generated in HDFS " + args[0] + "\n");
    return 0;
}
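
The single-argument FileSystem#delete used above is deprecated; the two-argument overload makes the recursive intent explicit. A small sketch of just the cleanup step, reusing the example's relative path:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteSketch {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path inputPath = new Path("cc_ivcmd");
        // Delete the command directory recursively if it is present
        if (fs.exists(inputPath)) {
            fs.delete(inputPath, true);
        }
    }
}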

6. HadiIVGen#run()

Project: HiBench
Source File: HadiIVGen.java
// submit the map/reduce job.
public int run(final String[] args) throws Exception {
    if (args.length != 5) {
        return printUsage();
    }
    output_path = new Path(args[0]);
    String input_path_name = "hadi_ivcmd" + args[0].substring(args[0].length() - 1);
    input_path = new Path(input_path_name);
    number_nodes = Integer.parseInt(args[1]);
    number_reducers = Integer.parseInt(args[2]);
    nreplication = Integer.parseInt(args[3]);
    if (args[4].compareTo("enc") == 0)
        encode_bitmask = 1;
    else
        encode_bitmask = 0;
    System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n");
    System.out.println("[PEGASUS] Generating initial bistring vector. Output path = " + args[0] + ", number of nodes = " + number_nodes + ", number of reducers =" + number_reducers + ", nreplication=" + nreplication + ", encode_bitmask = " + encode_bitmask + "\n");
    // Generate command file and copy to HDFS "input_ConCmptIVGen"
    gen_cmd_file(number_nodes, number_reducers, nreplication, input_path);
    // run job
    JobClient.runJob(configStage1());
    fs = FileSystem.get(getConf());
    fs.delete(input_path);
    System.out.println("\n[PEGASUS] Initial bistring vector for HADI generated in HDFS " + args[0] + "\n");
    return 0;
}

7. PagerankBlock#run()

Project: HiBench
Source File: PagerankBlock.java
// submit the map/reduce job.
public int run(final String[] args) throws Exception {
    if (args.length != 5) {
        return printUsage();
    }
    int i;
    edge_path = new Path(args[0] + "/pr_edge_block");
    vector_path = new Path(args[0] + "/pr_iv_block");
    tempmv_path = new Path(args[0] + "/pr_tempmv_block");
    output_path = new Path(args[0] + "/pr_output_block");
    vector_unfold_path = new Path(args[0] + "/pr_vector");
    minmax_path = new Path(args[0] + "/pr_minmax");
    distr_path = new Path(args[0] + "/pr_distr");
    number_nodes = Integer.parseInt(args[1]);
    nreducers = Integer.parseInt(args[2]);
    niteration = Integer.parseInt(args[3]);
    block_width = Integer.parseInt(args[4]);
    local_output_path = args[0] + "/pr_tempmv_block_temp";
    converge_threshold = ((double) 1.0 / (double) number_nodes) / 50;
    System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n");
    System.out.println("[PEGASUS] Computing PageRank using block method. Max iteration = " + niteration + ", threshold = " + converge_threshold + "\n");
    fs = FileSystem.get(getConf());
    // Iteratively calculate neighborhood function. 
    for (i = 0; i < niteration; i++) {
        JobClient.runJob(configStage1());
        RunningJob job = JobClient.runJob(configStage2());
        Counters c = job.getCounters();
        long changed = c.getCounter(PrCounters.CONVERGE_CHECK);
        System.out.println("Iteration = " + i + ", changed reducer = " + changed);
        if (changed == 0) {
            System.out.println("PageRank vector converged. Now preparing to finish...");
            fs.delete(vector_path);
            fs.delete(tempmv_path);
            fs.rename(output_path, vector_path);
            break;
        }
        // rotate directory
        fs.delete(vector_path);
        fs.delete(tempmv_path);
        fs.rename(output_path, vector_path);
    }
    if (i == niteration) {
        System.out.println("Reached the max iteration. Now preparing to finish...");
    }
    // unfold the block PageRank to plain format
    System.out.println("Unfolding the block PageRank to plain format...");
    JobClient.runJob(configStage25());
    // find min/max of pageranks
    //System.out.println("Finding minimum and maximum pageranks...");
    //JobClient.runJob(configStage3());
    //FileUtil.fullyDelete( FileSystem.getLocal(getConf()), new Path(local_output_path));
    //String new_path = local_output_path + "/" ;
    //fs.copyToLocalFile(minmax_path, new Path(new_path) ) ;
    //MinMaxInfo mmi = PagerankNaive.readMinMax( new_path );
    //System.out.println("min = " + mmi.min + ", max = " + mmi.max );
    // find distribution of pageranks
    //JobClient.runJob(configStage4(mmi.min, mmi.max));
    System.out.println("\n[PEGASUS] PageRank computed.");
    System.out.println("[PEGASUS] The final PageRanks are in the HDFS pr_vector.");
    return 0;
}
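
The iteration loop above relies on a "rotate directory" pattern: the previous vector and the intermediate matrix-vector output are removed, and the new output is renamed into the vector's place. A condensed sketch of just that step:

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class RotateSketch {
    static void rotate(FileSystem fs, Path vectorPath, Path tempPath, Path outputPath)
            throws IOException {
        // Drop the previous iteration's vector and intermediate output
        fs.delete(vectorPath, true);
        fs.delete(tempPath, true);
        // Promote the freshly computed vector
        fs.rename(outputPath, vectorPath);
    }
}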

8. RWRBlock#run()

Project: HiBench
Source File: RWRBlock.java
// submit the map/reduce job.
public int run(final String[] args) throws Exception {
    if (args.length != 8) {
        return printUsage();
    }
    int i;
    edge_path = new Path(args[0]);
    vector_path = new Path(args[1]);
    tempmv_path = new Path("rwr_tempmv_block");
    mv_output_path = new Path("rwr_output_block");
    new_vector_path = new Path("rwr_vector_new");
    query_raw_path = new Path(args[2]);
    number_nodes = Long.parseLong(args[3]);
    nreducers = Integer.parseInt(args[4]);
    niteration = Integer.parseInt(args[5]);
    block_width = Integer.parseInt(args[6]);
    mixing_c = Double.parseDouble(args[7]);
    local_output_path = "rwr_output_temp";
    System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n");
    System.out.println("[PEGASUS] Computing RWR using block method. Max iteration = " + niteration + ", threshold = " + converge_threshold + "\n");
    fs = FileSystem.get(getConf());
    // normalize query
    String[] new_args = new String[4];
    new_args[0] = args[2];
    new_args[1] = "rwr_query_norm";
    new_args[2] = "" + nreducers;
    new_args[3] = "" + (1.0 - mixing_c);
    ToolRunner.run(getConf(), new NormalizeVector(), new_args);
    // block-encode the query
    new_args = new String[7];
    new_args[0] = "rwr_query_norm";
    new_args[1] = "rwr_query_norm_block";
    new_args[2] = "" + number_nodes;
    new_args[3] = "" + block_width;
    new_args[4] = "" + nreducers;
    new_args[5] = "null";
    new_args[6] = "nosym";
    ToolRunner.run(getConf(), new MatvecPrep(), new_args);
    // Iteratively calculate neighborhood function. 
    for (i = 0; i < niteration; i++) {
        System.out.println("\n\nITERATION " + (i + 1));
        // v1 <- c*W*v
        JobClient.runJob(configStage1());
        RunningJob job = JobClient.runJob(configStage2());
        // v2 <- v1 + q
        SaxpyBlock(getConf(), nreducers, mv_output_path, query_block_path, new_vector_path, 1.0, block_width);
        // diff = || v2 - vector ||
        SaxpyBlock(getConf(), nreducers, new_vector_path, vector_path, diff_path, -1.0, block_width);
        // compute l1 norm
        new_args = new String[2];
        new_args[0] = diff_path.getName();
        new_args[1] = "" + block_width;
        ToolRunner.run(getConf(), new L1normBlock(), new_args);
        double difference = PegasusUtils.read_l1norm_result(getConf());
        FileSystem lfs = FileSystem.getLocal(getConf());
        lfs.delete(new Path("l1norm"), true);
        System.out.println("difference = " + difference);
        if (difference < converge_threshold) {
            System.out.println("RWR vector converged. Now preparing to finish...");
            fs.delete(vector_path);
            fs.delete(tempmv_path);
            fs.rename(new_vector_path, vector_path);
            break;
        }
        // rotate directory
        fs.delete(vector_path);
        fs.delete(tempmv_path);
        fs.rename(new_vector_path, vector_path);
    }
    if (i == niteration) {
        System.out.println("Reached the max iteration. Now preparing to finish...");
    }
    // unfold the block RWR to plain format
    System.out.println("Unfolding the block RWR to plain format...");
    JobClient.runJob(configStage25());
    // find min/max of RWR
    System.out.println("Finding minimum and maximum RWR scores...");
    JobClient.runJob(configStage3());
    FileUtil.fullyDelete(FileSystem.getLocal(getConf()), new Path(local_output_path));
    String new_path = local_output_path + "/";
    fs.copyToLocalFile(minmax_path, new Path(new_path));
    MinMaxInfo mmi = PagerankNaive.readMinMax(new_path);
    System.out.println("min = " + mmi.min + ", max = " + mmi.max);
    // find distribution of RWR scores
    JobClient.runJob(configStage4(mmi.min, mmi.max));
    System.out.println("\n[PEGASUS] RWR computed.");
    System.out.println("[PEGASUS] The final RWR scores are in the HDFS rwr_vector.");
    System.out.println("[PEGASUS] The minium and maximum RWRs are in the HDFS rwr_minmax.");
    System.out.println("[PEGASUS] The histogram of RWRs in 1000 bins between min_RWR and max_RWR are in the HDFS rwr_distr.\n");
    return 0;
}
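
The min/max step above pulls a small HDFS result down to the local disk: FileUtil.fullyDelete clears any stale local copy, and FileSystem#copyToLocalFile fetches the directory. A condensed sketch, reusing the example's path names:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

public class FetchMinMaxSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        FileSystem localFs = FileSystem.getLocal(conf);
        Path localDir = new Path("rwr_output_temp");
        // Remove any stale local copy, then copy the HDFS directory down
        FileUtil.fullyDelete(localFs, localDir);
        fs.copyToLocalFile(new Path("rwr_minmax"), localDir);
    }
}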

9. RWRNaive#run()

Project: HiBench
Source File: RWRNaive.java
// submit the map/reduce job.
public int run(final String[] args) throws Exception {
    if (args.length != 8) {
        return printUsage();
    }
    int i;
    edge_path = new Path(args[0]);
    vector_path = new Path("rwr_vector");
    tempmv_path = new Path("rwr_tempmv");
    mv_output_path = new Path("rwr_mv_output");
    new_vector_path = new Path("rwr_vector_new");
    query_raw_path = new Path(args[1]);
    number_nodes = Long.parseLong(args[2]);
    nreducers = Integer.parseInt(args[3]);
    niteration = Integer.parseInt(args[4]);
    if (args[5].compareTo("makesym") == 0)
        make_symmetric = 1;
    else
        make_symmetric = 0;
    int cur_iteration = 1;
    if (args[6].startsWith("cont"))
        cur_iteration = Integer.parseInt(args[6].substring(4));
    mixing_c = Double.parseDouble(args[7]);
    local_output_path = "rwr_output_temp";
    System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n");
    System.out.println("[PEGASUS] Computing RWR. Max iteration = " + niteration + ", threshold = " + converge_threshold + ", cur_iteration=" + cur_iteration + ", |V|=" + number_nodes + "\n");
    fs = FileSystem.get(getConf());
    if (cur_iteration == 1)
        gen_initial_vector(number_nodes, vector_path);
    // normalize query
    String[] new_args = new String[4];
    new_args[0] = args[1];
    new_args[1] = "rwr_query_norm";
    new_args[2] = "" + nreducers;
    new_args[3] = "" + (1.0 - mixing_c);
    ToolRunner.run(getConf(), new NormalizeVector(), new_args);
    // Iterate until converges. 
    for (i = cur_iteration; i <= niteration; i++) {
        System.out.println("\n\nITERATION " + (i));
        // v1 <- c*W*v
        JobClient.runJob(configStage1());
        RunningJob job = JobClient.runJob(configStage2());
        // v2 <- v1 + q
        SaxpyTextoutput(getConf(), nreducers, mv_output_path, query_path, new_vector_path, 1.0);
        // diff = || v2 - vector ||
        Saxpy(getConf(), nreducers, new_vector_path, vector_path, diff_path, -1.0);
        // compute l1 norm
        new_args = new String[1];
        new_args[0] = diff_path.getName();
        ToolRunner.run(getConf(), new L1norm(), new_args);
        double difference = PegasusUtils.read_l1norm_result(getConf());
        FileSystem lfs = FileSystem.getLocal(getConf());
        lfs.delete(new Path("l1norm"), true);
        System.out.println("difference = " + difference);
        if (difference < converge_threshold) {
            System.out.println("RWR vector converged. Now preparing to finish...");
            fs.delete(vector_path);
            fs.delete(tempmv_path);
            fs.rename(new_vector_path, vector_path);
            break;
        }
        // rotate directory
        fs.delete(vector_path);
        fs.delete(tempmv_path);
        fs.rename(new_vector_path, vector_path);
    }
    if (i == niteration) {
        System.out.println("Reached the max iteration. Now preparing to finish...");
    }
    // find min/max of rwrs
    System.out.println("Finding minimum and maximum rwrs...");
    JobClient.runJob(configStage3());
    FileUtil.fullyDelete(FileSystem.getLocal(getConf()), new Path(local_output_path));
    String new_path = local_output_path + "/";
    fs.copyToLocalFile(minmax_path, new Path(new_path));
    MinMaxInfo mmi = readMinMax(new_path);
    System.out.println("min = " + mmi.min + ", max = " + mmi.max);
    // find distribution of rwr score
    JobClient.runJob(configStage4(mmi.min, mmi.max));
    System.out.println("\n[PEGASUS] RWR computed.");
    System.out.println("[PEGASUS] The final RWR scores are in the HDFS rwr_vector.");
    System.out.println("[PEGASUS] The minium and maximum scores are in the HDFS rwr_minmax.");
    System.out.println("[PEGASUS] The histogram of scores in 1000 bins are in the HDFS rwr_distr.\n");
    return 0;
}

10. ApplicationMasterBase#run()

Project: lumify
Source File: ApplicationMasterBase.java
protected void run(String[] args) throws Exception {
    System.out.println("BEGIN " + this.getClass().getName());
    new JCommander(this, args);
    System.out.println("memory: " + memory);
    System.out.println("virtualCores: " + virtualCores);
    System.out.println("instances: " + instances);
    System.out.println("appName: " + appName);
    System.out.println("remotePath: " + remotePath);
    if (remotePath == null) {
        throw new Exception("remotePath is required");
    }
    ClientBase.printEnv();
    final String myClasspath = System.getProperty("java.class.path");
    final YarnConfiguration conf = new YarnConfiguration();
    fs = FileSystem.get(conf);
    resources = getResourceList(fs, new Path(remotePath));
    final StringBuilder classPathEnvBuilder = new StringBuilder(myClasspath);
    for (Path p : resources) {
        classPathEnvBuilder.append(':');
        classPathEnvBuilder.append(p.getName());
    }
    System.out.println("Classpath: " + classPathEnvBuilder);
    classPathEnv = classPathEnvBuilder.toString();
    nmClient = createNodeManagerClient(conf);
    rmClient = createResourceManagerClient(conf);
    rmClient.registerApplicationMaster("", 0, "");
    makeContainerRequests();
    System.out.println("[AM] waiting for containers to finish");
    while (!doneWithContainers()) {
        Thread.sleep(100);
    }
    rmClient.unregisterApplicationMaster(FinalApplicationStatus.SUCCEEDED, "", "");
}
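
getResourceList() above is specific to the lumify project, but a resource list like this is commonly built by listing the remote directory with FileSystem#listStatus. One possible way to build it:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ResourceListSketch {
    // Collect the paths of all plain files directly under remotePath.
    static List<Path> listResources(FileSystem fs, Path remotePath) throws IOException {
        List<Path> resources = new ArrayList<Path>();
        for (FileStatus status : fs.listStatus(remotePath)) {
            if (status.isFile()) {
                resources.add(status.getPath());
            }
        }
        return resources;
    }
}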