HelloHadoopV2.java
package org.nchc.hadoop;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; 
public class HelloHadoopV2 {
  public static void main(String[] args) throws IOException,
      InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = new Job(conf, "Hadoop Hello World 2");
    job.setJarByClass(HelloHadoopV2.class);
    // Set the Mapper, Combiner, and Reducer classes (see the sketches after this listing)
    job.setMapperClass(HelloMapperV2.class);
    job.setCombinerClass(HelloReducerV2.class);
    job.setReducerClass(HelloReducerV2.class);
    // Set the map output key/value types
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Text.class);
    // Set the reduce (final) output key/value types
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    FileInputFormat.addInputPath(job, new Path("/user/hadoop/input"));
    FileOutputFormat.setOutputPath(job, new Path("/user/hadoop/output-hh2"));
    // Call checkAndDelete to remove the output directory if it already exists (sketched after this listing)
    CheckAndDelete.checkAndDelete("/user/hadoop/output-hh2", conf);
    boolean status = job.waitForCompletion(true);
    if (status) {
      System.err.println("Integrate Alert Job Finished !");
    } else {
      System.err.println("Integrate Alert Job Failed !");
      System.exit(1);
    }
  }
}
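
The Mapper, Combiner, and Reducer classes referenced by the driver live in separate files of the same package and are not shown here. A minimal sketch consistent with the Text/Text output types configured above might look like the following; the pass-through and concatenation logic is an illustrative assumption, not necessarily the tutorial's actual implementation. Because the reducer is also registered as the combiner, its input and output key/value types must both be Text/Text.

package org.nchc.hadoop;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical sketch: re-emits each input line, keyed by its byte offset as text.
public class HelloMapperV2 extends Mapper<LongWritable, Text, Text, Text> {
  @Override
  public void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    // Emit (line offset as Text, line contents) so both map output types are Text.
    context.write(new Text(key.toString()), value);
  }
}

package org.nchc.hadoop;

import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

// Hypothetical sketch: concatenates all values seen for a key.
// Text-in/Text-out on both sides, so it can double as the combiner.
public class HelloReducerV2 extends Reducer<Text, Text, Text, Text> {
  @Override
  public void reduce(Text key, Iterable<Text> values, Context context)
      throws IOException, InterruptedException {
    StringBuilder sb = new StringBuilder();
    for (Text value : values) {
      sb.append(value.toString());
    }
    context.write(key, new Text(sb.toString()));
  }
}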
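
CheckAndDelete is likewise a separate helper class in this package. Based on the comment in the driver, a plausible sketch (an assumption, not the tutorial's verbatim code) that removes an existing HDFS output directory before the job runs could be:

package org.nchc.hadoop;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical sketch of the helper used above: deletes the output
// directory if it already exists, so a rerun of the job does not fail
// with "output directory already exists".
public class CheckAndDelete {
  public static boolean checkAndDelete(String out, Configuration conf) {
    Path dstPath = new Path(out);
    try {
      FileSystem hdfs = dstPath.getFileSystem(conf);
      if (hdfs.exists(dstPath)) {
        // Second argument true = delete recursively.
        hdfs.delete(dstPath, true);
      }
    } catch (IOException e) {
      e.printStackTrace();
      return false;
    }
    return true;
  }
}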