= Computation Prototype =

{{{
#!java
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

// HelloHadoop
// Description:
//   Hadoop's "hello world" program.
//   Use it to see what Hadoop passes as <key, value> pairs and to practice
//   coding against the Hadoop API.
//
// How to test:
//   Run this program on the Hadoop 0.20 platform:
//   ---------------------------
//   hadoop jar HelloHadoop.jar
//   ---------------------------
//
// Notes:
//   1. The source path on HDFS is "/user/$YOUR_NAME/input".
//      Put your data in this HDFS directory first; it may contain only files,
//      not subdirectories. If it holds more than one file, the result for each
//      key will come from just one of them, because the keys are byte offsets
//      and the reducer below keeps a single value per key.
//   2. The job writes its result to the HDFS output path
//      "/user/$YOUR_NAME/output-hh1". This directory is created by the job,
//      so it must not exist before the job runs.
public class HelloHadoop {

    static public class HelloMapper extends
            Mapper<LongWritable, Text, LongWritable, Text> {

        @Override
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // With the default TextInputFormat, key is the byte offset of
            // the line and value is the line itself.
            // Write the input pair to the output unchanged.
            context.write(key, value);
        }
    }

    static public class HelloReducer extends
            Reducer<LongWritable, Text, LongWritable, Text> {

        @Override
        public void reduce(LongWritable key, Iterable<Text> values,
                Context context) throws IOException, InterruptedException {
            Text val = new Text();
            // Iterate over the values for this key; val ends up holding
            // the last one seen.
            for (Text str : values) {
                val.set(str.toString());
            }
            // Emit the key together with the value that was kept.
            context.write(key, val);
        }
    }

    public static void main(String[] args) throws IOException,
            InterruptedException, ClassNotFoundException {
        // Load the settings from the config files under $HADOOP_HOME/conf.
        Configuration conf = new Configuration();
        // Create the job from conf and name it "Hadoop Hello World".
        Job job = new Job(conf, "Hadoop Hello World");
        // Set the jar that contains this job's classes.
        job.setJarByClass(HelloHadoop.class);
        // Set the input path.
        FileInputFormat.setInputPaths(job, "input");
        // Set the output path.
        FileOutputFormat.setOutputPath(job, new Path("output-hh1"));
        // Set the map class.
        job.setMapperClass(HelloMapper.class);
        // Set the reduce class.
        job.setReducerClass(HelloReducer.class);
        // Start the job and wait for it to complete.
        job.waitForCompletion(true);
    }
}
}}}
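
For reference, a minimal end-to-end run might look like the shell session below. The local file name `sample.txt` is only an illustration; the HDFS paths and the `hadoop jar HelloHadoop.jar` invocation follow the notes in the program above, and `part-r-00000` is the usual name of the first reducer's output file.

{{{
#!sh
# Upload a local file into the HDFS input directory
# (files only, no subdirectories; "sample.txt" is a placeholder name).
hadoop fs -mkdir input
hadoop fs -put sample.txt input

# Run the job; the output directory "output-hh1" must not exist yet.
hadoop jar HelloHadoop.jar

# Inspect the result: each output line is "<byte offset>\t<original line>".
hadoop fs -ls output-hh1
hadoop fs -cat output-hh1/part-r-00000
}}}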