
* Example: using setup() in MapReduce to pass in an initial value

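The driver below stores a string in the job Configuration with conf.set("gogogo", "haha") before submitting the job. Each map and reduce task reads the value back in its setup() method through context.getConfiguration().get("gogogo"), so it is available before the first call to map() or reduce(); the mapper then simply emits that string as every output value.
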
{{{
#!java
package ClassCloud;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class HelloHadoop {

    static public class HelloMapper extends
            Mapper<LongWritable, Text, LongWritable, Text> {

        String str = "";

        public void setup(Context context) {
            // read the value stored under "gogogo" in the job Configuration
            Configuration conf = context.getConfiguration();
            this.str = conf.get("gogogo");
            System.err.println("mapper:" + this.str);
        }

        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // emit the string obtained in setup() as the output value
            Text val = new Text();
            val.set(this.str);
            context.write(key, val);
        }

    }

    static public class HelloReducer extends
            Reducer<LongWritable, Text, LongWritable, Text> {

        String str = "";

        public void setup(Context context) {
            // read the same Configuration value on the reduce side
            Configuration conf = context.getConfiguration();
            this.str = conf.get("gogogo");
            System.err.println("reduce:" + this.str);
        }

        public void reduce(LongWritable key, Iterable<Text> values,
                Context context) throws IOException, InterruptedException {
            Text val = new Text();
            // keep the last value seen for this key
            for (Text v : values) {
                val.set(v);
            }
            // write the key and the retrieved value to the output
            context.write(key, val);
        }
    }

    public static void main(String[] args) throws IOException,
            InterruptedException, ClassNotFoundException {
        // load the settings from the files under $HADOOP_HOME/conf
        Configuration conf = new Configuration();
        // store the initial value so that setup() can read it in each task
        conf.set("gogogo", "haha");
        // declare the job with this conf and name it "Hadoop Hello World"
        Job job = new Job(conf, "Hadoop Hello World");
        // set the main class of this job
        job.setJarByClass(HelloHadoop.class);
        // set the input path
        FileInputFormat.setInputPaths(job, "text_input");
        // set the output path
        FileOutputFormat.setOutputPath(job, new Path("output-hh4"));
        // set the map class
        job.setMapperClass(HelloMapper.class);
        // set the reduce class
        job.setReducerClass(HelloReducer.class);
        // start the job
        job.waitForCompletion(true);

    }
}
}}}
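
Because the mapper replaces every input value with the configured string, each record written to output-hh4 carries "haha" as its value, and the stderr log of every task shows the value read in setup().

If the value should come from the command line rather than being hardcoded in main(), the usual Tool/ToolRunner pattern can be combined with the same mapper and reducer. The sketch below is an unverified variant of the driver above; the class name HelloHadoopTool is made up for illustration, and the job would be launched with something like hadoop jar HelloHadoop.jar ClassCloud.HelloHadoopTool -D gogogo=haha (the jar name is also just an example).

{{{
#!java
package ClassCloud;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

// Hypothetical driver: lets "gogogo" be supplied on the command line
// (e.g. -D gogogo=haha) instead of being set in main().
public class HelloHadoopTool extends Configured implements Tool {

    public int run(String[] args) throws Exception {
        // getConf() already contains any -D properties parsed by ToolRunner
        Configuration conf = getConf();
        Job job = new Job(conf, "Hadoop Hello World");
        job.setJarByClass(HelloHadoopTool.class);
        FileInputFormat.setInputPaths(job, "text_input");
        FileOutputFormat.setOutputPath(job, new Path("output-hh4"));
        // reuse the mapper and reducer defined in HelloHadoop
        job.setMapperClass(HelloHadoop.HelloMapper.class);
        job.setReducerClass(HelloHadoop.HelloReducer.class);
        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new Configuration(), new HelloHadoopTool(), args));
    }
}
}}}

ToolRunner runs GenericOptionsParser over the arguments, so any -D key=value pairs are already present in getConf() by the time run() builds the Job, and setup() in the tasks reads them exactly as in the hardcoded version.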