{{{
#!html
<div style="text-align: center; color: blue"><big
style="font-weight: bold;"><big><big>Example</big></big></big></div>
}}}

[[PageOutline]]

= Compiling the Programs Individually =

== 1 mapper.java ==

1. new

|| File -> || new -> || mapper ||
[[Image(wiki:waue/2009/0617:file-new-mapper.png)]]

-----------

2. create

[[Image(wiki:waue/2009/0617:3-1.png)]]
{{{
#!sh
source folder -> enter: icas/src
Package : Sample
Name -> : mapper
}}}
----------

3. modify

{{{
#!java
package Sample;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

public class mapper extends MapReduceBase implements
    Mapper<LongWritable, Text, Text, IntWritable> {
  private final static IntWritable one = new IntWritable(1);
  private Text word = new Text();

  // Split each input line into tokens and emit a (word, 1) pair per token.
  public void map(LongWritable key, Text value,
      OutputCollector<Text, IntWritable> output, Reporter reporter)
      throws IOException {
    String line = value.toString();
    StringTokenizer tokenizer = new StringTokenizer(line);
    while (tokenizer.hasMoreTokens()) {
      word.set(tokenizer.nextToken());
      output.collect(word, one);
    }
  }
}
}}}

After creating mapper.java, paste in the code above.
[[Image(wiki:waue/2009/0617:3-2.png)]]
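
To see concretely what this mapper does: it emits one (word, 1) pair per token of each input line. For example, an input line reading hello hello world produces:
{{{
(hello, 1)
(hello, 1)
(world, 1)
}}}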

------------

== 2 reducer.java ==

1. new

 * File -> new -> reducer
[[Image(wiki:waue/2009/0617:file-new-reducer.png)]]

-------
2. create
[[Image(wiki:waue/2009/0617:3-3.png)]]

{{{
#!sh
source folder -> enter: icas/src
Package : Sample
Name -> : reducer
}}}

-----------

3. modify

{{{
#!java
package Sample;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

public class reducer extends MapReduceBase implements
    Reducer<Text, IntWritable, Text, IntWritable> {
  // Sum all the counts collected for a word and emit (word, total).
  public void reduce(Text key, Iterator<IntWritable> values,
      OutputCollector<Text, IntWritable> output, Reporter reporter)
      throws IOException {
    int sum = 0;
    while (values.hasNext()) {
      sum += values.next().get();
    }
    output.collect(key, new IntWritable(sum));
  }
}
}}}
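
Continuing the illustrative input above: the shuffle phase groups the mapper's pairs by key, so the reducer receives hello -> [1, 1] and world -> [1], and emits:
{{{
(hello, 2)
(world, 1)
}}}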

----------

== 3 WordCount.java (main function) ==

1. new

Create WordCount.java. This file drives the mapper and the reducer, so choose Map/Reduce Driver:

 * File -> new -> Map/Reduce Driver
[[Image(wiki:waue/2009/0617:file-new-mr-driver.png)]]

[[Image(wiki:waue/2009/0617:3-4.png)]]
------------

2. create

{{{
#!sh
source folder -> enter: icas/src
Package : Sample
Name -> : WordCount
}}}

-------
3. modify

{{{
#!java
package Sample;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;

public class WordCount {

  public static void main(String[] args) throws Exception {
    JobConf conf = new JobConf(WordCount.class);
    conf.setJobName("wordcount");

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);

    conf.setMapperClass(mapper.class);
    // Summation is associative, so the reducer can double as a combiner.
    conf.setCombinerClass(reducer.class);
    conf.setReducerClass(reducer.class);

    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    // Take the input and output paths from the command line; the Makefile
    // below runs this class with the arguments "input output".
    FileInputFormat.setInputPaths(conf, new Path(args[0]));
    FileOutputFormat.setOutputPath(conf, new Path(args[1]));

    JobClient.runJob(conf);
  }
}
}}}

Once all three files have been created and saved, the whole program is complete.
[[Image(wiki:waue/2009/0617:3-5.png)]]

-------

 * After saving all three files, you can see that both src and bin under the icas project now contain files. Let's check from the command line:

{{{
$ cd workspace/icas
$ ls src/Sample/
mapper.java reducer.java WordCount.java
$ ls bin/Sample/
mapper.class reducer.class WordCount.class
}}}
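
As an aside, Eclipse is not strictly required for this compile step. A minimal command-line sketch, assuming Hadoop 0.18.3 lives under /opt/hadoop (the same prefix the Makefile below uses) and that its core jar carries the standard name hadoop-0.18.3-core.jar:
{{{
#!sh
$ cd /home/hadooper/workspace/icas
# classpath assumes the standard 0.18.3 tarball layout; adjust if yours differs
$ javac -classpath /opt/hadoop/hadoop-0.18.3-core.jar -d bin src/Sample/*.java
}}}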


= Exporting a jar file from Eclipse =

File -> Export -> java -> JAR file [[br]]
-> next ->
--------
Select the project to export ->
jarfile: /home/hadooper/mytest.jar -> [[br]]
next ->
--------
next ->
--------
main class: select the class that contains main (here: Sample.WordCount) -> [[br]]
Finish
--------

 * The steps above produce your mytest.jar under /home/hadooper/.
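
An essentially equivalent jar can also be built from the shell with the standard jar tool (the same invocation the Makefile's jar target uses below), and jar -tf lets you verify the contents:
{{{
#!sh
$ cd /home/hadooper/workspace/icas
# same invocation as the Makefile's "jar" target below
$ jar -cvf /home/hadooper/mytest.jar -C bin/ .
# list the archive contents to verify the three classes are inside
$ jar -tf /home/hadooper/mytest.jar
}}}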


= Compiling Faster with a Makefile =
 * The code gets modified all the time, and repeating all these steps for every change is tiring and tedious. Let's experience how '''the command line can be more convenient than the GUI'''.

== 1 Creating the Makefile ==

{{{
$ cd /home/hadooper/workspace/icas/
$ gedit Makefile
}}}

 * Enter the Makefile content below (note: the recipe lines under each target's ":" must be indented with a "tab", not "spaces"):
{{{
JarFile="sample-0.1.jar"
MainFunc="Sample.WordCount"
LocalOutDir="/tmp/output"
HADOOP_BIN="/opt/hadoop/bin"

all: jar run output clean

jar:
	jar -cvf ${JarFile} -C bin/ .

run:
	${HADOOP_BIN}/hadoop jar ${JarFile} ${MainFunc} input output

clean:
	${HADOOP_BIN}/hadoop fs -rmr output

output:
	rm -rf ${LocalOutDir}
	${HADOOP_BIN}/hadoop fs -get output ${LocalOutDir}
	gedit ${LocalOutDir}/part-00000 &

help:
	@echo "Usage:"
	@echo " make jar - Build Jar File."
	@echo " make clean - Clean up Output directory on HDFS."
	@echo " make run - Run your MapReduce code on Hadoop."
	@echo " make output - Download and show output file"
	@echo " make help - Show Makefile options."
	@echo " "
	@echo "Example:"
	@echo " make jar; make run; make output; make clean"
}}}

 * Or simply download the ready-made [http://trac.nchc.org.tw/cloud/raw-attachment/wiki/Hadoop_Lab5/Makefile Makefile]:
{{{
$ cd /home/hadooper/workspace/icas/
$ wget http://trac.nchc.org.tw/cloud/raw-attachment/wiki/Hadoop_Lab5/Makefile
}}}

== 2 Running make ==

 * To use the Makefile, change into its directory and run make [target]; if you do not know which targets are available, just type make or make help.
 * How make is used:

{{{
$ cd /home/hadooper/workspace/icas/
$ make
Usage:
 make jar - Build Jar File.
 make clean - Clean up Output directory on HDFS.
 make run - Run your MapReduce code on Hadoop.
 make output - Download and show output file
 make help - Show Makefile options.

Example:
 make jar; make run; make output; make clean
}}}

 * Each make target is described below.

== make jar ==
 * 1. Compile and produce the jar file

{{{
$ make jar
}}}

== make run ==
 * 2. Run our wordcount on Hadoop

{{{
$ make run
}}}
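
As the Makefile shows, make run boils down to the single hadoop command below. The job reads from the input directory on HDFS, so some text files must have been uploaded there beforehand; the fs -put line is a sketch that assumes your sample text files sit in a local directory also named input:
{{{
#!sh
# assumes your sample text files are in a local directory named "input"
$ /opt/hadoop/bin/hadoop fs -put input input
# this is exactly what "make run" executes
$ /opt/hadoop/bin/hadoop jar sample-0.1.jar Sample.WordCount input output
}}}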

 * make run should execute through to completion without errors, which shows that the program we compiled in Eclipse runs correctly on the Hadoop 0.18.3 platform.

 * Back in the Eclipse window, the finished job shows up in the lower pane; the left pane also gains an output folder, in which part-00000 is our result file.

[[Image(wiki:waue/2009/0617:4-1.png)]]
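
To illustrate the output format (the actual contents depend on your input files, of course): if the input held nothing but the line hello hello world, TextOutputFormat would write each word and its count separated by a tab, so part-00000 would read:
{{{
hello	2
world	1
}}}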
------
 * Since the full javadoc has been set up, detailed documentation and code assistance are available in Eclipse.
[[Image(wiki:waue/2009/0617:4-2.png)]]

== make output ==
 * 3. This target downloads the result file from HDFS to the local machine and opens it with gedit.

{{{
$ make output
}}}
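
For a quick look without downloading anything, the result can also be printed directly from HDFS:
{{{
#!sh
$ /opt/hadoop/bin/hadoop fs -cat output/part-00000
}}}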

== make clean ==
 * 4. This target removes the output folder on HDFS. If you want to run make run again, execute make clean first; otherwise Hadoop will complain that the output folder already exists and refuse the job!

{{{
$ make clean
}}}
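
This is the same as removing the directory by hand (fs -rmr is the recursive remove on Hadoop 0.18, exactly as in the Makefile's clean target):
{{{
#!sh
$ /opt/hadoop/bin/hadoop fs -rmr output
}}}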


= Exercise: Importing a Project =
 * Import [http://trac.nchc.org.tw/cloud/raw-attachment/wiki/Hadoop_Lab5/hadoop_sample_codes.zip nchc-sample] into eclipse and develop away!