| 8 | | public static class wordindexM extends |
| 9 | | Mapper<LongWritable, Text, Text, Text> { |
| 10 | | public void map(LongWritable key, Text value, |
| 11 | | OutputCollector<Text, Text> output, Reporter reporter) |
| 12 | | throws IOException { |
| | 8 | import java.io.IOException; |
| | 9 | import java.util.StringTokenizer; |
| 14 | | FileSplit fileSplit = (FileSplit) reporter.getInputSplit(); |
| 15 | | |
| 16 | | String line = value.toString(); |
| 17 | | StringTokenizer st = new StringTokenizer(line.toLowerCase()); |
| 18 | | while (st.hasMoreTokens()) { |
| 19 | | String word = st.nextToken(); |
| 20 | | output.collect(new Text(word), new Text(fileSplit.getPath() |
| 21 | | .getName() |
| 22 | | + ":" + line)); |
| 23 | | } |
| 24 | | } |
| 25 | | } |
| 26 | | }}} |
import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
| 28 | | 遇到問題: |
| 29 | | {{{ |
| 30 | | #!text |
| 31 | | 10/01/18 20:52:39 INFO input.FileInputFormat: Total input paths to process : 2 |
| 32 | | 10/01/18 20:52:39 INFO mapred.JobClient: Running job: job_201001181452_0038 |
| 33 | | 10/01/18 20:52:40 INFO mapred.JobClient: map 0% reduce 0% |
| 34 | | 10/01/18 20:52:50 INFO mapred.JobClient: Task Id : attempt_201001181452_0038_m_000000_0, Status : FAILED |
| 35 | | java.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.Text, recieved org.apache.hadoop.io.LongWritable |
| 36 | | |
| 37 | | }}} |
| 38 | | |
| 39 | | * 已解決 |
| 40 | | |
| 41 | | {{{ |
| 42 | | #!java |
| 43 | | public static class wordindexM extends |
| | 25 | public class WordIndex { |
| | 26 | public static class wordindexM extends |
| 61 | | } |
| | 44 | } |
| | 45 | |
| | 46 | |
| | 47 | static public class wordindexR extends Reducer<Text, Text, Text, Text> { |
| | 48 | |
| | 49 | public void reduce(Text key, Iterable<Text> values, |
| | 50 | OutputCollector<Text, Text> output, Reporter reporter) |
| | 51 | throws IOException { |
| | 52 | String v = ""; |
| | 53 | StringBuilder ret = new StringBuilder("\n"); |
| | 54 | for (Text val : values) { |
| | 55 | v += val.toString().trim(); |
| | 56 | if (v.length() > 0) |
| | 57 | ret.append(v + "\n"); |
| | 58 | } |
| | 59 | |
| | 60 | output.collect((Text) key, new Text(ret.toString())); |
| | 61 | } |
| | 62 | } |
| | 63 | |
| | 64 | public static void main(String[] args) throws IOException, |
| | 65 | InterruptedException, ClassNotFoundException { |
| | 66 | // debug using |
| | 67 | String[] argv = { "/user/waue/input", "/user/waue/output-wi" }; |
| | 68 | args = argv; |
| | 69 | |
| | 70 | Configuration conf = new Configuration(); |
| | 71 | String[] otherArgs = new GenericOptionsParser(conf, args) |
| | 72 | .getRemainingArgs(); |
| | 73 | if (otherArgs.length < 2) { |
| | 74 | System.out.println("hadoop jar WordIndex.jar <inDir> <outDir>"); |
| | 75 | return; |
| | 76 | } |
| | 77 | Job job = new Job(conf, "word index"); |
| | 78 | job.setJobName("word inverted index"); |
| | 79 | job.setJarByClass(WordIndex.class); |
| | 80 | |
| | 81 | job.setMapOutputKeyClass(Text.class); |
| | 82 | job.setMapOutputValueClass(Text.class); |
| | 83 | job.setOutputKeyClass(Text.class); |
| | 84 | job.setOutputValueClass(Text.class); |
| | 85 | |
| | 86 | job.setMapperClass(wordindexM.class); |
| | 87 | job.setReducerClass(wordindexR.class); |
| | 88 | job.setCombinerClass(wordindexR.class); |
| | 89 | |
| | 90 | FileInputFormat.setInputPaths(job, args[0]); |
| | 91 | FileOutputFormat.setOutputPath(job, new Path(args[1])); |
| | 92 | |
| | 93 | long start = System.nanoTime(); |
| | 94 | |
| | 95 | job.waitForCompletion(true); |
| | 96 | |
| | 97 | long time = System.nanoTime() - start; |
| | 98 | System.err.println(time * (1E-9) + " secs."); |
| | 99 | } |
| | 100 | } |
| | 101 | |