| Version 1 (modified by waue, 16 years ago) (diff) |
|---|
以下在run time時有錯
原因:Type mismatch in key from map: expected org.apache.hadoop.io.LongWritable?, recieved org.apache.hadoop.io.Text
Map的key從 LongWritable 轉成 Text 本身沒有問題;真正的原因是 driver 沒有呼叫 conf.setOutputKeyClass(Text.class)(及 setOutputValueClass(Text.class)),JobConf 預設的輸出 key 型別仍是 LongWritable,與 Mapper 實際輸出的 Text 不符,因此在 runtime 出現上述型別不符錯誤。
- keyvalue.java
package nchc.keyvalue; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.FileOutputFormat; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobConf; public class keyvalue{ public static void main(String[] args) { String[] argv = {"input","oh9","1","1"}; args = argv; if (args.length < 4) { System.out.println("keyvalue <inDir> <outDir> <m> <r>"); return; } JobConf conf = new JobConf(keyvalue.class); conf.setJobName("keyValue"); FileInputFormat.setInputPaths(conf, args[0]); FileOutputFormat.setOutputPath(conf, new Path(args[1])); conf.setNumMapTasks(Integer.parseInt(args[2])); conf.setNumReduceTasks(Integer.parseInt(args[3])); conf.setMapperClass(kvM.class); conf.setReducerClass(kvR.class); long start = System.nanoTime(); try { JobClient.runJob(conf); } catch (Exception e) { e.printStackTrace(); } long period = System.nanoTime() - start; System.err.println(period*(1e-9) + " secs."); } }
- kvm.java
package nchc.keyvalue;
import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
/**
 * Mapper that turns each record's LongWritable byte-offset key into its
 * decimal string form, emitting (offset-as-Text, original line) pairs.
 */
public class kvM extends MapReduceBase implements
Mapper<LongWritable, Text, Text, Text> {
public void map(LongWritable key, Text value,
OutputCollector<Text, Text> output, Reporter report)
throws IOException {
// LongWritable.toString() is Long.toString(get()), so render the
// offset directly from the underlying long.
output.collect(new Text(String.valueOf(key.get())), value);
}
}
- kvr.java
package nchc.keyvalue;
import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
/**
 * Reducer that formats every (key, value) pair as two Text cells:
 * the key rendered as "&lt; key , " and the value rendered as "value&gt;".
 */
public class kvR extends MapReduceBase implements
Reducer< Text, Text, Text, Text> {
public void reduce(Text key, Iterator<Text> values,
OutputCollector<Text, Text> output, Reporter report)
throws IOException {
// IMPROVEMENT: the formatted key cell is identical for every value of
// this key, so build it once instead of once per iteration. The old
// mapred OutputCollector serializes on collect(), so reusing the same
// Text instance produces byte-identical output.
Text keyv = new Text("< "+key+" , ");
while (values.hasNext()) {
output.collect(keyv, new Text(values.next()+">"));
}
}
}
