{{{
#!html
<div style="text-align: center; color:#151B8D"><big style="font-weight: bold;"><big><big>
Hadoop Support for RDBMS
</big></big></big></div> <div style="text-align: center; color:#7E2217"><big style="font-weight: bold;"><big>
Hadoop 0.20 + MySQL 5
</big></big></div>
}}}
[[PageOutline]]

 * Hadoop 0.20, Apache2, and MySQL 5 (server & client) must be installed first; see the database setup sketch below.
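
A minimal setup sketch for the database this job reads. The database name (`school`), the account (`waue` / `wauenchc`), and the four columns are taken from the code below; the sample rows are inferred from the execution results at the end of the page, so adjust everything to your own data:

{{{
#!sh
# Create the example database, table, and account (names match DBAccess.java).
mysql -u root -p <<'SQL'
CREATE DATABASE IF NOT EXISTS school;
USE school;
CREATE TABLE teacher (
  id INT,
  name VARCHAR(32),
  age INT,
  departmentID INT
);
INSERT INTO teacher VALUES (0, 'waue', 29, 920), (1, 'rock', 30, 1231), (1, '2', 3, 4);
GRANT ALL ON school.* TO 'waue'@'localhost' IDENTIFIED BY 'wauenchc';
SQL
}}}

The MySQL JDBC driver (mysql-connector-java) must also be visible to Hadoop, for example by copying its jar into /opt/hadoop/lib/ on every node.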

= DBAccess.java =
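
DBAccess wires up the whole job: it registers the JDBC connection for the `school` database, declares DBInputFormat over the four columns of the `teacher` table, and sends the mapper's output through an IdentityReducer into the HDFS directory `dboutput3`.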

{{{
#!java
package db;

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.mapred.lib.db.DBConfiguration;
import org.apache.hadoop.mapred.lib.db.DBInputFormat;

public class DBAccess {

	@SuppressWarnings("deprecation")
	public static void main(String[] args) throws IOException {
		try {
			JobConf conf = new JobConf(DBAccess.class);

			// Make sure the MySQL JDBC driver is on the classpath.
			Class.forName("com.mysql.jdbc.Driver");

			// Connection settings: database "school", account waue/wauenchc.
			DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver",
					"jdbc:mysql://localhost/school", "waue", "wauenchc");

			conf.setOutputKeyClass(LongWritable.class);
			conf.setOutputValueClass(Text.class);
			conf.setInputFormat(DBInputFormat.class);

			Path dstPath = new Path("dboutput3");
			FileOutputFormat.setOutputPath(conf, dstPath);

			// Read these four columns of the "teacher" table, ordered by id.
			String[] fields = { "id", "name", "age", "departmentID" };
			DBInputFormat.setInput(conf, TeacherRecord.class, "teacher", null,
					"id", fields);

			conf.setMapperClass(DBAccessMapper.class);
			conf.setReducerClass(IdentityReducer.class);

			// Remove a stale output directory so the job can be rerun.
			FileSystem hdfs = dstPath.getFileSystem(conf);
			if (hdfs.exists(dstPath)) {
				hdfs.delete(dstPath, true);
			}

			JobClient.runJob(conf);
		} catch (ClassNotFoundException e) {
			System.err.println("com.mysql.jdbc.Driver not found");
		}
	}
}
}}}
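
A sketch of how this might be compiled and submitted, assuming Hadoop is installed under /opt/hadoop and the sources sit in a db/ directory to match their package; the core jar version on your system may differ from hadoop-0.20.2-core.jar:

{{{
#!sh
# Compile the three classes against the Hadoop core jar.
mkdir -p classes
javac -classpath /opt/hadoop/hadoop-0.20.2-core.jar -d classes db/*.java
jar cvf dbaccess.jar -C classes .

# Submit the job. The MySQL JDBC jar has to be on the task classpath,
# e.g. copied into /opt/hadoop/lib/ as noted above.
/opt/hadoop/bin/hadoop jar dbaccess.jar db.DBAccess
}}}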

= DBAccessMapper.java =
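
The mapper receives one TeacherRecord per table row and simply re-emits it, keyed by the row's id; combined with the IdentityReducer configured above, these pairs pass straight through to the output files.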

{{{
#!java
package db;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

public class DBAccessMapper extends MapReduceBase implements
		Mapper<LongWritable, TeacherRecord, LongWritable, Text> {

	public void map(LongWritable key, TeacherRecord value,
			OutputCollector<LongWritable, Text> collector, Reporter reporter)
			throws IOException {
		// Key the output by the row's id; the value is "name age departmentID".
		collector.collect(new LongWritable(value.id),
				new Text(value.toString()));
	}
}
}}}

= !TeacherRecord.java =
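
TeacherRecord implements both interfaces DBInputFormat needs: Writable, so Hadoop can ship the record between map and reduce (readFields(DataInput) / write(DataOutput)), and DBWritable, so the record can be filled from a JDBC ResultSet (and, when writing back to a database, bound to a PreparedStatement).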

{{{
#!java
package db;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.lib.db.DBWritable;

public class TeacherRecord implements Writable, DBWritable {

	int id;
	String name;
	int age;
	int departmentID;

	// Writable: deserialize the record from Hadoop's binary format.
	@Override
	public void readFields(DataInput in) throws IOException {
		this.id = in.readInt();
		this.name = Text.readString(in);
		this.age = in.readInt();
		this.departmentID = in.readInt();
	}

	// Writable: serialize the record into Hadoop's binary format.
	@Override
	public void write(DataOutput out) throws IOException {
		out.writeInt(this.id);
		Text.writeString(out, this.name);
		out.writeInt(this.age);
		out.writeInt(this.departmentID);
	}

	// DBWritable: fill the record from one row of the JDBC result set.
	@Override
	public void readFields(ResultSet result) throws SQLException {
		this.id = result.getInt(1);
		this.name = result.getString(2);
		this.age = result.getInt(3);
		this.departmentID = result.getInt(4);
	}

	// DBWritable: bind the record's fields to an INSERT/UPDATE statement
	// (used only when writing back to the database).
	@Override
	public void write(PreparedStatement stmt) throws SQLException {
		stmt.setInt(1, this.id);
		stmt.setString(2, this.name);
		stmt.setInt(3, this.age);
		stmt.setInt(4, this.departmentID);
	}

	@Override
	public String toString() {
		return this.name + " " + this.age + " " + this.departmentID;
	}
}
}}}
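
Note that readFields(ResultSet) reads columns by position. This works because DBInputFormat builds its query from the arguments given to setInput, roughly `SELECT id, name, age, departmentID FROM teacher ORDER BY id` (split into LIMIT/OFFSET ranges, one per map task), so the column order always matches the fields array in DBAccess.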

= Execution Results =

{{{
$ /opt/hadoop/bin/hadoop dfs -cat dboutput3/part-00000
0 waue 29 920
1 rock 30 1231
1 2 3 4
}}}
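
Each line is the map output key (the row's id) followed by TeacherRecord.toString(), i.e. `name age departmentID`.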

 * Reference: [http://jaguar13.javaeye.com/blog/683392]