/**
 * Program: HBaseRecord2.java
 * Editor: Waue Chen
 * From : NCHC, Taiwan
 * Last Update Date: 06/01/2008
 */

/**
 * Purpose :
 *   Parse your records and store them in HBase.
 *
 * HowToUse :
 *   Make sure the Hadoop file system and HBase are running correctly.
 *   1. Put test.txt into the t1 directory; its content is:
 *      ---------------
 *      name:locate:years
 *      waue:taiwan:1981
 *      shellon:taiwan:1981
 *      ---------------
 *   2. hadoop_root/$ bin/hadoop dfs -put t1 t1
 *
 * Check Result:
 *   Go to the HBase console and type:
 *     hql > select * from t1_table;
 *   08/06/06 12:20:48 INFO hbase.HTable: Creating scanner over t1_table starting at key
 *   +-------------------------+-------------------------+-------------------------+
 *   | Row                     | Column                  | Cell                    |
 *   +-------------------------+-------------------------+-------------------------+
 *   | 0                       | person:locate           | locate                  |
 *   +-------------------------+-------------------------+-------------------------+
 *   | 0                       | person:name             | name                    |
 *   +-------------------------+-------------------------+-------------------------+
 *   | 0                       | person:years            | years                   |
 *   +-------------------------+-------------------------+-------------------------+
 *   | 19                      | person:locate           | taiwan                  |
 *   +-------------------------+-------------------------+-------------------------+
 *   | 19                      | person:name             | waue                    |
 *   +-------------------------+-------------------------+-------------------------+
 *   | 19                      | person:years            | 1981                    |
 *   +-------------------------+-------------------------+-------------------------+
 *   | 36                      | person:locate           | taiwan                  |
 *   +-------------------------+-------------------------+-------------------------+
 *   | 36                      | person:name             | shellon                 |
 *   +-------------------------+-------------------------+-------------------------+
 *   | 36                      | person:years            | 1981                    |
 *   +-------------------------+-------------------------+-------------------------+
 *   3 row(s) in set. (0.04 sec)
 */
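
/*
 * A typical way to launch the job after compiling and packaging this class
 * (the jar name below is only an example; use whatever name your build
 * actually produces):
 *
 *   hadoop_root/$ bin/hadoop jar nchc.jar tw.org.nchc.code.HBaseRecord2
 */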

package tw.org.nchc.code;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapred.TableReduce;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;

public class HBaseRecord2 {

    /* Define parameters */
    // column names used for every record written to HBase
    static String[] bf = { "person:name", "person:local", "person:birthyear" };

    // file path in the Hadoop file system (not the local file system)
    String file_path = "/user/waue/t1/test.txt";

    // HBase table name
    String table_name = "testtable";

    // number of map tasks and reduce tasks
    int mapTasks = 1;
    int reduceTasks = 1;

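    /*
     * Input records are plain text lines of the form "name:locate:years"
     * (see the sample test.txt in the header comment); each input line
     * becomes one row in the HBase table.
     */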
    private static class ReduceClass extends TableReduce<LongWritable, Text> {

        // In this example the map phase is a simple pass-through (IdentityMapper);
        // all of the real work is done here in the reduce phase.
        public void reduce(LongWritable key, Iterator<Text> values,
                OutputCollector<Text, MapWritable> output, Reporter reporter)
                throws IOException {
            // this map holds the columns of one row
            MapWritable map = new MapWritable();
            // take the text of this record and split it into its fields
            String stro = values.next().toString();
            String str[] = stro.split(":");

            int length = bf.length;

            // column identifiers are created dynamically
            Text[] col_n = new Text[length];
            byte[][] b_l = new byte[length][];
            // cell contents must be ImmutableBytesWritable
            ImmutableBytesWritable[] w_l = new ImmutableBytesWritable[length];
            map.clear();
            for (int i = 0; i < length; i++) {
                col_n[i] = new Text(bf[i]);
                b_l[i] = str[i].getBytes();
                w_l[i] = new ImmutableBytesWritable(b_l[i]);
                // populate the current row
                map.put(col_n[i], w_l[i]);
            }
            // emit the row, using the key as the row id
            output.collect(new Text(key.toString()), map);
        }
    }
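
    /*
     * Example: for the input line "waue:taiwan:1981", the reduce above emits
     * one row whose columns person:name, person:local and person:birthyear
     * hold "waue", "taiwan" and "1981"; the row id is the byte offset of that
     * line in the input file (the LongWritable key produced by the default
     * TextInputFormat and passed through by IdentityMapper).
     */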

    private HBaseRecord2() {
    }

    /**
     * Runs the demo.
     */
    public static void main(String[] args) throws IOException {

        HBaseRecord2 setup = new HBaseRecord2();
        String[] tmp = bf[0].split(":");
        String[] CF = { tmp[0] };
        BuildHTable build_table = new BuildHTable(setup.table_name, CF);
        if (!build_table.checkTableExist(setup.table_name)) {
            if (!build_table.createTable()) {
                System.out.println("create table error !");
            }
        } else {
            System.out.println("Table \"" + setup.table_name
                    + "\" already exists!");
        }

        JobConf conf = new JobConf(HBaseRecord2.class);

        // job name; change it to whatever you like
        conf.setJobName("PersonDataBase");

        // the HBase table name must match the table created above
        TableReduce.initJob(setup.table_name, ReduceClass.class, conf);

        // the remaining MapReduce job configuration
        conf.setNumMapTasks(setup.mapTasks);
        conf.setNumReduceTasks(setup.reduceTasks);
        conf.setInputPath(new Path(setup.file_path));
        conf.setMapperClass(IdentityMapper.class);
        conf.setCombinerClass(IdentityReducer.class);
        conf.setReducerClass(ReduceClass.class);
        JobClient.runJob(conf);
    }
}