Changeset 18 for sample/HBaseRecord.java
- Timestamp: Jul 2, 2008, 3:10:09 PM
- File: 1 edited (sample/HBaseRecord.java)
Legend:
- Unmodified: no prefix
- Added: prefixed with +
- Removed: prefixed with -
sample/HBaseRecord.java
r9 → r18:

   * Editor: Waue Chen
   * From : NCHC. Taiwan
-  * Last Update Date: 06/01/2008
+  * Last Update Date: 07/02/2008
+  * Upgrade to 0.17
   */
  …
   * Make sure Hadoop file system and Hbase are running correctly.
   * 1. put test.txt in t1 directory, the content of which is:
   *      ---------------
   *      name:locate:years
   *      waue:taiwan:1981
   *      shellon:taiwan:1981
   *      ---------------
   * 2. hadoop_root/$ bin/hadoop dfs -put t1 t1
   * 3. hbase_root/$ bin/hbase shell
   * 4. hql > create table t1_table("person");
   * 5. Come back to Eclipse and run this code; the table will then hold:
   *      t1_table -> person
   *      ----------------
   *      | name    | locate | years |
   *      | waue    | taiwan | 1981  |
   *      | shellon | taiwan | 1981  |
   *      ----------------
   * Check Result:
   * Go to the hbase console and type:
   * hql > select * from t1_table;
  
   08/06/06 12:20:48 INFO hbase.HTable: Creating scanner over t1_table starting at key
   +-------------------------+-------------------------+-------------------------+
   | Row                     | Column                  | Cell                    |
   +-------------------------+-------------------------+-------------------------+
   | 0                       | person:locate           | locate                  |
   +-------------------------+-------------------------+-------------------------+
   | 0                       | person:name             | name                    |
   +-------------------------+-------------------------+-------------------------+
   | 0                       | person:years            | years                   |
   +-------------------------+-------------------------+-------------------------+
   | 19                      | person:locate           | taiwan                  |
   +-------------------------+-------------------------+-------------------------+
   | 19                      | person:name             | waue                    |
   +-------------------------+-------------------------+-------------------------+
   | 19                      | person:years            | 1981                    |
   +-------------------------+-------------------------+-------------------------+
   | 36                      | person:locate           | taiwan                  |
   +-------------------------+-------------------------+-------------------------+
   | 36                      | person:name             | shellon                 |
   +-------------------------+-------------------------+-------------------------+
   | 36                      | person:years            | 1981                    |
   +-------------------------+-------------------------+-------------------------+
   3 row(s) in set. (0.04 sec)
   */
  
   package tw.org.nchc.code;
  …
   import org.apache.hadoop.mapred.lib.IdentityReducer;
  
   public class HBaseRecord {
  
   	/* Define parameters */
   	// one column family: person; three column qualifiers: name, locate, years
-  	static private String baseId1 ="person:name";
-  	static private String baseId2 ="person:locate";
-  	static private String baseId3 ="person:years";
+  	static private String baseId1 = "person:name";
+
+  	static private String baseId2 = "person:locate";
+
+  	static private String baseId3 = "person:years";
+
   	// split character
   	static private String sp = ":";
  
   	// file path in Hadoop file system (not the physical file system)
   	String file_path = "/user/waue/t1";
  
   	// Hbase table name
   	String table_name = "t1_table";
  
   	// set up map task and reduce task counts
   	int mapTasks = 1;
  
   	int reduceTasks = 1;
  
   	private static class ReduceClass extends TableReduce<LongWritable, Text> {
  
   		// Column ids are created dynamically
   		private static final Text col_name = new Text(baseId1);
  
   		private static final Text col_local = new Text(baseId2);
  
   		private static final Text col_year = new Text(baseId3);
  
   		// this map holds the columns per row
   		private MapWritable map = new MapWritable();
  
   		// in this sample the map phase does nothing; the reduce does the work
   		public void reduce(LongWritable key, Iterator<Text> values,
  …
   				throws IOException {
  
-  			// values.next().getBytes() converts the value to byte form; another way is to let decode() substitute for getBytes()
+  			// values.next().getBytes() converts the value to byte form;
+  			// another way is to let decode() substitute for getBytes()
   			String stro = new String(values.next().getBytes());
   			String str[] = stro.split(sp);
  …
   			byte b_name[] = str[1].getBytes();
   			byte b_year[] = str[2].getBytes();
  
   			// contents must be ImmutableBytesWritable
-  			ImmutableBytesWritable w_local = new ImmutableBytesWritable( b_local);
-  			ImmutableBytesWritable w_name = new ImmutableBytesWritable( b_name);
-  			ImmutableBytesWritable w_year = new ImmutableBytesWritable( b_year);
+  			ImmutableBytesWritable w_local = new ImmutableBytesWritable(b_local);
+  			ImmutableBytesWritable w_name = new ImmutableBytesWritable(b_name);
+  			ImmutableBytesWritable w_year = new ImmutableBytesWritable(b_year);
  
   			// populate the current row
  …
   	 */
   	public static void main(String[] args) throws IOException {
   		// which path of input files in Hadoop file system
  
   		HBaseRecord setup = new HBaseRecord();
   		JobConf conf = new JobConf(HBaseRecord.class);
  
   		// Job name; modify as you like
   		conf.setJobName("NCHC_PersonDataBase");
  
   		// Hbase table name must be correct; in our profile it is t1_table
   		TableReduce.initJob(setup.table_name, ReduceClass.class, conf);
  
   		// below is the map-reduce profile
   		conf.setNumMapTasks(setup.mapTasks);
   		conf.setNumReduceTasks(setup.reduceTasks);
-  		conf.setInputPath(new Path(setup.file_path));
+
+  		// 0.16
+  		// conf.setInputPath(new Path(setup.file_path));
+  		Convert.setInputPath(conf, new Path(setup.file_path));
+
   		conf.setMapperClass(IdentityMapper.class);
   		conf.setCombinerClass(IdentityReducer.class);
  …
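The substantive change in this revision is the input-path call: the 0.16-era JobConf.setInputPath() no longer works after the 0.17 upgrade, so the job setup now goes through a project-local Convert helper. That class is not included in this changeset; the following is a minimal sketch of what such a shim might look like, assuming it simply forwards to the FileInputFormat call that replaced the old API (the class body here is an assumption, not the recorded code):

	package tw.org.nchc.code;

	import org.apache.hadoop.fs.Path;
	import org.apache.hadoop.mapred.FileInputFormat;
	import org.apache.hadoop.mapred.JobConf;

	// Hypothetical sketch of the Convert helper referenced by the diff.
	// Assumes it wraps FileInputFormat.setInputPaths(), the 0.17 replacement
	// for the removed JobConf.setInputPath().
	public class Convert {
		public static void setInputPath(JobConf conf, Path path) {
			FileInputFormat.setInputPaths(conf, path);
		}
	}

Keeping the dead call behind a "// 0.16" comment and routing every sample through one helper leaves a single edit point if the API moves again.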
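For readers tracing the reduce logic, the snippet below is a self-contained illustration (not part of the changeset) of the parsing step the reducer performs: one input line is split on the ':' separator and each field is wrapped as an ImmutableBytesWritable, as the diff does for b_name and b_year. The index-to-field mapping below follows the name:locate:years input layout; the changeset's own assignment for b_local is elided in the diff above, so treat the indices here as illustrative:

	import org.apache.hadoop.hbase.io.ImmutableBytesWritable;

	public class SplitDemo {
		public static void main(String[] args) {
			String sp = ":";                  // same separator as the sample
			String stro = "waue:taiwan:1981"; // one line of test.txt
			String[] str = stro.split(sp);

			// HBase cell contents must be ImmutableBytesWritable,
			// so wrap each field's bytes (illustrative index mapping)
			ImmutableBytesWritable w_name = new ImmutableBytesWritable(str[0].getBytes());
			ImmutableBytesWritable w_local = new ImmutableBytesWritable(str[1].getBytes());
			ImmutableBytesWritable w_year = new ImmutableBytesWritable(str[2].getBytes());

			// Recover the wrapped bytes to show the round trip
			System.out.println(new String(w_name.get()));  // waue
			System.out.println(new String(w_local.get())); // taiwan
			System.out.println(new String(w_year.get()));  // 1981
		}
	}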