Changeset 13
Timestamp: Jun 25, 2008, 5:55:53 PM
File: 1 edited
Legend: unmodified lines have a leading space; added lines are prefixed with "+", removed lines with "-".
sample/HBaseRecord2.java
--- sample/HBaseRecord2.java (r10)
+++ sample/HBaseRecord2.java (r13)
@@ -3,11 +3,10 @@
  * Editor: Waue Chen
  * From : NCHC. Taiwn
- * Last Update Date: 06/13/2008
+ * Last Update Date: 06/01/2008
  */
 
 /**
  * Purpose :
- * 1.Auto generate HTable
- * 2.Parse your record and then store in HBase.
+ * Parse your record and then store in HBase.
  *
  * HowToUse :
@@ -20,12 +19,4 @@
  ---------------
  * 2. hadoop_root/$ bin/hadoop dfs -put t1 t1
- * 3. hbase_root/$ bin/hbase shell
- * 4. hql > create table t1_table("person");
- * 5. Come to Eclipse and run this code, and we will let database as that
-	t1_table -> person
-	----------------
-	| name | locate | years |
-	| waue | taiwan | 1981 |
-	| shellon | taiwan | 1981 |
  ----------------
  * Check Result:
@@ -82,33 +73,18 @@
 
 	/* Denify parameter */
-	// one column family: person; three column qualifier: name,locate,years
-	final String colstr;
+	static String[] bf = {"person:name","person:local","person:birthyear"};
+	// file path in hadoop file system (not phisical file system)
+	String file_path = "/user/waue/t1/test.txt";
 	// Hbase table name
-	static String[] col;
-	String Table_Name = "Record1";
-	//split character
-	static String sp = ":";
-	// file path in hadoop file system (not phisical file system)
-	String file_path = "/user/waue/t1";
-
-
-
-	public HBaseRecord2(){
-		colstr ="person:name,locate,years";
-	}
-	public HBaseRecord2(String str){
-		colstr = str;
-	}
-
+	String table_name = "testtable";
+
+
+	// setup MapTask and Reduce Task
+	int mapTasks = 1;
+	int reduceTasks = 1;
 
 	private static class ReduceClass extends TableReduce<LongWritable, Text> {
 
-		// Column id is created dymanically,
-		private static final Text col_name = new Text(baseId1);
-		private static final Text col_local = new Text(baseId2);
-		private static final Text col_year = new Text(baseId3);
-
-		// this map holds the columns per row
-		private MapWritable map = new MapWritable();
+
 
 		// on this sample, map is nonuse, we use reduce to handle
@@ -116,27 +92,31 @@
 				OutputCollector<Text, MapWritable> output, Reporter reporter)
 				throws IOException {
-
-			// values.next().getByte() can get value and transfer to byte form, there is an other way that let decode()
-			// to substitude getByte()
+			// this map holds the columns per row
+			MapWritable map = new MapWritable();
+			// values.next().getByte() can get value and transfer to byte form,
 			String stro = new String(values.next().getBytes());
-			String str[] = stro.split(sp);
-			byte b_local[] = str[0].getBytes();
-			byte b_name[] = str[1].getBytes();
-			byte b_year[] = str[2].getBytes();
+			String str[] = stro.split(":");
 
+			int length = bf.length;
+
+			// Column id is created dymanically,
+			Text[] col_n = new Text[length];
+			byte[][] b_l = new byte[length][];
 			// contents must be ImmutableBytesWritable
-			ImmutableBytesWritable w_local = new ImmutableBytesWritable(b_local);
-			ImmutableBytesWritable w_name = new ImmutableBytesWritable(b_name);
-			ImmutableBytesWritable w_year = new ImmutableBytesWritable(b_year);
-
-			// populate the current row
+			ImmutableBytesWritable[] w_l = new ImmutableBytesWritable[length];
 			map.clear();
-			map.put(col_name, w_local);
-			map.put(col_local, w_name);
-			map.put(col_year, w_year);
-
+			for(int i = 0; i < length; i++){
+				col_n[i] = new Text(bf[i]);
+				b_l[i] = str[i].getBytes();
+				w_l[i] = new ImmutableBytesWritable(b_l[i]);
+				// populate the current row
+				map.put(col_n[i], w_l[i]);
+			}
 			// add the row with the key as the row id
 			output.collect(new Text(key.toString()), map);
 		}
+	}
+
+	private HBaseRecord2() {
 	}
 
@@ -145,36 +125,31 @@
 	 */
 	public static void main(String[] args) throws IOException {
-		// parse colstr to split column family and column qualify
-		HBaseRecord2 work = new HBaseRecord2();
+
 
-		String tmp[] = work.colstr.split(":");
-		String Column_Family = tmp[0]+":";
-		String CF[] = {Column_Family};
-		String CQ[] = tmp[2].split(",");
-		// check whether create table or not , we don't admit \
-		// the same name but different structure
-
-		BuildHTable build_table = new BuildHTable(work.Table_Name,CF);
-		if (!build_table.checkTableExist(work.Table_Name)) {
+		HBaseRecord2 setup = new HBaseRecord2();
+		String[] tmp = bf[0].split(":");
+		String[] CF = {tmp[0]};
+		BuildHTable build_table = new BuildHTable(setup.table_name, CF);
+		if (!build_table.checkTableExist(setup.table_name)) {
 			if (!build_table.createTable()) {
 				System.out.println("create table error !");
 			}
-		}else{
-			System.out.println("Table \"" + work.Table_Name +"\" has already existed !");
-		}
+		} else {
+			System.out.println("Table \"" + setup.table_name
+					+ "\" has already existed !");
+		}
+
+		JobConf conf = new JobConf(HBaseRecord2.class);
 
-		JobConf conf = new JobConf(HBaseRecord2.class);
-		int mapTasks = 1;
-		int reduceTasks = 1;
 		//Job name; you can modify to any you like
-		conf.setJobName("NCHC_PersonDataBase");
+		conf.setJobName("PersonDataBase");
 
 		// Hbase table name must be correct , in our profile is t1_table
-		TableReduce.initJob(work.Table_Name, ReduceClass.class, conf);
+		TableReduce.initJob(setup.table_name, ReduceClass.class, conf);
 
 		// below are map-reduce profile
-		conf.setNumMapTasks(mapTasks);
-		conf.setNumReduceTasks(reduceTasks);
-		conf.setInputPath(new Path(work.file_path));
+		conf.setNumMapTasks(setup.mapTasks);
+		conf.setNumReduceTasks(setup.reduceTasks);
+		conf.setInputPath(new Path(setup.file_path));
 		conf.setMapperClass(IdentityMapper.class);
 		conf.setCombinerClass(IdentityReducer.class);
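The substantive change in this revision is the reduce-side refactor: r10 parsed each record into three hard-coded variables (b_local, b_name, b_year), and its map.put calls paired col_name with w_local and col_local with w_name, storing the values under the wrong columns. r13 instead walks the bf array of "family:qualifier" names and pairs field i of each ":"-separated line with column bf[i]. Below is a minimal standalone sketch of that pairing loop; the class name, the sample record, and the plain LinkedHashMap standing in for Hadoop's MapWritable are illustrative assumptions, not part of the changeset.

import java.util.LinkedHashMap;
import java.util.Map;

// Standalone illustration of the r13 column loop (hypothetical class name).
public class ColumnLoopSketch {

	// same column list r13 declares as bf
	static String[] bf = { "person:name", "person:local", "person:birthyear" };

	public static void main(String[] args) {
		// one ":"-separated record, shaped like a line of the t1 input file (assumed sample)
		String stro = "waue:taiwan:1981";
		String[] str = stro.split(":");

		// LinkedHashMap stands in for the MapWritable r13 builds per row
		Map<String, byte[]> map = new LinkedHashMap<>();
		for (int i = 0; i < bf.length; i++) {
			// pair field i with column name bf[i], as the new for-loop does
			map.put(bf[i], str[i].getBytes());
		}

		for (Map.Entry<String, byte[]> e : map.entrySet()) {
			System.out.println(e.getKey() + " = " + new String(e.getValue()));
		}
	}
}

Note that both this sketch and the r13 loop assume every input line has at least bf.length fields; a shorter line would throw ArrayIndexOutOfBoundsException, which the changeset does not guard against either.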
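On the driver side, main() now derives the column family from bf[0] and keeps the create-if-absent check. BuildHTable is the sample project's own helper class and its implementation is outside this changeset, so the sketch below substitutes a minimal in-memory stand-in (the stub, its backing set, and the class name are assumptions) purely to make the control flow self-contained and runnable.

import java.util.HashSet;
import java.util.Set;

// Hypothetical, self-contained sketch of r13's create-if-absent flow.
public class TableSetupSketch {

	// in-memory stand-in for the sample's BuildHTable helper (assumption)
	static class BuildHTable {
		static final Set<String> existing = new HashSet<>();
		private final String name;

		BuildHTable(String tableName, String[] columnFamilies) {
			this.name = tableName;
		}

		boolean checkTableExist(String tableName) {
			return existing.contains(tableName);
		}

		boolean createTable() {
			return existing.add(name);
		}
	}

	public static void main(String[] args) {
		String table_name = "testtable";
		// derive the column family "person" from bf[0], as r13 does
		String[] tmp = "person:name".split(":");
		String[] CF = { tmp[0] };

		BuildHTable build_table = new BuildHTable(table_name, CF);
		if (!build_table.checkTableExist(table_name)) {
			if (!build_table.createTable()) {
				System.out.println("create table error !");
			}
		} else {
			System.out.println("Table \"" + table_name
					+ "\" has already existed !");
		}
	}
}

Deriving CF from bf[0] alone works here because every entry in bf shares the "person" family; a bf that mixed families would need each distinct family collected before the table is created.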