Changeset 18 for sample/HBaseRecord.java


Timestamp:
Jul 2, 2008, 3:10:09 PM
Author:
waue
Message:

upgrade 0.16 to 0.17

File:
1 edited

  • sample/HBaseRecord.java

    r9 r18  
    33 * Editor: Waue Chen
    44 * From : NCHC, Taiwan
    5  * Last Update Date: 06/01/2008
     5 * Last Update Date: 07/02/2008
     6 * Upgrade to 0.17
    67 */
    78
     
    1314 *  Make sure the Hadoop file system and HBase are running correctly.
    1415 *  1. put test.txt in the t1 directory; its content is
    15   ---------------
    16   name:locate:years
    17   waue:taiwan:1981
    18   shellon:taiwan:1981
    19   ---------------
     16 ---------------
     17 name:locate:years
     18 waue:taiwan:1981
     19 shellon:taiwan:1981
     20 ---------------
    2021 *  2. hadoop_root/$ bin/hadoop dfs -put t1 t1
    2122 *  3. hbase_root/$ bin/hbase shell
    2223 *  4. hql > create table t1_table("person");
    2324 *  5. Go to Eclipse and run this code; the database will then look like this:
    24   t1_table -> person
    25     ----------------
    26     |  name | locate | years |
    27     | waue  | taiwan | 1981 |
    28     | shellon | taiwan | 1981 |
    29     ----------------
     25 t1_table -> person
     26 ----------------
     27 |  name | locate | years |
     28 | waue  | taiwan | 1981 |
     29 | shellon | taiwan | 1981 |
     30 ----------------
    3031 * Check Result:
    3132 *  Go to the HBase console and type:
    3233 *    hql > select * from t1_table;
    33 08/06/06 12:20:48 INFO hbase.HTable: Creating scanner over t1_table starting at key
    34 +-------------------------+-------------------------+-------------------------+
    35 | Row                     | Column                  | Cell                    |
    36 +-------------------------+-------------------------+-------------------------+
    37 | 0                       | person:locate           | locate                  |
    38 +-------------------------+-------------------------+-------------------------+
    39 | 0                       | person:name             | name                    |
    40 +-------------------------+-------------------------+-------------------------+
    41 | 0                       | person:years            | years                   |
    42 +-------------------------+-------------------------+-------------------------+
    43 | 19                      | person:locate           | taiwan                  |
    44 +-------------------------+-------------------------+-------------------------+
    45 | 19                      | person:name             | waue                    |
    46 +-------------------------+-------------------------+-------------------------+
    47 | 19                      | person:years            | 1981                    |
    48 +-------------------------+-------------------------+-------------------------+
    49 | 36                      | person:locate           | taiwan                  |
    50 +-------------------------+-------------------------+-------------------------+
    51 | 36                      | person:name             | shellon                 |
    52 +-------------------------+-------------------------+-------------------------+
    53 | 36                      | person:years            | 1981                    |
    54 +-------------------------+-------------------------+-------------------------+
    55 3 row(s) in set. (0.04 sec)
     34 08/06/06 12:20:48 INFO hbase.HTable: Creating scanner over t1_table starting at key
     35 +-------------------------+-------------------------+-------------------------+
     36 | Row                     | Column                  | Cell                    |
     37 +-------------------------+-------------------------+-------------------------+
     38 | 0                       | person:locate           | locate                  |
     39 +-------------------------+-------------------------+-------------------------+
     40 | 0                       | person:name             | name                    |
     41 +-------------------------+-------------------------+-------------------------+
     42 | 0                       | person:years            | years                   |
     43 +-------------------------+-------------------------+-------------------------+
     44 | 19                      | person:locate           | taiwan                  |
     45 +-------------------------+-------------------------+-------------------------+
     46 | 19                      | person:name             | waue                    |
     47 +-------------------------+-------------------------+-------------------------+
     48 | 19                      | person:years            | 1981                    |
     49 +-------------------------+-------------------------+-------------------------+
     50 | 36                      | person:locate           | taiwan                  |
     51 +-------------------------+-------------------------+-------------------------+
     52 | 36                      | person:name             | shellon                 |
     53 +-------------------------+-------------------------+-------------------------+
     54 | 36                      | person:years            | 1981                    |
     55 +-------------------------+-------------------------+-------------------------+
     56 3 row(s) in set. (0.04 sec)
    5657 */
    57 
    58 
    59 
    6058
    6159package tw.org.nchc.code;
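
The header comment above documents the input record format, name:locate:years, with ":" as the delimiter (the sp field declared further down). As a standalone illustration of how one line of test.txt breaks into the three column values, not part of the changeset itself:

    public class SplitExample {
      public static void main(String[] args) {
        String sp = ":";                      // same delimiter as HBaseRecord.sp
        String line = "waue:taiwan:1981";     // one record from test.txt
        String[] str = line.split(sp);
        // per the documented "name:locate:years" format:
        System.out.println("name   = " + str[0]);   // waue
        System.out.println("locate = " + str[1]);   // taiwan
        System.out.println("years  = " + str[2]);   // 1981
      }
    }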
     
    7775import org.apache.hadoop.mapred.lib.IdentityReducer;
    7876
    79 
    8077public class HBaseRecord {
    8178
    8279  /* Define parameters */
    8380  // one column family: person; three column qualifiers: name, locate, years
    84   static private String  baseId1 ="person:name";
    85   static private String  baseId2 ="person:locate";
    86   static private String  baseId3 ="person:years";
    87   //split character
     81  static private String baseId1 = "person:name";
     82
     83  static private String baseId2 = "person:locate";
     84
     85  static private String baseId3 = "person:years";
     86
     87  // split character
    8888  static private String sp = ":";
     89
    8990  // file path in the Hadoop file system (not the local file system)
    9091  String file_path = "/user/waue/t1";
     92
    9193  // Hbase table name
    9294  String table_name = "t1_table";
     95
    9396  // setup MapTask and Reduce Task
    9497  int mapTasks = 1;
     98
    9599  int reduceTasks = 1;
    96  
     100
    97101  private static class ReduceClass extends TableReduce<LongWritable, Text> {
    98102
    99     // Column id is created dynamically,
     103    // Column id is created dynamically,
    100104    private static final Text col_name = new Text(baseId1);
     105
    101106    private static final Text col_local = new Text(baseId2);
     107
    102108    private static final Text col_year = new Text(baseId3);
    103    
     109
    104110    // this map holds the columns per row
    105     private MapWritable map = new MapWritable(); 
    106    
     111    private MapWritable map = new MapWritable();
     112
    107113    // in this sample the map phase does nothing; the reduce phase does the work
    108114    public void reduce(LongWritable key, Iterator<Text> values,
     
    110116        throws IOException {
    111117
    112       // values.next().getBytes() returns the value in byte form; another option
    113       // is to use decode() instead of getBytes()
     118      // values.next().getBytes() returns the value in byte form;
     119      // there is another option: use decode()
     120      // instead of getBytes()
    114121      String stro = new String(values.next().getBytes());
    115122      String str[] = stro.split(sp);
     
    117124      byte b_name[] = str[1].getBytes();
    118125      byte b_year[] = str[2].getBytes();
    119      
     126
    120127      // contents must be ImmutableBytesWritable
    121       ImmutableBytesWritable w_local = new ImmutableBytesWritable( b_local);
    122       ImmutableBytesWritable w_name = new ImmutableBytesWritable( b_name );
    123       ImmutableBytesWritable w_year = new ImmutableBytesWritable( b_year );
     128      ImmutableBytesWritable w_local = new ImmutableBytesWritable(b_local);
     129      ImmutableBytesWritable w_name = new ImmutableBytesWritable(b_name);
     130      ImmutableBytesWritable w_year = new ImmutableBytesWritable(b_year);
    124131
    125132      // populate the current row
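
The diff elides the rest of the reduce body here (old lines 126-140, new lines 133-147). A minimal sketch of what presumably follows, assuming the OutputCollector of this TableReduce takes a Text row key and the MapWritable of column values (an assumption; the elided code is not shown by the changeset):

      // hypothetical continuation of reduce(): fill the per-row map and emit it
      map.clear();
      map.put(col_name, w_name);
      map.put(col_local, w_local);
      map.put(col_year, w_year);
      // the byte offset of the input line becomes the row key
      // ("0", "19", "36" in the expected output above)
      output.collect(new Text(key.toString()), map);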
     
    141148   */
    142149  public static void main(String[] args) throws IOException {
    143     // path of the input files in the Hadoop file system
    144    
     150    // path of the input files in the Hadoop file system
     151
    145152    HBaseRecord setup = new HBaseRecord();
    146153    JobConf conf = new JobConf(HBaseRecord.class);
    147154
    148     // Job name; change it to anything you like
     155    // Job name; change it to anything you like
    149156    conf.setJobName("NCHC_PersonDataBase");
    150157
    151158    // the HBase table name must match the table created above; here it is t1_table
    152159    TableReduce.initJob(setup.table_name, ReduceClass.class, conf);
    153    
     160
    154161    // below is the MapReduce configuration
    155162    conf.setNumMapTasks(setup.mapTasks);
    156163    conf.setNumReduceTasks(setup.reduceTasks);
    157     conf.setInputPath(new Path(setup.file_path));
     164
     165    // 0.16
     166    // conf.setInputPath(new Path(setup.file_path));
     167    Convert.setInputPath(conf, new Path(setup.file_path));
     168
    158169    conf.setMapperClass(IdentityMapper.class);
    159170    conf.setCombinerClass(IdentityReducer.class);
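
For reference, the API change that motivates the Convert wrapper above: starting with Hadoop 0.17, input paths are set through the static helpers on org.apache.hadoop.mapred.FileInputFormat rather than directly on JobConf. A minimal sketch of the two styles, assuming Convert.setInputPath simply wraps the FileInputFormat call (the class name InputPathExample is only for illustration):

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.FileInputFormat;
    import org.apache.hadoop.mapred.JobConf;

    public class InputPathExample {
      public static void main(String[] args) {
        JobConf conf = new JobConf(InputPathExample.class);
        // Hadoop 0.16 style (no longer the supported call in 0.17):
        // conf.setInputPath(new Path("/user/waue/t1"));
        // Hadoop 0.17 style:
        FileInputFormat.setInputPaths(conf, new Path("/user/waue/t1"));
      }
    }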