Changeset 13 for sample


Ignore:
Timestamp:
Jun 25, 2008, 5:55:53 PM (17 years ago)
Author:
waue
Message:

it works, but the improved version is HBaseRecordPro.java

File:
1 edited

Legend:

Unmodified
Added
Removed
  • sample/HBaseRecord2.java

    r10 r13  
    33 * Editor: Waue Chen
    44 * From :  NCHC. Taiwn
    5  * Last Update Date: 06/13/2008
     5 * Last Update Date: 06/01/2008
    66 */
    77
    88/**
    99 * Purpose :
    10  *  1.Auto generate HTable
    11  *  2.Parse your record and then store in HBase.
     10 *  Parse your record and then store in HBase.
    1211 *
    1312 * HowToUse :
     
    2019  ---------------
    2120 *  2. hadoop_root/$ bin/hadoop dfs -put t1 t1
    22  *  3. hbase_root/$ bin/hbase shell
    23  *  4. hql > create table t1_table("person");
    24  *  5. Come to Eclipse and run this code, and we will let database as that
    25   t1_table -> person
    26     ----------------
    27     |  name | locate | years |
    28     | waue  | taiwan | 1981 |
    29     | shellon | taiwan | 1981 |
    3021    ----------------
    3122 * Check Result:
     
    8273
    8374  /* Denify parameter */
    84   // one column family: person; three column qualifier: name,locate,years
    85   final String colstr;
     75  static String[] bf = {"person:name","person:local","person:birthyear"};
     76  // file path in hadoop file system (not phisical file system)
     77  String file_path = "/user/waue/t1/test.txt";
    8678  // Hbase table name
    87   static String[] col;
    88   String Table_Name = "Record1";
    89   //split character
    90   static String sp = ":";
    91   // file path in hadoop file system (not phisical file system)
    92   String file_path = "/user/waue/t1";
    93 
    94 
    95 
    96   public HBaseRecord2(){
    97     colstr ="person:name,locate,years";
    98   }
    99   public HBaseRecord2(String str){
    100     colstr = str;
    101   }
    102 
     79  String table_name = "testtable";
     80 
     81 
     82  // setup MapTask and Reduce Task
     83  int mapTasks = 1;
     84  int reduceTasks = 1;
    10385 
    10486  private static class ReduceClass extends TableReduce<LongWritable, Text> {
    10587
    106     // Column id is created dymanically,
    107     private static final Text col_name = new Text(baseId1);
    108     private static final Text col_local = new Text(baseId2);
    109     private static final Text col_year = new Text(baseId3);
    110    
    111     // this map holds the columns per row
    112     private MapWritable map = new MapWritable(); 
     88
    11389   
    11490    // on this sample, map is nonuse, we use reduce to handle
     
    11692        OutputCollector<Text, MapWritable> output, Reporter reporter)
    11793        throws IOException {
    118 
    119       // values.next().getByte() can get value and transfer to byte form, there is an other way that let decode()
    120       // to substitude getByte()
     94      // this map holds the columns per row
     95      MapWritable map = new MapWritable(); 
     96      // values.next().getByte() can get value and transfer to byte form,
    12197      String stro = new String(values.next().getBytes());
    122       String str[] = stro.split(sp);
    123       byte b_local[] = str[0].getBytes();
    124       byte b_name[] = str[1].getBytes();
    125       byte b_year[] = str[2].getBytes();
     98      String str[] = stro.split(":");
    12699     
     100      int length = bf.length;
     101     
     102      // Column id is created dymanically,
     103      Text[] col_n = new Text[length];
     104      byte[][] b_l = new byte[length][];
    127105      // contents must be ImmutableBytesWritable
    128       ImmutableBytesWritable w_local = new ImmutableBytesWritable( b_local);
    129       ImmutableBytesWritable w_name = new ImmutableBytesWritable( b_name );
    130       ImmutableBytesWritable w_year = new ImmutableBytesWritable( b_year );
    131 
    132       // populate the current row
     106      ImmutableBytesWritable[] w_l = new ImmutableBytesWritable[length];
    133107      map.clear();
    134       map.put(col_name, w_local);
    135       map.put(col_local, w_name);
    136       map.put(col_year, w_year);
    137 
     108      for(int i = 0; i < length; i++){
     109        col_n[i] = new Text(bf[i]);
     110        b_l[i] = str[i].getBytes();
     111        w_l[i] = new ImmutableBytesWritable(b_l[i]);
     112        // populate the current row
     113        map.put(col_n[i], w_l[i]);
     114      }
    138115      // add the row with the key as the row id
    139116      output.collect(new Text(key.toString()), map);
    140117    }
     118  }
     119
     120  private HBaseRecord2() {
    141121  }
    142122
     
    145125   */
    146126  public static void main(String[] args) throws IOException {
    147     // parse colstr to split column family and column qualify
    148     HBaseRecord2 work = new HBaseRecord2();
     127
    149128   
    150     String tmp[] = work.colstr.split(":");
    151     String Column_Family = tmp[0]+":";
    152     String CF[] = {Column_Family};
    153     String CQ[] = tmp[2].split(",");
    154     // check whether create table or not , we don't admit \
    155     // the same name but different structure
    156    
    157     BuildHTable build_table = new BuildHTable(work.Table_Name,CF);
    158     if (!build_table.checkTableExist(work.Table_Name)) {
     129    HBaseRecord2 setup = new HBaseRecord2();
     130    String[] tmp = bf[0].split(":");
     131    String[] CF = {tmp[0]};
     132    BuildHTable build_table = new BuildHTable(setup.table_name, CF);
     133    if (!build_table.checkTableExist(setup.table_name)) {
    159134      if (!build_table.createTable()) {
    160135        System.out.println("create table error !");
    161136      }
    162     }else{
    163       System.out.println("Table \"" + work.Table_Name +"\" has already existed !");
    164     }   
     137    } else {
     138      System.out.println("Table \"" + setup.table_name
     139          + "\" has already existed !");
     140    }
     141   
     142    JobConf conf = new JobConf(HBaseRecord2.class);
    165143
    166     JobConf conf = new JobConf(HBaseRecord2.class);
    167     int mapTasks = 1;
    168     int reduceTasks = 1;
    169144    //Job name; you can modify to any you like 
    170     conf.setJobName("NCHC_PersonDataBase");
     145    conf.setJobName("PersonDataBase");
    171146
    172147    // Hbase table name must be correct , in our profile is t1_table
    173     TableReduce.initJob(work.Table_Name, ReduceClass.class, conf);
     148    TableReduce.initJob(setup.table_name, ReduceClass.class, conf);
    174149   
    175150    // below are map-reduce profile
    176     conf.setNumMapTasks(mapTasks);
    177     conf.setNumReduceTasks(reduceTasks);
    178     conf.setInputPath(new Path(work.file_path));
     151    conf.setNumMapTasks(setup.mapTasks);
     152    conf.setNumReduceTasks(setup.reduceTasks);
     153    conf.setInputPath(new Path(setup.file_path));
    179154    conf.setMapperClass(IdentityMapper.class);
    180155    conf.setCombinerClass(IdentityReducer.class);
Note: See TracChangeset for help on using the changeset viewer.