hadoop_hbase_error_sample2

Timestamp: Jun 4, 2008, 6:11:56 PM
Author: waue

The erroneous sample code is as follows:

{{{
/*
 *  NCHC HBase with MapReduce sample code
 *  DemoHBaseSink.java
 */

package tw.org.nchc.demo;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapred.TableReduce;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;

/**
 * This sample code puts the indicated data into HBase.
 * 1. put test.txt in the t1 directory; its content is
 *    ---------------
 *    name locate years
 *    waue taiwan 1981
 *    shellon taiwan 1981
 *    ---------------
 * 2. hadoop_root/$ bin/hadoop dfs -put t1 t1
 * 3. hbase_root/$ bin/hbase shell
 * 4. hql > create table t1_table("person");
 * 5. run this code; the table should then look like:
 *    t1_table -> person
 *    -----------------------------
 *    |  name   | locate | years  |
 *    -----------------------------
 *    | waue    | taiwan | 1981   |
 *    -----------------------------
 *    | shellon | taiwan | 1981   |
 *    -----------------------------
 **/
public class DemoHBaseSink {

    private static class ReduceClass extends TableReduce<LongWritable, Text> {

        // these are the columns we're going to be writing
        private static final Text col1 = new Text("person:name");
        private static final Text col2 = new Text("person:locate");
        private static final Text col3 = new Text("person:years");
        // this map holds the columns per row
        private MapWritable map = new MapWritable();

        public void reduce(LongWritable key, Iterator<Text> values,
                OutputCollector<Text, MapWritable> output, Reporter reporter)
                throws IOException {

            // contents must be ImmutableBytesWritable
            // NOTE: this line is the source of the exception below --
            // getBytes() returns a byte[], and toString() on an array
            // yields its identity string, not the line text, so split(" ")
            // produces a single element and str[1] is out of bounds
            String[] str = (values.next().getBytes().toString()).split(" ");
            byte bl[] = str[0].getBytes();
            byte bn[] = str[1].getBytes();
            byte by[] = str[2].getBytes();
            ImmutableBytesWritable bytes1 = new ImmutableBytesWritable(bl);
            ImmutableBytesWritable bytes2 = new ImmutableBytesWritable(bn);
            ImmutableBytesWritable bytes3 = new ImmutableBytesWritable(by);
            // populate the current row
            map.clear();
            map.put(col1, bytes1);
            map.put(col2, bytes2);
            map.put(col3, bytes3);

            // add the row with the key as the row id
            output.collect(new Text(key.toString()), map);
        }
    }

    private DemoHBaseSink() {
    }

    /**
     * Runs the demo.
     */
    public static void main(String[] args) throws IOException {
        // path of the input files in the Hadoop file system
        String file_path = "/user/waue/t1";

        int mapTasks = 1;
        int reduceTasks = 1;

        JobConf conf = new JobConf(DemoHBaseSink.class);
        conf.setJobName("DemoPersonBase");

        // must initialize the TableReduce before running the job
        TableReduce.initJob("t1_table", ReduceClass.class, conf);

        conf.setNumMapTasks(mapTasks);
        conf.setNumReduceTasks(reduceTasks);

        conf.setInputPath(new Path(file_path));

        conf.setMapperClass(IdentityMapper.class);
        conf.setCombinerClass(IdentityReducer.class);
        conf.setReducerClass(ReduceClass.class);

        JobClient.runJob(conf);
    }
}
}}}

Error message
{{{
08/06/04 18:03:21 INFO mapred.FileInputFormat: Total input paths to process : 1
08/06/04 18:03:22 INFO mapred.JobClient: Running job: job_200805291341_0019
08/06/04 18:03:23 INFO mapred.JobClient:  map 0% reduce 0%
08/06/04 18:03:28 INFO mapred.JobClient:  map 100% reduce 0%
08/06/04 18:03:36 INFO mapred.JobClient: Task Id : task_200805291341_0019_r_000000_0, Status : FAILED
java.lang.ArrayIndexOutOfBoundsException: 1
        at tw.org.nchc.demo.DemoHBaseSink$ReduceClass.reduce(DemoHBaseSink.java:63)
        at tw.org.nchc.demo.DemoHBaseSink$ReduceClass.reduce(DemoHBaseSink.java:1)
        at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:333)
        at org.apache.hadoop.mapred.TaskTracker$Child.main(TaskTracker.java:2084)

08/06/04 18:03:40 INFO mapred.JobClient: Task Id : task_200805291341_0019_r_000000_1, Status : FAILED
java.lang.ArrayIndexOutOfBoundsException: 1
        at tw.org.nchc.demo.DemoHBaseSink$ReduceClass.reduce(DemoHBaseSink.java:63)
        at tw.org.nchc.demo.DemoHBaseSink$ReduceClass.reduce(DemoHBaseSink.java:1)
        at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:333)
        at org.apache.hadoop.mapred.TaskTracker$Child.main(TaskTracker.java:2084)

08/06/04 18:03:45 INFO mapred.JobClient: Task Id : task_200805291341_0019_r_000000_2, Status : FAILED
java.lang.ArrayIndexOutOfBoundsException: 1
        at tw.org.nchc.demo.DemoHBaseSink$ReduceClass.reduce(DemoHBaseSink.java:63)
        at tw.org.nchc.demo.DemoHBaseSink$ReduceClass.reduce(DemoHBaseSink.java:1)
        at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:333)
        at org.apache.hadoop.mapred.TaskTracker$Child.main(TaskTracker.java:2084)

08/06/04 18:03:49 INFO mapred.JobClient:  map 100% reduce 100%
Exception in thread "main" java.io.IOException: Job failed!
        at org.apache.hadoop.mapred.JobClient.runJob(JobClient.java:892)
        at tw.org.nchc.demo.DemoHBaseSink.main(DemoHBaseSink.java:108)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at org.apache.hadoop.util.RunJar.main(RunJar.java:155)
}}}
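
Why it fails: Text.getBytes() returns a raw byte[], and calling toString() on a Java array yields its identity string (something like "[B@1e63e3d") rather than the line contents. Splitting that on a space therefore produces a single-element array, and the access to str[1] throws the ArrayIndexOutOfBoundsException reported at DemoHBaseSink.java:63. A minimal sketch of the likely fix, assuming the input lines really are single-space separated:

{{{
// inside ReduceClass.reduce(): convert the Text value to a String
// via toString(), which decodes only the valid bytes of the value,
// instead of stringifying the raw byte[] array
String[] str = values.next().toString().split(" ");
}}}

Note that new String(values.next().getBytes()) would not be reliable either: Text.getBytes() exposes the whole backing buffer, which may be longer than the valid data (only the first getLength() bytes are meaningful), so Text.toString() is the safe conversion here.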