有錯誤的 sample code 如下：
| 2 | |
| 3 | {{{ |
| 4 | /* |
| 5 | * NCHC Hbase with map reduce sample code |
| 6 | * DemoHBaseSlink.java |
| 7 | */ |
| 8 | |
| 9 | package tw.org.nchc.demo; |
| 10 | |
| 11 | import java.io.IOException; |
| 12 | import java.util.Iterator; |
| 13 | |
| 14 | import org.apache.hadoop.fs.Path; |
| 15 | import org.apache.hadoop.hbase.io.ImmutableBytesWritable; |
| 16 | import org.apache.hadoop.hbase.mapred.TableReduce; |
| 17 | import org.apache.hadoop.io.LongWritable; |
| 18 | import org.apache.hadoop.io.MapWritable; |
| 19 | import org.apache.hadoop.io.Text; |
| 20 | import org.apache.hadoop.mapred.JobClient; |
| 21 | import org.apache.hadoop.mapred.JobConf; |
| 22 | import org.apache.hadoop.mapred.OutputCollector; |
| 23 | import org.apache.hadoop.mapred.Reporter; |
| 24 | import org.apache.hadoop.mapred.lib.IdentityMapper; |
| 25 | import org.apache.hadoop.mapred.lib.IdentityReducer; |
| 26 | |
| 27 | /** |
| 28 | * This sample code will put the indicate data to Hbase. |
| 29 | * 1. put test.txt in t1 directory which content is |
| 30 | --------------- |
| 31 | name locate years |
| 32 | waue taiwan 1981 |
| 33 | shellon taiwan 1981 |
| 34 | --------------- |
| 35 | * 2. hadoop_root/$ bin/hadoop dfs -put t1 t1 |
| 36 | * 3. hbase_root/$ bin/hbase shell |
| 37 | * 4. hql > create table t1_table("person"); |
| 38 | * 5. run this code, and we will let database as that |
| 39 | t1_table -> person |
| 40 | ---------------- |
| 41 | | name | locate | years | |
| 42 | ---------------- |
| 43 | | waue | taiwan | 1981 | |
| 44 | ---------------- |
| 45 | | shellon | taiwan | 1981 | |
| 46 | |
| 47 | **/ |
| 48 | public class DemoHBaseSink { |
| 49 | |
| 50 | private static class ReduceClass extends TableReduce<LongWritable, Text> { |
| 51 | |
| 52 | // this is the column we're going to be writing |
| 53 | private static final Text col1 = new Text("person:name"); |
| 54 | private static final Text col2 = new Text("person:locate"); |
| 55 | private static final Text col3 = new Text("person:years"); |
| 56 | // this map holds the columns per row |
| 57 | private MapWritable map = new MapWritable(); |
| 58 | |
| 59 | public void reduce(LongWritable key, Iterator<Text> values, |
| 60 | OutputCollector<Text, MapWritable> output, Reporter reporter) |
| 61 | throws IOException { |
| 62 | |
| 63 | // contents must be ImmutableBytesWritable |
| 64 | String[] str = (values.next().getBytes().toString()).split(" "); |
| 65 | byte bl[] = str[0].getBytes(); |
| 66 | byte bn[] = str[1].getBytes(); |
| 67 | byte by[] = str[2].getBytes(); |
| 68 | ImmutableBytesWritable bytes1 = new ImmutableBytesWritable( bl); |
| 69 | ImmutableBytesWritable bytes2 = new ImmutableBytesWritable( bn ); |
| 70 | ImmutableBytesWritable bytes3 = new ImmutableBytesWritable( by ); |
| 71 | // ImmutableBytesWritable bytes3 = new ImmutableBytesWritable(values.next().getBytes()); |
| 72 | // populate the current row |
| 73 | map.clear(); |
| 74 | map.put(col1, bytes1); |
| 75 | map.put(col2, bytes2); |
| 76 | map.put(col3, bytes3); |
| 77 | |
| 78 | // add the row with the key as the row id |
| 79 | output.collect(new Text(key.toString()), map); |
| 80 | } |
| 81 | } |
| 82 | |
| 83 | private DemoHBaseSink() { |
| 84 | } |
| 85 | |
| 86 | /** |
| 87 | * Runs the demo. |
| 88 | */ |
| 89 | public static void main(String[] args) throws IOException { |
| 90 | // which path of input files in Hadoop file system |
| 91 | String file_path = "/user/waue/t1"; |
| 92 | |
| 93 | int mapTasks = 1; |
| 94 | int reduceTasks = 1; |
| 95 | |
| 96 | JobConf conf = new JobConf(DemoHBaseSink.class); |
| 97 | conf.setJobName("DemoPersonBase"); |
| 98 | |
| 99 | // must initialize the TableReduce before running job |
| 100 | TableReduce.initJob("t1_table", ReduceClass.class, conf); |
| 101 | |
| 102 | conf.setNumMapTasks(mapTasks); |
| 103 | conf.setNumReduceTasks(reduceTasks); |
| 104 | |
| 105 | conf.setInputPath(new Path(file_path)); |
| 106 | |
| 107 | conf.setMapperClass(IdentityMapper.class); |
| 108 | conf.setCombinerClass(IdentityReducer.class); |
| 109 | conf.setReducerClass(ReduceClass.class); |
| 110 | |
| 111 | JobClient.runJob(conf); |
| 112 | } |
| 113 | } |
| 114 | }}} |
| 115 | |
錯誤訊息如下：
| 117 | {{{ |
| 118 | 08/06/04 18:03:21 INFO mapred.FileInputFormat: Total input paths to process : 1 |
| 119 | 08/06/04 18:03:22 INFO mapred.JobClient: Running job: job_200805291341_0019 |
| 120 | 08/06/04 18:03:23 INFO mapred.JobClient: map 0% reduce 0% |
| 121 | 08/06/04 18:03:28 INFO mapred.JobClient: map 100% reduce 0% |
| 122 | 08/06/04 18:03:36 INFO mapred.JobClient: Task Id : task_200805291341_0019_r_000000_0, Status : FAILED |
| 123 | java.lang.ArrayIndexOutOfBoundsException: 1 |
| 124 | at tw.org.nchc.demo.DemoHBaseSink$ReduceClass.reduce(DemoHBaseSink.java:63) |
| 125 | at tw.org.nchc.demo.DemoHBaseSink$ReduceClass.reduce(DemoHBaseSink.java:1) |
| 126 | at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:333) |
| 127 | at org.apache.hadoop.mapred.TaskTracker$Child.main(TaskTracker.java:2084) |
| 128 | |
| 129 | 08/06/04 18:03:40 INFO mapred.JobClient: Task Id : task_200805291341_0019_r_000000_1, Status : FAILED |
| 130 | java.lang.ArrayIndexOutOfBoundsException: 1 |
| 131 | at tw.org.nchc.demo.DemoHBaseSink$ReduceClass.reduce(DemoHBaseSink.java:63) |
| 132 | at tw.org.nchc.demo.DemoHBaseSink$ReduceClass.reduce(DemoHBaseSink.java:1) |
| 133 | at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:333) |
| 134 | at org.apache.hadoop.mapred.TaskTracker$Child.main(TaskTracker.java:2084) |
| 135 | |
| 136 | 08/06/04 18:03:45 INFO mapred.JobClient: Task Id : task_200805291341_0019_r_000000_2, Status : FAILED |
| 137 | java.lang.ArrayIndexOutOfBoundsException: 1 |
| 138 | at tw.org.nchc.demo.DemoHBaseSink$ReduceClass.reduce(DemoHBaseSink.java:63) |
| 139 | at tw.org.nchc.demo.DemoHBaseSink$ReduceClass.reduce(DemoHBaseSink.java:1) |
| 140 | at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:333) |
| 141 | at org.apache.hadoop.mapred.TaskTracker$Child.main(TaskTracker.java:2084) |
| 142 | |
| 143 | 08/06/04 18:03:49 INFO mapred.JobClient: map 100% reduce 100% |
| 144 | Exception in thread "main" java.io.IOException: Job failed! |
| 145 | at org.apache.hadoop.mapred.JobClient.runJob(JobClient.java:892) |
| 146 | at tw.org.nchc.demo.DemoHBaseSink.main(DemoHBaseSink.java:108) |
| 147 | at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) |
| 148 | at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39) |
| 149 | at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25) |
| 150 | at java.lang.reflect.Method.invoke(Method.java:597) |
| 151 | at org.apache.hadoop.util.RunJar.main(RunJar.java:155) |
| 152 | |
| 153 | }}} |