/**
 * Program: DemoHBaseSink.java
 * Editor: Waue Chen
 * From: NCHC, Taiwan
 * Last Update Date: 07/02/2008
 * Upgraded to Hadoop 0.17
 * Re-coded from: Cloud9: A MapReduce Library for Hadoop
 */
/*
 * Cloud9: A MapReduce Library for Hadoop
 */

package tw.org.nchc.demo;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapred.TableReduce;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;

import tw.org.nchc.code.Convert;

/**
 * Demo of sinking MapReduce output into an HBase table: each input value is
 * written to the "default:text" column of the "test" table, using the record
 * key as the row id.
 */
public class DemoHBaseSink {

    private static class ReduceClass extends TableReduce<LongWritable, Text> {

        // this is the column we're going to be writing
        private static final Text col = new Text("default:text");

        // this map holds the columns per row
        private MapWritable map = new MapWritable();

        public void reduce(LongWritable key, Iterator<Text> values,
                OutputCollector<Text, MapWritable> output, Reporter reporter)
                throws IOException {

            // contents must be ImmutableBytesWritable
            ImmutableBytesWritable bytes = new ImmutableBytesWritable(values
                    .next().getBytes());

            // populate the current row
            map.clear();
            map.put(col, bytes);

            // add the row with the key as the row id
            output.collect(new Text(key.toString()), map);
        }
    }

    private DemoHBaseSink() {
    }

    /**
     * Runs the demo.
     */
    public static void main(String[] args) throws IOException {
        String filename = "/shared/sample";

        int mapTasks = 1;
        int reduceTasks = 1;

        JobConf conf = new JobConf(DemoHBaseSink.class);
        conf.setJobName("wordcount");

        // must initialize the TableReduce before running the job
        TableReduce.initJob("test", ReduceClass.class, conf);

        conf.setNumMapTasks(mapTasks);
        conf.setNumReduceTasks(reduceTasks);
        // 0.16: conf.setInputPath(new Path(filename));
        Convert.setInputPath(conf, new Path(filename));

        // pass input records through unchanged; ReduceClass writes them into HBase
        conf.setMapperClass(IdentityMapper.class);
        conf.setCombinerClass(IdentityReducer.class);
        conf.setReducerClass(ReduceClass.class);

        JobClient.runJob(conf);
    }
}