source: sample/hadoop-0.17/tw/org/nchc/demo/DemoReadPackedRecords.java

Last change on this file was 20, checked in by waue, 16 years ago

Backing up the finished Hadoop 0.17 package here.
Development currently continues on Hadoop 0.16 + HBase 1.3.

File size: 1.5 KB
/*
 * Cloud9: A MapReduce Library for Hadoop
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You may
 * obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

package tw.org.nchc.demo;

import java.io.IOException;

import tw.org.nchc.tuple.Tuple;
import tw.org.nchc.util.LocalTupleRecordReader;

/**
 * Demo that illustrates how to read records from a local SequenceFile. Dumps
 * the contents of the SequenceFile generated by {@link DemoPackRecords}.
 */
public class DemoReadPackedRecords {
  private DemoReadPackedRecords() {
  }

  private static final Tuple tuple = new Tuple();

  /**
   * Runs the demo.
   */
  public static void main(String[] args) throws IOException {
    String file = "../umd-hadoop-dist/sample-input/bible+shakes.nopunc.packed";

    // open local records file
    LocalTupleRecordReader reader = new LocalTupleRecordReader(file);
    // iterate over all tuples
    while (reader.read(tuple)) {
      // print out each tuple
      System.out.println(tuple);
    }
    reader.close();

    System.out.println("Read " + reader.getRecordCount() + " records.");
  }

}
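
For context, LocalTupleRecordReader appears to act as a convenience wrapper for reading a tuple SequenceFile from the local file system. The sketch below shows what an equivalent read loop might look like against Hadoop 0.17's SequenceFile.Reader API directly. The class name DemoReadPackedRecordsRaw is hypothetical, and the key/value layout (LongWritable keys, Tuple values) is an assumption about what DemoPackRecords writes, not something confirmed by this file.

/*
 * Sketch (assumption): reading the same packed file with Hadoop's
 * SequenceFile.Reader. The key class LongWritable is assumed; adjust it
 * if DemoPackRecords writes a different key type.
 */
package tw.org.nchc.demo;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;

import tw.org.nchc.tuple.Tuple;

public class DemoReadPackedRecordsRaw {
  public static void main(String[] args) throws IOException {
    String file = "../umd-hadoop-dist/sample-input/bible+shakes.nopunc.packed";

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);

    // open the SequenceFile on the local file system
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, new Path(file), conf);

    LongWritable key = new LongWritable();
    Tuple value = new Tuple();

    long count = 0;
    // iterate over all key/value pairs and dump each tuple
    while (reader.next(key, value)) {
      System.out.println(value);
      count++;
    }
    reader.close();

    System.out.println("Read " + count + " records.");
  }
}

Keeping the count in a local variable here mirrors what getRecordCount() on LocalTupleRecordReader presumably reports in the demo above.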