Index: /sample/HBaseRecordPro.java
===================================================================
--- /sample/HBaseRecordPro.java	(revision 15)
+++ /sample/HBaseRecordPro.java	(revision 16)
@@ -13,49 +13,46 @@
  * 	Make sure Hadoop file system and Hbase are running correctly.
  * 	1. put test.txt in t1 directory which content is 
-	---------------
-	name:locate:years 
-	waue:taiwan:1981
-	shellon:taiwan:1981
-	---------------
+ ---------------
+ name:locate:years 
+ waue:taiwan:1981
+ shellon:taiwan:1981
+ ---------------
  * 	2. hadoop_root/$ bin/hadoop dfs -put t1 t1
  * 	3. hbase_root/$ bin/hbase shell
  * 	4. hql > create table t1_table("person");
  * 	5. Come to Eclipse and run this code, and we will let database as that 
- 	t1_table -> person
-	  ----------------
-	  |  name | locate | years |
-	  | waue  | taiwan | 1981 |
-	  | shellon | taiwan | 1981 |
-	  ----------------
+ t1_table -> person
+ ----------------
+ |  name | locate | years |
+ | waue  | taiwan | 1981 |
+ | shellon | taiwan | 1981 |
+ ----------------
  * Check Result:
  * 	Go to hbase console, type : 
  * 		hql > select * from t1_table; 
-08/06/06 12:20:48 INFO hbase.HTable: Creating scanner over t1_table starting at key 
-+-------------------------+-------------------------+-------------------------+
-| Row                     | Column                  | Cell                    |
-+-------------------------+-------------------------+-------------------------+
-| 0                       | person:locate           | locate                  |
-+-------------------------+-------------------------+-------------------------+
-| 0                       | person:name             | name                    |
-+-------------------------+-------------------------+-------------------------+
-| 0                       | person:years            | years                   |
-+-------------------------+-------------------------+-------------------------+
-| 19                      | person:locate           | taiwan                  |
-+-------------------------+-------------------------+-------------------------+
-| 19                      | person:name             | waue                    |
-+-------------------------+-------------------------+-------------------------+
-| 19                      | person:years            | 1981                    |
-+-------------------------+-------------------------+-------------------------+
-| 36                      | person:locate           | taiwan                  |
-+-------------------------+-------------------------+-------------------------+
-| 36                      | person:name             | shellon                 |
-+-------------------------+-------------------------+-------------------------+
-| 36                      | person:years            | 1981                    |
-+-------------------------+-------------------------+-------------------------+
-3 row(s) in set. (0.04 sec)
+ 08/06/06 12:20:48 INFO hbase.HTable: Creating scanner over t1_table starting at key 
+ +-------------------------+-------------------------+-------------------------+
+ | Row                     | Column                  | Cell                    |
+ +-------------------------+-------------------------+-------------------------+
+ | 0                       | person:locate           | locate                  |
+ +-------------------------+-------------------------+-------------------------+
+ | 0                       | person:name             | name                    |
+ +-------------------------+-------------------------+-------------------------+
+ | 0                       | person:years            | years                   |
+ +-------------------------+-------------------------+-------------------------+
+ | 19                      | person:locate           | taiwan                  |
+ +-------------------------+-------------------------+-------------------------+
+ | 19                      | person:name             | waue                    |
+ +-------------------------+-------------------------+-------------------------+
+ | 19                      | person:years            | 1981                    |
+ +-------------------------+-------------------------+-------------------------+
+ | 36                      | person:locate           | taiwan                  |
+ +-------------------------+-------------------------+-------------------------+
+ | 36                      | person:name             | shellon                 |
+ +-------------------------+-------------------------+-------------------------+
+ | 36                      | person:years            | 1981                    |
+ +-------------------------+-------------------------+-------------------------+
+ 3 row(s) in set. (0.04 sec)
  */
-
-
-
 
 package tw.org.nchc.code;
@@ -64,4 +61,5 @@
 import java.io.BufferedWriter;
 import java.io.File;
+import java.io.FileOutputStream;
 import java.io.FileReader;
 import java.io.FileWriter;
@@ -83,74 +81,104 @@
 
 
+
+class ReduceClass extends TableReduce<LongWritable, Text> {
+	// In this sample the map phase is unused; all the work is done in reduce
+	public void reduce(LongWritable key, Iterator<Text> values,
+			OutputCollector<Text, MapWritable> output, Reporter reporter)
+			throws IOException {
+		String sp = ":";
+		String bf = "person:";
+		// this map holds the columns per row
+		MapWritable map = new MapWritable();
+		// values.next().getBytes() returns the raw bytes of the value; wrap in a String
+		String stro = new String(values.next().getBytes());
+		String str[] = stro.split(sp);
+		
+		BufferedReader fconf = new BufferedReader(new FileReader(new File("/tmp/fi_conf.tmp")));
+		// int length = cf.length;
+		
+		
+		
+		// debug only: dump the column-family names to a local file
+		FileOutputStream out = new FileOutputStream(new File(
+				"/home/waue/mr-result.txt"));
+		String first_line = fconf.readLine();
+		
+		// split the header line into column-family names
+		String[] cf = first_line.split(sp);
+		int length = cf.length;
+		for(int i=0 ; i<length; i ++){
+			out.write((bf + cf[i]+"\n").getBytes());
+			
+		}
+		out.close();
+		// Column ids are created dynamically,
+		Text[] col_n = new Text[length];
+		byte[][] b_l = new byte[length][];
+		// contents must be ImmutableBytesWritable
+		ImmutableBytesWritable[] w_l = new ImmutableBytesWritable[length];
+		map.clear();
+
+		for (int i = 0; i < length; i++) {
+			col_n[i] = new Text(bf + cf[i]);
+			b_l[i] = str[i].getBytes();
+			w_l[i] = new ImmutableBytesWritable(b_l[i]);
+			// populate the current row
+			map.put(col_n[i], w_l[i]);
+		}
+		// add the row with the key as the row id
+		output.collect(new Text(key.toString()), map);
+	}
+}
+
 public class HBaseRecordPro {
 
 	/* Denify parameter */
+
+	// file path in the Hadoop file system (not the physical/local file system)
+	final String file_path = "/home/waue/test.txt";
+
+	// setup MapTask and Reduce Task
+
+	final String bf = "person:";
+
+	final String table_name = "testend";
+
+	final String sp = ":";
+
+	String[] cf ;
 	
-	// file path in hadoop file system (not phisical file system)
-	private String file_path = "/home/waue/test.txt";
-	
-	// setup MapTask and Reduce Task
-
-	
-	private final static String bf = "person:";
-	private final String table_name = "testpro";
-
-	private final static String sp = ":";
-	private static String[] cf;
-	
-	private static class ReduceClass extends TableReduce<LongWritable, Text> {
-
-
-		
-		// on this sample, map is nonuse, we use reduce to handle
-		public void reduce(LongWritable key, Iterator<Text> values,
-				OutputCollector<Text, MapWritable> output, Reporter reporter)
-				throws IOException {
-
-			// this map holds the columns per row
-			MapWritable map = new MapWritable();	
-			// values.next().getByte() can get value and transfer to byte form, 
-			String stro = new String(values.next().getBytes());
-			String str[] = stro.split(sp);
-			
-			int length = cf.length;
-			
-			// Column id is created dymanically, 
-			Text[] col_n = new Text[length];
-			byte[][] b_l = new byte[length][];
-			// contents must be ImmutableBytesWritable
-			ImmutableBytesWritable[] w_l = new ImmutableBytesWritable[length];
-			map.clear();
-			for(int i = 0; i < length; i++){
-				col_n[i] = new Text(bf+cf[i]);
-				b_l[i] = str[i].getBytes();
-				w_l[i] = new ImmutableBytesWritable(b_l[i]);
-				// populate the current row
-				map.put(col_n[i], w_l[i]);
+	String test;
+
+	public HBaseRecordPro() {
+
+
+	}
+	public HBaseRecordPro(String[] st) {
+		cf = st;
+	}
+
+	static public String parseFirstLine(String in, String ou) throws IOException {
+		BufferedReader fi = new BufferedReader(new FileReader(new File(in)));
+		BufferedWriter ff = new BufferedWriter(new FileWriter(new File("/tmp/fi_conf.tmp")));
+		BufferedWriter fw = new BufferedWriter(new FileWriter(new File(ou)));
+		String first_line, data;
+		first_line = fi.readLine();
+		ff.write(first_line);
+		ff.flush();
+		do {
+			data = fi.readLine();
+			if (data == null) {
+				break;
+			} else {
+				fw.write(data + "\n");
+				fw.flush();
 			}
-			// add the row with the key as the row id
-			output.collect(new Text(key.toString()), map);
-		}
-	}
-
-	private HBaseRecordPro() {
-	}
-	
-	String parseFirstLine(String in, String ou) throws IOException {
-		BufferedReader fi = new BufferedReader(new FileReader(new File(in)));
-		BufferedWriter fw = new BufferedWriter(new FileWriter(new File(ou)));
-		String first_line,data;
-		first_line = fi.readLine();
-		do{
-			data = fi.readLine();
-			if( data == null){
-				break;
-			}else{
-				fw.write(data+"\n");
-			}
-		}while(true); 	
+		} while (true);
 		fw.close();
+		ff.close();
 		return first_line;
 	}
+
 	/**
 	 * Runs the demo.
@@ -158,20 +186,27 @@
 	public static void main(String[] args) throws IOException {
 
+		String bf = "person:";
+		String file_path = "/home/waue/test.txt";
+		
 		final String file_tmp = "/tmp/HBaseRecord.text.tmp";
 		final int mapTasks = 1;
 		final int reduceTasks = 1;
-		String[] column_family = {bf};
+		String[] column_family = { bf };
 		
 		HBaseRecordPro setup = new HBaseRecordPro();
 		
-		String first_line = setup.parseFirstLine(setup.file_path, file_tmp);
+		String first_line = parseFirstLine(file_path, file_tmp);
 		System.out.println(first_line);
-		HBaseRecordPro.cf = first_line.split(sp); 
-		//test
-		for(int i =0 ; i< cf.length; i++){
-			System.out.println("column["+i+"]"+bf+cf[i]);
-		}
-
-		BuildHTable build_table = new BuildHTable(setup.table_name,column_family);
+//		HBaseRecordPro.cf = first_line.split(sp);
+
+		
+		// test
+		/*
+		for (int i = 0; i < 3; i++) {
+			System.out.println("column[" + i + "]=" + bf + cf[i]);
+		}*/
+
+		BuildHTable build_table = new BuildHTable(setup.table_name,
+				column_family);
 		if (!build_table.checkTableExist(setup.table_name)) {
 			if (!build_table.createTable()) {
@@ -181,9 +216,9 @@
 			System.out.println("Table \"" + setup.table_name
 					+ "\" has already existed !");
-		}		
+		}
 		JobConf conf = new JobConf(HBaseRecordPro.class);
 		FileSystem fileconf = FileSystem.get(conf);
-		fileconf.copyFromLocalFile(true,new Path(file_tmp), new Path(file_tmp));
-		//Job name; you can modify to any you like  
+		fileconf.copyFromLocalFile(true, new Path(file_tmp), new Path(file_tmp));
+	// Job name; you can change it to anything you like
 		conf.setJobName("PersonDataBase");
 
@@ -198,4 +233,5 @@
 		conf.setCombinerClass(IdentityReducer.class);
 		conf.setReducerClass(ReduceClass.class);
+
 		JobClient.runJob(conf);
 		
