Index: /sample/HBaseRecord.java
===================================================================
--- /sample/HBaseRecord.java	(revision 17)
+++ /sample/HBaseRecord.java	(revision 18)
@@ -3,5 +3,6 @@
  * Editor: Waue Chen 
  * From :  NCHC. Taiwn
- * Last Update Date: 06/01/2008
+ * Last Update Date: 07/02/2008
+ * Upgrade to 0.17
  */
 
@@ -13,49 +14,46 @@
  * 	Make sure Hadoop file system and Hbase are running correctly.
  * 	1. put test.txt in t1 directory which content is 
-	---------------
-	name:locate:years 
-	waue:taiwan:1981
-	shellon:taiwan:1981
-	---------------
+ ---------------
+ name:locate:years 
+ waue:taiwan:1981
+ shellon:taiwan:1981
+ ---------------
  * 	2. hadoop_root/$ bin/hadoop dfs -put t1 t1
  * 	3. hbase_root/$ bin/hbase shell
  * 	4. hql > create table t1_table("person");
  * 	5. Come to Eclipse and run this code, and we will let database as that 
- 	t1_table -> person
-	  ----------------
-	  |  name | locate | years |
-	  | waue  | taiwan | 1981 |
-	  | shellon | taiwan | 1981 |
-	  ----------------
+ t1_table -> person
+ ----------------
+ |  name | locate | years |
+ | waue  | taiwan | 1981 |
+ | shellon | taiwan | 1981 |
+ ----------------
  * Check Result:
  * 	Go to hbase console, type : 
  * 		hql > select * from t1_table; 
-08/06/06 12:20:48 INFO hbase.HTable: Creating scanner over t1_table starting at key 
-+-------------------------+-------------------------+-------------------------+
-| Row                     | Column                  | Cell                    |
-+-------------------------+-------------------------+-------------------------+
-| 0                       | person:locate           | locate                  |
-+-------------------------+-------------------------+-------------------------+
-| 0                       | person:name             | name                    |
-+-------------------------+-------------------------+-------------------------+
-| 0                       | person:years            | years                   |
-+-------------------------+-------------------------+-------------------------+
-| 19                      | person:locate           | taiwan                  |
-+-------------------------+-------------------------+-------------------------+
-| 19                      | person:name             | waue                    |
-+-------------------------+-------------------------+-------------------------+
-| 19                      | person:years            | 1981                    |
-+-------------------------+-------------------------+-------------------------+
-| 36                      | person:locate           | taiwan                  |
-+-------------------------+-------------------------+-------------------------+
-| 36                      | person:name             | shellon                 |
-+-------------------------+-------------------------+-------------------------+
-| 36                      | person:years            | 1981                    |
-+-------------------------+-------------------------+-------------------------+
-3 row(s) in set. (0.04 sec)
+ 08/06/06 12:20:48 INFO hbase.HTable: Creating scanner over t1_table starting at key 
+ +-------------------------+-------------------------+-------------------------+
+ | Row                     | Column                  | Cell                    |
+ +-------------------------+-------------------------+-------------------------+
+ | 0                       | person:locate           | locate                  |
+ +-------------------------+-------------------------+-------------------------+
+ | 0                       | person:name             | name                    |
+ +-------------------------+-------------------------+-------------------------+
+ | 0                       | person:years            | years                   |
+ +-------------------------+-------------------------+-------------------------+
+ | 19                      | person:locate           | taiwan                  |
+ +-------------------------+-------------------------+-------------------------+
+ | 19                      | person:name             | waue                    |
+ +-------------------------+-------------------------+-------------------------+
+ | 19                      | person:years            | 1981                    |
+ +-------------------------+-------------------------+-------------------------+
+ | 36                      | person:locate           | taiwan                  |
+ +-------------------------+-------------------------+-------------------------+
+ | 36                      | person:name             | shellon                 |
+ +-------------------------+-------------------------+-------------------------+
+ | 36                      | person:years            | 1981                    |
+ +-------------------------+-------------------------+-------------------------+
+ 3 row(s) in set. (0.04 sec)
  */
-
-
-
 
 package tw.org.nchc.code;
@@ -77,32 +75,40 @@
 import org.apache.hadoop.mapred.lib.IdentityReducer;
 
-
 public class HBaseRecord {
 
 	/* Denify parameter */
 	// one column family: person; three column qualifier: name,locate,years
-	static private String  baseId1 ="person:name";
-	static private String  baseId2 ="person:locate";
-	static private String  baseId3 ="person:years";
-	//split character
+	static private String baseId1 = "person:name";
+
+	static private String baseId2 = "person:locate";
+
+	static private String baseId3 = "person:years";
+
+	// split character
 	static private String sp = ":";
+
 	// file path in hadoop file system (not phisical file system)
 	String file_path = "/user/waue/t1";
+
 	// Hbase table name
 	String table_name = "t1_table";
+
 	// setup MapTask and Reduce Task
 	int mapTasks = 1;
+
 	int reduceTasks = 1;
-	
+
 	private static class ReduceClass extends TableReduce<LongWritable, Text> {
 
-		// Column id is created dymanically, 
+		// Column id is created dynamically,
 		private static final Text col_name = new Text(baseId1);
+
 		private static final Text col_local = new Text(baseId2);
+
 		private static final Text col_year = new Text(baseId3);
-		
+
 		// this map holds the columns per row
-		private MapWritable map = new MapWritable();	
-		
+		private MapWritable map = new MapWritable();
+
 		// on this sample, map is nonuse, we use reduce to handle
 		public void reduce(LongWritable key, Iterator<Text> values,
@@ -110,6 +116,7 @@
 				throws IOException {
 
-			// values.next().getByte() can get value and transfer to byte form, there is an other way that let decode()
-			// to substitude getByte() 
+			// values.next().getBytes() gets the value and transfers it to byte
+			// form; another way is to let decode()
+			// substitute for getBytes()
 			String stro = new String(values.next().getBytes());
 			String str[] = stro.split(sp);
@@ -117,9 +124,9 @@
 			byte b_name[] = str[1].getBytes();
 			byte b_year[] = str[2].getBytes();
-			
+
 			// contents must be ImmutableBytesWritable
-			ImmutableBytesWritable w_local = new ImmutableBytesWritable( b_local);
-			ImmutableBytesWritable w_name = new ImmutableBytesWritable( b_name );
-			ImmutableBytesWritable w_year = new ImmutableBytesWritable( b_year );
+			ImmutableBytesWritable w_local = new ImmutableBytesWritable(b_local);
+			ImmutableBytesWritable w_name = new ImmutableBytesWritable(b_name);
+			ImmutableBytesWritable w_year = new ImmutableBytesWritable(b_year);
 
 			// populate the current row
@@ -141,19 +148,23 @@
 	 */
 	public static void main(String[] args) throws IOException {
-		// which path of input files in Hadoop file system 	
-		
+		// which path of input files in Hadoop file system
+
 		HBaseRecord setup = new HBaseRecord();
 		JobConf conf = new JobConf(HBaseRecord.class);
 
-		//Job name; you can modify to any you like  
+		// Job name; you can modify it to anything you like
 		conf.setJobName("NCHC_PersonDataBase");
 
 		// Hbase table name must be correct , in our profile is t1_table
 		TableReduce.initJob(setup.table_name, ReduceClass.class, conf);
-		
+
 		// below are map-reduce profile
 		conf.setNumMapTasks(setup.mapTasks);
 		conf.setNumReduceTasks(setup.reduceTasks);
-		conf.setInputPath(new Path(setup.file_path));
+
+		// 0.16
+		// conf.setInputPath(new Path(setup.file_path));
+		Convert.setInputPath(conf, new Path(setup.file_path));
+
 		conf.setMapperClass(IdentityMapper.class);
 		conf.setCombinerClass(IdentityReducer.class);
Index: /sample/HBaseRecord2.java
===================================================================
--- /sample/HBaseRecord2.java	(revision 17)
+++ /sample/HBaseRecord2.java	(revision 18)
@@ -3,5 +3,6 @@
  * Editor: Waue Chen 
  * From :  NCHC. Taiwn
- * Last Update Date: 06/01/2008
+ * Last Update Date: 07/01/2008
+ * Upgrade to 0.17
  */
 
@@ -53,5 +54,4 @@
 package tw.org.nchc.code;
 
-import java.io.FileInputStream;
 import java.io.IOException;
 import java.util.Iterator;
@@ -140,6 +140,4 @@
 					+ "\" has already existed !");
 		}
-		FileInputStream fi = new FileInputStream(setup.file_path);
-		
 		
 		JobConf conf = new JobConf(HBaseRecord2.class);
@@ -154,5 +152,7 @@
 		conf.setNumMapTasks(setup.mapTasks);
 		conf.setNumReduceTasks(setup.reduceTasks);
-		conf.setInputPath(new Path(setup.file_path));
+		// 0.16
+//		conf.setInputPath(new Path(setup.file_path));
+		Convert.setInputPath(conf, new Path(setup.file_path));
 		conf.setMapperClass(IdentityMapper.class);
 		conf.setCombinerClass(IdentityReducer.class);
Index: /sample/HBaseRecordPro.java
===================================================================
--- /sample/HBaseRecordPro.java	(revision 17)
+++ /sample/HBaseRecordPro.java	(revision 18)
@@ -1,7 +1,8 @@
 /**
- * Program: HBaseRecord.java
+ * Program: HBaseRecordPro.java
  * Editor: Waue Chen 
  * From :  NCHC. Taiwn
- * Last Update Date: 06/01/2008
+ * Last Update Date: 07/02/2008
+ * Upgrade to 0.17
  */
 
@@ -227,5 +228,8 @@
 		conf.setNumMapTasks(mapTasks);
 		conf.setNumReduceTasks(reduceTasks);
-		conf.setInputPath(text_path);
+		// 0.16
+//		conf.setInputPath(text_path);
+		Convert.setInputPath(conf, text_path);
+		
 		conf.setMapperClass(IdentityMapper.class);
 		conf.setCombinerClass(IdentityReducer.class);
@@ -235,5 +239,8 @@
 		
 		// delete tmp file
-		FileSystem.get(conf).delete(text_path);
+		// 0.16
+//		FileSystem.get(conf).delete(text_path);
+		FileSystem.get(conf).delete(text_path,true);
+		
 		setup.deleteFile(conf_tmp);
 	}
Index: /sample/WordCount.java
===================================================================
--- /sample/WordCount.java	(revision 17)
+++ /sample/WordCount.java	(revision 18)
@@ -3,5 +3,6 @@
  * Editor: Waue Chen 
  * From :  NCHC. Taiwn
- * Last Update Date: 06/13/2008
+ * Last Update Date: 07/02/2008
+ * Upgrade to 0.17
  */
 
@@ -40,17 +41,19 @@
 import org.apache.hadoop.mapred.Reporter;
 
-
 public class WordCount {
 	private String filepath;
+
 	private String outputPath;
-	
-	public WordCount(){
+
+	public WordCount() {
 		filepath = "/user/waue/input/";
 		outputPath = "counts1";
 	}
-	public WordCount(String path,String output){
+
+	public WordCount(String path, String output) {
 		filepath = path;
 		outputPath = output;
 	}
+
 	// mapper: emits (token, 1) for every word occurrence
 	private static class MapClass extends MapReduceBase implements
@@ -59,4 +62,5 @@
 		// reuse objects to save overhead of object creation
 		private final static IntWritable one = new IntWritable(1);
+
 		private Text word = new Text();
 
@@ -93,5 +97,4 @@
 	}
 
-	
 	/**
 	 * Runs the demo.
@@ -99,5 +102,5 @@
 	public static void main(String[] args) throws IOException {
 		WordCount wc = new WordCount();
-		
+
 		int mapTasks = 1;
 		int reduceTasks = 1;
@@ -107,17 +110,21 @@
 		conf.setNumMapTasks(mapTasks);
 		conf.setNumReduceTasks(reduceTasks);
-
-		conf.setInputPath(new Path(wc.filepath));
+		// 0.16
+		// conf.setInputPath(new Path(wc.filepath));
+		Convert.setInputPath(conf, new Path(wc.filepath));
 		conf.setOutputKeyClass(Text.class);
 		conf.setOutputValueClass(IntWritable.class);
-		conf.setOutputPath(new Path(wc.outputPath));
+		// 0.16
+		// conf.setOutputPath(new Path(wc.outputPath));
+		Convert.setOutputPath(conf, new Path(wc.outputPath));
 
 		conf.setMapperClass(MapClass.class);
 		conf.setCombinerClass(ReduceClass.class);
 		conf.setReducerClass(ReduceClass.class);
-		
+
 		// Delete the output directory if it exists already
 		Path outputDir = new Path(wc.outputPath);
-		FileSystem.get(conf).delete(outputDir);
+		// 0.16
+		FileSystem.get(conf).delete(outputDir,true);
 
 		JobClient.runJob(conf);
Index: /sample/WordCountFromHBase.java
===================================================================
--- /sample/WordCountFromHBase.java	(revision 17)
+++ /sample/WordCountFromHBase.java	(revision 18)
@@ -3,5 +3,6 @@
  * Editor: Waue Chen 
  * From :  NCHC. Taiwn
- * Last Update Date: 06/13/2008
+ * Last Update Date: 07/02/2008
+ * Upgrade to 0.17
  */
 
@@ -25,7 +26,5 @@
 import java.util.Iterator;
 import java.util.StringTokenizer;
-import java.io.FileOutputStream;
-import java.io.File;
-import java.io.RandomAccessFile;
+
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -169,7 +168,9 @@
 		// input is Hbase format => TableInputFormat
 		conf.setInputFormat(TableInputFormat.class);
-		conf.setOutputPath(new Path(outputPath));
+		// 0.16
+//		conf.setOutputPath(new Path(outputPath));
+		Convert.setOutputPath(conf, new Path(outputPath));
 //		 delete the old path with the same name 
-		FileSystem.get(conf).delete(new Path(outputPath));
+		FileSystem.get(conf).delete(new Path(outputPath),true);
 		JobClient.runJob(conf);
 	}
Index: /sample/WordCountIntoHBase.java
===================================================================
--- /sample/WordCountIntoHBase.java	(revision 17)
+++ /sample/WordCountIntoHBase.java	(revision 18)
@@ -3,5 +3,6 @@
  * Editor: Waue Chen 
  * From :  NCHC. Taiwn
- * Last Update Date: 06/10/2008
+ * Last Update Date: 07/02/2008
+ * Upgrade to 0.17
  */
 
@@ -43,11 +44,14 @@
 
 	/* setup parameters */
-	// $Input_Path. Please make sure the path is correct and contains input files
+	// $Input_Path. Please make sure the path is correct and contains input
+	// files
 	static final String Input_Path = "/user/waue/simple";
+
 	// Hbase table name, the program will create it
 	static final String Table_Name = "word_count5";
+
 	// column name, the program will create it
-	static final String colstr = "word:text" ;
-	
+	static final String colstr = "word:text";
+
 	// constructor
 	private WordCountIntoHBase() {
@@ -57,14 +61,16 @@
 		// set (column_family:column_qualify)
 		private static final Text col = new Text(WordCountIntoHBase.colstr);
+
 		// this map holds the columns per row
 		private MapWritable map = new MapWritable();
+
 		public void reduce(LongWritable key, Iterator<Text> values,
 				OutputCollector<Text, MapWritable> output, Reporter reporter)
 				throws IOException {
 			// contents must be ImmutableBytesWritable
-			ImmutableBytesWritable bytes = 
-				new ImmutableBytesWritable(values.next().getBytes());			
+			ImmutableBytesWritable bytes = new ImmutableBytesWritable(values
+					.next().getBytes());
 			map.clear();
-			// write data 
+			// write data
 			map.put(col, bytes);
 			// add the row with the key as the row id
@@ -76,18 +82,19 @@
 	 * Runs the demo.
 	 */
-	public static void main(String[] args) throws IOException {	
+	public static void main(String[] args) throws IOException {
 		// parse colstr to split column family and column qualify
 		String tmp[] = colstr.split(":");
-		String Column_Family = tmp[0]+":";
-		String CF[] = {Column_Family};
-		// check whether create table or not , we don't admit \ 
+		String Column_Family = tmp[0] + ":";
+		String CF[] = { Column_Family };
+		// check whether to create the table or not; we don't admit \
 		// the same name but different structure
-		BuildHTable build_table = new BuildHTable(Table_Name,CF);
+		BuildHTable build_table = new BuildHTable(Table_Name, CF);
 		if (!build_table.checkTableExist(Table_Name)) {
 			if (!build_table.createTable()) {
 				System.out.println("create table error !");
 			}
-		}else{
-			System.out.println("Table \"" + Table_Name +"\" has already existed !");
+		} else {
+			System.out.println("Table \"" + Table_Name
+					+ "\" has already existed !");
 		}
 		int mapTasks = 1;
@@ -100,5 +107,7 @@
 		conf.setNumMapTasks(mapTasks);
 		conf.setNumReduceTasks(reduceTasks);
-		conf.setInputPath(new Path(Input_Path));
+		// 0.16
+		// conf.setInputPath(new Path(Input_Path));
+		Convert.setInputPath(conf, new Path(Input_Path));
 		conf.setMapperClass(IdentityMapper.class);
 		conf.setCombinerClass(IdentityReducer.class);
