Index: sample/hadoop-0.16/tw/org/nchc/code/LogParser.java
===================================================================
--- sample/hadoop-0.16/tw/org/nchc/code/LogParser.java	(revision 30)
+++ sample/hadoop-0.16/tw/org/nchc/code/LogParser.java	(revision 31)
@@ -1,2 +1,9 @@
+/**
+ * Program: LogParser.java
+ * Editor: Waue Chen 
+ * From :  NCHC. Taiwan
+ * Last Update Date: 07/02/2008
+ */
+
 package tw.org.nchc.code;
 
@@ -10,5 +17,5 @@
 
 
-public class AccessLogParser {
+public class LogParser {
   private String ip;
   private String protocol;
@@ -27,5 +34,5 @@
   
   
-  public AccessLogParser(String line) throws ParseException, Exception{
+  public LogParser(String line) throws ParseException, Exception{
 	 
 	 Matcher matcher = p.matcher(line);
Index: sample/hadoop-0.16/tw/org/nchc/code/LogParserGo.java
===================================================================
--- sample/hadoop-0.16/tw/org/nchc/code/LogParserGo.java	(revision 30)
+++ sample/hadoop-0.16/tw/org/nchc/code/LogParserGo.java	(revision 31)
@@ -1,4 +1,4 @@
 /**
- * Program: LogFetcher.java
+ * Program: LogParserGo.java
  * Editor: Waue Chen 
  * From :  NCHC. Taiwn
@@ -6,26 +6,46 @@
  */
 /**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Purpose : 
+ * 	This program will parse your apache log and store it into Hbase.
+ * 
+ * HowToUse : 
+ * 	Make sure three things :
+ * 	1. Upload apache logs ( /var/log/apache2/access.log* ) to \ 
+ * 		hdfs (default: /user/waue/apache-log) \
+ * 	 $ bin/hadoop dfs -put /var/log/apache2/ apache-log
+ * 	2. parameter "dir" in main contains the logs.
+ *  3. you should filter the exception contents manually, \ 
+ *  	ex:  ::1 - - [29/Jun/2008:07:35:15 +0800] "GET / HTTP/1.0" 200 729 "...
+ *  
+ * Check Result:
+ * 	Go to hbase console, type : 
+ * 		hql > select * from apache-log;
+
++-------------------------+-------------------------+-------------------------+
+| Row                     | Column                  | Cell                    |
++-------------------------+-------------------------+-------------------------+
+| 118.170.101.250         | http:agent              | Mozilla/4.0 (compatible;|
+|                         |                         |  MSIE 4.01; Windows 95) |
++-------------------------+-------------------------+-------------------------+
+| 118.170.101.250         | http:bytesize           | 318                     |
++-------------------------+-------------------------+-------------------------+
+..........(skip)........
++-------------------------+-------------------------+-------------------------+
+| 87.65.93.58             | http:method             | OPTIONS                 |
++-------------------------+-------------------------+-------------------------+
+| 87.65.93.58             | http:protocol           | HTTP/1.1                |
++-------------------------+-------------------------+-------------------------+
+| 87.65.93.58             | referrer:-              | *                       |
++-------------------------+-------------------------+-------------------------+
+| 87.65.93.58             | url:*                   | -                       |
++-------------------------+-------------------------+-------------------------+
+31 row(s) in set. (0.58 sec)
+
+
+
  */
 package tw.org.nchc.code;
 
 import java.io.IOException;
-import java.text.ParseException;
 
 import org.apache.hadoop.fs.FileStatus;
@@ -47,4 +67,5 @@
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
+
 // import AccessLogParser
 /**
@@ -52,5 +73,5 @@
  * W3CExtended, IISw3cExtended)
  */
-public class LogFetcher {
+public class LogParserGo {
 	static HBaseConfiguration conf = new HBaseConfiguration();
 
@@ -60,12 +81,18 @@
 
 	static HTable table = null;
-
-	static boolean eclipseRun = false;
-
+	
+	static void print(String str){
+		System.out.println("STR  = "+str);
+	}
 	public static class MapClass extends MapReduceBase implements
 			Mapper<WritableComparable, Text, Text, Writable> {
 
 		@Override
+		// MapReduceBase.configure(JobConf job) 
+		// Default implementation that does nothing.
 		public void configure(JobConf job) {
+			// String get(String name,String defaultValue) 
+			// Get the value of the name property. If no such property exists,\
+			//	then defaultValue is returned.
 			tableName = job.get(TABLE, "");
 		}
@@ -74,7 +101,16 @@
 				OutputCollector<Text, Writable> output, Reporter reporter)
 				throws IOException {
+			
 			try {
+				/*
+				print(value.toString());
+				FileWriter out = new FileWriter(new File(
+				"/home/waue/mr-result.txt"));
+				out.write(value.toString());
+				out.flush();
+				out.close();
+				*/
+				LogParser log = new LogParser(value.toString());
 				
-				AccessLogParser log = new AccessLogParser(value.toString());
 				if (table == null)
 					table = new HTable(conf, new Text(tableName));
@@ -95,29 +131,30 @@
 						log.getUrl().getBytes());
 				table.commit(lockId, log.getTimestamp());
-			} catch (ParseException e) {
-				e.printStackTrace();
+				
 			} catch (Exception e) {
 				e.printStackTrace();
 			}
-		}
-	}
-//	 do it to resolve warning : FileSystem.listPaths 
-	static public Path[] listPaths(FileSystem fsm,Path path) throws IOException
-	{
+			
+		}
+	}
+
+	// do it to resolve warning : FileSystem.listPaths
+	static public Path[] listPaths(FileSystem fsm, Path path)
+			throws IOException {
 		FileStatus[] fss = fsm.listStatus(path);
 		int length = fss.length;
 		Path[] pi = new Path[length];
-		for (int i=0 ; i< length; i++)
-		{
+		for (int i = 0; i < length; i++) {
 			pi[i] = fss[i].getPath();
 		}
 		return pi;
-	}	
+	}
+
 	public static void runMapReduce(String table, String dir)
 			throws IOException {
-		Path tempDir = new Path("log/temp");
+		Path tempDir = new Path("/tmp/Mylog/");
 		Path InputDir = new Path(dir);
 		FileSystem fs = FileSystem.get(conf);
-		JobConf jobConf = new JobConf(conf, LogFetcher.class);
+		JobConf jobConf = new JobConf(conf, LogParserGo.class);
 		jobConf.setJobName("apache log fetcher");
 		jobConf.set(TABLE, table);
@@ -140,5 +177,5 @@
 		}
 		jobConf.setOutputPath(tempDir);
-		
+
 		jobConf.setMapperClass(MapClass.class);
 
@@ -150,5 +187,5 @@
 		JobClient.runJob(jobConf);
 
-		fs.delete(tempDir);		
+		fs.delete(tempDir);
 		fs.close();
 	}
@@ -171,18 +208,20 @@
 
 	public static void main(String[] args) throws IOException {
-		String table_name = "log";
-		String dir = "apache-log";
-
-		if (eclipseRun) {
-			table_name = "log";
-			dir = "apache-log";
-		} else if (args.length < 2) {
-			System.out
-					.println("Usage: logfetcher <access_log file or directory> <table_name>");
-			System.exit(1);
-		} else {
-			table_name = args[1];
-			dir = args[0];
-		}
+		String table_name = "apache-log2";
+		String dir = "/user/waue/apache-log";
+		
+		// if (eclipseRun) {
+		// table_name = "log";
+		// dir = "apache-log";
+		// } else if (args.length < 2) {
+		// System.out
+		// .println("Usage: logfetcher <access_log file or directory>
+		// <table_name>");
+		// System.exit(1);
+		// } else {
+		// table_name = args[1];
+		// dir = args[0];
+		// }
+
 		creatTable(table_name);
 		runMapReduce(table_name, dir);
Index: sample/hadoop-0.16/tw/org/nchc/code/WordCount.java
===================================================================
--- sample/hadoop-0.16/tw/org/nchc/code/WordCount.java	(revision 30)
+++ sample/hadoop-0.16/tw/org/nchc/code/WordCount.java	(revision 31)
@@ -56,6 +56,7 @@
 
 	// mapper: emits (token, 1) for every word occurrence
-	private static class MapClass extends MapReduceBase implements
-			Mapper<LongWritable, Text, Text, IntWritable> {
+	private static class MapClass extends MapReduceBase 
+	implements Mapper<LongWritable, Text, Text, IntWritable> 
+	{
 
 		// reuse objects to save overhead of object creation
@@ -77,6 +78,7 @@
 
 	// reducer: sums up all the counts
-	private static class ReduceClass extends MapReduceBase implements
-			Reducer<Text, IntWritable, Text, IntWritable> {
+	private static class ReduceClass extends MapReduceBase 
+	implements Reducer<Text, IntWritable, Text, IntWritable> 
+	{
 
 		// reuse objects
@@ -105,5 +107,5 @@
 		int reduceTasks = 1;
 		JobConf conf = new JobConf(WordCount.class);
-		conf.setJobName("wordcount");
+//		conf.setJobName("wordcount");
 
 		conf.setNumMapTasks(mapTasks);
@@ -118,5 +120,5 @@
 
 		conf.setMapperClass(MapClass.class);
-		conf.setCombinerClass(ReduceClass.class);
+//		conf.setCombinerClass(ReduceClass.class);
 		conf.setReducerClass(ReduceClass.class);
 
Index: sample/hadoop-0.16/tw/org/nchc/code/WordCountIntoHBase.java
===================================================================
--- sample/hadoop-0.16/tw/org/nchc/code/WordCountIntoHBase.java	(revision 30)
+++ sample/hadoop-0.16/tw/org/nchc/code/WordCountIntoHBase.java	(revision 31)
@@ -45,5 +45,5 @@
 	// $Input_Path. Please make sure the path is correct and contains input
 	// files
-	static final String Input_Path = "/user/waue/simple";
+	static final String Input_Path = "/user/waue/input";
 
 	// Hbase table name, the program will create it
Index: sample/hadoop-0.16/tw/org/nchc/demo/LogFetcher.java
===================================================================
--- sample/hadoop-0.16/tw/org/nchc/demo/LogFetcher.java	(revision 30)
+++ sample/hadoop-0.16/tw/org/nchc/demo/LogFetcher.java	(revision 31)
@@ -6,22 +6,43 @@
  */
 /**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Purpose : 
+ * 	This program will parse your apache log and store it into Hbase.
+ * 
+ * HowToUse : 
+ * 	Make sure three things :
+ * 	1. Upload apache logs ( /var/log/apache2/access.log* ) to \ 
+ * 		hdfs (default: /user/waue/apache-log) \
+ * 	 $ bin/hadoop dfs -put /var/log/apache2/ apache-log
+ * 	2. parameter "dir" in main contains the logs.
+ *  3. you should filter the exception contents manually, \ 
+ *  	ex:  ::1 - - [29/Jun/2008:07:35:15 +0800] "GET / HTTP/1.0" 200 729 "...
+ *  
+ * Check Result:
+ * 	Go to hbase console, type : 
+ * 		hql > select * from apache-log;
+
++-------------------------+-------------------------+-------------------------+
+| Row                     | Column                  | Cell                    |
++-------------------------+-------------------------+-------------------------+
+| 118.170.101.250         | http:agent              | Mozilla/4.0 (compatible;|
+|                         |                         |  MSIE 4.01; Windows 95) |
++-------------------------+-------------------------+-------------------------+
+| 118.170.101.250         | http:bytesize           | 318                     |
++-------------------------+-------------------------+-------------------------+
+..........(skip)........
++-------------------------+-------------------------+-------------------------+
+| 87.65.93.58             | http:method             | OPTIONS                 |
++-------------------------+-------------------------+-------------------------+
+| 87.65.93.58             | http:protocol           | HTTP/1.1                |
++-------------------------+-------------------------+-------------------------+
+| 87.65.93.58             | referrer:-              | *                       |
++-------------------------+-------------------------+-------------------------+
+| 87.65.93.58             | url:*                   | -                       |
++-------------------------+-------------------------+-------------------------+
+31 row(s) in set. (0.58 sec)
+
  */
+
+
 package tw.org.nchc.demo;
 
