Index: drbl-hadoop-live/Makefile
===================================================================
--- drbl-hadoop-live/Makefile	(revision 150)
+++ drbl-hadoop-live/Makefile	(revision 150)
@@ -0,0 +1,9 @@
+# 'all' and 'clean' are command names, not files; declare them phony so
+# make always runs them even if files with those names exist.
+.PHONY: all clean
+
+all: clean
+	sudo ./test-live-helper.sh > build.log 2> error.log
+
+clean:
+	sudo lh clean --purge
+	sudo rm -rf config
Index: drbl-hadoop-live/hook/install-hadoop
===================================================================
--- drbl-hadoop-live/hook/install-hadoop	(revision 150)
+++ drbl-hadoop-live/hook/install-hadoop	(revision 150)
@@ -0,0 +1,115 @@
+#!/bin/bash
+# live-helper chroot hook: install Sun JDK 6 and Hadoop 0.20.2, then
+# configure a single-node (pseudo-distributed) NameNode/JobTracker setup.
+echo "---- Running scripts in root_local-hooks ----"
+echo "---- [1] Installing Sun Java JDK 6 ........ ----"
+echo "deb http://free.nchc.org.tw/debian lenny non-free" > lenny-non-free.list
+mv lenny-non-free.list /etc/apt/sources.list.d/.
+apt-get update
+# Pre-accept the Sun DLJ license so the JDK install is non-interactive.
+cat << EOF | /usr/bin/debconf-set-selections
+sun-java6-bin   shared/accepted-sun-dlj-v1-1    select true
+sun-java6-jdk   shared/accepted-sun-dlj-v1-1    select true
+sun-java6-jre   shared/accepted-sun-dlj-v1-1    select true
+EOF
+apt-get -y install sun-java6-jdk
+
+echo "---- [2] Installing Hadoop 0.20.2 ........ ----"
+cd /opt
+if [ ! -d /opt/hadoop ]; then
+  wget http://ftp.twaren.net/Unix/Web/apache/hadoop/core/hadoop-0.20.2/hadoop-0.20.2.tar.gz
+  tar zxvf hadoop-0.20.2.tar.gz
+  rm -f hadoop-0.20.2.tar.gz
+  mv hadoop-0.20.2/ hadoop
+  chown -R hadoop:hadoop /opt/hadoop
+  if [ ! -d /var/hadoop ]; then
+    mkdir -p /var/hadoop
+    chown -R hadoop:hadoop /var/hadoop
+  fi
+fi
+
+if [ ! -d /opt/hadoop ]; then
+  echo "---- [ERROR] /opt/hadoop does not exist!! ----"; exit 1
+else
+  echo "---- [3] Configure Hadoop NameNode and JobTracker .... ----"
+  cd /opt/hadoop
+  if [ ! -f /opt/hadoop/conf/hadoop-env.sh ]; then
+    echo "---- [ERROR] /opt/hadoop/conf/hadoop-env.sh does not exist!!  ----"; exit 1
+  else
+    if [ ! -f /opt/hadoop/conf/hadoop-env.sh.org ]; then
+      echo "---- [3.1] Updating /opt/hadoop/conf/hadoop-env.sh ....  ----"
+      cp /opt/hadoop/conf/hadoop-env.sh /opt/hadoop/conf/hadoop-env.sh.org
+      cat >> conf/hadoop-env.sh << EOF
+export JAVA_HOME=/usr/lib/jvm/java-6-sun
+export HADOOP_HOME=/opt/hadoop
+export HADOOP_CONF_DIR=/opt/hadoop/conf
+EOF
+    fi
+  fi
+
+  if [ ! -f /opt/hadoop/conf/core-site.xml ]; then
+    echo "---- [ERROR] /opt/hadoop/conf/core-site.xml does not exist!!  ----"; exit 1
+  else
+    if [ ! -f /opt/hadoop/conf/core-site.xml.org ]; then
+      echo "---- [3.2] Updating /opt/hadoop/conf/core-site.xml ....  ----"
+      cp /opt/hadoop/conf/core-site.xml /opt/hadoop/conf/core-site.xml.org
+      cat > conf/core-site.xml << EOF
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<configuration>
+  <property>
+    <name>fs.default.name</name>
+    <value>hdfs://localhost:9000</value>
+  </property>
+  <property>
+    <name>hadoop.tmp.dir</name>
+    <value>/var/hadoop/hadoop-\${user.name}</value>
+  </property>
+</configuration>
+EOF
+    fi
+  fi
+
+  if [ ! -f /opt/hadoop/conf/hdfs-site.xml ]; then
+    echo "---- [ERROR] /opt/hadoop/conf/hdfs-site.xml does not exist!!  ----"; exit 1
+  else
+    if [ ! -f /opt/hadoop/conf/hdfs-site.xml.org ]; then
+      echo "---- [3.3] Updating /opt/hadoop/conf/hdfs-site.xml ....  ----"
+      cp /opt/hadoop/conf/hdfs-site.xml /opt/hadoop/conf/hdfs-site.xml.org
+      cat > conf/hdfs-site.xml << EOF
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<configuration>
+  <property>
+    <name>dfs.replication</name>
+    <value>1</value>
+  </property>
+</configuration>
+EOF
+    fi
+  fi
+
+  if [ ! -f /opt/hadoop/conf/mapred-site.xml ]; then
+    echo "---- [ERROR] /opt/hadoop/conf/mapred-site.xml does not exist!!  ----"; exit 1
+  else
+    if [ ! -f /opt/hadoop/conf/mapred-site.xml.org ]; then
+      echo "---- [3.4] Updating /opt/hadoop/conf/mapred-site.xml ....  ----"
+      cp /opt/hadoop/conf/mapred-site.xml /opt/hadoop/conf/mapred-site.xml.org
+      cat > conf/mapred-site.xml << EOF
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<configuration>
+  <property>
+    <name>mapred.job.tracker</name>
+    <value>localhost:9001</value>
+  </property>
+</configuration>
+EOF
+    fi
+  fi
+
+  if [ ! -d /var/hadoop/hadoop-root/dfs/name ]; then
+    echo "---- [3.5] Formatting NameNode ....  ----"
+    bin/hadoop namenode -format
+  fi
+fi
Index: drbl-hadoop-live/test-live-helper.sh
===================================================================
--- drbl-hadoop-live/test-live-helper.sh	(revision 149)
+++ drbl-hadoop-live/test-live-helper.sh	(revision 150)
@@ -9,4 +9,10 @@
 ## [2] /opt/drbl/sbin/create-drbl-live (from drbl - http://drbl.sf.net)
 ## [3] man lh_config and lh_build
+
+## Check root privileges; exit non-zero so callers (e.g. make) see the failure
+if [ "$(id -u)" -ne 0 ]; then
+  echo "[ERROR] This script must run as root or sudo !!" >&2
+  exit 1
+fi
 
 ## Check current distribution is debian-like or not
@@ -40,14 +46,13 @@
 lh clean --binary
 # [Note] option '--categories' is only avaible at live-helper 1.0.3-2
-lh config -b iso --binary-indices disabled -f minimal --cache enabled --cache-indices enabled --categories 'main non-free' -d lenny --hostname hadoop -m http://free.nchc.org.tw/debian --mirror-chroot http://free.nchc.org.tw/debian --mirror-chroot-security http://free.nchc.org.tw/debian-security --mirror-binary http://free.nchc.org.tw/debian --mirror-binary-security http://free.nchc.org.tw/debian-security --username hadoop --packages 'ssh sudo xserver-xorg-video-vesa xinit xfonts-base x11-xserver-utils xterm openbox iceweasel dhcp3-client' -k 686
+lh config -b iso --binary-indices disabled -f minimal --cache enabled --cache-indices enabled -d lenny --hostname hadoop -m http://free.nchc.org.tw/debian --mirror-chroot http://free.nchc.org.tw/debian --mirror-chroot-security http://free.nchc.org.tw/debian-security --mirror-binary http://free.nchc.org.tw/debian --mirror-binary-security http://free.nchc.org.tw/debian-security --username hadoop --packages 'net-tools wireless-tools ssh sudo xserver-xorg-video-vesa xinit xfonts-base x11-xserver-utils xterm openbox iceweasel dhcp3-client' -k 686
 
-# add non-free apt repository for chroot stage
-echo << EOF > config/chroot_sources/non-free.chroot
-deb http://free.nchc.org.tw/debian lenny non-free
-EOF
+cp hook/* config/chroot_local-hooks/
 
 lh build
 
 if [ -f binary.iso ]; then
-  cp binary.iso `date +"hadoop-live-%y%m%d%H%M.iso"`
+  filename=$(date +"hadoop-live-%y%m%d%H%M")
+  cp binary.iso "$filename.iso"
+  cp binary.packages "$filename.packages"
 fi
