Index: /hadoop-1.0.4/Makefile
===================================================================
--- /hadoop-1.0.4/Makefile	(revision 260)
+++ /hadoop-1.0.4/Makefile	(revision 260)
@@ -0,0 +1,34 @@
+VERSION = 1.0.3
+all: help
+
+deb:
+	@dpkg-buildpackage -rfakeroot -aamd64
+	@dpkg-buildpackage -rfakeroot -ai386
+
+clean:
+	@debian/rules clean
+
+source: 
+	@wget http://ftp.twaren.net/Unix/Web/apache/hadoop/core/hadoop-${VERSION}/hadoop-${VERSION}.tar.gz
+	@tar zxvf hadoop-${VERSION}.tar.gz -C ..
+	@rm conf/hadoop-env.sh  
+	@rm conf/*-site.xml
+	@chmod a+x `find . -name "configure"`
+
+update:
+	@scp ../hadoop*_amd64.deb www.classcloud.org:/var/www/classcloud/dists/unstable/main/binary-amd64/.
+	@scp ../hadoop*_amd64.changes www.classcloud.org:/var/www/classcloud/dists/unstable/main/binary-amd64/.
+	@scp ../hadoop*_i386.deb www.classcloud.org:/var/www/classcloud/dists/unstable/main/binary-i386/.
+	@scp ../hadoop*_i386.changes www.classcloud.org:/var/www/classcloud/dists/unstable/main/binary-i386/.
+	@ssh www.classcloud.org /var/www/classcloud/update-repository.sh
+
+help:
+	@echo "Usage:"
+	@echo "make deb     - Build Debian Package."
+	@echo "make clean   - Clean up Debian Package temporary files."
+	@echo "make source  - Download source tarball from Hadoop mirror site."
+	@echo "make update  - Upload deb packages to classcloud.org."
+	@echo "make help    - show Makefile options."
+	@echo " "
+	@echo "Example:"
+	@echo "$$ make source; make deb; make clean"
Index: /hadoop-1.0.4/debian/changelog
===================================================================
--- /hadoop-1.0.4/debian/changelog	(revision 260)
+++ /hadoop-1.0.4/debian/changelog	(revision 260)
@@ -0,0 +1,31 @@
+hadoop (1.0.3-1) unstable; urgency=low
+
+  * source upstream from 
+    http://www.apache.org/dist/hadoop/core/hadoop-1.0.3/hadoop-1.0.3.tar.gz
+
+ -- Jazz Yao-Tsung Wang <jazzwang.tw@gmail.com>  Fri, 24 Aug 2012 00:23:20 +0800
+
+hadoop (0.20.2-1) unstable; urgency=low
+
+  * source upstream from
+    http://www.apache.org/dist/hadoop/core/hadoop-0.20.2/hadoop-0.20.2.tar.gz
+  * modified debian/conf/*-site.xml
+    - hadoop separate hadoop-site.xml to core-site.xml, hdfs-site.xml and 
+      mapred-site.xml since 0.20.
+
+ -- Jazz Yao-Tsung Wang <jazzwang.tw@gmail.com>  Fri, 13 Aug 2010 15:26:38 +0800
+
+hadoop (0.19.1-1) unstable; urgency=low
+
+  * source upstream from
+    http://www.apache.org/dist/hadoop/core/hadoop-0.19.1/hadoop-0.19.1.tar.gz
+
+ -- Jazz Yao-Tsung Wang <jazzwang.tw@gmail.com>  Thu, 07 May 2009 17:44:46 +0800
+
+hadoop (0.18.3-1) unstable; urgency=low
+
+  * Initial release 
+  * source upstream from
+    http://www.apache.org/dist/hadoop/core/hadoop-0.18.3/hadoop-0.18.3.tar.gz
+
+ -- Jazz Yao-Tsung Wang <jazzwang.tw@gmail.com>  Thu, 19 Mar 2009 16:49:08 +0800
Index: /hadoop-1.0.4/debian/compat
===================================================================
--- /hadoop-1.0.4/debian/compat	(revision 260)
+++ /hadoop-1.0.4/debian/compat	(revision 260)
@@ -0,0 +1,1 @@
+5
Index: /hadoop-1.0.4/debian/conf/core-site.xml
===================================================================
--- /hadoop-1.0.4/debian/conf/core-site.xml	(revision 260)
+++ /hadoop-1.0.4/debian/conf/core-site.xml	(revision 260)
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+  <property>
+    <name>fs.default.name</name>
+    <value>hdfs://localhost:9000/</value>
+    <description>
+      The name of the default file system. Either the literal string
+      "local" or a host:port for NDFS.
+    </description>
+  </property>
+</configuration>
Index: /hadoop-1.0.4/debian/conf/hadoop-env.sh
===================================================================
--- /hadoop-1.0.4/debian/conf/hadoop-env.sh	(revision 260)
+++ /hadoop-1.0.4/debian/conf/hadoop-env.sh	(revision 260)
@@ -0,0 +1,56 @@
+# Set Hadoop-specific environment variables here.
+
+# The only required environment variable is JAVA_HOME.  All others are
+# optional.  When running a distributed configuration it is best to
+# set JAVA_HOME in this file, so that it is correctly defined on
+# remote nodes.
+
+# The java implementation to use.  Required.
+export JAVA_HOME=/usr/lib/jvm/java-6-sun
+export HADOOP_HOME=/opt/hadoop
+export HADOOP_CONF_DIR=$HADOOP_HOME/conf
+
+# Extra Java CLASSPATH elements.  Optional.
+# export HADOOP_CLASSPATH=
+
+# The maximum amount of heap to use, in MB. Default is 1000.
+# export HADOOP_HEAPSIZE=2000
+
+# Extra Java runtime options.  Empty by default.
+# export HADOOP_OPTS=-server
+
+# Command specific options appended to HADOOP_OPTS when specified
+export HADOOP_NAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_NAMENODE_OPTS"
+export HADOOP_SECONDARYNAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_SECONDARYNAMENODE_OPTS"
+export HADOOP_DATANODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_DATANODE_OPTS"
+export HADOOP_BALANCER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_BALANCER_OPTS"
+export HADOOP_JOBTRACKER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_JOBTRACKER_OPTS"
+# export HADOOP_TASKTRACKER_OPTS=
+# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
+# export HADOOP_CLIENT_OPTS
+
+# Extra ssh options.  Empty by default.
+# export HADOOP_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR"
+
+# Where log files are stored.  $HADOOP_HOME/logs by default.
+export HADOOP_LOG_DIR=/var/log/hadoop
+
+# File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.
+# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves
+
+# host:path where hadoop code should be rsync'd from.  Unset by default.
+# export HADOOP_MASTER=master:/home/$USER/src/hadoop
+
+# Seconds to sleep between slave commands.  Unset by default.  This
+# can be useful in large clusters, where, e.g., slave rsyncs can
+# otherwise arrive faster than the master can service them.
+# export HADOOP_SLAVE_SLEEP=0.1
+
+# The directory where pid files are stored. /tmp by default.
+# export HADOOP_PID_DIR=/var/hadoop/pids
+
+# A string representing this instance of hadoop. $USER by default.
+# export HADOOP_IDENT_STRING=$USER
+
+# The scheduling priority for daemon processes.  See 'man nice'.
+# export HADOOP_NICENESS=10
Index: /hadoop-1.0.4/debian/conf/hdfs-site.xml
===================================================================
--- /hadoop-1.0.4/debian/conf/hdfs-site.xml	(revision 260)
+++ /hadoop-1.0.4/debian/conf/hdfs-site.xml	(revision 260)
@@ -0,0 +1,11 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+  <property>
+    <name>dfs.replication</name>
+    <value>1</value>
+  </property>
+</configuration>
Index: /hadoop-1.0.4/debian/conf/mapred-site.xml
===================================================================
--- /hadoop-1.0.4/debian/conf/mapred-site.xml	(revision 260)
+++ /hadoop-1.0.4/debian/conf/mapred-site.xml	(revision 260)
@@ -0,0 +1,16 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+  <property>
+    <name>mapred.job.tracker</name>
+    <value>localhost:9001</value>
+    <description>
+    The host and port that the MapReduce job tracker runs at. If
+    "local", then jobs are run in-process as a single map and
+    reduce task.
+    </description>
+  </property>
+</configuration>
Index: /hadoop-1.0.4/debian/control
===================================================================
--- /hadoop-1.0.4/debian/control	(revision 260)
+++ /hadoop-1.0.4/debian/control	(revision 260)
@@ -0,0 +1,59 @@
+Source: hadoop
+Section: devel
+Priority: extra
+Maintainer: Jazz Yao-Tsung Wang <jazzwang.tw@gmail.com>
+Build-Depends: debhelper (>= 5)
+Standards-Version: 3.7.2
+
+Package: hadoop
+Architecture: any
+Depends: ${shlibs:Depends}, ${misc:Depends}, sun-java6-jre, sun-java6-bin
+Suggests: sun-java6-jdk
+Description: Apache Hadoop Core
+  .
+  Apache Hadoop Core is a software platform that lets one easily write and 
+  run applications that process vast amounts of data.
+  .
+  Here's what makes Hadoop especially useful:
+   * Scalable: Hadoop can reliably store and process petabytes.
+   * Economical: It distributes the data and processing across clusters of 
+                 commonly available computers. These clusters can number into 
+                 the thousands of nodes.
+   * Efficient: By distributing the data, Hadoop can process it in parallel on
+                the nodes where the data is located. This makes it extremely 
+                rapid.
+   * Reliable: Hadoop automatically maintains multiple copies of data and 
+               automatically redeploys computing tasks based on failures.
+  .
+  Hadoop implements MapReduce, using the Hadoop Distributed File System (HDFS)
+  MapReduce divides applications into many small blocks of work. HDFS creates 
+  multiple replicas of data blocks for reliability, placing them on compute 
+  nodes around the cluster. MapReduce can then process the data where it is 
+  located.
+  .
+  For more information about Hadoop, please see the Hadoop website.
+  http://hadoop.apache.org/
+
+Package: hadoop-src
+Architecture: any
+Depends: ${shlibs:Depends}, ${misc:Depends}, sun-java6-jdk, ant, gcc, g++, hadoop
+Description: Apache Hadoop Core ( java source code and examples )
+  .
+  Apache Hadoop Core is a software platform that lets one easily write and
+  run applications that process vast amounts of data.
+  .
+  This package includes the Java source code and examples from the original
+  tarball. Install this package only when you need to rebuild the jar binary
+  or want to run the 'Word Count' examples of MapReduce.
+
+Package: hadoop-doc
+Architecture: any
+Depends: ${shlibs:Depends}, ${misc:Depends}
+Description: Apache Hadoop Core Documents
+  .
+  Apache Hadoop Core is a software platform that lets one easily write and
+  run applications that process vast amounts of data.
+  .
+  This package includes the HTML and PDF documents from the original tarball.
+  Install this package only when you need these documents.
+
Index: /hadoop-1.0.4/debian/copyright
===================================================================
--- /hadoop-1.0.4/debian/copyright	(revision 260)
+++ /hadoop-1.0.4/debian/copyright	(revision 260)
@@ -0,0 +1,26 @@
+This package was debianized by Jazz Yao-Tsung Wang <jazzwang.tw@gmail.com> on
+Thu, 19 Mar 2009 16:45:01 +0800.
+
+It was downloaded from 
+http://www.apache.org/dist/hadoop/core/hadoop-0.18.3/hadoop-0.18.3.tar.gz
+
+Upstream Author(s): 
+
+    Please check Hadoop Core credits for
+      * Committers
+      * Contributors
+      * Emeriti
+
+    http://hadoop.apache.org/core/credits.html
+
+License:
+
+    Apache License
+    Version 2.0, January 2004
+    http://www.apache.org/licenses/
+
+The Debian packaging is (C) 2009, Jazz Yao-Tsung Wang <jazzwang.tw@gmail.com> 
+and is licensed under the GPL, see `/usr/share/common-licenses/GPL'.
+
+# Please also look if there are files or directories which have a
+# different copyright/license attached and list them here.
Index: /hadoop-1.0.4/debian/hadoop-doc.install
===================================================================
--- /hadoop-1.0.4/debian/hadoop-doc.install	(revision 260)
+++ /hadoop-1.0.4/debian/hadoop-doc.install	(revision 260)
@@ -0,0 +1,1 @@
+docs/*	usr/share/doc/hadoop
Index: /hadoop-1.0.4/debian/hadoop-doc.links
===================================================================
--- /hadoop-1.0.4/debian/hadoop-doc.links	(revision 260)
+++ /hadoop-1.0.4/debian/hadoop-doc.links	(revision 260)
@@ -0,0 +1,1 @@
+usr/share/doc/hadoop  opt/hadoop/docs
Index: /hadoop-1.0.4/debian/hadoop-src.install
===================================================================
--- /hadoop-1.0.4/debian/hadoop-src.install	(revision 260)
+++ /hadoop-1.0.4/debian/hadoop-src.install	(revision 260)
@@ -0,0 +1,2 @@
+src	opt/hadoop
+*.xml	opt/hadoop
Index: /hadoop-1.0.4/debian/hadoop.docs
===================================================================
--- /hadoop-1.0.4/debian/hadoop.docs	(revision 260)
+++ /hadoop-1.0.4/debian/hadoop.docs	(revision 260)
@@ -0,0 +1,4 @@
+CHANGES.txt
+LICENSE.txt
+NOTICE.txt
+README.txt
Index: /hadoop-1.0.4/debian/hadoop.install
===================================================================
--- /hadoop-1.0.4/debian/hadoop.install	(revision 260)
+++ /hadoop-1.0.4/debian/hadoop.install	(revision 260)
@@ -0,0 +1,8 @@
+conf/*	      etc/hadoop
+debian/conf/* etc/hadoop
+bin	      opt/hadoop
+c++	      opt/hadoop
+contrib	      opt/hadoop
+lib	      opt/hadoop
+webapps	      opt/hadoop
+*.jar	      opt/hadoop
Index: /hadoop-1.0.4/debian/hadoop.links
===================================================================
--- /hadoop-1.0.4/debian/hadoop.links	(revision 260)
+++ /hadoop-1.0.4/debian/hadoop.links	(revision 260)
@@ -0,0 +1,3 @@
+etc/hadoop  opt/hadoop/conf
+usr/share/doc/hadoop opt/hadoop/docs
+var/log/hadoop	opt/hadoop/logs
Index: /hadoop-1.0.4/debian/hadoop.postinst
===================================================================
--- /hadoop-1.0.4/debian/hadoop.postinst	(revision 260)
+++ /hadoop-1.0.4/debian/hadoop.postinst	(revision 260)
@@ -0,0 +1,29 @@
+#!/bin/sh
+
+echo "$1"
+
+if [ "$1" != configure ]
+then
+  exit 0
+fi
+
+setup_hdfsadm_user() {
+  if ! getent passwd hdfsadm >/dev/null; then
+    useradd hdfsadm
+    mkdir -p /home/hdfsadm/.ssh
+    mkdir -p /var/log/hadoop
+    ssh-keygen -t rsa -q -f /home/hdfsadm/.ssh/id_rsa -N ""
+    cp /home/hdfsadm/.ssh/id_rsa.pub /home/hdfsadm/.ssh/authorized_keys
+    chown hdfsadm:hdfsadm /var/log/hadoop
+    chown -R hdfsadm:hdfsadm /home/hdfsadm/.ssh
+    chown -R hdfsadm:hdfsadm /home/hdfsadm
+    su -c "/opt/hadoop/bin/hadoop namenode -format" hdfsadm -
+    su -c /opt/hadoop/bin/start-all.sh hdfsadm -
+    echo "Please check via browsing following URLs:"
+    echo "(1) http://localhost:50030 for Hadoop Map/Reduce Administration."
+    echo "(2) http://localhost:50060 for Hadoop Task Tracker status"
+    echo "(3) http://localhost:50070 for Hadoop Distributed File System status"
+  fi
+}
+
+setup_hdfsadm_user
Index: /hadoop-1.0.4/debian/hadoop.postrm
===================================================================
--- /hadoop-1.0.4/debian/hadoop.postrm	(revision 260)
+++ /hadoop-1.0.4/debian/hadoop.postrm	(revision 260)
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+echo "$1"
+
+if [ "$1" != remove ]
+then
+  exit 0
+fi
+
+setup_hdfsadm_user() {
+  if ! getent passwd hdfsadm >/dev/null; then
+    echo "no account found: 'hdfsadm'."
+  else
+    userdel hdfsadm
+    rm -rf /home/hdfsadm
+    rm -rf /var/log/hadoop
+    rm -rf /tmp/hadoop-hdfsadm*
+    rm -rf /tmp/hsperfdata_*
+  fi
+}
+
+setup_hdfsadm_user
Index: /hadoop-1.0.4/debian/hadoop.prerm
===================================================================
--- /hadoop-1.0.4/debian/hadoop.prerm	(revision 260)
+++ /hadoop-1.0.4/debian/hadoop.prerm	(revision 260)
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+su -c /opt/hadoop/bin/stop-all.sh hdfsadm -
Index: /hadoop-1.0.4/debian/rules
===================================================================
--- /hadoop-1.0.4/debian/rules	(revision 260)
+++ /hadoop-1.0.4/debian/rules	(revision 260)
@@ -0,0 +1,29 @@
+#!/usr/bin/make -f
+
+export DH_VERBOSE=0
+
+all:
+
+install:
+	dh_testdir
+	dh_testroot
+	dh_install -Xlicense.txt
+	dh_installdocs
+	dh_installchangelogs
+	#dh_installexamples
+	dh_compress
+	dh_fixperms
+	dh_installdeb
+	dh_link
+	dh_gencontrol
+	dh_md5sums
+	dh_builddeb
+
+clean:
+	dh_clean
+
+binary: install
+
+build:
+binary-arch:
+binary-indep:
