- Download Hadoop 0.18.2
$ cd ~
$ wget http://ftp.twaren.net/Unix/Web/apache/hadoop/core/hadoop-0.18.2/hadoop-0.18.2.tar.gz
$ tar zxvf hadoop-0.18.2.tar.gz
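- (Optional) A quick sanity check, assuming the tarball unpacks into ~/hadoop-0.18.2 as used in the steps below:
$ ls ~/hadoop-0.18.2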
- Hadoop uses SSH for its internal connections, so you need to set up an SSH key exchange (passwordless login to localhost)
~$ ssh-keygen
~$ cp ~/.ssh/id_rsa.pub ~/.ssh/authorized_keys
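- (Optional) Passwordless SSH to localhost should now work without a password prompt; if it still prompts, tightening the ~/.ssh permissions is the usual fix (standard practice, not part of the original steps):
~$ chmod 700 ~/.ssh
~$ chmod 600 ~/.ssh/authorized_keys
~$ ssh localhost
~$ exit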
- The JAVA_HOME environment variable must be set before hadoop namenode can run
$ echo "export JAVA_HOME=/usr/lib/jvm/java-6-sun" >> ~/.bash_profile
$ cd ~/hadoop-0.18.2
- Edit conf/hadoop-env.sh (set HADOOP_HOME to your Hadoop installation directory)
export JAVA_HOME=/usr/lib/jvm/java-6-sun
export HADOOP_HOME=/home/jazz/hadoop-0.18.2/
export HADOOP_CONF_DIR=$HADOOP_HOME/conf
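- (Optional) With JAVA_HOME set in conf/hadoop-env.sh, the hadoop script should now run; printing the version is a quick way to confirm:
~/hadoop-0.18.2$ bin/hadoop version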
- Edit conf/hadoop-site.xml and add the following settings inside the <configuration> block
  <property>
    <name>fs.default.name</name>
    <value>hdfs://localhost:9000/</value>
    <description>
      The name of the default file system. Either the literal string
      "local" or a host:port for NDFS.
    </description>
  </property>
  <property>
    <name>mapred.job.tracker</name>
    <value>localhost:9001</value>
    <description>
      The host and port that the MapReduce job tracker runs at. If
      "local", then jobs are run in-process as a single map and
      reduce task.
    </description>
  </property>
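- (Optional) If xmllint is installed, it can catch XML typos in the config before the daemons are started (just a sanity check, not required):
~/hadoop-0.18.2$ xmllint --noout conf/hadoop-site.xml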
- Starting Hadoop takes two commands: format the NameNode, then start all the daemons
~/hadoop-0.18.2$ bin/hadoop namenode -format
~/hadoop-0.18.2$ bin/start-all.sh
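- (Optional) To verify the daemons are up: jps (from the JDK) should list NameNode, DataNode, SecondaryNameNode, JobTracker and TaskTracker, and a simple HDFS listing should succeed (assumes the single-node config above):
~/hadoop-0.18.2$ jps
~/hadoop-0.18.2$ bin/hadoop dfs -ls /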
- Once everything is up, you can view the following three web pages