= 2. Environment Setup =

== 2.1 Preparation ==
System:
 * Ubuntu 7.10
 * Hadoop 0.16
Requirements:
 * Eclipse (3.2.2)
{{{
$ sudo apt-get install eclipse
}}}
 * Java 6
{{{
$ sudo apt-get install sun-java6-bin sun-java6-jdk sun-java6-jre sun-java6-plugin
}}}
It is suggested to remove the default Java compiler, gcj:
{{{
$ sudo apt-get purge java-gcj-compat
}}}
Append the following lines to /etc/bash.bashrc to set up the Java classpath:
{{{
export JAVA_HOME=/usr/lib/jvm/java-6-sun
export HADOOP_HOME=/home/waue/workspace/hadoop/
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
}}}
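After opening a new shell (or sourcing /etc/bash.bashrc), a quick sanity check along these lines should confirm the Java environment took effect; the exact version string is an assumption based on the sun-java6 packages installed above:
{{{
$ source /etc/bash.bashrc
$ java -version                 # should report the Sun JVM, e.g. "java version 1.6.0_..."
$ echo $JAVA_HOME               # should print /usr/lib/jvm/java-6-sun
$ ls $JAVA_HOME/lib/tools.jar   # should exist, since CLASSPATH refers to it
}}}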
Paths used in this setup:
|| Name || Path ||
|| Hadoop Home || /home/waue/workspace/hadoop/ ||
|| Java Home || /usr/lib/jvm/java-6-sun ||

== 2.2 Hadoop Setup ==
=== 2.2.1. Generate an SSH key for the user ===
{{{
$ ssh-keygen -t rsa -P ""
$ cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
$ ssh localhost
$ exit
}}}
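Before moving on, it is worth verifying that passwordless SSH actually works, since the Hadoop start scripts rely on it; a minimal check:
{{{
# BatchMode forces a failure instead of a password prompt if key auth is broken
$ ssh -o BatchMode=yes localhost 'echo ssh ok'
ssh ok
}}}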
=== 2.2.2. Install Hadoop ===
Assuming the hadoop-0.16.0.tar.gz tarball has already been downloaded to /home/waue/workspace:
{{{
$ cd /home/waue/workspace
$ sudo tar xzf hadoop-0.16.0.tar.gz
$ sudo mv hadoop-0.16.0 hadoop
$ sudo chown -R waue:waue hadoop
$ cd hadoop
}}}
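To confirm the unpacked tree is usable, ask Hadoop for its version (the exact output format is an assumption; it should at least mention 0.16):
{{{
$ bin/hadoop version
}}}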
=== 2.2.3. Configuration ===
 1. hadoop-env.sh ($HADOOP_HOME/conf/) [[BR]]
Change
{{{
# The java implementation to use. Required.
# export JAVA_HOME=/usr/lib/j2sdk1.5-sun
}}}
to
{{{
# The java implementation to use. Required.
export JAVA_HOME=/usr/lib/jvm/java-6-sun
export HADOOP_HOME=/home/waue/workspace/hadoop
export HADOOP_LOG_DIR=$HADOOP_HOME/logs
export HADOOP_SLAVES=$HADOOP_HOME/conf/slaves
}}}
 2. hadoop-site.xml ($HADOOP_HOME/conf/) [[BR]]
Modify the contents of conf/hadoop-site.xml as below:
{{{
<configuration>
  <property>
    <name>fs.default.name</name>
    <value>localhost:9000</value>
    <description>
    </description>
  </property>
  <property>
    <name>mapred.job.tracker</name>
    <value>localhost:9001</value>
    <description>
    </description>
  </property>
  <property>
    <name>mapred.map.tasks</name>
    <value>1</value>
    <description>
      define mapred.map.tasks to be the number of slave hosts
    </description>
  </property>
  <property>
    <name>mapred.reduce.tasks</name>
    <value>1</value>
    <description>
      define mapred.reduce.tasks to be the number of slave hosts
    </description>
  </property>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
</configuration>
}}}
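A quick way to catch typos in these two files before starting the daemons (xmllint comes from the libxml2-utils package and may need to be installed first):
{{{
$ grep '^export' conf/hadoop-env.sh      # should list the four variables set above
$ xmllint --noout conf/hadoop-site.xml   # prints nothing if the XML is well-formed
}}}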

= 3. Execution =
== 3.1 Start Up Hadoop ==
Format the HDFS namenode first:
{{{
$ cd $HADOOP_HOME
$ bin/hadoop namenode -format
08/05/23 14:52:16 INFO dfs.NameNode: STARTUP_MSG:
/************************************************************
STARTUP_MSG: Starting NameNode
STARTUP_MSG: host = Dx7200/127.0.1.1
STARTUP_MSG: args = [-format]
STARTUP_MSG: version = 0.16.4
STARTUP_MSG: build = http://svn.apache.org/repos/asf/hadoop/core/branches/branch-0.16 -r 652614; compiled by 'hadoopqa' on Fri May 2 00:18:12 UTC 2008
************************************************************/
08/05/23 14:52:17 INFO fs.FSNamesystem: fsOwner=waue,waue,adm,dialout,cdrom,floppy,audio,dip,video,plugdev,staff,scanner,lpadmin,admin,netdev,powerdev,vboxusers
08/05/23 14:52:17 INFO fs.FSNamesystem: supergroup=supergroup
08/05/23 14:52:17 INFO fs.FSNamesystem: isPermissionEnabled=true
08/05/23 14:52:17 INFO dfs.Storage: Storage directory /tmp/hadoop-waue/dfs/name has been successfully formatted.
08/05/23 14:52:17 INFO dfs.NameNode: SHUTDOWN_MSG:
/************************************************************
SHUTDOWN_MSG: Shutting down NameNode at Dx7200/127.0.1.1
************************************************************/
}}}
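If the format step succeeded, the storage directory mentioned in the log above should now exist:
{{{
$ ls /tmp/hadoop-waue/dfs/name
}}}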
Then start all the Hadoop daemons (note that start-all.sh lives in $HADOOP_HOME/bin, not /bin):
{{{
$ bin/start-all.sh
starting namenode, logging to /home/waue/workspace/hadoop/logs/hadoop-waue-namenode-Dx7200.out
localhost: starting datanode, logging to /home/waue/workspace/hadoop/logs/hadoop-waue-datanode-Dx7200.out
localhost: starting secondarynamenode, logging to /home/waue/workspace/hadoop/logs/hadoop-waue-secondarynamenode-Dx7200.out
starting jobtracker, logging to /home/waue/workspace/hadoop/logs/hadoop-waue-jobtracker-Dx7200.out
localhost: starting tasktracker, logging to /home/waue/workspace/hadoop/logs/hadoop-waue-tasktracker-Dx7200.out
}}}
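The jps tool from the Sun JDK gives a quick view of which Java daemons actually came up; after a successful start it should list a NameNode, DataNode, SecondaryNameNode, JobTracker and TaskTracker:
{{{
$ jps
}}}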
Then browse to http://localhost:50030/ and make sure the JobTracker web interface is up. [[BR]]
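As a simple end-to-end check of HDFS, copy a local file in and list it (the target name test.sh is arbitrary):
{{{
$ bin/hadoop dfs -put conf/hadoop-env.sh test.sh
$ bin/hadoop dfs -ls
}}}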

 * PS: If Hadoop reports errors after a system restart, run the following to stop the daemons and wipe the HDFS data and logs, then repeat 3.1 (Start Up Hadoop):
{{{
$ cd $HADOOP_HOME
$ bin/stop-all.sh
$ rm -rf /tmp/hadoop-waue
$ rm -rf logs/*
}}}
