| 29 | || make source; make deb; make clean ||
| 30 | == 製作deb打包設定檔 == |
| 31 | |
| 32 | {{{ |
| 33 | $ mkdir -p ~/test/hadoop-0.19.1/debian |
| 34 | $ cd ~/test/hadoop-0.19.1/debian |
| 35 | }}} |
| 36 | |
| 37 | == 編輯檔案 == |
| 38 | * 這些與dh_make產生出來的檔案差不多,就不討論了 |
| 39 | |
| 40 | || changelog || |
| 41 | || copyright || |
| 42 | || compat || |
| 43 | || control || |
| 44 | || rules || |
| 45 | |
| 46 | * 以下檔案就把內容列出 |
| 47 | === hadoop.install === |
| 48 | {{{ |
| 49 | #!sh |
| 50 | conf/* etc/hadoop |
| 51 | |
| 52 | debian/conf/* etc/hadoop |
| 53 | |
| 54 | bin opt/hadoop |
| 55 | |
| 56 | c++ opt/hadoop |
| 57 | |
| 58 | contrib opt/hadoop |
| 59 | |
| 60 | lib opt/hadoop |
| 61 | |
| 62 | libhdfs opt/hadoop |
| 63 | |
| 64 | librecordio opt/hadoop |
| 65 | |
| 66 | webapps opt/hadoop |
| 67 | |
| 68 | *.jar opt/hadoop |
| 69 | }}} |
| 70 | === hadoop.prerm ===
| 71 | 
| 72 | {{{ |
| 73 | #!sh |
| 74 | #!/bin/sh |
| 75 | |
| 76 | |
| 77 | su -c /opt/hadoop/bin/stop-all.sh hdfsadm - |
| 78 | }}} |
| 79 | === hadoop-doc.install === |
| 80 | {{{ |
| 81 | #!sh |
| 82 | docs/* usr/share/doc/hadoop |
| 83 | }}} |
| 84 | === hadoop.links === |
| 85 | {{{ |
| 86 | #!sh |
| 87 | etc/hadoop opt/hadoop/conf |
| 88 | |
| 89 | usr/share/doc/hadoop opt/hadoop/docs |
| 90 | |
| 91 | var/log/hadoop opt/hadoop/logs |
| 92 | }}} |
| 93 | === hadoop-src.install === |
| 94 | {{{ |
| 95 | #!sh |
| 96 | src opt/hadoop |
| 97 | |
| 98 | *.xml opt/hadoop |
| 99 | }}} |
| 100 | |
| 101 | === hadoop-doc.links === |
| 102 | {{{ |
| 103 | #!sh |
| 104 | usr/share/doc/hadoop opt/hadoop/docs |
| 105 | }}} |
| 106 | === hadoop.postinst === |
| 107 | {{{ |
| 108 | #!sh |
| 109 | #!/bin/sh |
| 110 | |
| 111 | |
| 112 | |
| 113 | echo "$1" |
| 114 | |
| 115 | |
| 116 | |
| 117 | if [ "$1" != configure ] |
| 118 | |
| 119 | then |
| 120 | |
| 121 | exit 0 |
| 122 | |
| 123 | fi |
| 124 | |
| 125 | |
| 126 | |
| 127 | setup_hdfsadm_user() { |
| 128 | |
| 129 | if ! getent passwd hdfsadm >/dev/null; then |
| 130 | |
| 131 | useradd hdfsadm |
| 132 | |
| 133 | mkdir -p /home/hdfsadm/.ssh |
| 134 | |
| 135 | mkdir -p /var/log/hadoop |
| 136 | |
| 137 | ssh-keygen -t rsa -q -f /home/hdfsadm/.ssh/id_rsa -N "" |
| 138 | |
| 139 | cp /home/hdfsadm/.ssh/id_rsa.pub /home/hdfsadm/.ssh/authorized_keys |
| 140 | |
| 141 | chown hdfsadm:hdfsadm /var/log/hadoop |
| 142 | |
| 143 | chown -R hdfsadm:hdfsadm /home/hdfsadm/.ssh |
| 144 | |
| 145 | chown -R hdfsadm:hdfsadm /home/hdfsadm |
| 146 | |
| 147 | su -c "/opt/hadoop/bin/hadoop namenode -format" hdfsadm - |
| 148 | |
| 149 | su -c /opt/hadoop/bin/start-all.sh hdfsadm - |
| 150 | |
| 151 | echo "Please check via browsing following URLs:" |
| 152 | |
| 153 | echo "(1) http://localhost:50030 for Hadoop Map/Reduce Administration." |
| 154 | |
| 155 | echo "(2) http://localhost:50060 for Hadoop Task Tracker status" |
| 156 | |
| 157 | echo "(3) http://localhost:50070 for Hadoop Distributed File System status" |
| 158 | |
| 159 | fi |
| 160 | |
| 161 | } |
| 162 | |
| 163 | |
| 164 | |
| 165 | setup_hdfsadm_user |
| 166 | }}} |
| 167 | === hadoop.docs === |
| 168 | {{{ |
| 169 | #!sh |
| 170 | CHANGES.txt |
| 171 | |
| 172 | LICENSE.txt |
| 173 | |
| 174 | NOTICE.txt |
| 175 | |
| 176 | README.txt |
| 177 | }}} |
| 178 | === hadoop.postrm === |
| 179 | {{{ |
| 180 | #!sh |
| 181 | #!/bin/sh |
| 182 | |
| 183 | |
| 184 | |
| 185 | echo "$1" |
| 186 | |
| 187 | |
| 188 | |
| 189 | if [ "$1" != remove ] |
| 190 | |
| 191 | then |
| 192 | |
| 193 | exit 0 |
| 194 | |
| 195 | fi |
| 196 | |
| 197 | |
| 198 | |
| 199 | setup_hdfsadm_user() { |
| 200 | |
| 201 | if ! getent passwd hdfsadm >/dev/null; then |
| 202 | |
| 203 | echo "no account found: 'hdfsadm'." |
| 204 | |
| 205 | else |
| 206 | |
| 207 | userdel hdfsadm |
| 208 | |
| 209 | rm -rf /home/hdfsadm |
| 210 | |
| 211 | rm -rf /var/log/hadoop |
| 212 | |
| 213 | rm -rf /tmp/hadoop-hdfsadm* |
| 214 | |
| 215 | rm -rf /tmp/hsperfdata_* |
| 216 | |
| 217 | fi |
| 218 | |
| 219 | } |
| 220 | |
| 221 | |
| 222 | |
| 223 | setup_hdfsadm_user |
| 224 | }}} |
| 225 | |
| 226 | === 加入目錄 conf === |
| 227 | * 用來放編輯好的Hadoop設定檔,而此設定檔與hadoop有關,就不再贅述 |
| 228 | |
| 229 | |
| 230 | == 編輯一個Makefile == |
| 231 | |
| 232 | {{{ |
| 233 | VERSION = 0.19.1 |
| 234 | |
| 235 | all: help |
| 236 | |
| 237 | |
| 238 | |
| 239 | deb: |
| 240 | |
| 241 | @dpkg-buildpackage -rfakeroot -aamd64 |
| 242 | |
| 243 | @dpkg-buildpackage -rfakeroot -ai386 |
| 244 | |
| 245 | |
| 246 | |
| 247 | clean: |
| 248 | |
| 249 | @debian/rules clean |
| 250 | |
| 251 | |
| 252 | |
| 253 | source: |
| 254 | |
| 255 | @wget http://ftp.twaren.net/Unix/Web/apache/hadoop/core/hadoop-${VERSION}/hadoop-${VERSION}.tar.gz |
| 256 | |
| 257 | @tar zxvf hadoop-${VERSION}.tar.gz -C .. |
| 258 | |
| 259 | @rm conf/hadoop-env.sh |
| 260 | |
| 261 | @rm conf/hadoop-site.xml |
| 262 | |
| 263 | @chmod a+x `find . -name "configure"` |
| 264 | |
| 265 | |
| 266 | |
| 267 | update: |
| 268 | |
| 269 | @scp ../hadoop*_amd64.deb www.classcloud.org:/var/www/hadoop/dists/unstable/main/binary-amd64/. |
| 270 | |
| 271 | @scp ../hadoop*_i386.deb www.classcloud.org:/var/www/hadoop/dists/unstable/main/binary-i386/. |
| 272 | |
| 273 | @ssh www.classcloud.org /var/www/hadoop/update-repository.sh |
| 274 | |
| 275 | |
| 276 | |
| 277 | help: |
| 278 | |
| 279 | @echo "Usage:" |
| 280 | |
| 281 | @echo "make deb - Build Debian Package." |
| 282 | |
| 283 | @echo "make clean - Clean up Debian Package temparate files." |
| 284 | |
| 285 | @echo "make source - download source tarball from hadoop mirror site." |
| 286 | |
| 287 | @echo "make update - upload deb packages to classcloud.org." |
| 288 | |
| 289 | @echo "make help - show Makefile options." |
| 290 | |
| 291 | @echo " " |
| 292 | |
| 293 | @echo "Example:" |
| 294 | |
| 295 | @echo "$$ make source; make deb; make clean" |
| 296 | }}} |
| 297 | == 執行makefile的內容 == |
| 298 | {{{ |
| 299 | $ make source; make deb; make clean |
| 300 | }}} |