| 1 | {{{ |
| 2 | #!html |
| 3 | <div style="text-align: center;"><big |
| 4 | style="font-weight: bold;"><big><big>實作二: HDFS Shell操作練習</big></big></big></div> |
| 5 | }}} |
| 6 | [[PageOutline]] |
| 7 | |
| 8 | == 前言 == |
| 9 | |
 * 此部分接續實作一
| 11 | |
| 12 | == Content 1. HDFS Shell基本操作 == |
=== 1.1 瀏覽你的 HDFS 目錄 ===
| 14 | |
| 15 | {{{ |
| 16 | /opt/hadoop$ bin/hadoop fs -ls |
| 17 | }}} |
| 18 | |
| 19 | === 1.2 上傳資料到HDFS目錄 === |
| 20 | * 上傳 |
| 21 | |
| 22 | {{{ |
| 23 | /opt/hadoop$ bin/hadoop fs -put conf input |
| 24 | }}} |
| 25 | |
| 26 | * 檢查 |
| 27 | |
| 28 | {{{ |
| 29 | /opt/hadoop$ bin/hadoop fs -ls |
| 30 | /opt/hadoop$ bin/hadoop fs -ls input |
| 31 | }}} |
| 32 | |
| 33 | === 1.3 下載HDFS的資料到本地目錄 === |
| 34 | |
| 35 | * 下載 |
| 36 | |
| 37 | {{{ |
| 38 | /opt/hadoop$ bin/hadoop fs -get input fromHDFS |
| 39 | }}} |
| 40 | |
| 41 | * 檢查 |
| 42 | |
| 43 | {{{ |
| 44 | /opt/hadoop$ ls -al | grep fromHDFS |
| 45 | /opt/hadoop$ ls -al fromHDFS |
| 46 | }}} |
| 47 | |
| 48 | === 1.4 刪除檔案 === |
| 49 | |
| 50 | {{{ |
| 51 | /opt/hadoop$ bin/hadoop fs -ls input |
| 52 | /opt/hadoop$ bin/hadoop fs -rm input/masters |
| 53 | }}} |
| 54 | |
| 55 | === 1.5 直接看檔案 === |
| 56 | |
| 57 | {{{ |
| 58 | /opt/hadoop$ bin/hadoop fs -ls input |
| 59 | /opt/hadoop$ bin/hadoop fs -cat input/slaves |
| 60 | }}} |
| 61 | |
| 62 | === 1.6 更多指令操作 === |
| 63 | |
| 64 | {{{ |
| 65 | hadooper@vPro:/opt/hadoop$ bin/hadoop fs |
| 66 | |
| 67 | Usage: java FsShell |
| 68 | [-ls <path>] |
| 69 | [-lsr <path>] |
| 70 | [-du <path>] |
| 71 | [-dus <path>] |
| 72 | [-count[-q] <path>] |
| 73 | [-mv <src> <dst>] |
| 74 | [-cp <src> <dst>] |
| 75 | [-rm <path>] |
| 76 | [-rmr <path>] |
| 77 | [-expunge] |
| 78 | [-put <localsrc> ... <dst>] |
| 79 | [-copyFromLocal <localsrc> ... <dst>] |
| 80 | [-moveFromLocal <localsrc> ... <dst>] |
| 81 | [-get [-ignoreCrc] [-crc] <src> <localdst>] |
| 82 | [-getmerge <src> <localdst> [addnl]] |
| 83 | [-cat <src>] |
| 84 | [-text <src>] |
| 85 | [-copyToLocal [-ignoreCrc] [-crc] <src> <localdst>] |
| 86 | [-moveToLocal [-crc] <src> <localdst>] |
| 87 | [-mkdir <path>] |
| 88 | [-setrep [-R] [-w] <rep> <path/file>] |
| 89 | [-touchz <path>] |
| 90 | [-test -[ezd] <path>] |
| 91 | [-stat [format] <path>] |
| 92 | [-tail [-f] <file>] |
| 93 | [-chmod [-R] <MODE[,MODE]... | OCTALMODE> PATH...] |
| 94 | [-chown [-R] [OWNER][:[GROUP]] PATH...] |
| 95 | [-chgrp [-R] GROUP PATH...] |
| 96 | [-help [cmd]] |
| 97 | |
| 98 | Generic options supported are |
| 99 | -conf <configuration file> specify an application configuration file |
| 100 | -D <property=value> use value for given property |
| 101 | -fs <local|namenode:port> specify a namenode |
| 102 | -jt <local|jobtracker:port> specify a job tracker |
| 103 | -files <comma separated list of files> specify comma separated files to be copied to the map reduce cluster |
| 104 | -libjars <comma separated list of jars> specify comma separated jar files to include in the classpath. |
| 105 | -archives <comma separated list of archives> specify comma separated archives to be unarchived on the compute machines. |
| 106 | The general command line syntax is |
| 107 | bin/hadoop command [genericOptions] [commandOptions] |
| 108 | |
| 109 | }}} |
| 110 | |
| 111 | |
| 112 | |
== Content 2. 使用網頁 GUI 瀏覽資訊 ==
| 114 | |
| 115 | * [http://localhost:50030 Map/Reduce Administration] |
| 116 | * [http://localhost:50070 NameNode ] |
| 117 | |
| 118 | == Content 3. 更多HDFS shell 的用法 == |
| 119 | |
=== HDFS shell 的用法 ===
| 121 | * bin/hadoop fs <args> ,下面則列出 <args> 的用法 |
| 122 | * 以下操作預設的目錄在 /user/<$username>/ 下 |
| 123 | {{{ |
| 124 | $ bin/hadoop fs -ls input |
| 125 | Found 4 items |
| 126 | -rw-r--r-- 2 hadooper supergroup 115045564 2009-04-02 11:51 /user/hadooper/input/1.txt |
| 127 | -rw-r--r-- 2 hadooper supergroup 987864 2009-04-02 11:51 /user/hadooper/input/2.txt |
| 128 | -rw-r--r-- 2 hadooper supergroup 1573048 2009-04-02 11:51 /user/hadooper/input/3.txt |
| 129 | -rw-r--r-- 2 hadooper supergroup 25844527 2009-04-02 11:51 /user/hadooper/input/4.txt |
| 130 | }}} |
| 131 | * 完整的路徑則是 '''hdfs://node:port/path''' 如: |
| 132 | {{{ |
| 133 | $ bin/hadoop fs -ls hdfs://gm1.nchc.org.tw:9000/user/hadooper/input |
| 134 | Found 4 items |
| 135 | -rw-r--r-- 2 hadooper supergroup 115045564 2009-04-02 11:51 /user/hadooper/input/1.txt |
| 136 | -rw-r--r-- 2 hadooper supergroup 987864 2009-04-02 11:51 /user/hadooper/input/2.txt |
| 137 | -rw-r--r-- 2 hadooper supergroup 1573048 2009-04-02 11:51 /user/hadooper/input/3.txt |
| 138 | -rw-r--r-- 2 hadooper supergroup 25844527 2009-04-02 11:51 /user/hadooper/input/4.txt |
| 139 | }}} |
| 140 | |
| 141 | == -cat == |
| 142 | * 將路徑指定文件的內容輸出到stdout |
| 143 | {{{ |
| 144 | $ bin/hadoop fs -cat quota/hadoop-env.sh |
| 145 | }}} |
| 146 | == -chgrp == |
| 147 | * 改變文件所屬的組 |
| 148 | {{{ |
| 149 | $ bin/hadoop fs -chgrp -R hadooper own |
| 150 | }}} |
| 151 | == -chmod == |
| 152 | * 改變文件的權限 |
| 153 | {{{ |
| 154 | $ bin/hadoop fs -chmod -R 755 own |
| 155 | }}} |
| 156 | == -chown == |
| 157 | * 改變文件的擁有者 |
| 158 | {{{ |
| 159 | $ bin/hadoop fs -chown -R hadooper own |
| 160 | }}} |
| 161 | == -copyFromLocal, -put == |
| 162 | * 從local放檔案到hdfs |
| 163 | {{{ |
| 164 | $ bin/hadoop fs -put input dfs_input |
| 165 | }}} |
| 166 | == -copyToLocal, -get == |
 * 把 HDFS 上的檔案下載到 local
| 168 | {{{ |
| 169 | $ bin/hadoop fs -get dfs_input input1 |
| 170 | }}} |
| 171 | == -cp == |
| 172 | * 將文件從hdfs原本路徑複製到hdfs目標路徑 |
| 173 | {{{ |
| 174 | $ bin/hadoop fs -cp own hadooper |
| 175 | }}} |
| 176 | == -du == |
| 177 | * 顯示目錄中所有文件的大小 |
| 178 | {{{ |
| 179 | $ bin/hadoop fs -du input |
| 180 | |
| 181 | Found 4 items |
| 182 | 115045564 hdfs://gm1.nchc.org.tw:9000/user/hadooper/input/1.txt |
| 183 | 987864 hdfs://gm1.nchc.org.tw:9000/user/hadooper/input/2.txt |
| 184 | 1573048 hdfs://gm1.nchc.org.tw:9000/user/hadooper/input/3.txt |
| 185 | 25844527 hdfs://gm1.nchc.org.tw:9000/user/hadooper/input/4.txt |
| 186 | }}} |
| 187 | == -dus == |
| 188 | * 顯示該目錄/文件的總大小 |
| 189 | {{{ |
| 190 | $ bin/hadoop fs -dus input |
| 191 | |
| 192 | hdfs://gm1.nchc.org.tw:9000/user/hadooper/input 143451003 |
| 193 | }}} |
| 194 | == -expunge == |
| 195 | * 清空垃圾桶 |
| 196 | {{{ |
| 197 | $ bin/hadoop fs -expunge |
| 198 | }}} |
| 199 | == -getmerge == |
| 200 | * 將來源目錄<src>下所有的文件都集合到本地端一個<localdst>檔案內 |
| 201 | * bin/hadoop fs -getmerge <src> <localdst> |
| 202 | {{{ |
| 203 | $ echo "this is one; " >> in1/input |
| 204 | $ echo "this is two; " >> in1/input2 |
| 205 | $ bin/hadoop fs -put in1 in1 |
| 206 | $ bin/hadoop fs -getmerge in1 merge.txt |
| 207 | $ cat ./merge.txt |
| 208 | }}} |
| 209 | |
| 210 | == -ls == |
| 211 | * 列出文件或目錄的資訊 |
| 212 | * 文件名 <副本數> 文件大小 修改日期 修改時間 權限 用戶ID 組ID |
| 213 | * 目錄名 <dir> 修改日期 修改時間 權限 用戶ID 組ID |
| 214 | {{{ |
| 215 | $ bin/hadoop fs -ls |
| 216 | }}} |
| 217 | == -lsr == |
| 218 | * ls命令的遞迴版本 |
| 219 | {{{ |
| 220 | $ bin/hadoop fs -lsr / |
| 221 | }}} |
| 222 | == -mkdir == |
| 223 | * 建立資料夾 |
| 224 | {{{ |
| 225 | $ bin/hadoop fs -mkdir a b c |
| 226 | }}} |
| 227 | == -moveFromLocal == |
| 228 | * 將local端的資料夾剪下移動到hdfs上 |
| 229 | {{{ |
| 230 | $ bin/hadoop fs -moveFromLocal in1 in2 |
| 231 | }}} |
| 232 | == -mv == |
 * 移動檔案或更改資料的名稱
| 234 | {{{ |
| 235 | $ bin/hadoop fs -mv in2 in3 |
| 236 | }}} |
| 237 | == -rm == |
 * 刪除指定的檔案(不可用於刪除資料夾,資料夾請用 -rmr)
| 239 | {{{ |
| 240 | $ bin/hadoop fs -rm in1/input |
| 241 | }}} |
| 242 | == -rmr == |
| 243 | * 遞迴刪除資料夾(包含在內的所有檔案) |
| 244 | {{{ |
| 245 | $ bin/hadoop fs -rmr in1 |
| 246 | }}} |
| 247 | == -setrep == |
| 248 | * 設定副本係數 |
| 249 | * bin/hadoop fs -setrep [-R] [-w] <rep> <path/file> |
| 250 | {{{ |
| 251 | $ bin/hadoop fs -setrep -w 2 -R input |
| 252 | Replication 2 set: hdfs://gm1.nchc.org.tw:9000/user/hadooper/input/1.txt |
| 253 | Replication 2 set: hdfs://gm1.nchc.org.tw:9000/user/hadooper/input/2.txt |
| 254 | Replication 2 set: hdfs://gm1.nchc.org.tw:9000/user/hadooper/input/3.txt |
| 255 | Replication 2 set: hdfs://gm1.nchc.org.tw:9000/user/hadooper/input/4.txt |
| 256 | Waiting for hdfs://gm1.nchc.org.tw:9000/user/hadooper/input/1.txt ... done |
| 257 | Waiting for hdfs://gm1.nchc.org.tw:9000/user/hadooper/input/2.txt ... done |
| 258 | Waiting for hdfs://gm1.nchc.org.tw:9000/user/hadooper/input/3.txt ... done |
| 259 | Waiting for hdfs://gm1.nchc.org.tw:9000/user/hadooper/input/4.txt ... done |
| 260 | }}} |
| 261 | == -stat == |
| 262 | * 印出時間資訊 |
| 263 | {{{ |
| 264 | $ bin/hadoop fs -stat input |
| 265 | 2009-04-02 03:51:29 |
| 266 | }}} |
| 267 | == -tail == |
| 268 | * 將文件的最後1k內容輸出 |
 * 用法 : bin/hadoop fs -tail [-f] 檔案 (-f 參數用來顯示如果檔案增大,則秀出被 append 上的內容)
| 270 | {{{ |
| 271 | $ bin/hadoop fs -tail input/1.txt |
| 272 | }}} |
| 273 | == -test == |
| 274 | * 測試檔案, -e 檢查文件是否存在(1=存在, 0=否), -z 檢查文件是否為空(1=空, 0=不為空), -d 檢查是否為目錄(1=存在, 0=否) |
| 275 | * 要用echo $? 來看回傳值為 0 or 1 |
| 276 | * 用法: bin/hadoop fs -test -[ezd] URI |
| 277 | |
| 278 | {{{ |
| 279 | $ bin/hadoop fs -test -e /user/hadooper/input/5.txt |
| 280 | $ bin/hadoop fs -test -z /user/hadooper/input/5.txt |
| 281 | test: File does not exist: /user/hadooper/input/5.txt |
| 282 | $ bin/hadoop fs -test -d /user/hadooper/input/5.txt |
| 283 | |
| 284 | test: File does not exist: /user/hadooper/input/5.txt |
| 285 | }}} |
| 286 | == -text == |
| 287 | * 將檔案(如壓縮檔, textrecordinputstream)輸出為純文字格式 |
| 288 | * hadoop fs -text <src> |
| 289 | {{{ |
| 290 | $ hadoop fs -text macadr-eth1.txt.gz |
| 291 | 00:1b:fc:61:75:b1 |
| 292 | 00:1b:fc:58:9c:23 |
| 293 | }}} |
 * PS:目前尚未支援 zip 格式的解壓縮函式庫
| 295 | {{{ |
| 296 | $ bin/hadoop fs -text b/a.txt.zip |
| 297 | PK |
| 298 | ���:��H{ |
| 299 | a.txtUT b��Ib��IUx��sssss |
| 300 | test |
| 301 | PK |
| 302 | ���:��H{ |
| 303 | ��a.txtUTb��IUxPK@C |
| 304 | }}} |
| 305 | == -touchz == |
| 306 | * 建立一個空文件 |
| 307 | {{{ |
| 308 | $ bin/hadoop fs -touchz b/kk |
| 309 | $ bin/hadoop fs -test -z b/kk |
| 310 | $ echo $? |
| 311 | 1 |
| 312 | $ bin/hadoop fs -test -z b/a.txt.zip |
| 313 | $ echo $? |
| 314 | 0 |
| 315 | }}} |