Timestamp: Nov 7, 2008, 6:08:15 PM
Author: jazz
Comment: --
v6 | v7
40 | 40 | ******************************************************
41 | 41 |   NIC                 NIC IP            Clients
42 |    | +-----------------------------+
43 |    | | DRBL SERVER                 |
44 |    | |                             |
   | 42 | +------------------------------+
   | 43 | | DRBL SERVER                  |
   | 44 | |                              |
45 | 45 | |  +-- [eth0] 140.110.25.101  +- to WAN
46 |    | |  |
   | 46 | |  |
47 | 47 | |  +-- [eth1] 192.168.61.254  +- to clients group 1 [ 16 clients, their IP
48 |    | |  |                             from 192.168.61.1 - 192.168.61.16]
49 |    | +-----------------------------+
   | 48 | |  |                             from 192.168.61.1 - 192.168.61.16]
   | 49 | +------------------------------+
50 | 50 | ******************************************************
51 | 51 | Total clients: 16
…
…
58 | 58 | ~$ sudo apt-get install dsh
59 | 59 | ~$ mkdir -p .dsh
60 |    | ~$ nmap -v -sP 192.168.61-63.1-11 | grep '(.*) .* up' | awk '{ print $3 }' | sort -n | sed 's#(##' | sed 's#)##' > .dsh/machines.list
   | 60 | ~$ for ((i=1;i<=16;i++)); do echo "192.168.61.$i" >> .dsh/machines.list; done
61 | 61 | }}}
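Since dsh reads ~/.dsh/machines.list by default, the list can be smoke-tested by fanning one command out to every client. A minimal sketch, assuming the remote shell in /etc/dsh/dsh.conf is set to ssh; flags are from the Debian dsh package (-a: all machines in the list, -c: run concurrently, -M: prefix each output line with the machine name):
{{{
~$ dsh -a -c -M -- uptime
}}}
If all 16 clients answer, the list matches the topology above and the same mechanism can later push Hadoop files and commands to the whole group.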
62 |    | * edit
   | 62 | * edit hadoop-0.18.2/conf/hadoop-site.xml
   | 63 | {{{
   | 64 | #!diff
   | 65 | --- hadoop-0.18.2/conf/hadoop-site.xml.org  2008-11-06 23:11:18.000000000 +0800
   | 66 | +++ hadoop-0.18.2/conf/hadoop-site.xml      2008-11-07 17:05:11.000000000 +0800
   | 67 | @@ -4,5 +4,31 @@
   | 68 |  <!-- Put site-specific property overrides in this file. -->
   | 69 |
   | 70 |  <configuration>
   | 71 | -
   | 72 | +  <property>
   | 73 | +    <name>fs.default.name</name>
   | 74 | +    <value>hdfs://192.168.61.254:9000/</value>
   | 75 | +    <description>
   | 76 | +      The name of the default file system. Either the literal string
   | 77 | +      "local" or a host:port for NDFS.
   | 78 | +    </description>
   | 79 | +  </property>
   | 80 | +  <property>
   | 81 | +    <name>mapred.job.tracker</name>
   | 82 | +    <value>192.168.61.254:9001</value>
   | 83 | +    <description>
   | 84 | +      The host and port that the MapReduce job tracker runs at. If
   | 85 | +      "local", then jobs are run in-process as a single map and
   | 86 | +      reduce task.
   | 87 | +    </description>
   | 88 | +  </property>
   | 89 | +  <property>
   | 90 | +    <name>dfs.data.dir</name>
   | 91 | +    <value>/hadoop/dfs/data</value>
   | 92 | +    <description>Determines where on the local filesystem an DFS data node
   | 93 | +      should store its blocks. If this is a comma-delimited
   | 94 | +      list of directories, then data will be stored in all named
   | 95 | +      directories, typically on different devices.
   | 96 | +      Directories that do not exist are ignored.
   | 97 | +    </description>
   | 98 | +  </property>
   | 99 |  </configuration>
   | 100 | }}}
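With those three properties pointing the clients at the DRBL server's eth1 address (192.168.61.254), the cluster can be brought up with the stock Hadoop 0.18.x scripts. A minimal sketch, assuming conf/slaves already lists the 16 client IPs and passwordless ssh from the server to the clients is in place:
{{{
~$ cd hadoop-0.18.2
~$ bin/hadoop namenode -format   # one-time: initialize the HDFS namespace on the master
~$ bin/start-all.sh              # namenode/jobtracker here, datanodes/tasktrackers on the slaves
~$ bin/hadoop dfsadmin -report   # check that the 16 datanodes registered
}}}
The fs.default.name and mapred.job.tracker values deliberately use the private eth1 address rather than the WAN-facing 140.110.25.101, so all HDFS and MapReduce traffic stays on the client network.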