* Here is the current DRBL setup:
{{{
        NIC    NIC IP             Clients
+------------------------------+
|         DRBL SERVER          |
|                              |
|   +-- [eth0] X.X.X.X         +- to WAN
|   |                          |
|   +-- [eth1] 192.168.61.254  +- to clients group 1 [ 10 clients, their IP
|   |                                                   from 192.168.61.1 - 192.168.61.10]
|   +-- [eth2] 192.168.62.254  +- to clients group 2 [ 11 clients, their IP
|   |                                                   from 192.168.62.1 - 192.168.62.11]
|   +-- [eth3] 192.168.63.254  +- to clients group 3 [ 10 clients, their IP
|   |                                                   from 192.168.63.1 - 192.168.63.10]
|   +-- [eth4] 192.168.64.254  +- to clients group 4 [ 10 clients, their IP
|   |                                                   from 192.168.64.1 - 192.168.64.10]
+------------------------------+
}}}
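* drblpush normally assigns these addresses when the DRBL server is configured; purely as a reference sketch, the equivalent manual assignment with iproute2 would look like the following (the /24 netmask per client group is an assumption, not something stated above).
{{{
#!/bin/sh
# Hypothetical manual equivalent of the NIC layout above; drblpush normally
# configures this for you. Assumes each client group sits in its own /24.
ip addr add 192.168.61.254/24 dev eth1   # gateway for clients group 1
ip addr add 192.168.62.254/24 dev eth2   # gateway for clients group 2
ip addr add 192.168.63.254/24 dev eth3   # gateway for clients group 3
ip addr add 192.168.64.254/24 dev eth4   # gateway for clients group 4
}}}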
* Hadoop uses SSH for its internal communication, so we have to set up passwordless SSH with a key exchange.
{{{
~$ # Accept the defaults with an empty passphrase so Hadoop can log in non-interactively.
~$ ssh-keygen
~$ # DRBL clients NFS-mount the server's home directories, so this authorized_keys is shared by every node.
~$ cp .ssh/id_rsa.pub .ssh/authorized_keys
~$ sudo apt-get install dsh
~$ mkdir -p .dsh
~$ # Build the dsh machine list by scanning all four client subnets for live hosts.
~$ nmap -v -sP 192.168.61-64.1-11 | grep '(.*) .* up' | awk '{ print $3 }' | sort -n | sed 's#(##' | sed 's#)##' > .dsh/machines.list
~$ # Alternatively, enumerate the client IPs directly (only group 2 has an 11th client).
~$ for i in $(seq 1 10); do echo "192.168.61.$i"; echo "192.168.62.$i"; echo "192.168.63.$i"; echo "192.168.64.$i"; done > .dsh/machines.list
~$ echo "192.168.62.11" >> .dsh/machines.list
}}}
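* To verify the key exchange and the machine list, a quick fan-out test with dsh; the -r ssh flag forces SSH in case the local dsh.conf still defaults to rsh (a sketch, assuming Debian's dancer's shell).
{{{
~$ # Run a trivial command on every host in .dsh/machines.list concurrently,
~$ # prefixing each output line with the machine name.
~$ dsh -a -c -M -r ssh -- uname -n
}}}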