{{{
#!html
<div style="text-align: center; color:#151B8D"><big style="font-weight: bold;"><big><big>
Hadoop Support for RDBMS
</big></big></big></div> <div style="text-align: center; color:#7E2217"><big style="font-weight: bold;"><big>
Hadoop 0.20 + JDBC + MySQL 5
</big></big></div>
}}}
[[PageOutline]]

= Description =

 * Install Hadoop 0.20, Apache2, MySQL 5 server & client, and phpMyAdmin first.
 * Place [http://dev.mysql.com/downloads/connector/j/ mysql-connector-java-*.jar] in Hadoop's lib directory (a standalone connectivity check is sketched at the end of this section).
 * In MySQL, create a school database containing a teacher table, and insert some sample rows, as follows:
{{{
#!sql
DROP TABLE IF EXISTS `school`.`teacher`;
CREATE TABLE `school`.`teacher` (
  `id` int(11) default NULL,
  `name` char(20) default NULL,
  `age` int(11) default NULL,
  `departmentID` int(11) default NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;

-- Sample rows; these values match the execution results shown at the end.
INSERT INTO `school`.`teacher` VALUES
  (0, 'waue', 29, 920),
  (1, 'rock', 30, 1231),
  (1, '2', 3, 4);
}}}

[[Image(wiki:NCHCCloudCourse100928_MYSQL:pd1.png)]]

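Before running the MapReduce job, it may help to verify that the connector jar and the database account actually work. Below is a minimal standalone sketch; the class name CheckConnection is ours, and the URL and credentials mirror the ones hard-coded in DBAccess.java further down:

{{{
#!java
// Sketch only: verify the MySQL connector and credentials outside Hadoop.
import java.sql.Connection;
import java.sql.DriverManager;

public class CheckConnection {
    public static void main(String[] args) throws Exception {
        // Same driver, URL and account as DBAccess.java below.
        Class.forName("com.mysql.jdbc.Driver");
        Connection con = DriverManager.getConnection(
                "jdbc:mysql://localhost/school", "root", "itri");
        System.out.println("connected: " + !con.isClosed());
        con.close();
    }
}
}}}
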
= Code =

== DBAccess.java ==

{{{
#!java
package db;

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.mapred.lib.db.DBConfiguration;
import org.apache.hadoop.mapred.lib.db.DBInputFormat;

public class DBAccess {
    @SuppressWarnings("deprecation")
    public static void main(String[] args) throws IOException {
        // JDBC URL, user and password of the MySQL server (hard-coded here).
        String[] argv = { "jdbc:mysql://localhost/school", "root", "itri" };

        try {
            JobConf conf = new JobConf(DBAccess.class);

            // Load the MySQL JDBC driver and tell Hadoop how to reach the DB.
            Class.forName("com.mysql.jdbc.Driver");
            DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver",
                    argv[0], argv[1], argv[2]);

            conf.setOutputKeyClass(LongWritable.class);
            conf.setOutputValueClass(Text.class);
            conf.setInputFormat(DBInputFormat.class);

            Path dstPath = new Path("dboutput");
            FileOutputFormat.setOutputPath(conf, dstPath);

            // Read the four columns of the teacher table, ordered by id.
            String[] fields = { "id", "name", "age", "departmentID" };
            DBInputFormat.setInput(conf, TeacherRecord.class, "teacher", null,
                    "id", fields);

            conf.setMapperClass(DBAccessMapper.class);
            conf.setReducerClass(IdentityReducer.class);

            // Remove a stale output directory so the job can be re-run.
            FileSystem hdfs = dstPath.getFileSystem(conf);
            if (hdfs.exists(dstPath)) {
                hdfs.delete(dstPath, true);
            }

            JobClient.runJob(conf);
        } catch (ClassNotFoundException e) {
            System.err.println("com.mysql.jdbc.Driver not found");
        }
    }
}
}}}
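
The setInput call above passes the table name, a null conditions string, the id column to order by, and the field list. For reference, DBInputFormat in the same package also offers an overload that takes raw SQL instead; a sketch, assuming the same teacher table:

{{{
#!java
// Sketch only: the query-based overload of DBInputFormat.setInput.
// The ORDER BY keeps record order stable across input splits.
DBInputFormat.setInput(conf, TeacherRecord.class,
        "SELECT id, name, age, departmentID FROM teacher ORDER BY id",
        "SELECT COUNT(*) FROM teacher");
}}}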

== DBAccessMapper.java ==

{{{
#!java
package db;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

public class DBAccessMapper extends MapReduceBase implements
        Mapper<LongWritable, TeacherRecord, LongWritable, Text> {
    // Emit each row's id as the key and its text form as the value.
    public void map(LongWritable key, TeacherRecord value,
            OutputCollector<LongWritable, Text> collector, Reporter reporter)
            throws IOException {
        collector.collect(new LongWritable(value.id),
                new Text(value.toString()));
    }
}
}}}

== !TeacherRecord.java ==

{{{
#!java
package db;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.lib.db.DBWritable;

public class TeacherRecord implements Writable, DBWritable {
    int id;
    String name;
    int age;
    int departmentID;

    // Writable: deserialize the record from Hadoop's binary stream.
    @Override
    public void readFields(DataInput in) throws IOException {
        this.id = in.readInt();
        this.name = Text.readString(in);
        this.age = in.readInt();
        this.departmentID = in.readInt();
    }

    // Writable: serialize the record to Hadoop's binary stream.
    @Override
    public void write(DataOutput out) throws IOException {
        out.writeInt(this.id);
        Text.writeString(out, this.name);
        out.writeInt(this.age);
        out.writeInt(this.departmentID);
    }

    // DBWritable: read one row of the JDBC result set, column by column.
    @Override
    public void readFields(ResultSet result) throws SQLException {
        this.id = result.getInt(1);
        this.name = result.getString(2);
        this.age = result.getInt(3);
        this.departmentID = result.getInt(4);
    }

    // DBWritable: bind the fields into a prepared INSERT/UPDATE statement.
    @Override
    public void write(PreparedStatement stmt) throws SQLException {
        stmt.setInt(1, this.id);
        stmt.setString(2, this.name);
        stmt.setInt(3, this.age);
        stmt.setInt(4, this.departmentID);
    }

    @Override
    public String toString() {
        return this.name + " " + this.age + " " + this.departmentID;
    }
}
}}}
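
Note that only readFields(ResultSet) is exercised by this read-only job; write(PreparedStatement) would be called if the job wrote records back to MySQL. A minimal sketch of that direction, assuming the driver configuration stays as in DBAccess.java (illustrative, not part of this tutorial's job):

{{{
#!java
// Sketch only: configure the job to write TeacherRecord back to MySQL.
// DBOutputFormat calls TeacherRecord.write(PreparedStatement) to bind
// each record into the INSERT statement it generates.
conf.setOutputFormat(DBOutputFormat.class);
DBOutputFormat.setOutput(conf, "teacher",
        "id", "name", "age", "departmentID");
}}}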

= Execution Results =

Each output line is the record's id (the map key) followed by the name, age and departmentID fields produced by TeacherRecord.toString():

{{{
$ /opt/hadoop/bin/hadoop dfs -cat dboutput/part-00000
0 waue 29 920
1 rock 30 1231
1 2 3 4
}}}

 * Reference: [http://jaguar13.javaeye.com/blog/683392]