/**
 * Program: SnortBase.java
 * Editor: Waue Chen
 * From : NCHC, Taiwan
 * Last Update Date: 07/02/2008
 */
/**
 * Purpose :
 *   This program parses pre-processed Snort alert logs and stores them
 *   into HBase (table "SnortBase").
 *
 * HowToUse :
 *   Make sure of the following :
 *   1. Upload the semicolon-delimited alert file to \
 *      hdfs (default: /user/waue/alert_meta) \
 *      $ bin/hadoop dfs -put <local alert file> alert_meta
 *   2. The parameter "path" in main points to the uploaded file.
 *   3. You should filter out malformed records manually beforehand; \
 *      every input line must carry all 19 semicolon-separated fields.
 *
 * Check Result:
 *   Go to the hbase console and type :
 *     hql > select * from SnortBase;
 */
package tw.org.nchc.code;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseAdmin;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTable;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

// One parsed Snort alert record. The constructor expects a single
// semicolon-delimited line with 19 fields.
class Log {

    Date now = new Date();

    long timestamp;

    String gid, sid, version;

    String alert_name, class_type, priority;

    String source, destination, type, ttl, tos, id, iplen, dgmlen;

    public Log(String data) {

        String[] arr = data.split(";");

        this.gid = arr[0];
        this.sid = arr[1];
        this.version = arr[2];
        this.alert_name = arr[3];
        this.class_type = arr[4];
        this.priority = arr[5];
        // fields 6..10 hold month, day, hour, minute, second
        this.timestamp = getTime(arr[7] + "/" + arr[6] + ":" + arr[8] + ":"
                + arr[9] + ":" + arr[10]);
        // this.timestamp = now.getTime();
        this.source = arr[11];
        this.destination = arr[12];
        this.type = arr[13];
        this.ttl = arr[14];
        this.tos = arr[15];
        this.id = arr[16];
        this.iplen = arr[17];
        this.dgmlen = arr[18];
    }

    // Parse "dd/MM:HH:mm:ss" into epoch milliseconds. Note that the year
    // is not part of the input, and a malformed string makes parse()
    // return null, so records must be pre-filtered (see the header
    // comment).
    long getTime(String str) {
        SimpleDateFormat sdf = new SimpleDateFormat("dd/MM:HH:mm:ss",
                Locale.TAIWAN);
        return sdf.parse(str, new ParsePosition(0)).getTime();
    }
}
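
// A minimal smoke test for the Log parser above, added as a sketch: the
// sample record and the LogDemo class name are illustrative assumptions,
// not part of the original project, but the field order follows what the
// Log constructor expects.
class LogDemo {
    public static void main(String[] args) {
        // 19 semicolon-delimited fields: gid; sid; version; alert name;
        // class type; priority; month; day; hour; minute; second;
        // source; destination; type; ttl; tos; id; iplen; dgmlen
        String line = "1;2003;8;DNS named version attempt;attempted-recon;2;"
                + "06;29;07;35;15;118.170.101.250;87.65.93.58;UDP;"
                + "64;0x0;48312;40;60";
        Log log = new Log(line);
        // Expected output: "DNS named version attempt 87.65.93.58 <millis>"
        System.out.println(log.alert_name + " " + log.destination + " "
                + log.timestamp);
    }
}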

// One row per destination IP: the mapper writes each alert's fields into
// HBase directly, so no reducer is needed.
public class SnortBase {
    static HBaseConfiguration conf = new HBaseConfiguration();

    // conf key under which the target table name is passed to the job
    public static final String TABLE = "table.name";

    static String tableName = "SnortBase";

    static HTable table = null;

    public static class MapClass extends MapReduceBase implements
            Mapper<WritableComparable, Text, Text, Writable> {

        @Override
        // MapReduceBase.configure(JobConf job) has a default empty
        // implementation; nothing is needed here yet.
        public void configure(JobConf job) {
        }

        public void map(WritableComparable key, Text value,
                OutputCollector<Text, Writable> output, Reporter reporter)
                throws IOException {

            Log log = new Log(value.toString());

            // Debug output: append each record and its parsed timestamp
            // to a local file for inspection.
            FileWriter out = new FileWriter(new File(
                    "/home/waue/Desktop/snort-result.txt"), true);
            out.write(value.toString() + "_time=" + log.timestamp + "\n");
            out.flush();
            out.close();

            if (table == null)
                table = new HTable(conf, new Text(tableName));

            // Row key is the destination IP; each field goes into its
            // own column, versioned by the alert timestamp.
            long lockId = table.startUpdate(new Text(log.destination));
            table.put(lockId, new Text("id:gid"), log.gid.getBytes());
            table.put(lockId, new Text("id:sid"), log.sid.getBytes());
            table.put(lockId, new Text("id:version"), log.version.getBytes());
            table.put(lockId, new Text("name:name"), log.alert_name.getBytes());
            table.put(lockId, new Text("name:class"), log.class_type.getBytes());
            table.put(lockId, new Text("index:priority"), log.priority.getBytes());
            table.put(lockId, new Text("index:source"), log.source.getBytes());
            table.put(lockId, new Text("payload:type"), log.type.getBytes());
            table.put(lockId, new Text("payload:ttl"), log.ttl.getBytes());
            table.put(lockId, new Text("payload:tos"), log.tos.getBytes());
            table.put(lockId, new Text("payload:id"), log.id.getBytes());
            table.put(lockId, new Text("payload:iplen"), log.iplen.getBytes());
            table.put(lockId, new Text("payload:dgmlen"), log.dgmlen.getBytes());
            table.commit(lockId, log.timestamp);
        }
    }

    // Replacement for the deprecated FileSystem.listPaths: return the
    // paths of all entries directly under path.
    static public Path[] listPaths(FileSystem fsm, Path path)
            throws IOException {
        FileStatus[] fss = fsm.listStatus(path);
        int length = fss.length;
        Path[] pi = new Path[length];
        for (int i = 0; i < length; i++) {
            pi[i] = fss[i].getPath();
        }
        return pi;
    }
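
    // A helper sketched from the commented-out directory scan in
    // runMapReduce below (the name and placement are this sketch's own,
    // not part of the original flow): add every file under dir,
    // descending one level into sub-directories, as a job input. Not
    // called by default.
    static void addInputsRecursively(FileSystem fs, JobConf jobConf,
            Path dir) throws IOException {
        for (Path p : listPaths(fs, dir)) {
            if (fs.isFile(p)) {
                jobConf.addInputPath(p);
            } else {
                for (Path sub : listPaths(fs, p)) {
                    if (fs.isFile(sub)) {
                        jobConf.addInputPath(sub);
                    }
                }
            }
        }
    }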

    public static void runMapReduce(String tableName, String inpath)
            throws IOException {
        Path tempDir = new Path("/tmp/Mylog/");
        Path inputPath = new Path(inpath);
        FileSystem fs = FileSystem.get(conf);
        JobConf jobConf = new JobConf(conf, SnortBase.class);
        jobConf.setJobName("Snort Parse");
        jobConf.set(TABLE, tableName);
        // The automatic directory scan is omitted for now; see the
        // commented-out block below (and addInputsRecursively above).
        /*
         * Path[] in = listPaths(fs, InputDir);
         * if (fs.isFile(InputDir)) {
         *     jobConf.setInputPath(InputDir);
         * } else {
         *     for (int i = 0; i < in.length; i++) {
         *         if (fs.isFile(in[i])) {
         *             jobConf.addInputPath(in[i]);
         *         } else {
         *             Path[] sub = listPaths(fs, in[i]);
         *             for (int j = 0; j < sub.length; j++) {
         *                 if (fs.isFile(sub[j])) {
         *                     jobConf.addInputPath(sub[j]);
         *                 }
         *             }
         *         }
         *     }
         * }
         */
        jobConf.setInputPath(inputPath);
        jobConf.setOutputPath(tempDir);
        jobConf.setMapperClass(MapClass.class);
        JobClient client = new JobClient(jobConf);
        ClusterStatus cluster = client.getClusterStatus();
        jobConf.setNumMapTasks(cluster.getMapTasks());
        // Map-only job: the mapper writes to HBase itself.
        jobConf.setNumReduceTasks(0);
        fs.delete(tempDir);
        JobClient.runJob(jobConf);
        fs.delete(tempDir);
        fs.close();
    }
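
    // Example invocation (this mirrors what main() below does):
    //   runMapReduce("SnortBase", "/user/waue/alert_meta");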

    public static void createTable(String table) throws IOException {
        HBaseAdmin admin = new HBaseAdmin(conf);
        if (!admin.tableExists(new Text(table))) {
            System.out.println("1. " + table
                    + " table creating ... please wait");
            HTableDescriptor tableDesc = new HTableDescriptor(table);
            tableDesc.addFamily(new HColumnDescriptor("id:"));
            tableDesc.addFamily(new HColumnDescriptor("name:"));
            tableDesc.addFamily(new HColumnDescriptor("index:"));
            tableDesc.addFamily(new HColumnDescriptor("payload:"));
            tableDesc.addFamily(new HColumnDescriptor("priority:"));
            admin.createTable(tableDesc);
        } else {
            System.out.println("1. " + table + " table already exists.");
        }
        System.out.println("2. alert log files fetching using map/reduce");
    }
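
    // Once the job finishes, the stored rows can be checked from the
    // hbase console; a sketch in the 0.1-era hql syntax this code
    // targets (exact shell syntax may vary by version):
    //   hql > select * from SnortBase;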

    public static void main(String[] args) throws Exception {

        String path = "/user/waue/alert_meta";

        // The automatic upload step after parsing is omitted for now.
        /*
         * SnortParser sp = new SnortParser("/tmp/alert",
         *         "/tmp/alert_SnortBase");
         * sp.parseToLine();
         */
        createTable(tableName);

        runMapReduce(tableName, path);

    }

}
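
// A sketch of a typical run against the 2008-era Hadoop/HBase stack this
// code targets (the jar name is an assumption, not part of the project):
//   $ bin/hadoop jar nchc-code.jar tw.org.nchc.code.SnortBase
// then verify the rows in the hbase console as described in the header.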