Changeset 45 for sample/hadoop-0.16/tw/org/nchc
- Timestamp: Jul 24, 2008, 5:35:34 PM
- Location: sample/hadoop-0.16/tw/org/nchc/code
- Files: 3 edited
Legend:
- Unmodified: unmarked lines
- Added: lines prefixed with +
- Removed: lines prefixed with -
sample/hadoop-0.16/tw/org/nchc/code/LogParserGo.java
r31 → r45:

  package tw.org.nchc.code;

+ import java.io.File;
+ import java.io.FileWriter;
  import java.io.IOException;

  …

        try {
-         /*
+         LogParser log = new LogParser(value.toString());
          print(value.toString());
          FileWriter out = new FileWriter(new File(
-             "/home/waue/mr-result.txt"));
+             "/home/waue/Desktop/mr-result.txt"));
          out.write(value.toString());
          out.flush();
          out.close();
-         */
-         LogParser log = new LogParser(value.toString());

          if (table == null)
            table = new HTable(conf, new Text(tableName));
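Two things are worth noting about the debug block this revision re-enables: it writes to the task node's local filesystem (so the dump is only visible on the machine that ran the map task), and new FileWriter(file) truncates the file on every map() call, so only the last record survives. A minimal append-mode variant is sketched below; the helper class and method name are hypothetical and not part of this changeset.

    import java.io.File;
    import java.io.FileWriter;
    import java.io.IOException;

    // Hypothetical debug helper: append each mapper input record to a local
    // file instead of overwriting it on every call.
    class MapDebugDump {
        private static final File DUMP = new File("/home/waue/Desktop/mr-result.txt");

        static void append(String record) throws IOException {
            FileWriter out = new FileWriter(DUMP, true); // true = append mode
            out.write(record + "\n");
            out.flush();
            out.close();
        }
    }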
sample/hadoop-0.16/tw/org/nchc/code/SnortBase.java
r33 r45:

   * hql > select * from apache-log;

  +-------------------------+-------------------------+-------------------------+
  | Row                     | Column                  | Cell                    |
  +-------------------------+-------------------------+-------------------------+
  | 118.170.101.250         | http:agent              | Mozilla/4.0 (compatible;|
  |                         |                         | MSIE 4.01; Windows 95)  |
  +-------------------------+-------------------------+-------------------------+
  | 118.170.101.250         | http:bytesize           | 318                     |
  +-------------------------+-------------------------+-------------------------+
  ..........(skip)........
  +-------------------------+-------------------------+-------------------------+
  | 87.65.93.58             | http:method             | OPTIONS                 |
  +-------------------------+-------------------------+-------------------------+
  | 87.65.93.58             | http:protocol           | HTTP/1.1                |
  +-------------------------+-------------------------+-------------------------+
  | 87.65.93.58             | referrer:-              | *                       |
  +-------------------------+-------------------------+-------------------------+
  | 87.65.93.58             | url:*                   | -                       |
  +-------------------------+-------------------------+-------------------------+
  31 row(s) in set. (0.58 sec)

  …

  package tw.org.nchc.code;

+ import java.io.File;
+ import java.io.FileWriter;
  import java.io.IOException;
+ import java.text.SimpleDateFormat;
+ import java.util.Locale;

  import org.apache.hadoop.fs.FileStatus;

  …

  import org.apache.hadoop.mapred.Reporter;

+ class Log {
+
+   public Log(String data) throws Exception {
+
+     String[] arr = data.split(";");
+
+     this.gid = arr[0];
+     this.sid = arr[1];
+     this.version = arr[2];
+     this.alert_name = arr[3];
+     this.class_type = arr[4];
+     this.priority = arr[5];
+     // this.timestamp = "2008" + arr[6] + arr[7] + arr[8] + arr[9] + arr[10];
+     this.timestamp = getTime(arr[7] + "/" + arr[6] + "/2008:" + arr[8]
+         + ":" + arr[9] + ":" + arr[10]);
+     this.source = arr[11];
+     this.destination = arr[12];
+     this.type = arr[13];
+     this.ttl = arr[14];
+     this.tos = arr[15];
+     this.id = arr[16];
+     this.iplen = arr[17];
+     this.dgmlen = arr[18];
+   }
+
+   long timestamp;
+
+   String gid, sid, version;
+
+   String alert_name, class_type, priority;
+
+   String source, destination, type, ttl, tos, id, iplen, dgmlen;
+
+   long getTime(String str) throws Exception {
+     SimpleDateFormat sdf = new SimpleDateFormat("dd/MMM/yyyy:HH:mm:ss",
+         Locale.TAIWAN);
+     Long timestamp = sdf.parse(str).getTime();
+     return timestamp;
+   }
+ }
+
  // import AccessLogParser
- /**
-  * Access_log fetcher. TODO: FgnStatLog, Error_log, Access_log (Default,
-  * W3CExtended, IISw3cExtended)
-  */
  public class SnortBase {
    static HBaseConfiguration conf = new HBaseConfiguration();

  …

    public static final String TABLE = "table.name";

-   static String tableName;
+   static String tableName = "mySnort";

    static HTable table = null;

-   static void print(String str) {
-     System.out.println("STR = " + str);
-   }

    public static class MapClass extends MapReduceBase implements
        Mapper<WritableComparable, Text, Text, Writable> {

      @Override
      // MapReduceBase.configure(JobConf job)
      // Default implementation that does nothing.
      public void configure(JobConf job) {
        // String get(String name, String defaultValue)
        // Get the value of the name property. If no such property exists,
        // then defaultValue is returned.
-       tableName = job.get(TABLE, "");
      }

  …

          OutputCollector<Text, Writable> output, Reporter reporter)
          throws IOException {

        try {
-         /*
-         print(value.toString());
+         Log log = new Log(value.toString());
+
+         // dump the contents of value for inspection
          FileWriter out = new FileWriter(new File(
-             "/home/waue/mr-result.txt"));
-         out.write(value.toString());
+             "/home/waue/Desktop/snort-result.txt"));
+         out.write(value.toString() + "_time=" + log.timestamp + "\n");
          out.flush();
          out.close();
-         */
-         // SnortParser log = new SnortParser(value.toString(),0);

          if (table == null)
            table = new HTable(conf, new Text(tableName));
-         /*
-         long lockId = table.startUpdate(new Text(log.getIp()));
-         table.put(lockId, new Text("http:protocol"), log.getProtocol().getBytes());
-         table.put(lockId, new Text("http:method"), log.getMethod().getBytes());
-         table.put(lockId, new Text("http:code"), log.getCode().getBytes());
-         table.put(lockId, new Text("http:bytesize"), log.getByteSize().getBytes());
-         table.put(lockId, new Text("http:agent"), log.getAgent().getBytes());
-         table.put(lockId, new Text("url:" + log.getUrl()), log.getReferrer().getBytes());
-         table.put(lockId, new Text("referrer:" + log.getReferrer()), log.getUrl().getBytes());
-         table.commit(lockId, log.getTimestamp());
-         */

+         long lockId = table.startUpdate(new Text(log.destination));
+         table.put(lockId, new Text("id:gid"), log.gid.getBytes());
+         table.put(lockId, new Text("id:sid"), log.sid.getBytes());
+         table.put(lockId, new Text("id:version"), log.version.getBytes());
+         table.put(lockId, new Text("name:name"), log.alert_name.getBytes());
+         table.put(lockId, new Text("name:class"), log.class_type.getBytes());
+         table.put(lockId, new Text("index:priority"), log.priority.getBytes());
+         table.put(lockId, new Text("index:soure"), log.source.getBytes());
+         table.put(lockId, new Text("payload:type"), log.type.getBytes());
+         table.put(lockId, new Text("payload:ttl"), log.ttl.getBytes());
+         table.put(lockId, new Text("payload:tos"), log.tos.getBytes());
+         table.put(lockId, new Text("payload:id"), log.id.getBytes());
+         table.put(lockId, new Text("payload:iplen"), log.iplen.getBytes());
+         table.put(lockId, new Text("payload:dgmlen"), log.dgmlen.getBytes());
+         table.commit(lockId, log.timestamp);

        } catch (Exception e) {
          e.printStackTrace();
        }
      }
    }

  …

    }

-   public static void runMapReduce(String table, String dir)
+   public static void runMapReduce(String table, String inpath)
        throws IOException {
      Path tempDir = new Path("/tmp/Mylog/");
-     Path InputDir = new Path(dir);
+     Path InputPath = new Path(inpath);
      FileSystem fs = FileSystem.get(conf);
      JobConf jobConf = new JobConf(conf, SnortBase.class);
-     jobConf.setJobName("apache log fetcher");
+     jobConf.setJobName("Snort Parse");
      jobConf.set(TABLE, table);
-     Path[] in = listPaths(fs, InputDir);
-     if (fs.isFile(InputDir)) {
-       jobConf.setInputPath(InputDir);
-     } else {
-       for (int i = 0; i < in.length; i++) {
-         if (fs.isFile(in[i])) {
-           jobConf.addInputPath(in[i]);
-         } else {
-           Path[] sub = listPaths(fs, in[i]);
-           for (int j = 0; j < sub.length; j++) {
-             if (fs.isFile(sub[j])) {
-               jobConf.addInputPath(sub[j]);
-             }
-           }
-         }
-       }
-     }
+     // skip the automatic directory-scanning feature for now
+     /*
+      * Path[] in = listPaths(fs, InputDir); if (fs.isFile(InputDir)) {
+      * jobConf.setInputPath(InputDir); } else { for (int i = 0; i <
+      * in.length; i++) { if (fs.isFile(in[i])) {
+      * jobConf.addInputPath(in[i]); } else { Path[] sub = listPaths(fs,
+      * in[i]); for (int j = 0; j < sub.length; j++) { if (fs.isFile(sub[j])) {
+      * jobConf.addInputPath(sub[j]); } } } } }
+      */
+     jobConf.setInputPath(InputPath);
      jobConf.setOutputPath(tempDir);
      jobConf.setMapperClass(MapClass.class);

  …

            + " table creating ... please wait");
        HTableDescriptor tableDesc = new HTableDescriptor(table);
-       tableDesc.addFamily(new HColumnDescriptor("http:"));
-       tableDesc.addFamily(new HColumnDescriptor("url:"));
-       tableDesc.addFamily(new HColumnDescriptor("referrer:"));
+       tableDesc.addFamily(new HColumnDescriptor("id:"));
+       tableDesc.addFamily(new HColumnDescriptor("name:"));
+       tableDesc.addFamily(new HColumnDescriptor("index:"));
+       tableDesc.addFamily(new HColumnDescriptor("payload:"));
+       tableDesc.addFamily(new HColumnDescriptor("priority:"));
        admin.createTable(tableDesc);
      } else {

  …

    }

-   public static void main(String[] args) throws IOException {
+   public static void main(String[] args) throws IOException, Exception {
      String table_name = "snort";
-     String dir = "/user/waue/apache-log";
-
-     // if (eclipseRun) {
-     // table_name = "log";
-     // dir = "apache-log";
-     // } else if (args.length < 2) {
-     // System.out
-     // .println("Usage: logfetcher <access_log file or directory>
-     // <table_name>");
-     // System.exit(1);
-     // } else {
-     // table_name = args[1];
-     // dir = args[0];
-     // }
-
+     String path = "/user/waue/alert_meta";
+
+     // skip the automatic upload step after parsing for now
+     /*
+      * SnortParser sp = new
+      * SnortParser("/tmp/alert","/tmp/alert_SnortBase"); sp.parseToLine();
+      */
      creatTable(table_name);
-     runMapReduce(table_name, dir);
+
+     runMapReduce(table_name, path);

    }
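For orientation, the new Log constructor assumes each input record is a single semicolon-delimited line with at least 19 fields, where fields 6 through 10 carry the month, day, hour, minute and second that getTime() reassembles (with the year hard-coded to 2008). The sketch below uses a made-up sample record to show which index feeds which column; the real field contents depend on what SnortParser.parseToLine() writes to /user/waue/alert_meta.

    // Field layout assumed by Log(String data) in SnortBase (r45).
    // The sample record is hypothetical; only the positions match the code.
    public class LogFieldLayout {
        public static void main(String[] args) {
            String sample = "1;2003;8;WEB-MISC http probe;attempted-recon;2;"
                    + "Jul;24;17;35;34;"            // arr[6..10]: month, day, hour, min, sec
                    + "192.168.1.10;10.0.0.5;TCP;64;0x0;12345;60;40";
            String[] arr = sample.split(";");

            System.out.println("id:gid/sid/version      = " + arr[0] + " / " + arr[1] + " / " + arr[2]);
            System.out.println("name:name/class         = " + arr[3] + " / " + arr[4]);
            System.out.println("index:priority/soure    = " + arr[5] + " / " + arr[11]);
            System.out.println("row key (destination)   = " + arr[12]);
            System.out.println("payload:type/ttl/tos    = " + arr[13] + " / " + arr[14] + " / " + arr[15]);
            System.out.println("payload:id/iplen/dgmlen = " + arr[16] + " / " + arr[17] + " / " + arr[18]);
            System.out.println("timestamp fields        = " + arr[7] + "/" + arr[6] + "/2008 "
                    + arr[8] + ":" + arr[9] + ":" + arr[10]);
        }
    }

Each record becomes one HBase row keyed by the destination address, with the remaining fields spread over the id:, name:, index: and payload: column families created by creatTable(); the priority: family is created but never written, and the qualifier index:soure is spelled that way in the source.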
sample/hadoop-0.16/tw/org/nchc/code/SnortParser.java
r43 → r45:

    public static void main(String[] args) throws ParseException, Exception {
      String in = new String("/home/waue/Desktop/alert_m");
-     String ou = new String("/tmp/alert_my");
+     String ou = new String("/tmp/alert_SnortBase");
      SnortParser a = new SnortParser(in, ou);
      a.parseToLine();
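The new output path /tmp/alert_SnortBase matches the (still commented-out) SnortParser call in SnortBase.main, so the intended flow across the three files looks roughly like the sketch below. This is an assumption-laden sketch, not part of the changeset: it assumes SnortParser and SnortBase are in the same package, that creatTable() is callable from outside SnortBase the way its own main() calls it, and that the parsed file is copied into HDFS by hand, since that step is not automated here.

    package tw.org.nchc.code;

    // Rough end-to-end sketch (hypothetical): parse a local Snort alert file,
    // then load the parsed records into the HBase "snort" table.
    public class SnortPipelineSketch {
        public static void main(String[] args) throws Exception {
            // 1. Flatten the raw alert file into semicolon-delimited lines.
            SnortParser sp = new SnortParser("/home/waue/Desktop/alert_m",
                    "/tmp/alert_SnortBase");
            sp.parseToLine();

            // 2. Copy /tmp/alert_SnortBase into HDFS as /user/waue/alert_meta
            //    (done manually; the upload step is still commented out upstream).

            // 3. Create the HBase table and run the MapReduce loader.
            SnortBase.creatTable("snort");
            SnortBase.runMapReduce("snort", "/user/waue/alert_meta");
        }
    }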