// MapReduce example: initializing a per-task value in setup() from the job Configuration
package ClassCloud;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
public class HelloHadoop {
static public class HelloMapper extends
Mapper<LongWritable, Text, LongWritable, Text> {
String str = "";
public void setup(Context context) {
Configuration conf = context.getConfiguration();
this.str = conf.get("gogogo");
System.err.println("mapper:" + this.str);
}
public void map(LongWritable key, Text value, Context context)
throws IOException, InterruptedException {
// 將出入資料 原封不動的寫入 輸出
Text val = new Text();
val.set(this.str);
context.write((LongWritable) key, val);
}
}
static public class HelloReducer extends
Reducer<LongWritable, Text, LongWritable, Text> {
String str = "";
public void setup(Context context) {
Configuration conf = context.getConfiguration();
this.str = conf.get("gogogo");
System.err.println("reduce:" + this.str);
}
public void reduce(LongWritable key, Iterable<Text> values,
Context context) throws IOException, InterruptedException {
Text val = new Text();
// 取回 val 的資料
for (Text str : values) {
val.set(str);
}
// 將取回的資料引入輸出
// val.set(values.iterator().toString());
context.write(key, val);
}
}
public static void main(String[] args) throws IOException,
InterruptedException, ClassNotFoundException {
// 引入 $HADOOP_HOME/conf 內控制檔內的資料
Configuration conf = new Configuration();
conf.set("gogogo", "haha");
// 宣告job 取得conf 並設定名稱 Hadoop Hello World
Job job = new Job(conf, "Hadoop Hello World");
// 設定此運算的主程式
job.setJarByClass(HelloHadoop.class);
// 設定輸入路徑
FileInputFormat.setInputPaths(job, "text_input");
// 設定輸出路徑
FileOutputFormat.setOutputPath(job, new Path("output-hh4"));
// 指定定map class
job.setMapperClass(HelloMapper.class);
// 指定reduce class
job.setReducerClass(HelloReducer.class);
// 開使運算
job.waitForCompletion(true);
}
}