Changeset 25 for sample


Timestamp: Jul 3, 2008, 4:45:54 PM
Author: waue
Message: downgrade from 0.17 to 0.16; tested whether it works: not yet.

Location: sample/hadoop-0.16
Files: 29 edited

Legend: unprefixed lines are unchanged context, lines prefixed with "+" were added in the newer revision, lines prefixed with "-" were removed, and a line of "..." marks elided unchanged code.
  • sample/hadoop-0.16/WordCount2.java (r24 → r25)

      import org.apache.hadoop.io.LongWritable;
      import org.apache.hadoop.io.Text;
    - import org.apache.hadoop.mapred.FileInputFormat;
    - import org.apache.hadoop.mapred.FileOutputFormat;
      import org.apache.hadoop.mapred.JobClient;
      import org.apache.hadoop.mapred.JobConf;
    ...
          conf.setNumReduceTasks(reduceTasks);

    -     // conf.setInputPath(new Path(filename));
    -     FileInputFormat.setInputPaths(conf, new Path(filename));
    +     conf.setInputPath(new Path(filename));
    +
          conf.setOutputKeyClass(Text.class);
          conf.setOutputValueClass(IntWritable.class);

    -     // conf.setOutputPath(new Path(outputPath));
    -     FileOutputFormat.setOutputPath(conf, new Path(filename));
    +     conf.setOutputPath(new Path(outputPath));
    +
    ...
          // Delete the output directory if it exists already
          Path outputDir = new Path(outputPath);
    -     FileSystem.get(conf).delete(outputDir, true);
    +     FileSystem.get(conf).delete(outputDir);
          JobClient.runJob(conf);
        }
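This hunk swaps the 0.17-style static path helpers for the instance methods that 0.16's JobConf still provides. A minimal side-by-side sketch with illustrative paths (only a 0.17-era classpath, where the JobConf methods survive as deprecated, accepts both halves):

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.FileInputFormat;
    import org.apache.hadoop.mapred.FileOutputFormat;
    import org.apache.hadoop.mapred.JobConf;

    public class PathApiSketch {
      public static void main(String[] args) {
        JobConf conf = new JobConf(PathApiSketch.class);

        // Hadoop 0.16 style: instance methods on JobConf (restored by r25).
        conf.setInputPath(new Path("/tmp/in"));
        conf.setOutputPath(new Path("/tmp/out"));

        // Hadoop 0.17 style: static helpers on the format classes (removed by r25).
        FileInputFormat.setInputPaths(conf, new Path("/tmp/in"));
        FileOutputFormat.setOutputPath(conf, new Path("/tmp/out"));
      }
    }

Incidentally, the removed 0.17 line passed filename to FileOutputFormat.setOutputPath where outputPath was presumably intended; the restored 0.16 line writes to outputPath.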
  • sample/hadoop-0.16/test.java (r24 → r25)

      import java.io.RandomAccessFile;
      import java.util.StringTokenizer;
    -
      import org.apache.hadoop.fs.FileStatus;
      import org.apache.hadoop.fs.FileSystem;
    ...
          boolean b = admin.tableExists(text_table_name);
          System.out.println(b);
    -     /*
    +
          if (!admin.tableExists(text_table_name)) {
    ...
            System.out.println("table exist!");
          }
    -     */
    +

        }
  • sample/hadoop-0.16/tw/org/nchc/code/HBaseRecordPro.java (r23 → r25)

          conf.setNumMapTasks(mapTasks);
          conf.setNumReduceTasks(reduceTasks);
    -     // 0.16
    -     // conf.setInputPath(text_path);
    -     Convert.setInputPath(conf, text_path);
    +
    +     conf.setInputPath(text_path);
    +

          conf.setMapperClass(IdentityMapper.class);
    ...
          // delete tmp file
          // 0.16
    -     // FileSystem.get(conf).delete(text_path);
    -     FileSystem.get(conf).delete(text_path, true);
    +     FileSystem.get(conf).delete(text_path);

          setup.deleteFile(conf_tmp);
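The same revision also reverts the delete call: the 0.17 sources used the two-argument FileSystem.delete(Path, boolean recursive), while the 0.16 code here uses the single-argument form to remove the temporary path. A small sketch with an illustrative path (as above, only a 0.17-era classpath accepts both calls):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class DeleteApiSketch {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path tmp = new Path("/tmp/text_path");  // illustrative stand-in

        fs.delete(tmp);        // Hadoop 0.16 form (restored by r25)
        fs.delete(tmp, true);  // Hadoop 0.17 form, explicit recursion (removed by r25)
      }
    }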
  • sample/hadoop-0.16/tw/org/nchc/code/WordCount.java (r23 → r25)

          conf.setNumMapTasks(mapTasks);
          conf.setNumReduceTasks(reduceTasks);
    -     // 0.16
    -     // conf.setInputPath(new Path(wc.filepath));
    -     Convert.setInputPath(conf, new Path(wc.filepath));
    +
    +     conf.setInputPath(new Path(wc.filepath));
    +
          conf.setOutputKeyClass(Text.class);
          conf.setOutputValueClass(IntWritable.class);
    -     // 0.16
    -     // conf.setOutputPath(new Path(wc.outputPath));
    -     Convert.setOutputPath(conf, new Path(wc.outputPath));
    +
    +     conf.setOutputPath(new Path(wc.outputPath));

          conf.setMapperClass(MapClass.class);
    ...
          // Delete the output directory if it exists already
          Path outputDir = new Path(wc.outputPath);
    -     // 0.16
    -     FileSystem.get(conf).delete(outputDir, true);
    -
    +     FileSystem.get(conf).delete(outputDir);
          JobClient.runJob(conf);
        }
  • sample/hadoop-0.16/tw/org/nchc/code/WordCountFromHBase.java (r23 → r25)

          // input is Hbase format => TableInputFormat
          conf.setInputFormat(TableInputFormat.class);
    -     // 0.16
    -     // conf.setOutputPath(new Path(outputPath));
    -     Convert.setOutputPath(conf, new Path(outputPath));
    +     conf.setOutputPath(new Path(outputPath));
          // delete the old path with the same name
    -     FileSystem.get(conf).delete(new Path(outputPath), true);
    +     FileSystem.get(conf).delete(new Path(outputPath));
          JobClient.runJob(conf);
        }
  • sample/hadoop-0.16/tw/org/nchc/demo/DemoWordCount.java (r21 → r25)

      import org.apache.hadoop.mapred.Reducer;
      import org.apache.hadoop.mapred.Reporter;
    -
    - import tw.org.nchc.code.Convert;

      /**
    ...
          conf.setNumMapTasks(mapTasks);
          conf.setNumReduceTasks(reduceTasks);
    -     // 0.16
    -     // conf.setInputPath(new Path(filename));
    -     Convert.setInputPath(conf, new Path(filename));
    +
    +     conf.setInputPath(new Path(filename));
    +
          conf.setOutputKeyClass(Text.class);
          conf.setOutputValueClass(IntWritable.class);
    -     // 0.16
    -     // conf.setOutputPath(new Path(outputPath));
    -     Convert.setInputPath(conf, new Path(outputPath));
    +
    +     conf.setOutputPath(new Path(outputPath));
          conf.setMapperClass(MapClass.class);
          conf.setCombinerClass(ReduceClass.class);
    ...
          // Delete the output directory if it exists already
          Path outputDir = new Path(outputPath);
    -     // 0.16
    -     // FileSystem.get(conf).delete(outputDir);
    -     FileSystem.get(conf).delete(outputDir, true);
    +     FileSystem.get(conf).delete(outputDir);
          JobClient.runJob(conf);
        }
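WordCount.java, WordCountFromHBase.java, and DemoWordCount.java above all replace calls to the project's tw.org.nchc.code.Convert helper with plain 0.16 JobConf calls, and DemoWordCount.java drops its import of Convert. The changeset never shows Convert itself; judging only from its call sites it was a thin shim over the 0.17 static helpers, so the following reconstruction is hypothetical:

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.FileInputFormat;
    import org.apache.hadoop.mapred.FileOutputFormat;
    import org.apache.hadoop.mapred.JobConf;

    // Hypothetical reconstruction of tw.org.nchc.code.Convert; the diffs
    // show only its call sites, not its body.
    public class Convert {
      public static void setInputPath(JobConf conf, Path p) {
        FileInputFormat.setInputPaths(conf, p);   // 0.17 static helper
      }
      public static void addInputPath(JobConf conf, Path p) {
        FileInputFormat.addInputPath(conf, p);    // 0.17 static helper
      }
      public static void setOutputPath(JobConf conf, Path p) {
        FileOutputFormat.setOutputPath(conf, p);  // 0.17 static helper
      }
    }

LogFetcher.java below also calls a Convert.listPaths(FileSystem, Path); the helper it inlines as a replacement suggests what that method looked like. Note, too, that the removed 0.17 line in DemoWordCount.java called Convert.setInputPath with the output path, apparently a copy-paste slip; the restored line calls conf.setOutputPath as intended.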
  • sample/hadoop-0.16/tw/org/nchc/demo/LogFetcher.java (r21 → r25)

      import java.text.ParseException;

    + import org.apache.hadoop.fs.FileStatus;
      import org.apache.hadoop.fs.FileSystem;
      import org.apache.hadoop.fs.Path;
    ...
      import org.apache.hadoop.mapred.OutputCollector;
      import org.apache.hadoop.mapred.Reporter;
    -
    - import tw.org.nchc.code.Convert;

      /**
    ...
          }
        }
    -
    +   static public Path[] listPaths(FileSystem fsm, Path path) throws IOException
    +   {
    +     FileStatus[] fss = fsm.listStatus(path);
    +     int length = fss.length;
    +     Path[] pi = new Path[length];
    +     for (int i = 0; i < length; i++)
    +     {
    +       pi[i] = fss[i].getPath();
    +     }
    +     return pi;
    +   }
        public static void runMapReduce(String table, String dir)
            throws IOException {
    ...
          jobConf.set(TABLE, table);
          // my convert function from 0.16 to 0.17
    -     Path[] in = Convert.listPaths(fs, InputDir);
    +     Path[] in = listPaths(fs, InputDir);
          if (fs.isFile(InputDir)) {
    -       // 0.16
    -       // jobConf.setInputPath(InputDir);
    -       Convert.setInputPath(jobConf, InputDir);
    +       jobConf.setInputPath(InputDir);
          } else {
            for (int i = 0; i < in.length; i++) {
              if (fs.isFile(in[i])) {
    -           // 0.16
    -           // jobConf.addInputPath(in[i]);
    -           Convert.addInputPath(jobConf, in[i]);
    +           jobConf.addInputPath(in[i]);
              } else {
                // my convert function from 0.16 to 0.17
    -           Path[] sub = Convert.listPaths(fs, in[i]);
    +           Path[] sub = listPaths(fs, in[i]);
                for (int j = 0; j < sub.length; j++) {
                  if (fs.isFile(sub[j])) {
    -               // 0.16
    -               // jobConf.addInputPath(sub[j]);
    -               Convert.addInputPath(jobConf, sub[j]);
    +               jobConf.addInputPath(sub[j]);
                  }
                }
    ...
            }
          }
    -     // 0.16
    -     // jobConf.setOutputPath(tempDir);
    -     Convert.setOutputPath(jobConf, tempDir);
    +     jobConf.setOutputPath(tempDir);

          jobConf.setMapperClass(MapClass.class);
    ...

          JobClient.runJob(jobConf);
    -     // 0.16
    -     // fs.delete(tempDir);
    -     fs.delete(tempDir, true);
    -
    +
    +     fs.delete(tempDir);
          fs.close();
        }
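Rather than keep depending on Convert, r25 inlines a listPaths helper into LogFetcher.java that adapts FileSystem.listStatus, which returns FileStatus[], to the Path[] the surrounding code expects. A self-contained sketch of the same shape, with an illustrative directory:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ListPathsSketch {
      // Same shape as the helper added above: unwrap the FileStatus[]
      // returned by listStatus into a plain Path[].
      static Path[] listPaths(FileSystem fs, Path dir) throws IOException {
        FileStatus[] stats = fs.listStatus(dir);
        Path[] paths = new Path[stats.length];
        for (int i = 0; i < stats.length; i++) {
          paths[i] = stats[i].getPath();
        }
        return paths;
      }

      public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.get(new Configuration());
        // Illustrative directory; any existing HDFS or local path works.
        for (Path p : listPaths(fs, new Path("/tmp/logs"))) {
          System.out.println(p);
        }
      }
    }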
  • sample/hadoop-0.16/tw/org/nchc/util/SequenceFileProcessor.java (r21 → r25)

    - /**
    -  * Program: BuildHTable.java
    -  * Editor: Waue Chen
    -  * From : NCHC. Taiwn
    -  * Last Update Date: 07/02/2008
    -  * Upgrade to 0.17
    -  * Re-code from : Cloud9: A MapReduce Library for Hadoop
    + /*
    +  * Cloud9: A MapReduce Library for Hadoop
    +  *
    +  * Licensed under the Apache License, Version 2.0 (the "License"); you
    +  * may not use this file except in compliance with the License. You may
    +  * obtain a copy of the License at
    +  *
    +  * http://www.apache.org/licenses/LICENSE-2.0
    +  *
    +  * Unless required by applicable law or agreed to in writing, software
    +  * distributed under the License is distributed on an "AS IS" BASIS,
    +  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
    +  * implied. See the License for the specific language governing
    +  * permissions and limitations under the License.
       */
    -

      package tw.org.nchc.util;
    ...
      import java.io.IOException;

    - import org.apache.hadoop.fs.FileStatus;
      import org.apache.hadoop.fs.FileSystem;
      import org.apache.hadoop.fs.Path;
    ...

      /**
    -  * Upgrade from hadoop 0.16 to 0.17
       * <p>
       * Harness for processing one or more {@link SequenceFile}s within a single
    ...
        private void run() throws IOException {
          if (!FileSystem.get(conf).isFile(mPath)) {
    -       Path[] pa = new Path[] { mPath };
    -       Path p;
    -       // hadoop 0.17 -> listStatus();
    -       FileStatus[] fi = FileSystem.get(conf).listStatus(pa);
    -       for (int i = 0; i < fi.length; i++) {
    -         p = fi[i].getPath();
    +       for (Path p : FileSystem.get(conf).listPaths(new Path[] { mPath })) {
            // System.out.println("Applying to " + p);
            applyToFile(p);
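Here the listing migration runs in the opposite direction from LogFetcher.java: the 0.17-style listStatus loop is replaced by 0.16's FileSystem.listPaths, which returns a Path[] directly. A sketch of both idioms in one class; conf, mPath, and applyToFile are illustrative stand-ins for the class's own members, and since both calls appear across this changeset's revisions, a 0.17-era classpath presumably accepts both loops:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ListingSketch {
      private final Configuration conf = new Configuration();
      private final Path mPath = new Path("/tmp/seqfiles");  // illustrative

      private void applyToFile(Path p) {  // stand-in for the real worker
        System.out.println("Applying to " + p);
      }

      void run() throws IOException {
        // 0.16 idiom (restored by r25): listPaths returns Path[] directly.
        for (Path p : FileSystem.get(conf).listPaths(new Path[] { mPath })) {
          applyToFile(p);
        }
        // 0.17 idiom (removed by r25): listStatus returns FileStatus[],
        // so each entry is unwrapped with getPath().
        for (FileStatus f : FileSystem.get(conf).listStatus(new Path[] { mPath })) {
          applyToFile(f.getPath());
        }
      }

      public static void main(String[] args) throws IOException {
        new ListingSketch().run();
      }
    }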