import java.io.IOException;
import java.util.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
public class LineCount {
  // Mapper: emits the same constant key with a count of 1 for every input
  // line, so the final sum is the total number of lines.
  public static class Map extends MapReduceBase
      implements Mapper<LongWritable, Text, Text, IntWritable> {
    private final static IntWritable one = new IntWritable(1);
    private Text word = new Text("AKA Total Lines For You....");

    public void map(LongWritable key, Text value,
        OutputCollector<Text, IntWritable> output, Reporter reporter)
        throws IOException {
      output.collect(word, one);
    }
  }
  // Reducer: sums the counts for the single shared key. Because addition is
  // associative and commutative, the same class doubles as the combiner.
  public static class Reduce extends MapReduceBase
      implements Reducer<Text, IntWritable, Text, IntWritable> {
    public void reduce(Text key, Iterator<IntWritable> values,
        OutputCollector<Text, IntWritable> output, Reporter reporter)
        throws IOException {
      int sum = 0;
      while (values.hasNext()) {
        sum += values.next().get();
      }
      output.collect(key, new IntWritable(sum));
    }
  }
  public static void main(String[] args) throws Exception {
    JobConf conf = new JobConf(LineCount.class);
    // The default-filesystem URI names the NameNode only; it takes no path.
    conf.set("fs.default.name", "hdfs://localhost:8020");
    conf.setJobName("LineCount");
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);
    conf.setMapperClass(Map.class);
    conf.setCombinerClass(Reduce.class); // pre-aggregate counts on the map side
    conf.setReducerClass(Reduce.class);
    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);
    // Input is read from the local filesystem; output goes to HDFS. The
    // output directory must not already exist, or the job will fail.
    FileInputFormat.setInputPaths(conf, new Path("file:///Users/jagarandas/Work-Assignment/Analytics/analytics-poc/sample-data/"));
    FileOutputFormat.setOutputPath(conf, new Path("/home/hadoop/sample-data1/"));
    JobClient.runJob(conf);
  }
}
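
The listing above uses the original org.apache.hadoop.mapred API. For comparison, here is a minimal sketch of the same line count written against the newer org.apache.hadoop.mapreduce API; the class name LineCountNewApi is invented for this example, and the input and output paths are taken from the command line instead of being hardcoded.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class LineCountNewApi {

  // Same idea as above: one constant key, value 1 per input line.
  public static class LineMapper
      extends Mapper<LongWritable, Text, Text, IntWritable> {
    private static final IntWritable ONE = new IntWritable(1);
    private final Text word = new Text("AKA Total Lines For You....");

    @Override
    protected void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
      context.write(word, ONE);
    }
  }

  // Sums the counts; also used as the combiner.
  public static class SumReducer
      extends Reducer<Text, IntWritable, Text, IntWritable> {
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
      int sum = 0;
      for (IntWritable v : values) {
        sum += v.get();
      }
      context.write(key, new IntWritable(sum));
    }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "LineCount");
    job.setJarByClass(LineCountNewApi.class);
    job.setMapperClass(LineMapper.class);
    job.setCombinerClass(SumReducer.class);
    job.setReducerClass(SumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));   // input directory
    FileOutputFormat.setOutputPath(job, new Path(args[1])); // must not exist yet
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}

The logic is identical; only the plumbing changes (Job instead of JobConf, Context in place of OutputCollector and Reporter).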