Hadoop Data Deduplication Code
Below is a simple Hadoop MapReduce deduplication program. It reads records from the input path, removes duplicate lines, and writes each distinct line to the output path:
```java
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class Deduplication {

    // Mapper: emits each input line as the key with a NullWritable value,
    // so identical lines are grouped together during the shuffle.
    public static class Map extends Mapper<Object, Text, Text, NullWritable> {
        private final Text line = new Text();

        @Override
        public void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            line.set(value);
            context.write(line, NullWritable.get());
        }
    }

    // Reducer: each distinct line arrives exactly once as a key,
    // so writing the key once per call removes all duplicates.
    public static class Reduce extends Reducer<Text, NullWritable, Text, NullWritable> {
        @Override
        public void reduce(Text key, Iterable<NullWritable> values, Context context)
                throws IOException, InterruptedException {
            context.write(key, NullWritable.get());
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "deduplication");
        job.setJarByClass(Deduplication.class);
        job.setMapperClass(Map.class);
        // The reducer also works as a combiner, dropping duplicates early on the map side.
        job.setCombinerClass(Reduce.class);
        job.setReducerClass(Reduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
```
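To run the job, one might compile the class against the Hadoop client libraries, package it into a jar (for example `deduplication.jar`; the jar name and HDFS paths here are placeholders, not part of the original code), and submit it with `hadoop jar deduplication.jar Deduplication /input/path /output/path`. Given an input file containing the lines `a`, `b`, `a`, `c`, the output directory would contain `a`, `b`, `c`, each exactly once and in sorted key order. Reusing the reducer as a combiner is safe here because the reduce logic is idempotent: eliminating duplicates on the map side only shrinks the data shuffled across the network.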