4. MapReduce Deduplication in Java
Below is an example of a MapReduce deduplication program implemented in Java:
Mapper class:
```java
import java.io.IOException;

import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;

public class DuplicateRemoverMapper extends Mapper<LongWritable, Text, Text, NullWritable> {

    private final Text word = new Text();

    @Override
    public void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String line = value.toString().trim();
        if (!line.isEmpty()) { // skip empty lines
            // Emit the whole line as the key; identical lines are grouped together during the shuffle
            word.set(line);
            context.write(word, NullWritable.get());
        }
    }
}
```
Reducer class:
```java
import java.io.IOException;

import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;

public class DuplicateRemoverReducer extends Reducer<Text, NullWritable, Text, NullWritable> {

    @Override
    public void reduce(Text key, Iterable<NullWritable> values, Context context)
            throws IOException, InterruptedException {
        // All duplicates of a line arrive grouped under the same key,
        // so writing the key once per group yields the deduplicated output.
        context.write(key, NullWritable.get());
    }
}
```
Driver class:
```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;

public class DuplicateRemover {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "Duplicate Remover");

        job.setJarByClass(DuplicateRemover.class);
        job.setMapperClass(DuplicateRemoverMapper.class);
        job.setReducerClass(DuplicateRemoverReducer.class);

        // Map output and final output use the same types, so one pair of setters is enough
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);

        // args[0]: input directory, args[1]: output directory (must not already exist)
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
```
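Because the Reducer's input and output types are identical (Text, NullWritable), it can also act as a Combiner, collapsing duplicate lines on the map side before the shuffle. This is an optional tweak, not part of the original example; a minimal sketch of the one extra line that would go in the Driver's main method, before waitForCompletion:
```java
// Optional: reuse the Reducer as a Combiner to shrink shuffle traffic.
// Safe here because the Reducer's input and output types match exactly.
job.setCombinerClass(DuplicateRemoverReducer.class);
```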
The code above implements a simple deduplication MapReduce program. The Mapper emits each non-empty line of text as a key with a NullWritable value; the shuffle phase groups identical keys together, so the Reducer receives each distinct line exactly once and writes it out. The result is the input text with all duplicate lines removed.
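For a concrete sense of the behavior, here is a hypothetical run (illustrative data only): given the input lines on the left, the job's output file (e.g. part-r-00000) contains each distinct line once, in sorted key order as a side effect of the shuffle:
```
input (hypothetical)    output (part-r-00000)
apple                   apple
banana                  banana
apple                   cherry
cherry
banana
```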