Implement an example of Spark's sortByKey operator in Java, Scala, and Python
Java implementation:
```java
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.Arrays;

public class SortByKeyDemo {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("SortByKeyDemo").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);
        // Create the source RDD
        JavaRDD<String> lines = sc.parallelize(Arrays.asList("apple 3", "banana 2", "orange 1"));
        // Convert to a JavaPairRDD of (word, count)
        JavaPairRDD<String, Integer> pairs = lines.mapToPair(line -> {
            String[] parts = line.split(" ");
            return new Tuple2<>(parts[0], Integer.parseInt(parts[1]));
        });
        // Sort by key (ascending by default)
        JavaPairRDD<String, Integer> sortedPairs = pairs.sortByKey();
        // Print the results (foreach runs on executors; in local mode output appears on the driver console)
        sortedPairs.foreach(tuple -> System.out.println(tuple._1 + " : " + tuple._2));
        sc.stop();
    }
}
```
Scala implementation:
```scala
import org.apache.spark.{SparkConf, SparkContext}

object SortByKeyDemo {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("SortByKeyDemo").setMaster("local")
    val sc = new SparkContext(conf)
    // Create the source RDD
    val lines = sc.parallelize(Seq("apple 3", "banana 2", "orange 1"))
    // Convert to a pair RDD of (word, count)
    val pairs = lines.map { line =>
      val parts = line.split(" ")
      (parts(0), parts(1).toInt)
    }
    // Sort by key (ascending by default)
    val sortedPairs = pairs.sortByKey()
    // Print the results (foreach runs on executors; in local mode output appears on the driver console)
    sortedPairs.foreach { case (word, count) => println(s"$word : $count") }
    sc.stop()
  }
}
```
Python implementation:
```python
from pyspark import SparkConf, SparkContext

conf = SparkConf().setAppName("SortByKeyDemo").setMaster("local")
sc = SparkContext(conf=conf)
# Create the source RDD
lines = sc.parallelize(["apple 3", "banana 2", "orange 1"])
# Convert to a pair RDD of (word, count)
pairs = lines.map(lambda line: (line.split(" ")[0], int(line.split(" ")[1])))
# Sort by key (ascending by default)
sorted_pairs = pairs.sortByKey()
# Print the results (foreach runs on executors; in local mode output appears on the driver console)
sorted_pairs.foreach(lambda kv: print("{} : {}".format(kv[0], kv[1])))
sc.stop()
```
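In all three APIs, sortByKey also accepts optional arguments: PySpark's signature is `sortByKey(ascending=True, numPartitions=None, keyfunc=...)`, while the Scala and Java versions take `(ascending, numPartitions)`. Below is a minimal Python sketch of a descending, case-insensitive sort; the app name and sample data are illustrative only, and `collect()` is used instead of `foreach` so the sorted order is visible on the driver.
```python
from pyspark import SparkConf, SparkContext

# Illustrative setup; app name is arbitrary
conf = SparkConf().setAppName("SortByKeyOptionsDemo").setMaster("local")
sc = SparkContext(conf=conf)
# Illustrative sample data with mixed-case keys
pairs = sc.parallelize([("Apple", 3), ("banana", 2), ("Orange", 1)])
# Descending, case-insensitive sort: keyfunc transforms the key only for comparison,
# the original keys are preserved in the output
sorted_pairs = pairs.sortByKey(ascending=False, keyfunc=lambda k: k.lower())
# collect() brings the results back to the driver, so iteration order is the sorted order
for word, count in sorted_pairs.collect():
    print("{} : {}".format(word, count))
sc.stop()
```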