完善如下代码:import org.apache.spark.rdd.RDD import org.apache.spark.sql.{DataFrame,SparkSession} object First_Question { case class Student(name:String,age:String,sex:String) def main(args: Array[String]): Unit = { val spark: SparkSession = SparkSession .builder() .appName("First_Question") .master("local[*]") .getOrCreate() val rdd: RDD[String] = spark.sparkContext.parallelize(List("张三,20,男", "李四,22,男", "李婷,23,女","赵六,21,男")) val temp: RDD[Student] = rdd.map(s => { val split_rdd: Array[String] = s.split(",") Student(split_rdd(0), split_rdd(1), split_rdd(2)) }) import spark.implicits._ // DataFrame 源数据 val dataFrame: DataFrame = temp.toDF() spark.stop() } }
时间: 2023-06-12 12:02:29 浏览: 234
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}
object First_Question {

  /** One student record: name, age in years, and sex. */
  case class Student(name: String, age: Int, sex: String)

  /**
   * Builds a DataFrame of students from an in-memory RDD of
   * comma-separated "name,age,sex" strings and displays it.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName("First_Question")
      .master("local[*]")
      .getOrCreate()

    // Raw "name,age,sex" records distributed as an RDD.
    val lines: RDD[String] = spark.sparkContext.parallelize(
      List("张三,20,男", "李四,22,男", "李婷,23,女", "赵六,21,男"))

    // Parse each CSV-style line into a typed Student.
    val students: RDD[Student] = lines.map { line =>
      val fields = line.split(",")
      // trim guards against stray whitespace around the age field
      Student(fields(0), fields(1).trim.toInt, fields(2))
    }

    import spark.implicits._
    // Convert the typed RDD to a DataFrame via the case-class encoder;
    // column names (name, age, sex) are derived from the Student fields.
    val dataFrame: DataFrame = students.toDF()

    // The original built the DataFrame but never used it, so the program
    // produced no output; display schema and rows to complete the exercise.
    dataFrame.printSchema()
    dataFrame.show()

    spark.stop()
  }
}
阅读全文