```
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext

class Sale {}

object Sale {
  case class Sale(cardId: String, productId: Int, quantity: Int, money: Double)

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("Pharmacy Sales Analysis").setMaster("local")
    val sc = new SparkContext(conf)
    sc.setLogLevel("ERROR")
    val sqlContext = new SQLContext(sc)
    // load data
    val data = sc.textFile(args(0))
    // parse data
    val salesRDD: RDD[Sale] = data.map(line => line.split(" ")).map(arr => Sale(arr(0), arr(1).toInt, arr(2).toInt, arr(3).toDouble))
    import sqlContext.implicits._
    val df = salesRDD.toDF()
    val nameMoneyDF = df.select("productId", "money", "quantity")
    nameMoneyDF.show(5)
    val totalSales = nameMoneyDF.groupBy("productId").sum("quantity")
    totalSales.show(5)
    // calculate total sales quantity and revenue
  }
}
```
Please help me compute the total sales revenue of every product; `money` is the product's unit price.
If you want to compute the sales revenue of every product, you can use the following code:
```
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.functions._
class Sale {}
object Sale {
  case class Sale(cardId: String, productId: Int, quantity: Int, money: Double)

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("Pharmacy Sales Analysis").setMaster("local")
    val sc = new SparkContext(conf)
    sc.setLogLevel("ERROR")
    val sqlContext = new SQLContext(sc)
    // load data
    val data = sc.textFile(args(0))
    // parse each space-separated line into a Sale record
    val salesRDD: RDD[Sale] = data.map(line => line.split(" "))
      .map(arr => Sale(arr(0), arr(1).toInt, arr(2).toInt, arr(3).toDouble))
    import sqlContext.implicits._
    val df = salesRDD.toDF()
    val nameMoneyDF = df.select("productId", "money", "quantity")
    nameMoneyDF.show(5)
    // revenue per product: money is the unit price, so sum quantity * money
    val totalSales = nameMoneyDF.groupBy("productId")
      .agg(sum(col("quantity") * col("money")).as("totalRevenue"))
    totalSales.show(5)
  }
}
```
This code groups the rows by product ID and computes each product's revenue. Since `money` is the unit price, each row contributes `quantity * money` to the total, so `sum(col("quantity") * col("money"))` adds up the revenue for each product, and `as` renames the result column to "totalRevenue".
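If you prefer to express the aggregation in SQL, a minimal sketch along the same lines, reusing `df` and `sqlContext` from the code above, could look like this; the temporary-table name `sales` is just an assumption for illustration, and `registerTempTable` is the Spark 1.x `SQLContext` API matching the code above:
```
// Sketch only: same per-product revenue, expressed as a SQL query.
// "sales" is an arbitrary temporary-table name chosen for this example.
df.registerTempTable("sales")
val totalSalesSql = sqlContext.sql(
  "SELECT productId, SUM(quantity * money) AS totalRevenue FROM sales GROUP BY productId")
totalSalesSql.show(5)
```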
You can add the following code before the `totalSales.show(5)` call to sort the products by revenue in descending order and display the top 5:
```
val topProducts = totalSales.orderBy(desc("totalRevenue")).limit(5)
topProducts.show()
```
Note that `desc`, like `sum` and `col`, comes from `org.apache.spark.sql.functions`, which is already covered by the `import org.apache.spark.sql.functions._` at the top of the file.
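If you also want a single grand total across all products rather than one row per product, a minimal sketch, reusing `df` and the same `functions` import, could look like this; the column name `overallRevenue` is just an illustrative choice:
```
// Sketch only: total revenue over the whole dataset (all products combined).
val overallRevenue = df.agg(sum(col("quantity") * col("money")).as("overallRevenue"))
overallRevenue.show()
```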