spark模板
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
/** Minimal Spark job template: build a local session, run the computation, shut down. */
object SparkModel {

  def main(args: Array[String]): Unit = {
    // Explicit configuration: 2 local worker threads, fixed app name.
    val sparkConf = new SparkConf().setMaster("local[2]").setAppName("test")
    val session = SparkSession.builder().config(sparkConf).getOrCreate()

    // Reduce console noise: print only WARN and above.
    session.sparkContext.setLogLevel("WARN")

    // Run the job's computation, then release the session's resources.
    compute(session)
    session.stop()
  }

  /** Placeholder for the job's computation logic — fill in per job. */
  def compute(spark: SparkSession): Unit = {
    // Bring Dataset/DataFrame encoders and $-column syntax into scope for the body.
    import spark.implicits._
  }
}
本文标题:spark模板
本文链接:https://www.haomeiwen.com/subject/wdkhlqtx.html
网友评论