diff --git a/spark-poc/sparkJob/src/main/scala/driver/SparkDriver.scala b/spark-poc/sparkJob/src/main/scala/driver/SparkDriver.scala
index 802d781..29e057f 100644
--- a/spark-poc/sparkJob/src/main/scala/driver/SparkDriver.scala
+++ b/spark-poc/sparkJob/src/main/scala/driver/SparkDriver.scala
@@ -1,6 +1,7 @@
 package driver
 
 import org.apache.log4j.Logger
+import org.apache.spark.{SparkConf, SparkContext}
 import org.apache.spark.sql.SparkSession
 
 object SparkDriver {
@@ -10,20 +11,26 @@ object SparkDriver {
 
   def main(args: Array[String]): Unit = {
 
-    // attr
-    val arguments: String = "someArgument"
-    logger.debug("get spark session")
-//    val _spark:SparkSession = getSparkSession(appName)
-//    logger.debug(f"spark session = ${_spark}")
-
-    def getSparkSession(appName: String, executionType: String = "cluster", cores: Int = 3): SparkSession = {
-      // TODO : setup for local, cluster... mode
-      SparkSession
-        .builder()
-        .appName(appName)
-        .enableHiveSupport()
-        .getOrCreate()
-    }
   }
+  // attr
+  val arguments: String = "someArgument"
+  logger.debug("get spark session")
+  // val _spark:SparkSession = getSparkSession(appName)
+  // logger.debug(f"spark session = ${_spark}")
+
+  def getSparkContext(appName: String, conf: SparkConf): SparkContext = {
+    new SparkContext(conf)
+  }
+
+  def getSparkSession(appName: String, executionType: String = "cluster", cores: Int = 3): SparkSession = {
+    // TODO : setup for local, cluster... mode
+    SparkSession
+      .builder()
+      .appName(appName)
+      //.enableHiveSupport()
+      .master("local[*]")
+      .getOrCreate()
+
+  }
 
 }
diff --git a/spark-poc/sparkJob/src/main/scala/spark/myJob1.scala b/spark-poc/sparkJob/src/main/scala/spark/myJob1.scala
index 7af6bd9..70d906e 100644
--- a/spark-poc/sparkJob/src/main/scala/spark/myJob1.scala
+++ b/spark-poc/sparkJob/src/main/scala/spark/myJob1.scala
@@ -1,13 +1,30 @@
 package spark
 
 import driver.SparkDriver
+import org.apache.spark.SparkConf
 import org.apache.spark.sql.SparkSession
 
 object myJob1 {
 
   def main(args: Array[String]): Unit = {
+
+    val conf = new SparkConf()
+      .setAppName("myJob1")
+      .setMaster("local[*]")
+
+    //val s_driver = SparkDriver.getSparkSession("myJob1");
+    val s_context = SparkDriver.getSparkContext("myJob1", conf);
     val spark = SparkDriver
+    //println("s_driver = " + s_driver)
+    println("s_context = " + s_context)
+    println("spark = " + spark)
+
+    val rdd1 = s_context.makeRDD(1 to 4, 2)
+
+    println(rdd1.aggregate(0)(_ + _, _ + _)) // 10
+
+    println(rdd1.aggregate(1)(_ + _, _ + _)) // 13
 
   }
 
 }
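
The // 10 and // 13 expectations in myJob1 follow from how RDD.aggregate applies its zero value: seqOp folds it into every partition, and combOp folds it in once more when the partition results are merged on the driver. makeRDD(1 to 4, 2) produces two partitions, (1, 2) and (3, 4), so aggregate(0) gives (0+1+2) + (0+3+4) + 0 = 10, while aggregate(1) gives (1+1+2) + (1+3+4) + 1 = 13. A minimal standalone sketch of the same arithmetic, assuming a hypothetical demo object outside the patched classes and a local master:

    import org.apache.spark.{SparkConf, SparkContext}

    object AggregateZeroValueDemo {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(
          new SparkConf().setAppName("AggregateZeroValueDemo").setMaster("local[*]"))

        // Two partitions: (1, 2) and (3, 4)
        val rdd = sc.makeRDD(1 to 4, 2)

        // Zero value 0: (0 + 1 + 2) + (0 + 3 + 4) + 0 = 10
        println(rdd.aggregate(0)(_ + _, _ + _))

        // Zero value 1: added once per partition and once more in the final combine,
        // giving (1 + 1 + 2) + (1 + 3 + 4) + 1 = 13
        println(rdd.aggregate(1)(_ + _, _ + _))

        sc.stop()
      }
    }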