Commit

update

Yen Liu committed Aug 9, 2023
1 parent 6389651 commit cce6f0c
Showing 2 changed files with 38 additions and 14 deletions.
35 changes: 21 additions & 14 deletions spark-poc/sparkJob/src/main/scala/driver/SparkDriver.scala
@@ -1,6 +1,7 @@
 package driver
 
 import org.apache.log4j.Logger
+import org.apache.spark.{SparkConf, SparkContext}
 import org.apache.spark.sql.SparkSession
 
 object SparkDriver {
@@ -10,20 +11,26 @@ object SparkDriver {
 
   def main(args: Array[String]): Unit = {
 
-    // attr
-    val arguments: String = "someArgument"
-    logger.debug("get spark session")
-    // val _spark:SparkSession = getSparkSession(appName)
-    // logger.debug(f"spark session = ${_spark}")
-    def getSparkSession(appName: String, executionType: String = "cluster", cores: Int = 3): SparkSession = {
-      // TODO : setup for local, cluster... mode
-      SparkSession
-        .builder()
-        .appName(appName)
-        .enableHiveSupport()
-        .getOrCreate()
-    }
-  }
+    // attr
+    val arguments: String = "someArgument"
+    logger.debug("get spark session")
+    // val _spark:SparkSession = getSparkSession(appName)
+    // logger.debug(f"spark session = ${_spark}")
+  }
+
+  def getSparkContext(appName: String, conf: SparkConf): SparkContext = {
+    new SparkContext(conf)
+  }
+
+  def getSparkSession(appName: String, executionType: String = "cluster", cores: Int = 3): SparkSession = {
+    // TODO : setup for local, cluster... mode
+    SparkSession
+      .builder()
+      .appName(appName)
+      //.enableHiveSupport()
+      .master("local[*]")
+      .getOrCreate()
+  }
 }
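
Note: the reworked getSparkSession still ignores its executionType and cores parameters; the TODO in the hunk above is left open. A minimal sketch of how those parameters could drive the builder, assuming the same signature is kept (this sketch is not part of the commit):

    import org.apache.spark.sql.SparkSession

    def getSparkSession(appName: String, executionType: String = "cluster", cores: Int = 3): SparkSession = {
      val builder = SparkSession.builder().appName(appName)
      executionType match {
        // Local mode: run in-process, using the requested number of cores.
        case "local" => builder.master(s"local[$cores]").getOrCreate()
        // Any other mode: let spark-submit supply the master; enable Hive support as before.
        case _       => builder.enableHiveSupport().getOrCreate()
      }
    }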
17 changes: 17 additions & 0 deletions spark-poc/sparkJob/src/main/scala/spark/myJob1.scala
@@ -1,13 +1,30 @@
 package spark
 
 import driver.SparkDriver
+import org.apache.spark.SparkConf
 import org.apache.spark.sql.SparkSession
 
 object myJob1 {
 
   def main(args: Array[String]): Unit = {
 
+    val conf = new SparkConf()
+      .setAppName("myJob1")
+      .setMaster("local[*]")
+
+    //val s_driver = SparkDriver.getSparkSession("myJob1");
+    val s_context = SparkDriver.getSparkContext("myJob1", conf);
+    val spark = SparkDriver
+
+    //println("s_driver = " + s_driver)
+    println("s_context = " + s_context)
+    println("spark = " + spark)
+
+    val rdd1 = s_context.makeRDD(1 to 4, 2)
+
+    println(rdd1.aggregate(0)(_ + _, _ + _)) // 10
+    println(rdd1.aggregate(1)(_ + _, _ + _)) // 13
   }
 
 }
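Two notes on the new job body. val spark = SparkDriver binds the SparkDriver object itself, not a SparkSession, so the second println prints an object reference rather than a session. The expected values in the trailing comments follow from how RDD.aggregate applies the zero value: once per partition in the seqOp and once more in the combOp. With makeRDD(1 to 4, 2) producing the partitions (1, 2) and (3, 4):

    // zeroValue = 0: (0 + 1 + 2) + (0 + 3 + 4) + 0 = 10
    // zeroValue = 1: (1 + 1 + 2) + (1 + 3 + 4) + 1 = 13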
