SparkContext的构造函数如下,可见SparkContext使用SparkConf来读取设置的参数:
class SparkContext(config: SparkConf) extends Logging {
// The call site where this SparkContext was constructed; its long form is
// embedded in the error message thrown by assertNotStopped() below.
private val creationSite: CallSite = Utils.getCallSite()
#这里从传入的config中读取变量spark.driver.allowMultipleContexts,看是否允许同时存在多个SparkContext,默认是false
// Whether multiple SparkContexts may coexist in one JVM; read from the
// passed-in config, defaulting to false (i.e. only one context allowed).
private val allowMultipleContexts: Boolean =
config.getBoolean("spark.driver.allowMultipleContexts", false)
#标记当前SparkContext为部分构造状态,并据allowMultipleContexts检查是否允许同时存在多个SparkContext
// Register this context as partially constructed; presumably fails fast when
// another context exists and multiple contexts are disallowed — TODO confirm
// against SparkContext.markPartiallyConstructed (not visible here).
SparkContext.markPartiallyConstructed(this, allowMultipleContexts)
#记录SparkContext构造时的当前时间,作为启动时间
// Wall-clock time (ms since epoch) at which this context was constructed.
val startTime = System.currentTimeMillis()
// Flips to true once this context is stopped; checked by assertNotStopped().
private[spark] val stopped: AtomicBoolean = new AtomicBoolean(false)
#如果当前SparkContext已经停止,则抛出异常
/**
 * Fails fast if this SparkContext has already been stopped.
 *
 * The exception message reports both where this (now stopped) context was
 * created and where the currently active context — if any — was created.
 */
private[spark] def assertNotStopped(): Unit = {
  if (stopped.get()) {
    // Describe the currently active context, if one exists.
    val activeSite = Option(SparkContext.activeContext.get())
      .map(_.creationSite.longForm)
      .getOrElse("(No active SparkContext.)")
    throw new IllegalStateException(
      s"""Cannot call methods on a stopped SparkContext.
         |This stopped SparkContext was created at:
         |
         |${creationSite.longForm}
         |
         |The currently active SparkContext was created at:
         |
         |$activeSite
         |""".stripMargin)
  }
}
在SparkConf的构造函数中,通过loadFromSystemProperties来读取spark的参数:
/**
 * Copies every JVM system property whose key starts with "spark." into this
 * SparkConf, then returns this conf to allow call chaining.
 *
 * @param silent forwarded to set(); presumably suppresses warnings there —
 *               confirm against the definition of set()
 */
private[spark] def loadFromSystemProperties(silent: Boolean): SparkConf = {
  // Only properties in the spark.* namespace count as Spark settings.
  Utils.getSystemProperties
    .withFilter { case (key, _) => key.startsWith("spark.") }
    .foreach { case (key, value) => set(key, value, silent) }
  this
}
其中settings的定义如下:
// Backing store for configuration entries; a ConcurrentHashMap, so individual
// reads and writes are thread-safe.
private val settings = new ConcurrentHashMap[String, String]()
sparkcontext中环境变量的读取和保存
猜你喜欢
转载自blog.csdn.net/tiantao2012/article/details/81454783
今日推荐
周排行