标签:
// Stores the configuration entries (key -> value). A ConcurrentHashMap is used
// here; presumably the conf can be touched from multiple threads — the callers
// are not visible in this excerpt.
private val settings = new ConcurrentHashMap[String, String]()

// When defaults are requested, seed this conf from the JVM system properties.
if (loadDefaults) {
  loadFromSystemProperties(false)
}

/**
 * Copy every JVM system property whose key starts with "spark." into this conf.
 *
 * @param silent passed through to `set`; NOTE(review): `set` is defined outside
 *               this excerpt — presumably `silent` suppresses warnings, confirm.
 * @return this SparkConf, enabling call chaining
 */
private[spark] def loadFromSystemProperties(silent: Boolean): SparkConf = {
  // Load any spark.* system properties
  for ((key, value) <- Utils.getSystemProperties if key.startsWith("spark.")) {
    set(key, value, silent)
  }
  this
}
class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationClient {

  // Call site at which this SparkContext was created: records the user class
  // nearest the top of the thread's stack and the Scala/Spark core class
  // nearest the bottom.
  private val creationSite: CallSite = Utils.getCallSite()

  // Whether more than one active SparkContext is allowed in this JVM.
  // Defaults to false, i.e. the SparkContext is a JVM-wide singleton.
  private val allowMultipleContexts: Boolean =
    config.getBoolean("spark.driver.allowMultipleContexts", false)

  // Enforce instance uniqueness (unless multiple contexts are allowed above)
  // and mark this instance as currently being constructed.
  SparkContext.markPartiallyConstructed(this, allowMultipleContexts)
/** Internal accessor for the SparkConf backing this context. */
private[spark] def conf: SparkConf = _conf

/**
 * Return a copy of this SparkContext's configuration. The configuration ''cannot'' be
 * changed at runtime.
 */
def getConf: SparkConf = conf.clone()

/** JAR paths registered with this context (backed by `_jars`, set elsewhere). */
def jars: Seq[String] = _jars

/** File paths registered with this context (backed by `_files`, set elsewhere). */
def files: Seq[String] = _files

/** The master URL, read from "spark.master" (required — see validation). */
def master: String = _conf.get("spark.master")

/** Deploy mode from "spark.submit.deployMode", defaulting to "client" when unset. */
def deployMode: String = _conf.getOption("spark.submit.deployMode").getOrElse("client")

/** The application name, read from "spark.app.name" (required — see validation). */
def appName: String = _conf.get("spark.app.name")

/** Whether event logging is enabled ("spark.eventLog.enabled", default false). */
private[spark] def isEventLogEnabled: Boolean = _conf.getBoolean("spark.eventLog.enabled", false)

/** Event-log directory, if one was configured (set elsewhere as `_eventLogDir`). */
private[spark] def eventLogDir: Option[URI] = _eventLogDir

/** Event-log compression codec, if configured (set elsewhere as `_eventLogCodec`). */
private[spark] def eventLogCodec: Option[String] = _eventLogCodec

/** True when the configured master denotes local mode. */
def isLocal: Boolean = Utils.isLocalMaster(_conf)
// Validate the configuration and fail fast on missing required settings.
_conf.validateSettings()

if (!_conf.contains("spark.master")) {
  throw new SparkException("A master URL must be set in your configuration")
}
if (!_conf.contains("spark.app.name")) {
  throw new SparkException("An application name must be set in your configuration")
}

// System property spark.yarn.app.id must be set if user code ran by AM on a YARN cluster
if (master == "yarn" && deployMode == "cluster" && !_conf.contains("spark.yarn.app.id")) {
  // FIX: the original message contained a mis-encoded curly apostrophe ("isn‘t");
  // restored the plain ASCII apostrophe to match the upstream Spark message.
  throw new SparkException("Detected yarn cluster mode, but isn't running on a cluster. " +
    "Deployment to YARN is not supported directly by SparkContext. Please use spark-submit.")
}
标签:
原文地址:http://www.cnblogs.com/hankedang/p/5792296.html