@@ -26,22 +26,9 @@ import org.apache.spark.SparkConf
/**
 * An entry contains all meta information for a configuration.
 *
- * Config options created using this feature support variable expansion. If the config value
- * contains variable references of the form "${prefix:variableName}", the reference will be replaced
- * with the value of the variable depending on the prefix. The prefix can be one of:
- *
- * - no prefix: if the config key starts with "spark", looks for the value in the Spark config
- * - system: looks for the value in the system properties
- * - env: looks for the value in the environment
- *
- * So referencing "${spark.master}" will look for the value of "spark.master" in the Spark
- * configuration, while referencing "${env:MASTER}" will read the value from the "MASTER"
- * environment variable.
- *
- * For known Spark configuration keys (i.e. those created using `ConfigBuilder`), references
- * will also consider the default value when it exists.
- *
- * If the reference cannot be resolved, the original string will be retained.
+ * When applying variable substitution to config values, only references starting with "spark." are
+ * considered in the default namespace. For known Spark configuration keys (i.e. those created using
+ * `ConfigBuilder`), references will also consider the default value when it exists.
 *
 * Variable expansion is also applied to the default values of config entries that have a default
 * value declared as a string.
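To make the substitution rule above concrete, a minimal hypothetical sketch follows (the keys, the assumed value of spark.master, and the wrapper method are illustrations only; reader.substitute is the ConfigReader method this patch switches to in the hunks below):

// Sketch only: assumes a ConfigReader backed by the current Spark settings,
// with spark.master set to "local[*]". References outside the spark.* namespace,
// or ones that cannot be resolved, are retained verbatim.
def expansionExample(reader: ConfigReader): (String, String) = {
  val expanded  = reader.substitute("${spark.master}:4040")   // "local[*]:4040"
  val untouched = reader.substitute("${not.a.spark.key}")     // left as-is
  (expanded, untouched)
}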
@@ -72,21 +59,14 @@ private[spark] abstract class ConfigEntry[T] (
  def defaultValueString: String

-  def readFrom(conf: JMap[String, String], getenv: String => String): T
+  def readFrom(reader: ConfigReader): T

  def defaultValue: Option[T] = None

  override def toString: String = {
    s"ConfigEntry(key=$key, defaultValue=$defaultValueString, doc=$doc, public=$isPublic)"
  }

-  protected def readAndExpand(
-      conf: JMap[String, String],
-      getenv: String => String,
-      usedRefs: Set[String] = Set()): Option[String] = {
-    Option(conf.get(key)).map(expand(_, conf, getenv, usedRefs))
-  }
-
}

private class ConfigEntryWithDefault[T] (
@@ -102,8 +82,8 @@ private class ConfigEntryWithDefault[T] (
  override def defaultValueString: String = stringConverter(_defaultValue)

-  def readFrom(conf: JMap[String, String], getenv: String => String): T = {
-    readAndExpand(conf, getenv).map(valueConverter).getOrElse(_defaultValue)
+  def readFrom(reader: ConfigReader): T = {
+    reader.get(key).map(valueConverter).getOrElse(_defaultValue)
  }

}
@@ -121,12 +101,9 @@ private class ConfigEntryWithDefaultString[T] (
  override def defaultValueString: String = _defaultValue

-  def readFrom(conf: JMap[String, String], getenv: String => String): T = {
-    Option(conf.get(key))
-      .orElse(Some(_defaultValue))
-      .map(ConfigEntry.expand(_, conf, getenv, Set()))
-      .map(valueConverter)
-      .get
+  def readFrom(reader: ConfigReader): T = {
+    val value = reader.get(key).getOrElse(reader.substitute(_defaultValue))
+    valueConverter(value)
  }

}
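As a hedged usage sketch of the precedence implemented just above (the key names and the Int conversion are hypothetical): an explicitly set value wins, otherwise the string default is expanded with reader.substitute before conversion.

// Sketch only: hypothetical keys, mirroring ConfigEntryWithDefaultString.readFrom.
// If "spark.history.ui.port" is set, that value is used as-is; otherwise the
// default "${spark.ui.port}" is substituted against the current config first.
def historyUiPort(reader: ConfigReader): Int = {
  reader.get("spark.history.ui.port")
    .getOrElse(reader.substitute("${spark.ui.port}"))
    .toInt
}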
@@ -146,8 +123,8 @@ private[spark] class OptionalConfigEntry[T](
  override def defaultValueString: String = "<undefined>"

-  override def readFrom(conf: JMap[String, String], getenv: String => String): Option[T] = {
-    readAndExpand(conf, getenv).map(rawValueConverter)
+  override def readFrom(reader: ConfigReader): Option[T] = {
+    reader.get(key).map(rawValueConverter)
  }

}
@@ -164,62 +141,21 @@ private class FallbackConfigEntry[T] (
  override def defaultValueString: String = s"<value of ${fallback.key}>"

-  override def readFrom(conf: JMap[String, String], getenv: String => String): T = {
-    Option(conf.get(key)).map(valueConverter).getOrElse(fallback.readFrom(conf, getenv))
+  override def readFrom(reader: ConfigReader): T = {
+    reader.get(key).map(valueConverter).getOrElse(fallback.readFrom(reader))
  }

}

-private object ConfigEntry {
+private[spark] object ConfigEntry {

  private val knownConfigs = new java.util.concurrent.ConcurrentHashMap[String, ConfigEntry[_]]()

-  private val REF_RE = "\\$\\{(?:(\\w+?):)?(\\S+?)\\}".r
-
  def registerEntry(entry: ConfigEntry[_]): Unit = {
    val existing = knownConfigs.putIfAbsent(entry.key, entry)
    require(existing == null, s"Config entry ${entry.key} already registered!")
  }

  def findEntry(key: String): ConfigEntry[_] = knownConfigs.get(key)

-  /**
-   * Expand the `value` according to the rules explained in ConfigEntry.
-   */
-  def expand(
-      value: String,
-      conf: JMap[String, String],
-      getenv: String => String,
-      usedRefs: Set[String]): String = {
-    REF_RE.replaceAllIn(value, { m =>
-      val prefix = m.group(1)
-      val name = m.group(2)
-      val replacement = prefix match {
-        case null =>
-          require(!usedRefs.contains(name), s"Circular reference in $value: $name")
-          if (name.startsWith("spark.")) {
-            Option(findEntry(name))
-              .flatMap(_.readAndExpand(conf, getenv, usedRefs = usedRefs + name))
-              .orElse(Option(conf.get(name)))
-              .orElse(defaultValueString(name))
-          } else {
-            None
-          }
-        case "system" => sys.props.get(name)
-        case "env" => Option(getenv(name))
-        case _ => None
-      }
-      Regex.quoteReplacement(replacement.getOrElse(m.matched))
-    })
-  }
-
-  private def defaultValueString(key: String): Option[String] = {
-    findEntry(key) match {
-      case e: ConfigEntryWithDefault[_] => Some(e.defaultValueString)
-      case e: ConfigEntryWithDefaultString[_] => Some(e.defaultValueString)
-      case e: FallbackConfigEntry[_] => defaultValueString(e.fallback.key)
-      case _ => None
-    }
-  }
-
}
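Finally, a minimal sketch of the fallback behaviour retained in FallbackConfigEntry.readFrom above (the deprecated key and the helper itself are hypothetical): the entry's own key is consulted first, and only if it is unset does resolution defer to the replacement entry.

// Sketch only: hypothetical helper mirroring FallbackConfigEntry.readFrom.
// A deprecated key can shadow its replacement; if the deprecated key is unset,
// the replacement ConfigEntry is read through the same ConfigReader.
def resolveWithFallback[T](
    reader: ConfigReader,
    deprecatedKey: String,
    convert: String => T,
    replacement: ConfigEntry[T]): T = {
  reader.get(deprecatedKey).map(convert).getOrElse(replacement.readFrom(reader))
}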