Skip to content

Commit e780177

Browse files
authored
Spark security options constraints (#1892)
* Resolved httpclient dependencies (#1887) * added security constraints
1 parent f742a93 commit e780177

File tree

9 files changed

+66
-51
lines changed

9 files changed

+66
-51
lines changed

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
* Info service
1010
* Security options configurable
1111
* Https with marathon-lb
12+
* Frontend performance improvements
1213
* Bugfix: Postgres library added
1314

1415
## 1.4.0 (May 17, 2017)

dist/src/main/resources/marathon-app-template.json

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -32,20 +32,21 @@
3232
"SPARK_HOME": "???",
3333
"MESOS_NATIVE_JAVA_LIBRARY": "???",
3434
"LD_LIBRARY_PATH": "???",
35-
"HDFS_USER_NAME": "???",
35+
"HADOOP_USER_NAME": "???",
3636
"SPARK_USER": "???",
37-
"HDFS_CONF_FROM_URI": "???",
37+
"HADOOP_CONF_FROM_URI": "???",
3838
"CORE_SITE_FROM_URI": "???",
39-
"HDFS_CONF_FROM_DFS": "???",
40-
"HDFS_CONF_FROM_DFS_NOT_SECURED": "???",
39+
"HDFS_CONF_URI": "???",
40+
"HADOOP_CONF_FROM_DFS": "???",
41+
"HADOOP_CONF_FROM_DFS_NOT_SECURED": "???",
4142
"HADOOP_FS_DEFAULT_NAME": "???",
4243
"HADOOP_CONF_URI": "???",
4344
"HADOOP_SECURITY_AUTH": "???",
4445
"HADOOP_RPC_PROTECTION": "???",
4546
"HADOOP_DFS_ENCRYPT_DATA_TRANSFER": "???",
4647
"HADOOP_SECURITY_TOKEN_USE_IP": "???",
47-
"HADOOP_NAMENODE_KERBEROS_PRINCIPAL": "???",
48-
"HADOOP_NAMENODE_KERBEROS_PRINCIPAL_PATTERN": "???",
48+
"HADOOP_NAMENODE_KRB_PRINCIPAL": "???",
49+
"HADOOP_NAMENODE_KRB_PRINCIPAL_PATTERN": "???",
4950
"HADOOP_DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES": "???",
5051
"HADOOP_DFS_ENCRYPT_DATA_CIPHER_KEY_BITLENGTH": "???",
5152
"HADOOP_CONF_DIR": "???",
@@ -56,7 +57,7 @@
5657
"ZOOKEEPER_LOG_LEVEL": "???",
5758
"SECURITY_TLS_ENABLE": "???",
5859
"SECURITY_TRUSTSTORE_ENABLE": "???",
59-
"SECURITY_KERBEROS_ENABLE": "???",
60+
"SECURITY_KRB_ENABLE": "???",
6061
"SECURITY_OAUTH2_ENABLE": "???",
6162
"SECURITY_MESOS_ENABLE": "???",
6263
"SPARK_SECURITY_KAFKA_ENABLE": "???"

docker/hdfs_utils.sh

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,8 @@ function generate_core-site-from-uri() {
2323
echo "[CORE-SITE] HADOOP $HADOOP_CONF_DIR/core-site.xml was NOT configured"
2424
exit 1
2525
fi
26+
echo "" >> ${VARIABLES}
27+
echo "export HADOOP_CONF_DIR=${HADOOP_CONF_DIR}" >> ${VARIABLES}
2628
echo "" >> ${SYSTEM_VARIABLES}
2729
echo "export HADOOP_CONF_DIR=${HADOOP_CONF_DIR}" >> ${SYSTEM_VARIABLES}
2830
}
@@ -40,6 +42,8 @@ function generate_hdfs-conf-from-uri() {
4042

4143
if [[ $? == 0 ]]; then
4244
echo "[HADOOP-CONF] HADOOP $CORE_SITE and $HDFS_SITE configured successfully"
45+
echo "" >> ${VARIABLES}
46+
echo "export HADOOP_CONF_DIR=${HADOOP_CONF_DIR}" >> ${VARIABLES}
4347
echo "" >> ${SYSTEM_VARIABLES}
4448
echo "export HADOOP_CONF_DIR=${HADOOP_CONF_DIR}" >> ${SYSTEM_VARIABLES}
4549
else
@@ -129,13 +133,13 @@ cat > "${HADOOP_CONF_DIR}/hdfs-site.xml" <<EOF
129133
</configuration>
130134
EOF
131135

132-
sed -i "s#__<KERBEROS_PRINCIPAL>__#$HADOOP_NAMENODE_KERBEROS_PRINCIPAL#" "${HADOOP_CONF_DIR}/hdfs-site.xml" \
136+
sed -i "s#__<KERBEROS_PRINCIPAL>__#$HADOOP_NAMENODE_KRB_PRINCIPAL#" "${HADOOP_CONF_DIR}/hdfs-site.xml" \
133137
&& echo "[hdfs-site.xml] dfs.namenode.kerberos.principal in hdfs-site.xml" \
134-
|| echo "[hdfs-site.xml-ERROR] Something went wrong when HADOOP_NAMENODE_KERBEROS_PRINCIPAL was configured in hdfs-site.xml"
138+
|| echo "[hdfs-site.xml-ERROR] Something went wrong when HADOOP_NAMENODE_KRB_PRINCIPAL was configured in hdfs-site.xml"
135139

136-
sed -i "s#__<KERBEROS_PRINCIPAL_PATTERN>__#$HADOOP_NAMENODE_KERBEROS_PRINCIPAL_PATTERN#" "${HADOOP_CONF_DIR}/hdfs-site.xml" \
140+
sed -i "s#__<KERBEROS_PRINCIPAL_PATTERN>__#$HADOOP_NAMENODE_KRB_PRINCIPAL_PATTERN#" "${HADOOP_CONF_DIR}/hdfs-site.xml" \
137141
&& echo "[hdfs-site.xml] dfs.namenode.kerberos.principal.pattern in hdfs-site.xml" \
138-
|| echo "[hdfs-site.xml-ERROR] Something went wrong when HADOOP_NAMENODE_KERBEROS_PRINCIPAL_PATTERN was configured in hdfs-site.xml"
142+
|| echo "[hdfs-site.xml-ERROR] Something went wrong when HADOOP_NAMENODE_KRB_PRINCIPAL_PATTERN was configured in hdfs-site.xml"
139143

140144
sed -i "s#__<ENCRYPT_DATA_TRANSFER>__#$HADOOP_DFS_ENCRYPT_DATA_TRANSFER#" "${HADOOP_CONF_DIR}/hdfs-site.xml" \
141145
&& echo "[hdfs-site.xml] dfs.encrypt.data.transfer in hdfs-site.xml" \
@@ -152,6 +156,8 @@ sed -i "s#__<ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH>__#$HADOOP_DFS_ENCRYPT_D
152156

153157
if [[ $? == 0 ]]; then
154158
echo "[HADOOP-CONF] HADOOP $CORE_SITE and $HDFS_SITE configured successfully"
159+
echo "" >> ${VARIABLES}
160+
echo "export HADOOP_CONF_DIR=${HADOOP_CONF_DIR}" >> ${VARIABLES}
155161
echo "" >> ${SYSTEM_VARIABLES}
156162
echo "export HADOOP_CONF_DIR=${HADOOP_CONF_DIR}" >> ${SYSTEM_VARIABLES}
157163
else
@@ -209,6 +215,8 @@ sed -i "s#__<FS_DEFAULT_NAME>__#$HADOOP_FS_DEFAULT_NAME#" "${HADOOP_CONF_DIR}/co
209215

210216
if [[ $? == 0 ]]; then
211217
echo "[HADOOP-CONF] HADOOP $CORE_SITE not secured configured successfully"
218+
echo "" >> ${VARIABLES}
219+
echo "export HADOOP_CONF_DIR=${HADOOP_CONF_DIR}" >> ${VARIABLES}
212220
echo "" >> ${SYSTEM_VARIABLES}
213221
echo "export HADOOP_CONF_DIR=${HADOOP_CONF_DIR}" >> ${SYSTEM_VARIABLES}
214222
else

docker/security-config.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ fi
4646
####################################################
4747
## Kerberos config set SPARTA_PRINCIPAL_NAME and SPARTA_KEYTAB_PATH
4848
####################################################
49-
if [ -v SECURITY_KERBEROS_ENABLE ] && [ ${#SECURITY_KERBEROS_ENABLE} != 0 ] && [ $SECURITY_KERBEROS_ENABLE == "true" ]; then
49+
if [ -v SECURITY_KRB_ENABLE ] && [ ${#SECURITY_KRB_ENABLE} != 0 ] && [ $SECURITY_KRB_ENABLE == "true" ]; then
5050
_log_sparta_sec "Configuring kerberos ..."
5151
source /kerberos-server-config.sh
5252
_log_sparta_sec "Configuring kerberos Ok"

docker/sparta-common.sh

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -47,17 +47,17 @@ function initSpark() {
4747
}
4848

4949
function initHdfs() {
50-
if [[ -v HDFS_USER_NAME ]]; then
51-
echo "" >> ${VARIABLES}
52-
echo "export HADOOP_USER_NAME=${HDFS_USER_NAME}" >> ${VARIABLES}
50+
if [[ -v HADOOP_USER_NAME ]]; then
51+
echo "" >> ${VARIABLES}
52+
echo "export HADOOP_USER_NAME=${HADOOP_USER_NAME}" >> ${VARIABLES}
5353
echo "" >> ${SYSTEM_VARIABLES}
54-
echo "export HADOOP_USER_NAME=${HDFS_USER_NAME}" >> ${SYSTEM_VARIABLES}
54+
echo "export HADOOP_USER_NAME=${HADOOP_USER_NAME}" >> ${SYSTEM_VARIABLES}
5555
fi
5656

57-
if [[ ! -v HDFS_CONF_FROM_URI ]]; then
58-
HDFS_CONF_FROM_URI="false"
57+
if [[ ! -v HADOOP_CONF_FROM_URI ]]; then
58+
HADOOP_CONF_FROM_URI="false"
5959
fi
60-
if [ $HDFS_CONF_FROM_URI == "true" ] && [ -v HADOOP_CONF_URI ] && [ ${#HADOOP_CONF_URI} != 0 ]; then
60+
if [ $HADOOP_CONF_FROM_URI == "true" ] && [ -v HADOOP_CONF_URI ] && [ ${#HADOOP_CONF_URI} != 0 ]; then
6161
if [ ! -v HADOOP_CONF_DIR ] && [ ${#HADOOP_CONF_DIR} != 0 ]; then
6262
HADOOP_CONF_DIR=/opt/sds/hadoop/conf
6363
fi
@@ -78,10 +78,10 @@ function initHdfs() {
7878
generate_core-site-from-uri
7979
fi
8080

81-
if [[ ! -v HDFS_CONF_FROM_DFS ]]; then
82-
HDFS_CONF_FROM_DFS="false"
81+
if [[ ! -v HADOOP_CONF_FROM_DFS ]]; then
82+
HADOOP_CONF_FROM_DFS="false"
8383
fi
84-
if [ $HDFS_CONF_FROM_DFS == "true" ] && [ -v HADOOP_FS_DEFAULT_NAME ] && [ ${#HADOOP_FS_DEFAULT_NAME} != 0 ]; then
84+
if [ $HADOOP_CONF_FROM_DFS == "true" ] && [ -v HADOOP_FS_DEFAULT_NAME ] && [ ${#HADOOP_FS_DEFAULT_NAME} != 0 ]; then
8585
if [ ! -v HADOOP_CONF_DIR ] && [ ${#HADOOP_CONF_DIR} != 0 ]; then
8686
HADOOP_CONF_DIR=/opt/sds/hadoop/conf
8787
fi
@@ -90,10 +90,10 @@ function initHdfs() {
9090
generate_hdfs-conf-from-fs
9191
fi
9292

93-
if [[ ! -v HDFS_CONF_FROM_DFS_NOT_SECURED ]]; then
94-
HDFS_CONF_FROM_DFS_NOT_SECURED="false"
93+
if [[ ! -v HADOOP_CONF_FROM_DFS_NOT_SECURED ]]; then
94+
HADOOP_CONF_FROM_DFS_NOT_SECURED="false"
9595
fi
96-
if [ $HDFS_CONF_FROM_DFS_NOT_SECURED == "true" ] && [ -v HADOOP_FS_DEFAULT_NAME ] && [ ${#HADOOP_FS_DEFAULT_NAME} != 0 ]; then
96+
if [ $HADOOP_CONF_FROM_DFS_NOT_SECURED == "true" ] && [ -v HADOOP_FS_DEFAULT_NAME ] && [ ${#HADOOP_FS_DEFAULT_NAME} != 0 ]; then
9797
if [ ! -v HADOOP_CONF_DIR ] && [ ${#HADOOP_CONF_DIR} != 0 ]; then
9898
HADOOP_CONF_DIR=/opt/sds/hadoop/conf
9999
fi

docker/sparta-server-utils.sh

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -25,10 +25,10 @@ function initJavaOptions() {
2525

2626
function hdfsOptions() {
2727

28-
if [[ ! -v HDFS_PORT ]]; then
29-
HDFS_PORT=9000
28+
if [[ ! -v HADOOP_PORT ]]; then
29+
HADOOP_PORT=9000
3030
fi
31-
sed -i "s|.*sparta.hdfs.hdfsPort.*|sparta.hdfs.hdfsPort = ${HDFS_PORT}|" ${SPARTA_CONF_FILE}
31+
sed -i "s|.*sparta.hdfs.hdfsPort.*|sparta.hdfs.hdfsPort = ${HADOOP_PORT}|" ${SPARTA_CONF_FILE}
3232

3333
if [[ ! -v HDFS_SECURITY_ENABLED ]]; then
3434
HDFS_SECURITY_ENABLED=false

examples/docker/README.md

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -22,23 +22,23 @@ Entrypoint variables
2222

2323
| PARAM |
2424
| -------------:|
25-
| HDFS_USER_NAME |
26-
| HDFS_CONF_FROM_URI |
25+
| HADOOP_USER_NAME |
26+
| HADOOP_CONF_FROM_URI |
2727
| CORE_SITE_FROM_URI |
28-
| HDFS_CONF_FROM_DFS |
29-
| HDFS_CONF_FROM_DFS_NOT_SECURED |
28+
| HADOOP_CONF_FROM_DFS |
29+
| HADOOP_CONF_FROM_DFS_NOT_SECURED |
3030
| HADOOP_CONF_URI |
3131
| HADOOP_FS_DEFAULT_NAME |
3232
| HADOOP_SECURITY_AUTH |
3333
| HADOOP_RPC_PROTECTION |
3434
| HADOOP_DFS_ENCRYPT_DATA_TRANSFER |
3535
| HADOOP_SECURITY_TOKEN_USE_IP |
36-
| HADOOP_NAMENODE_KERBEROS_PRINCIPAL |
37-
| HADOOP_NAMENODE_KERBEROS_PRINCIPAL_PATTERN |
36+
| HADOOP_NAMENODE_KRB_PRINCIPAL |
37+
| HADOOP_NAMENODE_KRB_PRINCIPAL_PATTERN |
3838
| HADOOP_DFS_ENCRYPT_DATA_TRANSFER |
3939
| HADOOP_DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES |
4040
| HADOOP_SECURITY_AUTH |
41-
| HDFS_PORT |
41+
| HADOOP_PORT |
4242
| HDFS_KEYTAB_RELOAD |
4343
| HDFS_KEYTAB_RELOAD_TIME |
4444

@@ -116,7 +116,6 @@ Entrypoint variables
116116
| SPARK_LOCAL_METRICS_CONF |
117117

118118

119-
120119
## MESOS EXECUTION OPTIONS
121120
| PARAM |
122121
| -------------:|
@@ -227,7 +226,7 @@ docker run -dit --name sp -p 9090:9090 --env RUN_MODE=debug --env SERVICE_LOG_LE
227226
--env SPARTA_ZOOKEEPER_CONNECTION_STRING=zk.demo.stratio.com --env SPARTA_EXECUTION_MODE=mesos
228227
--env SPARTA_CHECKPOINT_PATH=/user/stratio/checkpoint
229228
--env SPARK_MESOS_MASTER=mesos://mm11.demo.stratio.com:7077 --env HDFS_MASTER=hm.demo.stratio.com
230-
--env HDFS_PORT=8020 qa.stratio.com/stratio/sparta:latest
229+
--env HADOOP_PORT=8020 qa.stratio.com/stratio/sparta:latest
231230
```
232231

233232
Usage examples with VAULT

serving-api/src/main/resources/marathon-app-template.json

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -32,20 +32,21 @@
3232
"SPARK_HOME": "???",
3333
"MESOS_NATIVE_JAVA_LIBRARY": "???",
3434
"LD_LIBRARY_PATH": "???",
35-
"HDFS_USER_NAME": "???",
35+
"HADOOP_USER_NAME": "???",
3636
"SPARK_USER": "???",
37-
"HDFS_CONF_FROM_URI": "???",
37+
"HADOOP_CONF_FROM_URI": "???",
3838
"CORE_SITE_FROM_URI": "???",
39-
"HDFS_CONF_FROM_DFS": "???",
40-
"HDFS_CONF_FROM_DFS_NOT_SECURED": "???",
39+
"HDFS_CONF_URI": "???",
40+
"HADOOP_CONF_FROM_DFS": "???",
41+
"HADOOP_CONF_FROM_DFS_NOT_SECURED": "???",
4142
"HADOOP_FS_DEFAULT_NAME": "???",
4243
"HADOOP_CONF_URI": "???",
4344
"HADOOP_SECURITY_AUTH": "???",
4445
"HADOOP_RPC_PROTECTION": "???",
4546
"HADOOP_DFS_ENCRYPT_DATA_TRANSFER": "???",
4647
"HADOOP_SECURITY_TOKEN_USE_IP": "???",
47-
"HADOOP_NAMENODE_KERBEROS_PRINCIPAL": "???",
48-
"HADOOP_NAMENODE_KERBEROS_PRINCIPAL_PATTERN": "???",
48+
"HADOOP_NAMENODE_KRB_PRINCIPAL": "???",
49+
"HADOOP_NAMENODE_KRB_PRINCIPAL_PATTERN": "???",
4950
"HADOOP_DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES": "???",
5051
"HADOOP_DFS_ENCRYPT_DATA_CIPHER_KEY_BITLENGTH": "???",
5152
"HADOOP_CONF_DIR": "???",
@@ -56,7 +57,7 @@
5657
"ZOOKEEPER_LOG_LEVEL": "???",
5758
"SECURITY_TLS_ENABLE": "???",
5859
"SECURITY_TRUSTSTORE_ENABLE": "???",
59-
"SECURITY_KERBEROS_ENABLE": "???",
60+
"SECURITY_KRB_ENABLE": "???",
6061
"SECURITY_OAUTH2_ENABLE": "???",
6162
"SECURITY_MESOS_ENABLE": "???",
6263
"SPARK_SECURITY_KAFKA_ENABLE": "???"

serving-core/src/main/scala/com/stratio/sparta/serving/core/marathon/MarathonService.scala

Lines changed: 12 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -103,12 +103,12 @@ class MarathonService(context: ActorContext,
103103
val AppHeapSizeEnv = "MARATHON_APP_HEAP_SIZE"
104104
val AppHeapMinimunSizeEnv = "MARATHON_APP_HEAP_MINIMUM_SIZE"
105105
val SparkHomeEnv = "SPARK_HOME"
106-
val HadoopUserNameEnv = "HDFS_USER_NAME"
106+
val HadoopUserNameEnv = "HADOOP_USER_NAME"
107107
val HdfsUserNameEnv = "HADOOP_USER_NAME"
108-
val HdfsConfFromUriEnv = "HDFS_CONF_FROM_URI"
108+
val HdfsConfFromUriEnv = "HADOOP_CONF_FROM_URI"
109109
val CoreSiteFromUriEnv = "CORE_SITE_FROM_URI"
110-
val HdfsConfFromDfsEnv = "HDFS_CONF_FROM_DFS"
111-
val HdfsConfFromDfsNotSecuredEnv = "HDFS_CONF_FROM_DFS_NOT_SECURED"
110+
val HdfsConfFromDfsEnv = "HADOOP_CONF_FROM_DFS"
111+
val HdfsConfFromDfsNotSecuredEnv = "HADOOP_CONF_FROM_DFS_NOT_SECURED"
112112
val DefaultFsEnv = "HADOOP_FS_DEFAULT_NAME"
113113
val DefaultHdfsConfUriEnv = "HADOOP_CONF_URI"
114114
val HadoopConfDirEnv = "HADOOP_CONF_DIR"
@@ -123,17 +123,18 @@ class MarathonService(context: ActorContext,
123123
val HdfsSecurityAuthEnv = "HADOOP_SECURITY_AUTH"
124124
val HdfsEncryptDataEnv = "HADOOP_DFS_ENCRYPT_DATA_TRANSFER"
125125
val HdfsTokenUseIpEnv = "HADOOP_SECURITY_TOKEN_USE_IP"
126-
val HdfsKerberosPrincipalEnv = "HADOOP_NAMENODE_KERBEROS_PRINCIPAL"
127-
val HdfsKerberosPrincipalPatternEnv = "HADOOP_NAMENODE_KERBEROS_PRINCIPAL_PATTERN"
126+
val HdfsKerberosPrincipalEnv = "HADOOP_NAMENODE_KRB_PRINCIPAL"
127+
val HdfsKerberosPrincipalPatternEnv = "HADOOP_NAMENODE_KRB_PRINCIPAL_PATTERN"
128128
val HdfsEncryptDataTransferEnv = "HADOOP_DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES"
129129
val HdfsEncryptDataBitLengthEnv = "HADOOP_DFS_ENCRYPT_DATA_CIPHER_KEY_BITLENGTH"
130130
val SparkUserEnv = "SPARK_USER"
131131
val SecurityTlsEnv = "SECURITY_TLS_ENABLE"
132132
val SecurityTrustoreEnv = "SECURITY_TRUSTSTORE_ENABLE"
133-
val SecurityKerberosEnv = "SECURITY_KERBEROS_ENABLE"
133+
val SecurityKerberosEnv = "SECURITY_KRB_ENABLE"
134134
val SecurityOauth2Env = "SECURITY_OAUTH2_ENABLE"
135135
val SecurityMesosEnv = "SECURITY_MESOS_ENABLE"
136136
val SecuritySparkKafkaEnv = "SPARK_SECURITY_KAFKA_ENABLE"
137+
val SecuritySparkHdfsEnv = "HDFS_CONF_URI"
137138

138139
/* Lazy variables */
139140

@@ -222,6 +223,9 @@ class MarathonService(context: ActorContext,
222223
} else None
223224
}
224225

226+
private def envSparkSecurityHdfs(sparkConfigurations: Map[String, String]): Option[String] =
227+
sparkConfigurations.get("spark.mesos.driverEnv.HDFS_CONF_URI")
228+
225229
private def envSparkHome: Option[String] = Properties.envOrNone(SparkHomeEnv)
226230

227231
private def envConstraint: Option[String] = Properties.envOrNone(Constraints)
@@ -414,6 +418,7 @@ class MarathonService(context: ActorContext,
414418
SecurityOauth2Env -> envOauth2,
415419
SecurityMesosEnv -> envMesos,
416420
SecuritySparkKafkaEnv -> envSparkSecurityKafka(submitRequest.sparkConfigurations),
421+
SecuritySparkHdfsEnv -> envSparkSecurityHdfs(submitRequest.sparkConfigurations),
417422
DcosServiceName -> Option(ServiceName),
418423
SparkUserEnv -> policyModel.sparkUser
419424
).flatMap { case (k, v) => v.map(value => Option(k -> value)) }.flatten.toMap

0 commit comments

Comments
 (0)