-
Notifications
You must be signed in to change notification settings - Fork 913
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[KYUUBI-186]Add a max cache time to clean SparkSessions that may have…
… token expiry issue --- --- fix #186 fix #187 --- Squashed commit of the following: commit 4b8297f Author: Kent Yao <yaooqinn@hotmail.com> Date: Mon May 20 19:36:34 2019 +0800 add log commit 7b811f8 Author: Kent Yao <yaooqinn@hotmail.com> Date: Thu May 16 16:46:04 2019 +0800 add ut commit 19f67fa Author: Kent Yao <yaooqinn@hotmail.com> Date: Thu May 16 10:57:24 2019 +0800 fix ut commit 7ad7c20 Author: Kent Yao <yaooqinn@hotmail.com> Date: Thu May 16 10:40:42 2019 +0800 fix ut commit 9a114ab Author: Kent Yao <yaooqinn@hotmail.com> Date: Thu May 16 00:10:57 2019 +0800 mv cache validating in its own class commit 0afba5a Author: Kent Yao <yaooqinn@hotmail.com> Date: Wed May 15 11:47:42 2019 +0800 fix ut commit eff3f41 Author: Kent Yao <yaooqinn@hotmail.com> Date: Wed May 15 11:12:51 2019 +0800 add ut commit 9bbbea7 Author: Kent Yao <yaooqinn@hotmail.com> Date: Tue May 14 23:17:00 2019 +0800 add ut commit 0e0e59e Author: Kent Yao <yaooqinn@hotmail.com> Date: Tue May 14 19:39:09 2019 +0800 mv init time to spark session cache commit a4a1c69 Author: Kent Yao <yaooqinn@hotmail.com> Date: Tue May 14 18:36:06 2019 +0800 Add a max cache time to clean SparkSessions that may have token expiry issue fix #186
- Loading branch information
Showing
12 changed files
with
310 additions
and
375 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
80 changes: 0 additions & 80 deletions
80
...i-server/src/main/scala/org/apache/spark/scheduler/cluster/KyuubiSparkExecutorUtils.scala
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
86 changes: 86 additions & 0 deletions
86
kyuubi-server/src/main/scala/yaooqinn/kyuubi/spark/SparkSessionCache.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,86 @@ | ||
/* | ||
* Licensed to the Apache Software Foundation (ASF) under one or more | ||
* contributor license agreements. See the NOTICE file distributed with | ||
* this work for additional information regarding copyright ownership. | ||
* The ASF licenses this file to You under the Apache License, Version 2.0 | ||
* (the "License"); you may not use this file except in compliance with | ||
* the License. You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
|
||
package yaooqinn.kyuubi.spark | ||
|
||
import java.util.concurrent.atomic.AtomicInteger | ||
|
||
import org.apache.spark.KyuubiConf._ | ||
import org.apache.spark.KyuubiSparkUtil | ||
import org.apache.spark.sql.SparkSession | ||
|
||
/**
 * Tracks how many client sessions share one original [[SparkSession]], so that
 * [[SparkSessionCacheManager]] can decide when the cached instance may be recycled.
 *
 * @param spark    the original [[SparkSession]] instance being cached
 * @param times    number of client sessions cloned from `spark`, starting at 1
 * @param initTime creation timestamp (ms since epoch) of the cached session
 */
private[spark]
class SparkSessionCache private(val spark: SparkSession, times: AtomicInteger, initTime: Long) {

  // Hard upper bound on how long this session may stay cached.
  private val maxCacheTime: Long =
    KyuubiSparkUtil.timeStringAsMs(spark.conf.get(BACKEND_SESSION_MAX_CACHE_TIME))

  // How long the session may sit without connected clients before counting as idle.
  private val idleTimeout: Long =
    KyuubiSparkUtil.timeStringAsMs(spark.conf.get(BACKEND_SESSION_IDLE_TIMEOUT))

  // Most recent client logout time; MaxValue means "no logout recorded yet",
  // which keeps isIdle false until the first logout happens.
  @volatile
  private var latestLogout: Long = Long.MaxValue

  /** Record the time at which a client last logged out of this session. */
  def updateLogoutTime(time: Long): Unit = latestLogout = time

  /**
   * True when every connection has disconnected and the idle timeout has
   * elapsed since the user's last logout.
   */
  def isIdle: Boolean =
    times.get <= 0 && System.currentTimeMillis - latestLogout > idleTimeout

  /** Whether the cached [[SparkSession]]'s context has already been stopped. */
  def isCrashed: Boolean = spark.sparkContext.isStopped

  /**
   * Whether the session has outlived its maximum cache time.
   *
   * While the session's age is within [maxCacheTime, maxCacheTime * 1.25], it is
   * expired only once all connections have disconnected. Beyond maxCacheTime * 1.25
   * it is expired unconditionally, even with connections linked or jobs running.
   */
  def isExpired: Boolean = {
    val age = System.currentTimeMillis - initTime
    (age >= maxCacheTime && times.get <= 0) || (age > maxCacheTime * 1.25)
  }

  /** A cache entry must be cleared once it has crashed or expired. */
  def needClear: Boolean = isCrashed || isExpired

  /** Current number of client sessions sharing this [[SparkSession]]. */
  def getReuseTimes: Int = times.get()

  /** Register one more cloned client session and return the new count. */
  def incReuseTimeAndGet: Int = times.incrementAndGet()

  /** Release one cloned client session and return the remaining count. */
  def decReuseTimeAndGet: Int = times.decrementAndGet()
}
|
||
/** Factory for [[SparkSessionCache]] instances. */
object SparkSessionCache {

  /**
   * Wrap `spark` in a new cache entry, starting with a single client reference
   * and stamping the current time as the session's start time.
   */
  def init(spark: SparkSession): SparkSessionCache = {
    val startTime = System.currentTimeMillis
    new SparkSessionCache(spark, new AtomicInteger(1), startTime)
  }
}
Oops, something went wrong.