From ed6fe4fccabe8068b3d1e1365e87b51c66908474 Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun
Date: Sun, 4 Feb 2024 14:44:51 +0800
Subject: [PATCH] [SPARK-46899][CORE][FOLLOWUP] Enable `/workers/kill` if
 `spark.decommission.enabled=true`

### What changes were proposed in this pull request?

This PR aims to re-enable the `/workers/kill` API when `spark.decommission.enabled=true`, as a follow-up of
- #44926

### Why are the changes needed?

To address the review comment below and prevent a regression.
- https://github.com/apache/spark/pull/44926#pullrequestreview-1854788375

### Does this PR introduce _any_ user-facing change?

No, this restores the previous behavior.

### How was this patch tested?

Manual review.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #45010 from dongjoon-hyun/SPARK-46899-2.

Authored-by: Dongjoon Hyun
Signed-off-by: Kent Yao
---
 .../org/apache/spark/deploy/master/ui/MasterWebUI.scala     | 6 ++++--
 .../spark/deploy/master/ui/ReadOnlyMasterWebUISuite.scala   | 5 ++++-
 2 files changed, 8 insertions(+), 3 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
index 74e7f4c67ade0..9f5738ce4863a 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
@@ -43,7 +43,7 @@ class MasterWebUI(
 
   val masterEndpointRef = master.self
   val killEnabled = master.conf.get(UI_KILL_ENABLED)
-  val decommissionDisabled = !master.conf.get(DECOMMISSION_ENABLED)
+  val decommissionEnabled = master.conf.get(DECOMMISSION_ENABLED)
   val decommissionAllowMode = master.conf.get(MASTER_UI_DECOMMISSION_ALLOW_MODE)
 
   initialize()
@@ -61,11 +61,13 @@ class MasterWebUI(
         "/app/kill", "/", masterPage.handleAppKillRequest, httpMethods = Set("POST")))
       attachHandler(createRedirectHandler(
         "/driver/kill", "/", masterPage.handleDriverKillRequest, httpMethods = Set("POST")))
+    }
+    if (decommissionEnabled) {
       attachHandler(createServletHandler("/workers/kill", new HttpServlet {
         override def doPost(req: HttpServletRequest, resp: HttpServletResponse): Unit = {
           val hostnames: Seq[String] = Option(req.getParameterValues("host"))
             .getOrElse(Array[String]()).toImmutableArraySeq
-          if (decommissionDisabled || !isDecommissioningRequestAllowed(req)) {
+          if (!isDecommissioningRequestAllowed(req)) {
             resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED)
           } else {
             val removedWorkers = masterEndpointRef.askSync[Integer](
diff --git a/core/src/test/scala/org/apache/spark/deploy/master/ui/ReadOnlyMasterWebUISuite.scala b/core/src/test/scala/org/apache/spark/deploy/master/ui/ReadOnlyMasterWebUISuite.scala
index c52ce91fda8be..ab323aaf79995 100644
--- a/core/src/test/scala/org/apache/spark/deploy/master/ui/ReadOnlyMasterWebUISuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/master/ui/ReadOnlyMasterWebUISuite.scala
@@ -24,13 +24,16 @@ import org.mockito.Mockito.{mock, when}
 import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.deploy.master._
 import org.apache.spark.deploy.master.ui.MasterWebUISuite._
+import org.apache.spark.internal.config.DECOMMISSION_ENABLED
 import org.apache.spark.internal.config.UI.UI_KILL_ENABLED
 import org.apache.spark.rpc.{RpcEndpointRef, RpcEnv}
 import org.apache.spark.util.Utils
 
 class ReadOnlyMasterWebUISuite extends SparkFunSuite {
 
-  val conf = new SparkConf().set(UI_KILL_ENABLED, false)
+  val conf = new SparkConf()
+    .set(UI_KILL_ENABLED, false)
+    .set(DECOMMISSION_ENABLED, false)
   val securityMgr = new SecurityManager(conf)
   val rpcEnv = mock(classOf[RpcEnv])
   val master = mock(classOf[Master])
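
As an illustrative manual check (a sketch only, not part of the patch or the Spark test suite): the snippet below POSTs to the re-enabled `/workers/kill` endpoint of a standalone Master. The `localhost:8080` address and the worker host names are assumptions, and the request only succeeds if its origin is permitted by `MASTER_UI_DECOMMISSION_ALLOW_MODE`.

```scala
// Hypothetical manual check: POST to the /workers/kill endpoint of a standalone
// Master assumed to be running on localhost:8080 with spark.decommission.enabled=true.
// Worker host names below are placeholders.
import java.net.URI
import java.net.http.{HttpClient, HttpRequest, HttpResponse}

object WorkersKillCheck {
  def main(args: Array[String]): Unit = {
    // Repeated "host" query parameters match req.getParameterValues("host") in MasterWebUI.
    val uri = URI.create(
      "http://localhost:8080/workers/kill/?host=worker-1.example.com&host=worker-2.example.com")
    val request = HttpRequest.newBuilder(uri)
      .POST(HttpRequest.BodyPublishers.noBody())
      .build()
    val response = HttpClient.newHttpClient()
      .send(request, HttpResponse.BodyHandlers.ofString())
    // Expect 200 when decommissioning is enabled and the request source is allowed by
    // MASTER_UI_DECOMMISSION_ALLOW_MODE; 405 (SC_METHOD_NOT_ALLOWED) otherwise.
    println(s"HTTP ${response.statusCode()}")
  }
}
```

An equivalent `curl -X POST` against the same URL exercises the same code path; with `spark.decommission.enabled=false`, the handler is not registered at all, so such a request is no longer answered by this servlet.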