This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new e1d1302a3632 [SPARK-52533] Support enabling only driver profiler
e1d1302a3632 is described below
commit e1d1302a3632c6ca6a18dd907512b40f2e465a1a
Author: wforget <[email protected]>
AuthorDate: Wed Jun 25 10:24:05 2025 -0700
[SPARK-52533] Support enabling only driver profiler
### What changes were proposed in this pull request?
Remove the executor profiler enabled check from SparkAsyncProfiler
### Why are the changes needed?
The profiler enabled checks are already performed in ProfilerPlugin. We
should remove the redundant check in SparkAsyncProfiler so that enabling
only the driver profiler is supported.
https://github.com/apache/spark/blob/8222670dba1791a289be08d9f01a52505175edd2/connector/profiler/src/main/scala/org/apache/spark/profiler/ProfilerPlugin.scala#L48-L49
https://github.com/apache/spark/blob/8222670dba1791a289be08d9f01a52505175edd2/connector/profiler/src/main/scala/org/apache/spark/profiler/ProfilerPlugin.scala#L78-L79
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Minor improvement; no new tests needed.
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #51223 from wForget/SPARK-50811-followup.
Authored-by: wforget <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../src/main/scala/org/apache/spark/profiler/SparkAsyncProfiler.scala | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git
a/connector/profiler/src/main/scala/org/apache/spark/profiler/SparkAsyncProfiler.scala
b/connector/profiler/src/main/scala/org/apache/spark/profiler/SparkAsyncProfiler.scala
index 9c959037c9fd..d00d25d8af1a 100644
---
a/connector/profiler/src/main/scala/org/apache/spark/profiler/SparkAsyncProfiler.scala
+++
b/connector/profiler/src/main/scala/org/apache/spark/profiler/SparkAsyncProfiler.scala
@@ -36,7 +36,6 @@ import org.apache.spark.util.{ThreadUtils, Utils}
private[spark] class SparkAsyncProfiler(conf: SparkConf, executorId: String)
extends Logging {
private var running = false
- private val enableProfiler = conf.get(PROFILER_EXECUTOR_ENABLED)
private val profilerOptions = conf.get(PROFILER_ASYNC_PROFILER_OPTIONS)
private val profilerDfsDirOpt = conf.get(PROFILER_DFS_DIR)
private val profilerLocalDir = conf.get(PROFILER_LOCAL_DIR)
@@ -71,7 +70,7 @@ private[spark] class SparkAsyncProfiler(conf: SparkConf,
executorId: String) ext
val profiler: Option[AsyncProfiler] = {
Option(
- if (enableProfiler && AsyncProfilerLoader.isSupported) {
+ if (AsyncProfilerLoader.isSupported) {
AsyncProfilerLoader.setExtractionDirectory(extractionDir)
AsyncProfilerLoader.load()
} else null
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]