Environment

Runtime Information

Name           Value
Java Home      /opt/java/openjdk
Java Version   11.0.29 (Eclipse Adoptium)
Scala Version  version 2.12.18
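
For cross-checking, the same runtime facts can be read from inside the driver JVM. A minimal sketch, using only standard JDK system properties and the Scala library's version string (nothing Spark-specific):

    // Prints the three values shown above from any Scala 2.12 REPL or driver code.
    object RuntimeInfo extends App {
      println(s"Java Home:     ${System.getProperty("java.home")}")     // /opt/java/openjdk
      println(s"Java Version:  ${System.getProperty("java.version")}")  // 11.0.29
      println(s"Scala Version: ${scala.util.Properties.versionString}") // version 2.12.18
    }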

Spark Properties

Name                                                          Value
dc.app.notification.lockmap                                   dc.app.notification.lockmap
dc.app.notification.urls                                      redis://dc-redis:6379
spark.app-name                                                SP1
spark.app.id                                                  local-1764969136674
spark.app.initial.jar.urls                                    spark://localhost:7020/jars/hdh-spark-job-jar-with-dependencies.jar
spark.app.name                                                SP1
spark.app.startTime                                           1764969134164
spark.blockManager.port                                       7010
spark.cores.max                                               12
spark.driver.bindAddress                                      localhost
spark.driver.extraJavaOptions                                 -Djava.net.preferIPv6Addresses=false -XX:+IgnoreUnrecognizedVMOptions --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.lang.invoke=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/sun.nio.cs=ALL-UNNAMED --add-opens=java.base/sun.security.action=ALL-UNNAMED --add-opens=java.base/sun.util.calendar=ALL-UNNAMED --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED -Djdk.reflect.useDirectMethodHandle=false
spark.driver.host                                             localhost
spark.driver.maxResultSize                                    1g
spark.driver.port                                             7020
spark.executor.extraJavaOptions                               -Djava.net.preferIPv6Addresses=false -XX:+IgnoreUnrecognizedVMOptions --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.lang.invoke=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/sun.nio.cs=ALL-UNNAMED --add-opens=java.base/sun.security.action=ALL-UNNAMED --add-opens=java.base/sun.util.calendar=ALL-UNNAMED --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED -Djdk.reflect.useDirectMethodHandle=false
spark.executor.id                                             driver
spark.executor.memory                                         20g
spark.formula.http.timeout.connect                            200
spark.formula.http.timeout.read                               200
spark.hadoop.dfs.client.use.datanode.hostname                 false
spark.hadoop.fs.s3a.access.key                                *********(redacted)
spark.hadoop.fs.s3a.attempts.maximum                          20
spark.hadoop.fs.s3a.bucket                                    dc-core
spark.hadoop.fs.s3a.committer.magic.enabled                   true
spark.hadoop.fs.s3a.committer.name                            magic
spark.hadoop.fs.s3a.connection.maximum                        300
spark.hadoop.fs.s3a.connection.timeout                        200000
spark.hadoop.fs.s3a.endpoint                                  http://minio-storage.adobis.lan
spark.hadoop.fs.s3a.fast.upload                               true
spark.hadoop.fs.s3a.fast.upload.active.blocks                 10
spark.hadoop.fs.s3a.fast.upload.buffer                        disk
spark.hadoop.fs.s3a.impl                                      org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.multipart.size                            64M
spark.hadoop.fs.s3a.multipart.threshold                       67108864
spark.hadoop.fs.s3a.path.style.access                         true
spark.hadoop.fs.s3a.retry.interval                            500ms
spark.hadoop.fs.s3a.secret.key                                *********(redacted)
spark.hadoop.fs.s3a.threads.max                               100
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version  2
spark.hadoop.mapreduce.outputcommitter.factory.scheme.s3a     org.apache.hadoop.fs.s3a.commit.S3ACommitterFactory
spark.hadoop.s3a.buffer.dir                                   /tmp/spark-buffers
spark.jars                                                    ./hdh-spark-job-jar-with-dependencies.jar
spark.local.dir                                               /tmp
spark.master                                                  local[12]
spark.memory.offHeap.enabled                                  true
spark.memory.offHeap.size                                     4g
spark.scheduler.allocation.file                               file:///opt/dev/work/fairScheduler.xml
spark.scheduler.mode                                          FAIR
spark.scheduler.pool                                          default
spark.sql.adaptive.coalescePartitions.enabled                 true
spark.sql.adaptive.coalescePartitions.initialPartitionNum     10
spark.sql.adaptive.enabled                                    true
spark.sql.adaptive.forceOptimizeSkewedJoin                    true
spark.sql.adaptive.localShuffleReader.enabled                 true
spark.sql.autoBroadcastJoinThreshold                          -1
spark.sql.caseSensitive                                       true
spark.sql.files.minPartitionNum                               10
spark.sql.parquet.datetimeRebaseModeInRead                    CORRECTED
spark.sql.parquet.datetimeRebaseModeInWrite                   CORRECTED
spark.sql.parquet.int96RebaseModeInRead                       CORRECTED
spark.sql.parquet.int96RebaseModeInWrite                      CORRECTED
spark.sql.parquet.output.committer.class                      org.apache.spark.internal.io.cloud.BindingParquetOutputCommitter
spark.sql.shuffle.partitions                                  250
spark.sql.sources.commitProtocolClass                         org.apache.spark.internal.io.cloud.PathOutputCommitProtocol
spark.ui.killEnabled                                          false
spark.ui.port                                                 4040
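
These properties could have been supplied through spark-defaults.conf, as --conf flags to spark-submit, or programmatically at session construction; the dump does not say which. A minimal sketch of the programmatic route, reproducing a representative subset of the values above (the remaining properties would be set with further .config(...) calls in the same way):

    import org.apache.spark.sql.SparkSession

    // Sketch only: every value below is copied from the table above.
    val spark = SparkSession.builder()
      .appName("SP1")
      .master("local[12]")  // single JVM, 12 worker threads
      .config("spark.executor.memory", "20g")
      .config("spark.memory.offHeap.enabled", "true")
      .config("spark.memory.offHeap.size", "4g")
      .config("spark.scheduler.mode", "FAIR")
      .config("spark.sql.adaptive.enabled", "true")
      .config("spark.sql.shuffle.partitions", "250")
      // S3A pointed at the MinIO endpoint listed above; path-style access is
      // what MinIO-style object stores typically require.
      .config("spark.hadoop.fs.s3a.endpoint", "http://minio-storage.adobis.lan")
      .config("spark.hadoop.fs.s3a.path.style.access", "true")
      .config("spark.hadoop.fs.s3a.impl", "org.apache.hadoop.fs.s3a.S3AFileSystem")
      .getOrCreate()

Note that every spark.hadoop.* entry is copied into the underlying Hadoop Configuration with the spark.hadoop. prefix stripped, which is how the fs.s3a.* settings reach the S3A filesystem client.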

Resource Profiles

Resource Profile Id  Resource Profile Contents
0                    Executor Reqs:
                         cores: [amount: 1]
                         memory: [amount: 20480]
                         offHeap: [amount: 4096]
                     Task Reqs:
                         cpus: [amount: 1.0]
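
Profile 0 is the default profile Spark derives from the configuration above; the memory amounts are in MiB (20480 MiB = spark.executor.memory 20g, 4096 MiB = spark.memory.offHeap.size 4g). For illustration, an equivalent profile could be built explicitly with the ResourceProfile API available since Spark 3.1; a minimal sketch:

    import org.apache.spark.resource.{ExecutorResourceRequests, ResourceProfileBuilder, TaskResourceRequests}

    // Mirrors profile 0: 1 core, 20480 MiB heap and 4096 MiB off-heap per
    // executor, and 1 CPU per task.
    val execReqs = new ExecutorResourceRequests()
      .cores(1)
      .memory("20g")        // recorded as 20480 (MiB) in the profile
      .offHeapMemory("4g")  // recorded as 4096 (MiB)
    val taskReqs = new TaskResourceRequests().cpus(1)

    val profile = new ResourceProfileBuilder()
      .require(execReqs)
      .require(taskReqs)
      .build()

Custom profiles are attached per-RDD via rdd.withResources(profile) and generally require a cluster manager with dynamic allocation, so a local[12] run like this one uses only the default profile.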

Hadoop Properties

System Properties

Metrics Properties

Classpath Entries