Status: Open
Labels: bug (Something isn't working)
Description
Describe the bug
First seen in NDS-automation/968; related to NVIDIA/spark-rapids#13491.
[2025-09-28T22:49:43.972Z] 25/09/28 22:49:40 INFO GpuOverrides: Plan conversion to the GPU took 0.29 ms
[2025-09-28T22:49:43.972Z] 25/09/28 22:49:40 INFO GpuOverrides: GPU plan transition optimization took 0.35 ms
[2025-09-28T22:49:43.972Z] 25/09/28 22:49:40 ERROR GpuFileFormatWriter: Aborting job b977c3ae-34fc-40d3-9ecc-6552666cc2ad.
[2025-09-28T22:49:43.972Z] java.lang.NoClassDefFoundError: org/apache/spark/sql/delta/DeltaParquetFileFormat
[2025-09-28T22:49:43.972Z] at com.nvidia.spark.rapids.delta.DeltaIOProvider.isSupportedFormat(DeltaIOProvider.scala:50)
[2025-09-28T22:49:43.972Z] at org.apache.spark.sql.rapids.ExternalSource$.isSupportedFormat(ExternalSource.scala:82)
[2025-09-28T22:49:43.972Z] at org.apache.spark.sql.rapids.GpuFileSourceScanExec.allMetrics$lzycompute(GpuFileSourceScanExec.scala:422)
[2025-09-28T22:49:43.972Z] at org.apache.spark.sql.rapids.GpuFileSourceScanExec.allMetrics(GpuFileSourceScanExec.scala:452)
[2025-09-28T22:49:43.972Z] at com.nvidia.spark.rapids.GpuExec.metrics(GpuExec.scala:203)
[2025-09-28T22:49:43.972Z] at com.nvidia.spark.rapids.GpuExec.metrics$(GpuExec.scala:203)
[2025-09-28T22:49:43.972Z] at org.apache.spark.sql.rapids.GpuFileSourceScanExec.metrics$lzycompute(GpuFileSourceScanExec.scala:67)
[2025-09-28T22:49:43.972Z] at org.apache.spark.sql.rapids.GpuFileSourceScanExec.metrics(GpuFileSourceScanExec.scala:67)
[2025-09-28T22:49:43.972Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:63)
[2025-09-28T22:49:43.972Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.972Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.972Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.972Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.972Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.972Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.972Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.972Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.972Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.972Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.972Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.972Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.immutable.List.map(List.scala:293)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.973Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.973Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.973Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.974Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.974Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.974Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.974Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.974Z] at scala.collection.immutable.List.map(List.scala:293)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.974Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.974Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.974Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.974Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.974Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.974Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.974Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.974Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.974Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.974Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlanInfo$.$anonfun$fromSparkPlan$3(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.974Z] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
[2025-09-28T22:49:43.974Z] at scala.collection.Iterator.foreach(Iterator.scala:943)
[2025-09-28T22:49:43.974Z] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[2025-09-28T22:49:43.974Z] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[2025-09-28T22:49:43.974Z] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[2025-09-28T22:49:43.974Z] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[2025-09-28T22:49:43.974Z] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[2025-09-28T22:49:43.974Z] at scala.collection.TraversableLike.map(TraversableLike.scala:286)
[2025-09-28T22:49:43.974Z] at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
[2025-09-28T22:49:43.974Z] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlanInfo$.fromSparkPlan(SparkPlanInfo.scala:75)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.onUpdatePlan(AdaptiveSparkPlanExec.scala:707)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.$anonfun$getFinalPhysicalPlan$2(AdaptiveSparkPlanExec.scala:239)
[2025-09-28T22:49:43.974Z] at scala.runtime.java8.JFunction1$mcVJ$sp.apply(JFunction1$mcVJ$sp.java:23)
[2025-09-28T22:49:43.974Z] at scala.Option.foreach(Option.scala:407)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.$anonfun$getFinalPhysicalPlan$1(AdaptiveSparkPlanExec.scala:239)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.getFinalPhysicalPlan(AdaptiveSparkPlanExec.scala:226)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.withFinalPlanUpdate(AdaptiveSparkPlanExec.scala:365)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.doExecuteColumnar(AdaptiveSparkPlanExec.scala:354)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeColumnar$1(SparkPlan.scala:211)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:222)
[2025-09-28T22:49:43.974Z] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:219)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlan.executeColumnar(SparkPlan.scala:207)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.rapids.GpuFileFormatWriter$.write(GpuFileFormatWriter.scala:208)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.rapids.GpuInsertIntoHadoopFsRelationCommand.runColumnar(GpuInsertIntoHadoopFsRelationCommand.scala:190)
[2025-09-28T22:49:43.974Z] at com.nvidia.spark.rapids.GpuDataWritingCommandExec.sideEffectResult$lzycompute(GpuDataWritingCommandExec.scala:125)
[2025-09-28T22:49:43.974Z] at com.nvidia.spark.rapids.GpuDataWritingCommandExec.sideEffectResult(GpuDataWritingCommandExec.scala:120)
[2025-09-28T22:49:43.974Z] at com.nvidia.spark.rapids.GpuDataWritingCommandExec.internalDoExecuteColumnar(GpuDataWritingCommandExec.scala:157)
[2025-09-28T22:49:43.974Z] at com.nvidia.spark.rapids.GpuExec.doExecuteColumnar(GpuExec.scala:341)
[2025-09-28T22:49:43.974Z] at com.nvidia.spark.rapids.GpuExec.doExecuteColumnar$(GpuExec.scala:338)
[2025-09-28T22:49:43.974Z] at com.nvidia.spark.rapids.GpuDataWritingCommandExec.doExecuteColumnar(GpuDataWritingCommandExec.scala:116)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeColumnar$1(SparkPlan.scala:211)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:222)
[2025-09-28T22:49:43.974Z] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:219)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlan.executeColumnar(SparkPlan.scala:207)
[2025-09-28T22:49:43.974Z] at com.nvidia.spark.rapids.GpuColumnarToRowExec.doExecute(GpuColumnarToRowExec.scala:366)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:184)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:222)
[2025-09-28T22:49:43.974Z] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:219)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:180)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:325)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:391)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:110)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:110)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:106)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:481)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:82)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:481)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:457)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:106)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:93)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:91)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.execution.QueryExecution.assertCommandExecuted(QueryExecution.scala:128)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:848)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:382)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:355)
[2025-09-28T22:49:43.974Z] at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:239)
[2025-09-28T22:49:43.974Z] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[2025-09-28T22:49:43.974Z] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[2025-09-28T22:49:43.974Z] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[2025-09-28T22:49:43.974Z] at java.lang.reflect.Method.invoke(Method.java:498)
[2025-09-28T22:49:43.974Z] at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
[2025-09-28T22:49:43.974Z] at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
[2025-09-28T22:49:43.974Z] at py4j.Gateway.invoke(Gateway.java:282)
[2025-09-28T22:49:43.974Z] at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
[2025-09-28T22:49:43.974Z] at py4j.commands.CallCommand.execute(CallCommand.java:79)
[2025-09-28T22:49:43.974Z] at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
[2025-09-28T22:49:43.974Z] at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
[2025-09-28T22:49:43.974Z] at java.lang.Thread.run(Thread.java:750)
[2025-09-28T22:49:43.974Z] Caused by: java.lang.ClassNotFoundException: org.apache.spark.sql.delta.DeltaParquetFileFormat
[2025-09-28T22:49:43.974Z] at java.net.URLClassLoader.findClass(URLClassLoader.java:387)
[2025-09-28T22:49:43.974Z] at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
[2025-09-28T22:49:43.974Z] at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
[2025-09-28T22:49:43.974Z] ... 339 more
[2025-09-28T22:49:44.229Z] ERROR BEGIN
Steps/Code to reproduce bug
Power run from https://github.com/NVIDIA/spark-rapids-benchmarks/tree/dev/nds:
cd nds/ && ./spark-submit-template power_run_gpu_delta.template $EXTRA_CONFS \
nds_power.py \
$WORKSPACE/nds/convert/delta_sf1 \
query/query1.sql \
time.csv \
--property_file properties/aqe-on.properties \
--input_format delta \
--output_prefix output/gpu_delta_sf1 \
--delta_unmanaged
[2025-09-28T22:49:10.858Z] ++ SHUFFLE_PARTITIONS=200
[2025-09-28T22:49:10.858Z] ++ SPARK_CONF=('--master' 'spark://nds-test-jenkins-nds-automation-968-81rg9-k7fq9:7077' '--deploy-mode' 'client' '--conf' 'spark.driver.maxResultSize=2GB' '--conf' 'spark.driver.memory=10G' '--conf' 'spark.executor.cores=4' '--conf' 'spark.executor.instances=2' '--conf' 'spark.executor.memory=16G' '--conf' 'spark.sql.shuffle.partitions=200' '--conf' 'spark.sql.files.maxPartitionBytes=2gb' '--conf' 'spark.sql.adaptive.enabled=true' '--conf' 'spark.executor.resource.gpu.amount=1' '--conf' 'spark.executor.resource.gpu.discoveryScript=./getGpusResources.sh' '--conf' 'spark.task.resource.gpu.amount=0.0625' '--conf' 'spark.plugins=com.nvidia.spark.SQLPlugin' '--conf' 'spark.rapids.memory.host.spillStorageSize=32G' '--conf' 'spark.rapids.memory.pinnedPool.size=8g' '--conf' 'spark.rapids.sql.concurrentGpuTasks=2' '--packages' 'io.delta:delta-core_2.12:1.1.0' '--conf' 'spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension' '--conf' 'spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog' '--files' '/home/jenkins/agent/workspace/NDS-automation/spark-3.2.0-bin-hadoop3.2/examples/src/main/scripts/getGpusResources.sh' '--jars' '/home/jenkins/agent/workspace/NDS-automation/rapids-4-spark.jar,./jvm_listener/target/nds-benchmark-listener-1.0-SNAPSHOT.jar')
[2025-09-28T22:49:10.858Z] ++ export SPARK_CONF
[2025-09-28T22:49:10.858Z] + MORE_ARGS=("${@:2}")
[2025-09-28T22:49:10.858Z] + CMD=("$SPARK_HOME/bin/spark-submit")
[2025-09-28T22:49:10.858Z] + CMD+=("${SPARK_CONF[@]}")
[2025-09-28T22:49:10.858Z] + CMD+=("${MORE_ARGS[@]}")
[2025-09-28T22:49:10.858Z] + /home/jenkins/agent/workspace/NDS-automation/spark-3.2.0-bin-hadoop3.2/bin/spark-submit --master spark://nds-test-jenkins-nds-automation-968-81rg9-k7fq9:7077 --deploy-mode client --conf spark.driver.maxResultSize=2GB --conf spark.driver.memory=10G --conf spark.executor.cores=4 --conf spark.executor.instances=2 --conf spark.executor.memory=16G --conf spark.sql.shuffle.partitions=200 --conf spark.sql.files.maxPartitionBytes=2gb --conf spark.sql.adaptive.enabled=true --conf spark.executor.resource.gpu.amount=1 --conf spark.executor.resource.gpu.discoveryScript=./getGpusResources.sh --conf spark.task.resource.gpu.amount=0.0625 --conf spark.plugins=com.nvidia.spark.SQLPlugin --conf spark.rapids.memory.host.spillStorageSize=32G --conf spark.rapids.memory.pinnedPool.size=8g --conf spark.rapids.sql.concurrentGpuTasks=2 --packages io.delta:delta-core_2.12:1.1.0 --conf spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension --conf spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog --files /home/jenkins/agent/workspace/NDS-automation/spark-3.2.0-bin-hadoop3.2/examples/src/main/scripts/getGpusResources.sh --jars /home/jenkins/agent/workspace/NDS-automation/rapids-4-spark.jar,./jvm_listener/target/nds-benchmark-listener-1.0-SNAPSHOT.jar nds_power.py /home/jenkins/agent/workspace/NDS-automation/nds/convert/delta_sf1 query/query1.sql time.csv --property_file properties/aqe-on.properties --input_format delta --output_prefix output/gpu_delta_sf1 --delta_unmanaged
[2025-09-28T22:49:11.785Z] :: loading settings :: url = jar:file:/home/jenkins/agent/workspace/NDS-automation/spark-3.2.0-bin-hadoop3.2/jars/ivy-2.5.0.jar!/org/apache/ivy/core/settings/ivysettings.xml
[2025-09-28T22:49:11.785Z] Ivy Default Cache set to: /root/.ivy2/cache
[2025-09-28T22:49:11.785Z] The jars for the packages stored in: /root/.ivy2/jars
[2025-09-28T22:49:11.785Z] io.delta#delta-core_2.12 added as a dependency
[2025-09-28T22:49:11.785Z] :: resolving dependencies :: org.apache.spark#spark-submit-parent-d8dd04ee-dd8d-4802-af9d-7b517ff57c94;1.0
[2025-09-28T22:49:11.785Z] confs: [default]
[2025-09-28T22:49:11.785Z] found io.delta#delta-core_2.12;1.1.0 in central
[2025-09-28T22:49:11.785Z] found org.antlr#antlr4-runtime;4.8 in central
[2025-09-28T22:49:11.785Z] found org.codehaus.jackson#jackson-core-asl;1.9.13 in local-m2-cache
[2025-09-28T22:49:11.785Z] :: resolution report :: resolve 107ms :: artifacts dl 4ms
[2025-09-28T22:49:11.785Z] :: modules in use:
[2025-09-28T22:49:11.785Z] io.delta#delta-core_2.12;1.1.0 from central in [default]
[2025-09-28T22:49:11.785Z] org.antlr#antlr4-runtime;4.8 from central in [default]
[2025-09-28T22:49:11.785Z] org.codehaus.jackson#jackson-core-asl;1.9.13 from local-m2-cache in [default]
[2025-09-28T22:49:11.785Z] ---------------------------------------------------------------------
[2025-09-28T22:49:11.785Z] | | modules || artifacts |
[2025-09-28T22:49:11.785Z] | conf | number| search|dwnlded|evicted|| number|dwnlded|
[2025-09-28T22:49:11.785Z] ---------------------------------------------------------------------
[2025-09-28T22:49:11.785Z] | default | 3 | 0 | 0 | 0 || 3 | 0 |
[2025-09-28T22:49:11.785Z] ---------------------------------------------------------------------
[2025-09-28T22:49:11.785Z] :: retrieving :: org.apache.spark#spark-submit-parent-d8dd04ee-dd8d-4802-af9d-7b517ff57c94
[2025-09-28T22:49:11.785Z] confs: [default]
[2025-09-28T22:49:11.785Z] 0 artifacts copied, 3 already retrieved (0kB/3ms)
[2025-09-28T22:49:11.785Z] 25/09/28 22:49:08 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[2025-09-28T22:49:12.345Z] Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
[2025-09-28T22:49:12.345Z] 25/09/28 22:49:09 INFO SparkContext: Running Spark version 3.2.0
[2025-09-28T22:36:50.221Z] 25/09/28 22:36:46 INFO SparkContext: Added file file:///home/jenkins/agent/workspace/NDS-automation/spark-3.2.0-bin-hadoop3.2/examples/src/main/scripts/getGpusResources.sh at spark://nds-test-jenkins-nds-automation-968-81rg9-k7fq9:44521/files/getGpusResources.sh with timestamp 1759099006355
[2025-09-28T22:36:50.221Z] 25/09/28 22:36:46 INFO Utils: Copying /home/jenkins/agent/workspace/NDS-automation/spark-3.2.0-bin-hadoop3.2/examples/src/main/scripts/getGpusResources.sh to /tmp/spark-4080361d-3ddf-45af-80e1-97a8ce1f300b/userFiles-fa694236-e19c-4b96-9ad7-1aebffb2dbeb/getGpusResources.sh
[2025-09-28T22:36:50.221Z] 25/09/28 22:36:46 INFO ShimLoader: Loading shim for Spark version: 3.2.0
[2025-09-28T22:36:50.221Z] 25/09/28 22:36:46 INFO ShimLoader: Complete Spark build info: 3.2.0, https://github.com/apache/spark, HEAD, 5d45a415f3a29898d92380380cfd82bfc7f579ea, 2021-10-06T12:46:30Z
[2025-09-28T22:36:50.221Z] 25/09/28 22:36:46 INFO ShimLoader: Scala version: version 2.12.15
[2025-09-28T22:36:50.221Z] 25/09/28 22:36:46 INFO ShimLoader: findURLClassLoader found a URLClassLoader org.apache.spark.util.MutableURLClassLoader@571c5681
[2025-09-28T22:36:50.221Z] 25/09/28 22:36:46 INFO ShimLoader: Updating spark classloader org.apache.spark.util.MutableURLClassLoader@571c5681 with the URLs: jar:file:/home/jenkins/agent/workspace/NDS-automation/rapids-4-spark.jar!/spark-shared/, jar:file:/home/jenkins/agent/workspace/NDS-automation/rapids-4-spark.jar!/spark320/
[2025-09-28T22:36:50.221Z] 25/09/28 22:36:46 INFO ShimLoader: Spark classLoader org.apache.spark.util.MutableURLClassLoader@571c5681 updated successfully
[2025-09-28T22:36:50.221Z] 25/09/28 22:36:46 INFO ShimLoader: Updating spark classloader org.apache.spark.util.MutableURLClassLoader@571c5681 with the URLs: jar:file:/home/jenkins/agent/workspace/NDS-automation/rapids-4-spark.jar!/spark-shared/, jar:file:/home/jenkins/agent/workspace/NDS-automation/rapids-4-spark.jar!/spark320/
[2025-09-28T22:36:50.222Z] 25/09/28 22:36:46 INFO ShimLoader: Spark classLoader org.apache.spark.util.MutableURLClassLoader@571c5681 updated successfully
[2025-09-28T22:36:50.222Z] 25/09/28 22:36:47 INFO RapidsPluginUtils: RAPIDS Accelerator build: Map(url -> https://github.com/NVIDIA/spark-rapids.git, branch -> HEAD, revision -> 6f3627a6fdecf30e2e73675f7a4c1777b9a0c5a0, version -> 25.10.0-SNAPSHOT, date -> 2025-09-27T07:21:23Z, cudf_version -> 25.10.0-SNAPSHOT, user -> root)
[2025-09-28T22:36:50.222Z] 25/09/28 22:36:47 INFO RapidsPluginUtils: RAPIDS Accelerator JNI build: Map(url -> https://github.com/NVIDIA/spark-rapids-jni.git, branch -> HEAD, gpu_architectures -> 100;120;70;75;80;86;90, revision -> 814665f095798c1b68d4b52ad4b317c2e1d0004a, version -> 25.10.0-SNAPSHOT, date -> 2025-09-26T03:58:33Z, user -> root)
[2025-09-28T22:36:50.222Z] 25/09/28 22:36:47 INFO RapidsPluginUtils: cudf build: Map(url -> https://github.com/rapidsai/cudf.git, branch -> HEAD, gpu_architectures -> 100;120;70;75;80;86;90, revision -> ef2f5ba691daaa6b10ba03719ca89b96fcdbd664, version -> 25.10.0-SNAPSHOT, date -> 2025-09-26T03:58:31Z, user -> root)
[2025-09-28T22:36:50.222Z] 25/09/28 22:36:47 INFO RapidsPluginUtils: RAPIDS Accelerator Private Map(url -> https://gitlab-master.nvidia.com/nvspark/spark-rapids-private.git, branch -> HEAD, revision -> 2bfc9218757a083cf998457d35f8a8d2bd0337ea, version -> 25.10.0-SNAPSHOT, date -> 2025-09-27T02:34:51Z, user -> root)
[2025-09-28T22:36:50.222Z] 25/09/28 22:36:47 WARN RapidsPluginUtils: RAPIDS Accelerator 25.10.0-SNAPSHOT using cudf 25.10.0-SNAPSHOT, private revision 2bfc9218757a083cf998457d35f8a8d2bd0337ea
Expected behavior
The NDS power run should complete successfully.
Additional context
This bug was originally reported in NVIDIA/spark-rapids#13519, but the benchmark is being run against a Delta Lake version (delta-core_2.12:1.1.0 on Spark 3.2.0) that we do not support, as per our documentation.
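Before re-running, it may help to confirm whether the Delta class is visible at all in a session launched with the same --packages/--jars as the failing job. A quick spark-shell probe (illustrative only; just the class name is taken from the log above):

```scala
// Run in a spark-shell started with the same --packages/--jars as the failing job.
// Illustrative probe, not part of the benchmark or the plugin.
import scala.util.Try

val deltaClass = "org.apache.spark.sql.delta.DeltaParquetFileFormat"
val loadable =
  Try(Class.forName(deltaClass, false, Thread.currentThread().getContextClassLoader)).isSuccess
println(s"$deltaClass loadable: $loadable")
```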