From bdaa7f795a83b46db10a20a13b1df68f37e5ce80 Mon Sep 17 00:00:00 2001
From: Tim Liu
Date: Tue, 9 Jul 2024 11:14:19 +0800
Subject: [PATCH 1/9] Remove spark31x json lines and shim files

1. Remove spark31x json lines from the source code
2. Remove the files that are only for the spark31x shims
3. Move the files shared by the spark31x and spark32x+ shims into the sql-plugin/src/main/spark320 folder

Signed-off-by: Tim Liu
---
 build/shimplify.py | 6 +-
 .../sql/tests/datagen/DataGenExprBase.scala | 27 -
 docs/dev/shimplify.md | 14 +-
 .../shims/ShimSupportsRuntimeFiltering.java | 35 -
 .../spark/rapids/shims/XxHash64Shims.scala | 30 -
 .../shims/AvoidAdaptiveTransitionToRow.scala | 89 --
 .../rapids/shims/BatchScanExecMeta.scala | 39 -
 .../spark/rapids/shims/GpuBatchScanExec.scala | 59 -
 .../spark/rapids/shims/GpuDataSourceRDD.scala | 68 --
 .../spark/rapids/shims/GpuOrcDataReader.scala | 100 --
 .../spark/rapids/shims/GpuParquetCrypto.scala | 28 -
 .../nvidia/spark/rapids/shims/HashUtils.scala | 33 -
 .../shims/OffsetWindowFunctionMeta.scala | 87 --
 .../spark/rapids/shims/OrcCastingShims.scala | 35 -
 .../nvidia/spark/rapids/shims/OrcShims.scala | 43 -
 .../shims/OrcShims311until320Base.scala | 133 ---
 .../rapids/shims/ParquetSchemaClipShims.scala | 150 ---
 .../spark/rapids/shims/PlanShimsImpl.scala | 43 -
 .../rapids/shims/RapidsOrcScanMeta.scala | 51 -
 .../rapids/shims/RapidsParquetScanMeta.scala | 52 -
 .../rapids/shims/ShimAQEShuffleReadExec.scala | 76 --
 .../rapids/shims/ShimBaseSubqueryExec.scala | 29 -
 .../shims/ShimBroadcastExchangeLike.scala | 46 -
 .../rapids/shims/ShimPredicateHelper.scala | 44 -
 .../rapids/shims/ShuffleOriginUtil.scala | 31 -
 .../spark/rapids/shims/Spark31XShims.scala | 441 -------
 .../spark/rapids/shims/SparkShims.scala | 44 -
 .../nvidia/spark/rapids/shims/TreeNode.scala | 47 -
 .../spark/rapids/shims/TypeSigUtil.scala | 77 --
 .../spark/rapids/shims/YearParseUtil.scala | 29 -
 .../spark/rapids/shims/gpuWindows.scala | 65 -
 .../spark311/SparkShimServiceProvider.scala | 36 -
 .../spark311/RapidsShuffleManager.scala | 30 -
 .../shims/GpuShuffleBlockResolver.scala | 31 -
 .../rapids/shims/ShuffledBatchRDDUtil.scala | 120 --
 .../shims/storage/ShimDiskBlockManager.scala | 29 -
 .../shims/ShimVectorizedColumnReader.scala | 69 --
 .../rapids/execution/ShimTrampolineUtil.scala | 41 -
 .../python/shims/GpuArrowPythonOutput.scala | 90 --
 .../spark/sql/rapids/shims/AvroUtils.scala | 36 -
 .../rapids/shims/GpuJsonToStructsShim.scala | 82 --
 .../sql/rapids/shims/RapidsErrorUtils.scala | 84 --
 .../rapids/shims/RapidsQueryErrorUtils.scala | 125 --
 .../shims/RapidsShuffleThreadedReader.scala | 66 --
 .../shims/RapidsShuffleThreadedWriter.scala | 55 -
 .../rapids/shims/datetimeExpressions.scala | 41 -
 .../types/shims/PartitionValueCastShims.scala | 36 -
 .../RapidsShuffleBlockFetcherIterator.scala | 1050 -----------------
 .../spark/rapids/shims/SparkShims.scala | 44 -
 .../spark312/SparkShimServiceProvider.scala | 36 -
 .../spark312/RapidsShuffleManager.scala | 30 -
 .../spark/rapids/shims/SparkShims.scala | 49 -
 .../spark313/SparkShimServiceProvider.scala | 36 -
 .../spark313/RapidsShuffleManager.scala | 29 -
 .../nvidia/spark/rapids/shims/AQEUtils.scala | 3 -
 .../rapids/shims/AggregationTagging.scala | 3 -
 .../spark/rapids/shims/AnsiCastShim.scala | 3 -
 .../nvidia/spark/rapids/shims/AnsiUtil.scala | 3 -
 .../spark/rapids/shims/BloomFilterShims.scala | 3 -
 .../rapids/shims/BucketSpecForHiveShim.scala | 3 -
 .../rapids/shims/BucketingUtilsShim.scala | 3 -
 .../spark/rapids/shims/CastCheckShims.scala | 3 -
.../rapids/shims/CastingConfigShim.scala | 3 - .../rapids/shims/CharVarcharUtilsShims.scala | 3 - .../shims/ColumnDefaultValuesShims.scala | 3 - ...aSourceTableAsSelectCommandMetaShims.scala | 3 - .../spark/rapids/shims/CudfUnsafeRow.scala | 3 - .../rapids/shims/CudfUnsafeRowBase.scala | 3 - .../rapids/shims/DateTimeUtilsShims.scala | 3 - .../shims/DecimalArithmeticOverrides.scala | 3 - .../rapids/shims/DecimalMultiply128.scala | 3 - .../spark/rapids/shims/DeltaLakeUtils.scala | 3 - .../spark/rapids/shims/DistributionUtil.scala | 3 - .../rapids/shims/FileIndexOptionsShims.scala | 3 - .../spark/rapids/shims/GetMapValueMeta.scala | 3 - .../spark/rapids/shims/GetSequenceSize.scala | 3 - .../spark/rapids/shims/GlobalLimitShims.scala | 3 - .../shims/GpuAggregateInPandasExecMeta.scala | 3 - .../rapids/shims/GpuBroadcastJoinMeta.scala | 3 - .../spark/rapids/shims/GpuCastShims.scala | 3 - .../shims/GpuFileFormatDataWriterShim.scala | 3 - .../rapids/shims/GpuHashPartitioning.scala | 3 - .../spark/rapids/shims/GpuIntervalUtils.scala | 3 - ...dCreateHiveTableAsSelectCommandShims.scala | 3 - .../rapids/shims/GpuRangePartitioning.scala | 3 - .../spark/rapids/shims/GpuTypeShims.scala | 3 - .../rapids/shims/GpuWindowInPandasExec.scala | 3 - .../spark/rapids/shims/InSubqueryShims.scala | 3 - .../shims/LegacyBehaviorPolicyShim.scala | 3 - .../rapids/shims/NullOutputStreamShim.scala | 3 - .../rapids/shims/OrcProtoWriterShim.scala | 3 - .../spark/rapids/shims/OrcReadingShims.scala | 3 - .../rapids/shims/ParquetFieldIdShims.scala | 3 - .../shims/ParquetLegacyNanoAsLongShims.scala | 3 - .../rapids/shims/ParquetStringPredShims.scala | 3 - .../shims/ParquetTimestampNTZShims.scala | 3 - .../shims/PartitionedFileUtilsShim.scala | 3 - .../spark/rapids/shims/PythonUDFShim.scala | 3 - .../spark/rapids/shims/RaiseErrorShim.scala | 3 - .../shims/RapidsFileSourceMetaUtils.scala | 3 - .../ShimFilePartitionReaderFactory.scala | 3 - .../spark/rapids/shims/ShimLeafExecNode.scala | 3 - .../rapids/shims/Spark31Xuntil33XShims.scala | 3 - .../spark/rapids/shims/TypeUtilsShims.scala | 3 - .../rapids/shims/extractValueShims.scala | 3 - .../shuffle/RapidsShuffleIterator.scala | 3 - .../rapids/shims/GpuShuffleExchangeExec.scala | 3 - .../spark/sql/catalyst/csv/GpuCsvUtils.scala | 3 - .../sql/catalyst/json/GpuJsonUtils.scala | 3 - .../parquet/ShimCurrentBatchIterator.scala | 3 - .../rapids/shims/FilePartitionShims.scala | 3 - .../hive/rapids/shims/CommandUtilsShim.scala | 3 - .../hive/rapids/shims/FileSinkDescShim.scala | 3 - .../GpuCreateHiveTableAsSelectCommand.scala | 3 - .../rapids/shims/GpuInsertIntoHiveTable.scala | 3 - .../shims/GpuRowBasedHiveGenericUDFShim.scala | 3 - .../rapids/shims/HiveInspectorsShim.scala | 3 - .../rapids/shims/HiveProviderCmdShims.scala | 3 - .../spark/sql/rapids/GpuDataSource.scala | 3 - .../sql/rapids/GpuFileFormatWriter.scala | 3 - .../sql/rapids/RapidsCachingReader.scala | 3 - .../rapids/aggregate/aggregateFunctions.scala | 3 - .../apache/spark/sql/rapids/arithmetic.scala | 3 - .../execution/GpuBroadcastHashJoinExec.scala | 3 - .../GpuBroadcastNestedLoopJoinExec.scala | 3 - .../sql/rapids/execution/GpuShuffleMeta.scala | 3 - .../execution/GpuSubqueryBroadcastMeta.scala | 3 - .../python/shims/GpuArrowPythonRunner.scala | 3 - .../python/shims/GpuBasePythonRunner.scala | 3 - .../shims/GpuCoGroupedArrowPythonRunner.scala | 3 - .../shims/GpuGroupedPythonRunnerFactory.scala | 3 - .../python/shims/WritePythonUDFUtils.scala | 3 - .../sql/rapids/shims/ArrowUtilsShim.scala | 3 - 
.../sql/rapids/shims/DataTypeUtilsShim.scala | 3 - .../spark/sql/rapids/shims/GpuAscii.scala | 3 - ...eDataSourceTableAsSelectCommandShims.scala | 3 - .../rapids/shims/GpuMapInPandasExecMeta.scala | 3 - .../shims/RapidsHadoopWriterUtils.scala | 3 - .../sql/rapids/shims/SchemaUtilsShims.scala | 3 - .../shims/SparkUpgradeExceptionShims.scala | 3 - .../apache/spark/sql/rapids/shims/misc.scala | 3 - .../shims/spark311/SparkShimsSuite.scala | 34 - .../shims/spark312/SparkShimsSuite.scala | 34 - .../shims/spark313/SparkShimsSuite.scala | 34 - .../spark/rapids/shims/OrcStatisticShim.scala | 48 - .../shuffle/RapidsShuffleTestHelper.scala | 3 - 146 files changed, 10 insertions(+), 4700 deletions(-) delete mode 100644 datagen/src/main/spark311/scala/org/apache/spark/sql/tests/datagen/DataGenExprBase.scala delete mode 100644 sql-plugin/src/main/spark311/java/com/nvidia/spark/rapids/shims/ShimSupportsRuntimeFiltering.java delete mode 100644 sql-plugin/src/main/spark311/java/com/nvidia/spark/rapids/shims/XxHash64Shims.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/AvoidAdaptiveTransitionToRow.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/BatchScanExecMeta.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuDataSourceRDD.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuParquetCrypto.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/HashUtils.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OffsetWindowFunctionMeta.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcCastingShims.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcShims.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcShims311until320Base.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ParquetSchemaClipShims.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/PlanShimsImpl.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/RapidsOrcScanMeta.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/RapidsParquetScanMeta.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimAQEShuffleReadExec.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimBaseSubqueryExec.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimBroadcastExchangeLike.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimPredicateHelper.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShuffleOriginUtil.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/Spark31XShims.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/SparkShims.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/TreeNode.scala delete mode 100644 
sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/TypeSigUtil.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/YearParseUtil.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/gpuWindows.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/spark311/SparkShimServiceProvider.scala delete mode 100644 sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/spark311/RapidsShuffleManager.scala delete mode 100644 sql-plugin/src/main/spark311/scala/org/apache/spark/rapids/shims/GpuShuffleBlockResolver.scala delete mode 100644 sql-plugin/src/main/spark311/scala/org/apache/spark/rapids/shims/ShuffledBatchRDDUtil.scala delete mode 100644 sql-plugin/src/main/spark311/scala/org/apache/spark/rapids/shims/storage/ShimDiskBlockManager.scala delete mode 100644 sql-plugin/src/main/spark311/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ShimVectorizedColumnReader.scala delete mode 100644 sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/ShimTrampolineUtil.scala delete mode 100644 sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonOutput.scala delete mode 100644 sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/AvroUtils.scala delete mode 100644 sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/GpuJsonToStructsShim.scala delete mode 100644 sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils.scala delete mode 100644 sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsQueryErrorUtils.scala delete mode 100644 sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedReader.scala delete mode 100644 sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedWriter.scala delete mode 100644 sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala delete mode 100644 sql-plugin/src/main/spark311/scala/org/apache/spark/sql/types/shims/PartitionValueCastShims.scala delete mode 100644 sql-plugin/src/main/spark311/scala/org/apache/spark/storage/RapidsShuffleBlockFetcherIterator.scala delete mode 100644 sql-plugin/src/main/spark312/scala/com/nvidia/spark/rapids/shims/SparkShims.scala delete mode 100644 sql-plugin/src/main/spark312/scala/com/nvidia/spark/rapids/shims/spark312/SparkShimServiceProvider.scala delete mode 100644 sql-plugin/src/main/spark312/scala/com/nvidia/spark/rapids/spark312/RapidsShuffleManager.scala delete mode 100644 sql-plugin/src/main/spark313/scala/com/nvidia/spark/rapids/shims/SparkShims.scala delete mode 100644 sql-plugin/src/main/spark313/scala/com/nvidia/spark/rapids/shims/spark313/SparkShimServiceProvider.scala delete mode 100644 sql-plugin/src/main/spark313/scala/com/nvidia/spark/rapids/spark313/RapidsShuffleManager.scala rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/AQEUtils.scala (97%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/AggregationTagging.scala (95%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/AnsiCastShim.scala (97%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/AnsiUtil.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/BloomFilterShims.scala (95%) rename 
sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/BucketSpecForHiveShim.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/BucketingUtilsShim.scala (98%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/CastCheckShims.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/CastingConfigShim.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/CharVarcharUtilsShims.scala (95%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/ColumnDefaultValuesShims.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/CreateDataSourceTableAsSelectCommandMetaShims.scala (98%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRowBase.scala (99%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/DateTimeUtilsShims.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/DecimalArithmeticOverrides.scala (99%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/DecimalMultiply128.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/DeltaLakeUtils.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/FileIndexOptionsShims.scala (95%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/GetMapValueMeta.scala (97%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/GetSequenceSize.scala (98%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/GlobalLimitShims.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/GpuAggregateInPandasExecMeta.scala (98%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/GpuBroadcastJoinMeta.scala (98%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/GpuCastShims.scala (95%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/GpuFileFormatDataWriterShim.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala (97%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtils.scala (98%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/GpuOptimizedCreateHiveTableAsSelectCommandShims.scala (99%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala (98%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/GpuTypeShims.scala (98%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/GpuWindowInPandasExec.scala (97%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/InSubqueryShims.scala (95%) rename sql-plugin/src/main/{spark311 => 
spark320}/scala/com/nvidia/spark/rapids/shims/LegacyBehaviorPolicyShim.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/NullOutputStreamShim.scala (95%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/OrcProtoWriterShim.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/OrcReadingShims.scala (97%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/ParquetFieldIdShims.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/ParquetLegacyNanoAsLongShims.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/ParquetStringPredShims.scala (95%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/ParquetTimestampNTZShims.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/PythonUDFShim.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/RaiseErrorShim.scala (97%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/RapidsFileSourceMetaUtils.scala (95%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/ShimFilePartitionReaderFactory.scala (95%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/ShimLeafExecNode.scala (95%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/Spark31Xuntil33XShims.scala (98%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/TypeUtilsShims.scala (95%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shims/extractValueShims.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleIterator.scala (99%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/rapids/shims/GpuShuffleExchangeExec.scala (98%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala (95%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala (97%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/execution/datasources/parquet/ShimCurrentBatchIterator.scala (99%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/execution/rapids/shims/FilePartitionShims.scala (98%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/hive/rapids/shims/CommandUtilsShim.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/hive/rapids/shims/FileSinkDescShim.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/hive/rapids/shims/GpuCreateHiveTableAsSelectCommand.scala (99%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/hive/rapids/shims/GpuInsertIntoHiveTable.scala (99%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/hive/rapids/shims/GpuRowBasedHiveGenericUDFShim.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/hive/rapids/shims/HiveInspectorsShim.scala (96%) rename sql-plugin/src/main/{spark311 
=> spark320}/scala/org/apache/spark/sql/hive/rapids/shims/HiveProviderCmdShims.scala (98%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/GpuDataSource.scala (99%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/GpuFileFormatWriter.scala (99%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/RapidsCachingReader.scala (99%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/aggregate/aggregateFunctions.scala (97%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/arithmetic.scala (98%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastHashJoinExec.scala (98%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastNestedLoopJoinExec.scala (99%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/execution/GpuShuffleMeta.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/execution/GpuSubqueryBroadcastMeta.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonRunner.scala (98%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuBasePythonRunner.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuCoGroupedArrowPythonRunner.scala (98%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala (97%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/execution/python/shims/WritePythonUDFUtils.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/shims/ArrowUtilsShim.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/shims/DataTypeUtilsShim.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/shims/GpuAscii.scala (97%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/shims/GpuCreateDataSourceTableAsSelectCommandShims.scala (99%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/shims/GpuMapInPandasExecMeta.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/shims/RapidsHadoopWriterUtils.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/shims/SchemaUtilsShims.scala (97%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/shims/SparkUpgradeExceptionShims.scala (96%) rename sql-plugin/src/main/{spark311 => spark320}/scala/org/apache/spark/sql/rapids/shims/misc.scala (97%) delete mode 100644 sql-plugin/src/test/spark311/scala/com/nvidia/spark/rapids/shims/spark311/SparkShimsSuite.scala delete mode 100644 sql-plugin/src/test/spark312/scala/com/nvidia/spark/rapids/shims/spark312/SparkShimsSuite.scala delete mode 100644 sql-plugin/src/test/spark313/scala/com/nvidia/spark/rapids/shims/spark313/SparkShimsSuite.scala delete mode 100644 tests/src/test/spark311/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala rename tests/src/test/{spark311 => 
spark320}/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleTestHelper.scala (99%) diff --git a/build/shimplify.py b/build/shimplify.py index ff2d570998e..4ad0dffb17b 100644 --- a/build/shimplify.py +++ b/build/shimplify.py @@ -67,15 +67,15 @@ Each line is assumed to be a JSON to keep it extensible. /*** spark-rapids-shim-json-lines -{"spark": "312"} +{"spark": "320"} {"spark": "323"} spark-rapids-shim-json-lines ***/ The canonical location of a source file shared by multiple shims is src/main/ -You can find all shim files for a particular shim, e.g. 312, easily by executing: -git grep '{"spark": "312"}' '*.java' '*.scala' +You can find all shim files for a particular shim, e.g. 320, easily by executing: +git grep '{"spark": "320"}' '*.java' '*.scala' """ import errno diff --git a/datagen/src/main/spark311/scala/org/apache/spark/sql/tests/datagen/DataGenExprBase.scala b/datagen/src/main/spark311/scala/org/apache/spark/sql/tests/datagen/DataGenExprBase.scala deleted file mode 100644 index d50008f7fb7..00000000000 --- a/datagen/src/main/spark311/scala/org/apache/spark/sql/tests/datagen/DataGenExprBase.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package org.apache.spark.sql.tests.datagen - -import org.apache.spark.sql.catalyst.expressions.{ExpectsInputTypes, UnaryExpression} -import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback - -trait DataGenExprBase extends UnaryExpression with ExpectsInputTypes with CodegenFallback diff --git a/docs/dev/shimplify.md b/docs/dev/shimplify.md index 94d8a97bec7..a8f075016ae 100644 --- a/docs/dev/shimplify.md +++ b/docs/dev/shimplify.md @@ -28,7 +28,7 @@ time of this writing) have a new set of special sibling directories `src/(main|test)/spark${buildver}`. Previous `src/(main|test)/${buildver}` and -version-range-with-exceptions directories such as `src/main/311until340-non330db` are deprecated and +version-range-with-exceptions directories such as `src/main/320until340-non330db` are deprecated and are being removed as a result of the conversion to the new structure. `shimplify` changes the way the source code is shared among shims by using an explicit @@ -37,7 +37,7 @@ in a source-code level comment instead of the shared directories. ```scala /*** spark-rapids-shim-json-lines -{"spark": "312"} +{"spark": "320"} {"spark": "323"} spark-rapids-shim-json-lines ***/ ``` @@ -155,7 +155,7 @@ It is not expected to be really necessary but it is possible to convert a subset * Either by adding -Dshimplify.shims=buildver1,buildver2,... to the commands above * Or by specifying a list of directories you would like to delete to have a simpler directory --Dshimplify.dirs=311until340-non330db,320until330-noncdh +-Dshimplify.dirs=320until340-non330db,320until330-noncdh The latter is just a minor twist on the former. 
Instead of having an explicit list of shims, it first computes the list of all `buildver` values using provided directories. After this *all* the @@ -202,15 +202,15 @@ work on resolving potential compilation failures manually. ## Deleting a Shim -Every Spark build is de-supported eventually. To drop a build say 311 you can run +Every Spark build is de-supported eventually. To drop a build say 320 you can run ```bash mvn generate-sources -Dshimplify=true -Dshimplify.move=true \ - -Dshimplify.remove.shim=311 + -Dshimplify.remove.shim=320 ``` -This command will remove the comment line `{"spark": "311"}` from all source files contributing to -the 311 shim. If a file belongs exclusively to 311 it will be removed. +This command will remove the comment line `{"spark": "320"}` from all source files contributing to +the 320 shim. If a file belongs exclusively to 320 it will be removed. After adding or deleting shims you should sanity-check the diff in the local git repo and run the integration tests above. diff --git a/sql-plugin/src/main/spark311/java/com/nvidia/spark/rapids/shims/ShimSupportsRuntimeFiltering.java b/sql-plugin/src/main/spark311/java/com/nvidia/spark/rapids/shims/ShimSupportsRuntimeFiltering.java deleted file mode 100644 index 03f2f369f03..00000000000 --- a/sql-plugin/src/main/spark311/java/com/nvidia/spark/rapids/shims/ShimSupportsRuntimeFiltering.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims; - -import org.apache.spark.sql.connector.expressions.NamedReference; -import org.apache.spark.sql.sources.Filter; - -/** - * Shim interface for Apache Spark's SupportsRuntimeFiltering interface - * which was added in Spark 3.2.0. - */ -public interface ShimSupportsRuntimeFiltering { - NamedReference[] filterAttributes(); - - void filter(Filter[] filters); -} diff --git a/sql-plugin/src/main/spark311/java/com/nvidia/spark/rapids/shims/XxHash64Shims.scala b/sql-plugin/src/main/spark311/java/com/nvidia/spark/rapids/shims/XxHash64Shims.scala deleted file mode 100644 index 15cc6e1bfcb..00000000000 --- a/sql-plugin/src/main/spark311/java/com/nvidia/spark/rapids/shims/XxHash64Shims.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import com.nvidia.spark.rapids.TypeSig - -object XxHash64Shims { - // Spark 3.1.x does not normalize -0.0 to 0.0 but spark-rapids-jni kernel does - val supportedTypes: TypeSig = (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 - - TypeSig.FLOAT - TypeSig.DOUBLE) -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/AvoidAdaptiveTransitionToRow.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/AvoidAdaptiveTransitionToRow.scala deleted file mode 100644 index 166c52364af..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/AvoidAdaptiveTransitionToRow.scala +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import java.lang.reflect.Method - -import com.nvidia.spark.rapids.{GpuColumnarToRowExec, GpuExec, GpuRowToColumnarExec} - -import org.apache.spark.rdd.RDD -import org.apache.spark.sql.catalyst.InternalRow -import org.apache.spark.sql.catalyst.expressions.Attribute -import org.apache.spark.sql.execution.SparkPlan -import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec -import org.apache.spark.sql.vectorized.ColumnarBatch - -/** - * This operator will attempt to optimize the case when we are writing the results of - * an adaptive query to disk so that we remove the redundant transitions from columnar - * to row within AdaptiveSparkPlanExec followed by a row to columnar transition. - * - * Specifically, this is the plan we see in this case: - * - * {{{ - * GpuRowToColumnar(AdaptiveSparkPlanExec(GpuColumnarToRow(child)) - * }}} - * - * We perform this optimization at runtime rather than during planning, because when the adaptive - * plan is being planned and executed, we don't know whether it is being called from an operation - * that wants rows (such as CollectTailExec) or from an operation that wants columns (such as - * GpuDataWritingCommandExec). - * - * Spark does not provide a mechanism for executing an adaptive plan and retrieving columnar - * results and the internal methods that we need to call are private, so we use reflection to - * call them. 
- * - * @param child The plan to execute - */ -case class AvoidAdaptiveTransitionToRow(child: SparkPlan) extends ShimUnaryExecNode with GpuExec { - - override def doExecute(): RDD[InternalRow] = - throw new IllegalStateException(s"Row-based execution should not occur for $this") - - override def output: Seq[Attribute] = child.output - - override protected def internalDoExecuteColumnar(): RDD[ColumnarBatch] = child match { - case GpuRowToColumnarExec(a: AdaptiveSparkPlanExec, _) => - val getFinalPhysicalPlan = getPrivateMethod("getFinalPhysicalPlan") - val plan = getFinalPhysicalPlan.invoke(a) - val rdd = plan match { - case t: GpuColumnarToRowExec => - t.child.executeColumnar() - case _ => - child.executeColumnar() - } - - // final UI update - val finalPlanUpdate = getPrivateMethod("finalPlanUpdate") - finalPlanUpdate.invoke(a) - - rdd - - case _ => - child.executeColumnar() - } - - private def getPrivateMethod(name: String): Method = { - val m = classOf[AdaptiveSparkPlanExec].getDeclaredMethod(name) - m.setAccessible(true) - m - } -} \ No newline at end of file diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/BatchScanExecMeta.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/BatchScanExecMeta.scala deleted file mode 100644 index 7353544ec8a..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/BatchScanExecMeta.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import com.nvidia.spark.rapids._ - -import org.apache.spark.sql.execution.datasources.v2.BatchScanExec - -class BatchScanExecMeta(p: BatchScanExec, - conf: RapidsConf, - parent: Option[RapidsMeta[_, _, _]], - rule: DataFromReplacementRule) - extends SparkPlanMeta[BatchScanExec](p, conf, parent, rule) { - - override val childScans: scala.Seq[ScanMeta[_]] = - Seq(GpuOverrides.wrapScan(p.scan, conf, Some(this))) - - override def convertToGpu(): GpuExec = - GpuBatchScanExec(p.output, childScans.head.convertToGpu()) -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala deleted file mode 100644 index e7d5d452abb..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import com.nvidia.spark.rapids.{GpuBatchScanExecMetrics, GpuScan} - -import org.apache.spark.rdd.RDD -import org.apache.spark.sql.catalyst.InternalRow -import org.apache.spark.sql.catalyst.expressions.AttributeReference -import org.apache.spark.sql.catalyst.plans.QueryPlan -import org.apache.spark.sql.connector.read._ -import org.apache.spark.sql.execution.datasources.v2.DataSourceV2ScanExecBase -import org.apache.spark.sql.vectorized.ColumnarBatch - -case class GpuBatchScanExec( - output: Seq[AttributeReference], - @transient scan: GpuScan) extends DataSourceV2ScanExecBase with GpuBatchScanExecMetrics { - @transient lazy val batch: Batch = scan.toBatch - - @transient override lazy val partitions: Seq[InputPartition] = batch.planInputPartitions() - - override lazy val readerFactory: PartitionReaderFactory = batch.createReaderFactory() - - override lazy val inputRDD: RDD[InternalRow] = { - scan.metrics = allMetrics - new GpuDataSourceRDD(sparkContext, partitions, readerFactory) - } - - override def doCanonicalize(): GpuBatchScanExec = { - this.copy(output = output.map(QueryPlan.normalizeExpressions(_, output))) - } - - override def internalDoExecuteColumnar(): RDD[ColumnarBatch] = { - val numOutputRows = longMetric("numOutputRows") - inputRDD.asInstanceOf[RDD[ColumnarBatch]].map { b => - numOutputRows += b.numRows() - b - } - } -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuDataSourceRDD.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuDataSourceRDD.scala deleted file mode 100644 index 044bbffa538..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuDataSourceRDD.scala +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2020-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import com.nvidia.spark.rapids.{MetricsBatchIterator, PartitionIterator} -import com.nvidia.spark.rapids.ScalableTaskCompletion.onTaskCompletion - -import org.apache.spark.{InterruptibleIterator, Partition, SparkContext, SparkException, TaskContext} -import org.apache.spark.sql.catalyst.InternalRow -import org.apache.spark.sql.connector.read.{InputPartition, PartitionReaderFactory} -import org.apache.spark.sql.execution.datasources.v2.{DataSourceRDD, DataSourceRDDPartition} -import org.apache.spark.sql.vectorized.ColumnarBatch - -/** - * A replacement for DataSourceRDD that does NOT compute the bytes read input metric. - * DataSourceRDD assumes all reads occur on the task thread, and some GPU input sources - * use multithreaded readers that cannot generate proper metrics with DataSourceRDD. - * @note It is the responsibility of users of this RDD to generate the bytes read input - * metric explicitly! - */ -class GpuDataSourceRDD( - sc: SparkContext, - @transient private val inputPartitions: Seq[InputPartition], - partitionReaderFactory: PartitionReaderFactory) - extends DataSourceRDD(sc, inputPartitions, partitionReaderFactory, columnarReads = true) { - - private def castPartition(split: Partition): DataSourceRDDPartition = split match { - case p: DataSourceRDDPartition => p - case _ => throw new SparkException(s"[BUG] Not a DataSourceRDDPartition: $split") - } - - override def compute(split: Partition, context: TaskContext): Iterator[InternalRow] = { - val inputPartition = castPartition(split).inputPartition - val batchReader = partitionReaderFactory.createColumnarReader(inputPartition) - val iter = new MetricsBatchIterator(new PartitionIterator[ColumnarBatch](batchReader)) - onTaskCompletion(batchReader.close()) - // TODO: SPARK-25083 remove the type erasure hack in data source scan - new InterruptibleIterator(context, iter.asInstanceOf[Iterator[InternalRow]]) - } -} - -object GpuDataSourceRDD { - def apply( - sc: SparkContext, - inputPartitions: Seq[InputPartition], - partitionReaderFactory: PartitionReaderFactory): GpuDataSourceRDD = { - new GpuDataSourceRDD(sc, inputPartitions, partitionReaderFactory) - } -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader.scala deleted file mode 100644 index 37d63f3d7b3..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader.scala +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright (c) 2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import java.nio.ByteBuffer - -import com.nvidia.spark.rapids.{GpuMetric, HostMemoryOutputStream, NoopMetric} -import org.apache.hadoop.conf.Configuration -import org.apache.hadoop.hive.common.io.DiskRangeList -import org.apache.orc.{CompressionCodec, DataReader, OrcFile, OrcProto, StripeInformation, TypeDescription} -import org.apache.orc.impl.{BufferChunk, DataReaderProperties, OrcIndex, RecordReaderUtils} - -/** - * File cache is not supported for Spark 3.1.x so this is a thin wrapper - * around the ORC DataReader. - */ -class GpuOrcDataReader( - val props: DataReaderProperties, - val conf: Configuration, - val metrics: Map[String, GpuMetric]) extends DataReader { - private var dataReader = RecordReaderUtils.createDefaultDataReader(props) - - override def open(): Unit = dataReader.open() - - override def readRowIndex( - stripe: StripeInformation, - fileSchema: TypeDescription, - footer: OrcProto.StripeFooter, - ignoreNonUtf8BloomFilter: Boolean, - included: Array[Boolean], - indexes: Array[OrcProto.RowIndex], - sargColumns: Array[Boolean], - version: OrcFile.WriterVersion, - bloomFilterKinds: Array[OrcProto.Stream.Kind], - bloomFilterIndices: Array[OrcProto.BloomFilterIndex]): OrcIndex = { - dataReader.readRowIndex(stripe, fileSchema, footer, ignoreNonUtf8BloomFilter, included, - indexes, sargColumns, version, bloomFilterKinds, bloomFilterIndices) - } - - override def readStripeFooter(stripe: StripeInformation): OrcProto.StripeFooter = { - dataReader.readStripeFooter(stripe) - } - - override def readFileData( - range: DiskRangeList, - baseOffset: Long, - forceDirect: Boolean): DiskRangeList = { - dataReader.readFileData(range, baseOffset, forceDirect) - } - - override def isTrackingDiskRanges: Boolean = dataReader.isTrackingDiskRanges - - override def releaseBuffer(buffer: ByteBuffer): Unit = dataReader.releaseBuffer(buffer) - - override def getCompressionCodec: CompressionCodec = dataReader.getCompressionCodec - - override def close(): Unit = { - if (dataReader != null) { - dataReader.close() - dataReader = null - } - } - - def copyFileDataToHostStream(out: HostMemoryOutputStream, ranges: DiskRangeList): Unit = { - val bufferChunks = dataReader.readFileData(ranges, 0, false) - metrics.getOrElse(GpuMetric.WRITE_BUFFER_TIME, NoopMetric).ns { - var current = bufferChunks - while (current != null) { - out.write(current.getData) - if (dataReader.isTrackingDiskRanges && current.isInstanceOf[BufferChunk]) { - dataReader.releaseBuffer(current.getData) - } - current = current.next - } - } - } -} - -object GpuOrcDataReader { - // File cache is not being used, so we want to coalesce read ranges - val shouldMergeDiskRanges: Boolean = true -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuParquetCrypto.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuParquetCrypto.scala deleted file mode 100644 index 54dc9ef6a47..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuParquetCrypto.scala +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -object GpuParquetCrypto { - /** - * Columnar encryption was added in Spark 3.2.0 - */ - def isColumnarCryptoException(e: Throwable): Boolean = false -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/HashUtils.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/HashUtils.scala deleted file mode 100644 index 5e381dc3ae3..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/HashUtils.scala +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import ai.rapids.cudf - -object HashUtils { - /** - * In Spark 3.2.0+ -0.0 is normalized to 0.0, but for everyone else this is a noop - * @param in the input to normalize - * @return the result - */ - def normalizeInput(in: cudf.ColumnVector): cudf.ColumnVector = - in.incRefCount() -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OffsetWindowFunctionMeta.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OffsetWindowFunctionMeta.scala deleted file mode 100644 index 5fd65188e60..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OffsetWindowFunctionMeta.scala +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import com.nvidia.spark.rapids.{BaseExprMeta, DataFromReplacementRule, ExprMeta, GpuOverrides, RapidsConf, RapidsMeta} - -import org.apache.spark.sql.catalyst.expressions.{Expression, Lag, Lead, Literal, OffsetWindowFunction} -import org.apache.spark.sql.types.IntegerType - -/** - * Spark 3.1.1-specific replacement for com.nvidia.spark.rapids.OffsetWindowFunctionMeta. - * This is required primarily for two reasons: - * 1. com.nvidia.spark.rapids.OffsetWindowFunctionMeta (compiled against Spark 3.0.x) - * fails class load in Spark 3.1.x. (`expr.input` is not recognized as an Expression.) - * 2. The semantics of offsets in LAG() are reversed/negated in Spark 3.1.1. - * E.g. The expression `LAG(col, 5)` causes Lag.offset to be set to `-5`, - * as opposed to `5`, in prior versions of Spark. - * This class adjusts the LAG offset to use similar semantics to Spark 3.0.x. - */ -abstract class OffsetWindowFunctionMeta[INPUT <: OffsetWindowFunction] ( - expr: INPUT, - conf: RapidsConf, - parent: Option[RapidsMeta[_, _, _]], - rule: DataFromReplacementRule) - extends ExprMeta[INPUT](expr, conf, parent, rule) { - lazy val input: BaseExprMeta[_] = GpuOverrides.wrapExpr(expr.input, conf, Some(this)) - lazy val adjustedOffset: Expression = { - expr match { - case lag: Lag => - GpuOverrides.extractLit(lag.offset) match { - case Some(Literal(offset: Int, IntegerType)) => - Literal(-offset, IntegerType) - case _ => - throw new IllegalStateException( - s"Only integer literal offsets are supported for LAG. Found:${lag.offset}") - } - case lead: Lead => - GpuOverrides.extractLit(lead.offset) match { - case Some(Literal(offset: Int, IntegerType)) => - Literal(offset, IntegerType) - case _ => - throw new IllegalStateException( - s"Only integer literal offsets are supported for LEAD. Found:${lead.offset}") - } - case other => - throw new IllegalStateException(s"$other is not a supported window function") - } - } - lazy val offset: BaseExprMeta[_] = - GpuOverrides.wrapExpr(adjustedOffset, conf, Some(this)) - lazy val default: BaseExprMeta[_] = GpuOverrides.wrapExpr(expr.default, conf, Some(this)) - - override val childExprs: Seq[BaseExprMeta[_]] = Seq(input, offset, default) - - override def tagExprForGpu(): Unit = { - expr match { - case Lead(_,_,_) => // Supported. - case Lag(_,_,_) => // Supported. - case other => - willNotWorkOnGpu( s"Only LEAD/LAG offset window functions are supported. Found: $other") - } - - if (GpuOverrides.extractLit(expr.offset).isEmpty) { // Not a literal offset. - willNotWorkOnGpu( - s"Only integer literal offsets are supported for LEAD/LAG. Found: ${expr.offset}") - } - } -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcCastingShims.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcCastingShims.scala deleted file mode 100644 index 2bbc860ca52..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcCastingShims.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import ai.rapids.cudf.{ColumnView, DType} -import com.nvidia.spark.rapids.GpuOrcScan - - -object OrcCastingShims { - - def castIntegerToTimestamp(col: ColumnView, colType: DType): ColumnView = { - // For 311 <= spark < 320 (including 311, 312, 313), they consider the integer as - // milliseconds. - GpuOrcScan.castIntegersToTimestamp(col, colType, DType.TIMESTAMP_MILLISECONDS) - } -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcShims.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcShims.scala deleted file mode 100644 index 3ded1c76620..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcShims.scala +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import com.nvidia.spark.rapids.RapidsPluginImplicits._ -import org.apache.orc.Reader - -object OrcShims extends OrcShims311until320Base { - - // the ORC Reader in non CDH Spark is closeable - def withReader[T <: AutoCloseable, V](r: T)(block: T => V): V = { - try { - block(r) - } finally { - r.safeClose() - } - } - - // the ORC Reader in non CDH Spark is closeable - def closeReader(reader: Reader): Unit = { - if (reader != null) { - reader.close() - } - } -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcShims311until320Base.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcShims311until320Base.scala deleted file mode 100644 index ec60ea61e4a..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcShims311until320Base.scala +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import java.nio.ByteBuffer - -import scala.collection.mutable.ArrayBuffer - -import com.nvidia.spark.rapids.OrcOutputStripe -import org.apache.hadoop.conf.Configuration -import org.apache.hadoop.hive.common.io.DiskRange -import org.apache.orc.{CompressionCodec, CompressionKind, DataReader, OrcFile, OrcProto, PhysicalWriter, Reader, StripeInformation, TypeDescription} -import org.apache.orc.impl.{BufferChunk, DataReaderProperties, InStream, OrcCodecPool, OutStream, SchemaEvolution} -import org.apache.orc.impl.RecordReaderImpl.SargApplier - -import org.apache.spark.sql.execution.datasources.orc.OrcUtils -import org.apache.spark.sql.types.DataType - -trait OrcShims311until320Base { - - // create reader properties builder - def newDataReaderPropertiesBuilder(compressionSize: Int, - compressionKind: CompressionKind, typeCount: Int): DataReaderProperties.Builder = { - DataReaderProperties.builder() - .withBufferSize(compressionSize) - .withCompression(compressionKind) - .withTypeCount(typeCount) - } - - // create ORC out stream - def newOrcOutStream(name: String, bufferSize: Int, codec: CompressionCodec, - receiver: PhysicalWriter.OutputReceiver): OutStream = { - new OutStream(name, bufferSize, codec, receiver) - } - - // filter stripes by pushing down filter - def filterStripes( - stripes: Seq[StripeInformation], - conf: Configuration, - orcReader: Reader, - dataReader: DataReader, - gen: (StripeInformation, OrcProto.StripeFooter, Array[Int]) => OrcOutputStripe, - evolution: SchemaEvolution, - sargApp: SargApplier, - sargColumns: Array[Boolean], - ignoreNonUtf8BloomFilter: Boolean, - writerVersion: OrcFile.WriterVersion, - fileIncluded: Array[Boolean], - columnMapping: Array[Int]): ArrayBuffer[OrcOutputStripe] = { - val result = new ArrayBuffer[OrcOutputStripe](stripes.length) - stripes.foreach { stripe => - val stripeFooter = dataReader.readStripeFooter(stripe) - val needStripe = if (sargApp != null) { - // An ORC schema is a single struct type describing the schema fields - val orcFileSchema = evolution.getFileType(0) - val orcIndex = dataReader.readRowIndex(stripe, orcFileSchema, stripeFooter, - ignoreNonUtf8BloomFilter, fileIncluded, null, sargColumns, - writerVersion, null, null) - val rowGroups = sargApp.pickRowGroups(stripe, orcIndex.getRowGroupIndex, - orcIndex.getBloomFilterKinds, stripeFooter.getColumnsList, orcIndex.getBloomFilterIndex, - true) - rowGroups != SargApplier.READ_NO_RGS - } else { - true - } - - if (needStripe) { - result.append(gen(stripe, stripeFooter, columnMapping)) - } - } - result - } - - /** - * Compare if the two TypeDescriptions are equal by ignoring attribute - */ - def typeDescriptionEqual(lhs: TypeDescription, rhs: TypeDescription): Boolean = { - lhs.equals(rhs) - } - - // forcePositionalEvolution is available from Spark-3.2. So setting this as false. 
- def forcePositionalEvolution(conf:Configuration): Boolean = { - false - } - - // orcTypeDescriptionString is renamed to getOrcSchemaString from 3.3+ - def getOrcSchemaString(dt: DataType): String = { - OrcUtils.orcTypeDescriptionString(dt) - } - - def parseFooterFromBuffer( - bb: ByteBuffer, - ps: OrcProto.PostScript, - psLen: Int): OrcProto.Footer = { - val footerSize = ps.getFooterLength.toInt - val footerOffset = bb.limit() - 1 - psLen - footerSize - val footerBuffer = bb.duplicate() - footerBuffer.position(footerOffset) - footerBuffer.limit(footerOffset + footerSize) - val diskRanges = new java.util.ArrayList[DiskRange]() - diskRanges.add(new BufferChunk(footerBuffer, 0)) - val compressionKind = CompressionKind.valueOf(ps.getCompression.name()) - val codec = OrcCodecPool.getCodec(compressionKind) - try { - val in = InStream.createCodedInputStream("footer", diskRanges, footerSize, codec, - ps.getCompressionBlockSize.toInt) - OrcProto.Footer.parseFrom(in) - } finally { - OrcCodecPool.returnCodec(compressionKind, codec) - } - } - - // ORC version 1.5.x doesn't have separate stripe statistics length - def getStripeStatisticsLength(ps: OrcProto.PostScript): Long = 0L -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ParquetSchemaClipShims.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ParquetSchemaClipShims.scala deleted file mode 100644 index 4d6d4967a80..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ParquetSchemaClipShims.scala +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright (c) 2022-2024, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import org.apache.parquet.schema._ -import org.apache.parquet.schema.OriginalType._ -import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName._ - -import org.apache.spark.sql.internal.SQLConf -import org.apache.spark.sql.rapids.execution.RapidsAnalysisException -import org.apache.spark.sql.rapids.shims.RapidsErrorUtils -import org.apache.spark.sql.types._ - -object ParquetSchemaClipShims { - /** Stubs for configs not defined before Spark 330 */ - def useFieldId(conf: SQLConf): Boolean = false - - def ignoreMissingIds(conf: SQLConf): Boolean = false - - def checkIgnoreMissingIds(ignoreMissingIds: Boolean, parquetFileSchema: MessageType, - catalystRequestedSchema: StructType): Unit = {} - - def hasFieldId(field: StructField): Boolean = - throw new IllegalStateException("This shim should not invoke `hasFieldId`") - - def hasFieldIds(schema: StructType): Boolean = - throw new IllegalStateException("This shim should not invoke `hasFieldIds`") - - def getFieldId(field: StructField): Int = - throw new IllegalStateException("This shim should not invoke `getFieldId`") - - def fieldIdToFieldMap(useFieldId: Boolean, fileType: Type): Map[Int, Type] = Map.empty[Int, Type] - - def fieldIdToNameMap(useFieldId: Boolean, - fileType: Type): Map[Int, String] = Map.empty[Int, String] - - /** - * Convert a Parquet primitive type to a Spark type. - * Based on Spark's ParquetSchemaConverter.convertPrimitiveField - */ - def convertPrimitiveField(field: PrimitiveType): DataType = { - val typeName = field.getPrimitiveTypeName - val originalType = field.getOriginalType - - def typeString = - if (originalType == null) s"$typeName" else s"$typeName ($originalType)" - - def typeNotSupported() = - throw new RapidsAnalysisException(s"Parquet type not supported: $typeString") - - def typeNotImplemented() = - throw RapidsErrorUtils.parquetTypeUnsupportedYetError(typeString) - - def illegalType() = - throw RapidsErrorUtils.illegalParquetTypeError(typeString) - - // When maxPrecision = -1, we skip precision range check, and always respect the precision - // specified in field.getDecimalMetadata. This is useful when interpreting decimal types stored - // as binaries with variable lengths. 
- def makeDecimalType(maxPrecision: Int = -1): DecimalType = { - val precision = field.getDecimalMetadata.getPrecision - val scale = field.getDecimalMetadata.getScale - - if (!(maxPrecision == -1 || 1 <= precision && precision <= maxPrecision)) { - throw new RapidsAnalysisException(s"Invalid decimal precision: $typeName " + - s"cannot store $precision digits (max $maxPrecision)") - } - - DecimalType(precision, scale) - } - - typeName match { - case BOOLEAN => BooleanType - - case FLOAT => FloatType - - case DOUBLE => DoubleType - - case INT32 => - originalType match { - case INT_8 => ByteType - case INT_16 => ShortType - case INT_32 | null => IntegerType - case DATE => DateType - case DECIMAL => makeDecimalType(Decimal.MAX_INT_DIGITS) - case UINT_8 => typeNotSupported() - case UINT_16 => typeNotSupported() - case UINT_32 => typeNotSupported() - case TIME_MILLIS => typeNotImplemented() - case _ => illegalType() - } - - case INT64 => - originalType match { - case INT_64 | null => LongType - case DECIMAL => makeDecimalType(Decimal.MAX_LONG_DIGITS) - case UINT_64 => typeNotSupported() - case TIMESTAMP_MICROS => TimestampType - case TIMESTAMP_MILLIS => TimestampType - case _ => illegalType() - } - - case INT96 => - if (!SQLConf.get.isParquetINT96AsTimestamp) { - throw new RapidsAnalysisException( - "INT96 is not supported unless it's interpreted as timestamp. " + - s"Please try to set ${SQLConf.PARQUET_INT96_AS_TIMESTAMP.key} to true.") - } - TimestampType - - case BINARY => - originalType match { - case UTF8 | ENUM | JSON => StringType - case null if SQLConf.get.isParquetBinaryAsString => StringType - case null => BinaryType - case BSON => BinaryType - case DECIMAL => makeDecimalType() - case _ => illegalType() - } - - case FIXED_LEN_BYTE_ARRAY => - originalType match { - case DECIMAL => makeDecimalType(Decimal.maxPrecisionForBytes(field.getTypeLength)) - case INTERVAL => typeNotImplemented() - case _ => illegalType() - } - - case _ => illegalType() - } - } -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/PlanShimsImpl.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/PlanShimsImpl.scala deleted file mode 100644 index 438038fb749..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/PlanShimsImpl.scala +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import com.nvidia.spark.rapids.{GpuAlias, PlanShims} - -import org.apache.spark.sql.catalyst.expressions.{Alias, Expression} -import org.apache.spark.sql.execution.SparkPlan -import org.apache.spark.sql.types.DataType - -class PlanShimsImpl extends PlanShims { - def extractExecutedPlan(plan: SparkPlan): SparkPlan = plan - def isAnsiCast(e: Expression): Boolean = AnsiCastShim.isAnsiCast(e) - - def isAnsiCastOptionallyAliased(e: Expression): Boolean = e match { - case Alias(e, _) => isAnsiCast(e) - case GpuAlias(e, _) => isAnsiCast(e) - case e => isAnsiCast(e) - } - - def extractAnsiCastTypes(e: Expression): (DataType, DataType) = { - AnsiCastShim.extractAnsiCastTypes(e) - } -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/RapidsOrcScanMeta.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/RapidsOrcScanMeta.scala deleted file mode 100644 index 34bd8d7cc4f..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/RapidsOrcScanMeta.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import com.nvidia.spark.rapids.{DataFromReplacementRule, GpuOrcScan, GpuScan, RapidsConf, RapidsMeta, ScanMeta} - -import org.apache.spark.sql.execution.datasources.v2.orc.OrcScan - -class RapidsOrcScanMeta( - oScan: OrcScan, - conf: RapidsConf, - parent: Option[RapidsMeta[_, _, _]], - rule: DataFromReplacementRule) - extends ScanMeta[OrcScan](oScan, conf, parent, rule) { - - override def tagSelfForGpu(): Unit = { - GpuOrcScan.tagSupport(this) - } - - override def convertToGpu(): GpuScan = - GpuOrcScan(oScan.sparkSession, - oScan.hadoopConf, - oScan.fileIndex, - oScan.dataSchema, - oScan.readDataSchema, - oScan.readPartitionSchema, - oScan.options, - oScan.pushedFilters, - oScan.partitionFilters, - oScan.dataFilters, - conf) -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/RapidsParquetScanMeta.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/RapidsParquetScanMeta.scala deleted file mode 100644 index 42a905c9c45..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/RapidsParquetScanMeta.scala +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import com.nvidia.spark.rapids.{DataFromReplacementRule, GpuParquetScan, GpuScan, RapidsConf, RapidsMeta, ScanMeta} - -import org.apache.spark.sql.execution.datasources.v2.parquet.ParquetScan - -class RapidsParquetScanMeta( - pScan: ParquetScan, - conf: RapidsConf, - parent: Option[RapidsMeta[_, _, _]], - rule: DataFromReplacementRule) - extends ScanMeta[ParquetScan](pScan, conf, parent, rule) { - - override def tagSelfForGpu(): Unit = { - GpuParquetScan.tagSupport(this) - } - - override def convertToGpu(): GpuScan = { - GpuParquetScan(pScan.sparkSession, - pScan.hadoopConf, - pScan.fileIndex, - pScan.dataSchema, - pScan.readDataSchema, - pScan.readPartitionSchema, - pScan.pushedFilters, - pScan.options, - pScan.partitionFilters, - pScan.dataFilters, - conf) - } -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimAQEShuffleReadExec.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimAQEShuffleReadExec.scala deleted file mode 100644 index 8a4162a2175..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimAQEShuffleReadExec.scala +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import com.nvidia.spark.rapids._ - -import org.apache.spark.sql.catalyst.expressions._ -import org.apache.spark.sql.execution.adaptive._ -import org.apache.spark.sql.rapids.execution._ - -class GpuCustomShuffleReaderMeta(reader: CustomShuffleReaderExec, - conf: RapidsConf, - parent: Option[RapidsMeta[_, _, _]], - rule: DataFromReplacementRule) - extends SparkPlanMeta[CustomShuffleReaderExec](reader, conf, parent, rule) { - - - override def checkExistingTags(): Unit = { - // CoalesceShufflePartitions performs a transformUp and may replace ShuffleQueryStageExec - // with CustomShuffleReaderExec, causing tags to be copied from ShuffleQueryStageExec to - // CustomShuffleReaderExec, including the "no need to replace ShuffleQueryStageExec" tag. 
- wrapped.getTagValue(RapidsMeta.gpuSupportedTag) - .foreach(_.diff(cannotBeReplacedReasons.get) - .filterNot(_ == s"there is no need to replace ${classOf[ShuffleQueryStageExec]}") - .foreach(willNotWorkOnGpu)) - } - - override def tagPlanForGpu(): Unit = { - if (!reader.child.supportsColumnar) { - willNotWorkOnGpu( - "Unable to replace CustomShuffleReader due to child not being columnar") - } - val shuffleEx = reader.child.asInstanceOf[ShuffleQueryStageExec].plan - shuffleEx.getTagValue(GpuOverrides.preRowToColProjection).foreach { r2c => - wrapped.setTagValue(GpuOverrides.preRowToColProjection, r2c) - } - } - - override def convertToGpu(): GpuExec = { - GpuCustomShuffleReaderExec(childPlans.head.convertIfNeeded(), - reader.partitionSpecs) - } - - // extract output attributes of the underlying ShuffleExchange - override def outputAttributes: Seq[Attribute] = { - val shuffleEx = reader.child.asInstanceOf[ShuffleQueryStageExec].plan - shuffleEx.getTagValue(GpuShuffleMetaBase.shuffleExOutputAttributes) - .getOrElse(shuffleEx.output) - } - - // fetch availableRuntimeDataTransition of the underlying ShuffleExchange - override val availableRuntimeDataTransition: Boolean = { - val shuffleEx = reader.child.asInstanceOf[ShuffleQueryStageExec].plan - shuffleEx.getTagValue(GpuShuffleMetaBase.availableRuntimeDataTransition) - .getOrElse(false) - } -} \ No newline at end of file diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimBaseSubqueryExec.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimBaseSubqueryExec.scala deleted file mode 100644 index 9c28a0b01df..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimBaseSubqueryExec.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import org.apache.spark.sql.SparkSession -import org.apache.spark.sql.execution.BaseSubqueryExec - -trait ShimBaseSubqueryExec extends BaseSubqueryExec { - @transient final val session = SparkSession.getActiveSession.orNull -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimBroadcastExchangeLike.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimBroadcastExchangeLike.scala deleted file mode 100644 index f86d7e51ca6..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimBroadcastExchangeLike.scala +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import scala.concurrent.Promise - -import org.apache.spark.broadcast.Broadcast -import org.apache.spark.sql.SparkSession -import org.apache.spark.sql.execution.exchange.BroadcastExchangeLike - -/** - * This shim handles the completion future differences between - * Apache Spark and Databricks. - */ -trait ShimBroadcastExchangeLike extends BroadcastExchangeLike { - @transient final val session = SparkSession.getActiveSession.orNull - - @transient - protected lazy val promise = Promise[Broadcast[Any]]() - - /** - * For registering callbacks on `relationFuture`. - * Note that calling this field will not start the execution of broadcast job. - */ - @transient - lazy val completionFuture: concurrent.Future[Broadcast[Any]] = promise.future -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimPredicateHelper.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimPredicateHelper.scala deleted file mode 100644 index 2e229fab1df..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimPredicateHelper.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import org.apache.spark.sql.catalyst.expressions._ -import org.apache.spark.sql.rapids._ - -trait ShimPredicateHelper extends PredicateHelper { - // SPARK-30027 from 3.2.0 - // If one expression and its children are null intolerant, it is null intolerant. 
- protected def isNullIntolerant(expr: Expression): Boolean = expr match { - case e: NullIntolerant => e.children.forall(isNullIntolerant) - case _ => false - } - - override protected def splitConjunctivePredicates( - condition: Expression - ): Seq[Expression] = { - condition match { - case GpuAnd(cond1, cond2) => - splitConjunctivePredicates(cond1) ++ splitConjunctivePredicates(cond2) - case _ => super.splitConjunctivePredicates(condition) - } - } -} \ No newline at end of file diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShuffleOriginUtil.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShuffleOriginUtil.scala deleted file mode 100644 index ef7d256d41f..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShuffleOriginUtil.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import org.apache.spark.sql.execution.exchange.{ENSURE_REQUIREMENTS, REPARTITION, REPARTITION_WITH_NUM, ShuffleOrigin} - -object ShuffleOriginUtil { - private val knownOrigins: Set[ShuffleOrigin] = Set(ENSURE_REQUIREMENTS, - REPARTITION, REPARTITION_WITH_NUM) - - def isSupported(origin: ShuffleOrigin): Boolean = knownOrigins.contains(origin) -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/Spark31XShims.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/Spark31XShims.scala deleted file mode 100644 index eab56a8348a..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/Spark31XShims.scala +++ /dev/null @@ -1,441 +0,0 @@ -/* - * Copyright (c) 2021-2024, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import scala.collection.mutable.ListBuffer - -import com.nvidia.spark.rapids._ -import org.apache.hadoop.fs.FileStatus - -import org.apache.spark.internal.Logging -import org.apache.spark.sql.SparkSession -import org.apache.spark.sql.catalyst.{InternalRow, TableIdentifier} -import org.apache.spark.sql.catalyst.errors.attachTree -import org.apache.spark.sql.catalyst.expressions._ -import org.apache.spark.sql.catalyst.expressions.aggregate.Average -import org.apache.spark.sql.catalyst.plans.physical.BroadcastMode -import org.apache.spark.sql.catalyst.trees.TreeNode -import org.apache.spark.sql.catalyst.util.{DateFormatter, DateTimeUtils} -import org.apache.spark.sql.connector.read.Scan -import org.apache.spark.sql.execution._ -import org.apache.spark.sql.execution.adaptive._ -import org.apache.spark.sql.execution.command.{AlterTableRecoverPartitionsCommand, RunnableCommand} -import org.apache.spark.sql.execution.datasources._ -import org.apache.spark.sql.execution.datasources.v2.orc.OrcScan -import org.apache.spark.sql.execution.datasources.v2.parquet.ParquetScan -import org.apache.spark.sql.execution.exchange.{BroadcastExchangeExec, ReusedExchangeExec} -import org.apache.spark.sql.execution.joins._ -import org.apache.spark.sql.execution.python._ -import org.apache.spark.sql.execution.window.WindowExecBase -import org.apache.spark.sql.internal.SQLConf -import org.apache.spark.sql.rapids._ -import org.apache.spark.sql.rapids.aggregate._ -import org.apache.spark.sql.rapids.execution.GpuCustomShuffleReaderExec -import org.apache.spark.sql.rapids.execution.python._ -import org.apache.spark.sql.types._ - -// 31x nondb shims, used by 311cdh and 31x -abstract class Spark31XShims extends Spark31Xuntil33XShims with Logging { - override def parquetRebaseReadKey: String = - SQLConf.LEGACY_PARQUET_REBASE_MODE_IN_READ.key - override def parquetRebaseWriteKey: String = - SQLConf.LEGACY_PARQUET_REBASE_MODE_IN_WRITE.key - override def avroRebaseReadKey: String = - SQLConf.LEGACY_AVRO_REBASE_MODE_IN_READ.key - override def avroRebaseWriteKey: String = - SQLConf.LEGACY_AVRO_REBASE_MODE_IN_WRITE.key - override def parquetRebaseRead(conf: SQLConf): String = - conf.getConf(SQLConf.LEGACY_PARQUET_REBASE_MODE_IN_READ) - override def parquetRebaseWrite(conf: SQLConf): String = - conf.getConf(SQLConf.LEGACY_PARQUET_REBASE_MODE_IN_WRITE) - - override def sessionFromPlan(plan: SparkPlan): SparkSession = { - plan.sqlContext.sparkSession - } - - override def filesFromFileIndex(fileIndex: PartitioningAwareFileIndex): Seq[FileStatus] = { - fileIndex.allFiles() - } - - def broadcastModeTransform(mode: BroadcastMode, rows: Array[InternalRow]): Any = - mode.transform(rows) - - override def newBroadcastQueryStageExec( - old: BroadcastQueryStageExec, - newPlan: SparkPlan): BroadcastQueryStageExec = BroadcastQueryStageExec(old.id, newPlan) - - override def getDateFormatter(): DateFormatter = { - DateFormatter(DateTimeUtils.getZoneId(SQLConf.get.sessionLocalTimeZone)) - } - - override def isExchangeOp(plan: SparkPlanMeta[_]): Boolean = { - // if the child query stage already executed on GPU then we need to keep the - // next operator on GPU in these cases - SQLConf.get.adaptiveExecutionEnabled && (plan.wrapped match { - case _: CustomShuffleReaderExec - | _: ShuffledHashJoinExec - | _: BroadcastHashJoinExec - | _: BroadcastExchangeExec - | _: 
BroadcastNestedLoopJoinExec => true - case _ => false - }) - } - - override def isAqePlan(p: SparkPlan): Boolean = p match { - case _: AdaptiveSparkPlanExec | - _: QueryStageExec | - _: CustomShuffleReaderExec => true - case _ => false - } - - override def isCustomReaderExec(x: SparkPlan): Boolean = x match { - case _: GpuCustomShuffleReaderExec | _: CustomShuffleReaderExec => true - case _ => false - } - - override def aqeShuffleReaderExec: ExecRule[_ <: SparkPlan] = - GpuOverrides.exec[CustomShuffleReaderExec]( - "A wrapper of shuffle query stage", - ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.ARRAY + - TypeSig.STRUCT + TypeSig.MAP + TypeSig.BINARY).nested(), TypeSig.all), - (exec, conf, p, r) => new GpuCustomShuffleReaderMeta(exec, conf, p, r)) - - override def findOperators(plan: SparkPlan, predicate: SparkPlan => Boolean): Seq[SparkPlan] = { - def recurse( - plan: SparkPlan, - predicate: SparkPlan => Boolean, - accum: ListBuffer[SparkPlan]): Seq[SparkPlan] = { - if (predicate(plan)) { - accum += plan - } - plan match { - case a: AdaptiveSparkPlanExec => recurse(a.executedPlan, predicate, accum) - case qs: BroadcastQueryStageExec => recurse(qs.broadcast, predicate, accum) - case qs: ShuffleQueryStageExec => recurse(qs.shuffle, predicate, accum) - case other => other.children.flatMap(p => recurse(p, predicate, accum)).headOption - } - accum - } - recurse(plan, predicate, new ListBuffer[SparkPlan]()) - } - - override def skipAssertIsOnTheGpu(plan: SparkPlan): Boolean = false - - override def shouldFailDivOverflow: Boolean = false - - override def leafNodeDefaultParallelism(ss: SparkSession): Int = { - ss.sparkContext.defaultParallelism - } - - override def v1RepairTableCommand(tableName: TableIdentifier): RunnableCommand = - AlterTableRecoverPartitionsCommand(tableName) - - override def isWindowFunctionExec(plan: SparkPlan): Boolean = plan.isInstanceOf[WindowExecBase] - - override def getScans: Map[Class[_ <: Scan], ScanRule[_ <: Scan]] = Seq( - GpuOverrides.scan[ParquetScan]( - "Parquet parsing", - (a, conf, p, r) => new RapidsParquetScanMeta(a, conf, p, r)), - GpuOverrides.scan[OrcScan]( - "ORC parsing", - (a, conf, p, r) => new RapidsOrcScanMeta(a, conf, p, r)) - ).map(r => (r.getClassFor.asSubclass(classOf[Scan]), r)).toMap - override def hasAliasQuoteFix: Boolean = false - - override def reusedExchangeExecPfn: PartialFunction[SparkPlan, ReusedExchangeExec] = { - case ShuffleQueryStageExec(_, e: ReusedExchangeExec) => e - case BroadcastQueryStageExec(_, e: ReusedExchangeExec) => e - } - - override def int96ParquetRebaseRead(conf: SQLConf): String = - conf.getConf(SQLConf.LEGACY_PARQUET_INT96_REBASE_MODE_IN_READ) - override def int96ParquetRebaseWrite(conf: SQLConf): String = - conf.getConf(SQLConf.LEGACY_PARQUET_INT96_REBASE_MODE_IN_WRITE) - override def int96ParquetRebaseReadKey: String = - SQLConf.LEGACY_PARQUET_INT96_REBASE_MODE_IN_READ.key - override def int96ParquetRebaseWriteKey: String = - SQLConf.LEGACY_PARQUET_INT96_REBASE_MODE_IN_WRITE.key - - override def tryTransformIfEmptyRelation(mode: BroadcastMode): Option[Any] = { - Some(broadcastModeTransform(mode, Array.empty)).filter(isEmptyRelation) - } - - override def isEmptyRelation(relation: Any): Boolean = relation match { - case EmptyHashedRelation => true - case arr: Array[InternalRow] if arr.isEmpty => true - case _ => false - } - - override def ansiCastRule: ExprRule[_ <: Expression] = { - GpuOverrides.expr[AnsiCast]( - "Convert a column of one type of data into another type", - new 
CastChecks { - import TypeSig._ - // nullChecks are the same - - override val booleanChecks: TypeSig = integral + fp + BOOLEAN + STRING + DECIMAL_128 - override val sparkBooleanSig: TypeSig = cpuNumeric + BOOLEAN + STRING - - override val integralChecks: TypeSig = gpuNumeric + BOOLEAN + STRING - override val sparkIntegralSig: TypeSig = cpuNumeric + BOOLEAN + STRING - - override val fpChecks: TypeSig = (gpuNumeric + BOOLEAN + STRING) - .withPsNote(TypeEnum.STRING, fpToStringPsNote) - override val sparkFpSig: TypeSig = cpuNumeric + BOOLEAN + STRING - - override val dateChecks: TypeSig = TIMESTAMP + DATE + STRING - override val sparkDateSig: TypeSig = TIMESTAMP + DATE + STRING - - override val timestampChecks: TypeSig = TIMESTAMP + DATE + STRING - override val sparkTimestampSig: TypeSig = TIMESTAMP + DATE + STRING - - // stringChecks are the same - // binaryChecks are the same - override val decimalChecks: TypeSig = gpuNumeric + STRING - override val sparkDecimalSig: TypeSig = cpuNumeric + BOOLEAN + STRING - - // calendarChecks are the same - - override val arrayChecks: TypeSig = - ARRAY.nested(commonCudfTypes + DECIMAL_128 + NULL + ARRAY + BINARY + STRUCT) + - psNote(TypeEnum.ARRAY, "The array's child type must also support being cast to " + - "the desired child type") - override val sparkArraySig: TypeSig = ARRAY.nested(all) - - override val mapChecks: TypeSig = - MAP.nested(commonCudfTypes + DECIMAL_128 + NULL + ARRAY + BINARY + STRUCT + MAP) + - psNote(TypeEnum.MAP, "the map's key and value must also support being cast to the " + - "desired child types") - override val sparkMapSig: TypeSig = MAP.nested(all) - - override val structChecks: TypeSig = - STRUCT.nested(commonCudfTypes + DECIMAL_128 + NULL + ARRAY + BINARY + STRUCT) + - psNote(TypeEnum.STRUCT, "the struct's children must also support being cast to the " + - "desired child type(s)") - override val sparkStructSig: TypeSig = STRUCT.nested(all) - - override val udtChecks: TypeSig = none - override val sparkUdtSig: TypeSig = UDT - }, - (cast, conf, p, r) => new CastExprMeta[AnsiCast](cast, GpuEvalMode.ANSI, conf = conf, - parent = p, rule = r, doFloatToIntCheck = true, stringToAnsiDate = false)) - } - - override def getExprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]] = Seq( - GpuOverrides.expr[Cast]( - "Convert a column of one type of data into another type", - new CastChecks(), - (cast, conf, p, r) => new CastExprMeta[Cast](cast, - AnsiCastShim.getEvalMode(cast), conf, p, r, - doFloatToIntCheck = true, stringToAnsiDate = false)), - GpuOverrides.expr[Average]( - "Average aggregate operator", - ExprChecks.fullAgg( - TypeSig.DOUBLE + TypeSig.DECIMAL_128, - TypeSig.DOUBLE + TypeSig.DECIMAL_128, - Seq(ParamCheck("input", - TypeSig.integral + TypeSig.fp + TypeSig.DECIMAL_128, - TypeSig.cpuNumeric))), - (a, conf, p, r) => new AggExprMeta[Average](a, conf, p, r) { - override def tagAggForGpu(): Unit = { - GpuOverrides.checkAndTagFloatAgg(a.child.dataType, conf, this) - } - - override def convertToGpu(childExprs: Seq[Expression]): GpuExpression = - GpuAverage(childExprs.head) - - // Average is not supported in ANSI mode right now, no matter the type - override val ansiTypeToCheck: Option[DataType] = None - }), - GpuOverrides.expr[Abs]( - "Absolute value", - ExprChecks.unaryProjectAndAstInputMatchesOutput( - TypeSig.implicitCastsAstTypes, TypeSig.gpuNumeric, - TypeSig.cpuNumeric), - (a, conf, p, r) => new UnaryAstExprMeta[Abs](a, conf, p, r) { - // ANSI support for ABS was added in 3.2.0 SPARK-33275 - override def 
convertToGpu(child: Expression): GpuExpression = GpuAbs(child, false) - }) - ).map(r => (r.getClassFor.asSubclass(classOf[Expression]), r)).toMap - - override def getExecs: Map[Class[_ <: SparkPlan], ExecRule[_ <: SparkPlan]] = { - Seq( - GpuOverrides.exec[WindowInPandasExec]( - "The backend for Window Aggregation Pandas UDF, Accelerates the data transfer between" + - " the Java process and the Python process. It also supports scheduling GPU resources" + - " for the Python process when enabled. For now it only supports row based window frame.", - ExecChecks( - (TypeSig.commonCudfTypes + TypeSig.ARRAY).nested(TypeSig.commonCudfTypes), - TypeSig.all), - (winPy, conf, p, r) => new GpuWindowInPandasExecMetaBase(winPy, conf, p, r) { - override val windowExpressions: Seq[BaseExprMeta[NamedExpression]] = - winPy.windowExpression.map(GpuOverrides.wrapExpr(_, conf, Some(this))) - - override def convertToGpu(): GpuExec = { - GpuWindowInPandasExec( - windowExpressions.map(_.convertToGpu()), - partitionSpec.map(_.convertToGpu()), - // leave ordering expression on the CPU, it's not used for GPU computation - winPy.orderSpec, - childPlans.head.convertIfNeeded() - )(winPy.partitionSpec) - } - }).disabledByDefault("it only supports row based frame for now"), - GpuOverrides.exec[FileSourceScanExec]( - "Reading data from files, often from Hive tables", - ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.STRUCT + TypeSig.MAP + - TypeSig.ARRAY + TypeSig.BINARY + TypeSig.DECIMAL_128).nested(), TypeSig.all), - (fsse, conf, p, r) => new SparkPlanMeta[FileSourceScanExec](fsse, conf, p, r) { - - // Replaces SubqueryBroadcastExec inside dynamic pruning filters with GPU counterpart - // if possible. Instead regarding filters as childExprs of current Meta, we create - // a new meta for SubqueryBroadcastExec. The reason is that the GPU replacement of - // FileSourceScan is independent from the replacement of the partitionFilters. It is - // possible that the FileSourceScan is on the CPU, while the dynamic partitionFilters - // are on the GPU. And vice versa. - private lazy val partitionFilters = { - val convertBroadcast = (bc: SubqueryBroadcastExec) => { - val meta = GpuOverrides.wrapAndTagPlan(bc, conf) - meta.tagForExplain() - meta.convertIfNeeded().asInstanceOf[BaseSubqueryExec] - } - wrapped.partitionFilters.map { filter => - filter.transformDown { - case dpe @ DynamicPruningExpression(inSub: InSubqueryExec) => - inSub.plan match { - case bc: SubqueryBroadcastExec => - dpe.copy(inSub.copy(plan = convertBroadcast(bc))) - case reuse @ ReusedSubqueryExec(bc: SubqueryBroadcastExec) => - dpe.copy(inSub.copy(plan = reuse.copy(convertBroadcast(bc)))) - case _ => - dpe - } - } - } - } - - // partition filters and data filters are not run on the GPU - override val childExprs: Seq[ExprMeta[_]] = Seq.empty - - override def tagPlanForGpu(): Unit = GpuFileSourceScanExec.tagSupport(this) - - override def convertToCpu(): SparkPlan = { - val cpu = wrapped.copy(partitionFilters = partitionFilters) - cpu.copyTagsFrom(wrapped) - cpu - } - - override def convertToGpu(): GpuExec = { - val sparkSession = wrapped.relation.sparkSession - val options = wrapped.relation.options - val (location, alluxioPathsToReplaceMap) = - if (AlluxioCfgUtils.enabledAlluxioReplacementAlgoConvertTime(conf)) { - val shouldReadFromS3 = wrapped.relation.location match { - // Only handle InMemoryFileIndex - // - // skip handle `MetadataLogFileIndex`, from the description of this class: - // it's about the files generated by the `FileStreamSink`. 
- // The streaming data source is not in our scope. - // - // For CatalogFileIndex and FileIndex of `delta` data source, - // need more investigation. - case inMemory: InMemoryFileIndex => - // List all the partitions to reduce overhead, pass in 2 empty filters. - // Subsequent process will do the right partition pruning. - // This operation is fast, because it lists files from the caches and the caches - // already exist in the `InMemoryFileIndex`. - val pds = inMemory.listFiles(Seq.empty, Seq.empty) - AlluxioUtils.shouldReadDirectlyFromS3(conf, pds) - case _ => - false - } - - if (!shouldReadFromS3) { - // it's convert time algorithm and some paths are not large tables - AlluxioUtils.replacePathIfNeeded( - conf, - wrapped.relation, - partitionFilters, - wrapped.dataFilters) - } else { - // convert time algorithm and read large files - (wrapped.relation.location, None) - } - } else { - // it's not convert time algorithm or read large files, do not replace - (wrapped.relation.location, None) - } - - val newRelation = HadoopFsRelation( - location, - wrapped.relation.partitionSchema, - wrapped.relation.dataSchema, - wrapped.relation.bucketSpec, - GpuFileSourceScanExec.convertFileFormat(wrapped.relation.fileFormat), - options)(sparkSession) - - GpuFileSourceScanExec( - newRelation, - wrapped.output, - wrapped.requiredSchema, - partitionFilters, - wrapped.optionalBucketSet, - wrapped.optionalNumCoalescedBuckets, - wrapped.dataFilters, - wrapped.tableIdentifier, - wrapped.disableBucketedScan, - queryUsesInputFile = false, - alluxioPathsToReplaceMap)(conf) - } - }) - ).map(r => (r.getClassFor.asSubclass(classOf[SparkPlan]), r)).toMap - } - - /** dropped by SPARK-34234 */ - override def attachTreeIfSupported[TreeType <: TreeNode[_], A]( - tree: TreeType, - msg: String)( - f: => A - ): A = { - attachTree(tree, msg)(f) - } - - override def getAdaptiveInputPlan(adaptivePlan: AdaptiveSparkPlanExec): SparkPlan = { - adaptivePlan.inputPlan - } - - override def hasCastFloatTimestampUpcast: Boolean = false - - override def isCastingStringToNegDecimalScaleSupported: Boolean = false - - override def supportsColumnarAdaptivePlans: Boolean = false - - override def columnarAdaptivePlan(a: AdaptiveSparkPlanExec, goal: CoalesceSizeGoal): SparkPlan = { - // When the input is an adaptive plan we do not get to see the GPU version until - // the plan is executed and sometimes the plan will have a GpuColumnarToRowExec as the - // final operator and we can bypass this to keep the data columnar by inserting - // the [[AvoidAdaptiveTransitionToRow]] operator here - AvoidAdaptiveTransitionToRow(GpuRowToColumnarExec(a, goal)) - } - - override def reproduceEmptyStringBug: Boolean = false -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/SparkShims.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/SparkShims.scala deleted file mode 100644 index f5b4709b983..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/SparkShims.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2020-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import org.apache.parquet.schema.MessageType - -import org.apache.spark.sql.execution.datasources.parquet.ParquetFilters - -object SparkShimImpl extends Spark31XShims { - override def hasCastFloatTimestampUpcast: Boolean = false - - override def reproduceEmptyStringBug: Boolean = true - - override def getParquetFilters( - schema: MessageType, - pushDownDate: Boolean, - pushDownTimestamp: Boolean, - pushDownDecimal: Boolean, - pushDownStartWith: Boolean, - pushDownInFilterThreshold: Int, - caseSensitive: Boolean, - lookupFileMeta: String => String, - dateTimeRebaseModeFromConf: String): ParquetFilters = { - new ParquetFilters(schema, pushDownDate, pushDownTimestamp, pushDownDecimal, pushDownStartWith, - pushDownInFilterThreshold, caseSensitive) - } -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/TreeNode.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/TreeNode.scala deleted file mode 100644 index 671bdbd45bf..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/TreeNode.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import org.apache.spark.sql.catalyst.expressions.{BinaryExpression, Expression, TernaryExpression, UnaryExpression} -import org.apache.spark.sql.catalyst.plans.logical.Command -import org.apache.spark.sql.execution.{BinaryExecNode, SparkPlan, UnaryExecNode} - -trait ShimExpression extends Expression - -trait ShimUnaryExpression extends UnaryExpression - -trait ShimBinaryExpression extends BinaryExpression - -trait ShimTernaryExpression extends TernaryExpression { - def first: Expression - def second: Expression - def third: Expression - final def children: Seq[Expression] = IndexedSeq(first, second, third) -} - -trait ShimSparkPlan extends SparkPlan - -trait ShimUnaryExecNode extends UnaryExecNode - -trait ShimBinaryExecNode extends BinaryExecNode - -trait ShimUnaryCommand extends Command diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/TypeSigUtil.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/TypeSigUtil.scala deleted file mode 100644 index ab4fab48a45..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/TypeSigUtil.scala +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import com.nvidia.spark.rapids.{TypeEnum, TypeSig, TypeSigUtilBase} - -import org.apache.spark.sql.types.DataType - -/** TypeSig Support for [3.1.1, 3.2.0) */ -object TypeSigUtil extends TypeSigUtilBase { - - /** - * Check if this type of Spark-specific is supported by the plugin or not. - * - * @param check the Supported Types - * @param dataType the data type to be checked - * @return true if it is allowed else false. 
- */ - override def isSupported( - check: TypeEnum.ValueSet, - dataType: DataType): Boolean = false - - /** - * Get all supported types for the spark-specific - * - * @return the all supported typ - */ - override def getAllSupportedTypes(): TypeEnum.ValueSet = - TypeEnum.values - TypeEnum.DAYTIME - TypeEnum.YEARMONTH - - /** - * Return the reason why this type is not supported.\ - * - * @param check the Supported Types - * @param dataType the data type to be checked - * @param notSupportedReason the reason for not supporting - * @return the reason - */ - override def reasonNotSupported( - check: TypeEnum.ValueSet, - dataType: DataType, - notSupportedReason: Seq[String]): Seq[String] = notSupportedReason - - /** - * Map DataType to TypeEnum - * - * @param dataType the data type to be mapped - * @return the TypeEnum - */ - override def mapDataTypeToTypeEnum(dataType: DataType): TypeEnum.Value = TypeEnum.UDT - - /** Get numeric and interval TypeSig */ - override def getNumericAndInterval(): TypeSig = - TypeSig.cpuNumeric + TypeSig.CALENDAR - - /** Get Ansi year-month and day-time TypeSig, begins from 320+ */ - override def getAnsiInterval: TypeSig = TypeSig.none -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/YearParseUtil.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/YearParseUtil.scala deleted file mode 100644 index 54f3b4b60fb..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/YearParseUtil.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import com.nvidia.spark.rapids.{RapidsConf, RapidsMeta} - -object YearParseUtil { - def tagParseStringAsDate(conf: RapidsConf, meta: RapidsMeta[_, _, _]): Unit = { - // NOOP for anything prior to 3.2.0 - } -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/gpuWindows.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/gpuWindows.scala deleted file mode 100644 index 4a27408789c..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/gpuWindows.scala +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2021-2024, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import com.nvidia.spark.rapids.{DataFromReplacementRule, RapidsConf, RapidsMeta} -import com.nvidia.spark.rapids.window.{GpuSpecifiedWindowFrameMetaBase, GpuWindowExpressionMetaBase, ParsedBoundary} - -import org.apache.spark.sql.catalyst.expressions.{Expression, SpecifiedWindowFrame, WindowExpression} -import org.apache.spark.sql.types.{CalendarIntervalType, DataType, DateType, IntegerType, TimestampType} - -class GpuSpecifiedWindowFrameMeta( - windowFrame: SpecifiedWindowFrame, - conf: RapidsConf, - parent: Option[RapidsMeta[_,_,_]], - rule: DataFromReplacementRule) - extends GpuSpecifiedWindowFrameMetaBase(windowFrame, conf, parent, rule) {} - -class GpuWindowExpressionMeta( - windowExpression: WindowExpression, - conf: RapidsConf, - parent: Option[RapidsMeta[_,_,_]], - rule: DataFromReplacementRule) - extends GpuWindowExpressionMetaBase(windowExpression, conf, parent, rule) {} - -object GpuWindowUtil { - - /** - * Check if the type of RangeFrame is valid in GpuWindowSpecDefinition - * @param orderSpecType the first order by data type - * @param ft the first frame boundary data type - * @return true to valid, false to invalid - */ - def isValidRangeFrameType(orderSpecType: DataType, ft: DataType): Boolean = { - (orderSpecType, ft) match { - case (DateType, IntegerType) => true - case (TimestampType, CalendarIntervalType) => true - case (a, b) => a == b - } - } - - def getRangeBoundaryValue(boundary: Expression): ParsedBoundary = boundary match { - case anything => throw new UnsupportedOperationException("Unsupported window frame" + - s" expression $anything") - } -} - diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/spark311/SparkShimServiceProvider.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/spark311/SparkShimServiceProvider.scala deleted file mode 100644 index c764799dba8..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/spark311/SparkShimServiceProvider.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2020-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims.spark311 - -import com.nvidia.spark.rapids.SparkShimVersion - -object SparkShimServiceProvider { - val VERSION = SparkShimVersion(3, 1, 1) - val VERSIONNAMES = Seq(s"$VERSION") -} - -class SparkShimServiceProvider extends com.nvidia.spark.rapids.SparkShimServiceProvider { - - override def getShimVersion: SparkShimVersion = SparkShimServiceProvider.VERSION - - def matchesVersion(version: String): Boolean = { - SparkShimServiceProvider.VERSIONNAMES.contains(version) - } -} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/spark311/RapidsShuffleManager.scala b/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/spark311/RapidsShuffleManager.scala deleted file mode 100644 index 9a65f836163..00000000000 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/spark311/RapidsShuffleManager.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2020-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.spark311 - -import org.apache.spark.SparkConf -import org.apache.spark.sql.rapids.ProxyRapidsShuffleInternalManagerBase - -/** A shuffle manager optimized for the RAPIDS Plugin for Apache Spark. */ -sealed class RapidsShuffleManager( - conf: SparkConf, - isDriver: Boolean -) extends ProxyRapidsShuffleInternalManagerBase(conf, isDriver) - diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/rapids/shims/GpuShuffleBlockResolver.scala b/sql-plugin/src/main/spark311/scala/org/apache/spark/rapids/shims/GpuShuffleBlockResolver.scala deleted file mode 100644 index e4a2451c59b..00000000000 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/rapids/shims/GpuShuffleBlockResolver.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package org.apache.spark.sql.rapids.shims - -import com.nvidia.spark.rapids.ShuffleBufferCatalog - -import org.apache.spark.shuffle.IndexShuffleBlockResolver -import org.apache.spark.sql.rapids.GpuShuffleBlockResolverBase - -class GpuShuffleBlockResolver(resolver: IndexShuffleBlockResolver, catalog: ShuffleBufferCatalog) - extends GpuShuffleBlockResolverBase(resolver, catalog) - diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/rapids/shims/ShuffledBatchRDDUtil.scala b/sql-plugin/src/main/spark311/scala/org/apache/spark/rapids/shims/ShuffledBatchRDDUtil.scala deleted file mode 100644 index 54d79eae6ff..00000000000 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/rapids/shims/ShuffledBatchRDDUtil.scala +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package org.apache.spark.rapids.shims - -import org.apache.spark.{MapOutputTrackerMaster, Partition, ShuffleDependency, SparkEnv, TaskContext} -import org.apache.spark.shuffle.ShuffleReader -import org.apache.spark.sql.execution.{CoalescedPartitionSpec, PartialMapperPartitionSpec, PartialReducerPartitionSpec} -import org.apache.spark.sql.execution.metric.SQLShuffleReadMetricsReporter -import org.apache.spark.sql.rapids.execution.ShuffledBatchRDDPartition -import org.apache.spark.sql.vectorized.ColumnarBatch -import org.apache.spark.storage.{BlockId, BlockManagerId} - -/** - * Some APIs for the ShuffledBatchRDD are only accessible from org.apache.spark... - * This code tries to match the Spark code as closely as possible. Fixing a compiler or IDE - * warning is not always the best thing here because if it changes how it matches up with the - * Spark code it may make it harder to maintain as thing change in Spark. - */ -object ShuffledBatchRDDUtil { - def preferredLocations( - partition: Partition, - dependency: ShuffleDependency[Int, ColumnarBatch, ColumnarBatch]): Seq[String] = { - val tracker = SparkEnv.get.mapOutputTracker.asInstanceOf[MapOutputTrackerMaster] - partition.asInstanceOf[ShuffledBatchRDDPartition].spec match { - case CoalescedPartitionSpec(startReducerIndex, endReducerIndex) => - // TODO order by partition size. 
- startReducerIndex.until(endReducerIndex).flatMap { reducerIndex => - tracker.getPreferredLocationsForShuffle(dependency, reducerIndex) - } - - case PartialReducerPartitionSpec(_, startMapIndex, endMapIndex, _) => - tracker.getMapLocation(dependency, startMapIndex, endMapIndex) - - case PartialMapperPartitionSpec(mapIndex, _, _) => - tracker.getMapLocation(dependency, mapIndex, mapIndex + 1) - } - } - - private def getPartitionSize( - blocksByAddress: Iterator[(BlockManagerId, Seq[(BlockId, Long, Int)])]): Long = { - blocksByAddress.flatMap { case (_, blockInfos) => - blockInfos.map { case (_, size, _) => size } - }.sum - } - - def getReaderAndPartSize( - split: Partition, - context: TaskContext, - dependency: ShuffleDependency[Int, ColumnarBatch, ColumnarBatch], - sqlMetricsReporter: SQLShuffleReadMetricsReporter): - (ShuffleReader[Nothing, Nothing], Long) = { - split.asInstanceOf[ShuffledBatchRDDPartition].spec match { - case CoalescedPartitionSpec(startReducerIndex, endReducerIndex) => - val reader = SparkEnv.get.shuffleManager.getReader( - dependency.shuffleHandle, - startReducerIndex, - endReducerIndex, - context, - sqlMetricsReporter) - val blocksByAddress = SparkEnv.get.mapOutputTracker.getMapSizesByExecutorId( - dependency.shuffleHandle.shuffleId, - 0, - Int.MaxValue, - startReducerIndex, - endReducerIndex) - (reader, getPartitionSize(blocksByAddress)) - case PartialReducerPartitionSpec(reducerIndex, startMapIndex, endMapIndex, _) => - val reader = SparkEnv.get.shuffleManager.getReader( - dependency.shuffleHandle, - startMapIndex, - endMapIndex, - reducerIndex, - reducerIndex + 1, - context, - sqlMetricsReporter) - val blocksByAddress = SparkEnv.get.mapOutputTracker.getMapSizesByExecutorId( - dependency.shuffleHandle.shuffleId, - startMapIndex, - endMapIndex, - reducerIndex, - reducerIndex + 1) - (reader, getPartitionSize(blocksByAddress)) - case PartialMapperPartitionSpec(mapIndex, startReducerIndex, endReducerIndex) => - val reader = SparkEnv.get.shuffleManager.getReader( - dependency.shuffleHandle, - mapIndex, - mapIndex + 1, - startReducerIndex, - endReducerIndex, - context, - sqlMetricsReporter) - val blocksByAddress = SparkEnv.get.mapOutputTracker.getMapSizesByExecutorId( - dependency.shuffleHandle.shuffleId, - mapIndex, - mapIndex + 1, - startReducerIndex, - endReducerIndex) - (reader, getPartitionSize(blocksByAddress)) - } - } -} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/rapids/shims/storage/ShimDiskBlockManager.scala b/sql-plugin/src/main/spark311/scala/org/apache/spark/rapids/shims/storage/ShimDiskBlockManager.scala deleted file mode 100644 index 4822afd76f4..00000000000 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/rapids/shims/storage/ShimDiskBlockManager.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
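The partition-size bookkeeping in the removed ShuffledBatchRDDUtil simply totals the reported block sizes across all block-manager addresses; a self-contained restatement with placeholder names (illustrative only, not part of the patch):

    // Stands in for getPartitionSize: sum every block size across all addresses.
    def sumBlockSizes(blocksByAddress: Iterator[(String, Seq[(String, Long, Int)])]): Long =
      blocksByAddress.flatMap { case (_, blockInfos) =>
        blockInfos.map { case (_, size, _) => size }
      }.sum

    val example = Iterator(
      "exec-1" -> Seq(("shuffle_0_0_0", 1024L, 0), ("shuffle_0_1_0", 2048L, 1)),
      "exec-2" -> Seq(("shuffle_0_2_0", 512L, 0)))
    assert(sumBlockSizes(example) == 3584L)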
- */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package org.apache.spark.rapids.shims.storage - -import org.apache.spark.SparkConf -import org.apache.spark.storage.DiskBlockManager - - -class ShimDiskBlockManager(conf: SparkConf, deleteFilesOnStop: Boolean) - extends DiskBlockManager(conf, deleteFilesOnStop) \ No newline at end of file diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ShimVectorizedColumnReader.scala b/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ShimVectorizedColumnReader.scala deleted file mode 100644 index 23205395210..00000000000 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ShimVectorizedColumnReader.scala +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package org.apache.spark.sql.execution.datasources.parquet.rapids.shims - -import java.time.ZoneId - -import org.apache.parquet.VersionParser.ParsedVersion -import org.apache.parquet.column.ColumnDescriptor -import org.apache.parquet.column.page.PageReadStore -import org.apache.parquet.schema.{GroupType, Type} - -import org.apache.spark.sql.execution.datasources.parquet.{ParentContainerUpdater, ParquetRowConverter, ParquetToSparkSchemaConverter, VectorizedColumnReader} -import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy -import org.apache.spark.sql.types.StructType - -class ShimParquetRowConverter( - schemaConverter: ParquetToSparkSchemaConverter, - parquetType: GroupType, - catalystType: StructType, - convertTz: Option[ZoneId], - datetimeRebaseMode: String, - int96RebaseMode: String, - int96CDPHive3Compatibility: Boolean, - updater: ParentContainerUpdater -) extends ParquetRowConverter( - schemaConverter, - parquetType, - catalystType, - convertTz, - LegacyBehaviorPolicy.withName(datetimeRebaseMode), - LegacyBehaviorPolicy.withName(int96RebaseMode), - updater) - -class ShimVectorizedColumnReader( - index: Int, - columns: java.util.List[ColumnDescriptor], - types: java.util.List[Type], - pageReadStore: PageReadStore, - convertTz: ZoneId, - datetimeRebaseMode: String, - int96RebaseMode: String, - int96CDPHive3Compatibility: Boolean, - writerVersion: ParsedVersion -) extends VectorizedColumnReader( - columns.get(index), - types.get(index).getOriginalType, - pageReadStore.getPageReader(columns.get(index)), - convertTz, - datetimeRebaseMode, - int96RebaseMode) \ No newline at end of file diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/ShimTrampolineUtil.scala b/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/ShimTrampolineUtil.scala deleted file mode 100644 index 
87b6183aae1..00000000000 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/ShimTrampolineUtil.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package org.apache.spark.sql.rapids.execution - -import org.apache.spark.sql.catalyst.plans.physical.{BroadcastMode, IdentityBroadcastMode} -import org.apache.spark.sql.execution.joins.HashedRelationBroadcastMode -import org.apache.spark.sql.types.{DataType, StructType} - -object ShimTrampolineUtil { - - // unionLikeMerge was only added in Spark 3.2 so be bug compatible and call merge - // https://issues.apache.org/jira/browse/SPARK-36673 - def unionLikeMerge(left: DataType, right: DataType): DataType = - StructType.merge(left, right) - - def isSupportedRelation(mode: BroadcastMode): Boolean = mode match { - case _ : HashedRelationBroadcastMode => true - case IdentityBroadcastMode => true - case _ => false - } -} - diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonOutput.scala b/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonOutput.scala deleted file mode 100644 index a6f58095c95..00000000000 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonOutput.scala +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package org.apache.spark.sql.rapids.execution.python.shims - -import java.io.DataInputStream -import java.net.Socket -import java.util.concurrent.atomic.AtomicBoolean - -import com.nvidia.spark.rapids.GpuSemaphore - -import org.apache.spark.{SparkEnv, TaskContext} -import org.apache.spark.api.python._ -import org.apache.spark.sql.rapids.execution.python.GpuArrowOutput -import org.apache.spark.sql.vectorized.ColumnarBatch - -/** - * A trait that can be mixed-in with `GpuBasePythonRunner`. It implements the logic from - * Python (Arrow) to GPU/JVM (ColumnarBatch). 
- */ -trait GpuArrowPythonOutput extends GpuArrowOutput { _: GpuBasePythonRunner[_] => - protected def newReaderIterator( - stream: DataInputStream, - writerThread: WriterThread, - startTime: Long, - env: SparkEnv, - worker: Socket, - releasedOrClosed: AtomicBoolean, - context: TaskContext): Iterator[ColumnarBatch] = { - - new ReaderIterator(stream, writerThread, startTime, env, worker, releasedOrClosed, context) { - val gpuArrowReader = newGpuArrowReader - - protected override def read(): ColumnarBatch = { - if (writerThread.exception.isDefined) { - throw writerThread.exception.get - } - try { - // Because of batching and other things we have to be sure that we release the semaphore - // before any operation that could block. This is because we are using multiple threads - // for a single task and the GpuSemaphore might not wake up both threads associated with - // the task, so a reader can be blocked waiting for data, while a writer is waiting on - // the semaphore - GpuSemaphore.releaseIfNecessary(TaskContext.get()) - if (gpuArrowReader.isStarted && gpuArrowReader.mayHasNext) { - val batch = gpuArrowReader.readNext() - if (batch != null) { - batch - } else { - gpuArrowReader.close() // reach the end, close the reader - read() // read the end signal - } - } else { - stream.readInt() match { - case SpecialLengths.START_ARROW_STREAM => - gpuArrowReader.start(stream) - read() - case SpecialLengths.TIMING_DATA => - handleTimingData() - read() - case SpecialLengths.PYTHON_EXCEPTION_THROWN => - throw handlePythonException() - case SpecialLengths.END_OF_DATA_SECTION => - handleEndOfDataSection() - null - } - } - } catch handleException - } - } - } -} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/AvroUtils.scala b/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/AvroUtils.scala deleted file mode 100644 index 881fd81eda0..00000000000 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/AvroUtils.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package org.apache.spark.sql.rapids.shims - -import com.nvidia.spark.rapids.RapidsMeta - -import org.apache.spark.sql.avro.AvroOptions - -object AvroUtils { - - def tagSupport( - parsedOptions: AvroOptions, - meta: RapidsMeta[_, _, _]): Unit = { - - } - -} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/GpuJsonToStructsShim.scala b/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/GpuJsonToStructsShim.scala deleted file mode 100644 index 6edf6d2049c..00000000000 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/GpuJsonToStructsShim.scala +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright (c) 2023-2024, NVIDIA CORPORATION. 
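The read loop above is careful to give up the GPU semaphore before any call that may block on the Python worker. A minimal sketch of that ordering, with java.util.concurrent.Semaphore standing in for the plugin's GpuSemaphore (illustrative only, not the plugin's API):

    import java.util.concurrent.Semaphore

    // Release before blocking: a reader stuck waiting for Arrow data must never hold the
    // permit that the writer thread of the same task needs in order to make progress.
    def readWithSemaphoreReleased[T](gpuSemaphore: Semaphore, blockingRead: () => T): T = {
      gpuSemaphore.release()  // simplified stand-in for GpuSemaphore.releaseIfNecessary
      blockingRead()          // only now is it safe to wait on the socket
    }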
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package org.apache.spark.sql.rapids.shims - -import ai.rapids.cudf.{ColumnVector, ColumnView, DType, Scalar} -import com.nvidia.spark.rapids.{GpuCast, GpuOverrides, RapidsMeta} -import com.nvidia.spark.rapids.Arm.withResource - -import org.apache.spark.sql.catalyst.json.GpuJsonUtils -import org.apache.spark.sql.catalyst.json.JSONOptions -import org.apache.spark.sql.rapids.ExceptionTimeParserPolicy - -object GpuJsonToStructsShim { - def tagDateFormatSupport(meta: RapidsMeta[_, _, _], dateFormat: Option[String]): Unit = { - dateFormat match { - case None | Some("yyyy-MM-dd") => - case dateFormat => - meta.willNotWorkOnGpu(s"GpuJsonToStructs unsupported dateFormat $dateFormat") - } - } - - def castJsonStringToDate(input: ColumnView, options: JSONOptions): ColumnVector = { - GpuJsonUtils.optionalDateFormatInRead(options) match { - case None | Some("yyyy-MM-dd") => - withResource(Scalar.fromString(" ")) { space => - withResource(input.strip(space)) { trimmed => - GpuCast.castStringToDate(trimmed) - } - } - case other => - // should be unreachable due to GpuOverrides checks - throw new IllegalStateException(s"Unsupported dateFormat $other") - } - } - - def tagDateFormatSupportFromScan(meta: RapidsMeta[_, _, _], dateFormat: Option[String]): Unit = { - tagDateFormatSupport(meta, dateFormat) - } - - def castJsonStringToDateFromScan(input: ColumnView, dt: DType, - dateFormat: Option[String]): ColumnVector = { - dateFormat match { - case None | Some("yyyy-MM-dd") => - withResource(input.strip()) { trimmed => - GpuCast.castStringToDateAnsi(trimmed, ansiMode = - GpuOverrides.getTimeParserPolicy == ExceptionTimeParserPolicy) - } - case other => - // should be unreachable due to GpuOverrides checks - throw new IllegalStateException(s"Unsupported dateFormat $other") - } - } - - - def castJsonStringToTimestamp(input: ColumnView, - options: JSONOptions): ColumnVector = { - withResource(Scalar.fromString(" ")) { space => - withResource(input.strip(space)) { trimmed => - // from_json doesn't respect ansi mode - GpuCast.castStringToTimestamp(trimmed, ansiMode = false) - } - } - } -} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils.scala b/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils.scala deleted file mode 100644 index 7fa269db71a..00000000000 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils.scala +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) 2022-2024, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
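On these shims the JSON date path only handles the absent or default pattern; any other dateFormat is tagged back to the CPU. A self-contained restatement of that check (illustrative only, not part of the patch):

    // Mirrors tagDateFormatSupport above: only the default ISO date pattern stays on the GPU.
    def dateFormatSupportedOnGpu(dateFormat: Option[String]): Boolean = dateFormat match {
      case None | Some("yyyy-MM-dd") => true
      case _ => false
    }

    assert(dateFormatSupportedOnGpu(None))
    assert(dateFormatSupportedOnGpu(Some("yyyy-MM-dd")))
    assert(!dateFormatSupportedOnGpu(Some("dd/MM/yyyy")))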
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package org.apache.spark.sql.rapids.shims - -import org.apache.spark.sql.AnalysisException -import org.apache.spark.sql.catalyst.TableIdentifier -import org.apache.spark.sql.catalyst.trees.Origin -import org.apache.spark.sql.types.{DataType, Decimal, DecimalType} - -object RapidsErrorUtils extends RapidsQueryErrorUtils { - def invalidArrayIndexError(index: Int, numElements: Int, - isElementAtF: Boolean = false): ArrayIndexOutOfBoundsException = { - // Follow the Spark string format before 3.3.0 - new ArrayIndexOutOfBoundsException(s"Invalid index: $index, numElements: $numElements") - } - - def mapKeyNotExistError( - key: String, - keyType: DataType, - origin: Origin): NoSuchElementException = { - // Follow the Spark string format before 3.3.0 - new NoSuchElementException(s"Key $key does not exist.") - } - - def sqlArrayIndexNotStartAtOneError(): RuntimeException = { - new ArrayIndexOutOfBoundsException("SQL array indices start at 1") - } - - def divByZeroError(origin: Origin): ArithmeticException = { - new ArithmeticException("divide by zero") - } - - def divOverflowError(origin: Origin): ArithmeticException = { - new ArithmeticException("Overflow in integral divide.") - } - - def arithmeticOverflowError( - message: String, - hint: String = "", - errorContext: String = ""): ArithmeticException = { - new ArithmeticException(message) - } - - def cannotChangeDecimalPrecisionError( - value: Decimal, - toType: DecimalType, - context: String = ""): ArithmeticException = { - new ArithmeticException(s"${value.toDebugString} cannot be represented as " + - s"Decimal(${toType.precision}, ${toType.scale}).") - } - - def overflowInIntegralDivideError(context: String = ""): ArithmeticException = { - new ArithmeticException("Overflow in integral divide.") - } - - def foundDuplicateFieldInCaseInsensitiveModeError( - requiredFieldName: String, matchedFields: String): Throwable = { - new RuntimeException(s"""Found duplicate field(s) "$requiredFieldName": """ + - s"$matchedFields in case-insensitive mode") - } - - def tableIdentifierExistsError(tableIdentifier: TableIdentifier): Throwable = { - throw new AnalysisException(s"$tableIdentifier already exists.") - } -} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsQueryErrorUtils.scala b/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsQueryErrorUtils.scala deleted file mode 100644 index 266cb4ef54f..00000000000 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsQueryErrorUtils.scala +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright (c) 2024, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
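The removed RapidsErrorUtils deliberately reproduces the pre-3.3 Spark error strings rather than the newer error-class messages; for example (illustrative only, not part of the patch):

    import org.apache.spark.sql.rapids.shims.RapidsErrorUtils

    // Matches the "Follow the Spark string format before 3.3.0" comment above.
    val e = RapidsErrorUtils.invalidArrayIndexError(5, 3)
    assert(e.getMessage == "Invalid index: 5, numElements: 3")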
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ - -package org.apache.spark.sql.rapids.shims - -import org.apache.hadoop.fs.Path -import org.apache.hadoop.hive.ql.ErrorMsg - -import org.apache.spark.sql.AnalysisException -import org.apache.spark.sql.rapids.execution.RapidsAnalysisException -import org.apache.spark.sql.types.StructType - -trait RapidsQueryErrorUtils { - - def outputPathAlreadyExistsError(qualifiedOutputPath: Path): Throwable = { - new AnalysisException(s"path $qualifiedOutputPath already exists.") - } - - def createTableAsSelectWithNonEmptyDirectoryError(tablePath: String, conf: String): Throwable = { - new AnalysisException(s"CREATE-TABLE-AS-SELECT cannot create table with location to a " + - s"non-empty directory $tablePath. To allow overwriting the existing non-empty directory, " + - s"set '$conf' to true.") - } - - def cannotResolveAttributeError(name: String, outputStr: String): Throwable = { - new AnalysisException(s"Unable to resolve $name given [$outputStr]") - } - - def partitionColumnNotSpecifiedError(format: String, partitionColumn: String): Throwable = { - new AnalysisException(s"Failed to resolve the schema for $format for the partition column: " + - s"$partitionColumn. It must be specified manually.") - } - - def dataSchemaNotSpecifiedError(format: String): Throwable = { - new AnalysisException(s"Unable to infer schema for $format. It must be specified manually.") - } - - def schemaNotSpecifiedForSchemaRelationProviderError(className: String): Throwable = { - new AnalysisException(s"A schema needs to be specified when using $className.") - } - - def userSpecifiedSchemaMismatchActualSchemaError( - schema: StructType, - actualSchema: StructType): Throwable = { - new AnalysisException("The user-specified schema doesn't match the actual schema: " + - s"user-specified: ${schema.toDDL}, actual: ${actualSchema.toDDL}. If " + - "you're using DataFrameReader.schema API or creating a table, please do not " + - "specify the schema. Or if you're scanning an existed table, please drop " + - "it and re-create it.") - } - - def dataSchemaNotSpecifiedError(format: String, fileCatalog: String): Throwable = { - new AnalysisException(s"Unable to infer schema for $format at $fileCatalog. " + - "It must be specified manually") - } - - def invalidDataSourceError(className: String): Throwable = { - new AnalysisException(s"$className is not a valid Spark SQL Data Source.") - } - - def orcNotUsedWithHiveEnabledError(): Throwable = { - new AnalysisException( - s"Hive built-in ORC data source must be used with Hive support enabled. " + - s"Please use the native ORC data source by setting 'spark.sql.orc.impl' to 'native'.") - } - - def failedToFindAvroDataSourceError(provider: String): Throwable = { - new AnalysisException( - s"Failed to find data source: $provider. Avro is built-in but external data " + - "source module since Spark 2.4. 
Please deploy the application as per " + - "the deployment section of \"Apache Avro Data Source Guide\".") - } - - def failedToFindKafkaDataSourceError(provider: String): Throwable = { - new AnalysisException( - s"Failed to find data source: $provider. Please deploy the application as " + - "per the deployment section of " + - "\"Structured Streaming + Kafka Integration Guide\".") - } - - def findMultipleDataSourceError(provider: String, sourceNames: Seq[String]): Throwable = { - new AnalysisException( - s"Multiple sources found for $provider " + - s"(${sourceNames.mkString(", ")}), please specify the fully qualified class name.") - } - - def dataPathNotExistError(path: String): Throwable = { - new AnalysisException(s"Path does not exist: $path") - } - - def dynamicPartitionParentError: Throwable = { - throw new RapidsAnalysisException(ErrorMsg.PARTITION_DYN_STA_ORDER.getMsg) - } - - def tableOrViewAlreadyExistsError(tableName: String): Throwable = { - new AnalysisException(s"Table $tableName already exists. You need to drop it first.") - } - - def parquetTypeUnsupportedYetError(parquetType: String): Throwable = { - new AnalysisException(s"Parquet type not yet supported: $parquetType.") - } - - def illegalParquetTypeError(parquetType: String): Throwable = { - new AnalysisException(s"Illegal Parquet type: $parquetType.") - } -} \ No newline at end of file diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedReader.scala b/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedReader.scala deleted file mode 100644 index f3d88ece9f9..00000000000 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedReader.scala +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package org.apache.spark.sql.rapids.shims - -import ai.rapids.cudf.{NvtxColor, NvtxRange} -import com.nvidia.spark.rapids.Arm.withResource - -import org.apache.spark.{MapOutputTracker, SparkEnv, TaskContext} -import org.apache.spark.serializer.SerializerManager -import org.apache.spark.shuffle.ShuffleReadMetricsReporter -import org.apache.spark.sql.rapids.{RapidsShuffleThreadedReaderBase, ShuffleHandleWithMetrics} -import org.apache.spark.storage.BlockManager - -class RapidsShuffleThreadedReader[K, C] ( - startMapIndex: Int, - endMapIndex: Int, - startPartition: Int, - endPartition: Int, - handle: ShuffleHandleWithMetrics[K, C, C], - context: TaskContext, - readMetrics: ShuffleReadMetricsReporter, - maxBytesInFlight: Long, - serializerManager: SerializerManager = SparkEnv.get.serializerManager, - blockManager: BlockManager = SparkEnv.get.blockManager, - mapOutputTracker: MapOutputTracker = SparkEnv.get.mapOutputTracker, - canUseBatchFetch: Boolean = false, - numReaderThreads: Int = 0) - extends RapidsShuffleThreadedReaderBase[K, C]( - handle, - context, - readMetrics, - maxBytesInFlight, - serializerManager = serializerManager, - blockManager = blockManager, - mapOutputTracker = mapOutputTracker, - canUseBatchFetch = canUseBatchFetch, - numReaderThreads = numReaderThreads) { - - override protected def getMapSizes: GetMapSizesResult = { - val shuffleId = handle.shuffleId - val mapSizes = withResource(new NvtxRange("getMapSizesByExecId", NvtxColor.CYAN)) { _ => - mapOutputTracker.getMapSizesByExecutorId( - shuffleId, startMapIndex, endMapIndex, startPartition, endPartition) - } - GetMapSizesResult(mapSizes, canEnableBatchFetch = true) - } -} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedWriter.scala b/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedWriter.scala deleted file mode 100644 index a0b4c7f6518..00000000000 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedWriter.scala +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package org.apache.spark.sql.rapids.shims - -import org.apache.spark.SparkConf -import org.apache.spark.shuffle.ShuffleWriteMetricsReporter -import org.apache.spark.shuffle.api.{ShuffleExecutorComponents, ShuffleMapOutputWriter} -import org.apache.spark.sql.rapids.{RapidsShuffleThreadedWriterBase, ShuffleHandleWithMetrics} -import org.apache.spark.storage.BlockManager - -class RapidsShuffleThreadedWriter[K, V]( - blockManager: BlockManager, - handle: ShuffleHandleWithMetrics[K, V, V], - mapId: Long, - sparkConf: SparkConf, - writeMetrics: ShuffleWriteMetricsReporter, - maxBytesInFlight: Long, - shuffleExecutorComponents: ShuffleExecutorComponents, - numWriterThreads: Int) - extends RapidsShuffleThreadedWriterBase[K, V]( - blockManager, - handle, - mapId, - sparkConf, - writeMetrics, - maxBytesInFlight, - shuffleExecutorComponents, - numWriterThreads) { - - // emptyChecksums: unused in versions of Spark before 3.2.0 - override def doCommitAllPartitions( - writer: ShuffleMapOutputWriter, emptyChecksums: Boolean): Array[Long] = { - writer.commitAllPartitions().getPartitionLengths - } -} - diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala b/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala deleted file mode 100644 index 8a37bd63ca5..00000000000 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package org.apache.spark.sql.rapids.shims - -import ai.rapids.cudf.{ColumnVector, ColumnView, Scalar} - -import org.apache.spark.sql.catalyst.expressions.{Expression, TimeZoneAwareExpression} -import org.apache.spark.sql.rapids.GpuTimeMath - -case class GpuTimeAdd(start: Expression, - interval: Expression, - timeZoneId: Option[String] = None) - extends GpuTimeMath(start, interval, timeZoneId) { - - override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = { - copy(timeZoneId = Option(timeZoneId)) - } - - override def intervalMath(us_s: Scalar, us: ColumnView): ColumnVector = { - us.add(us_s) - } -} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/types/shims/PartitionValueCastShims.scala b/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/types/shims/PartitionValueCastShims.scala deleted file mode 100644 index fc1b4365dad..00000000000 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/types/shims/PartitionValueCastShims.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package org.apache.spark.sql.types.shims - -import java.time.ZoneId - -import org.apache.spark.sql.types.DataType - -object PartitionValueCastShims { - // AnyTimestamp, TimestampNTZTtpe and AnsiIntervalType types are not defined before Spark 3.2.0 - // return false between 311 until 320 - def isSupportedType(dt: DataType): Boolean = false - - def castTo(desiredType: DataType, value: String, zoneId: ZoneId): Any = { - throw new IllegalArgumentException(s"Unexpected type $desiredType") - } -} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/storage/RapidsShuffleBlockFetcherIterator.scala b/sql-plugin/src/main/spark311/scala/org/apache/spark/storage/RapidsShuffleBlockFetcherIterator.scala deleted file mode 100644 index b66ca993402..00000000000 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/storage/RapidsShuffleBlockFetcherIterator.scala +++ /dev/null @@ -1,1050 +0,0 @@ -/* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package org.apache.spark.storage - -import java.io.{InputStream, IOException} -import java.nio.channels.ClosedByInterruptException -import java.util.concurrent.{LinkedBlockingQueue, TimeUnit} -import javax.annotation.concurrent.GuardedBy - -import scala.collection.mutable -import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet, LinkedHashMap, Queue} -import scala.util.{Failure, Success} - -import org.apache.commons.io.IOUtils - -import org.apache.spark.{SparkEnv, SparkException, TaskContext} -import org.apache.spark.internal.{config, Logging} -import org.apache.spark.network.buffer.{FileSegmentManagedBuffer, ManagedBuffer} -import org.apache.spark.network.shuffle._ -import org.apache.spark.network.util.TransportConf -import org.apache.spark.serializer.SerializerManager -import org.apache.spark.shuffle.{FetchFailedException, ShuffleReadMetricsReporter} -import org.apache.spark.storage.RapidsShuffleBlockFetcherIterator.SuccessFetchResult -import org.apache.spark.util.{CompletionIterator, TaskCompletionListener, Utils} - -/** - * Taken mostly verbatim from `ShuffleBlockFetcherIterator` except for - * a change to the ownership of `currentResult` (which contains the netty buffer). - * Instead of this iterator owning the result and clearing it on `next`, the - * `BufferReleasingInputStream` is in charge of that. This allows for multiple threads - * to consume different `BufferReleasingInputStream`s produced from this single iterator. - * - * Compare to https://github.com/apache/spark/blob/branch-3.1: - * ./core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala - */ - -/** - * An iterator that fetches multiple blocks. For local blocks, it fetches from the local block - * manager. For remote blocks, it fetches them using the provided BlockTransferService. - * - * This creates an iterator of (BlockID, InputStream) tuples so the caller can handle blocks - * in a pipelined fashion as they are received. - * - * The implementation throttles the remote fetches so they don't exceed maxBytesInFlight to avoid - * using too much memory. - * - * @param context [[TaskContext]], used for metrics update - * @param shuffleClient [[BlockStoreClient]] for fetching remote blocks - * @param blockManager [[BlockManager]] for reading local blocks - * @param blocksByAddress list of blocks to fetch grouped by the [[BlockManagerId]]. - * For each block we also require two info: 1. the size (in bytes as a long - * field) in order to throttle the memory usage; 2. the mapIndex for this - * block, which indicate the index in the map stage. - * Note that zero-sized blocks are already excluded, which happened in - * [[org.apache.spark.MapOutputTracker.convertMapStatuses]]. - * @param streamWrapper A function to wrap the returned input stream. - * @param maxBytesInFlight max size (in bytes) of remote blocks to fetch at any given point. - * @param maxReqsInFlight max number of remote requests to fetch blocks at any given point. - * @param maxBlocksInFlightPerAddress max number of shuffle blocks being fetched at any given point - * for a given remote host:port. - * @param maxReqSizeShuffleToMem max size (in bytes) of a request that can be shuffled to memory. - * @param detectCorrupt whether to detect any corruption in fetched blocks. - * @param shuffleMetrics used to report shuffle metrics. 
- * @param doBatchFetch fetch continuous shuffle blocks from same executor in batch if the server - * side supports. - */ -private[spark] -final class RapidsShuffleBlockFetcherIterator( - context: TaskContext, - shuffleClient: BlockStoreClient, - blockManager: BlockManager, - blocksByAddress: Iterator[(BlockManagerId, Seq[(BlockId, Long, Int)])], - streamWrapper: (BlockId, InputStream) => InputStream, - maxBytesInFlight: Long, - maxReqsInFlight: Int, - maxBlocksInFlightPerAddress: Int, - maxReqSizeShuffleToMem: Long, - detectCorrupt: Boolean, - detectCorruptUseExtraMemory: Boolean, - shuffleMetrics: ShuffleReadMetricsReporter, - doBatchFetch: Boolean) - extends Iterator[(BlockId, InputStream)] with DownloadFileManager with Logging { - - import RapidsShuffleBlockFetcherIterator._ - - // Make remote requests at most maxBytesInFlight / 5 in length; the reason to keep them - // smaller than maxBytesInFlight is to allow multiple, parallel fetches from up to 5 - // nodes, rather than blocking on reading output from one node. - private val targetRemoteRequestSize = math.max(maxBytesInFlight / 5, 1L) - - /** - * Total number of blocks to fetch. - */ - private[this] var numBlocksToFetch = 0 - - /** - * The number of blocks processed by the caller. The iterator is exhausted when - * [[numBlocksProcessed]] == [[numBlocksToFetch]]. - */ - private[this] var numBlocksProcessed = 0 - - private[this] val startTimeNs = System.nanoTime() - - /** Local blocks to fetch, excluding zero-sized blocks. */ - private[this] val localBlocks = scala.collection.mutable.LinkedHashSet[(BlockId, Int)]() - - /** Host local blockIds to fetch by executors, excluding zero-sized blocks. */ - private[this] val hostLocalBlocksByExecutor = - LinkedHashMap[BlockManagerId, Seq[(BlockId, Long, Int)]]() - - /** Host local blocks to fetch, excluding zero-sized blocks. */ - private[this] val hostLocalBlocks = scala.collection.mutable.LinkedHashSet[(BlockId, Int)]() - - /** - * A queue to hold our results. This turns the asynchronous model provided by - * [[org.apache.spark.network.BlockTransferService]] into a synchronous model (iterator). - */ - private[this] val results = new LinkedBlockingQueue[FetchResult] - - /** - * Queue of fetch requests to issue; we'll pull requests off this gradually to make sure that - * the number of bytes in flight is limited to maxBytesInFlight. - */ - private[this] val fetchRequests = new Queue[FetchRequest] - - /** - * Queue of fetch requests which could not be issued the first time they were dequeued. These - * requests are tried again when the fetch constraints are satisfied. - */ - private[this] val deferredFetchRequests = new HashMap[BlockManagerId, Queue[FetchRequest]]() - - /** Current bytes in flight from our requests */ - private[this] var bytesInFlight = 0L - - /** Current number of requests in flight */ - private[this] var reqsInFlight = 0 - - /** Current number of blocks in flight per host:port */ - private[this] val numBlocksInFlightPerAddress = new HashMap[BlockManagerId, Int]() - - /** - * The blocks that can't be decompressed successfully, it is used to guarantee that we retry - * at most once for those corrupted blocks. - */ - private[this] val corruptedBlocks = mutable.HashSet[BlockId]() - - /** - * Whether the iterator is still active. If isZombie is true, the callback interface will no - * longer place fetched blocks into [[results]]. - */ - @GuardedBy("this") - private[this] var isZombie = false - - /** - * A set to store the files used for shuffling remote huge blocks. 
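The sizing comment above is the key throttling rule in this copied iterator: each remote request is capped at one fifth of maxBytesInFlight so that roughly five requests, ideally to five different hosts, can be in flight at once. A self-contained restatement (illustrative only, not part of the patch):

    // Mirrors targetRemoteRequestSize above; never collapses to zero bytes.
    def targetRemoteRequestSize(maxBytesInFlight: Long): Long = math.max(maxBytesInFlight / 5, 1L)

    assert(targetRemoteRequestSize(48L * 1024 * 1024) == 10066329L) // Spark's 48m default -> ~9.6 MiB per request
    assert(targetRemoteRequestSize(3L) == 1L)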
Files in this set will be - * deleted when cleanup. This is a layer of defensiveness against disk file leaks. - */ - @GuardedBy("this") - private[this] val shuffleFilesSet = mutable.HashSet[DownloadFile]() - - private[this] val onCompleteCallback = new RapidsShuffleFetchCompletionListener(this) - - initialize() - - def resultCount: Int = results.size() - - override def createTempFile(transportConf: TransportConf): DownloadFile = { - // we never need to do any encryption or decryption here, regardless of configs, because that - // is handled at another layer in the code. When encryption is enabled, shuffle data is written - // to disk encrypted in the first place, and sent over the network still encrypted. - new SimpleDownloadFile( - blockManager.diskBlockManager.createTempLocalBlock()._2, transportConf) - } - - override def registerTempFileToClean(file: DownloadFile): Boolean = synchronized { - if (isZombie) { - false - } else { - shuffleFilesSet += file - true - } - } - - /** - * Mark the iterator as zombie, and release all buffers that haven't been deserialized yet. - */ - private[storage] def cleanup(): Unit = { - synchronized { - isZombie = true - } - // Release buffers in the results queue - val iter = results.iterator() - while (iter.hasNext) { - val result = iter.next() - result match { - case SuccessFetchResult(blockId, mapIndex, address, _, buf, _) => - if (address != blockManager.blockManagerId) { - if (hostLocalBlocks.contains(blockId -> mapIndex)) { - shuffleMetrics.incLocalBlocksFetched(1) - shuffleMetrics.incLocalBytesRead(buf.size) - } else { - shuffleMetrics.incRemoteBytesRead(buf.size) - if (buf.isInstanceOf[FileSegmentManagedBuffer]) { - shuffleMetrics.incRemoteBytesReadToDisk(buf.size) - } - shuffleMetrics.incRemoteBlocksFetched(1) - } - } - buf.release() - case _ => - } - } - shuffleFilesSet.foreach { file => - if (!file.delete()) { - logWarning("Failed to cleanup shuffle fetch temp file " + file.path()) - } - } - } - - private[this] def sendRequest(req: FetchRequest): Unit = { - logDebug("Sending request for %d blocks (%s) from %s".format( - req.blocks.size, Utils.bytesToString(req.size), req.address.hostPort)) - bytesInFlight += req.size - reqsInFlight += 1 - - // so we can look up the block info of each blockID - val infoMap = req.blocks.map { - case FetchBlockInfo(blockId, size, mapIndex) => (blockId.toString, (size, mapIndex)) - }.toMap - val remainingBlocks = new HashSet[String]() ++= infoMap.keys - val blockIds = req.blocks.map(_.blockId.toString) - val address = req.address - - val blockFetchingListener = new BlockFetchingListener { - override def onBlockFetchSuccess(blockId: String, buf: ManagedBuffer): Unit = { - // Only add the buffer to results queue if the iterator is not zombie, - // i.e. cleanup() has not been called yet. - RapidsShuffleBlockFetcherIterator.this.synchronized { - if (!isZombie) { - // Increment the ref count because we need to pass this to a different thread. - // This needs to be released after use. 
- buf.retain() - remainingBlocks -= blockId - results.put(new SuccessFetchResult(BlockId(blockId), infoMap(blockId)._2, - address, infoMap(blockId)._1, buf, remainingBlocks.isEmpty)) - logDebug("remainingBlocks: " + remainingBlocks) - } - } - logTrace(s"Got remote block $blockId after ${Utils.getUsedTimeNs(startTimeNs)}") - } - - override def onBlockFetchFailure(blockId: String, e: Throwable): Unit = { - logError(s"Failed to get block(s) from ${req.address.host}:${req.address.port}", e) - results.put(new FailureFetchResult(BlockId(blockId), infoMap(blockId)._2, address, e)) - } - } - - // Fetch remote shuffle blocks to disk when the request is too large. Since the shuffle data is - // already encrypted and compressed over the wire(w.r.t. the related configs), we can just fetch - // the data and write it to file directly. - if (req.size > maxReqSizeShuffleToMem) { - shuffleClient.fetchBlocks(address.host, address.port, address.executorId, blockIds.toArray, - blockFetchingListener, this) - } else { - shuffleClient.fetchBlocks(address.host, address.port, address.executorId, blockIds.toArray, - blockFetchingListener, null) - } - } - - private[this] def partitionBlocksByFetchMode(): ArrayBuffer[FetchRequest] = { - logDebug(s"maxBytesInFlight: $maxBytesInFlight, targetRemoteRequestSize: " - + s"$targetRemoteRequestSize, maxBlocksInFlightPerAddress: $maxBlocksInFlightPerAddress") - - // Partition to local, host-local and remote blocks. Remote blocks are further split into - // FetchRequests of size at most maxBytesInFlight in order to limit the amount of data in flight - val collectedRemoteRequests = new ArrayBuffer[FetchRequest] - var localBlockBytes = 0L - var hostLocalBlockBytes = 0L - var remoteBlockBytes = 0L - - val fallback = FallbackStorage.FALLBACK_BLOCK_MANAGER_ID.executorId - for ((address, blockInfos) <- blocksByAddress) { - if (Seq(blockManager.blockManagerId.executorId, fallback).contains(address.executorId)) { - checkBlockSizes(blockInfos) - val mergedBlockInfos = mergeContinuousShuffleBlockIdsIfNeeded( - blockInfos.map(info => FetchBlockInfo(info._1, info._2, info._3)), doBatchFetch) - numBlocksToFetch += mergedBlockInfos.size - localBlocks ++= mergedBlockInfos.map(info => (info.blockId, info.mapIndex)) - localBlockBytes += mergedBlockInfos.map(_.size).sum - } else if (blockManager.hostLocalDirManager.isDefined && - address.host == blockManager.blockManagerId.host) { - checkBlockSizes(blockInfos) - val mergedBlockInfos = mergeContinuousShuffleBlockIdsIfNeeded( - blockInfos.map(info => FetchBlockInfo(info._1, info._2, info._3)), doBatchFetch) - numBlocksToFetch += mergedBlockInfos.size - val blocksForAddress = - mergedBlockInfos.map(info => (info.blockId, info.size, info.mapIndex)) - hostLocalBlocksByExecutor += address -> blocksForAddress - hostLocalBlocks ++= blocksForAddress.map(info => (info._1, info._3)) - hostLocalBlockBytes += mergedBlockInfos.map(_.size).sum - } else { - remoteBlockBytes += blockInfos.map(_._2).sum - val (_, timeCost) = Utils.timeTakenMs[Unit] { - collectFetchRequests(address, blockInfos, collectedRemoteRequests) - } - logDebug(s"Collected remote fetch requests for $address in $timeCost ms") - } - } - val numRemoteBlocks = collectedRemoteRequests.map(_.blocks.size).sum - val totalBytes = localBlockBytes + remoteBlockBytes + hostLocalBlockBytes - assert(numBlocksToFetch == localBlocks.size + hostLocalBlocks.size + numRemoteBlocks, - s"The number of non-empty blocks $numBlocksToFetch doesn't equal to the number of local " + - s"blocks ${localBlocks.size} + 
the number of host-local blocks ${hostLocalBlocks.size} " + - s"+ the number of remote blocks ${numRemoteBlocks}.") - logInfo(s"Getting $numBlocksToFetch (${Utils.bytesToString(totalBytes)}) non-empty blocks " + - s"including ${localBlocks.size} (${Utils.bytesToString(localBlockBytes)}) local and " + - s"${hostLocalBlocks.size} (${Utils.bytesToString(hostLocalBlockBytes)}) " + - s"host-local and $numRemoteBlocks (${Utils.bytesToString(remoteBlockBytes)}) remote blocks") - collectedRemoteRequests - } - - private def createFetchRequest( - blocks: Seq[FetchBlockInfo], - address: BlockManagerId): FetchRequest = { - logDebug(s"Creating fetch request of ${blocks.map(_.size).sum} at $address " - + s"with ${blocks.size} blocks") - FetchRequest(address, blocks) - } - - private def createFetchRequests( - curBlocks: Seq[FetchBlockInfo], - address: BlockManagerId, - isLast: Boolean, - collectedRemoteRequests: ArrayBuffer[FetchRequest]): ArrayBuffer[FetchBlockInfo] = { - val mergedBlocks = mergeContinuousShuffleBlockIdsIfNeeded(curBlocks, doBatchFetch) - numBlocksToFetch += mergedBlocks.size - val retBlocks = new ArrayBuffer[FetchBlockInfo] - if (mergedBlocks.length <= maxBlocksInFlightPerAddress) { - collectedRemoteRequests += createFetchRequest(mergedBlocks, address) - } else { - mergedBlocks.grouped(maxBlocksInFlightPerAddress).foreach { blocks => - if (blocks.length == maxBlocksInFlightPerAddress || isLast) { - collectedRemoteRequests += createFetchRequest(blocks, address) - } else { - // The last group does not exceed `maxBlocksInFlightPerAddress`. Put it back - // to `curBlocks`. - retBlocks ++= blocks - numBlocksToFetch -= blocks.size - } - } - } - retBlocks - } - - private def collectFetchRequests( - address: BlockManagerId, - blockInfos: Seq[(BlockId, Long, Int)], - collectedRemoteRequests: ArrayBuffer[FetchRequest]): Unit = { - val iterator = blockInfos.iterator - var curRequestSize = 0L - var curBlocks = new ArrayBuffer[FetchBlockInfo]() - - while (iterator.hasNext) { - val (blockId, size, mapIndex) = iterator.next() - assertPositiveBlockSize(blockId, size) - curBlocks += FetchBlockInfo(blockId, size, mapIndex) - curRequestSize += size - // For batch fetch, the actual block in flight should count for merged block. - val mayExceedsMaxBlocks = !doBatchFetch && curBlocks.size >= maxBlocksInFlightPerAddress - if (curRequestSize >= targetRemoteRequestSize || mayExceedsMaxBlocks) { - curBlocks = createFetchRequests(curBlocks.toSeq, address, isLast = false, - collectedRemoteRequests) - curRequestSize = curBlocks.map(_.size).sum - } - } - // Add in the final request - if (curBlocks.nonEmpty) { - createFetchRequests(curBlocks.toSeq, address, isLast = true, collectedRemoteRequests) - } - } - - private def assertPositiveBlockSize(blockId: BlockId, blockSize: Long): Unit = { - if (blockSize < 0) { - throw BlockException(blockId, "Negative block size " + size) - } else if (blockSize == 0) { - throw BlockException(blockId, "Zero-sized blocks should be excluded.") - } - } - - private def checkBlockSizes(blockInfos: Seq[(BlockId, Long, Int)]): Unit = { - blockInfos.foreach { case (blockId, size, _) => assertPositiveBlockSize(blockId, size) } - } - - /** - * Fetch the local blocks while we are fetching remote blocks. This is ok because - * `ManagedBuffer`'s memory is allocated lazily when we create the input stream, so all we - * track in-memory are the ManagedBuffer references themselves. 
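The request-building logic above accumulates blocks for one address until either the byte target or the per-address block cap is reached, then emits a FetchRequest and starts over; the real code has extra handling for batch fetch and for the trailing group, but the essential policy reduces to this sketch (illustrative only, not part of the patch):

    // Split a sequence of block sizes into per-request groups, emitting a request whenever
    // the accumulated bytes reach targetBytes or the block count reaches maxBlocks.
    def splitIntoRequests(sizes: Seq[Long], targetBytes: Long, maxBlocks: Int): Seq[Seq[Long]] = {
      val requests = scala.collection.mutable.ArrayBuffer[Seq[Long]]()
      var current = scala.collection.mutable.ArrayBuffer[Long]()
      var bytes = 0L
      sizes.foreach { s =>
        current += s
        bytes += s
        if (bytes >= targetBytes || current.size >= maxBlocks) {
          requests += current.toSeq
          current = scala.collection.mutable.ArrayBuffer[Long]()
          bytes = 0L
        }
      }
      if (current.nonEmpty) requests += current.toSeq
      requests.toSeq
    }

    // Ten 1 MiB blocks with a 3 MiB target and a 4-block cap -> requests of 3, 3, 3 and 1 blocks.
    assert(splitIntoRequests(Seq.fill(10)(1L << 20), 3L << 20, 4).map(_.size) == Seq(3, 3, 3, 1))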
- */ - private[this] def fetchLocalBlocks(): Unit = { - logDebug(s"Start fetching local blocks: ${localBlocks.mkString(", ")}") - val iter = localBlocks.iterator - while (iter.hasNext) { - val (blockId, mapIndex) = iter.next() - try { - val buf = blockManager.getLocalBlockData(blockId) - shuffleMetrics.incLocalBlocksFetched(1) - shuffleMetrics.incLocalBytesRead(buf.size) - buf.retain() - results.put(new SuccessFetchResult(blockId, mapIndex, blockManager.blockManagerId, - buf.size(), buf, false)) - } catch { - // If we see an exception, stop immediately. - case e: Exception => - e match { - // ClosedByInterruptException is an excepted exception when kill task, - // don't log the exception stack trace to avoid confusing users. - // See: SPARK-28340 - case ce: ClosedByInterruptException => - logError("Error occurred while fetching local blocks, " + ce.getMessage) - case ex: Exception => logError("Error occurred while fetching local blocks", ex) - } - results.put(new FailureFetchResult(blockId, mapIndex, blockManager.blockManagerId, e)) - return - } - } - } - - private[this] def fetchHostLocalBlock( - blockId: BlockId, - mapIndex: Int, - localDirs: Array[String], - blockManagerId: BlockManagerId): Boolean = { - try { - val buf = blockManager.getHostLocalShuffleData(blockId, localDirs) - buf.retain() - results.put(SuccessFetchResult(blockId, mapIndex, blockManagerId, buf.size(), buf, - isNetworkReqDone = false)) - true - } catch { - case e: Exception => - // If we see an exception, stop immediately. - logError(s"Error occurred while fetching local blocks", e) - results.put(FailureFetchResult(blockId, mapIndex, blockManagerId, e)) - false - } - } - - /** - * Fetch the host-local blocks while we are fetching remote blocks. This is ok because - * `ManagedBuffer`'s memory is allocated lazily when we create the input stream, so all we - * track in-memory are the ManagedBuffer references themselves. - */ - private[this] def fetchHostLocalBlocks(hostLocalDirManager: HostLocalDirManager): Unit = { - val cachedDirsByExec = hostLocalDirManager.getCachedHostLocalDirs - val (hostLocalBlocksWithCachedDirs, hostLocalBlocksWithMissingDirs) = { - val (hasCache, noCache) = hostLocalBlocksByExecutor.partition { case (hostLocalBmId, _) => - cachedDirsByExec.contains(hostLocalBmId.executorId) - } - (hasCache.toMap, noCache.toMap) - } - - if (hostLocalBlocksWithMissingDirs.nonEmpty) { - logDebug(s"Asynchronous fetching host-local blocks without cached executors' dir: " + - s"${hostLocalBlocksWithMissingDirs.mkString(", ")}") - - // If the external shuffle service is enabled, we'll fetch the local directories for - // multiple executors from the external shuffle service, which located at the same host - // with the executors, in once. Otherwise, we'll fetch the local directories from those - // executors directly one by one. The fetch requests won't be too much since one host is - // almost impossible to have many executors at the same time practically. 
- val dirFetchRequests = if (blockManager.externalShuffleServiceEnabled) { - val host = blockManager.blockManagerId.host - val port = blockManager.externalShuffleServicePort - Seq((host, port, hostLocalBlocksWithMissingDirs.keys.toArray)) - } else { - hostLocalBlocksWithMissingDirs.keys.map(bmId => (bmId.host, bmId.port, Array(bmId))).toSeq - } - - dirFetchRequests.foreach { case (host, port, bmIds) => - hostLocalDirManager.getHostLocalDirs(host, port, bmIds.map(_.executorId)) { - case Success(dirsByExecId) => - fetchMultipleHostLocalBlocks( - hostLocalBlocksWithMissingDirs.filterKeys(bmIds.contains).toMap, - dirsByExecId, - cached = false) - - case Failure(throwable) => - logError("Error occurred while fetching host local blocks", throwable) - val bmId = bmIds.head - val blockInfoSeq = hostLocalBlocksWithMissingDirs(bmId) - val (blockId, _, mapIndex) = blockInfoSeq.head - results.put(FailureFetchResult(blockId, mapIndex, bmId, throwable)) - } - } - } - - if (hostLocalBlocksWithCachedDirs.nonEmpty) { - logDebug(s"Synchronous fetching host-local blocks with cached executors' dir: " + - s"${hostLocalBlocksWithCachedDirs.mkString(", ")}") - fetchMultipleHostLocalBlocks(hostLocalBlocksWithCachedDirs, cachedDirsByExec, cached = true) - } - } - - private def fetchMultipleHostLocalBlocks( - bmIdToBlocks: Map[BlockManagerId, Seq[(BlockId, Long, Int)]], - localDirsByExecId: Map[String, Array[String]], - cached: Boolean): Unit = { - // We use `forall` because once there's a failed block fetch, `fetchHostLocalBlock` will put - // a `FailureFetchResult` immediately to the `results`. So there's no reason to fetch the - // remaining blocks. - val allFetchSucceeded = bmIdToBlocks.forall { case (bmId, blockInfos) => - blockInfos.forall { case (blockId, _, mapIndex) => - fetchHostLocalBlock(blockId, mapIndex, localDirsByExecId(bmId.executorId), bmId) - } - } - if (allFetchSucceeded) { - logDebug(s"Got host-local blocks from ${bmIdToBlocks.keys.mkString(", ")} " + - s"(${if (cached) "with" else "without"} cached executors' dir) " + - s"in ${Utils.getUsedTimeNs(startTimeNs)}") - } - } - - private[this] def initialize(): Unit = { - // Add a task completion callback (called in both success case and failure case) to cleanup. - context.addTaskCompletionListener(onCompleteCallback) - - // Partition blocks by the different fetch modes: local, host-local and remote blocks. - val remoteRequests = partitionBlocksByFetchMode() - // Add the remote requests into our queue in a random order - fetchRequests ++= Utils.randomize(remoteRequests) - assert ((0 == reqsInFlight) == (0 == bytesInFlight), - "expected reqsInFlight = 0 but found reqsInFlight = " + reqsInFlight + - ", expected bytesInFlight = 0 but found bytesInFlight = " + bytesInFlight) - - // Send out initial requests for blocks, up to our maxBytesInFlight - fetchUpToMaxBytes() - - val numFetches = remoteRequests.size - fetchRequests.size - logInfo(s"Started $numFetches remote fetches in ${Utils.getUsedTimeNs(startTimeNs)}") - - // Get Local Blocks - fetchLocalBlocks() - logDebug(s"Got local blocks in ${Utils.getUsedTimeNs(startTimeNs)}") - - if (hostLocalBlocks.nonEmpty) { - blockManager.hostLocalDirManager.foreach(fetchHostLocalBlocks) - } - } - - override def hasNext: Boolean = numBlocksProcessed < numBlocksToFetch - - /** - * Fetches the next (BlockId, InputStream). If a task fails, the ManagedBuffers - * underlying each InputStream will be freed by the cleanup() method registered with the - * TaskCompletionListener. 
However, callers should close() these InputStreams - * as soon as they are no longer needed, in order to release memory as early as possible. - * - * Throws a FetchFailedException if the next block could not be fetched. - */ - override def next(): (BlockId, InputStream) = { - if (!hasNext) { - throw new NoSuchElementException() - } - - numBlocksProcessed += 1 - - var result: FetchResult = null - var input: InputStream = null - var streamCompressedOrEncrypted: Boolean = false - // Take the next fetched result and try to decompress it to detect data corruption, - // then fetch it one more time if it's corrupt, throw FailureFetchResult if the second fetch - // is also corrupt, so the previous stage could be retried. - // For local shuffle block, throw FailureFetchResult for the first IOException. - while (result == null) { - val startFetchWait = System.nanoTime() - result = results.take() - val fetchWaitTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startFetchWait) - shuffleMetrics.incFetchWaitTime(fetchWaitTime) - - result match { - case SuccessFetchResult(blockId, mapIndex, address, size, buf, isNetworkReqDone) => - if (address != blockManager.blockManagerId) { - if (hostLocalBlocks.contains(blockId -> mapIndex)) { - shuffleMetrics.incLocalBlocksFetched(1) - shuffleMetrics.incLocalBytesRead(buf.size) - } else { - numBlocksInFlightPerAddress(address) = numBlocksInFlightPerAddress(address) - 1 - shuffleMetrics.incRemoteBytesRead(buf.size) - if (buf.isInstanceOf[FileSegmentManagedBuffer]) { - shuffleMetrics.incRemoteBytesReadToDisk(buf.size) - } - shuffleMetrics.incRemoteBlocksFetched(1) - bytesInFlight -= size - } - } - if (isNetworkReqDone) { - reqsInFlight -= 1 - logDebug("Number of requests in flight " + reqsInFlight) - } - - if (buf.size == 0) { - // We will never legitimately receive a zero-size block. All blocks with zero records - // have zero size and all zero-size blocks have no records (and hence should never - // have been requested in the first place). This statement relies on behaviors of the - // shuffle writers, which are guaranteed by the following test cases: - // - // - BypassMergeSortShuffleWriterSuite: "write with some empty partitions" - // - UnsafeShuffleWriterSuite: "writeEmptyIterator" - // - DiskBlockObjectWriterSuite: "commit() and close() without ever opening or writing" - // - // There is not an explicit test for SortShuffleWriter but the underlying APIs that - // uses are shared by the UnsafeShuffleWriter (both writers use DiskBlockObjectWriter - // which returns a zero-size from commitAndGet() in case no records were written - // since the last call. 
- val msg = s"Received a zero-size buffer for block $blockId from $address " + - s"(expectedApproxSize = $size, isNetworkReqDone=$isNetworkReqDone)" - throwFetchFailedException(blockId, mapIndex, address, new IOException(msg)) - } - - val in = try { - buf.createInputStream() - } catch { - // The exception could only be throwed by local shuffle block - case e: IOException => - assert(buf.isInstanceOf[FileSegmentManagedBuffer]) - e match { - case ce: ClosedByInterruptException => - logError("Failed to create input stream from local block, " + - ce.getMessage) - case e: IOException => logError("Failed to create input stream from local block", e) - } - buf.release() - throwFetchFailedException(blockId, mapIndex, address, e) - } - try { - input = streamWrapper(blockId, in) - // If the stream is compressed or wrapped, then we optionally decompress/unwrap the - // first maxBytesInFlight/3 bytes into memory, to check for corruption in that portion - // of the data. But even if 'detectCorruptUseExtraMemory' configuration is off, or if - // the corruption is later, we'll still detect the corruption later in the stream. - streamCompressedOrEncrypted = !input.eq(in) - if (streamCompressedOrEncrypted && detectCorruptUseExtraMemory) { - // TODO: manage the memory used here, and spill it into disk in case of OOM. - input = Utils.copyStreamUpTo(input, maxBytesInFlight / 3) - } - } catch { - case e: IOException => - buf.release() - if (buf.isInstanceOf[FileSegmentManagedBuffer] - || corruptedBlocks.contains(blockId)) { - throwFetchFailedException(blockId, mapIndex, address, e) - } else { - logWarning(s"got an corrupted block $blockId from $address, fetch again", e) - corruptedBlocks += blockId - fetchRequests += FetchRequest( - address, Array(FetchBlockInfo(blockId, size, mapIndex))) - result = null - } - } finally { - // TODO: release the buf here to free memory earlier - if (input == null) { - // Close the underlying stream if there was an issue in wrapping the stream using - // streamWrapper - in.close() - } - } - - case FailureFetchResult(blockId, mapIndex, address, e) => - throwFetchFailedException(blockId, mapIndex, address, e) - } - - // Send fetch requests up to maxBytesInFlight - fetchUpToMaxBytes() - } - - val currentResult = result.asInstanceOf[SuccessFetchResult] - (currentResult.blockId, - new RapidsBufferReleasingInputStream( - input, - this, - currentResult, - currentResult.blockId, - currentResult.mapIndex, - currentResult.address, - detectCorrupt && streamCompressedOrEncrypted)) - } - - def toCompletionIterator: Iterator[(BlockId, InputStream)] = { - CompletionIterator[(BlockId, InputStream), this.type](this, - onCompleteCallback.onComplete(context)) - } - - private def fetchUpToMaxBytes(): Unit = { - // Send fetch requests up to maxBytesInFlight. If you cannot fetch from a remote host - // immediately, defer the request until the next time it can be processed. - - // Process any outstanding deferred fetch requests if possible. - if (deferredFetchRequests.nonEmpty) { - for ((remoteAddress, defReqQueue) <- deferredFetchRequests) { - while (isRemoteBlockFetchable(defReqQueue) && - !isRemoteAddressMaxedOut(remoteAddress, defReqQueue.front)) { - val request = defReqQueue.dequeue() - logDebug(s"Processing deferred fetch request for $remoteAddress with " - + s"${request.blocks.length} blocks") - send(remoteAddress, request) - if (defReqQueue.isEmpty) { - deferredFetchRequests -= remoteAddress - } - } - } - } - - // Process any regular fetch requests if possible. 
- while (isRemoteBlockFetchable(fetchRequests)) { - val request = fetchRequests.dequeue() - val remoteAddress = request.address - if (isRemoteAddressMaxedOut(remoteAddress, request)) { - logDebug(s"Deferring fetch request for $remoteAddress with ${request.blocks.size} blocks") - val defReqQueue = deferredFetchRequests.getOrElse(remoteAddress, new Queue[FetchRequest]()) - defReqQueue.enqueue(request) - deferredFetchRequests(remoteAddress) = defReqQueue - } else { - send(remoteAddress, request) - } - } - - def send(remoteAddress: BlockManagerId, request: FetchRequest): Unit = { - sendRequest(request) - numBlocksInFlightPerAddress(remoteAddress) = - numBlocksInFlightPerAddress.getOrElse(remoteAddress, 0) + request.blocks.size - } - - def isRemoteBlockFetchable(fetchReqQueue: Queue[FetchRequest]): Boolean = { - fetchReqQueue.nonEmpty && - (bytesInFlight == 0 || - (reqsInFlight + 1 <= maxReqsInFlight && - bytesInFlight + fetchReqQueue.front.size <= maxBytesInFlight)) - } - - // Checks if sending a new fetch request will exceed the max no. of blocks being fetched from a - // given remote address. - def isRemoteAddressMaxedOut(remoteAddress: BlockManagerId, request: FetchRequest): Boolean = { - numBlocksInFlightPerAddress.getOrElse(remoteAddress, 0) + request.blocks.size > - maxBlocksInFlightPerAddress - } - } - - private[storage] def throwFetchFailedException( - blockId: BlockId, - mapIndex: Int, - address: BlockManagerId, - e: Throwable) = { - blockId match { - case ShuffleBlockId(shufId, mapId, reduceId) => - throw new FetchFailedException(address, shufId, mapId, mapIndex, reduceId, e) - case ShuffleBlockBatchId(shuffleId, mapId, startReduceId, _) => - throw new FetchFailedException(address, shuffleId, mapId, mapIndex, startReduceId, e) - case _ => - throw new SparkException( - "Failed to get block " + blockId + ", which is not a shuffle block", e) - } - } -} - -/** - * Helper class that ensures a ManagedBuffer is released upon InputStream.close() and - * also detects stream corruption if streamCompressedOrEncrypted is true - */ -private class RapidsBufferReleasingInputStream( - // This is visible for testing - private[storage] val delegate: InputStream, - private val iterator: RapidsShuffleBlockFetcherIterator, - private val currentResult: SuccessFetchResult, - private val blockId: BlockId, - private val mapIndex: Int, - private val address: BlockManagerId, - private val detectCorruption: Boolean) - extends InputStream { - private[this] var closed = false - - @scala.annotation.nowarn("msg=method closeQuietly in class IOUtils is deprecated") - override def read(): Int = { - try { - delegate.read() - } catch { - case e: IOException if detectCorruption => - IOUtils.closeQuietly(this) - iterator.throwFetchFailedException(blockId, mapIndex, address, e) - } - } - - override def close(): Unit = { - if (!closed) { - delegate.close() - currentResult.buf.release() - closed = true - } - } - - override def available(): Int = delegate.available() - - override def mark(readlimit: Int): Unit = delegate.mark(readlimit) - - @scala.annotation.nowarn("msg=method closeQuietly in class IOUtils is deprecated") - override def skip(n: Long): Long = { - try { - delegate.skip(n) - } catch { - case e: IOException if detectCorruption => - IOUtils.closeQuietly(this) - iterator.throwFetchFailedException(blockId, mapIndex, address, e) - } - } - - override def markSupported(): Boolean = delegate.markSupported() - - @scala.annotation.nowarn("msg=method closeQuietly in class IOUtils is deprecated") - override def read(b: 
Array[Byte]): Int = { - try { - delegate.read(b) - } catch { - case e: IOException if detectCorruption => - IOUtils.closeQuietly(this) - iterator.throwFetchFailedException(blockId, mapIndex, address, e) - } - } - - @scala.annotation.nowarn("msg=method closeQuietly in class IOUtils is deprecated") - override def read(b: Array[Byte], off: Int, len: Int): Int = { - try { - delegate.read(b, off, len) - } catch { - case e: IOException if detectCorruption => - IOUtils.closeQuietly(this) - iterator.throwFetchFailedException(blockId, mapIndex, address, e) - } - } - - override def reset(): Unit = delegate.reset() -} - -/** - * A listener to be called at the completion of the ShuffleBlockFetcherIterator - * @param data the ShuffleBlockFetcherIterator to process - */ -private class RapidsShuffleFetchCompletionListener(var data: RapidsShuffleBlockFetcherIterator) - extends TaskCompletionListener { - - override def onTaskCompletion(context: TaskContext): Unit = { - if (data != null) { - data.cleanup() - // Null out the referent here to make sure we don't keep a reference to this - // ShuffleBlockFetcherIterator, after we're done reading from it, to let it be - // collected during GC. Otherwise we can hold metadata on block locations(blocksByAddress) - data = null - } - } - - // Just an alias for onTaskCompletion to avoid confusing - def onComplete(context: TaskContext): Unit = this.onTaskCompletion(context) -} - -object RapidsShuffleBlockFetcherIterator { - /** - * This function is used to merged blocks when doBatchFetch is true. Blocks which have the - * same `mapId` can be merged into one block batch. The block batch is specified by a range - * of reduceId, which implies the continuous shuffle blocks that we can fetch in a batch. - * For example, input blocks like (shuffle_0_0_0, shuffle_0_0_1, shuffle_0_1_0) can be - * merged into (shuffle_0_0_0_2, shuffle_0_1_0_1), and input blocks like (shuffle_0_0_0_2, - * shuffle_0_0_2, shuffle_0_0_3) can be merged into (shuffle_0_0_0_4). - * - * @param blocks blocks to be merged if possible. May contains already merged blocks. - * @param doBatchFetch whether to merge blocks. - * @return the input blocks if doBatchFetch=false, or the merged blocks if doBatchFetch=true. - */ - def mergeContinuousShuffleBlockIdsIfNeeded( - blocks: Seq[FetchBlockInfo], - doBatchFetch: Boolean): Seq[FetchBlockInfo] = { - val result = if (doBatchFetch) { - val curBlocks = new ArrayBuffer[FetchBlockInfo] - val mergedBlockInfo = new ArrayBuffer[FetchBlockInfo] - - def mergeFetchBlockInfo(toBeMerged: ArrayBuffer[FetchBlockInfo]): FetchBlockInfo = { - val startBlockId = toBeMerged.head.blockId.asInstanceOf[ShuffleBlockId] - - // The last merged block may comes from the input, and we can merge more blocks - // into it, if the map id is the same. - def shouldMergeIntoPreviousBatchBlockId = - mergedBlockInfo.last.blockId.asInstanceOf[ShuffleBlockBatchId].mapId == startBlockId.mapId - - val (startReduceId, size) = - if (mergedBlockInfo.nonEmpty && shouldMergeIntoPreviousBatchBlockId) { - // Remove the previous batch block id as we will add a new one to replace it. 
- val removed = mergedBlockInfo.remove(mergedBlockInfo.length - 1) - (removed.blockId.asInstanceOf[ShuffleBlockBatchId].startReduceId, - removed.size + toBeMerged.map(_.size).sum) - } else { - (startBlockId.reduceId, toBeMerged.map(_.size).sum) - } - - FetchBlockInfo( - ShuffleBlockBatchId( - startBlockId.shuffleId, - startBlockId.mapId, - startReduceId, - toBeMerged.last.blockId.asInstanceOf[ShuffleBlockId].reduceId + 1), - size, - toBeMerged.head.mapIndex) - } - - val iter = blocks.iterator - while (iter.hasNext) { - val info = iter.next() - // It's possible that the input block id is already a batch ID. For example, we merge some - // blocks, and then make fetch requests with the merged blocks according to "max blocks per - // request". The last fetch request may be too small, and we give up and put the remaining - // merged blocks back to the input list. - if (info.blockId.isInstanceOf[ShuffleBlockBatchId]) { - mergedBlockInfo += info - } else { - if (curBlocks.isEmpty) { - curBlocks += info - } else { - val curBlockId = info.blockId.asInstanceOf[ShuffleBlockId] - val currentMapId = curBlocks.head.blockId.asInstanceOf[ShuffleBlockId].mapId - if (curBlockId.mapId != currentMapId) { - mergedBlockInfo += mergeFetchBlockInfo(curBlocks) - curBlocks.clear() - } - curBlocks += info - } - } - } - if (curBlocks.nonEmpty) { - mergedBlockInfo += mergeFetchBlockInfo(curBlocks) - } - mergedBlockInfo - } else { - blocks - } - result.toSeq - } - - /** - * The block information to fetch used in FetchRequest. - * @param blockId block id - * @param size estimated size of the block. Note that this is NOT the exact bytes. - * Size of remote block is used to calculate bytesInFlight. - * @param mapIndex the mapIndex for this block, which indicate the index in the map stage. - */ - private[storage] case class FetchBlockInfo( - blockId: BlockId, - size: Long, - mapIndex: Int) - - /** - * A request to fetch blocks from a remote BlockManager. - * @param address remote BlockManager to fetch from. - * @param blocks Sequence of the information for blocks to fetch from the same address. - */ - case class FetchRequest(address: BlockManagerId, blocks: Seq[FetchBlockInfo]) { - val size = blocks.map(_.size).sum - } - - /** - * Result of a fetch from a remote block. - */ - private[storage] sealed trait FetchResult { - val blockId: BlockId - val address: BlockManagerId - } - - /** - * Result of a fetch from a remote block successfully. - * @param blockId block id - * @param mapIndex the mapIndex for this block, which indicate the index in the map stage. - * @param address BlockManager that the block was fetched from. - * @param size estimated size of the block. Note that this is NOT the exact bytes. - * Size of remote block is used to calculate bytesInFlight. - * @param buf `ManagedBuffer` for the content. - * @param isNetworkReqDone Is this the last network request for this host in this fetch request. - */ - private[storage] case class SuccessFetchResult( - blockId: BlockId, - mapIndex: Int, - address: BlockManagerId, - size: Long, - buf: ManagedBuffer, - isNetworkReqDone: Boolean) extends FetchResult { - require(buf != null) - require(size >= 0) - } - - /** - * Result of a fetch from a remote block unsuccessfully. 
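[Editorial note, illustrative only] mergeContinuousShuffleBlockIdsIfNeeded above turns a run of per-reducer block ids that share a mapId into one batch id covering a reduce-id range. A simplified, self-contained sketch of that merge follows; it assumes the input is already ordered and contiguous per map output, as in the scaladoc example. Block and Batch are hypothetical stand-ins for ShuffleBlockId and ShuffleBlockBatchId.

object BlockBatchMergeSketch {
  final case class Block(shuffleId: Int, mapId: Long, reduceId: Int, size: Long)
  final case class Batch(shuffleId: Int, mapId: Long, startReduceId: Int, endReduceId: Int, size: Long)

  // Collapse each run of blocks with the same mapId into a [startReduceId, endReduceId) batch.
  def merge(blocks: Seq[Block]): Seq[Batch] = {
    val out = scala.collection.mutable.ArrayBuffer.empty[Batch]
    var run = List.empty[Block]
    def flush(): Unit = if (run.nonEmpty) {
      val ordered = run.reverse
      out += Batch(ordered.head.shuffleId, ordered.head.mapId,
        ordered.head.reduceId, ordered.last.reduceId + 1, ordered.map(_.size).sum)
      run = Nil
    }
    blocks.foreach { b =>
      if (run.nonEmpty && run.head.mapId != b.mapId) flush()
      run = b :: run
    }
    flush()
    out.toSeq
  }

  def main(args: Array[String]): Unit = {
    val blocks = Seq(Block(0, 0L, 0, 10), Block(0, 0L, 1, 20), Block(0, 1L, 0, 5))
    // Mirrors the scaladoc example: reduce ids [0, 2) for map 0 and [0, 1) for map 1.
    merge(blocks).foreach(println)
  }
}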
- * @param blockId block id - * @param mapIndex the mapIndex for this block, which indicate the index in the map stage - * @param address BlockManager that the block was attempted to be fetched from - * @param e the failure exception - */ - private[storage] case class FailureFetchResult( - blockId: BlockId, - mapIndex: Int, - address: BlockManagerId, - e: Throwable) - extends FetchResult - - def makeIterator( - context: TaskContext, - blockManager: BlockManager, - sparkEnv: SparkEnv, - blocksByAddress: Iterator[(BlockManagerId, Seq[(BlockId, Long, Int)])], - serializerManager: SerializerManager, - readMetrics: ShuffleReadMetricsReporter, - fetchContinuousBlocksInBatch: Boolean): RapidsShuffleBlockFetcherIterator = { - new RapidsShuffleBlockFetcherIterator( - context, - blockManager.blockStoreClient, - blockManager, - blocksByAddress, - serializerManager.wrapStream, - // Note: we use getSizeAsMb when no suffix is provided for backwards compatibility - sparkEnv.conf.get(config.REDUCER_MAX_SIZE_IN_FLIGHT) * 1024 * 1024, // 48mb default per task - sparkEnv.conf.get(config.REDUCER_MAX_REQS_IN_FLIGHT), //Int.MaxValue by default - sparkEnv.conf.get(config.REDUCER_MAX_BLOCKS_IN_FLIGHT_PER_ADDRESS), - sparkEnv.conf.get(config.MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM), - sparkEnv.conf.get(config.SHUFFLE_DETECT_CORRUPT), - sparkEnv.conf.get(config.SHUFFLE_DETECT_CORRUPT_MEMORY), - readMetrics, - fetchContinuousBlocksInBatch) - } -} \ No newline at end of file diff --git a/sql-plugin/src/main/spark312/scala/com/nvidia/spark/rapids/shims/SparkShims.scala b/sql-plugin/src/main/spark312/scala/com/nvidia/spark/rapids/shims/SparkShims.scala deleted file mode 100644 index 673f10fb8e6..00000000000 --- a/sql-plugin/src/main/spark312/scala/com/nvidia/spark/rapids/shims/SparkShims.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2020-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/*** spark-rapids-shim-json-lines -{"spark": "312"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import org.apache.parquet.schema.MessageType - -import org.apache.spark.sql.execution.datasources.parquet.ParquetFilters - -object SparkShimImpl extends Spark31XShims { - override def hasCastFloatTimestampUpcast: Boolean = true - - override def reproduceEmptyStringBug: Boolean = true - - override def getParquetFilters( - schema: MessageType, - pushDownDate: Boolean, - pushDownTimestamp: Boolean, - pushDownDecimal: Boolean, - pushDownStartWith: Boolean, - pushDownInFilterThreshold: Int, - caseSensitive: Boolean, - lookupFileMeta: String => String, - dateTimeRebaseModeFromConf: String): ParquetFilters = { - new ParquetFilters(schema, pushDownDate, pushDownTimestamp, pushDownDecimal, pushDownStartWith, - pushDownInFilterThreshold, caseSensitive) - } -} diff --git a/sql-plugin/src/main/spark312/scala/com/nvidia/spark/rapids/shims/spark312/SparkShimServiceProvider.scala b/sql-plugin/src/main/spark312/scala/com/nvidia/spark/rapids/shims/spark312/SparkShimServiceProvider.scala deleted file mode 100644 index 1454d35e629..00000000000 --- a/sql-plugin/src/main/spark312/scala/com/nvidia/spark/rapids/shims/spark312/SparkShimServiceProvider.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "312"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims.spark312 - -import com.nvidia.spark.rapids.SparkShimVersion - -object SparkShimServiceProvider { - val VERSION = SparkShimVersion(3, 1, 2) - val VERSIONNAMES = Seq(s"$VERSION") -} - -class SparkShimServiceProvider extends com.nvidia.spark.rapids.SparkShimServiceProvider { - - override def getShimVersion: SparkShimVersion = SparkShimServiceProvider.VERSION - - def matchesVersion(version: String): Boolean = { - SparkShimServiceProvider.VERSIONNAMES.contains(version) - } -} diff --git a/sql-plugin/src/main/spark312/scala/com/nvidia/spark/rapids/spark312/RapidsShuffleManager.scala b/sql-plugin/src/main/spark312/scala/com/nvidia/spark/rapids/spark312/RapidsShuffleManager.scala deleted file mode 100644 index a33e56e05a8..00000000000 --- a/sql-plugin/src/main/spark312/scala/com/nvidia/spark/rapids/spark312/RapidsShuffleManager.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "312"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.spark312 - -import org.apache.spark.SparkConf -import org.apache.spark.sql.rapids.ProxyRapidsShuffleInternalManagerBase - -/** A shuffle manager optimized for the RAPIDS Plugin for Apache Spark. */ -sealed class RapidsShuffleManager( - conf: SparkConf, - isDriver: Boolean -) extends ProxyRapidsShuffleInternalManagerBase(conf, isDriver) - diff --git a/sql-plugin/src/main/spark313/scala/com/nvidia/spark/rapids/shims/SparkShims.scala b/sql-plugin/src/main/spark313/scala/com/nvidia/spark/rapids/shims/SparkShims.scala deleted file mode 100644 index f28619b22d2..00000000000 --- a/sql-plugin/src/main/spark313/scala/com/nvidia/spark/rapids/shims/SparkShims.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import org.apache.parquet.schema.MessageType - -import org.apache.spark.sql.execution.datasources.DataSourceUtils -import org.apache.spark.sql.execution.datasources.parquet.ParquetFilters - -object SparkShimImpl extends Spark31XShims { - override def getParquetFilters( - schema: MessageType, - pushDownDate: Boolean, - pushDownTimestamp: Boolean, - pushDownDecimal: Boolean, - pushDownStartWith: Boolean, - pushDownInFilterThreshold: Int, - caseSensitive: Boolean, - lookupFileMeta: String => String, - dateTimeRebaseModeFromConf: String): ParquetFilters = { - val datetimeRebaseMode = DataSourceUtils - .datetimeRebaseMode(lookupFileMeta, dateTimeRebaseModeFromConf) - new ParquetFilters(schema, pushDownDate, pushDownTimestamp, pushDownDecimal, pushDownStartWith, - pushDownInFilterThreshold, caseSensitive, datetimeRebaseMode) - } - - override def hasCastFloatTimestampUpcast: Boolean = true - - override def isCastingStringToNegDecimalScaleSupported: Boolean = true - - override def reproduceEmptyStringBug: Boolean = true -} diff --git a/sql-plugin/src/main/spark313/scala/com/nvidia/spark/rapids/shims/spark313/SparkShimServiceProvider.scala b/sql-plugin/src/main/spark313/scala/com/nvidia/spark/rapids/shims/spark313/SparkShimServiceProvider.scala deleted file mode 100644 index 9dafcdde8c6..00000000000 --- a/sql-plugin/src/main/spark313/scala/com/nvidia/spark/rapids/shims/spark313/SparkShimServiceProvider.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims.spark313 - -import com.nvidia.spark.rapids.SparkShimVersion - -object SparkShimServiceProvider { - val VERSION = SparkShimVersion(3, 1, 3) - val VERSIONNAMES = Seq(s"$VERSION") -} - -class SparkShimServiceProvider extends com.nvidia.spark.rapids.SparkShimServiceProvider { - - override def getShimVersion: SparkShimVersion = SparkShimServiceProvider.VERSION - - def matchesVersion(version: String): Boolean = { - SparkShimServiceProvider.VERSIONNAMES.contains(version) - } -} diff --git a/sql-plugin/src/main/spark313/scala/com/nvidia/spark/rapids/spark313/RapidsShuffleManager.scala b/sql-plugin/src/main/spark313/scala/com/nvidia/spark/rapids/spark313/RapidsShuffleManager.scala deleted file mode 100644 index 3cad33ed523..00000000000 --- a/sql-plugin/src/main/spark313/scala/com/nvidia/spark/rapids/spark313/RapidsShuffleManager.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.spark313 - -import org.apache.spark.SparkConf -import org.apache.spark.sql.rapids.ProxyRapidsShuffleInternalManagerBase - -/** A shuffle manager optimized for the RAPIDS Plugin for Apache Spark. 
*/ -sealed class RapidsShuffleManager( - conf: SparkConf, - isDriver: Boolean -) extends ProxyRapidsShuffleInternalManagerBase(conf, isDriver) diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/AQEUtils.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AQEUtils.scala similarity index 97% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/AQEUtils.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AQEUtils.scala index 9535bd4caf9..5c17d43b463 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/AQEUtils.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AQEUtils.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/AggregationTagging.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregationTagging.scala similarity index 95% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/AggregationTagging.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregationTagging.scala index d024f58d152..59cc47736f9 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/AggregationTagging.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregationTagging.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/AnsiCastShim.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AnsiCastShim.scala similarity index 97% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/AnsiCastShim.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AnsiCastShim.scala index 3250688c666..c6477489a92 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/AnsiCastShim.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AnsiCastShim.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/AnsiUtil.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AnsiUtil.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/AnsiUtil.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AnsiUtil.scala index 623bb80e38f..95260042742 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/AnsiUtil.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AnsiUtil.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/BloomFilterShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BloomFilterShims.scala similarity index 95% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/BloomFilterShims.scala rename to 
sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BloomFilterShims.scala index 93bebf104fe..a0db9f104af 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/BloomFilterShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BloomFilterShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/BucketSpecForHiveShim.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BucketSpecForHiveShim.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/BucketSpecForHiveShim.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BucketSpecForHiveShim.scala index 40ea1d4145a..499251aa32f 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/BucketSpecForHiveShim.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BucketSpecForHiveShim.scala @@ -14,9 +14,6 @@ * limitations under the License. */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/BucketingUtilsShim.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BucketingUtilsShim.scala similarity index 98% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/BucketingUtilsShim.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BucketingUtilsShim.scala index 835464ab88b..a605edb8d85 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/BucketingUtilsShim.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BucketingUtilsShim.scala @@ -14,9 +14,6 @@ * limitations under the License. 
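[Editorial note, illustrative only] The rename hunks around this point all make the same edit: they drop the 311/312/313 entries from a file's spark-rapids-shim-json-lines header, leaving the 320+ entries. As a hedged sketch, such a header can be read as a plain list of Spark build versions; parseHeader below is a hypothetical helper, not part of the project's build tooling.

object ShimJsonLinesSketch {
  // Extracts the quoted version value from each {"spark": "..."} line between the markers.
  def parseHeader(source: String): Seq[String] = {
    val lines = source.linesIterator.map(_.trim).toSeq
    val start = lines.indexWhere(_.startsWith("/*** spark-rapids-shim-json-lines"))
    val end = lines.indexWhere(_.startsWith("spark-rapids-shim-json-lines ***/"))
    if (start < 0 || end < 0 || end <= start) Seq.empty
    else {
      val version = """\{"spark":\s*"([^"]+)"\}""".r
      lines.slice(start + 1, end).collect { case version(v) => v }
    }
  }

  def main(args: Array[String]): Unit = {
    val header =
      """/*** spark-rapids-shim-json-lines
        |{"spark": "320"}
        |{"spark": "321"}
        |spark-rapids-shim-json-lines ***/""".stripMargin
    println(parseHeader(header)) // List(320, 321)
  }
}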
*/ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CastCheckShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CastCheckShims.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CastCheckShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CastCheckShims.scala index 64d6ec26602..5025f999b3b 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CastCheckShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CastCheckShims.scala @@ -16,9 +16,6 @@ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CastingConfigShim.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CastingConfigShim.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CastingConfigShim.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CastingConfigShim.scala index c3703cc81ea..948f62d6297 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CastingConfigShim.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CastingConfigShim.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CharVarcharUtilsShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CharVarcharUtilsShims.scala similarity index 95% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CharVarcharUtilsShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CharVarcharUtilsShims.scala index bd611edc5e2..1ed48e5e4fc 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CharVarcharUtilsShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CharVarcharUtilsShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ColumnDefaultValuesShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ColumnDefaultValuesShims.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ColumnDefaultValuesShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ColumnDefaultValuesShims.scala index f2ce2608cce..8815103e630 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ColumnDefaultValuesShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ColumnDefaultValuesShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CreateDataSourceTableAsSelectCommandMetaShims.scala 
b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CreateDataSourceTableAsSelectCommandMetaShims.scala similarity index 98% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CreateDataSourceTableAsSelectCommandMetaShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CreateDataSourceTableAsSelectCommandMetaShims.scala index d1a26dc80fc..c5d27c1e570 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CreateDataSourceTableAsSelectCommandMetaShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CreateDataSourceTableAsSelectCommandMetaShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala index c04d3b2db29..72d7b5a6f32 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala @@ -14,9 +14,6 @@ * limitations under the License. */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRowBase.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRowBase.scala similarity index 99% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRowBase.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRowBase.scala index e5e0bbd3dc6..fb39bca048a 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRowBase.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRowBase.scala @@ -14,9 +14,6 @@ * limitations under the License. 
*/ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/DateTimeUtilsShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DateTimeUtilsShims.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/DateTimeUtilsShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DateTimeUtilsShims.scala index 327b94a5d27..a4e2e09b0f4 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/DateTimeUtilsShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DateTimeUtilsShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/DecimalArithmeticOverrides.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DecimalArithmeticOverrides.scala similarity index 99% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/DecimalArithmeticOverrides.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DecimalArithmeticOverrides.scala index 547d56747fa..a31f63baea2 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/DecimalArithmeticOverrides.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DecimalArithmeticOverrides.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/DecimalMultiply128.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DecimalMultiply128.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/DecimalMultiply128.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DecimalMultiply128.scala index a237fc13144..f270b6b901f 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/DecimalMultiply128.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DecimalMultiply128.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/DeltaLakeUtils.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DeltaLakeUtils.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/DeltaLakeUtils.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DeltaLakeUtils.scala index 6bdce1011fa..56c2907dd87 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/DeltaLakeUtils.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DeltaLakeUtils.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala 
b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala index d23bfdf0a6f..03dacd3f463 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/FileIndexOptionsShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/FileIndexOptionsShims.scala similarity index 95% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/FileIndexOptionsShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/FileIndexOptionsShims.scala index 85a3a324c97..4c39738e4d0 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/FileIndexOptionsShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/FileIndexOptionsShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GetMapValueMeta.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GetMapValueMeta.scala similarity index 97% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GetMapValueMeta.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GetMapValueMeta.scala index a3a3dafdb5c..6a3150a95b4 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GetMapValueMeta.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GetMapValueMeta.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GetSequenceSize.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GetSequenceSize.scala similarity index 98% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GetSequenceSize.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GetSequenceSize.scala index 8ad2839ab78..32ca03974bf 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GetSequenceSize.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GetSequenceSize.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GlobalLimitShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GlobalLimitShims.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GlobalLimitShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GlobalLimitShims.scala index 5043b0c3ce5..5320686e499 
100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GlobalLimitShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GlobalLimitShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuAggregateInPandasExecMeta.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuAggregateInPandasExecMeta.scala similarity index 98% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuAggregateInPandasExecMeta.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuAggregateInPandasExecMeta.scala index 7bb97012966..2752c89463a 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuAggregateInPandasExecMeta.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuAggregateInPandasExecMeta.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuBroadcastJoinMeta.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuBroadcastJoinMeta.scala similarity index 98% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuBroadcastJoinMeta.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuBroadcastJoinMeta.scala index b9e04808deb..fd527976f8f 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuBroadcastJoinMeta.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuBroadcastJoinMeta.scala @@ -14,9 +14,6 @@ * limitations under the License. 
*/ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuCastShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuCastShims.scala similarity index 95% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuCastShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuCastShims.scala index dfe8a5a528f..9c28b40437a 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuCastShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuCastShims.scala @@ -16,9 +16,6 @@ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuFileFormatDataWriterShim.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuFileFormatDataWriterShim.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuFileFormatDataWriterShim.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuFileFormatDataWriterShim.scala index 3f4829cc9d2..fce7dcbd356 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuFileFormatDataWriterShim.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuFileFormatDataWriterShim.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala similarity index 97% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala index 712a7414216..5c728e31531 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtils.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtils.scala similarity index 98% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtils.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtils.scala index 04aef3defae..dee791ef29b 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtils.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtils.scala @@ -14,9 +14,6 @@ * limitations under the License. 
*/ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuOptimizedCreateHiveTableAsSelectCommandShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOptimizedCreateHiveTableAsSelectCommandShims.scala similarity index 99% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuOptimizedCreateHiveTableAsSelectCommandShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOptimizedCreateHiveTableAsSelectCommandShims.scala index ff9ca51b30a..825ece4c33d 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuOptimizedCreateHiveTableAsSelectCommandShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOptimizedCreateHiveTableAsSelectCommandShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala similarity index 98% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala index fef46c61b73..b0506266aa1 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuTypeShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuTypeShims.scala similarity index 98% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuTypeShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuTypeShims.scala index 2a55034b698..252309f254f 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuTypeShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuTypeShims.scala @@ -14,9 +14,6 @@ * limitations under the License. 
*/ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuWindowInPandasExec.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuWindowInPandasExec.scala similarity index 97% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuWindowInPandasExec.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuWindowInPandasExec.scala index 5ec195bd554..eb65ea3fbef 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/GpuWindowInPandasExec.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuWindowInPandasExec.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/InSubqueryShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/InSubqueryShims.scala similarity index 95% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/InSubqueryShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/InSubqueryShims.scala index 504667b0fb4..91f9e492ec6 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/InSubqueryShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/InSubqueryShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/LegacyBehaviorPolicyShim.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/LegacyBehaviorPolicyShim.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/LegacyBehaviorPolicyShim.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/LegacyBehaviorPolicyShim.scala index 781cac2c580..5c176c48ce2 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/LegacyBehaviorPolicyShim.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/LegacyBehaviorPolicyShim.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/NullOutputStreamShim.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/NullOutputStreamShim.scala similarity index 95% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/NullOutputStreamShim.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/NullOutputStreamShim.scala index bdb732e54b2..1708cf194bb 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/NullOutputStreamShim.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/NullOutputStreamShim.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcProtoWriterShim.scala 
b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcProtoWriterShim.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcProtoWriterShim.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcProtoWriterShim.scala index b0c2b505e40..624f9a7664a 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcProtoWriterShim.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcProtoWriterShim.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcReadingShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcReadingShims.scala similarity index 97% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcReadingShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcReadingShims.scala index fc3be7f6609..9277501e174 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/OrcReadingShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcReadingShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ParquetFieldIdShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetFieldIdShims.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ParquetFieldIdShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetFieldIdShims.scala index 340dce31388..520dca909af 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ParquetFieldIdShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetFieldIdShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ParquetLegacyNanoAsLongShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetLegacyNanoAsLongShims.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ParquetLegacyNanoAsLongShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetLegacyNanoAsLongShims.scala index 0880cc8d24c..bead31833eb 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ParquetLegacyNanoAsLongShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetLegacyNanoAsLongShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ParquetStringPredShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetStringPredShims.scala similarity index 95% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ParquetStringPredShims.scala rename to 
sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetStringPredShims.scala index c1a4a0d4c71..677b0f9fe63 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ParquetStringPredShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetStringPredShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ParquetTimestampNTZShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetTimestampNTZShims.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ParquetTimestampNTZShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetTimestampNTZShims.scala index 026dfe45d15..58ea0e732c2 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ParquetTimestampNTZShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetTimestampNTZShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala index 4a12f723a79..af8d68184e6 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/PythonUDFShim.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/PythonUDFShim.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/PythonUDFShim.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/PythonUDFShim.scala index cd1ebe4b59c..8bed67b6e7b 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/PythonUDFShim.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/PythonUDFShim.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/RaiseErrorShim.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RaiseErrorShim.scala similarity index 97% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/RaiseErrorShim.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RaiseErrorShim.scala index de433d5f270..26ec91dd6f0 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/RaiseErrorShim.scala +++ 
b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RaiseErrorShim.scala @@ -14,9 +14,6 @@ * limitations under the License. */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/RapidsFileSourceMetaUtils.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RapidsFileSourceMetaUtils.scala similarity index 95% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/RapidsFileSourceMetaUtils.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RapidsFileSourceMetaUtils.scala index 06a8016f6b2..fa91d37aa23 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/RapidsFileSourceMetaUtils.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RapidsFileSourceMetaUtils.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimFilePartitionReaderFactory.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimFilePartitionReaderFactory.scala similarity index 95% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimFilePartitionReaderFactory.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimFilePartitionReaderFactory.scala index c0791021c3f..701b94671a9 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimFilePartitionReaderFactory.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimFilePartitionReaderFactory.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimLeafExecNode.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimLeafExecNode.scala similarity index 95% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimLeafExecNode.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimLeafExecNode.scala index 5e569c484cd..7196f3acc62 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/ShimLeafExecNode.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimLeafExecNode.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/Spark31Xuntil33XShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark31Xuntil33XShims.scala similarity index 98% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/Spark31Xuntil33XShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark31Xuntil33XShims.scala index 95ca4019b84..9f788c81dd7 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/Spark31Xuntil33XShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark31Xuntil33XShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": 
"311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/TypeUtilsShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TypeUtilsShims.scala similarity index 95% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/TypeUtilsShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TypeUtilsShims.scala index 4a678371f35..32aecfaca02 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/TypeUtilsShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TypeUtilsShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/extractValueShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/extractValueShims.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/extractValueShims.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/extractValueShims.scala index 1f2514a10d4..8c47d714b49 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shims/extractValueShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/extractValueShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleIterator.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleIterator.scala similarity index 99% rename from sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleIterator.scala rename to sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleIterator.scala index bfad2278344..70942001cae 100644 --- a/sql-plugin/src/main/spark311/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleIterator.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleIterator.scala @@ -16,9 +16,6 @@ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/rapids/shims/GpuShuffleExchangeExec.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleExchangeExec.scala similarity index 98% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/rapids/shims/GpuShuffleExchangeExec.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleExchangeExec.scala index 2dcad0d4226..036b5838daf 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/rapids/shims/GpuShuffleExchangeExec.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleExchangeExec.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala 
similarity index 95% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala index e740930305b..c59f0af0026 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala similarity index 97% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala index ce5ecff513c..b25f7154efd 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/execution/datasources/parquet/ShimCurrentBatchIterator.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/execution/datasources/parquet/ShimCurrentBatchIterator.scala similarity index 99% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/execution/datasources/parquet/ShimCurrentBatchIterator.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/execution/datasources/parquet/ShimCurrentBatchIterator.scala index bf43fbf0454..6e0a0acbc0c 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/execution/datasources/parquet/ShimCurrentBatchIterator.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/execution/datasources/parquet/ShimCurrentBatchIterator.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/execution/rapids/shims/FilePartitionShims.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/execution/rapids/shims/FilePartitionShims.scala similarity index 98% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/execution/rapids/shims/FilePartitionShims.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/execution/rapids/shims/FilePartitionShims.scala index aabb2dd5e36..8b6e43fe60b 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/execution/rapids/shims/FilePartitionShims.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/execution/rapids/shims/FilePartitionShims.scala @@ -16,9 +16,6 @@ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/CommandUtilsShim.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/CommandUtilsShim.scala similarity index 96% rename from 
sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/CommandUtilsShim.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/CommandUtilsShim.scala index 1e1ac57aa60..5d1e20ae18b 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/CommandUtilsShim.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/CommandUtilsShim.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/FileSinkDescShim.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/FileSinkDescShim.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/FileSinkDescShim.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/FileSinkDescShim.scala index 9bceecac524..be7d74bb82c 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/FileSinkDescShim.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/FileSinkDescShim.scala @@ -16,9 +16,6 @@ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/GpuCreateHiveTableAsSelectCommand.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuCreateHiveTableAsSelectCommand.scala similarity index 99% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/GpuCreateHiveTableAsSelectCommand.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuCreateHiveTableAsSelectCommand.scala index 216235accf0..3b33ea5c9ac 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/GpuCreateHiveTableAsSelectCommand.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuCreateHiveTableAsSelectCommand.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/GpuInsertIntoHiveTable.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuInsertIntoHiveTable.scala similarity index 99% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/GpuInsertIntoHiveTable.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuInsertIntoHiveTable.scala index 08be08b9cd6..9dd038b8874 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/GpuInsertIntoHiveTable.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuInsertIntoHiveTable.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/GpuRowBasedHiveGenericUDFShim.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuRowBasedHiveGenericUDFShim.scala similarity 
index 96% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/GpuRowBasedHiveGenericUDFShim.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuRowBasedHiveGenericUDFShim.scala index fe4c37c9ef9..3021ec7e91d 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/GpuRowBasedHiveGenericUDFShim.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuRowBasedHiveGenericUDFShim.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/HiveInspectorsShim.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/HiveInspectorsShim.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/HiveInspectorsShim.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/HiveInspectorsShim.scala index 84d653f1d05..40de4f6d329 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/HiveInspectorsShim.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/HiveInspectorsShim.scala @@ -16,9 +16,6 @@ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/HiveProviderCmdShims.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/HiveProviderCmdShims.scala similarity index 98% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/HiveProviderCmdShims.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/HiveProviderCmdShims.scala index c991b00bec4..930ab0e858c 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/hive/rapids/shims/HiveProviderCmdShims.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/HiveProviderCmdShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/GpuDataSource.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/GpuDataSource.scala similarity index 99% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/GpuDataSource.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/GpuDataSource.scala index 0b9fae05b6a..0bbdc614967 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/GpuDataSource.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/GpuDataSource.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/GpuFileFormatWriter.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/GpuFileFormatWriter.scala similarity index 99% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/GpuFileFormatWriter.scala rename to 
sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/GpuFileFormatWriter.scala index 18446eca8a1..549ac204454 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/GpuFileFormatWriter.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/GpuFileFormatWriter.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/RapidsCachingReader.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/RapidsCachingReader.scala similarity index 99% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/RapidsCachingReader.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/RapidsCachingReader.scala index d68305655cd..3334187bb16 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/RapidsCachingReader.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/RapidsCachingReader.scala @@ -16,9 +16,6 @@ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/aggregate/aggregateFunctions.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/aggregate/aggregateFunctions.scala similarity index 97% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/aggregate/aggregateFunctions.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/aggregate/aggregateFunctions.scala index 767665b97c4..344a4898b24 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/aggregate/aggregateFunctions.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/aggregate/aggregateFunctions.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/arithmetic.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/arithmetic.scala similarity index 98% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/arithmetic.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/arithmetic.scala index f38af394591..73992d1ecd0 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/arithmetic.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/arithmetic.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastHashJoinExec.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastHashJoinExec.scala similarity index 98% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastHashJoinExec.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastHashJoinExec.scala index efc8fc19147..c6d82e73be1 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastHashJoinExec.scala +++ 
b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastHashJoinExec.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastNestedLoopJoinExec.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastNestedLoopJoinExec.scala similarity index 99% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastNestedLoopJoinExec.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastNestedLoopJoinExec.scala index 059a7e1426b..901dc83d576 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastNestedLoopJoinExec.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastNestedLoopJoinExec.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/GpuShuffleMeta.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/GpuShuffleMeta.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/GpuShuffleMeta.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/GpuShuffleMeta.scala index 84f3313452f..4cd1aebdd20 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/GpuShuffleMeta.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/GpuShuffleMeta.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/GpuSubqueryBroadcastMeta.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/GpuSubqueryBroadcastMeta.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/GpuSubqueryBroadcastMeta.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/GpuSubqueryBroadcastMeta.scala index 9bcfa33ab87..d72cea9e168 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/GpuSubqueryBroadcastMeta.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/GpuSubqueryBroadcastMeta.scala @@ -14,9 +14,6 @@ * limitations under the License. 
*/ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonRunner.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonRunner.scala similarity index 98% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonRunner.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonRunner.scala index 977c755712a..32818f52ee0 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonRunner.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonRunner.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuBasePythonRunner.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuBasePythonRunner.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuBasePythonRunner.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuBasePythonRunner.scala index e7245db64e4..971b0eedd0e 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuBasePythonRunner.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuBasePythonRunner.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuCoGroupedArrowPythonRunner.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuCoGroupedArrowPythonRunner.scala similarity index 98% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuCoGroupedArrowPythonRunner.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuCoGroupedArrowPythonRunner.scala index 68112676a2b..4835b3d4b86 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuCoGroupedArrowPythonRunner.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuCoGroupedArrowPythonRunner.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala similarity index 97% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala rename to 
sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala index 9df93a9d11b..a5a8af5b8a0 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/WritePythonUDFUtils.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/WritePythonUDFUtils.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/WritePythonUDFUtils.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/WritePythonUDFUtils.scala index aacf972e7e0..8ff6cdcbde6 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/execution/python/shims/WritePythonUDFUtils.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/WritePythonUDFUtils.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/ArrowUtilsShim.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/ArrowUtilsShim.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/ArrowUtilsShim.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/ArrowUtilsShim.scala index 63f132bd2f8..8496061dc86 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/ArrowUtilsShim.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/ArrowUtilsShim.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/DataTypeUtilsShim.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/DataTypeUtilsShim.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/DataTypeUtilsShim.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/DataTypeUtilsShim.scala index 41aa0807646..28839326183 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/DataTypeUtilsShim.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/DataTypeUtilsShim.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/GpuAscii.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/GpuAscii.scala similarity index 97% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/GpuAscii.scala rename to 
sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/GpuAscii.scala index 24d33cc7155..6ae300cb108 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/GpuAscii.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/GpuAscii.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/GpuCreateDataSourceTableAsSelectCommandShims.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/GpuCreateDataSourceTableAsSelectCommandShims.scala similarity index 99% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/GpuCreateDataSourceTableAsSelectCommandShims.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/GpuCreateDataSourceTableAsSelectCommandShims.scala index f5f65dce10a..c365ebe5c5d 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/GpuCreateDataSourceTableAsSelectCommandShims.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/GpuCreateDataSourceTableAsSelectCommandShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/GpuMapInPandasExecMeta.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/GpuMapInPandasExecMeta.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/GpuMapInPandasExecMeta.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/GpuMapInPandasExecMeta.scala index 05096f6a41e..9482b8f594f 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/GpuMapInPandasExecMeta.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/GpuMapInPandasExecMeta.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsHadoopWriterUtils.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsHadoopWriterUtils.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsHadoopWriterUtils.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsHadoopWriterUtils.scala index 20a63c70bb4..5c58ef485f6 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/RapidsHadoopWriterUtils.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsHadoopWriterUtils.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/SchemaUtilsShims.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/SchemaUtilsShims.scala similarity index 97% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/SchemaUtilsShims.scala rename to 
sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/SchemaUtilsShims.scala index fa806582664..3052d06e8da 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/SchemaUtilsShims.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/SchemaUtilsShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/SparkUpgradeExceptionShims.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/SparkUpgradeExceptionShims.scala similarity index 96% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/SparkUpgradeExceptionShims.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/SparkUpgradeExceptionShims.scala index d67d078c107..61b1b2d2267 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/SparkUpgradeExceptionShims.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/SparkUpgradeExceptionShims.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/misc.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/misc.scala similarity index 97% rename from sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/misc.scala rename to sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/misc.scala index 1ab58ddcbb6..2630f9f83ab 100644 --- a/sql-plugin/src/main/spark311/scala/org/apache/spark/sql/rapids/shims/misc.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/misc.scala @@ -14,9 +14,6 @@ * limitations under the License. */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} {"spark": "321cdh"} diff --git a/sql-plugin/src/test/spark311/scala/com/nvidia/spark/rapids/shims/spark311/SparkShimsSuite.scala b/sql-plugin/src/test/spark311/scala/com/nvidia/spark/rapids/shims/spark311/SparkShimsSuite.scala deleted file mode 100644 index 34c598e83b3..00000000000 --- a/sql-plugin/src/test/spark311/scala/com/nvidia/spark/rapids/shims/spark311/SparkShimsSuite.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims.spark311 - -import com.nvidia.spark.rapids._ -import org.scalatest.funsuite.AnyFunSuite - -class SparkShimsSuite extends AnyFunSuite with FQSuiteName { - test("spark shims version") { - assert(ShimLoader.getShimVersion === SparkShimVersion(3, 1, 1)) - } - - test("shuffle manager class") { - assert(ShimLoader.getRapidsShuffleManagerClass === - classOf[com.nvidia.spark.rapids.spark311.RapidsShuffleManager].getCanonicalName) - } -} diff --git a/sql-plugin/src/test/spark312/scala/com/nvidia/spark/rapids/shims/spark312/SparkShimsSuite.scala b/sql-plugin/src/test/spark312/scala/com/nvidia/spark/rapids/shims/spark312/SparkShimsSuite.scala deleted file mode 100644 index 075394bb2b1..00000000000 --- a/sql-plugin/src/test/spark312/scala/com/nvidia/spark/rapids/shims/spark312/SparkShimsSuite.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "312"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims.spark312 - -import com.nvidia.spark.rapids._ -import org.scalatest.funsuite.AnyFunSuite - -class SparkShimsSuite extends AnyFunSuite with FQSuiteName { - test("spark shims version") { - assert(ShimLoader.getShimVersion === SparkShimVersion(3, 1, 2)) - } - - test("shuffle manager class") { - assert(ShimLoader.getRapidsShuffleManagerClass === - classOf[com.nvidia.spark.rapids.spark312.RapidsShuffleManager].getCanonicalName) - } -} diff --git a/sql-plugin/src/test/spark313/scala/com/nvidia/spark/rapids/shims/spark313/SparkShimsSuite.scala b/sql-plugin/src/test/spark313/scala/com/nvidia/spark/rapids/shims/spark313/SparkShimsSuite.scala deleted file mode 100644 index fd663661e8c..00000000000 --- a/sql-plugin/src/test/spark313/scala/com/nvidia/spark/rapids/shims/spark313/SparkShimsSuite.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/*** spark-rapids-shim-json-lines -{"spark": "313"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims.spark313 - -import com.nvidia.spark.rapids._ -import org.scalatest.funsuite.AnyFunSuite - -class SparkShimsSuite extends AnyFunSuite with FQSuiteName { - test("spark shims version") { - assert(ShimLoader.getShimVersion === SparkShimVersion(3, 1, 3)) - } - - test("shuffle manager class") { - assert(ShimLoader.getRapidsShuffleManagerClass === - classOf[com.nvidia.spark.rapids.spark313.RapidsShuffleManager].getCanonicalName) - } -} diff --git a/tests/src/test/spark311/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala b/tests/src/test/spark311/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala deleted file mode 100644 index 6f6f49f113b..00000000000 --- a/tests/src/test/spark311/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2023, NVIDIA CORPORATION. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} -{"spark": "321cdh"} -{"spark": "330cdh"} -{"spark": "332cdh"} -spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims - -import java.util.Objects - -import org.apache.orc._ - -object OrcStatisticShim { - def supports(left: ColumnStatistics, right: ColumnStatistics): Boolean = (left, right) match { - case (_: DateColumnStatistics, _: DateColumnStatistics) => true - case (_: StringColumnStatistics, _: StringColumnStatistics) => true - case _ => false - } - - def equals(left: ColumnStatistics, right: ColumnStatistics): Boolean = (left, right) match { - // have no CollectionColumnStatistics for this shim - case (dateStat: DateColumnStatistics, otherDateStat: DateColumnStatistics) => - Objects.equals(dateStat.getMinimum, otherDateStat.getMinimum) && - Objects.equals(dateStat.getMaximum, otherDateStat.getMaximum) - case (strStat: StringColumnStatistics, otherStrStat: StringColumnStatistics) => - Objects.equals(strStat.getMinimum, otherStrStat.getMinimum) && - Objects.equals(strStat.getMaximum, otherStrStat.getMaximum) && - Objects.equals(strStat.getSum, otherStrStat.getSum) - } -} diff --git a/tests/src/test/spark311/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleTestHelper.scala b/tests/src/test/spark320/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleTestHelper.scala similarity index 99% rename from tests/src/test/spark311/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleTestHelper.scala rename to tests/src/test/spark320/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleTestHelper.scala index 73d081137cd..ab303d8098e 100644 --- a/tests/src/test/spark311/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleTestHelper.scala +++ b/tests/src/test/spark320/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleTestHelper.scala @@ -15,9 +15,6 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "311"} -{"spark": "312"} -{"spark": "313"} {"spark": "320"} {"spark": "321"} 
{"spark": "321cdh"} From 79e95c1d8f5f06fdfd057df27fe7f2a68fa28a95 Mon Sep 17 00:00:00 2001 From: Tim Liu Date: Tue, 9 Jul 2024 15:16:20 +0800 Subject: [PATCH 2/9] Drop spark31x shims in the build scripts and pom files Signed-off-by: Tim Liu --- aggregator/pom.xml | 70 +----------------- api_validation/auditAllVersions.sh | 2 +- build/buildall | 6 +- build/make-scala-version-build-files.sh | 4 +- dist/build/package-parallel-worlds.py | 6 +- dist/maven-antrun/build-parallel-worlds.xml | 2 +- dist/pom.xml | 3 +- dist/scripts/binary-dedupe.sh | 8 +- ...txt => unshimmed-common-from-spark320.txt} | 0 jenkins/spark-nightly-build.sh | 2 +- jenkins/spark-premerge-build.sh | 4 +- jenkins/spark-tests.sh | 2 +- jenkins/version-def.sh | 2 +- pom.xml | 74 ++----------------- scala2.13/aggregator/pom.xml | 70 +----------------- scala2.13/dist/pom.xml | 3 +- scala2.13/pom.xml | 73 +----------------- .../com/nvidia/spark/rapids/ShimLoader.scala | 4 +- ...mmedExecutionPlanCaptureCallbackImpl.scala | 2 +- .../rapids/timezone/TimeZonePerfSuite.scala | 2 +- 20 files changed, 35 insertions(+), 304 deletions(-) rename dist/{unshimmed-common-from-spark311.txt => unshimmed-common-from-spark320.txt} (100%) diff --git a/aggregator/pom.xml b/aggregator/pom.xml index 8cf881419c9..4f0ea3f6c16 100644 --- a/aggregator/pom.xml +++ b/aggregator/pom.xml @@ -252,79 +252,11 @@ - release311 + release320 true - - buildver - 311 - - - - - com.nvidia - rapids-4-spark-delta-stub_${scala.binary.version} - ${project.version} - ${spark.version.classifier} - - - - - release312 - - - buildver - 312 - - - - - com.nvidia - rapids-4-spark-delta-stub_${scala.binary.version} - ${project.version} - ${spark.version.classifier} - - - - - release313 - - - buildver - 313 - - - - - com.nvidia - rapids-4-spark-delta-stub_${scala.binary.version} - ${project.version} - ${spark.version.classifier} - - - - - release314 - - - buildver - 314 - - - - - com.nvidia - rapids-4-spark-delta-stub_${scala.binary.version} - ${project.version} - ${spark.version.classifier} - - - - - release320 - buildver 320 diff --git a/api_validation/auditAllVersions.sh b/api_validation/auditAllVersions.sh index 5deddacec65..a109b7c7920 100644 --- a/api_validation/auditAllVersions.sh +++ b/api_validation/auditAllVersions.sh @@ -14,4 +14,4 @@ # limitations under the License. set -ex -mvn scala:run -P spark311 +mvn scala:run -P spark320 diff --git a/build/buildall b/build/buildall index b3c473be141..3c3c8a045ce 100755 --- a/build/buildall +++ b/build/buildall @@ -274,8 +274,8 @@ export -f build_single_shim # Install all the versions for DIST_PROFILE # First build the aggregator module for all SPARK_SHIM_VERSIONS in parallel skipping expensive plugins that -# - either deferred to 311 because the check is identical in all shim profiles such as scalastyle -# - or deferred to 311 because we currently don't require it per shim such as scaladoc generation +# - either deferred to 320 because the check is identical in all shim profiles such as scalastyle +# - or deferred to 320 because we currently don't require it per shim such as scaladoc generation # - or there is a dedicated step to run against a particular shim jar such as unit tests, in # the near future we will run unit tests against a combined multi-shim jar to catch classloading # regressions even before pytest-based integration_tests @@ -296,7 +296,7 @@ time ( fi # This used to resume from dist. 
However, without including aggregator in the build # the build does not properly initialize spark.version property via buildver profiles - # in the root pom, and we get a missing spark311 dependency even for --profile=312,321 + # in the root pom, and we get a missing spark320 dependency even for --profile=320,321 # where the build does not require it. Moving it to aggregator resolves this issue with # a negligible increase of the build time by ~2 seconds. joinShimBuildFrom="aggregator" diff --git a/build/make-scala-version-build-files.sh b/build/make-scala-version-build-files.sh index c1ca50b0551..8253f7eb963 100755 --- a/build/make-scala-version-build-files.sh +++ b/build/make-scala-version-build-files.sh @@ -20,7 +20,7 @@ set -e VALID_VERSIONS=( 2.13 ) declare -A DEFAULT_SPARK -DEFAULT_SPARK[2.12]="spark311" +DEFAULT_SPARK[2.12]="spark320" DEFAULT_SPARK[2.13]="spark330" usage() { @@ -94,4 +94,4 @@ sed_i '//,/[0-9]*\.[0-9]*/,/[0-9]*\.[0-9]*\.[0-9]*[0-9]*\.[0-9]*\.[0-9]*'$SCALA_VERSION' + includesfile="${spark.rapids.source.basedir}/${rapids.module}/unshimmed-common-from-spark320.txt"/> diff --git a/dist/pom.xml b/dist/pom.xml index ecc2018baea..814b2bf63db 100644 --- a/dist/pom.xml +++ b/dist/pom.xml @@ -145,7 +145,6 @@ minimumFeatureVersionMix - 312, 320, 321cdh, 330, @@ -389,7 +388,7 @@ self.log("... OK") - + diff --git a/dist/scripts/binary-dedupe.sh b/dist/scripts/binary-dedupe.sh index 970840f59e3..3a9df845568 100755 --- a/dist/scripts/binary-dedupe.sh +++ b/dist/scripts/binary-dedupe.sh @@ -131,14 +131,14 @@ mv "$SPARK_SHARED_DIR" parallel-world/ # identical regardless of the Spark-version-specific jar. # # At this point the duplicate classes have not been removed from version-specific jar -# locations such as parallel-world/spark312. +# locations such as parallel-world/spark321. # For each unshimmed class file look for all of its copies inside /spark[34]* and # and count the number of distinct checksums. There are two representative cases # 1) The class is contributed to the unshimmed location via the unshimmed-from-each-spark34 list. These are classes # carrying the shim classifier in their package name such as -# com.nvidia.spark.rapids.spark312.RapidsShuffleManager. They are unique by construction, -# and will have zero copies in any non-spark312 shims. Although such classes are currently excluded from -# being copied to the /spark312 Parallel World we keep the algorithm below general without assuming this. +# com.nvidia.spark.rapids.spark321.RapidsShuffleManager. They are unique by construction, +# and will have zero copies in any non-spark321 shims. Although such classes are currently excluded from +# being copied to the /spark321 Parallel World we keep the algorithm below general without assuming this. # # 2) The class is contributed to the unshimmed location via unshimmed-common. These are classes that # that have the same package and class name across all parallel worlds. 
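The check described in the two cases above reduces to counting distinct checksums of a class across the per-shim directories. A minimal sketch of that idea, kept separate from binary-dedupe.sh itself and using a hypothetical class path and sha1sum as the checksum tool, could look like:

```bash
# Sketch only: count how many distinct checksums one unshimmed class has across
# the per-shim parallel worlds (the class path below is hypothetical).
cls="com/nvidia/spark/rapids/SomeUnshimmedClass.class"
find parallel-world/spark3* -path "*/${cls}" -exec sha1sum {} + \
  | awk '{print $1}' | sort -u | wc -l
# A count of 1 means every shim carries bitwise-identical bytecode for this class,
# so a single unshimmed copy suffices; a larger count means the copies diverge.
```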
diff --git a/dist/unshimmed-common-from-spark311.txt b/dist/unshimmed-common-from-spark320.txt similarity index 100% rename from dist/unshimmed-common-from-spark311.txt rename to dist/unshimmed-common-from-spark320.txt diff --git a/jenkins/spark-nightly-build.sh b/jenkins/spark-nightly-build.sh index c5ef53da47d..51e1f7d01e5 100755 --- a/jenkins/spark-nightly-build.sh +++ b/jenkins/spark-nightly-build.sh @@ -34,7 +34,7 @@ MVN="mvn -Dmaven.wagon.http.retryHandler.count=3 -DretryFailedDeploymentCount=3 DIST_PL="dist" function mvnEval { - $MVN help:evaluate -q -pl $DIST_PL $MVN_URM_MIRROR -Prelease311 -Dmaven.repo.local=$M2DIR -DforceStdout -Dexpression=$1 + $MVN help:evaluate -q -pl $DIST_PL $MVN_URM_MIRROR -Prelease320 -Dmaven.repo.local=$M2DIR -DforceStdout -Dexpression=$1 } ART_ID=$(mvnEval project.artifactId) diff --git a/jenkins/spark-premerge-build.sh b/jenkins/spark-premerge-build.sh index e81db74cbd4..fc308c6d416 100755 --- a/jenkins/spark-premerge-build.sh +++ b/jenkins/spark-premerge-build.sh @@ -83,12 +83,12 @@ mvn_verify() { # The jacoco coverage should have been collected, but because of how the shade plugin # works and jacoco we need to clean some things up so jacoco will only report for the # things we care about - SPK_VER=${JACOCO_SPARK_VER:-"311"} + SPK_VER=${JACOCO_SPARK_VER:-"320"} mkdir -p target/jacoco_classes/ FILE=$(ls dist/target/rapids-4-spark_2.12-*.jar | grep -v test | xargs readlink -f) UDF_JAR=$(ls ./udf-compiler/target/spark${SPK_VER}/rapids-4-spark-udf_2.12-*-spark${SPK_VER}.jar | grep -v test | xargs readlink -f) pushd target/jacoco_classes/ - jar xf $FILE com org rapids spark-shared "spark${JACOCO_SPARK_VER:-311}/" + jar xf $FILE com org rapids spark-shared "spark${JACOCO_SPARK_VER:-320}/" # extract the .class files in udf jar and replace the existing ones in spark3xx-ommon and spark$SPK_VER # because the class files in udf jar will be modified in aggregator's shade phase jar xf "$UDF_JAR" com/nvidia/spark/udf diff --git a/jenkins/spark-tests.sh b/jenkins/spark-tests.sh index 8d0ed32e8b8..71e580f38c5 100755 --- a/jenkins/spark-tests.sh +++ b/jenkins/spark-tests.sh @@ -59,7 +59,7 @@ $MVN_GET_CMD -DremoteRepositories=$PROJECT_TEST_REPO \ -DgroupId=com.nvidia -DartifactId=rapids-4-spark-integration-tests_$SCALA_BINARY_VER -Dversion=$PROJECT_TEST_VER -Dclassifier=pytest -Dpackaging=tar.gz RAPIDS_INT_TESTS_HOME="$ARTF_ROOT/integration_tests/" -# The version of pytest.tar.gz that is uploaded is the one built against spark311 but its being pushed without classifier for now +# The version of pytest.tar.gz that is uploaded is the one built against spark320 but its being pushed without classifier for now RAPIDS_INT_TESTS_TGZ="$ARTF_ROOT/rapids-4-spark-integration-tests_${SCALA_BINARY_VER}-$PROJECT_TEST_VER-pytest.tar.gz" tmp_info=${TMP_INFO_FILE:-'/tmp/artifacts-build.info'} diff --git a/jenkins/version-def.sh b/jenkins/version-def.sh index dbad6d6fd94..0fe1d7c4d45 100755 --- a/jenkins/version-def.sh +++ b/jenkins/version-def.sh @@ -85,7 +85,7 @@ fi # PHASE_TYPE: CICD phase at which the script is called, to specify Spark shim versions. 
# regular: noSnapshots + snapshots # pre-release: noSnapshots only -# *: shim versions to build, e.g., PHASE_TYPE="311 321" +# *: shim versions to build, e.g., PHASE_TYPE="320 321" PHASE_TYPE=${PHASE_TYPE:-"regular"} case $PHASE_TYPE in # SPARK_SHIM_VERSIONS will be used for nightly artifact build diff --git a/pom.xml b/pom.xml index 3ff87c3cb97..85af9a12a1c 100644 --- a/pom.xml +++ b/pom.xml @@ -91,68 +91,11 @@ - release311 + release320 true - - buildver - 311 - - - - 311 - ${spark311.version} - ${spark311.version} - 1.10.1 - - - delta-lake/delta-stub - api_validation - - - - release312 - - - buildver - 312 - - - - 312 - ${spark312.version} - ${spark312.version} - 1.10.1 - - - delta-lake/delta-stub - api_validation - - - - release313 - - - buildver - 313 - - - - 313 - ${spark313.version} - ${spark313.version} - 1.10.1 - - - delta-lake/delta-stub - api_validation - - - - release320 - buildver 320 @@ -709,11 +652,11 @@ . ${spark.rapids.project.basedir}/target/${spark.version.classifier}/.sbt/1.0/zinc/org.scala-sbt false - 311 + 320 1.8 1.8 8 - ${spark311.version} + ${spark320.version} ${spark.version} 1.10.1 spark${buildver} @@ -758,9 +701,6 @@ - 3.1.1 - 3.1.2 - 3.1.3 3.2.0 3.2.1 3.2.1.3.2.7171000.0-3 @@ -812,9 +752,6 @@ ${project.basedir}/target/${spark.version.classifier}/generated/src - 311, - 312, - 313, 320, 321, 321cdh, @@ -843,8 +780,8 @@ 341db @@ -863,7 +800,6 @@ 340 - 312, 321, 331, 340 diff --git a/scala2.13/aggregator/pom.xml b/scala2.13/aggregator/pom.xml index 198b62d5fa6..bf643394a98 100644 --- a/scala2.13/aggregator/pom.xml +++ b/scala2.13/aggregator/pom.xml @@ -252,79 +252,11 @@ - release311 + release320 - - buildver - 311 - - - - - com.nvidia - rapids-4-spark-delta-stub_${scala.binary.version} - ${project.version} - ${spark.version.classifier} - - - - - release312 - - - buildver - 312 - - - - - com.nvidia - rapids-4-spark-delta-stub_${scala.binary.version} - ${project.version} - ${spark.version.classifier} - - - - - release313 - - - buildver - 313 - - - - - com.nvidia - rapids-4-spark-delta-stub_${scala.binary.version} - ${project.version} - ${spark.version.classifier} - - - - - release314 - - - buildver - 314 - - - - - com.nvidia - rapids-4-spark-delta-stub_${scala.binary.version} - ${project.version} - ${spark.version.classifier} - - - - - release320 - buildver 320 diff --git a/scala2.13/dist/pom.xml b/scala2.13/dist/pom.xml index db9ada51a28..1e651257f06 100644 --- a/scala2.13/dist/pom.xml +++ b/scala2.13/dist/pom.xml @@ -145,7 +145,6 @@ minimumFeatureVersionMix - 312, 320, 321cdh, 330, @@ -389,7 +388,7 @@ self.log("... OK") - + diff --git a/scala2.13/pom.xml b/scala2.13/pom.xml index e32a64f0529..cdb606963ff 100644 --- a/scala2.13/pom.xml +++ b/scala2.13/pom.xml @@ -90,66 +90,6 @@ jdk-profiles - - release311 - - - - buildver - 311 - - - - 311 - ${spark311.version} - ${spark311.version} - 1.10.1 - - - delta-lake/delta-stub - api_validation - - - - release312 - - - buildver - 312 - - - - 312 - ${spark312.version} - ${spark312.version} - 1.10.1 - - - delta-lake/delta-stub - api_validation - - - - release313 - - - buildver - 313 - - - - 313 - ${spark313.version} - ${spark313.version} - 1.10.1 - - - delta-lake/delta-stub - api_validation - - release320 @@ -709,7 +649,7 @@ . 
${spark.rapids.project.basedir}/target/${spark.version.classifier}/.sbt/1.0/zinc/org.scala-sbt false - 311 + 320 1.8 1.8 8 @@ -758,9 +698,6 @@ - 3.1.1 - 3.1.2 - 3.1.3 3.2.0 3.2.1 3.2.1.3.2.7171000.0-3 @@ -812,9 +749,6 @@ ${project.basedir}/target/${spark.version.classifier}/generated/src - 311, - 312, - 313, 320, 321, 321cdh, @@ -843,8 +777,8 @@ 341db @@ -863,7 +797,6 @@ 340 - 312, 321, 331, 340 diff --git a/sql-plugin-api/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala b/sql-plugin-api/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala index 2d7a51c4e43..596c732efec 100644 --- a/sql-plugin-api/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala +++ b/sql-plugin-api/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala @@ -44,7 +44,7 @@ import org.apache.spark.util.MutableURLClassLoader 3. a smaller fraction of classes that differ under one of the supported Spark versions com/nvidia/spark/SQLPlugin.class spark-shared/com/nvidia/spark/rapids/CastExprMeta.class - spark311/org/apache/spark/sql/rapids/GpuUnaryMinus.class + spark320/org/apache/spark/sql/rapids/GpuUnaryMinus.class spark320/org/apache/spark/sql/rapids/GpuUnaryMinus.class Each shim can see a consistent parallel world without conflicts by referencing only one conflicting directory. @@ -53,7 +53,7 @@ import org.apache.spark.util.MutableURLClassLoader jar:file:/home/spark/rapids-4-spark_2.12-24.08.0.jar!/spark320/ Spark 3.1.1 will use jar:file:/home/spark/rapids-4-spark_2.12-24.08.0.jar!/spark-shared/ - jar:file:/home/spark/rapids-4-spark_2.12-24.08.0.jar!/spark311/ + jar:file:/home/spark/rapids-4-spark_2.12-24.08.0.jar!/spark320/ Using these Jar URL's allows referencing different bytecode produced from identical sources by incompatible Scala / Spark dependencies. */ diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/ShimmedExecutionPlanCaptureCallbackImpl.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/ShimmedExecutionPlanCaptureCallbackImpl.scala index 00379026f05..234a6adb44c 100644 --- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/ShimmedExecutionPlanCaptureCallbackImpl.scala +++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/ShimmedExecutionPlanCaptureCallbackImpl.scala @@ -31,7 +31,7 @@ import org.apache.spark.sql.execution.exchange.ReusedExchangeExec /** * Note that the name is prefixed with "Shimmed" such that wildcard rules - * under unshimmed-common-from-spark311.txt don't get confused and pick this class to be + * under unshimmed-common-from-spark320.txt don't get confused and pick this class to be * un-shimmed. 
 */
class ShimmedExecutionPlanCaptureCallbackImpl extends ExecutionPlanCaptureCallbackBase {
diff --git a/tests/src/test/scala/com/nvidia/spark/rapids/timezone/TimeZonePerfSuite.scala b/tests/src/test/scala/com/nvidia/spark/rapids/timezone/TimeZonePerfSuite.scala
index d3388c68931..a9618a448cf 100644
--- a/tests/src/test/scala/com/nvidia/spark/rapids/timezone/TimeZonePerfSuite.scala
+++ b/tests/src/test/scala/com/nvidia/spark/rapids/timezone/TimeZonePerfSuite.scala
@@ -35,7 +35,7 @@ import org.apache.spark.sql.types._
  * Usage:
  *
  *   argLine="-DTZs=Asia/Shanghai,Japan -DenableTimeZonePerf=true" \
- *     mvn test -Dbuildver=311 -DwildcardSuites=com.nvidia.spark.rapids.timezone.TimeZonePerfSuite
+ *     mvn test -Dbuildver=320 -DwildcardSuites=com.nvidia.spark.rapids.timezone.TimeZonePerfSuite
  * Note:
  * Generate a Parquet file with 6 columns:
  *  - c_ts: timestamp column

From eaa487169f9cc6d577cd311c87826a8b4ddd48a8 Mon Sep 17 00:00:00 2001
From: Tim Liu
Date: Tue, 9 Jul 2024 19:33:06 +0800
Subject: [PATCH 3/9] Restore the accidentally deleted file: OrcStatisticShim.scala

tests/src/test/spark311/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala
    -->
tests/src/test/spark321cdh/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala

Check if we can merge this file into
    tests/src/test/spark320/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala?

Signed-off-by: Tim Liu
---
 .../spark/rapids/shims/OrcStatisticShim.scala | 45 +++++++++++++++++++
 1 file changed, 45 insertions(+)
 create mode 100644 tests/src/test/spark321cdh/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala

diff --git a/tests/src/test/spark321cdh/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala b/tests/src/test/spark321cdh/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala
new file mode 100644
index 00000000000..56e3a114fda
--- /dev/null
+++ b/tests/src/test/spark321cdh/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2023, NVIDIA CORPORATION.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +/*** spark-rapids-shim-json-lines +{"spark": "321cdh"} +{"spark": "330cdh"} +{"spark": "332cdh"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import java.util.Objects + +import org.apache.orc._ + +object OrcStatisticShim { + def supports(left: ColumnStatistics, right: ColumnStatistics): Boolean = (left, right) match { + case (_: DateColumnStatistics, _: DateColumnStatistics) => true + case (_: StringColumnStatistics, _: StringColumnStatistics) => true + case _ => false + } + + def equals(left: ColumnStatistics, right: ColumnStatistics): Boolean = (left, right) match { + // have no CollectionColumnStatistics for this shim + case (dateStat: DateColumnStatistics, otherDateStat: DateColumnStatistics) => + Objects.equals(dateStat.getMinimum, otherDateStat.getMinimum) && + Objects.equals(dateStat.getMaximum, otherDateStat.getMaximum) + case (strStat: StringColumnStatistics, otherStrStat: StringColumnStatistics) => + Objects.equals(strStat.getMinimum, otherStrStat.getMinimum) && + Objects.equals(strStat.getMaximum, otherStrStat.getMaximum) && + Objects.equals(strStat.getSum, otherStrStat.getSum) + } +} From ecae7c5ad15a6de60233095fe806c0467a3bb39a Mon Sep 17 00:00:00 2001 From: Tim Liu Date: Tue, 9 Jul 2024 19:46:25 +0800 Subject: [PATCH 4/9] Update Copyright to 2024 Signed-off-by: Tim Liu --- api_validation/auditAllVersions.sh | 2 +- build/make-scala-version-build-files.sh | 2 +- build/shimplify.py | 2 +- dist/build/package-parallel-worlds.py | 2 +- .../sql/rapids/ShimmedExecutionPlanCaptureCallbackImpl.scala | 2 +- .../scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala | 2 +- .../com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala | 2 +- .../com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala | 2 +- .../com/nvidia/spark/rapids/shims/ParquetFieldIdShims.scala | 2 +- .../nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala | 2 +- .../scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala | 2 +- .../scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala | 2 +- .../spark320/scala/org/apache/spark/sql/rapids/shims/misc.scala | 2 +- 13 files changed, 13 insertions(+), 13 deletions(-) diff --git a/api_validation/auditAllVersions.sh b/api_validation/auditAllVersions.sh index a109b7c7920..27aeedcd4ba 100644 --- a/api_validation/auditAllVersions.sh +++ b/api_validation/auditAllVersions.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright (c) 2020-2022, NVIDIA CORPORATION. +# Copyright (c) 2020-2024, NVIDIA CORPORATION. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/build/make-scala-version-build-files.sh b/build/make-scala-version-build-files.sh index 8253f7eb963..ad3482ee979 100755 --- a/build/make-scala-version-build-files.sh +++ b/build/make-scala-version-build-files.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash # -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. +# Copyright (c) 2023-2024, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/build/shimplify.py b/build/shimplify.py index 4ad0dffb17b..21243483da1 100644 --- a/build/shimplify.py +++ b/build/shimplify.py @@ -1,4 +1,4 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. +# Copyright (c) 2023-2024, NVIDIA CORPORATION. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/dist/build/package-parallel-worlds.py b/dist/build/package-parallel-worlds.py index e2b371bfd7f..ef64a4cd6bd 100644 --- a/dist/build/package-parallel-worlds.py +++ b/dist/build/package-parallel-worlds.py @@ -1,4 +1,4 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. +# Copyright (c) 2023-2024, NVIDIA CORPORATION. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/ShimmedExecutionPlanCaptureCallbackImpl.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/ShimmedExecutionPlanCaptureCallbackImpl.scala index 234a6adb44c..b843fe1279a 100644 --- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/ShimmedExecutionPlanCaptureCallbackImpl.scala +++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/ShimmedExecutionPlanCaptureCallbackImpl.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023, NVIDIA CORPORATION. + * Copyright (c) 2023-2024, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala index 03dacd3f463..54d01e7a2a3 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023, NVIDIA CORPORATION. + * Copyright (c) 2023-2024, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala index 5c728e31531..f906bd28acc 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. + * Copyright (c) 2021-2024, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala index b0506266aa1..adb9619340f 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020-2023, NVIDIA CORPORATION. + * Copyright (c) 2020-2024, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetFieldIdShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetFieldIdShims.scala index 520dca909af..248fd3d30a1 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetFieldIdShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetFieldIdShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. + * Copyright (c) 2022-2024, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala index af8d68184e6..c8f9024e352 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. + * Copyright (c) 2022-2024, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala index c59f0af0026..68f148c9d27 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. + * Copyright (c) 2022-2024, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala index b25f7154efd..cd523d44a2c 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. + * Copyright (c) 2022-2024, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/misc.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/misc.scala index 2630f9f83ab..f4fd97b00e2 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/misc.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/misc.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2023, NVIDIA CORPORATION. + * Copyright (c) 2022-2024, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
From e6dd67f3814aa78939ee0b3e6f74a96768393cb3 Mon Sep 17 00:00:00 2001 From: Tim Liu Date: Wed, 10 Jul 2024 14:40:05 +0800 Subject: [PATCH 5/9] Remove the 31x in ShimLoader.scala according to the review comments Signed-off-by: Tim Liu --- .../main/scala/com/nvidia/spark/rapids/ShimLoader.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sql-plugin-api/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala b/sql-plugin-api/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala index 596c732efec..59434084e1a 100644 --- a/sql-plugin-api/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala +++ b/sql-plugin-api/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala @@ -44,16 +44,16 @@ import org.apache.spark.util.MutableURLClassLoader 3. a smaller fraction of classes that differ under one of the supported Spark versions com/nvidia/spark/SQLPlugin.class spark-shared/com/nvidia/spark/rapids/CastExprMeta.class - spark320/org/apache/spark/sql/rapids/GpuUnaryMinus.class - spark320/org/apache/spark/sql/rapids/GpuUnaryMinus.class + spark320/org/apache/spark/sql/rapids/aggregate/GpuLast.class + spark331/org/apache/spark/sql/rapids/aggregate/GpuLast.class Each shim can see a consistent parallel world without conflicts by referencing only one conflicting directory. E.g., Spark 3.2.0 Shim will use jar:file:/home/spark/rapids-4-spark_2.12-24.08.0.jar!/spark-shared/ jar:file:/home/spark/rapids-4-spark_2.12-24.08.0.jar!/spark320/ - Spark 3.1.1 will use + Spark 3.3.1 will use jar:file:/home/spark/rapids-4-spark_2.12-24.08.0.jar!/spark-shared/ - jar:file:/home/spark/rapids-4-spark_2.12-24.08.0.jar!/spark320/ + jar:file:/home/spark/rapids-4-spark_2.12-24.08.0.jar!/spark331/ Using these Jar URL's allows referencing different bytecode produced from identical sources by incompatible Scala / Spark dependencies. */ From ae1b1389392a9ddc776b6faf7cbadd067864db4b Mon Sep 17 00:00:00 2001 From: Tim Liu Date: Wed, 10 Jul 2024 19:17:21 +0800 Subject: [PATCH 6/9] Update the file scala2.13/pom.xml Signed-off-by: Tim Liu --- scala2.13/pom.xml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scala2.13/pom.xml b/scala2.13/pom.xml index cdb606963ff..7973308d9fc 100644 --- a/scala2.13/pom.xml +++ b/scala2.13/pom.xml @@ -93,6 +93,9 @@ release320 + buildver 320 From 878af8552fb2121ea716bd9522eaa3a677432b11 Mon Sep 17 00:00:00 2001 From: Tim Liu Date: Wed, 10 Jul 2024 19:18:10 +0800 Subject: [PATCH 7/9] Drop 3.1.x shims in docs, source code and build scripts Change the default shim to spark320 from spark311 in the shims in docs, source code and build scripts Signed-off-by: Tim Liu --- CONTRIBUTING.md | 21 ++++++++----------- api_validation/README.md | 2 +- .../spark/rapids/api/ApiValidation.scala | 4 ++-- build/coverage-report | 2 +- dist/README.md | 10 ++++----- docs/dev/README.md | 4 ++-- docs/dev/shims.md | 4 ++-- jenkins/hadoop-def.sh | 4 ++-- jenkins/spark-nightly-build.sh | 2 +- jenkins/spark-premerge-build.sh | 2 +- jenkins/version-def.sh | 2 +- .../com/nvidia/spark/rapids/RapidsConf.scala | 6 +++--- tests/README.md | 4 ++-- 13 files changed, 32 insertions(+), 35 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 295006be49c..c52516023f1 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -50,11 +50,11 @@ mvn verify After a successful build, the RAPIDS Accelerator jar will be in the `dist/target/` directory. This will build the plugin for a single version of Spark. By default, this is Apache Spark -3.1.1. 
To build against other versions of Spark you use the `-Dbuildver=XXX` command line option -to Maven. For instance to build Spark 3.1.1 you would use: +3.2.0. To build against other versions of Spark you use the `-Dbuildver=XXX` command line option +to Maven. For instance to build Spark 3.2.0 you would use: ```shell script -mvn -Dbuildver=311 verify +mvn -Dbuildver=320 verify ``` You can find all available build versions in the top level pom.xml file. If you are building for Databricks then you should use the `jenkins/databricks/build.sh` script and modify it for @@ -110,7 +110,7 @@ If you want to create a jar with multiple versions we have the following options 3. Build for all Apache Spark versions, CDH and Databricks with no SNAPSHOT versions of Spark, only released. Use `-PnoSnaphsotsWithDatabricks`. 4. Build for all Apache Spark versions, CDH and Databricks including SNAPSHOT versions of Spark we have supported for. Use `-PsnapshotsWithDatabricks` 5. Build for an arbitrary combination of comma-separated build versions using `-Dincluded_buildvers=`. - E.g., `-Dincluded_buildvers=312,330` + E.g., `-Dincluded_buildvers=320,330` You must first build each of the versions of Spark and then build one final time using the profile for the option you want. @@ -118,9 +118,6 @@ You can also install some manually and build a combined jar. For instance to bui ```shell script mvn clean -mvn -Dbuildver=311 install -Drat.skip=true -DskipTests -mvn -Dbuildver=312 install -Drat.skip=true -DskipTests -mvn -Dbuildver=313 install -Drat.skip=true -DskipTests mvn -Dbuildver=320 install -Drat.skip=true -DskipTests mvn -Dbuildver=321 install -Drat.skip=true -DskipTests mvn -Dbuildver=321cdh install -Drat.skip=true -DskipTests @@ -150,9 +147,9 @@ There is a build script `build/buildall` that automates the local build process. By default, it builds everything that is needed to create a distribution jar for all released (noSnapshots) Spark versions except for Databricks. Other profiles that you can pass using `--profile=` include - `snapshots` that includes all released (noSnapshots) and snapshots Spark versions except for Databricks -- `minimumFeatureVersionMix` that currently includes 321cdh, 312, 320, 330 is recommended for catching incompatibilities already in the local development cycle +- `minimumFeatureVersionMix` that currently includes 321cdh, 320, 330 is recommended for catching incompatibilities already in the local development cycle -For initial quick iterations we can use `--profile=` to build a single-shim version. e.g., `--profile=311` for Spark 3.1.1. +For initial quick iterations we can use `--profile=` to build a single-shim version. e.g., `--profile=320` for Spark 3.2.0. The option `--module=` allows to limit the number of build steps. When iterating, we often don't have the need for the entire build. We may be interested in building everything necessary just to run integration tests (`--module=integration_tests`), or we may want to just rebuild the distribution jar (`--module=dist`) @@ -201,7 +198,7 @@ NOTE: Build process does not require an ARM machine, so if you want to build the on X86 machine, please also add `-DskipTests` in commands. 
```bash -mvn clean verify -Dbuildver=311 -Parm64 +mvn clean verify -Dbuildver=320 -Parm64 ``` ### Iterative development during local testing @@ -377,7 +374,7 @@ the symlink `.bloop` to point to the corresponding directory `.bloop-spark3XY` Example usage: ```Bash -./build/buildall --generate-bloop --profile=311,330 +./build/buildall --generate-bloop --profile=320,330 rm -vf .bloop ln -s .bloop-spark330 .bloop ``` @@ -414,7 +411,7 @@ Install [Scala Metals extension](https://scalameta.org/metals/docs/editors/vscod either locally or into a Remote-SSH extension destination depending on your target environment. When your project folder is open in VS Code, it may prompt you to import Maven project. IMPORTANT: always decline with "Don't ask again", otherwise it will overwrite the Bloop projects -generated with the default `311` profile. If you need to use a different profile, always rerun the +generated with the default `320` profile. If you need to use a different profile, always rerun the command above manually. When regenerating projects it's recommended to proceed to Metals "Build commands" View, and click: 1. "Restart build server" diff --git a/api_validation/README.md b/api_validation/README.md index 5f34354656d..482b3e76a58 100644 --- a/api_validation/README.md +++ b/api_validation/README.md @@ -21,7 +21,7 @@ cd api_validation sh auditAllVersions.sh // To run script on particular version we can use profile -mvn scala:run -P spark311 +mvn scala:run -P spark320 ``` # Output diff --git a/api_validation/src/main/scala/com/nvidia/spark/rapids/api/ApiValidation.scala b/api_validation/src/main/scala/com/nvidia/spark/rapids/api/ApiValidation.scala index 58d273d2148..5821c6e18ac 100644 --- a/api_validation/src/main/scala/com/nvidia/spark/rapids/api/ApiValidation.scala +++ b/api_validation/src/main/scala/com/nvidia/spark/rapids/api/ApiValidation.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020-2023, NVIDIA CORPORATION. + * Copyright (c) 2020-2024, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -69,7 +69,7 @@ object ApiValidation extends Logging { val gpuKeys = gpuExecs.keys var printNewline = false - val sparkToShimMap = Map("3.1.1" -> "spark311") + val sparkToShimMap = Map("3.2.0" -> "spark320") val sparkVersion = ShimLoader.getShimVersion.toString val shimVersion = sparkToShimMap(sparkVersion) diff --git a/build/coverage-report b/build/coverage-report index 75c86e55258..ddde86b6aea 100755 --- a/build/coverage-report +++ b/build/coverage-report @@ -23,7 +23,7 @@ TMP_CLASS=${TEMP_CLASS_LOC:-"./target/jacoco_classes/"} HTML_LOC=${HTML_LOCATION:="./target/jacoco-report/"} XML_LOC=${XML_LOCATION:="${HTML_LOC}"} DIST_JAR=${RAPIDS_DIST_JAR:-$(ls ./dist/target/rapids-4-spark_2.12-*cuda*.jar | grep -v test | head -1 | xargs readlink -f)} -SPK_VER=${JACOCO_SPARK_VER:-"311"} +SPK_VER=${JACOCO_SPARK_VER:-"320"} UDF_JAR=${RAPIDS_UDF_JAR:-$(ls ./udf-compiler/target/spark${SPK_VER}/rapids-4-spark-udf_2.12-*-SNAPSHOT-spark${SPK_VER}.jar | grep -v test | head -1 | xargs readlink -f)} SOURCE_DIRS=${SOURCE_DIRS:-"./sql-plugin/src/main/scala/:./sql-plugin/src/main/java/:./shuffle-plugin/src/main/scala/:./udf-compiler/src/main/scala/"} diff --git a/dist/README.md b/dist/README.md index 56e5b9be297..2a0955da6dd 100644 --- a/dist/README.md +++ b/dist/README.md @@ -17,21 +17,21 @@ Files are: `com.nvidia.spark.rapids.SparkShimServiceProvider.sparkNonSnapshot`, The new uber jar is structured like: -1. 
Base common classes are user visible classes. For these we use Spark 3.1.1 versions because they are assumed to be +1. Base common classes are user visible classes. For these we use Spark 3.2.0 versions because they are assumed to be bitwise-identical to the other shims, this assumption is subject to the future automatic validation. 2. META-INF/services. This is a file that has to list all the shim versions supported by this jar. The files talked about above for each profile are put into place here for uber jars. Although we currently do not use [ServiceLoader API](https://docs.oracle.com/javase/8/docs/api/java/util/ServiceLoader.html) we use the same service provider discovery mechanism -3. META-INF base files are from 3.1.1 - maven, LICENSE, NOTICE, etc +3. META-INF base files are from 3.2.0 - maven, LICENSE, NOTICE, etc 4. Spark specific directory (aka Parallel World in the jargon of [ParallelWorldClassloader](https://github.com/openjdk/jdk/blob/jdk8-b120/jaxws/src/share/jaxws_classes/com/sun/istack/internal/tools/ParallelWorldClassLoader.java)) -for each version of Spark supported in the jar, i.e., spark311/, spark312/, spark320/, etc. +for each version of Spark supported in the jar, i.e., spark320/, spark330/, spark341/, etc. If you have to change the contents of the uber jar the following files control what goes into the base jar as classes that are not shaded. -1. `unshimmed-common-from-spark311.txt` - This has classes and files that should go into the base jar with their normal +1. `unshimmed-common-from-spark320.txt` - This has classes and files that should go into the base jar with their normal package name (not shaded). This includes user visible classes (i.e., com/nvidia/spark/SQLPlugin), python files, -and other files that aren't version specific. Uses Spark 3.1.1 built jar for these base classes as explained above. +and other files that aren't version specific. Uses Spark 3.2.0 built jar for these base classes as explained above. 2. `unshimmed-from-each-spark3xx.txt` - This is applied to all the individual Spark specific version jars to pull any files that need to go into the base of the jar and not into the Spark specific directory. diff --git a/docs/dev/README.md b/docs/dev/README.md index 9307780494d..ecbd0252ae7 100644 --- a/docs/dev/README.md +++ b/docs/dev/README.md @@ -271,9 +271,9 @@ is not currently [able to support](https://github.com/jacoco/jacoco/issues/965) setup. So if you want to generate a coverage report you need to do it manually. Coverage is collected by default so first run the tests, and then generate the report, this should be run from the root project directory. It will print out the URL of the report at the end. Besides, -coverage report only covers test with Spark 311 by default as [jacoco](https://www.jacoco.org/jacoco/trunk/doc/) +coverage report only covers test with Spark 320 by default as [jacoco](https://www.jacoco.org/jacoco/trunk/doc/) can't support combined jars. If you're testing with different Spark version, please change it -via environment variable `JACOCO_SPARK_VER` before generate coverage report, e.g, `export JACOCO_SPARK_VER=311`. +via environment variable `JACOCO_SPARK_VER` before generate coverage report, e.g, `export JACOCO_SPARK_VER=320`. 
```bash mvn clean verify diff --git a/docs/dev/shims.md b/docs/dev/shims.md index 218adbc99d4..a9317fa33c2 100644 --- a/docs/dev/shims.md +++ b/docs/dev/shims.md @@ -8,7 +8,7 @@ parent: Developer Overview # Shim Development RAPIDS Accelerator For Apache Spark supports multiple feature version lines of -Apache Spark such as 3.1.x, 3.2.x, 3.3.0 and a number of vendor releases that contain +Apache Spark such as 3.2.x, 3.3.x, 3.4.x, 3.5.x and a number of vendor releases that contain a mix of patches from different upstream releases. These artifacts are generally incompatible between each other, at both source code level and even more often at the binary level. The role of the Shim layer is to hide these issues from the @@ -159,7 +159,7 @@ to build against the lowest and highest versions of the supported Spark version range. As of the time of this writing: ```bash -./build/buildall --parallel=4 --profile=311,330 --module=dist +./build/buildall --parallel=4 --profile=320,351 --module=dist ``` However, before submitting the PR execute the full build `--profile=noSnapshots`. diff --git a/jenkins/hadoop-def.sh b/jenkins/hadoop-def.sh index 771705fc2ca..fcf1bf14faa 100755 --- a/jenkins/hadoop-def.sh +++ b/jenkins/hadoop-def.sh @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. +# Copyright (c) 2023-2024, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -20,7 +20,7 @@ set -e -spark_version=${1:-"3.1.1"} +spark_version=${1:-"3.2.0"} scala_version=${2:-"2.12"} # Split spark version into base version (e.g. 3.3.0) and suffix (e.g. SNAPSHOT) PRE_IFS=$IFS diff --git a/jenkins/spark-nightly-build.sh b/jenkins/spark-nightly-build.sh index 51e1f7d01e5..3ec2e0f19b2 100755 --- a/jenkins/spark-nightly-build.sh +++ b/jenkins/spark-nightly-build.sh @@ -176,7 +176,7 @@ distWithReducedPom "install" if [[ $SKIP_DEPLOY != 'true' ]]; then distWithReducedPom "deploy" - # this deploys selected submodules that is unconditionally built with Spark 3.1.1 + # this deploys selected submodules that is unconditionally built with Spark 3.2.0 $MVN -B deploy -pl $DEPLOY_SUBMODULES \ -Dbuildver=$SPARK_BASE_SHIM_VERSION \ -DskipTests \ diff --git a/jenkins/spark-premerge-build.sh b/jenkins/spark-premerge-build.sh index fc308c6d416..bf33b8d65c9 100755 --- a/jenkins/spark-premerge-build.sh +++ b/jenkins/spark-premerge-build.sh @@ -222,7 +222,7 @@ ci_scala213() { } prepare_spark() { - spark_ver=${1:-'3.1.1'} + spark_ver=${1:-'3.2.0'} scala_ver=${2:-'2.12'} ARTF_ROOT="$(pwd)/.download" diff --git a/jenkins/version-def.sh b/jenkins/version-def.sh index 0fe1d7c4d45..8d363fc9d4a 100755 --- a/jenkins/version-def.sh +++ b/jenkins/version-def.sh @@ -32,7 +32,7 @@ CUDA_CLASSIFIER=${CUDA_CLASSIFIER:-"cuda11"} CLASSIFIER=${CLASSIFIER:-"$CUDA_CLASSIFIER"} # default as CUDA_CLASSIFIER for compatibility PROJECT_VER=${PROJECT_VER:-"24.08.0-SNAPSHOT"} PROJECT_TEST_VER=${PROJECT_TEST_VER:-"24.08.0-SNAPSHOT"} -SPARK_VER=${SPARK_VER:-"3.1.1"} +SPARK_VER=${SPARK_VER:-"3.2.0"} SPARK_VER_213=${SPARK_VER_213:-"3.3.0"} # Make a best attempt to set the default value for the shuffle shim. # Note that SPARK_VER for non-Apache Spark flavors (i.e. 
databricks, diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsConf.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsConf.scala index 0a8ce614d83..5f368942fcc 100644 --- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsConf.scala +++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsConf.scala @@ -2021,9 +2021,9 @@ val SHUFFLE_COMPRESSION_LZ4_CHUNK_SIZE = conf("spark.rapids.shuffle.compression. .startupOnly() .doc("Overrides the automatic Spark shim detection logic and forces a specific shims " + "provider class to be used. Set to the fully qualified shims provider class to use. " + - "If you are using a custom Spark version such as Spark 3.1.1.0 then this can be used to " + - "specify the shims provider that matches the base Spark version of Spark 3.1.1, i.e.: " + - "com.nvidia.spark.rapids.shims.spark311.SparkShimServiceProvider. If you modified Spark " + + "If you are using a custom Spark version such as Spark 3.2.0 then this can be used to " + + "specify the shims provider that matches the base Spark version of Spark 3.2.0, i.e.: " + + "com.nvidia.spark.rapids.shims.spark320.SparkShimServiceProvider. If you modified Spark " + "then there is no guarantee the RAPIDS Accelerator will function properly." + "When tested in a combined jar with other Shims, it's expected that the provided " + "implementation follows the same convention as existing Spark shims. If its class" + diff --git a/tests/README.md b/tests/README.md index c854a2e6625..d004e07d009 100644 --- a/tests/README.md +++ b/tests/README.md @@ -44,9 +44,9 @@ and the the #### Running Unit Tests Against Specific Apache Spark Versions You can run the unit tests against different versions of Spark using the different profiles. The -default version runs against Spark 3.1.1, to run against a specific version use a buildver property: +default version runs against Spark 3.2.0, to run against a specific version use a buildver property: -- `-Dbuildver=311` (Spark 3.1.1) +- `-Dbuildver=320` (Spark 3.2.0) - `-Dbuildver=350` (Spark 3.5.0) etc From 38ed9ae180176a5adfb13601534d41aa19fdbeef Mon Sep 17 00:00:00 2001 From: Tim Liu Date: Thu, 11 Jul 2024 18:02:00 +0800 Subject: [PATCH 8/9] Updating the docs for the dropping 31x shims Signed-off-by: Tim Liu --- .../advanced_configs.md | 8 +- docs/supported_ops.md | 2904 ++++++++++++++--- tools/generated_files/operatorsScore.csv | 6 +- tools/generated_files/supportedDataSource.csv | 26 +- tools/generated_files/supportedExecs.csv | 104 +- tools/generated_files/supportedExprs.csv | 1484 ++++----- 6 files changed, 3269 insertions(+), 1263 deletions(-) diff --git a/docs/additional-functionality/advanced_configs.md b/docs/additional-functionality/advanced_configs.md index 2f76ffd8a68..12d2fda4ec6 100644 --- a/docs/additional-functionality/advanced_configs.md +++ b/docs/additional-functionality/advanced_configs.md @@ -329,7 +329,7 @@ Name | SQL Function(s) | Description | Default Value | Notes spark.rapids.sql.expression.PromotePrecision| |PromotePrecision before arithmetic operations between DecimalType data|true|None| spark.rapids.sql.expression.PythonUDF| |UDF run in an external python process. 
Does not actually run on the GPU, but the transfer of data to/from it can be accelerated|true|None| spark.rapids.sql.expression.Quarter|`quarter`|Returns the quarter of the year for date, in the range 1 to 4|true|None| -spark.rapids.sql.expression.RLike|`rlike`|Regular expression version of Like|true|None| +spark.rapids.sql.expression.RLike|`regexp_like`, `regexp`, `rlike`|Regular expression version of Like|true|None| spark.rapids.sql.expression.RaiseError|`raise_error`|Throw an exception|true|None| spark.rapids.sql.expression.Rand|`rand`, `random`|Generate a random column with i.i.d. uniformly distributed values in [0, 1)|true|None| spark.rapids.sql.expression.Rank|`rank`|Window function that returns the rank value within the aggregation window|true|None| @@ -438,14 +438,18 @@ Name | Description | Default Value | Notes spark.rapids.sql.exec.SubqueryBroadcastExec|Plan to collect and transform the broadcast key values|true|None| spark.rapids.sql.exec.TakeOrderedAndProjectExec|Take the first limit elements as defined by the sortOrder, and do projection if needed|true|None| spark.rapids.sql.exec.UnionExec|The backend for the union operator|true|None| -spark.rapids.sql.exec.CustomShuffleReaderExec|A wrapper of shuffle query stage|true|None| +spark.rapids.sql.exec.AQEShuffleReadExec|A wrapper of shuffle query stage|true|None| spark.rapids.sql.exec.HashAggregateExec|The backend for hash based aggregations|true|None| spark.rapids.sql.exec.ObjectHashAggregateExec|The backend for hash based aggregations supporting TypedImperativeAggregate functions|true|None| spark.rapids.sql.exec.SortAggregateExec|The backend for sort based aggregations|true|None| spark.rapids.sql.exec.InMemoryTableScanExec|Implementation of InMemoryTableScanExec to use GPU accelerated caching|true|None| spark.rapids.sql.exec.DataWritingCommandExec|Writing data|true|None| spark.rapids.sql.exec.ExecutedCommandExec|Eagerly executed commands|true|None| +spark.rapids.sql.exec.AppendDataExecV1|Append data into a datasource V2 table using the V1 write interface|true|None| +spark.rapids.sql.exec.AtomicCreateTableAsSelectExec|Create table as select for datasource V2 tables that support staging table creation|true|None| +spark.rapids.sql.exec.AtomicReplaceTableAsSelectExec|Replace table as select for datasource V2 tables that support staging table creation|true|None| spark.rapids.sql.exec.BatchScanExec|The backend for most file input|true|None| +spark.rapids.sql.exec.OverwriteByExpressionExecV1|Overwrite into a datasource V2 table using the V1 write interface|true|None| spark.rapids.sql.exec.BroadcastExchangeExec|The backend for broadcast exchange of data|true|None| spark.rapids.sql.exec.ShuffleExchangeExec|The backend for most data being exchanged between processes|true|None| spark.rapids.sql.exec.BroadcastHashJoinExec|Implementation of join using broadcast data|true|None| diff --git a/docs/supported_ops.md b/docs/supported_ops.md index 018d6640976..4bf7e77c0ac 100644 --- a/docs/supported_ops.md +++ b/docs/supported_ops.md @@ -9,7 +9,7 @@ support all data types. The RAPIDS Accelerator for Apache Spark has further restrictions on what types are supported for processing. This tries to document what operations are supported and what data types each operation supports. Because Apache Spark is under active development too and this document was generated -against version 3.1.1 of Spark. Most of this should still +against version 3.2.0 of Spark. Most of this should still apply to other versions of Spark, but there may be slight changes. 
# General limitations @@ -128,6 +128,8 @@ Accelerator supports are described below. MAP STRUCT UDT +DAYTIME +YEARMONTH CoalesceExec @@ -148,9 +150,11 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -172,9 +176,11 @@ Accelerator supports are described below. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -196,9 +202,11 @@ Accelerator supports are described below. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -220,9 +228,11 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -244,9 +254,11 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -268,9 +280,11 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -292,9 +306,11 @@ Accelerator supports are described below. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -316,9 +332,11 @@ Accelerator supports are described below. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -340,9 +358,11 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -368,6 +388,8 @@ Accelerator supports are described below. + + SampleExec @@ -388,9 +410,11 @@ Accelerator supports are described below. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -412,9 +436,11 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -440,6 +466,8 @@ Accelerator supports are described below. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
S +S +S TakeOrderedAndProjectExec @@ -460,9 +488,11 @@ Accelerator supports are described below. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -484,9 +514,11 @@ Accelerator supports are described below. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
unionByName will not optionally impute nulls for missing struct fields when the column is a struct and there are non-overlapping fields;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
unionByName will not optionally impute nulls for missing struct fields when the column is a struct and there are non-overlapping fields;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -512,9 +544,11 @@ Accelerator supports are described below. MAP STRUCT UDT +DAYTIME +YEARMONTH -CustomShuffleReaderExec +AQEShuffleReadExec A wrapper of shuffle query stage None Input/Output @@ -532,9 +566,11 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -556,9 +592,11 @@ Accelerator supports are described below. S PS
not allowed for grouping expressions
NS -PS
not allowed for grouping expressions if containing Struct as child;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
not allowed for grouping expressions;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
not allowed for grouping expressions if containing Array, Map, or Binary as child;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
not allowed for grouping expressions if containing Struct as child;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
not allowed for grouping expressions;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
not allowed for grouping expressions if containing Array, Map, or Binary as child;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -580,9 +618,11 @@ Accelerator supports are described below. S PS
not allowed for grouping expressions and only allowed when aggregate buffers can be converted between CPU and GPU
NS -PS
not allowed for grouping expressions;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
not allowed for grouping expressions;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
not allowed for grouping expressions if containing Array, Map, or Binary as child;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
not allowed for grouping expressions;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
not allowed for grouping expressions;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
not allowed for grouping expressions if containing Array, Map, or Binary as child;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -604,9 +644,11 @@ Accelerator supports are described below. S PS
not allowed for grouping expressions and only allowed when aggregate buffers can be converted between CPU and GPU
NS -PS
not allowed for grouping expressions;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
not allowed for grouping expressions;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
not allowed for grouping expressions if containing Array, Map, or Binary as child;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
not allowed for grouping expressions;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
not allowed for grouping expressions;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
not allowed for grouping expressions if containing Array, Map, or Binary as child;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -628,9 +670,11 @@ Accelerator supports are described below. NS NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -652,9 +696,11 @@ Accelerator supports are described below. NS S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -680,6 +726,86 @@ Accelerator supports are described below. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
S +S +S + + +AppendDataExecV1 +Append data into a datasource V2 table using the V1 write interface +None +Input/Output +S +S +S +S +S +S +S +S +PS
UTC is only supported TZ for TIMESTAMP
+S +S +NS +S +NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS +NS + + +AtomicCreateTableAsSelectExec +Create table as select for datasource V2 tables that support staging table creation +None +Input/Output +S +S +S +S +S +S +S +S +PS
UTC is only supported TZ for TIMESTAMP
+S +S +NS +S +NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS +NS + + +AtomicReplaceTableAsSelectExec +Replace table as select for datasource V2 tables that support staging table creation +None +Input/Output +S +S +S +S +S +S +S +S +PS
UTC is only supported TZ for TIMESTAMP
+S +S +NS +S +NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS +NS BatchScanExec @@ -700,9 +826,37 @@ Accelerator supports are described below. NS S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS +NS + + +OverwriteByExpressionExecV1 +Overwrite into a datasource V2 table using the V1 write interface +None +Input/Output +S +S +S +S +S +S +S +S +PS
UTC is only supported TZ for TIMESTAMP
+S +S +NS +S +NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -724,9 +878,11 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -748,9 +904,11 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
Round-robin partitioning is not supported if spark.sql.execution.sortBeforeRepartition is true;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
Round-robin partitioning is not supported for nested structs if spark.sql.execution.sortBeforeRepartition is true;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
Round-robin partitioning is not supported if spark.sql.execution.sortBeforeRepartition is true;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
Round-robin partitioning is not supported for nested structs if spark.sql.execution.sortBeforeRepartition is true;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -774,7 +932,9 @@ Accelerator supports are described below. NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -795,7 +955,9 @@ Accelerator supports are described below. NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -818,6 +980,8 @@ Accelerator supports are described below. + + Input/Output @@ -835,12 +999,40 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS +Executor +Description +Notes +Param(s) +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + BroadcastNestedLoopJoinExec Implementation of join using brute force. Full outer joins and joins where the broadcast side matches the join side (e.g.: LeftOuter with left broadcast) are not supported None @@ -863,6 +1055,8 @@ Accelerator supports are described below. + + Input/Output @@ -880,34 +1074,12 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS - - -Executor -Description -Notes -Param(s) -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT CartesianProductExec @@ -928,9 +1100,11 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -954,7 +1128,9 @@ Accelerator supports are described below. NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -975,7 +1151,9 @@ Accelerator supports are described below. NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -998,6 +1176,8 @@ Accelerator supports are described below. + + Input/Output @@ -1015,9 +1195,11 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -1041,7 +1223,9 @@ Accelerator supports are described below. NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -1062,7 +1246,9 @@ Accelerator supports are described below. NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -1085,6 +1271,8 @@ Accelerator supports are described below. + + Input/Output @@ -1102,9 +1290,11 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -1130,6 +1320,8 @@ Accelerator supports are described below. NS NS NS +NS +NS ArrowEvalPythonExec @@ -1150,9 +1342,11 @@ Accelerator supports are described below. NS NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT
NS @@ -1178,6 +1372,8 @@ Accelerator supports are described below. NS NS NS +NS +NS FlatMapGroupsInPandasExec @@ -1202,6 +1398,34 @@ Accelerator supports are described below. NS NS NS +NS +NS + + +Executor +Description +Notes +Param(s) +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH MapInPandasExec @@ -1222,9 +1446,11 @@ Accelerator supports are described below. NS NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT
NS @@ -1235,45 +1461,23 @@ Accelerator supports are described below. S S S -S -S -S -S -S -PS
UTC is only supported TZ for TIMESTAMP
-S -NS -NS -NS -NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
-NS -NS -NS - - -Executor -Description -Notes -Param(s) -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT +S +S +S +S +S +PS
UTC is only supported TZ for TIMESTAMP
+S +NS +NS +NS +NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+NS +NS +NS +NS +NS WindowExec @@ -1296,7 +1500,9 @@ Accelerator supports are described below. NS NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -1315,9 +1521,11 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -1343,6 +1551,8 @@ Accelerator supports are described below. NS NS NS +NS +NS @@ -1406,6 +1616,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH Abs @@ -1432,6 +1644,8 @@ are limited. + + result @@ -1453,6 +1667,8 @@ are limited. + + AST @@ -1475,6 +1691,8 @@ are limited. + + result @@ -1496,6 +1714,8 @@ are limited. + + Acos @@ -1522,6 +1742,8 @@ are limited. + + result @@ -1543,6 +1765,8 @@ are limited. + + AST @@ -1565,6 +1789,8 @@ are limited. + + result @@ -1586,6 +1812,8 @@ are limited. + + Acosh @@ -1612,6 +1840,8 @@ are limited. + + result @@ -1633,6 +1863,8 @@ are limited. + + AST @@ -1655,6 +1887,8 @@ are limited. + + result @@ -1676,6 +1910,8 @@ are limited. + + Add @@ -1702,6 +1938,8 @@ are limited. +NS +NS rhs @@ -1723,6 +1961,8 @@ are limited. +NS +NS result @@ -1744,6 +1984,8 @@ are limited. +NS +NS AST @@ -1766,6 +2008,8 @@ are limited. +NS +NS rhs @@ -1787,6 +2031,8 @@ are limited. +NS +NS result @@ -1808,6 +2054,8 @@ are limited. +NS +NS Expression @@ -1834,6 +2082,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH Alias @@ -1856,9 +2106,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -1877,9 +2129,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -1903,6 +2157,8 @@ are limited. NS NS NS +NS +NS result @@ -1924,6 +2180,8 @@ are limited. NS NS NS +NS +NS And @@ -1950,6 +2208,8 @@ are limited. + + rhs @@ -1971,6 +2231,8 @@ are limited. + + result @@ -1992,6 +2254,8 @@ are limited. + + AST @@ -2014,6 +2278,8 @@ are limited. + + rhs @@ -2035,6 +2301,8 @@ are limited. + + result @@ -2056,6 +2324,8 @@ are limited. + + ArrayContains @@ -2078,7 +2348,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+ + @@ -2103,6 +2375,8 @@ are limited. NS NS NS +NS +NS result @@ -2124,6 +2398,8 @@ are limited. + + ArrayExcept @@ -2146,7 +2422,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+ + @@ -2167,7 +2445,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+ + @@ -2188,7 +2468,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+ + @@ -2218,6 +2500,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH ArrayExists @@ -2240,7 +2524,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -2265,6 +2551,8 @@ are limited. + + result @@ -2286,6 +2574,8 @@ are limited. + + ArrayFilter @@ -2308,7 +2598,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -2333,6 +2625,8 @@ are limited. + + result @@ -2350,7 +2644,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -2376,7 +2672,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+ + @@ -2397,7 +2695,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+ + @@ -2418,7 +2718,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+ + @@ -2448,6 +2750,8 @@ are limited. + + result @@ -2469,6 +2773,8 @@ are limited. NS NS + + ArrayMin @@ -2495,6 +2801,8 @@ are limited. + + result @@ -2516,6 +2824,8 @@ are limited. NS NS + + ArrayRemove @@ -2538,7 +2848,9 @@ are limited. NS NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS NS NS @@ -2559,9 +2871,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -2580,7 +2894,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -2610,6 +2926,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH ArrayRepeat @@ -2632,9 +2950,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -2657,6 +2977,8 @@ are limited. + + result @@ -2674,7 +2996,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -2700,7 +3024,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -2721,9 +3047,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -2742,7 +3070,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -2768,7 +3098,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+ + @@ -2789,7 +3121,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+ + @@ -2810,7 +3144,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+ + @@ -2836,7 +3172,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+ + @@ -2857,7 +3195,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+ + @@ -2882,6 +3222,8 @@ are limited. + + ArraysZip @@ -2904,7 +3246,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -2925,7 +3269,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -2955,6 +3301,8 @@ are limited. + + result @@ -2976,6 +3324,8 @@ are limited. + + Expression @@ -3002,6 +3352,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH Asin @@ -3028,6 +3380,8 @@ are limited. + + result @@ -3049,6 +3403,8 @@ are limited. + + AST @@ -3071,6 +3427,8 @@ are limited. + + result @@ -3092,6 +3450,8 @@ are limited. + + Asinh @@ -3118,6 +3478,8 @@ are limited. + + result @@ -3139,6 +3501,8 @@ are limited. + + AST @@ -3161,6 +3525,8 @@ are limited. + + result @@ -3182,6 +3548,8 @@ are limited. + + AtLeastNNonNulls @@ -3204,9 +3572,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -3229,6 +3599,8 @@ are limited. + + Atan @@ -3255,6 +3627,8 @@ are limited. + + result @@ -3276,6 +3650,8 @@ are limited. + + AST @@ -3298,6 +3674,8 @@ are limited. + + result @@ -3319,6 +3697,8 @@ are limited. + + Atanh @@ -3345,6 +3725,8 @@ are limited. + + result @@ -3366,6 +3748,8 @@ are limited. + + AST @@ -3388,6 +3772,8 @@ are limited. + + result @@ -3409,6 +3795,8 @@ are limited. + + Expression @@ -3435,6 +3823,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH AttributeReference @@ -3457,9 +3847,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -3483,6 +3875,8 @@ are limited. NS NS NS +NS +NS BRound @@ -3509,6 +3903,8 @@ are limited. + + scale @@ -3530,6 +3926,8 @@ are limited. + + result @@ -3551,6 +3949,8 @@ are limited. + + BitLength @@ -3577,6 +3977,8 @@ are limited. + + result @@ -3598,6 +4000,8 @@ are limited. + + BitwiseAnd @@ -3624,6 +4028,8 @@ are limited. + + rhs @@ -3645,6 +4051,8 @@ are limited. + + result @@ -3666,6 +4074,8 @@ are limited. + + AST @@ -3688,6 +4098,8 @@ are limited. + + rhs @@ -3709,6 +4121,8 @@ are limited. + + result @@ -3730,6 +4144,8 @@ are limited. + + BitwiseNot @@ -3756,6 +4172,8 @@ are limited. + + result @@ -3777,6 +4195,8 @@ are limited. + + AST @@ -3799,6 +4219,8 @@ are limited. + + result @@ -3820,6 +4242,8 @@ are limited. + + Expression @@ -3846,6 +4270,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH BitwiseOr @@ -3872,6 +4298,8 @@ are limited. + + rhs @@ -3893,6 +4321,8 @@ are limited. + + result @@ -3914,6 +4344,8 @@ are limited. + + AST @@ -3936,6 +4368,8 @@ are limited. + + rhs @@ -3957,6 +4391,8 @@ are limited. + + result @@ -3978,6 +4414,8 @@ are limited. + + BitwiseXor @@ -4004,6 +4442,8 @@ are limited. + + rhs @@ -4025,6 +4465,8 @@ are limited. + + result @@ -4046,6 +4488,8 @@ are limited. + + AST @@ -4068,6 +4512,8 @@ are limited. + + rhs @@ -4089,6 +4535,8 @@ are limited. + + result @@ -4110,6 +4558,8 @@ are limited. + + BoundReference @@ -4132,9 +4582,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -4158,6 +4610,8 @@ are limited. NS NS NS +NS +NS CaseWhen @@ -4184,6 +4638,8 @@ are limited. + + value @@ -4201,9 +4657,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -4222,9 +4680,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -4252,6 +4712,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH Cbrt @@ -4278,6 +4740,8 @@ are limited. + + result @@ -4299,6 +4763,8 @@ are limited. + + AST @@ -4321,6 +4787,8 @@ are limited. + + result @@ -4342,6 +4810,8 @@ are limited. + + Ceil @@ -4368,6 +4838,8 @@ are limited. + + result @@ -4389,6 +4861,8 @@ are limited. + + CheckOverflow @@ -4415,6 +4889,8 @@ are limited. + + result @@ -4436,6 +4912,8 @@ are limited. + + Coalesce @@ -4458,9 +4936,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -4479,9 +4959,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -4505,7 +4987,9 @@ are limited. NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -4526,7 +5010,9 @@ are limited. NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -4556,6 +5042,8 @@ are limited. + + result @@ -4577,6 +5065,8 @@ are limited. + + Contains @@ -4603,6 +5093,8 @@ are limited. + + search @@ -4624,6 +5116,8 @@ are limited. + + result @@ -4645,6 +5139,8 @@ are limited. + + Expression @@ -4671,6 +5167,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH Conv @@ -4697,6 +5195,8 @@ are limited. + + from_base @@ -4718,6 +5218,8 @@ are limited. + + to_base @@ -4739,6 +5241,8 @@ are limited. + + result @@ -4760,6 +5264,8 @@ are limited. + + Cos @@ -4786,6 +5292,8 @@ are limited. + + result @@ -4807,6 +5315,8 @@ are limited. + + AST @@ -4829,6 +5339,8 @@ are limited. + + result @@ -4850,6 +5362,8 @@ are limited. + + Cosh @@ -4876,6 +5390,8 @@ are limited. + + result @@ -4897,6 +5413,8 @@ are limited. + + AST @@ -4919,6 +5437,8 @@ are limited. + + result @@ -4940,6 +5460,8 @@ are limited. + + Cot @@ -4966,6 +5488,8 @@ are limited. + + result @@ -4987,6 +5511,8 @@ are limited. + + AST @@ -5009,6 +5535,8 @@ are limited. + + result @@ -5030,6 +5558,8 @@ are limited. + + Expression @@ -5056,6 +5586,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH CreateArray @@ -5078,9 +5610,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, MAP, UDT
NS @@ -5099,7 +5633,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+ + @@ -5129,6 +5665,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
+ + value @@ -5150,6 +5688,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
+ + CreateNamedStruct @@ -5176,6 +5716,8 @@ are limited. + + value @@ -5193,9 +5735,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -5216,7 +5760,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -5244,6 +5790,8 @@ are limited. + + DateAdd @@ -5270,6 +5818,8 @@ are limited. + + days @@ -5291,6 +5841,8 @@ are limited. + + result @@ -5312,6 +5864,8 @@ are limited. + + DateAddInterval @@ -5338,6 +5892,8 @@ are limited. + + interval @@ -5359,6 +5915,8 @@ are limited. + + result @@ -5380,6 +5938,8 @@ are limited. + + DateDiff @@ -5406,6 +5966,8 @@ are limited. + + rhs @@ -5427,6 +5989,8 @@ are limited. + + result @@ -5448,6 +6012,8 @@ are limited. + + Expression @@ -5474,6 +6040,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH DateFormatClass @@ -5500,6 +6068,8 @@ are limited. + + strfmt @@ -5521,6 +6091,8 @@ are limited. + + result @@ -5542,6 +6114,8 @@ are limited. + + DateSub @@ -5568,6 +6142,8 @@ are limited. + + days @@ -5589,6 +6165,8 @@ are limited. + + result @@ -5610,6 +6188,8 @@ are limited. + + DayOfMonth @@ -5636,6 +6216,8 @@ are limited. + + result @@ -5657,6 +6239,8 @@ are limited. + + DayOfWeek @@ -5683,6 +6267,8 @@ are limited. + + result @@ -5704,6 +6290,8 @@ are limited. + + DayOfYear @@ -5730,6 +6318,8 @@ are limited. + + result @@ -5751,6 +6341,8 @@ are limited. + + DenseRank @@ -5777,6 +6369,8 @@ are limited. NS NS NS +NS +NS result @@ -5798,6 +6392,8 @@ are limited. + + Divide @@ -5824,6 +6420,8 @@ are limited. + + rhs @@ -5845,6 +6443,8 @@ are limited. + + result @@ -5866,6 +6466,8 @@ are limited. + + Expression @@ -5892,6 +6494,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH DynamicPruningExpression @@ -5918,6 +6522,8 @@ are limited. + + result @@ -5939,6 +6545,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
S +S +S ElementAt @@ -5961,8 +6569,10 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
If it's map, only primitive key types are supported.;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
If it's map, only primitive key types are supported.;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -5986,6 +6596,8 @@ are limited. NS NS NS +NS +NS result @@ -6003,9 +6615,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -6033,6 +6647,8 @@ are limited. + + search @@ -6054,6 +6670,8 @@ are limited. + + result @@ -6075,6 +6693,8 @@ are limited. + + EqualNullSafe @@ -6101,6 +6721,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
NS + + rhs @@ -6122,6 +6744,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
NS + + result @@ -6143,6 +6767,8 @@ are limited. + + EqualTo @@ -6169,6 +6795,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
NS + + rhs @@ -6190,6 +6818,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
NS + + result @@ -6211,6 +6841,8 @@ are limited. + + AST @@ -6233,6 +6865,8 @@ are limited. NS NS + + rhs @@ -6254,6 +6888,8 @@ are limited. NS NS + + result @@ -6275,6 +6911,8 @@ are limited. + + Expression @@ -6301,6 +6939,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH Exp @@ -6327,6 +6967,8 @@ are limited. + + result @@ -6348,6 +6990,8 @@ are limited. + + AST @@ -6370,6 +7014,8 @@ are limited. + + result @@ -6391,6 +7037,8 @@ are limited. + + Explode @@ -6413,8 +7061,10 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -6434,7 +7084,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -6464,6 +7116,8 @@ are limited. + + result @@ -6485,6 +7139,8 @@ are limited. + + AST @@ -6507,6 +7163,8 @@ are limited. + + result @@ -6528,6 +7186,8 @@ are limited. + + Flatten @@ -6554,6 +7214,8 @@ are limited. + + result @@ -6575,6 +7237,8 @@ are limited. + + Floor @@ -6601,6 +7265,8 @@ are limited. + + result @@ -6622,6 +7288,8 @@ are limited. + + FormatNumber @@ -6648,6 +7316,8 @@ are limited. + + d @@ -6669,6 +7339,8 @@ are limited. + + result @@ -6690,6 +7362,8 @@ are limited. + + Expression @@ -6716,6 +7390,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH FromUTCTimestamp @@ -6742,6 +7418,8 @@ are limited. + + timezone @@ -6763,6 +7441,8 @@ are limited. + + result @@ -6784,6 +7464,8 @@ are limited. + + FromUnixTime @@ -6810,6 +7492,8 @@ are limited. + + format @@ -6831,6 +7515,8 @@ are limited. + + result @@ -6852,6 +7538,8 @@ are limited. + + GetArrayItem @@ -6874,7 +7562,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -6899,6 +7589,8 @@ are limited. + + result @@ -6916,9 +7608,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -6942,7 +7636,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -6963,7 +7659,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -6993,6 +7691,8 @@ are limited. + + path @@ -7014,6 +7714,8 @@ are limited. + + result @@ -7035,6 +7737,8 @@ are limited. + + GetMapValue @@ -7058,7 +7762,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -7082,6 +7788,8 @@ are limited. NS NS NS +NS +NS result @@ -7099,9 +7807,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -7129,6 +7839,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH GetStructField @@ -7153,7 +7865,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -7172,9 +7886,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -7202,6 +7918,8 @@ are limited. + + format @@ -7223,6 +7941,8 @@ are limited. + + result @@ -7244,6 +7964,8 @@ are limited. + + GreaterThan @@ -7270,6 +7992,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
NS + + rhs @@ -7291,6 +8015,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
NS + + result @@ -7312,6 +8038,8 @@ are limited. + + AST @@ -7334,6 +8062,8 @@ are limited. NS NS + + rhs @@ -7355,6 +8085,8 @@ are limited. NS NS + + result @@ -7376,6 +8108,8 @@ are limited. + + GreaterThanOrEqual @@ -7402,6 +8136,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
NS + + rhs @@ -7423,6 +8159,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
NS + + result @@ -7444,6 +8182,8 @@ are limited. + + AST @@ -7466,6 +8206,8 @@ are limited. NS NS + + rhs @@ -7487,6 +8229,8 @@ are limited. NS NS + + result @@ -7508,6 +8252,8 @@ are limited. + + Expression @@ -7534,6 +8280,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH Greatest @@ -7560,6 +8308,8 @@ are limited. NS NS + + result @@ -7581,6 +8331,8 @@ are limited. NS NS + + HiveHash @@ -7607,6 +8359,8 @@ are limited. NS NS NS +NS +NS result @@ -7628,6 +8382,8 @@ are limited. + + Hour @@ -7654,6 +8410,8 @@ are limited. + + result @@ -7675,6 +8433,8 @@ are limited. + + Hypot @@ -7701,6 +8461,8 @@ are limited. + + rhs @@ -7722,6 +8484,8 @@ are limited. + + result @@ -7743,6 +8507,8 @@ are limited. + + If @@ -7769,6 +8535,8 @@ are limited. + + trueValue @@ -7786,9 +8554,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -7807,9 +8577,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -7828,9 +8600,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -7858,6 +8632,8 @@ are limited. NS NS + + list @@ -7879,6 +8655,8 @@ are limited. NS NS + + result @@ -7900,6 +8678,8 @@ are limited. + + Expression @@ -7926,6 +8706,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH InSet @@ -7952,6 +8734,8 @@ are limited. NS NS + + result @@ -7973,6 +8757,8 @@ are limited. + + InitCap @@ -7999,6 +8785,8 @@ are limited. + + result @@ -8020,6 +8808,8 @@ are limited. + + InputFileBlockLength @@ -8046,6 +8836,8 @@ are limited. + + InputFileBlockStart @@ -8072,6 +8864,8 @@ are limited. + + InputFileName @@ -8098,6 +8892,8 @@ are limited. + + IntegralDivide @@ -8124,6 +8920,8 @@ are limited. + + rhs @@ -8145,6 +8943,8 @@ are limited. + + result @@ -8166,6 +8966,8 @@ are limited. + + IsNaN @@ -8192,6 +8994,8 @@ are limited. + + result @@ -8213,6 +9017,8 @@ are limited. + + IsNotNull @@ -8235,9 +9041,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -8260,6 +9068,8 @@ are limited. + + IsNull @@ -8282,9 +9092,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -8307,6 +9119,8 @@ are limited. + + Expression @@ -8333,6 +9147,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH JsonToStructs @@ -8359,6 +9175,8 @@ are limited. + + result @@ -8377,8 +9195,10 @@ are limited. NS -PS
MAP only supports keys and values that are of STRING type;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, MAP, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, MAP, UDT
+PS
MAP only supports keys and values that are of STRING type;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+ + @@ -8406,6 +9226,8 @@ are limited. + + field @@ -8427,6 +9249,8 @@ are limited. + + result @@ -8448,6 +9272,8 @@ are limited. + + KnownFloatingPointNormalized @@ -8474,6 +9300,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
S +S +S result @@ -8495,6 +9323,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
S +S +S KnownNotNull @@ -8517,9 +9347,11 @@ are limited. NS S S -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -8538,9 +9370,11 @@ are limited. NS S S -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -8564,9 +9398,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
NS @@ -8589,6 +9425,8 @@ are limited. + + default @@ -8606,9 +9444,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
NS @@ -8627,9 +9467,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
NS @@ -8653,9 +9495,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -8674,9 +9518,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -8695,9 +9541,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -8725,6 +9573,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH LastDay @@ -8751,6 +9601,8 @@ are limited. + + result @@ -8772,6 +9624,8 @@ are limited. + + Lead @@ -8794,9 +9648,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
NS @@ -8819,6 +9675,8 @@ are limited. + + default @@ -8836,9 +9694,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
NS @@ -8857,9 +9717,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
NS @@ -8887,6 +9749,8 @@ are limited. NS NS + + result @@ -8908,6 +9772,8 @@ are limited. NS NS + + Length @@ -8934,6 +9800,8 @@ are limited. + + result @@ -8955,6 +9823,8 @@ are limited. + + LessThan @@ -8981,6 +9851,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
NS + + rhs @@ -9002,6 +9874,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
NS + + result @@ -9023,6 +9897,8 @@ are limited. + + AST @@ -9045,6 +9921,8 @@ are limited. NS NS + + rhs @@ -9066,6 +9944,8 @@ are limited. NS NS + + result @@ -9087,6 +9967,8 @@ are limited. + + Expression @@ -9113,6 +9995,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH LessThanOrEqual @@ -9139,6 +10023,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
NS + + rhs @@ -9160,6 +10046,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
NS + + result @@ -9181,6 +10069,8 @@ are limited. + + AST @@ -9203,6 +10093,8 @@ are limited. NS NS + + rhs @@ -9224,6 +10116,8 @@ are limited. NS NS + + result @@ -9245,6 +10139,8 @@ are limited. + + Like @@ -9271,6 +10167,8 @@ are limited. + + search @@ -9292,6 +10190,8 @@ are limited. + + result @@ -9313,6 +10213,8 @@ are limited. + + Literal @@ -9335,10 +10237,12 @@ are limited. S S S -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
NS +S +S AST @@ -9361,6 +10265,8 @@ are limited. NS NS NS +NS +NS Log @@ -9387,6 +10293,8 @@ are limited. + + result @@ -9408,6 +10316,8 @@ are limited. + + Log10 @@ -9434,6 +10344,8 @@ are limited. + + result @@ -9455,6 +10367,8 @@ are limited. + + Expression @@ -9481,6 +10395,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH Log1p @@ -9507,6 +10423,8 @@ are limited. + + result @@ -9528,6 +10446,8 @@ are limited. + + Log2 @@ -9554,6 +10474,8 @@ are limited. + + result @@ -9575,6 +10497,8 @@ are limited. + + Logarithm @@ -9601,6 +10525,8 @@ are limited. + + base @@ -9622,6 +10548,8 @@ are limited. + + result @@ -9643,6 +10571,8 @@ are limited. + + Lower @@ -9669,6 +10599,8 @@ are limited. + + result @@ -9690,6 +10622,8 @@ are limited. + + MakeDecimal @@ -9716,6 +10650,8 @@ are limited. + + result @@ -9737,6 +10673,8 @@ are limited. + + MapConcat @@ -9760,7 +10698,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -9781,7 +10721,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -9807,7 +10749,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -9827,7 +10771,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -9857,6 +10803,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH MapFilter @@ -9880,7 +10828,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -9904,6 +10854,8 @@ are limited. + + result @@ -9922,7 +10874,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -9948,7 +10902,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -9968,7 +10924,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -9995,7 +10953,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -10015,7 +10975,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -10045,6 +11007,8 @@ are limited. + + result @@ -10066,6 +11030,8 @@ are limited. + + MicrosToTimestamp @@ -10092,6 +11058,8 @@ are limited. + + result @@ -10113,6 +11081,8 @@ are limited. + + MillisToTimestamp @@ -10139,6 +11109,8 @@ are limited. + + result @@ -10160,6 +11132,8 @@ are limited. + + Minute @@ -10186,6 +11160,8 @@ are limited. + + result @@ -10207,6 +11183,8 @@ are limited. + + Expression @@ -10233,6 +11211,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH MonotonicallyIncreasingID @@ -10259,6 +11239,8 @@ are limited. + + Month @@ -10285,6 +11267,8 @@ are limited. + + result @@ -10306,6 +11290,8 @@ are limited. + + Multiply @@ -10332,6 +11318,8 @@ are limited. + + rhs @@ -10353,6 +11341,8 @@ are limited. + + result @@ -10374,6 +11364,8 @@ are limited. + + AST @@ -10396,6 +11388,8 @@ are limited. + + rhs @@ -10417,6 +11411,8 @@ are limited. + + result @@ -10438,6 +11434,8 @@ are limited. + + Murmur3Hash @@ -10460,9 +11458,11 @@ are limited. S NS NS -PS
Arrays of structs are not supported;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
+PS
Arrays of structs are not supported;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
NS @@ -10485,6 +11485,8 @@ are limited. + + NaNvl @@ -10511,6 +11513,8 @@ are limited. + + rhs @@ -10532,6 +11536,8 @@ are limited. + + result @@ -10553,6 +11559,8 @@ are limited. + + NamedLambdaVariable @@ -10575,9 +11583,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -10605,6 +11615,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH Not @@ -10631,6 +11643,8 @@ are limited. + + result @@ -10652,6 +11666,8 @@ are limited. + + AST @@ -10674,6 +11690,8 @@ are limited. + + result @@ -10695,6 +11713,8 @@ are limited. + + NthValue @@ -10717,9 +11737,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -10742,6 +11764,8 @@ are limited. + + result @@ -10759,9 +11783,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -10789,6 +11815,8 @@ are limited. + + result @@ -10810,6 +11838,8 @@ are limited. + + Or @@ -10836,6 +11866,8 @@ are limited. + + rhs @@ -10857,6 +11889,8 @@ are limited. + + result @@ -10878,6 +11912,8 @@ are limited. + + AST @@ -10900,6 +11936,8 @@ are limited. + + rhs @@ -10921,6 +11959,8 @@ are limited. + + result @@ -10942,6 +11982,8 @@ are limited. + + Expression @@ -10968,6 +12010,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH ParseUrl @@ -10994,6 +12038,8 @@ are limited. + + partToExtract @@ -11015,6 +12061,8 @@ are limited. + + key @@ -11036,6 +12084,8 @@ are limited. + + result @@ -11057,6 +12107,8 @@ are limited. + + PercentRank @@ -11083,6 +12135,8 @@ are limited. NS NS NS +NS +NS result @@ -11104,6 +12158,8 @@ are limited. + + Pmod @@ -11130,6 +12186,8 @@ are limited. + + rhs @@ -11151,6 +12209,8 @@ are limited. + + result @@ -11172,6 +12232,8 @@ are limited. + + PosExplode @@ -11194,8 +12256,10 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -11215,7 +12279,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -11245,6 +12311,8 @@ are limited. + + rhs @@ -11266,6 +12334,8 @@ are limited. + + result @@ -11287,6 +12357,8 @@ are limited. + + AST @@ -11309,6 +12381,8 @@ are limited. + + rhs @@ -11330,6 +12404,8 @@ are limited. + + result @@ -11351,6 +12427,8 @@ are limited. + + Expression @@ -11377,6 +12455,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH PreciseTimestampConversion @@ -11403,6 +12483,8 @@ are limited. + + result @@ -11424,6 +12506,8 @@ are limited. + + PromotePrecision @@ -11450,6 +12534,8 @@ are limited. + + result @@ -11471,6 +12557,8 @@ are limited. + + PythonUDF @@ -11493,9 +12581,11 @@ are limited. NS NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT
NS @@ -11518,6 +12608,8 @@ are limited. NS PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, MAP
+ + reduction @@ -11536,9 +12628,11 @@ are limited. NS NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT
NS @@ -11561,6 +12655,8 @@ are limited. NS PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, MAP
+ + window @@ -11579,9 +12675,11 @@ are limited. NS NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT
NS @@ -11604,6 +12702,8 @@ are limited. NS PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, MAP
+ + project @@ -11622,9 +12722,11 @@ are limited. NS NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT
NS @@ -11647,6 +12749,8 @@ are limited. NS PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, MAP
+ + Quarter @@ -11673,6 +12777,8 @@ are limited. + + result @@ -11694,10 +12800,12 @@ are limited. + + RLike -`rlike` +`regexp_like`, `regexp`, `rlike` Regular expression version of Like None project @@ -11720,6 +12828,8 @@ are limited. + + regexp @@ -11741,6 +12851,8 @@ are limited. + + result @@ -11762,6 +12874,8 @@ are limited. + + Expression @@ -11788,6 +12902,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH RaiseError @@ -11814,6 +12930,8 @@ are limited. + + result @@ -11835,6 +12953,8 @@ are limited. + + Rand @@ -11861,6 +12981,8 @@ are limited. + + result @@ -11882,6 +13004,8 @@ are limited. + + Rank @@ -11908,6 +13032,8 @@ are limited. NS NS NS +NS +NS result @@ -11929,6 +13055,8 @@ are limited. + + RegExpExtract @@ -11955,6 +13083,8 @@ are limited. + + regexp @@ -11976,6 +13106,8 @@ are limited. + + idx @@ -11997,6 +13129,8 @@ are limited. + + result @@ -12018,6 +13152,8 @@ are limited. + + RegExpExtractAll @@ -12044,6 +13180,8 @@ are limited. + + regexp @@ -12065,6 +13203,8 @@ are limited. + + idx @@ -12086,6 +13226,8 @@ are limited. + + result @@ -12107,6 +13249,8 @@ are limited. + + RegExpReplace @@ -12133,6 +13277,8 @@ are limited. + + result @@ -12154,6 +13300,8 @@ are limited. + + pos @@ -12175,6 +13323,8 @@ are limited. + + str @@ -12196,6 +13346,8 @@ are limited. + + rep @@ -12217,6 +13369,8 @@ are limited. + + Expression @@ -12243,6 +13397,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH Remainder @@ -12269,6 +13425,8 @@ are limited. + + rhs @@ -12290,6 +13448,8 @@ are limited. + + result @@ -12311,6 +13471,8 @@ are limited. + + ReplicateRows @@ -12333,9 +13495,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
NS @@ -12354,7 +13518,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+ + @@ -12384,6 +13550,8 @@ are limited. + + result @@ -12405,6 +13573,8 @@ are limited. + + Rint @@ -12431,6 +13601,8 @@ are limited. + + result @@ -12452,6 +13624,8 @@ are limited. + + AST @@ -12474,6 +13648,8 @@ are limited. + + result @@ -12495,6 +13671,8 @@ are limited. + + Round @@ -12521,6 +13699,8 @@ are limited. + + scale @@ -12542,6 +13722,8 @@ are limited. + + result @@ -12563,6 +13745,8 @@ are limited. + + RowNumber @@ -12589,6 +13773,8 @@ are limited. + + Expression @@ -12615,6 +13801,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH ScalaUDF @@ -12637,9 +13825,11 @@ are limited. S S S -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -12658,9 +13848,11 @@ are limited. S S S -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -12688,6 +13880,8 @@ are limited. + + result @@ -12709,6 +13903,8 @@ are limited. + + SecondsToTimestamp @@ -12735,6 +13931,8 @@ are limited. + + result @@ -12756,6 +13954,8 @@ are limited. + + Sequence @@ -12782,6 +13982,8 @@ are limited. + + stop @@ -12803,6 +14005,8 @@ are limited. + + step @@ -12824,6 +14028,8 @@ are limited. + + result @@ -12845,6 +14051,8 @@ are limited. + + ShiftLeft @@ -12871,6 +14079,8 @@ are limited. + + amount @@ -12892,6 +14102,8 @@ are limited. + + result @@ -12913,6 +14125,8 @@ are limited. + + ShiftRight @@ -12939,6 +14153,8 @@ are limited. + + amount @@ -12960,6 +14176,8 @@ are limited. + + result @@ -12981,6 +14199,8 @@ are limited. + + Expression @@ -13007,6 +14227,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH ShiftRightUnsigned @@ -13033,6 +14255,8 @@ are limited. + + amount @@ -13054,6 +14278,8 @@ are limited. + + result @@ -13075,6 +14301,8 @@ are limited. + + Signum @@ -13089,7 +14317,9 @@ are limited. -S +S + + @@ -13122,6 +14352,8 @@ are limited. + + Sin @@ -13148,6 +14380,8 @@ are limited. + + result @@ -13169,6 +14403,8 @@ are limited. + + AST @@ -13191,6 +14427,8 @@ are limited. + + result @@ -13212,6 +14450,8 @@ are limited. + + Sinh @@ -13238,6 +14478,8 @@ are limited. + + result @@ -13259,6 +14501,8 @@ are limited. + + AST @@ -13281,6 +14525,8 @@ are limited. + + result @@ -13302,6 +14548,8 @@ are limited. + + Size @@ -13324,8 +14572,10 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -13349,6 +14599,8 @@ are limited. + + Expression @@ -13375,6 +14627,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH SortArray @@ -13397,7 +14651,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, UDT, DAYTIME, YEARMONTH
+ + @@ -13422,6 +14678,8 @@ are limited. + + result @@ -13439,7 +14697,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, UDT, DAYTIME, YEARMONTH
+ + @@ -13469,6 +14729,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
NS + + result @@ -13490,6 +14752,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
NS + + SparkPartitionID @@ -13516,6 +14780,8 @@ are limited. + + SpecifiedWindowFrame @@ -13542,6 +14808,8 @@ are limited. +S +NS upper @@ -13563,6 +14831,8 @@ are limited. +S +NS result @@ -13584,6 +14854,8 @@ are limited. +S +NS Sqrt @@ -13610,6 +14882,8 @@ are limited. + + result @@ -13631,6 +14905,8 @@ are limited. + + AST @@ -13653,6 +14929,8 @@ are limited. + + result @@ -13674,6 +14952,8 @@ are limited. + + Stack @@ -13700,6 +14980,8 @@ are limited. + + expr @@ -13717,9 +14999,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -13738,7 +15022,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -13768,6 +15054,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH StartsWith @@ -13794,6 +15082,8 @@ are limited. + + search @@ -13815,6 +15105,8 @@ are limited. + + result @@ -13836,6 +15128,8 @@ are limited. + + StringInstr @@ -13862,6 +15156,8 @@ are limited. + + substr @@ -13883,6 +15179,8 @@ are limited. + + result @@ -13904,6 +15202,8 @@ are limited. + + StringLPad @@ -13930,6 +15230,8 @@ are limited. + + len @@ -13951,6 +15253,8 @@ are limited. + + pad @@ -13972,6 +15276,8 @@ are limited. + + result @@ -13993,6 +15299,8 @@ are limited. + + StringLocate @@ -14019,6 +15327,8 @@ are limited. + + str @@ -14040,6 +15350,8 @@ are limited. + + start @@ -14061,6 +15373,8 @@ are limited. + + result @@ -14082,6 +15396,8 @@ are limited. + + StringRPad @@ -14108,6 +15424,8 @@ are limited. + + len @@ -14129,6 +15447,8 @@ are limited. + + pad @@ -14150,6 +15470,8 @@ are limited. + + result @@ -14171,6 +15493,8 @@ are limited. + + Expression @@ -14197,6 +15521,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH StringRepeat @@ -14223,6 +15549,8 @@ are limited. + + repeatTimes @@ -14244,6 +15572,8 @@ are limited. + + result @@ -14265,6 +15595,8 @@ are limited. + + StringReplace @@ -14291,6 +15623,8 @@ are limited. + + search @@ -14312,6 +15646,8 @@ are limited. + + replace @@ -14333,6 +15669,8 @@ are limited. + + result @@ -14354,6 +15692,8 @@ are limited. + + StringSplit @@ -14380,6 +15720,8 @@ are limited. + + regexp @@ -14401,6 +15743,8 @@ are limited. + + limit @@ -14422,6 +15766,8 @@ are limited. + + result @@ -14443,6 +15789,8 @@ are limited. + + StringToMap @@ -14469,6 +15817,8 @@ are limited. + + pairDelim @@ -14490,6 +15840,8 @@ are limited. + + keyValueDelim @@ -14511,6 +15863,8 @@ are limited. + + result @@ -14532,6 +15886,8 @@ are limited. S + + Expression @@ -14558,6 +15914,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH StringTranslate @@ -14584,6 +15942,8 @@ are limited. + + from @@ -14605,6 +15965,8 @@ are limited. + + to @@ -14626,6 +15988,8 @@ are limited. + + result @@ -14647,6 +16011,8 @@ are limited. + + StringTrim @@ -14673,6 +16039,8 @@ are limited. + + trimStr @@ -14694,6 +16062,8 @@ are limited. + + result @@ -14715,6 +16085,8 @@ are limited. + + StringTrimLeft @@ -14741,6 +16113,8 @@ are limited. + + trimStr @@ -14762,6 +16136,8 @@ are limited. + + result @@ -14783,6 +16159,8 @@ are limited. + + StringTrimRight @@ -14809,6 +16187,8 @@ are limited. + + trimStr @@ -14830,6 +16210,8 @@ are limited. + + result @@ -14851,6 +16233,8 @@ are limited. + + StructsToJson @@ -14877,6 +16261,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
+ + result @@ -14898,6 +16284,8 @@ are limited. + + Expression @@ -14924,6 +16312,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH Substring @@ -14950,6 +16340,8 @@ are limited. + + pos @@ -14971,6 +16363,8 @@ are limited. + + len @@ -14992,6 +16386,8 @@ are limited. + + result @@ -15013,6 +16409,8 @@ are limited. + + SubstringIndex @@ -15039,6 +16437,8 @@ are limited. + + delim @@ -15060,6 +16460,8 @@ are limited. + + count @@ -15081,6 +16483,8 @@ are limited. + + result @@ -15102,6 +16506,8 @@ are limited. + + Subtract @@ -15128,6 +16534,8 @@ are limited. +NS +NS rhs @@ -15149,6 +16557,8 @@ are limited. +NS +NS result @@ -15170,6 +16580,8 @@ are limited. +NS +NS AST @@ -15192,6 +16604,8 @@ are limited. +NS +NS rhs @@ -15213,6 +16627,8 @@ are limited. +NS +NS result @@ -15234,6 +16650,8 @@ are limited. +NS +NS Tan @@ -15260,6 +16678,8 @@ are limited. + + result @@ -15281,6 +16701,8 @@ are limited. + + AST @@ -15303,6 +16725,8 @@ are limited. + + result @@ -15324,6 +16748,8 @@ are limited. + + Expression @@ -15350,6 +16776,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH Tanh @@ -15376,6 +16804,8 @@ are limited. + + result @@ -15397,6 +16827,8 @@ are limited. + + AST @@ -15419,6 +16851,8 @@ are limited. + + result @@ -15440,6 +16874,8 @@ are limited. + + TimeAdd @@ -15466,6 +16902,8 @@ are limited. + + interval @@ -15482,10 +16920,12 @@ are limited. -PS
month intervals are not supported;
Literal value only
+PS
Literal value only
+ +PS
Literal value only
@@ -15508,6 +16948,8 @@ are limited. + + ToDegrees @@ -15534,6 +16976,8 @@ are limited. + + result @@ -15555,6 +16999,8 @@ are limited. + + ToRadians @@ -15581,6 +17027,8 @@ are limited. + + result @@ -15602,6 +17050,8 @@ are limited. + + ToUTCTimestamp @@ -15628,6 +17078,8 @@ are limited. + + timezone @@ -15649,6 +17101,8 @@ are limited. + + result @@ -15670,6 +17124,8 @@ are limited. + + ToUnixTimestamp @@ -15696,6 +17152,8 @@ are limited. + + format @@ -15717,6 +17175,8 @@ are limited. + + result @@ -15738,6 +17198,8 @@ are limited. + + Expression @@ -15764,6 +17226,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH TransformKeys @@ -15787,7 +17251,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -15811,6 +17277,8 @@ are limited. NS NS +NS +NS result @@ -15829,7 +17297,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -15855,7 +17325,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -15875,9 +17347,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -15897,7 +17371,9 @@ are limited. -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -15926,6 +17402,8 @@ are limited. +NS +NS result @@ -15947,6 +17425,8 @@ are limited. +NS +NS AST @@ -15969,6 +17449,8 @@ are limited. +NS +NS result @@ -15990,6 +17472,8 @@ are limited. +NS +NS UnaryPositive @@ -16016,6 +17500,8 @@ are limited. +NS +NS result @@ -16037,6 +17523,8 @@ are limited. +NS +NS AST @@ -16059,6 +17547,8 @@ are limited. +NS +NS result @@ -16080,6 +17570,8 @@ are limited. +NS +NS UnboundedFollowing$ @@ -16106,6 +17598,8 @@ are limited. + + Expression @@ -16132,6 +17626,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH UnboundedPreceding$ @@ -16158,6 +17654,8 @@ are limited. + + UnixTimestamp @@ -16184,6 +17682,8 @@ are limited. + + format @@ -16205,6 +17705,8 @@ are limited. + + result @@ -16226,6 +17728,8 @@ are limited. + + UnscaledValue @@ -16252,6 +17756,8 @@ are limited. + + result @@ -16273,6 +17779,8 @@ are limited. + + Upper @@ -16299,6 +17807,8 @@ are limited. + + result @@ -16320,6 +17830,8 @@ are limited. + + WeekDay @@ -16346,6 +17858,8 @@ are limited. + + result @@ -16367,6 +17881,8 @@ are limited. + + WindowExpression @@ -16393,6 +17909,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
S +S +S windowSpec @@ -16414,6 +17932,8 @@ are limited. +S +NS result @@ -16435,6 +17955,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
S +S +S WindowSpecDefinition @@ -16459,7 +17981,9 @@ are limited. NS NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -16480,7 +18004,9 @@ are limited. NS NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -16501,7 +18027,9 @@ are limited. NS NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -16529,6 +18057,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH XxHash64 @@ -16542,8 +18072,8 @@ are limited. S S S -NS -NS +S +S S PS
UTC is only supported TZ for TIMESTAMP
S @@ -16555,6 +18085,8 @@ are limited. NS NS NS +NS +NS result @@ -16576,6 +18108,8 @@ are limited. + + Year @@ -16602,6 +18136,8 @@ are limited. + + result @@ -16623,6 +18159,8 @@ are limited. + + AggregateExpression @@ -16649,6 +18187,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
S +S +S filter @@ -16670,6 +18210,8 @@ are limited. + + result @@ -16691,6 +18233,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
S +S +S reduction @@ -16713,6 +18257,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
S +S +S filter @@ -16734,6 +18280,8 @@ are limited. + + result @@ -16755,6 +18303,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
S +S +S window @@ -16777,6 +18327,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
S +S +S filter @@ -16798,6 +18350,8 @@ are limited. + + result @@ -16819,6 +18373,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
S +S +S ApproximatePercentile @@ -16845,6 +18401,8 @@ are limited. + + percentage @@ -16866,6 +18424,8 @@ are limited. + + accuracy @@ -16887,6 +18447,8 @@ are limited. + + result @@ -16908,6 +18470,8 @@ are limited. + + reduction @@ -16930,6 +18494,8 @@ are limited. + + percentage @@ -16951,6 +18517,8 @@ are limited. + + accuracy @@ -16972,6 +18540,8 @@ are limited. + + result @@ -16993,6 +18563,8 @@ are limited. + + Expression @@ -17019,6 +18591,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH Average @@ -17038,13 +18612,15 @@ are limited. S +S +NS - - +NS +NS result @@ -17066,6 +18642,8 @@ are limited. + + reduction @@ -17081,13 +18659,15 @@ are limited. S +S +NS - - +NS +NS result @@ -17109,6 +18689,8 @@ are limited. + + window @@ -17124,13 +18706,15 @@ are limited. S +S +NS - - +NS +NS result @@ -17152,6 +18736,8 @@ are limited. + + CollectList @@ -17174,9 +18760,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -17195,7 +18783,9 @@ are limited. -PS
window operations are disabled by default due to extreme memory usage;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
window operations are disabled by default due to extreme memory usage;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -17217,9 +18807,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -17238,7 +18830,9 @@ are limited. -PS
window operations are disabled by default due to extreme memory usage;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
window operations are disabled by default due to extreme memory usage;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -17260,9 +18854,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -17281,7 +18877,9 @@ are limited. -PS
window operations are disabled by default due to extreme memory usage;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
window operations are disabled by default due to extreme memory usage;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+ + @@ -17307,9 +18905,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
NS @@ -17328,7 +18928,9 @@ are limited. -PS
window operations are disabled by default due to extreme memory usage;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
+PS
window operations are disabled by default due to extreme memory usage;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+ + @@ -17350,9 +18952,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
NS @@ -17371,7 +18975,9 @@ are limited. -PS
window operations are disabled by default due to extreme memory usage;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
+PS
window operations are disabled by default due to extreme memory usage;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+ + @@ -17393,9 +18999,11 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
NS @@ -17414,7 +19022,9 @@ are limited. -PS
window operations are disabled by default due to extreme memory usage;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
+PS
window operations are disabled by default due to extreme memory usage;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+ + @@ -17444,6 +19054,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH Count @@ -17470,6 +19082,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
S +S +S result @@ -17491,6 +19105,8 @@ are limited. + + reduction @@ -17513,6 +19129,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
S +S +S result @@ -17534,6 +19152,8 @@ are limited. + + window @@ -17556,6 +19176,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP
PS
UTC is only supported TZ for child TIMESTAMP
S +S +S result @@ -17577,6 +19199,8 @@ are limited. + + First @@ -17599,9 +19223,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -17620,9 +19246,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -17642,9 +19270,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -17663,9 +19293,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -17685,9 +19317,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -17706,9 +19340,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -17732,9 +19368,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -17753,9 +19391,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -17775,9 +19415,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -17796,9 +19438,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -17818,9 +19462,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -17839,9 +19485,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -17869,6 +19517,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH Max @@ -17895,6 +19545,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
NS + + result @@ -17916,6 +19568,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
NS + + reduction @@ -17938,6 +19592,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
NS + + result @@ -17959,6 +19615,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
NS + + window @@ -17981,6 +19639,8 @@ are limited. NS NS + + result @@ -18002,6 +19662,8 @@ are limited. NS NS + + Min @@ -18028,6 +19690,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
NS + + result @@ -18049,6 +19713,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
NS + + reduction @@ -18071,6 +19737,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
NS + + result @@ -18092,6 +19760,8 @@ are limited. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
NS + + window @@ -18114,6 +19784,8 @@ are limited. NS NS + + result @@ -18135,6 +19807,8 @@ are limited. NS NS + + Percentile @@ -18161,6 +19835,8 @@ are limited. + + percentage @@ -18182,6 +19858,8 @@ are limited. + + frequency @@ -18203,6 +19881,8 @@ are limited. + + result @@ -18224,6 +19904,8 @@ are limited. + + reduction @@ -18246,6 +19928,8 @@ are limited. + + percentage @@ -18267,6 +19951,8 @@ are limited. + + frequency @@ -18288,6 +19974,8 @@ are limited. + + result @@ -18309,6 +19997,8 @@ are limited. + + Expression @@ -18335,6 +20025,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH PivotFirst @@ -18361,6 +20053,8 @@ are limited. NS NS NS +NS +NS valueColumn @@ -18382,6 +20076,8 @@ are limited. NS NS NS +NS +NS result @@ -18399,7 +20095,9 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+NS +NS NS NS NS @@ -18425,6 +20123,8 @@ are limited. NS NS NS +NS +NS valueColumn @@ -18446,6 +20146,8 @@ are limited. NS NS NS +NS +NS result @@ -18463,7 +20165,9 @@ are limited. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, ARRAY, MAP, STRUCT, UDT, DAYTIME, YEARMONTH
+NS +NS NS NS NS @@ -18493,6 +20197,8 @@ are limited. + + result @@ -18514,6 +20220,8 @@ are limited. + + aggregation @@ -18536,6 +20244,8 @@ are limited. + + result @@ -18557,6 +20267,8 @@ are limited. + + window @@ -18579,6 +20291,8 @@ are limited. + + result @@ -18600,6 +20314,8 @@ are limited. + + StddevSamp @@ -18626,6 +20342,8 @@ are limited. + + result @@ -18647,6 +20365,8 @@ are limited. + + reduction @@ -18669,6 +20389,8 @@ are limited. + + result @@ -18690,6 +20412,8 @@ are limited. + + window @@ -18712,6 +20436,8 @@ are limited. + + result @@ -18733,6 +20459,8 @@ are limited. + + Expression @@ -18759,6 +20487,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH Sum @@ -18785,6 +20515,8 @@ are limited. + + result @@ -18806,6 +20538,8 @@ are limited. + + reduction @@ -18828,6 +20562,8 @@ are limited. + + result @@ -18849,6 +20585,8 @@ are limited. + + window @@ -18871,6 +20609,8 @@ are limited. + + result @@ -18892,6 +20632,8 @@ are limited. + + VariancePop @@ -18918,6 +20660,8 @@ are limited. + + result @@ -18939,6 +20683,8 @@ are limited. + + aggregation @@ -18961,6 +20707,8 @@ are limited. + + result @@ -18982,6 +20730,8 @@ are limited. + + window @@ -19004,6 +20754,8 @@ are limited. + + result @@ -19025,6 +20777,8 @@ are limited. + + VarianceSamp @@ -19051,6 +20805,8 @@ are limited. + + result @@ -19072,6 +20828,8 @@ are limited. + + aggregation @@ -19094,6 +20852,8 @@ are limited. + + result @@ -19115,6 +20875,8 @@ are limited. + + window @@ -19137,6 +20899,8 @@ are limited. + + result @@ -19158,6 +20922,8 @@ are limited. + + Expression @@ -19184,6 +20950,8 @@ are limited. MAP STRUCT UDT +DAYTIME +YEARMONTH NormalizeNaNAndZero @@ -19210,6 +20978,8 @@ are limited. + + result @@ -19231,6 +21001,8 @@ are limited. + + ScalarSubquery @@ -19253,9 +21025,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -19279,9 +21053,11 @@ are limited. S S S -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -19300,9 +21076,11 @@ are limited. S S S -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -19326,9 +21104,11 @@ are limited. S S S -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -19347,9 +21127,11 @@ are limited. S S S -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT, DAYTIME, YEARMONTH
+NS +NS NS @@ -19371,7 +21153,7 @@ and the accelerator produces the same result. ### `AnsiCast` - + @@ -19391,8 +21173,10 @@ and the accelerator produces the same result. + + - + @@ -19404,7 +21188,9 @@ and the accelerator produces the same result. - + + + @@ -19433,6 +21219,8 @@ and the accelerator produces the same result. + + @@ -19454,6 +21242,8 @@ and the accelerator produces the same result. + + @@ -19475,6 +21265,8 @@ and the accelerator produces the same result. + + @@ -19496,6 +21288,8 @@ and the accelerator produces the same result. + + @@ -19517,6 +21311,8 @@ and the accelerator produces the same result. + + @@ -19538,6 +21334,8 @@ and the accelerator produces the same result. + + @@ -19559,6 +21357,8 @@ and the accelerator produces the same result. + + @@ -19580,6 +21380,8 @@ and the accelerator produces the same result. + + @@ -19590,8 +21392,8 @@ and the accelerator produces the same result. - - + + @@ -19601,6 +21403,8 @@ and the accelerator produces the same result. + + @@ -19622,6 +21426,8 @@ and the accelerator produces the same result. + + @@ -19643,6 +21449,8 @@ and the accelerator produces the same result. + + @@ -19664,6 +21472,8 @@ and the accelerator produces the same result. + + @@ -19685,6 +21495,8 @@ and the accelerator produces the same result. + + @@ -19702,7 +21514,9 @@ and the accelerator produces the same result. - + + + @@ -19724,7 +21538,9 @@ and the accelerator produces the same result. - + + + @@ -19746,7 +21562,9 @@ and the accelerator produces the same result. - + + + @@ -19769,13 +21587,61 @@ and the accelerator produces the same result. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
TO
TO
BOOLEAN BYTEMAP STRUCT UDTDAYTIMEYEARMONTH
FROM
FROM BOOLEAN S S SSNS
SHORT
INT
LONG
FLOAT
DOUBLE
DATE
TIMESTAMP
STRINGS S SSPS
UTC is only supported TZ for TIMESTAMP
PS
Only 4 digit year parsing is available. To enable parsing anyway, set spark.rapids.sql.hasExtendedYearValues to false.
PS
Only 4 digit year parsing is available. To enable parsing anyway, set spark.rapids.sql.hasExtendedYearValues to false (see the configuration sketch after this table);
UTC is only supported TZ for TIMESTAMP
S S
DECIMAL
NULLNS NS NSNSNS
BINARY
CALENDAR
ARRAY PS
The array's child type must also support being cast to the desired child type;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, MAP, UDT
PS
The array's child type must also support being cast to the desired child type;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
PS
the map's key and value must also support being cast to the desired child types;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
PS
the map's key and value must also support being cast to the desired child types;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
PS
the struct's children must also support being cast to the desired child type(s);
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, MAP, UDT
PS
the struct's children must also support being cast to the desired child type(s);
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
NS
DAYTIME NS NS
YEARMONTH NS NS
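The `STRING` rows of the table above point at two session-level settings: the 4-digit-year restriction controlled by `spark.rapids.sql.hasExtendedYearValues`, and the UTC-only time zone restriction on TIMESTAMP casts. A minimal configuration sketch follows; it is illustrative only and not part of this patch, and the app name, session, and sample data are assumptions.

```scala
// Illustrative sketch (not part of this patch): configure a session so the
// string-to-DATE/TIMESTAMP casts described above are eligible for the GPU.
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .appName("rapids-cast-config-sketch") // hypothetical app name
  .config("spark.plugins", "com.nvidia.spark.SQLPlugin")
  // The notes above say UTC is the only supported time zone for TIMESTAMP casts.
  .config("spark.sql.session.timeZone", "UTC")
  // Per the note above: only 4 digit year parsing is available; declaring that the
  // data has no extended year values lets the cast run on the GPU anyway.
  .config("spark.rapids.sql.hasExtendedYearValues", "false")
  .getOrCreate()

import spark.implicits._

// A string-to-DATE cast that the table marks as PS (supported with restrictions).
val casted = Seq("2024-07-09").toDF("s").selectExpr("CAST(s AS DATE) AS d")
casted.show()
```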
### `Cast` - + @@ -19795,8 +21661,10 @@ and the accelerator produces the same result. + + - + @@ -19816,6 +21684,8 @@ and the accelerator produces the same result. + + @@ -19837,6 +21707,8 @@ and the accelerator produces the same result. + + @@ -19858,6 +21730,8 @@ and the accelerator produces the same result. + + @@ -19879,6 +21753,8 @@ and the accelerator produces the same result. + + @@ -19900,6 +21776,8 @@ and the accelerator produces the same result. + + @@ -19921,6 +21799,8 @@ and the accelerator produces the same result. + + @@ -19942,6 +21822,8 @@ and the accelerator produces the same result. + + @@ -19963,6 +21845,8 @@ and the accelerator produces the same result. + + @@ -19984,6 +21868,8 @@ and the accelerator produces the same result. + + @@ -20005,6 +21891,8 @@ and the accelerator produces the same result. + + @@ -20026,6 +21914,8 @@ and the accelerator produces the same result. + + @@ -20047,6 +21937,8 @@ and the accelerator produces the same result. + + @@ -20068,6 +21960,8 @@ and the accelerator produces the same result. + + @@ -20089,6 +21983,8 @@ and the accelerator produces the same result. + + @@ -20106,7 +22002,9 @@ and the accelerator produces the same result. - + + + @@ -20128,7 +22026,9 @@ and the accelerator produces the same result. - + + + @@ -20150,7 +22050,9 @@ and the accelerator produces the same result. - + + + @@ -20173,6 +22075,54 @@ and the accelerator produces the same result. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
TO
TO
BOOLEAN BYTEMAP STRUCT UDTDAYTIMEYEARMONTH
FROM
FROM BOOLEAN S S
BYTE
SHORT
INT
LONG
FLOAT
DOUBLE
DATE
TIMESTAMP
STRING
DECIMAL
NULLNS NS NSNSNS
BINARY
CALENDAR
ARRAY PS
The array's child type must also support being cast to the desired child type(s);
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
PS
The array's child type must also support being cast to the desired child type(s) (see the sketch after this table);
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
PS
the map's key and value must also support being cast to the desired child types;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
PS
the map's key and value must also support being cast to the desired child types;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
PS
the struct's children must also support being cast to the desired child type(s);
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
PS
the struct's children must also support being cast to the desired child type(s);
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
NS
DAYTIME NS NS
YEARMONTH NS NS
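As the `ARRAY`, `MAP`, and `STRUCT` rows above note, a nested cast stays on the GPU only when every child cast is itself supported, and DAYTIME/YEARMONTH children are marked NS. A small sketch of the difference follows; it is illustrative only (plugin configuration as in the earlier sketch), and the data and column names are assumptions.

```scala
// Illustrative sketch (not part of this patch): nested casts follow the child-type
// rules in the table above.
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

val spark = SparkSession.builder().getOrCreate() // plugin configs as in the earlier sketch
import spark.implicits._

val df = Seq(Seq("1", "2", "3")).toDF("arr")

// array<string> -> array<int>: the STRING -> INT element cast is supported, so this
// projection is eligible for the GPU.
val supportedCast = df.select(col("arr").cast("array<int>").as("ints"))
supportedCast.show()

// array<string> -> array<interval day to second>: DAYTIME children are marked NS
// above, so this cast is expected to fall back to the CPU; explain() only plans it.
val fallbackCast = df.select(col("arr").cast("array<interval day to second>").as("ivs"))
fallbackCast.explain()
```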
@@ -20209,6 +22159,8 @@ as `a` don't show up in the table. They are controlled by the rules for MAP STRUCT UDT +DAYTIME +YEARMONTH HashPartitioning @@ -20229,9 +22181,11 @@ as `a` don't show up in the table. They are controlled by the rules for S NS NS -PS
Arrays of structs are not supported;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
+PS
Arrays of structs are not supported;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, MAP, UDT
NS @@ -20257,6 +22211,8 @@ as `a` don't show up in the table. They are controlled by the rules for PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, ARRAY, UDT
NS + + RoundRobinPartitioning @@ -20281,6 +22237,8 @@ as `a` don't show up in the table. They are controlled by the rules for + + SinglePartition$ @@ -20305,6 +22263,8 @@ as `a` don't show up in the table. They are controlled by the rules for + + @@ -20335,6 +22295,8 @@ dates or timestamps, or for a lack of type coercion support. MAP STRUCT UDT +DAYTIME +YEARMONTH Avro @@ -20357,6 +22319,8 @@ dates or timestamps, or for a lack of type coercion support. NS NS NS + + Write @@ -20378,6 +22342,8 @@ dates or timestamps, or for a lack of type coercion support. NS NS NS + + CSV @@ -20400,6 +22366,8 @@ dates or timestamps, or for a lack of type coercion support. + + Write @@ -20421,6 +22389,8 @@ dates or timestamps, or for a lack of type coercion support. + + Delta @@ -20443,6 +22413,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
NS + + Write @@ -20464,6 +22436,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
NS + + HiveText @@ -20486,6 +22460,8 @@ dates or timestamps, or for a lack of type coercion support. NS NS NS +NS +NS Write @@ -20507,6 +22483,8 @@ dates or timestamps, or for a lack of type coercion support. NS NS NS +NS +NS Iceberg @@ -20529,6 +22507,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
NS + + Write @@ -20550,6 +22530,8 @@ dates or timestamps, or for a lack of type coercion support. NS NS NS + + JSON @@ -20572,6 +22554,8 @@ dates or timestamps, or for a lack of type coercion support. NS PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, MAP, UDT
NS + + Write @@ -20593,6 +22577,8 @@ dates or timestamps, or for a lack of type coercion support. NS NS NS + + ORC @@ -20615,6 +22601,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, UDT
NS + + Write @@ -20636,6 +22624,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, MAP, UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, MAP, UDT
NS + + Parquet @@ -20658,6 +22648,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
NS + + Write @@ -20679,6 +22671,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
NS + + diff --git a/tools/generated_files/operatorsScore.csv b/tools/generated_files/operatorsScore.csv index c5c2080694c..0c7295f95ac 100644 --- a/tools/generated_files/operatorsScore.csv +++ b/tools/generated_files/operatorsScore.csv @@ -14,14 +14,18 @@ SortExec,8.0 SubqueryBroadcastExec,3.0 TakeOrderedAndProjectExec,3.0 UnionExec,3.0 -CustomShuffleReaderExec,3.0 +AQEShuffleReadExec,3.0 HashAggregateExec,4.5 ObjectHashAggregateExec,3.0 SortAggregateExec,3.0 InMemoryTableScanExec,3.0 DataWritingCommandExec,3.0 ExecutedCommandExec,3.0 +AppendDataExecV1,3.0 +AtomicCreateTableAsSelectExec,3.0 +AtomicReplaceTableAsSelectExec,3.0 BatchScanExec,3.0 +OverwriteByExpressionExecV1,3.0 BroadcastExchangeExec,3.0 ShuffleExchangeExec,4.2 BroadcastHashJoinExec,5.1 diff --git a/tools/generated_files/supportedDataSource.csv b/tools/generated_files/supportedDataSource.csv index a69379bff51..2573406ec3b 100644 --- a/tools/generated_files/supportedDataSource.csv +++ b/tools/generated_files/supportedDataSource.csv @@ -1,13 +1,13 @@ -Format,Direction,BOOLEAN,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,DATE,TIMESTAMP,STRING,DECIMAL,NULL,BINARY,CALENDAR,ARRAY,MAP,STRUCT,UDT -Avro,read,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO -CSV,read,S,S,S,S,S,S,S,S,PS,S,S,NA,NS,NA,NA,NA,NA,NA -Delta,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS -Delta,write,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -HiveText,read,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,NS,NS,NS,NS -HiveText,write,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Iceberg,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS -JSON,read,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO -ORC,read,S,S,S,S,S,S,S,S,PS,S,S,NA,NS,NA,PS,PS,PS,NS -ORC,write,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Parquet,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS -Parquet,write,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Format,Direction,BOOLEAN,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,DATE,TIMESTAMP,STRING,DECIMAL,NULL,BINARY,CALENDAR,ARRAY,MAP,STRUCT,UDT,DAYTIME,YEARMONTH +Avro,read,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO +CSV,read,S,S,S,S,S,S,S,S,PS,S,S,NA,NS,NA,NA,NA,NA,NA,NA,NA +Delta,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,NA,NA +Delta,write,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +HiveText,read,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,NS,NS,NS,NS,NS,NS +HiveText,write,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Iceberg,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,NA,NA +JSON,read,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO +ORC,read,S,S,S,S,S,S,S,S,PS,S,S,NA,NS,NA,PS,PS,PS,NS,NA,NA +ORC,write,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Parquet,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,NA,NA +Parquet,write,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA diff --git a/tools/generated_files/supportedExecs.csv b/tools/generated_files/supportedExecs.csv index f5a3fe7c4b5..4eee8a7ad43 100644 --- a/tools/generated_files/supportedExecs.csv +++ b/tools/generated_files/supportedExecs.csv @@ -1,50 +1,54 @@ -Exec,Supported,Notes,Params,BOOLEAN,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,DATE,TIMESTAMP,STRING,DECIMAL,NULL,BINARY,CALENDAR,ARRAY,MAP,STRUCT,UDT -CoalesceExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -CollectLimitExec,NS,This is disabled by default because Collect Limit replacement can be slower on the GPU; if huge number of rows in a batch it could help by limiting the number of rows transferred from GPU to 
CPU,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS -ExpandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS -FileSourceScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -FilterExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -GenerateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -GlobalLimitExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS -LocalLimitExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS -ProjectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -RangeExec,S,None,Input/Output,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -SampleExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS -SortExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -SubqueryBroadcastExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S -TakeOrderedAndProjectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS -UnionExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS -CustomShuffleReaderExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -HashAggregateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,PS,NS,PS,PS,PS,NS -ObjectHashAggregateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,PS,NS,PS,PS,PS,NS -SortAggregateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,PS,NS,PS,PS,PS,NS -InMemoryTableScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,PS,PS,PS,NS -DataWritingCommandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,PS,NS,S,NS,PS,PS,PS,NS -ExecutedCommandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S -BatchScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS -BroadcastExchangeExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -ShuffleExchangeExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -BroadcastHashJoinExec,S,None,leftKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -BroadcastHashJoinExec,S,None,rightKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -BroadcastHashJoinExec,S,None,condition,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BroadcastHashJoinExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -BroadcastNestedLoopJoinExec,S,None,condition(A non-inner join only is supported if the condition expression can be converted to a GPU AST expression),S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BroadcastNestedLoopJoinExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -CartesianProductExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -ShuffledHashJoinExec,S,None,leftKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -ShuffledHashJoinExec,S,None,rightKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -ShuffledHashJoinExec,S,None,condition,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ShuffledHashJoinExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -SortMergeJoinExec,S,None,leftKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -SortMergeJoinExec,S,None,rightKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -SortMergeJoinExec,S,None,condition,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -SortMergeJoinExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -AggregateInPandasExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS -ArrowEvalPythonExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS -FlatMapCoGroupsInPandasExec,NS,This is disabled by default because 
Performance is not ideal with many small groups,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS -FlatMapGroupsInPandasExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS -MapInPandasExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS -WindowInPandasExec,NS,This is disabled by default because it only supports row based frame for now,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,NS,NS -WindowExec,S,None,partitionSpec,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,PS,NS -WindowExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -HiveTableScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,NS,NS,NS,NS +Exec,Supported,Notes,Params,BOOLEAN,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,DATE,TIMESTAMP,STRING,DECIMAL,NULL,BINARY,CALENDAR,ARRAY,MAP,STRUCT,UDT,DAYTIME,YEARMONTH +CoalesceExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CollectLimitExec,NS,This is disabled by default because Collect Limit replacement can be slower on the GPU; if huge number of rows in a batch it could help by limiting the number of rows transferred from GPU to CPU,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +ExpandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +FileSourceScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +FilterExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +GenerateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +GlobalLimitExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +LocalLimitExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +ProjectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +RangeExec,S,None,Input/Output,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SampleExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +SortExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +SubqueryBroadcastExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +TakeOrderedAndProjectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +UnionExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +AQEShuffleReadExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +HashAggregateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,PS,NS,PS,PS,PS,NS,NS,NS +ObjectHashAggregateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,PS,NS,PS,PS,PS,NS,NS,NS +SortAggregateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,PS,NS,PS,PS,PS,NS,NS,NS +InMemoryTableScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,PS,PS,PS,NS,NS,NS +DataWritingCommandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,PS,NS,S,NS,PS,PS,PS,NS,NS,NS +ExecutedCommandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +AppendDataExecV1,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,NS,NS +AtomicCreateTableAsSelectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,NS,NS +AtomicReplaceTableAsSelectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,NS,NS +BatchScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,NS,NS +OverwriteByExpressionExecV1,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,NS,NS +BroadcastExchangeExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS 
+ShuffleExchangeExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +BroadcastHashJoinExec,S,None,leftKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS +BroadcastHashJoinExec,S,None,rightKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS +BroadcastHashJoinExec,S,None,condition,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BroadcastHashJoinExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +BroadcastNestedLoopJoinExec,S,None,condition(A non-inner join only is supported if the condition expression can be converted to a GPU AST expression),S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BroadcastNestedLoopJoinExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CartesianProductExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +ShuffledHashJoinExec,S,None,leftKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS +ShuffledHashJoinExec,S,None,rightKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS +ShuffledHashJoinExec,S,None,condition,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShuffledHashJoinExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +SortMergeJoinExec,S,None,leftKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS +SortMergeJoinExec,S,None,rightKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS +SortMergeJoinExec,S,None,condition,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SortMergeJoinExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +AggregateInPandasExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +ArrowEvalPythonExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +FlatMapCoGroupsInPandasExec,NS,This is disabled by default because Performance is not ideal with many small groups,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +FlatMapGroupsInPandasExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +MapInPandasExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +WindowInPandasExec,NS,This is disabled by default because it only supports row based frame for now,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,NS,NS,NS,NS +WindowExec,S,None,partitionSpec,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,PS,NS,NS,NS +WindowExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +HiveTableScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,NS,NS,NS,NS,NS,NS diff --git a/tools/generated_files/supportedExprs.csv b/tools/generated_files/supportedExprs.csv index 88f6ee0d6d3..f9e47f10110 100644 --- a/tools/generated_files/supportedExprs.csv +++ b/tools/generated_files/supportedExprs.csv @@ -1,742 +1,742 @@ -Expression,Supported,SQL Func,Notes,Context,Params,BOOLEAN,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,DATE,TIMESTAMP,STRING,DECIMAL,NULL,BINARY,CALENDAR,ARRAY,MAP,STRUCT,UDT -Abs,S,`abs`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Abs,S,`abs`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Abs,S,`abs`,None,AST,input,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA -Abs,S,`abs`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA -Acos,S,`acos`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Acos,S,`acos`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Acos,S,`acos`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
-Acos,S,`acos`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Acosh,S,`acosh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Acosh,S,`acosh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Acosh,S,`acosh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Acosh,S,`acosh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Add,S,`+`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA -Add,S,`+`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA -Add,S,`+`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA -Add,S,`+`,None,AST,lhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA -Add,S,`+`,None,AST,rhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA -Add,S,`+`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA -Alias,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -Alias,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -Alias,S, ,None,AST,input,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS -Alias,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS -And,S,`and`,None,project,lhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -And,S,`and`,None,project,rhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -And,S,`and`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -And,S,`and`,None,AST,lhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -And,S,`and`,None,AST,rhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -And,S,`and`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ArrayContains,S,`array_contains`,None,project,array,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayContains,S,`array_contains`,None,project,key,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS -ArrayContains,S,`array_contains`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ArrayExcept,S,`array_except`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayExcept,S,`array_except`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayExcept,S,`array_except`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. 
We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayExists,S,`exists`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayExists,S,`exists`,None,project,function,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ArrayExists,S,`exists`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ArrayFilter,S,`filter`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayFilter,S,`filter`,None,project,function,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ArrayFilter,S,`filter`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayIntersect,S,`array_intersect`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayIntersect,S,`array_intersect`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayIntersect,S,`array_intersect`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. 
We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayMax,S,`array_max`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayMax,S,`array_max`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS -ArrayMin,S,`array_min`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayMin,S,`array_min`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS -ArrayRemove,S,`array_remove`,None,project,array,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS,PS,NS,NS,NS -ArrayRemove,S,`array_remove`,None,project,element,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS -ArrayRemove,S,`array_remove`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayRepeat,S,`array_repeat`,None,project,left,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS -ArrayRepeat,S,`array_repeat`,None,project,right,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ArrayRepeat,S,`array_repeat`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayTransform,S,`transform`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayTransform,S,`transform`,None,project,function,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS -ArrayTransform,S,`transform`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayUnion,S,`array_union`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayUnion,S,`array_union`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArrayUnion,S,`array_union`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArraysOverlap,S,`arrays_overlap`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. 
We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArraysOverlap,S,`arrays_overlap`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArraysOverlap,S,`arrays_overlap`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ArraysZip,S,`arrays_zip`,None,project,children,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ArraysZip,S,`arrays_zip`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -Ascii,NS,`ascii`,This is disabled by default because it only supports strings starting with ASCII or Latin-1 characters after Spark 3.2.3; 3.3.1 and 3.4.0. Otherwise the results will not match the CPU.,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -Ascii,NS,`ascii`,This is disabled by default because it only supports strings starting with ASCII or Latin-1 characters after Spark 3.2.3; 3.3.1 and 3.4.0. 
Otherwise the results will not match the CPU.,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Asin,S,`asin`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Asin,S,`asin`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Asin,S,`asin`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Asin,S,`asin`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Asinh,S,`asinh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Asinh,S,`asinh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Asinh,S,`asinh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Asinh,S,`asinh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -AtLeastNNonNulls,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -AtLeastNNonNulls,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Atan,S,`atan`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Atan,S,`atan`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Atan,S,`atan`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Atan,S,`atan`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Atanh,S,`atanh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Atanh,S,`atanh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Atanh,S,`atanh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Atanh,S,`atanh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -AttributeReference,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -AttributeReference,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS -BRound,S,`bround`,None,project,value,NA,S,S,S,S,PS,PS,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -BRound,S,`bround`,None,project,scale,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BRound,S,`bround`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -BitLength,S,`bit_length`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NA -BitLength,S,`bit_length`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseAnd,S,`&`,None,project,lhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseAnd,S,`&`,None,project,rhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseAnd,S,`&`,None,project,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseAnd,S,`&`,None,AST,lhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseAnd,S,`&`,None,AST,rhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseAnd,S,`&`,None,AST,result,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseNot,S,`~`,None,project,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseNot,S,`~`,None,project,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseNot,S,`~`,None,AST,input,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseNot,S,`~`,None,AST,result,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseOr,S,`\|`,None,project,lhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseOr,S,`\|`,None,project,rhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseOr,S,`\|`,None,project,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseOr,S,`\|`,None,AST,lhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
-BitwiseOr,S,`\|`,None,AST,rhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseOr,S,`\|`,None,AST,result,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseXor,S,`^`,None,project,lhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseXor,S,`^`,None,project,rhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseXor,S,`^`,None,project,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseXor,S,`^`,None,AST,lhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseXor,S,`^`,None,AST,rhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BitwiseXor,S,`^`,None,AST,result,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BoundReference,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -BoundReference,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS -CaseWhen,S,`when`,None,project,predicate,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -CaseWhen,S,`when`,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -CaseWhen,S,`when`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -Cbrt,S,`cbrt`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Cbrt,S,`cbrt`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Cbrt,S,`cbrt`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Cbrt,S,`cbrt`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Ceil,S,`ceil`; `ceiling`,None,project,input,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Ceil,S,`ceil`; `ceiling`,None,project,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -CheckOverflow,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -CheckOverflow,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Coalesce,S,`coalesce`,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -Coalesce,S,`coalesce`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -Concat,S,`concat`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,PS,NA,NA,NA -Concat,S,`concat`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,PS,NA,NA,NA -ConcatWs,S,`concat_ws`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,S,NA,NA,NA -ConcatWs,S,`concat_ws`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -Contains,S, ,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -Contains,S, ,None,project,search,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -Contains,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Conv,NS,`conv`,This is disabled by default because GPU implementation is incomplete. We currently only support from/to_base values of 10 and 16. We fall back on CPU if the signed conversion is signalled via a negative to_base. GPU implementation does not check for an 64-bit signed/unsigned int overflow when performing the conversion to return `FFFFFFFFFFFFFFFF` or `18446744073709551615` or to throw an error in the ANSI mode. It is safe to enable if the overflow is not possible or detected externally. For instance decimal strings not longer than 18 characters / hexadecimal strings not longer than 15 characters disregarding the sign cannot cause an overflow. ,project,num,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -Conv,NS,`conv`,This is disabled by default because GPU implementation is incomplete. We currently only support from/to_base values of 10 and 16. 
We fall back on CPU if the signed conversion is signalled via a negative to_base. GPU implementation does not check for an 64-bit signed/unsigned int overflow when performing the conversion to return `FFFFFFFFFFFFFFFF` or `18446744073709551615` or to throw an error in the ANSI mode. It is safe to enable if the overflow is not possible or detected externally. For instance decimal strings not longer than 18 characters / hexadecimal strings not longer than 15 characters disregarding the sign cannot cause an overflow. ,project,from_base,NA,PS,PS,PS,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Conv,NS,`conv`,This is disabled by default because GPU implementation is incomplete. We currently only support from/to_base values of 10 and 16. We fall back on CPU if the signed conversion is signalled via a negative to_base. GPU implementation does not check for an 64-bit signed/unsigned int overflow when performing the conversion to return `FFFFFFFFFFFFFFFF` or `18446744073709551615` or to throw an error in the ANSI mode. It is safe to enable if the overflow is not possible or detected externally. For instance decimal strings not longer than 18 characters / hexadecimal strings not longer than 15 characters disregarding the sign cannot cause an overflow. ,project,to_base,NA,PS,PS,PS,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Conv,NS,`conv`,This is disabled by default because GPU implementation is incomplete. We currently only support from/to_base values of 10 and 16. We fall back on CPU if the signed conversion is signalled via a negative to_base. GPU implementation does not check for an 64-bit signed/unsigned int overflow when performing the conversion to return `FFFFFFFFFFFFFFFF` or `18446744073709551615` or to throw an error in the ANSI mode. It is safe to enable if the overflow is not possible or detected externally. For instance decimal strings not longer than 18 characters / hexadecimal strings not longer than 15 characters disregarding the sign cannot cause an overflow. 
,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -Cos,S,`cos`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Cos,S,`cos`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Cos,S,`cos`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Cos,S,`cos`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Cosh,S,`cosh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Cosh,S,`cosh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Cosh,S,`cosh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Cosh,S,`cosh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Cot,S,`cot`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Cot,S,`cot`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Cot,S,`cot`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Cot,S,`cot`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -CreateArray,S,`array`,None,project,arg,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,NS,PS,NS -CreateArray,S,`array`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -CreateMap,S,`map`,None,project,key,S,S,S,S,S,S,S,S,PS,S,S,S,NA,NA,PS,NA,PS,NA -CreateMap,S,`map`,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,NA,NA,PS,PS,PS,NA -CreateNamedStruct,S,`named_struct`; `struct`,None,project,name,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -CreateNamedStruct,S,`named_struct`; `struct`,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -CreateNamedStruct,S,`named_struct`; `struct`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA -CurrentRow$,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA -DateAdd,S,`date_add`,None,project,startDate,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DateAdd,S,`date_add`,None,project,days,NA,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DateAdd,S,`date_add`,None,project,result,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DateAddInterval,S, ,None,project,start,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DateAddInterval,S, ,None,project,interval,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA -DateAddInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DateDiff,S,`datediff`,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DateDiff,S,`datediff`,None,project,rhs,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DateDiff,S,`datediff`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DateFormatClass,S,`date_format`,None,project,timestamp,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA -DateFormatClass,S,`date_format`,None,project,strfmt,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -DateFormatClass,S,`date_format`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -DateSub,S,`date_sub`,None,project,startDate,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DateSub,S,`date_sub`,None,project,days,NA,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DateSub,S,`date_sub`,None,project,result,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DayOfMonth,S,`day`; `dayofmonth`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DayOfMonth,S,`day`; `dayofmonth`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
-DayOfWeek,S,`dayofweek`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DayOfWeek,S,`dayofweek`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DayOfYear,S,`dayofyear`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DayOfYear,S,`dayofyear`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DenseRank,S,`dense_rank`,None,window,ordering,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS -DenseRank,S,`dense_rank`,None,window,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Divide,S,`/`,None,project,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Divide,S,`/`,None,project,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Divide,S,`/`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -DynamicPruningExpression,S, ,None,project,input,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -DynamicPruningExpression,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S -ElementAt,S,`element_at`,None,project,array/map,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,PS,NA,NA -ElementAt,S,`element_at`,None,project,index/key,PS,PS,PS,S,PS,PS,PS,PS,PS,PS,PS,NS,NS,NS,NS,NS,NS,NS -ElementAt,S,`element_at`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -EndsWith,S, ,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -EndsWith,S, ,None,project,search,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -EndsWith,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -EqualNullSafe,S,`<=>`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -EqualNullSafe,S,`<=>`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -EqualNullSafe,S,`<=>`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -EqualTo,S,`==`; `=`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -EqualTo,S,`==`; `=`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -EqualTo,S,`==`; `=`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -EqualTo,S,`==`; `=`,None,AST,lhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS -EqualTo,S,`==`; `=`,None,AST,rhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS -EqualTo,S,`==`; `=`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Exp,S,`exp`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Exp,S,`exp`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Exp,S,`exp`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Exp,S,`exp`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Explode,S,`explode_outer`; `explode`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,PS,NA,NA -Explode,S,`explode_outer`; `explode`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -Expm1,S,`expm1`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Expm1,S,`expm1`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Expm1,S,`expm1`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Expm1,S,`expm1`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Flatten,S,`flatten`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -Flatten,S,`flatten`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -Floor,S,`floor`,None,project,input,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA 
-Floor,S,`floor`,None,project,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -FormatNumber,S,`format_number`,None,project,x,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -FormatNumber,S,`format_number`,None,project,d,NA,NA,NA,PS,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA -FormatNumber,S,`format_number`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -FromUTCTimestamp,S,`from_utc_timestamp`,None,project,timestamp,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA -FromUTCTimestamp,S,`from_utc_timestamp`,None,project,timezone,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -FromUTCTimestamp,S,`from_utc_timestamp`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA -FromUnixTime,S,`from_unixtime`,None,project,sec,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -FromUnixTime,S,`from_unixtime`,None,project,format,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -FromUnixTime,S,`from_unixtime`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -GetArrayItem,S, ,None,project,array,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -GetArrayItem,S, ,None,project,ordinal,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -GetArrayItem,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -GetArrayStructFields,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -GetArrayStructFields,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -GetJsonObject,NS,`get_json_object`,This is disabled by default because Experimental feature that could be unstable or have performance issues.,project,json,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -GetJsonObject,NS,`get_json_object`,This is disabled by default because Experimental feature that could be unstable or have performance issues.,project,path,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -GetJsonObject,NS,`get_json_object`,This is disabled by default because Experimental feature that could be unstable or have performance issues.,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -GetMapValue,S, ,None,project,map,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA -GetMapValue,S, ,None,project,key,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,NS,NS,NS,NS -GetMapValue,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -GetStructField,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA -GetStructField,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -GetTimestamp,S, ,None,project,timeExp,NA,NA,NA,NA,NA,NA,NA,S,PS,S,NA,NA,NA,NA,NA,NA,NA,NA -GetTimestamp,S, ,None,project,format,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -GetTimestamp,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA -GreaterThan,S,`>`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -GreaterThan,S,`>`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -GreaterThan,S,`>`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -GreaterThan,S,`>`,None,AST,lhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS -GreaterThan,S,`>`,None,AST,rhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS -GreaterThan,S,`>`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -GreaterThanOrEqual,S,`>=`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -GreaterThanOrEqual,S,`>=`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS 
-GreaterThanOrEqual,S,`>=`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -GreaterThanOrEqual,S,`>=`,None,AST,lhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS -GreaterThanOrEqual,S,`>=`,None,AST,rhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS -GreaterThanOrEqual,S,`>=`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Greatest,S,`greatest`,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS -Greatest,S,`greatest`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS -HiveHash,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,NS,S,NS,NS,NS,NS,NS,NS -HiveHash,S, ,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Hour,S,`hour`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA -Hour,S,`hour`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Hypot,S,`hypot`,None,project,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Hypot,S,`hypot`,None,project,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Hypot,S,`hypot`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -If,S,`if`,None,project,predicate,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -If,S,`if`,None,project,trueValue,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -If,S,`if`,None,project,falseValue,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -If,S,`if`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -In,S,`in`,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS -In,S,`in`,None,project,list,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,NS,NS,NS,NS,NA,NS,NS -In,S,`in`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -InSet,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS -InSet,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -InitCap,S,`initcap`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -InitCap,S,`initcap`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -InputFileBlockLength,S,`input_file_block_length`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -InputFileBlockStart,S,`input_file_block_start`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -InputFileName,S,`input_file_name`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -IntegralDivide,S,`div`,None,project,lhs,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -IntegralDivide,S,`div`,None,project,rhs,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -IntegralDivide,S,`div`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -IsNaN,S,`isnan`,None,project,input,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -IsNaN,S,`isnan`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -IsNotNull,S,`isnotnull`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -IsNotNull,S,`isnotnull`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -IsNull,S,`isnull`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS 
-IsNull,S,`isnull`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -JsonToStructs,NS,`from_json`,This is disabled by default because it is currently in beta and undergoes continuous enhancements. Please consult the [compatibility documentation](../compatibility.md#json-supporting-types) to determine whether you can enable this configuration for your use case,project,jsonStr,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -JsonToStructs,NS,`from_json`,This is disabled by default because it is currently in beta and undergoes continuous enhancements. Please consult the [compatibility documentation](../compatibility.md#json-supporting-types) to determine whether you can enable this configuration for your use case,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NS,PS,PS,NA -JsonTuple,NS,`json_tuple`,This is disabled by default because Experimental feature that could be unstable or have performance issues.,project,json,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -JsonTuple,NS,`json_tuple`,This is disabled by default because Experimental feature that could be unstable or have performance issues.,project,field,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -JsonTuple,NS,`json_tuple`,This is disabled by default because Experimental feature that could be unstable or have performance issues.,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA -KnownFloatingPointNormalized,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S -KnownFloatingPointNormalized,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S -KnownNotNull,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,NS,S,S,PS,PS,PS,NS -KnownNotNull,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,NS,S,S,PS,PS,PS,NS -Lag,S,`lag`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS -Lag,S,`lag`,None,window,offset,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Lag,S,`lag`,None,window,default,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS -Lag,S,`lag`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS -LambdaFunction,S, ,None,project,function,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS -LambdaFunction,S, ,None,project,arguments,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS -LambdaFunction,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS -LastDay,S,`last_day`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -LastDay,S,`last_day`,None,project,result,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Lead,S,`lead`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS -Lead,S,`lead`,None,window,offset,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Lead,S,`lead`,None,window,default,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS -Lead,S,`lead`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS -Least,S,`least`,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS -Least,S,`least`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS -Length,S,`char_length`; `character_length`; `length`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NA -Length,S,`char_length`; `character_length`; `length`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -LessThan,S,`<`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -LessThan,S,`<`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -LessThan,S,`<`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
-LessThan,S,`<`,None,AST,lhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS -LessThan,S,`<`,None,AST,rhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS -LessThan,S,`<`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -LessThanOrEqual,S,`<=`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -LessThanOrEqual,S,`<=`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS -LessThanOrEqual,S,`<=`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -LessThanOrEqual,S,`<=`,None,AST,lhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS -LessThanOrEqual,S,`<=`,None,AST,rhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS -LessThanOrEqual,S,`<=`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Like,S,`like`,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -Like,S,`like`,None,project,search,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -Like,S,`like`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Literal,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS -Literal,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS -Log,S,`ln`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Log,S,`ln`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Log10,S,`log10`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Log10,S,`log10`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Log1p,S,`log1p`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Log1p,S,`log1p`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Log2,S,`log2`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Log2,S,`log2`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Logarithm,S,`log`,None,project,value,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Logarithm,S,`log`,None,project,base,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Logarithm,S,`log`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Lower,S,`lcase`; `lower`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -Lower,S,`lcase`; `lower`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -MakeDecimal,S, ,None,project,input,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -MakeDecimal,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA -MapConcat,S,`map_concat`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA -MapConcat,S,`map_concat`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA -MapEntries,S,`map_entries`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA -MapEntries,S,`map_entries`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -MapFilter,S,`map_filter`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA -MapFilter,S,`map_filter`,None,project,function,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
-MapFilter,S,`map_filter`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA -MapKeys,S,`map_keys`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA -MapKeys,S,`map_keys`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -MapValues,S,`map_values`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA -MapValues,S,`map_values`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -Md5,S,`md5`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA -Md5,S,`md5`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -MicrosToTimestamp,S,`timestamp_micros`,None,project,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -MicrosToTimestamp,S,`timestamp_micros`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA -MillisToTimestamp,S,`timestamp_millis`,None,project,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -MillisToTimestamp,S,`timestamp_millis`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA -Minute,S,`minute`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA -Minute,S,`minute`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -MonotonicallyIncreasingID,S,`monotonically_increasing_id`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Month,S,`month`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Month,S,`month`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Multiply,S,`*`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Multiply,S,`*`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Multiply,S,`*`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Multiply,S,`*`,None,AST,lhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA -Multiply,S,`*`,None,AST,rhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA -Multiply,S,`*`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA -Murmur3Hash,S,`hash`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS -Murmur3Hash,S,`hash`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -NaNvl,S,`nanvl`,None,project,lhs,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -NaNvl,S,`nanvl`,None,project,rhs,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -NaNvl,S,`nanvl`,None,project,result,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -NamedLambdaVariable,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS -Not,S,`!`; `not`,None,project,input,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Not,S,`!`; `not`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Not,S,`!`; `not`,None,AST,input,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Not,S,`!`; `not`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -NthValue,S,`nth_value`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -NthValue,S,`nth_value`,None,window,offset,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -NthValue,S,`nth_value`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -OctetLength,S,`octet_length`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NA -OctetLength,S,`octet_length`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Or,S,`or`,None,project,lhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
-Or,S,`or`,None,project,rhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Or,S,`or`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Or,S,`or`,None,AST,lhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Or,S,`or`,None,AST,rhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Or,S,`or`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ParseUrl,S,`parse_url`,None,project,url,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -ParseUrl,S,`parse_url`,None,project,partToExtract,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -ParseUrl,S,`parse_url`,None,project,key,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -ParseUrl,S,`parse_url`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -PercentRank,S,`percent_rank`,None,window,ordering,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS -PercentRank,S,`percent_rank`,None,window,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Pmod,S,`pmod`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA -Pmod,S,`pmod`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA -Pmod,S,`pmod`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA -PosExplode,S,`posexplode_outer`; `posexplode`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,PS,NA,NA -PosExplode,S,`posexplode_outer`; `posexplode`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -Pow,S,`pow`; `power`,None,project,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Pow,S,`pow`; `power`,None,project,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Pow,S,`pow`; `power`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Pow,S,`pow`; `power`,None,AST,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Pow,S,`pow`; `power`,None,AST,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Pow,S,`pow`; `power`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -PreciseTimestampConversion,S, ,None,project,input,NA,NA,NA,NA,S,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA -PreciseTimestampConversion,S, ,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA -PromotePrecision,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -PromotePrecision,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -PythonUDF,S, ,None,aggregation,param,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS -PythonUDF,S, ,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NA,PS,NS,PS,NA -PythonUDF,S, ,None,reduction,param,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS -PythonUDF,S, ,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NA,PS,NS,PS,NA -PythonUDF,S, ,None,window,param,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS -PythonUDF,S, ,None,window,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NA,PS,NS,PS,NA -PythonUDF,S, ,None,project,param,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS -PythonUDF,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NA,PS,NS,PS,NA -Quarter,S,`quarter`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Quarter,S,`quarter`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -RLike,S,`rlike`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -RLike,S,`rlike`,None,project,regexp,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -RLike,S,`rlike`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
-RaiseError,S,`raise_error`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -RaiseError,S,`raise_error`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA -Rand,S,`rand`; `random`,None,project,seed,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Rand,S,`rand`; `random`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Rank,S,`rank`,None,window,ordering,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS -Rank,S,`rank`,None,window,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -RegExpExtract,S,`regexp_extract`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -RegExpExtract,S,`regexp_extract`,None,project,regexp,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -RegExpExtract,S,`regexp_extract`,None,project,idx,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -RegExpExtract,S,`regexp_extract`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -RegExpExtractAll,S,`regexp_extract_all`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -RegExpExtractAll,S,`regexp_extract_all`,None,project,regexp,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -RegExpExtractAll,S,`regexp_extract_all`,None,project,idx,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -RegExpExtractAll,S,`regexp_extract_all`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA -RegExpReplace,S,`regexp_replace`,None,project,regex,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -RegExpReplace,S,`regexp_replace`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -RegExpReplace,S,`regexp_replace`,None,project,pos,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -RegExpReplace,S,`regexp_replace`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -RegExpReplace,S,`regexp_replace`,None,project,rep,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -Remainder,S,`%`; `mod`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Remainder,S,`%`; `mod`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Remainder,S,`%`; `mod`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -ReplicateRows,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS -ReplicateRows,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -Reverse,S,`reverse`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,PS,NA,NA,NA -Reverse,S,`reverse`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,PS,NA,NA,NA -Rint,S,`rint`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Rint,S,`rint`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Rint,S,`rint`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Rint,S,`rint`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Round,S,`round`,None,project,value,NA,S,S,S,S,PS,PS,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Round,S,`round`,None,project,scale,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Round,S,`round`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -RowNumber,S,`row_number`,None,window,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ScalaUDF,S, ,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS -ScalaUDF,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS -Second,S,`second`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA 
-Second,S,`second`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -SecondsToTimestamp,S,`timestamp_seconds`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -SecondsToTimestamp,S,`timestamp_seconds`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA -Sequence,S,`sequence`,None,project,start,NA,S,S,S,S,NA,NA,NS,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA -Sequence,S,`sequence`,None,project,stop,NA,S,S,S,S,NA,NA,NS,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA -Sequence,S,`sequence`,None,project,step,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA -Sequence,S,`sequence`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -ShiftLeft,S,`shiftleft`,None,project,value,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ShiftLeft,S,`shiftleft`,None,project,amount,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ShiftLeft,S,`shiftleft`,None,project,result,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ShiftRight,S,`shiftright`,None,project,value,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ShiftRight,S,`shiftright`,None,project,amount,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ShiftRight,S,`shiftright`,None,project,result,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ShiftRightUnsigned,S,`shiftrightunsigned`,None,project,value,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ShiftRightUnsigned,S,`shiftrightunsigned`,None,project,amount,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ShiftRightUnsigned,S,`shiftrightunsigned`,None,project,result,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Signum,S,`sign`; `signum`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Signum,S,`sign`; `signum`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Sin,S,`sin`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Sin,S,`sin`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Sin,S,`sin`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Sin,S,`sin`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Sinh,S,`sinh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Sinh,S,`sinh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Sinh,S,`sinh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Sinh,S,`sinh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Size,S,`cardinality`; `size`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,PS,NA,NA -Size,S,`cardinality`; `size`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -SortArray,S,`sort_array`,None,project,array,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -SortArray,S,`sort_array`,None,project,ascendingOrder,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -SortArray,S,`sort_array`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -SortOrder,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS -SortOrder,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS -SparkPartitionID,S,`spark_partition_id`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -SpecifiedWindowFrame,S, ,None,project,lower,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,S,NA,NA,NA,NA -SpecifiedWindowFrame,S, ,None,project,upper,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,S,NA,NA,NA,NA -SpecifiedWindowFrame,S, ,None,project,result,NA,S,S,S,S,NS,NS,NA,NA,NA,NS,NA,NA,S,NA,NA,NA,NA 
-Sqrt,S,`sqrt`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Sqrt,S,`sqrt`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Sqrt,S,`sqrt`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Sqrt,S,`sqrt`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Stack,S,`stack`,None,project,n,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Stack,S,`stack`,None,project,expr,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS -Stack,S,`stack`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -StartsWith,S, ,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StartsWith,S, ,None,project,search,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -StartsWith,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringInstr,S,`instr`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringInstr,S,`instr`,None,project,substr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -StringInstr,S,`instr`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringLPad,S,`lpad`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringLPad,S,`lpad`,None,project,len,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringLPad,S,`lpad`,None,project,pad,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -StringLPad,S,`lpad`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringLocate,S,`locate`; `position`,None,project,substr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -StringLocate,S,`locate`; `position`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringLocate,S,`locate`; `position`,None,project,start,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringLocate,S,`locate`; `position`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringRPad,S,`rpad`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringRPad,S,`rpad`,None,project,len,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringRPad,S,`rpad`,None,project,pad,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -StringRPad,S,`rpad`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringRepeat,S,`repeat`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringRepeat,S,`repeat`,None,project,repeatTimes,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringRepeat,S,`repeat`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringReplace,S,`replace`,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringReplace,S,`replace`,None,project,search,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -StringReplace,S,`replace`,None,project,replace,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -StringReplace,S,`replace`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringSplit,S,`split`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringSplit,S,`split`,None,project,regexp,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -StringSplit,S,`split`,None,project,limit,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringSplit,S,`split`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA -StringToMap,S,`str_to_map`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA 
-StringToMap,S,`str_to_map`,None,project,pairDelim,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringToMap,S,`str_to_map`,None,project,keyValueDelim,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringToMap,S,`str_to_map`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA -StringTranslate,S,`translate`,This is not 100% compatible with the Spark version because the GPU implementation supports all unicode code points. In Spark versions < 3.2.0; translate() does not support unicode characters with code point >= U+10000 (See SPARK-34094),project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringTranslate,S,`translate`,This is not 100% compatible with the Spark version because the GPU implementation supports all unicode code points. In Spark versions < 3.2.0; translate() does not support unicode characters with code point >= U+10000 (See SPARK-34094),project,from,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -StringTranslate,S,`translate`,This is not 100% compatible with the Spark version because the GPU implementation supports all unicode code points. In Spark versions < 3.2.0; translate() does not support unicode characters with code point >= U+10000 (See SPARK-34094),project,to,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -StringTranslate,S,`translate`,This is not 100% compatible with the Spark version because the GPU implementation supports all unicode code points. In Spark versions < 3.2.0; translate() does not support unicode characters with code point >= U+10000 (See SPARK-34094),project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringTrim,S,`trim`,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringTrim,S,`trim`,None,project,trimStr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -StringTrim,S,`trim`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringTrimLeft,S,`ltrim`,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringTrimLeft,S,`ltrim`,None,project,trimStr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -StringTrimLeft,S,`ltrim`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringTrimRight,S,`rtrim`,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StringTrimRight,S,`rtrim`,None,project,trimStr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -StringTrimRight,S,`rtrim`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -StructsToJson,NS,`to_json`,This is disabled by default because it is currently in beta and undergoes continuous enhancements. Please consult the [compatibility documentation](../compatibility.md#json-supporting-types) to determine whether you can enable this configuration for your use case,project,struct,S,S,S,S,S,S,S,S,PS,S,S,NA,NA,NA,PS,PS,PS,NA -StructsToJson,NS,`to_json`,This is disabled by default because it is currently in beta and undergoes continuous enhancements. 
Please consult the [compatibility documentation](../compatibility.md#json-supporting-types) to determine whether you can enable this configuration for your use case,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -Substring,S,`substr`; `substring`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NA -Substring,S,`substr`; `substring`,None,project,pos,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Substring,S,`substr`; `substring`,None,project,len,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Substring,S,`substr`; `substring`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NA -SubstringIndex,S,`substring_index`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -SubstringIndex,S,`substring_index`,None,project,delim,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -SubstringIndex,S,`substring_index`,None,project,count,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -SubstringIndex,S,`substring_index`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -Subtract,S,`-`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA -Subtract,S,`-`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA -Subtract,S,`-`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA -Subtract,S,`-`,None,AST,lhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA -Subtract,S,`-`,None,AST,rhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA -Subtract,S,`-`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA -Tan,S,`tan`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Tan,S,`tan`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Tan,S,`tan`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Tan,S,`tan`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Tanh,S,`tanh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Tanh,S,`tanh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Tanh,S,`tanh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Tanh,S,`tanh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -TimeAdd,S, ,None,project,start,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA -TimeAdd,S, ,None,project,interval,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA -TimeAdd,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA -ToDegrees,S,`degrees`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ToDegrees,S,`degrees`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ToRadians,S,`radians`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ToRadians,S,`radians`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ToUTCTimestamp,S,`to_utc_timestamp`,None,project,timestamp,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA -ToUTCTimestamp,S,`to_utc_timestamp`,None,project,timezone,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -ToUTCTimestamp,S,`to_utc_timestamp`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA -ToUnixTimestamp,S,`to_unix_timestamp`,None,project,timeExp,NA,NA,NA,NA,NA,NA,NA,S,PS,S,NA,NA,NA,NA,NA,NA,NA,NA -ToUnixTimestamp,S,`to_unix_timestamp`,None,project,format,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA 
-ToUnixTimestamp,S,`to_unix_timestamp`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -TransformKeys,S,`transform_keys`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA -TransformKeys,S,`transform_keys`,None,project,function,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS -TransformKeys,S,`transform_keys`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA -TransformValues,S,`transform_values`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA -TransformValues,S,`transform_values`,None,project,function,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS -TransformValues,S,`transform_values`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA -UnaryMinus,S,`negative`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA -UnaryMinus,S,`negative`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA -UnaryMinus,S,`negative`,None,AST,input,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA -UnaryMinus,S,`negative`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA -UnaryPositive,S,`positive`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA -UnaryPositive,S,`positive`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA -UnaryPositive,S,`positive`,None,AST,input,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA -UnaryPositive,S,`positive`,None,AST,result,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA -UnboundedFollowing$,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA -UnboundedPreceding$,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA -UnixTimestamp,S,`unix_timestamp`,None,project,timeExp,NA,NA,NA,NA,NA,NA,NA,S,PS,S,NA,NA,NA,NA,NA,NA,NA,NA -UnixTimestamp,S,`unix_timestamp`,None,project,format,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA -UnixTimestamp,S,`unix_timestamp`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -UnscaledValue,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA -UnscaledValue,S, ,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Upper,S,`ucase`; `upper`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -Upper,S,`ucase`; `upper`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -WeekDay,S,`weekday`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -WeekDay,S,`weekday`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -WindowExpression,S, ,None,window,windowFunction,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S -WindowExpression,S, ,None,window,windowSpec,NA,S,S,S,S,NS,NS,NA,NA,NA,PS,NA,NA,S,NA,NA,NA,NA -WindowExpression,S, ,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S -WindowSpecDefinition,S, ,None,project,partition,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,PS,NS -WindowSpecDefinition,S, ,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,PS,NS -WindowSpecDefinition,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,PS,NS 
-XxHash64,S,`xxhash64`,None,project,input,S,S,S,S,S,NS,NS,S,PS,S,S,S,NS,NS,NS,NS,NS,NS -XxHash64,S,`xxhash64`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Year,S,`year`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Year,S,`year`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -AggregateExpression,S, ,None,aggregation,aggFunc,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S -AggregateExpression,S, ,None,aggregation,filter,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -AggregateExpression,S, ,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S -AggregateExpression,S, ,None,reduction,aggFunc,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S -AggregateExpression,S, ,None,reduction,filter,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -AggregateExpression,S, ,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S -AggregateExpression,S, ,None,window,aggFunc,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S -AggregateExpression,S, ,None,window,filter,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -AggregateExpression,S, ,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S -ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,aggregation,input,NA,S,S,S,S,S,S,NS,NS,NA,S,NA,NA,NA,NA,NA,NA,NA -ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,aggregation,percentage,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA -ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,aggregation,accuracy,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,aggregation,result,NA,S,S,S,S,S,S,NS,NS,NA,S,NA,NA,NA,PS,NA,NA,NA -ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,reduction,input,NA,S,S,S,S,S,S,NS,NS,NA,S,NA,NA,NA,NA,NA,NA,NA -ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,reduction,percentage,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA -ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,reduction,accuracy,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,reduction,result,NA,S,S,S,S,S,S,NS,NS,NA,S,NA,NA,NA,PS,NA,NA,NA -Average,S,`avg`; 
`mean`,None,aggregation,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Average,S,`avg`; `mean`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Average,S,`avg`; `mean`,None,reduction,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Average,S,`avg`; `mean`,None,reduction,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Average,S,`avg`; `mean`,None,window,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Average,S,`avg`; `mean`,None,window,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -CollectList,S,`collect_list`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -CollectList,S,`collect_list`,None,aggregation,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -CollectList,S,`collect_list`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -CollectList,S,`collect_list`,None,reduction,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -CollectList,S,`collect_list`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -CollectList,S,`collect_list`,None,window,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -CollectSet,S,`collect_set`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS -CollectSet,S,`collect_set`,None,aggregation,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -CollectSet,S,`collect_set`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS -CollectSet,S,`collect_set`,None,reduction,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -CollectSet,S,`collect_set`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS -CollectSet,S,`collect_set`,None,window,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA -Count,S,`count`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S -Count,S,`count`,None,aggregation,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Count,S,`count`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S -Count,S,`count`,None,reduction,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Count,S,`count`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S -Count,S,`count`,None,window,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -First,S,`first_value`; `first`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -First,S,`first_value`; `first`,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -First,S,`first_value`; `first`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -First,S,`first_value`; `first`,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -First,S,`first_value`; `first`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -First,S,`first_value`; `first`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -Last,S,`last_value`; `last`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -Last,S,`last_value`; `last`,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -Last,S,`last_value`; `last`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -Last,S,`last_value`; `last`,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -Last,S,`last_value`; `last`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -Last,S,`last_value`; `last`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -Max,S,`max`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS -Max,S,`max`,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS 
-Max,S,`max`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS -Max,S,`max`,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS -Max,S,`max`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS -Max,S,`max`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS -Min,S,`min`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS -Min,S,`min`,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS -Min,S,`min`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS -Min,S,`min`,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS -Min,S,`min`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS -Min,S,`min`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS -Percentile,S,`percentile`,None,aggregation,input,NA,S,S,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Percentile,S,`percentile`,None,aggregation,percentage,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA -Percentile,S,`percentile`,None,aggregation,frequency,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA -Percentile,S,`percentile`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA -Percentile,S,`percentile`,None,reduction,input,NA,S,S,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Percentile,S,`percentile`,None,reduction,percentage,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA -Percentile,S,`percentile`,None,reduction,frequency,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA -Percentile,S,`percentile`,None,reduction,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA -PivotFirst,S, ,None,aggregation,pivotColumn,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS -PivotFirst,S, ,None,aggregation,valueColumn,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS -PivotFirst,S, ,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,NS,NS -PivotFirst,S, ,None,reduction,pivotColumn,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS -PivotFirst,S, ,None,reduction,valueColumn,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS -PivotFirst,S, ,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,NS,NS -StddevPop,S,`stddev_pop`,None,reduction,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StddevPop,S,`stddev_pop`,None,reduction,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StddevPop,S,`stddev_pop`,None,aggregation,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StddevPop,S,`stddev_pop`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StddevPop,S,`stddev_pop`,None,window,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StddevPop,S,`stddev_pop`,None,window,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,aggregation,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,reduction,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,reduction,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,window,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,window,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Sum,S,`sum`,None,aggregation,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA 
-Sum,S,`sum`,None,aggregation,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Sum,S,`sum`,None,reduction,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Sum,S,`sum`,None,reduction,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Sum,S,`sum`,None,window,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -Sum,S,`sum`,None,window,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -VariancePop,S,`var_pop`,None,reduction,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -VariancePop,S,`var_pop`,None,reduction,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -VariancePop,S,`var_pop`,None,aggregation,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -VariancePop,S,`var_pop`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -VariancePop,S,`var_pop`,None,window,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -VariancePop,S,`var_pop`,None,window,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -VarianceSamp,S,`var_samp`; `variance`,None,reduction,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -VarianceSamp,S,`var_samp`; `variance`,None,reduction,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -VarianceSamp,S,`var_samp`; `variance`,None,aggregation,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -VarianceSamp,S,`var_samp`; `variance`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -VarianceSamp,S,`var_samp`; `variance`,None,window,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -VarianceSamp,S,`var_samp`; `variance`,None,window,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -NormalizeNaNAndZero,S, ,None,project,input,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -NormalizeNaNAndZero,S, ,None,project,result,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -ScalarSubquery,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS -HiveGenericUDF,S, ,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS -HiveGenericUDF,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS -HiveSimpleUDF,S, ,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS -HiveSimpleUDF,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS +Expression,Supported,SQL Func,Notes,Context,Params,BOOLEAN,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,DATE,TIMESTAMP,STRING,DECIMAL,NULL,BINARY,CALENDAR,ARRAY,MAP,STRUCT,UDT,DAYTIME,YEARMONTH +Abs,S,`abs`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Abs,S,`abs`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Abs,S,`abs`,None,AST,input,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +Abs,S,`abs`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +Acos,S,`acos`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Acos,S,`acos`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Acos,S,`acos`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Acos,S,`acos`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Acosh,S,`acosh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Acosh,S,`acosh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Acosh,S,`acosh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Acosh,S,`acosh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Add,S,`+`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +Add,S,`+`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +Add,S,`+`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +Add,S,`+`,None,AST,lhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +Add,S,`+`,None,AST,rhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +Add,S,`+`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +Alias,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Alias,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Alias,S, ,None,AST,input,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +Alias,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +And,S,`and`,None,project,lhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +And,S,`and`,None,project,rhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +And,S,`and`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +And,S,`and`,None,AST,lhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +And,S,`and`,None,AST,rhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +And,S,`and`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArrayContains,S,`array_contains`,None,project,array,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayContains,S,`array_contains`,None,project,key,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +ArrayContains,S,`array_contains`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArrayExcept,S,`array_except`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayExcept,S,`array_except`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayExcept,S,`array_except`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. 
We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayExists,S,`exists`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayExists,S,`exists`,None,project,function,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArrayExists,S,`exists`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArrayFilter,S,`filter`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayFilter,S,`filter`,None,project,function,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArrayFilter,S,`filter`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayIntersect,S,`array_intersect`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayIntersect,S,`array_intersect`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayIntersect,S,`array_intersect`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. 
We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayMax,S,`array_max`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayMax,S,`array_max`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +ArrayMin,S,`array_min`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayMin,S,`array_min`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +ArrayRemove,S,`array_remove`,None,project,array,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS,PS,NS,NS,NS,NS,NS +ArrayRemove,S,`array_remove`,None,project,element,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +ArrayRemove,S,`array_remove`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayRepeat,S,`array_repeat`,None,project,left,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +ArrayRepeat,S,`array_repeat`,None,project,right,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArrayRepeat,S,`array_repeat`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayTransform,S,`transform`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayTransform,S,`transform`,None,project,function,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +ArrayTransform,S,`transform`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayUnion,S,`array_union`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayUnion,S,`array_union`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayUnion,S,`array_union`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArraysOverlap,S,`arrays_overlap`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. 
We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArraysOverlap,S,`arrays_overlap`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArraysOverlap,S,`arrays_overlap`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArraysZip,S,`arrays_zip`,None,project,children,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArraysZip,S,`arrays_zip`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Ascii,NS,`ascii`,This is disabled by default because it only supports strings starting with ASCII or Latin-1 characters after Spark 3.2.3; 3.3.1 and 3.4.0. Otherwise the results will not match the CPU.,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Ascii,NS,`ascii`,This is disabled by default because it only supports strings starting with ASCII or Latin-1 characters after Spark 3.2.3; 3.3.1 and 3.4.0. 
Otherwise the results will not match the CPU.,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Asin,S,`asin`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Asin,S,`asin`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Asin,S,`asin`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Asin,S,`asin`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Asinh,S,`asinh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Asinh,S,`asinh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Asinh,S,`asinh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Asinh,S,`asinh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +AtLeastNNonNulls,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +AtLeastNNonNulls,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Atan,S,`atan`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Atan,S,`atan`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Atan,S,`atan`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Atan,S,`atan`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Atanh,S,`atanh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Atanh,S,`atanh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Atanh,S,`atanh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Atanh,S,`atanh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +AttributeReference,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +AttributeReference,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +BRound,S,`bround`,None,project,value,NA,S,S,S,S,PS,PS,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +BRound,S,`bround`,None,project,scale,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BRound,S,`bround`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitLength,S,`bit_length`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA +BitLength,S,`bit_length`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseAnd,S,`&`,None,project,lhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseAnd,S,`&`,None,project,rhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseAnd,S,`&`,None,project,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseAnd,S,`&`,None,AST,lhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseAnd,S,`&`,None,AST,rhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseAnd,S,`&`,None,AST,result,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseNot,S,`~`,None,project,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseNot,S,`~`,None,project,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseNot,S,`~`,None,AST,input,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseNot,S,`~`,None,AST,result,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseOr,S,`\|`,None,project,lhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+BitwiseOr,S,`\|`,None,project,rhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseOr,S,`\|`,None,project,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseOr,S,`\|`,None,AST,lhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseOr,S,`\|`,None,AST,rhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseOr,S,`\|`,None,AST,result,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseXor,S,`^`,None,project,lhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseXor,S,`^`,None,project,rhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseXor,S,`^`,None,project,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseXor,S,`^`,None,AST,lhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseXor,S,`^`,None,AST,rhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseXor,S,`^`,None,AST,result,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BoundReference,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +BoundReference,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +CaseWhen,S,`when`,None,project,predicate,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +CaseWhen,S,`when`,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CaseWhen,S,`when`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Cbrt,S,`cbrt`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cbrt,S,`cbrt`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cbrt,S,`cbrt`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cbrt,S,`cbrt`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Ceil,S,`ceil`; `ceiling`,None,project,input,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Ceil,S,`ceil`; `ceiling`,None,project,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +CheckOverflow,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +CheckOverflow,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Coalesce,S,`coalesce`,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Coalesce,S,`coalesce`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Concat,S,`concat`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,PS,NA,NA,NA,NA,NA +Concat,S,`concat`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,PS,NA,NA,NA,NA,NA +ConcatWs,S,`concat_ws`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +ConcatWs,S,`concat_ws`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Contains,S, ,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Contains,S, ,None,project,search,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Contains,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Conv,NS,`conv`,This is disabled by default because GPU implementation is incomplete. We currently only support from/to_base values of 10 and 16. We fall back on CPU if the signed conversion is signalled via a negative to_base. GPU implementation does not check for an 64-bit signed/unsigned int overflow when performing the conversion to return `FFFFFFFFFFFFFFFF` or `18446744073709551615` or to throw an error in the ANSI mode. 
It is safe to enable if the overflow is not possible or detected externally. For instance decimal strings not longer than 18 characters / hexadecimal strings not longer than 15 characters disregarding the sign cannot cause an overflow. ,project,num,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Conv,NS,`conv`,This is disabled by default because GPU implementation is incomplete. We currently only support from/to_base values of 10 and 16. We fall back on CPU if the signed conversion is signalled via a negative to_base. GPU implementation does not check for an 64-bit signed/unsigned int overflow when performing the conversion to return `FFFFFFFFFFFFFFFF` or `18446744073709551615` or to throw an error in the ANSI mode. It is safe to enable if the overflow is not possible or detected externally. For instance decimal strings not longer than 18 characters / hexadecimal strings not longer than 15 characters disregarding the sign cannot cause an overflow. ,project,from_base,NA,PS,PS,PS,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Conv,NS,`conv`,This is disabled by default because GPU implementation is incomplete. We currently only support from/to_base values of 10 and 16. We fall back on CPU if the signed conversion is signalled via a negative to_base. GPU implementation does not check for an 64-bit signed/unsigned int overflow when performing the conversion to return `FFFFFFFFFFFFFFFF` or `18446744073709551615` or to throw an error in the ANSI mode. It is safe to enable if the overflow is not possible or detected externally. For instance decimal strings not longer than 18 characters / hexadecimal strings not longer than 15 characters disregarding the sign cannot cause an overflow. ,project,to_base,NA,PS,PS,PS,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Conv,NS,`conv`,This is disabled by default because GPU implementation is incomplete. We currently only support from/to_base values of 10 and 16. We fall back on CPU if the signed conversion is signalled via a negative to_base. GPU implementation does not check for an 64-bit signed/unsigned int overflow when performing the conversion to return `FFFFFFFFFFFFFFFF` or `18446744073709551615` or to throw an error in the ANSI mode. It is safe to enable if the overflow is not possible or detected externally. For instance decimal strings not longer than 18 characters / hexadecimal strings not longer than 15 characters disregarding the sign cannot cause an overflow. 
,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cos,S,`cos`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cos,S,`cos`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cos,S,`cos`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cos,S,`cos`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cosh,S,`cosh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cosh,S,`cosh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cosh,S,`cosh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cosh,S,`cosh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cot,S,`cot`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cot,S,`cot`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cot,S,`cot`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cot,S,`cot`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +CreateArray,S,`array`,None,project,arg,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,NS,PS,NS,NS,NS +CreateArray,S,`array`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +CreateMap,S,`map`,None,project,key,S,S,S,S,S,S,S,S,PS,S,S,S,NA,NA,PS,NA,PS,NA,NA,NA +CreateMap,S,`map`,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,NA,NA,PS,PS,PS,NA,NA,NA +CreateNamedStruct,S,`named_struct`; `struct`,None,project,name,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +CreateNamedStruct,S,`named_struct`; `struct`,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CreateNamedStruct,S,`named_struct`; `struct`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA +CurrentRow$,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA +DateAdd,S,`date_add`,None,project,startDate,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateAdd,S,`date_add`,None,project,days,NA,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateAdd,S,`date_add`,None,project,result,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateAddInterval,S, ,None,project,start,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateAddInterval,S, ,None,project,interval,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA +DateAddInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateDiff,S,`datediff`,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateDiff,S,`datediff`,None,project,rhs,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateDiff,S,`datediff`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateFormatClass,S,`date_format`,None,project,timestamp,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateFormatClass,S,`date_format`,None,project,strfmt,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateFormatClass,S,`date_format`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateSub,S,`date_sub`,None,project,startDate,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateSub,S,`date_sub`,None,project,days,NA,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateSub,S,`date_sub`,None,project,result,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DayOfMonth,S,`day`; 
`dayofmonth`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DayOfMonth,S,`day`; `dayofmonth`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DayOfWeek,S,`dayofweek`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DayOfWeek,S,`dayofweek`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DayOfYear,S,`dayofyear`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DayOfYear,S,`dayofyear`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DenseRank,S,`dense_rank`,None,window,ordering,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS +DenseRank,S,`dense_rank`,None,window,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Divide,S,`/`,None,project,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Divide,S,`/`,None,project,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Divide,S,`/`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +DynamicPruningExpression,S, ,None,project,input,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DynamicPruningExpression,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +ElementAt,S,`element_at`,None,project,array/map,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,PS,NA,NA,NA,NA +ElementAt,S,`element_at`,None,project,index/key,PS,PS,PS,S,PS,PS,PS,PS,PS,PS,PS,NS,NS,NS,NS,NS,NS,NS,NS,NS +ElementAt,S,`element_at`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +EndsWith,S, ,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +EndsWith,S, ,None,project,search,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +EndsWith,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +EqualNullSafe,S,`<=>`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +EqualNullSafe,S,`<=>`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +EqualNullSafe,S,`<=>`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +EqualTo,S,`==`; `=`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +EqualTo,S,`==`; `=`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +EqualTo,S,`==`; `=`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +EqualTo,S,`==`; `=`,None,AST,lhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +EqualTo,S,`==`; `=`,None,AST,rhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +EqualTo,S,`==`; `=`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Exp,S,`exp`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Exp,S,`exp`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Exp,S,`exp`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Exp,S,`exp`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Explode,S,`explode_outer`; `explode`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,PS,NA,NA,NA,NA +Explode,S,`explode_outer`; `explode`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Expm1,S,`expm1`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Expm1,S,`expm1`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Expm1,S,`expm1`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Expm1,S,`expm1`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Flatten,S,`flatten`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Flatten,S,`flatten`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Floor,S,`floor`,None,project,input,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Floor,S,`floor`,None,project,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +FormatNumber,S,`format_number`,None,project,x,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +FormatNumber,S,`format_number`,None,project,d,NA,NA,NA,PS,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +FormatNumber,S,`format_number`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +FromUTCTimestamp,S,`from_utc_timestamp`,None,project,timestamp,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +FromUTCTimestamp,S,`from_utc_timestamp`,None,project,timezone,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +FromUTCTimestamp,S,`from_utc_timestamp`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +FromUnixTime,S,`from_unixtime`,None,project,sec,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +FromUnixTime,S,`from_unixtime`,None,project,format,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +FromUnixTime,S,`from_unixtime`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GetArrayItem,S, ,None,project,array,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +GetArrayItem,S, ,None,project,ordinal,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GetArrayItem,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +GetArrayStructFields,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +GetArrayStructFields,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +GetJsonObject,NS,`get_json_object`,This is disabled by default because Experimental feature that could be unstable or have performance issues.,project,json,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GetJsonObject,NS,`get_json_object`,This is disabled by default because Experimental feature that could be unstable or have performance issues.,project,path,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GetJsonObject,NS,`get_json_object`,This is disabled by default because Experimental feature that could be unstable or have performance issues.,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GetMapValue,S, ,None,project,map,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +GetMapValue,S, ,None,project,key,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,NS,NS,NS,NS,NS,NS +GetMapValue,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +GetStructField,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA +GetStructField,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +GetTimestamp,S, ,None,project,timeExp,NA,NA,NA,NA,NA,NA,NA,S,PS,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GetTimestamp,S, ,None,project,format,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GetTimestamp,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+GreaterThan,S,`>`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +GreaterThan,S,`>`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +GreaterThan,S,`>`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GreaterThan,S,`>`,None,AST,lhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +GreaterThan,S,`>`,None,AST,rhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +GreaterThan,S,`>`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GreaterThanOrEqual,S,`>=`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +GreaterThanOrEqual,S,`>=`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +GreaterThanOrEqual,S,`>=`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GreaterThanOrEqual,S,`>=`,None,AST,lhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +GreaterThanOrEqual,S,`>=`,None,AST,rhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +GreaterThanOrEqual,S,`>=`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Greatest,S,`greatest`,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +Greatest,S,`greatest`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +HiveHash,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,NS,S,NS,NS,NS,NS,NS,NS,NS,NS +HiveHash,S, ,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Hour,S,`hour`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Hour,S,`hour`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Hypot,S,`hypot`,None,project,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Hypot,S,`hypot`,None,project,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Hypot,S,`hypot`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +If,S,`if`,None,project,predicate,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +If,S,`if`,None,project,trueValue,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +If,S,`if`,None,project,falseValue,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +If,S,`if`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +In,S,`in`,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +In,S,`in`,None,project,list,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,NS,NS,NS,NS,NA,NS,NS,NA,NA +In,S,`in`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +InSet,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +InSet,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +InitCap,S,`initcap`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +InitCap,S,`initcap`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +InputFileBlockLength,S,`input_file_block_length`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +InputFileBlockStart,S,`input_file_block_start`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+InputFileName,S,`input_file_name`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +IntegralDivide,S,`div`,None,project,lhs,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +IntegralDivide,S,`div`,None,project,rhs,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +IntegralDivide,S,`div`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +IsNaN,S,`isnan`,None,project,input,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +IsNaN,S,`isnan`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +IsNotNull,S,`isnotnull`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +IsNotNull,S,`isnotnull`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +IsNull,S,`isnull`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +IsNull,S,`isnull`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +JsonToStructs,NS,`from_json`,This is disabled by default because it is currently in beta and undergoes continuous enhancements. Please consult the [compatibility documentation](../compatibility.md#json-supporting-types) to determine whether you can enable this configuration for your use case,project,jsonStr,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +JsonToStructs,NS,`from_json`,This is disabled by default because it is currently in beta and undergoes continuous enhancements. Please consult the [compatibility documentation](../compatibility.md#json-supporting-types) to determine whether you can enable this configuration for your use case,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NS,PS,PS,NA,NA,NA +JsonTuple,NS,`json_tuple`,This is disabled by default because Experimental feature that could be unstable or have performance issues.,project,json,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +JsonTuple,NS,`json_tuple`,This is disabled by default because Experimental feature that could be unstable or have performance issues.,project,field,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +JsonTuple,NS,`json_tuple`,This is disabled by default because Experimental feature that could be unstable or have performance issues.,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +KnownFloatingPointNormalized,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +KnownFloatingPointNormalized,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +KnownNotNull,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,NS,S,S,PS,PS,PS,NS,NS,NS +KnownNotNull,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,NS,S,S,PS,PS,PS,NS,NS,NS +Lag,S,`lag`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +Lag,S,`lag`,None,window,offset,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Lag,S,`lag`,None,window,default,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +Lag,S,`lag`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +LambdaFunction,S, ,None,project,function,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +LambdaFunction,S, ,None,project,arguments,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +LambdaFunction,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +LastDay,S,`last_day`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +LastDay,S,`last_day`,None,project,result,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Lead,S,`lead`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +Lead,S,`lead`,None,window,offset,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Lead,S,`lead`,None,window,default,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +Lead,S,`lead`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +Least,S,`least`,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +Least,S,`least`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +Length,S,`char_length`; `character_length`; `length`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA +Length,S,`char_length`; `character_length`; `length`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +LessThan,S,`<`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +LessThan,S,`<`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +LessThan,S,`<`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +LessThan,S,`<`,None,AST,lhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +LessThan,S,`<`,None,AST,rhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +LessThan,S,`<`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +LessThanOrEqual,S,`<=`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +LessThanOrEqual,S,`<=`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +LessThanOrEqual,S,`<=`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +LessThanOrEqual,S,`<=`,None,AST,lhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +LessThanOrEqual,S,`<=`,None,AST,rhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +LessThanOrEqual,S,`<=`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Like,S,`like`,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Like,S,`like`,None,project,search,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Like,S,`like`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Literal,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,S,S +Literal,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +Log,S,`ln`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Log,S,`ln`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Log10,S,`log10`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Log10,S,`log10`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Log1p,S,`log1p`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Log1p,S,`log1p`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Log2,S,`log2`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Log2,S,`log2`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Logarithm,S,`log`,None,project,value,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Logarithm,S,`log`,None,project,base,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Logarithm,S,`log`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Lower,S,`lcase`; `lower`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case 
correctly.,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Lower,S,`lcase`; `lower`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MakeDecimal,S, ,None,project,input,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MakeDecimal,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA +MapConcat,S,`map_concat`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapConcat,S,`map_concat`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapEntries,S,`map_entries`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapEntries,S,`map_entries`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +MapFilter,S,`map_filter`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapFilter,S,`map_filter`,None,project,function,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MapFilter,S,`map_filter`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapKeys,S,`map_keys`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapKeys,S,`map_keys`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +MapValues,S,`map_values`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapValues,S,`map_values`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Md5,S,`md5`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA +Md5,S,`md5`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MicrosToTimestamp,S,`timestamp_micros`,None,project,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MicrosToTimestamp,S,`timestamp_micros`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MillisToTimestamp,S,`timestamp_millis`,None,project,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MillisToTimestamp,S,`timestamp_millis`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Minute,S,`minute`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Minute,S,`minute`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MonotonicallyIncreasingID,S,`monotonically_increasing_id`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Month,S,`month`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Month,S,`month`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Multiply,S,`*`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Multiply,S,`*`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Multiply,S,`*`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Multiply,S,`*`,None,AST,lhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +Multiply,S,`*`,None,AST,rhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +Multiply,S,`*`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +Murmur3Hash,S,`hash`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS 
+Murmur3Hash,S,`hash`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +NaNvl,S,`nanvl`,None,project,lhs,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +NaNvl,S,`nanvl`,None,project,rhs,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +NaNvl,S,`nanvl`,None,project,result,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +NamedLambdaVariable,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +Not,S,`!`; `not`,None,project,input,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Not,S,`!`; `not`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Not,S,`!`; `not`,None,AST,input,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Not,S,`!`; `not`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +NthValue,S,`nth_value`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +NthValue,S,`nth_value`,None,window,offset,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +NthValue,S,`nth_value`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +OctetLength,S,`octet_length`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA +OctetLength,S,`octet_length`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Or,S,`or`,None,project,lhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Or,S,`or`,None,project,rhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Or,S,`or`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Or,S,`or`,None,AST,lhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Or,S,`or`,None,AST,rhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Or,S,`or`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ParseUrl,S,`parse_url`,None,project,url,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ParseUrl,S,`parse_url`,None,project,partToExtract,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ParseUrl,S,`parse_url`,None,project,key,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ParseUrl,S,`parse_url`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +PercentRank,S,`percent_rank`,None,window,ordering,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS +PercentRank,S,`percent_rank`,None,window,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Pmod,S,`pmod`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA +Pmod,S,`pmod`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +Pmod,S,`pmod`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +PosExplode,S,`posexplode_outer`; `posexplode`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,PS,NA,NA,NA,NA +PosExplode,S,`posexplode_outer`; `posexplode`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Pow,S,`pow`; `power`,None,project,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Pow,S,`pow`; `power`,None,project,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Pow,S,`pow`; `power`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Pow,S,`pow`; `power`,None,AST,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Pow,S,`pow`; `power`,None,AST,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Pow,S,`pow`; 
`power`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +PreciseTimestampConversion,S, ,None,project,input,NA,NA,NA,NA,S,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +PreciseTimestampConversion,S, ,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +PromotePrecision,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +PromotePrecision,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +PythonUDF,S, ,None,aggregation,param,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +PythonUDF,S, ,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NA,PS,NS,PS,NA,NA,NA +PythonUDF,S, ,None,reduction,param,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +PythonUDF,S, ,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NA,PS,NS,PS,NA,NA,NA +PythonUDF,S, ,None,window,param,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +PythonUDF,S, ,None,window,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NA,PS,NS,PS,NA,NA,NA +PythonUDF,S, ,None,project,param,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +PythonUDF,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NA,PS,NS,PS,NA,NA,NA +Quarter,S,`quarter`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Quarter,S,`quarter`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RLike,S,`regexp_like`; `regexp`; `rlike`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RLike,S,`regexp_like`; `regexp`; `rlike`,None,project,regexp,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RLike,S,`regexp_like`; `regexp`; `rlike`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RaiseError,S,`raise_error`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RaiseError,S,`raise_error`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA +Rand,S,`rand`; `random`,None,project,seed,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Rand,S,`rand`; `random`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Rank,S,`rank`,None,window,ordering,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS +Rank,S,`rank`,None,window,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpExtract,S,`regexp_extract`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpExtract,S,`regexp_extract`,None,project,regexp,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpExtract,S,`regexp_extract`,None,project,idx,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpExtract,S,`regexp_extract`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpExtractAll,S,`regexp_extract_all`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpExtractAll,S,`regexp_extract_all`,None,project,regexp,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpExtractAll,S,`regexp_extract_all`,None,project,idx,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpExtractAll,S,`regexp_extract_all`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +RegExpReplace,S,`regexp_replace`,None,project,regex,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpReplace,S,`regexp_replace`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+RegExpReplace,S,`regexp_replace`,None,project,pos,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpReplace,S,`regexp_replace`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpReplace,S,`regexp_replace`,None,project,rep,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Remainder,S,`%`; `mod`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Remainder,S,`%`; `mod`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Remainder,S,`%`; `mod`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +ReplicateRows,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +ReplicateRows,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Reverse,S,`reverse`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Reverse,S,`reverse`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Rint,S,`rint`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Rint,S,`rint`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Rint,S,`rint`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Rint,S,`rint`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Round,S,`round`,None,project,value,NA,S,S,S,S,PS,PS,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Round,S,`round`,None,project,scale,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Round,S,`round`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +RowNumber,S,`row_number`,None,window,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ScalaUDF,S, ,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS +ScalaUDF,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS +Second,S,`second`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Second,S,`second`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SecondsToTimestamp,S,`timestamp_seconds`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +SecondsToTimestamp,S,`timestamp_seconds`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sequence,S,`sequence`,None,project,start,NA,S,S,S,S,NA,NA,NS,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sequence,S,`sequence`,None,project,stop,NA,S,S,S,S,NA,NA,NS,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sequence,S,`sequence`,None,project,step,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA +Sequence,S,`sequence`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ShiftLeft,S,`shiftleft`,None,project,value,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShiftLeft,S,`shiftleft`,None,project,amount,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShiftLeft,S,`shiftleft`,None,project,result,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShiftRight,S,`shiftright`,None,project,value,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShiftRight,S,`shiftright`,None,project,amount,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShiftRight,S,`shiftright`,None,project,result,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShiftRightUnsigned,S,`shiftrightunsigned`,None,project,value,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+ShiftRightUnsigned,S,`shiftrightunsigned`,None,project,amount,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShiftRightUnsigned,S,`shiftrightunsigned`,None,project,result,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Signum,S,`sign`; `signum`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Signum,S,`sign`; `signum`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sin,S,`sin`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sin,S,`sin`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sin,S,`sin`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sin,S,`sin`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sinh,S,`sinh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sinh,S,`sinh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sinh,S,`sinh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sinh,S,`sinh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Size,S,`cardinality`; `size`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,PS,NA,NA,NA,NA +Size,S,`cardinality`; `size`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SortArray,S,`sort_array`,None,project,array,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +SortArray,S,`sort_array`,None,project,ascendingOrder,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SortArray,S,`sort_array`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +SortOrder,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +SortOrder,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +SparkPartitionID,S,`spark_partition_id`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SpecifiedWindowFrame,S, ,None,project,lower,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,S,NA,NA,NA,NA,S,NS +SpecifiedWindowFrame,S, ,None,project,upper,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,S,NA,NA,NA,NA,S,NS +SpecifiedWindowFrame,S, ,None,project,result,NA,S,S,S,S,NS,NS,NA,NA,NA,NS,NA,NA,S,NA,NA,NA,NA,S,NS +Sqrt,S,`sqrt`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sqrt,S,`sqrt`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sqrt,S,`sqrt`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sqrt,S,`sqrt`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Stack,S,`stack`,None,project,n,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Stack,S,`stack`,None,project,expr,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +Stack,S,`stack`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +StartsWith,S, ,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StartsWith,S, ,None,project,search,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StartsWith,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringInstr,S,`instr`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringInstr,S,`instr`,None,project,substr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringInstr,S,`instr`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+StringLPad,S,`lpad`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLPad,S,`lpad`,None,project,len,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLPad,S,`lpad`,None,project,pad,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLPad,S,`lpad`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLocate,S,`locate`; `position`,None,project,substr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLocate,S,`locate`; `position`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLocate,S,`locate`; `position`,None,project,start,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLocate,S,`locate`; `position`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRPad,S,`rpad`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRPad,S,`rpad`,None,project,len,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRPad,S,`rpad`,None,project,pad,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRPad,S,`rpad`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRepeat,S,`repeat`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRepeat,S,`repeat`,None,project,repeatTimes,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRepeat,S,`repeat`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringReplace,S,`replace`,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringReplace,S,`replace`,None,project,search,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringReplace,S,`replace`,None,project,replace,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringReplace,S,`replace`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringSplit,S,`split`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringSplit,S,`split`,None,project,regexp,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringSplit,S,`split`,None,project,limit,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringSplit,S,`split`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +StringToMap,S,`str_to_map`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringToMap,S,`str_to_map`,None,project,pairDelim,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringToMap,S,`str_to_map`,None,project,keyValueDelim,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringToMap,S,`str_to_map`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA +StringTranslate,S,`translate`,This is not 100% compatible with the Spark version because the GPU implementation supports all unicode code points. In Spark versions < 3.2.0; translate() does not support unicode characters with code point >= U+10000 (See SPARK-34094),project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTranslate,S,`translate`,This is not 100% compatible with the Spark version because the GPU implementation supports all unicode code points. 
In Spark versions < 3.2.0; translate() does not support unicode characters with code point >= U+10000 (See SPARK-34094),project,from,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTranslate,S,`translate`,This is not 100% compatible with the Spark version because the GPU implementation supports all unicode code points. In Spark versions < 3.2.0; translate() does not support unicode characters with code point >= U+10000 (See SPARK-34094),project,to,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTranslate,S,`translate`,This is not 100% compatible with the Spark version because the GPU implementation supports all unicode code points. In Spark versions < 3.2.0; translate() does not support unicode characters with code point >= U+10000 (See SPARK-34094),project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrim,S,`trim`,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrim,S,`trim`,None,project,trimStr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrim,S,`trim`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrimLeft,S,`ltrim`,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrimLeft,S,`ltrim`,None,project,trimStr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrimLeft,S,`ltrim`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrimRight,S,`rtrim`,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrimRight,S,`rtrim`,None,project,trimStr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrimRight,S,`rtrim`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StructsToJson,NS,`to_json`,This is disabled by default because it is currently in beta and undergoes continuous enhancements. Please consult the [compatibility documentation](../compatibility.md#json-supporting-types) to determine whether you can enable this configuration for your use case,project,struct,S,S,S,S,S,S,S,S,PS,S,S,NA,NA,NA,PS,PS,PS,NA,NA,NA +StructsToJson,NS,`to_json`,This is disabled by default because it is currently in beta and undergoes continuous enhancements. 
Please consult the [compatibility documentation](../compatibility.md#json-supporting-types) to determine whether you can enable this configuration for your use case,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Substring,S,`substr`; `substring`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA +Substring,S,`substr`; `substring`,None,project,pos,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Substring,S,`substr`; `substring`,None,project,len,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Substring,S,`substr`; `substring`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA +SubstringIndex,S,`substring_index`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SubstringIndex,S,`substring_index`,None,project,delim,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SubstringIndex,S,`substring_index`,None,project,count,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SubstringIndex,S,`substring_index`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Subtract,S,`-`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +Subtract,S,`-`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +Subtract,S,`-`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +Subtract,S,`-`,None,AST,lhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +Subtract,S,`-`,None,AST,rhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +Subtract,S,`-`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +Tan,S,`tan`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Tan,S,`tan`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Tan,S,`tan`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Tan,S,`tan`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Tanh,S,`tanh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Tanh,S,`tanh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Tanh,S,`tanh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Tanh,S,`tanh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +TimeAdd,S, ,None,project,start,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +TimeAdd,S, ,None,project,interval,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,PS,NA +TimeAdd,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToDegrees,S,`degrees`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToDegrees,S,`degrees`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToRadians,S,`radians`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToRadians,S,`radians`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToUTCTimestamp,S,`to_utc_timestamp`,None,project,timestamp,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToUTCTimestamp,S,`to_utc_timestamp`,None,project,timezone,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToUTCTimestamp,S,`to_utc_timestamp`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToUnixTimestamp,S,`to_unix_timestamp`,None,project,timeExp,NA,NA,NA,NA,NA,NA,NA,S,PS,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+ToUnixTimestamp,S,`to_unix_timestamp`,None,project,format,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToUnixTimestamp,S,`to_unix_timestamp`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +TransformKeys,S,`transform_keys`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +TransformKeys,S,`transform_keys`,None,project,function,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NS,NS +TransformKeys,S,`transform_keys`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +TransformValues,S,`transform_values`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +TransformValues,S,`transform_values`,None,project,function,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +TransformValues,S,`transform_values`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +UnaryMinus,S,`negative`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +UnaryMinus,S,`negative`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +UnaryMinus,S,`negative`,None,AST,input,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +UnaryMinus,S,`negative`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +UnaryPositive,S,`positive`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +UnaryPositive,S,`positive`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +UnaryPositive,S,`positive`,None,AST,input,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +UnaryPositive,S,`positive`,None,AST,result,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +UnboundedFollowing$,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA +UnboundedPreceding$,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA +UnixTimestamp,S,`unix_timestamp`,None,project,timeExp,NA,NA,NA,NA,NA,NA,NA,S,PS,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +UnixTimestamp,S,`unix_timestamp`,None,project,format,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +UnixTimestamp,S,`unix_timestamp`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +UnscaledValue,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA +UnscaledValue,S, ,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Upper,S,`ucase`; `upper`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Upper,S,`ucase`; `upper`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +WeekDay,S,`weekday`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +WeekDay,S,`weekday`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +WindowExpression,S, ,None,window,windowFunction,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +WindowExpression,S, ,None,window,windowSpec,NA,S,S,S,S,NS,NS,NA,NA,NA,PS,NA,NA,S,NA,NA,NA,NA,S,NS +WindowExpression,S, ,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +WindowSpecDefinition,S, 
,None,project,partition,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,PS,NS,NS,NS +WindowSpecDefinition,S, ,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,PS,NS,NS,NS +WindowSpecDefinition,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,PS,NS,NS,NS +XxHash64,S,`xxhash64`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS +XxHash64,S,`xxhash64`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Year,S,`year`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Year,S,`year`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +AggregateExpression,S, ,None,aggregation,aggFunc,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +AggregateExpression,S, ,None,aggregation,filter,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +AggregateExpression,S, ,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +AggregateExpression,S, ,None,reduction,aggFunc,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +AggregateExpression,S, ,None,reduction,filter,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +AggregateExpression,S, ,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +AggregateExpression,S, ,None,window,aggFunc,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +AggregateExpression,S, ,None,window,filter,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +AggregateExpression,S, ,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,aggregation,input,NA,S,S,S,S,S,S,NS,NS,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,aggregation,percentage,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,aggregation,accuracy,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,aggregation,result,NA,S,S,S,S,S,S,NS,NS,NA,S,NA,NA,NA,PS,NA,NA,NA,NA,NA +ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,reduction,input,NA,S,S,S,S,S,S,NS,NS,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,reduction,percentage,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,reduction,accuracy,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,reduction,result,NA,S,S,S,S,S,S,NS,NS,NA,S,NA,NA,NA,PS,NA,NA,NA,NA,NA +Average,S,`avg`; `mean`,None,aggregation,input,NA,S,S,S,S,S,S,NA,NA,NA,S,S,NA,NS,NA,NA,NA,NA,NS,NS +Average,S,`avg`; `mean`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Average,S,`avg`; `mean`,None,reduction,input,NA,S,S,S,S,S,S,NA,NA,NA,S,S,NA,NS,NA,NA,NA,NA,NS,NS +Average,S,`avg`; `mean`,None,reduction,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Average,S,`avg`; `mean`,None,window,input,NA,S,S,S,S,S,S,NA,NA,NA,S,S,NA,NS,NA,NA,NA,NA,NS,NS +Average,S,`avg`; `mean`,None,window,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +CollectList,S,`collect_list`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CollectList,S,`collect_list`,None,aggregation,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +CollectList,S,`collect_list`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CollectList,S,`collect_list`,None,reduction,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +CollectList,S,`collect_list`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CollectList,S,`collect_list`,None,window,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +CollectSet,S,`collect_set`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +CollectSet,S,`collect_set`,None,aggregation,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +CollectSet,S,`collect_set`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +CollectSet,S,`collect_set`,None,reduction,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +CollectSet,S,`collect_set`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +CollectSet,S,`collect_set`,None,window,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Count,S,`count`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +Count,S,`count`,None,aggregation,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Count,S,`count`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +Count,S,`count`,None,reduction,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Count,S,`count`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +Count,S,`count`,None,window,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +First,S,`first_value`; `first`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +First,S,`first_value`; `first`,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +First,S,`first_value`; `first`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +First,S,`first_value`; `first`,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +First,S,`first_value`; `first`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +First,S,`first_value`; `first`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Last,S,`last_value`; `last`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Last,S,`last_value`; `last`,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Last,S,`last_value`; 
`last`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Last,S,`last_value`; `last`,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Last,S,`last_value`; `last`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Last,S,`last_value`; `last`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Max,S,`max`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +Max,S,`max`,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +Max,S,`max`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +Max,S,`max`,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +Max,S,`max`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +Max,S,`max`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +Min,S,`min`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +Min,S,`min`,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +Min,S,`min`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +Min,S,`min`,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +Min,S,`min`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +Min,S,`min`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +Percentile,S,`percentile`,None,aggregation,input,NA,S,S,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Percentile,S,`percentile`,None,aggregation,percentage,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +Percentile,S,`percentile`,None,aggregation,frequency,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +Percentile,S,`percentile`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +Percentile,S,`percentile`,None,reduction,input,NA,S,S,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Percentile,S,`percentile`,None,reduction,percentage,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +Percentile,S,`percentile`,None,reduction,frequency,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +Percentile,S,`percentile`,None,reduction,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +PivotFirst,S, ,None,aggregation,pivotColumn,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS +PivotFirst,S, ,None,aggregation,valueColumn,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS +PivotFirst,S, ,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,NS,NS,NS,NS +PivotFirst,S, ,None,reduction,pivotColumn,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS +PivotFirst,S, ,None,reduction,valueColumn,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS +PivotFirst,S, ,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,NS,NS,NS,NS +StddevPop,S,`stddev_pop`,None,reduction,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevPop,S,`stddev_pop`,None,reduction,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevPop,S,`stddev_pop`,None,aggregation,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevPop,S,`stddev_pop`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevPop,S,`stddev_pop`,None,window,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevPop,S,`stddev_pop`,None,window,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,aggregation,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,reduction,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,reduction,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,window,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,window,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sum,S,`sum`,None,aggregation,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sum,S,`sum`,None,aggregation,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sum,S,`sum`,None,reduction,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sum,S,`sum`,None,reduction,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sum,S,`sum`,None,window,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sum,S,`sum`,None,window,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +VariancePop,S,`var_pop`,None,reduction,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VariancePop,S,`var_pop`,None,reduction,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VariancePop,S,`var_pop`,None,aggregation,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VariancePop,S,`var_pop`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VariancePop,S,`var_pop`,None,window,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VariancePop,S,`var_pop`,None,window,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VarianceSamp,S,`var_samp`; `variance`,None,reduction,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VarianceSamp,S,`var_samp`; `variance`,None,reduction,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VarianceSamp,S,`var_samp`; `variance`,None,aggregation,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VarianceSamp,S,`var_samp`; `variance`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VarianceSamp,S,`var_samp`; `variance`,None,window,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VarianceSamp,S,`var_samp`; `variance`,None,window,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +NormalizeNaNAndZero,S, ,None,project,input,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +NormalizeNaNAndZero,S, ,None,project,result,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ScalarSubquery,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +HiveGenericUDF,S, ,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS +HiveGenericUDF,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS +HiveSimpleUDF,S, ,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS +HiveSimpleUDF,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS

From cf859d16382b7d10cfc606c8c1c65e0c2e6f7819 Mon Sep 17 00:00:00 2001
From: Tim Liu
Date: Fri, 12 Jul 2024 15:24:45 +0800
Subject: [PATCH 9/9] Clean up unused and duplicated 'org/roaringbitmap' folder

To fix: https://github.com/NVIDIA/spark-rapids/issues/11175

Clean up the unused and duplicated 'org/roaringbitmap' classes in the spark320
shim folder to work around the JACOCO error 'different class with same name'
after dropping the 31x shims and changing the default shim to spark320.

Signed-off-by: Tim Liu
---
 jenkins/spark-premerge-build.sh | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/jenkins/spark-premerge-build.sh b/jenkins/spark-premerge-build.sh
index bf33b8d65c9..9cd1664af9f 100755
--- a/jenkins/spark-premerge-build.sh
+++ b/jenkins/spark-premerge-build.sh
@@ -92,7 +92,8 @@ mvn_verify() {
     # extract the .class files in udf jar and replace the existing ones in spark3xx-ommon and spark$SPK_VER
     # because the class files in udf jar will be modified in aggregator's shade phase
     jar xf "$UDF_JAR" com/nvidia/spark/udf
-    rm -rf com/nvidia/shaded/ org/openucx/ spark-shared/com/nvidia/spark/udf/ spark${SPK_VER}/com/nvidia/spark/udf/
+    # TODO Should clean up unused and duplicated 'org/roaringbitmap' in the spark3xx shim folders, https://github.com/NVIDIA/spark-rapids/issues/11175
+    rm -rf com/nvidia/shaded/ org/openucx/ spark${SPK_VER}/META-INF/versions/*/org/roaringbitmap/ spark-shared/com/nvidia/spark/udf/ spark${SPK_VER}/com/nvidia/spark/udf/
     popd

    # Triggering here until we change the jenkins file
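For context (not part of the patch): the JACOCO 'different class with same name' failure occurs when the same fully qualified class file shows up in more than one of the extracted shim directories, for example org/roaringbitmap classes appearing both under spark-shared and under a spark320 META-INF/versions path. Below is a minimal bash sketch of how such duplicates could be detected before the coverage step; the script name, the directory list, and the multi-release prefix handling are assumptions for illustration, not part of the premerge script.

#!/usr/bin/env bash
# check-dup-classes.sh -- hypothetical helper, not part of this patch.
# Reports class files that resolve to the same class name in more than one
# extracted shim directory, i.e. the condition JACOCO rejects as
# "different class with same name".
set -euo pipefail

# Directories assumed to exist after extracting the dist jar; adjust to the
# actual layout produced by the premerge script.
DIRS=("spark-shared" "spark320")

for d in "${DIRS[@]}"; do
  # Strip the directory prefix and any multi-release META-INF/versions/N/
  # prefix so copies of the same class collapse to one name.
  find "$d" -name '*.class' \
    | sed -E "s!^$d/!!; s!^META-INF/versions/[0-9]+/!!"
done | sort | uniq -d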