[SPARK-52578][SQL] Add numSourceRows metric for MergeIntoExec (#52669)
@@ -34,10 +34,11 @@ import org.apache.spark.sql.catalyst.util.RowDeltaUtils.{DELETE_OPERATION, INSER
 import org.apache.spark.sql.connector.catalog.{CatalogV2Util, Column, Identifier, StagedTable, StagingTableCatalog, Table, TableCatalog, TableInfo, TableWritePrivilege}
 import org.apache.spark.sql.connector.expressions.Transform
 import org.apache.spark.sql.connector.metric.CustomMetric
-import org.apache.spark.sql.connector.write.{BatchWrite, DataWriter, DataWriterFactory, DeltaWrite, DeltaWriter, PhysicalWriteInfoImpl, Write, WriterCommitMessage}
+import org.apache.spark.sql.connector.write.{BatchWrite, DataWriter, DataWriterFactory, DeltaWrite, DeltaWriter, PhysicalWriteInfoImpl, RowLevelOperationTable, Write, WriterCommitMessage}
 import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
 import org.apache.spark.sql.execution.{SparkPlan, SQLExecution, UnaryExecNode}
 import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
+import org.apache.spark.sql.execution.joins.BaseJoinExec
 import org.apache.spark.sql.execution.metric.{CustomMetrics, SQLMetric, SQLMetrics}
 import org.apache.spark.sql.types.StructType
 import org.apache.spark.util.{LongAccumulator, Utils}
@@ -481,9 +482,45 @@ trait V2TableWriteExec extends V2CommandExec with UnaryExecNode with AdaptiveSpa
   }
 
   private def getOperationMetrics(query: SparkPlan): util.Map[String, lang.Long] = {
-    collectFirst(query) { case m: MergeRowsExec => m }.map{ n =>
-      n.metrics.map { case (name, metric) => s"merge.$name" -> lang.Long.valueOf(metric.value) }
-    }.getOrElse(Map.empty[String, lang.Long]).asJava
+    collectFirst(query) { case m: MergeRowsExec => m } match {
+      case Some(mergeRowsExec) =>
+        val mergeMetrics = mergeRowsExec.metrics.map {
+          case (name, metric) => s"merge.$name" -> lang.Long.valueOf(metric.value)
+        }
+        val numSourceRows = getNumSourceRows(mergeRowsExec)
+        (mergeMetrics + ("merge.numSourceRows" -> lang.Long.valueOf(numSourceRows))).asJava
+      case None =>
+        Map.empty[String, lang.Long].asJava
+    }
+  }
+
+  private def getNumSourceRows(mergeRowsExec: MergeRowsExec): Long = {
+    def isTargetTableScan(plan: SparkPlan): Boolean = {
+      collectFirst(plan) {
+        case scan: BatchScanExec if scan.table.isInstanceOf[RowLevelOperationTable] => true
+      }.getOrElse(false)
+    }
+
+    val joinOpt = collectFirst(mergeRowsExec.child) { case j: BaseJoinExec => j }
+
+    joinOpt.flatMap { join =>
+      val leftIsTarget = isTargetTableScan(join.left)
+      val rightIsTarget = isTargetTableScan(join.right)
+
+      val sourceChild = if (leftIsTarget) {
+        Some(join.right)
+      } else if (rightIsTarget) {
+        Some(join.left)
+      } else {
+        None
+      }
+
+      sourceChild.flatMap { child =>
+        collectFirst(child) {
+          case plan if plan.metrics.contains("numOutputRows") => plan
+        }.flatMap(_.metrics.get("numOutputRows").map(_.value))
+      }
+    }.getOrElse(-1L)
   }
 }
 
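Note on the approach: getNumSourceRows walks the physical plan under MergeRowsExec, finds the first join, treats the side whose scan reads the RowLevelOperationTable as the target, and takes numOutputRows from the first node on the other side that reports it, falling back to -1 when the plan shape is not recognized. Below is a minimal, self-contained sketch of that traversal pattern; ToyPlan, ToyScan, ToyJoin and numSourceRows are hypothetical stand-ins, not Spark classes or code from this PR.

// Toy illustration of the lookup getNumSourceRows performs; no Spark dependency.
object NumSourceRowsSketch {
  sealed trait ToyPlan {
    def children: Seq[ToyPlan]
    def metrics: Map[String, Long] = Map.empty
    // Pre-order search for the first node matching the partial function,
    // in the spirit of AdaptiveSparkPlanHelper.collectFirst.
    def collectFirst[A](pf: PartialFunction[ToyPlan, A]): Option[A] =
      pf.lift(this).orElse(children.flatMap(_.collectFirst(pf).toSeq).headOption)
  }
  final case class ToyScan(isTarget: Boolean, rows: Long) extends ToyPlan {
    def children: Seq[ToyPlan] = Nil
    override def metrics: Map[String, Long] = Map("numOutputRows" -> rows)
  }
  final case class ToyJoin(left: ToyPlan, right: ToyPlan) extends ToyPlan {
    def children: Seq[ToyPlan] = Seq(left, right)
  }

  def numSourceRows(root: ToyPlan): Long = {
    def scansTarget(p: ToyPlan): Boolean =
      p.collectFirst { case s: ToyScan if s.isTarget => true }.getOrElse(false)

    root.collectFirst { case j: ToyJoin => j }.flatMap { join =>
      // The join side that does NOT scan the target table is treated as the source.
      val source =
        if (scansTarget(join.left)) Some(join.right)
        else if (scansTarget(join.right)) Some(join.left)
        else None
      source.flatMap(_.collectFirst { case p if p.metrics.contains("numOutputRows") => p })
        .flatMap(_.metrics.get("numOutputRows"))
    }.getOrElse(-1L) // same sentinel as the patch when the plan shape is not recognized
  }

  def main(args: Array[String]): Unit = {
    val plan = ToyJoin(ToyScan(isTarget = true, rows = 10L), ToyScan(isTarget = false, rows = 3L))
    println(numSourceRows(plan)) // 3: rows produced by the non-target (source) side
  }
}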