rdblue commented on code in PR #9332: URL: https://github.com/apache/iceberg/pull/9332#discussion_r1430720790
########## spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/IcebergSparkSqlExtensionsParser.scala: ########## @@ -122,37 +147,132 @@ class IcebergSparkSqlExtensionsParser(delegate: ParserInterface) extends ParserI if (isIcebergCommand(sqlTextAfterSubstitution)) { parse(sqlTextAfterSubstitution) { parser => astBuilder.visit(parser.singleStatement()) }.asInstanceOf[LogicalPlan] } else { - delegate.parsePlan(sqlText) + ViewSubstitutionExecutor.execute(delegate.parsePlan(sqlText)) } } - object UnresolvedIcebergTable { + private object ViewSubstitutionExecutor extends RuleExecutor[LogicalPlan] { + private val fixedPoint = FixedPoint( + maxIterations, + errorOnExceed = true, + maxIterationsSetting = SQLConf.ANALYZER_MAX_ITERATIONS.key) - def unapply(plan: LogicalPlan): Option[LogicalPlan] = { - EliminateSubqueryAliases(plan) match { - case UnresolvedRelation(multipartIdentifier, _, _) if isIcebergTable(multipartIdentifier) => - Some(plan) - case _ => + override protected def batches: Seq[Batch] = Seq(Batch("pre-substitution", fixedPoint, V2ViewSubstitution)) + } + + private object V2ViewSubstitution extends Rule[LogicalPlan] { + import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._ + + // the reason for handling these cases here is because ResolveSessionCatalog exits early for v2 commands + override def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp { + case u@UnresolvedView(identifier, _, _, _) => + lookupTableOrView(identifier, viewOnly = true).getOrElse(u) + + case u@UnresolvedTableOrView(identifier, _, _) => + lookupTableOrView(identifier).getOrElse(u) + + case CreateView(UnresolvedIdentifier(nameParts, allowTemp), userSpecifiedColumns, + comment, properties, originalText, query, allowExisting, replace) => + CreateIcebergView(UnresolvedIdentifier(nameParts, allowTemp), userSpecifiedColumns, + comment, properties, originalText, query, allowExisting, replace) + + case 
ShowViews(UnresolvedNamespace(multipartIdentifier), pattern, output) => + ShowIcebergViews(UnresolvedNamespace(multipartIdentifier), pattern, output) + + case DropView(UnresolvedIdentifier(nameParts, allowTemp), ifExists) => + DropIcebergView(UnresolvedIdentifier(nameParts, allowTemp), ifExists) + } + + private def expandIdentifier(nameParts: Seq[String]): Seq[String] = { + if (!isResolvingView || isReferredTempViewName(nameParts)) return nameParts + + if (nameParts.length == 1) { + AnalysisContext.get.catalogAndNamespace :+ nameParts.head + } else if (SparkSession.active.sessionState.catalogManager.isCatalogRegistered(nameParts.head)) { + nameParts + } else { + AnalysisContext.get.catalogAndNamespace.head +: nameParts + } + } + + /** + * Resolves relations to `ResolvedTable` or `Resolved[Temp/Persistent]View`. This is + * for resolving DDL and misc commands. Code is copied from Spark's Analyzer, but performs + * a view lookup before performing a table lookup. + */ + private def lookupTableOrView( + identifier: Seq[String], + viewOnly: Boolean = false): Option[LogicalPlan] = { + lookupTempView(identifier).map { tempView => + ResolvedTempView(identifier.asIdentifier, tempView.tableMeta.schema) Review Comment: We don't want to resolve a temp view. We only need to skip it and let the resolution logic in Spark handle resolution. -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: issues-unsubscribe@iceberg.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: issues-unsubscribe@iceberg.apache.org For additional commands, e-mail: issues-help@iceberg.apache.org