szehon-ho commented on code in PR #6760:
URL: https://github.com/apache/iceberg/pull/6760#discussion_r1185590925
##########
spark/v3.3/spark/src/main/scala/org/apache/spark/sql/execution/datasources/SparkExpressionConverter.scala:
##########
@@ -36,15 +35,30 @@ object SparkExpressionConverter {
     SparkFilters.convert(DataSourceStrategy.translateFilter(sparkExpression, supportNestedPredicatePushdown = true).get)
   }
 
-  @throws[AnalysisException]
-  def collectResolvedSparkExpression(session: SparkSession, tableName: String, where: String): Expression = {
+  def collectDeterministicSparkExpression(session: SparkSession, tableName: String, where: String): Boolean = {
+    // used only to check if a deterministic expression is true or false
+    val tableAttrs = session.table(tableName).queryExecution.analyzed.output
+    val firstColumnName = tableAttrs.head.name
+    val anotherWhere = s"$firstColumnName is not null and $where"
+    val unresolvedExpression = session.sessionState.sqlParser.parseExpression(anotherWhere)
+    val filter = Filter(unresolvedExpression, DummyRelation(tableAttrs))
+    val optimizedLogicalPlan = session.sessionState.executePlan(filter).optimizedPlan
+    val option = optimizedLogicalPlan.collectFirst {
+      case filter: Filter => Some(filter.condition)
+    }.getOrElse(Option.empty)
+    if (option.isDefined) true else false
+  }
+
+  def collectResolvedSparkExpressionOption(session: SparkSession, tableName: String, where: String): Option[Expression] = {
     val tableAttrs = session.table(tableName).queryExecution.analyzed.output
     val unresolvedExpression = session.sessionState.sqlParser.parseExpression(where)
     val filter = Filter(unresolvedExpression, DummyRelation(tableAttrs))
     val optimizedLogicalPlan = session.sessionState.executePlan(filter).optimizedPlan
     optimizedLogicalPlan.collectFirst {
-      case filter: Filter => filter.condition
-    }.getOrElse(throw new AnalysisException("Failed to find filter expression"))
+      case filter: Filter => Some(filter.condition)
Review Comment:
Hi @ludlows, taking a look at this, why don't we just use @aokolnychyi's code suggestion directly?
```
def collectResolvedSparkExpression(session: SparkSession, tableName: String, where: String): Expression = {
  val tableAttrs = session.table(tableName).queryExecution.analyzed.output
  val unresolvedExpression = session.sessionState.sqlParser.parseExpression(where)
  val filter = Filter(unresolvedExpression, DummyRelation(tableAttrs))
  val optimizedLogicalPlan = session.sessionState.executePlan(filter).optimizedPlan
  optimizedLogicalPlan.collectFirst {
    case filter: Filter => filter.condition // the condition is non-trivial
    case relation: DummyRelation => Literal.TrueLiteral // the condition is always true and the filter was removed by the optimizer
    case relation: LocalRelation => Literal.FalseLiteral // the condition is always false and the optimizer replaced the plan with an empty relation
  }.getOrElse(throw new AnalysisException("Failed to find filter expression"))
}
```
It looks like it should work. The ConstantFolding rule will probably get rid of the filter and make this match the second case (DummyRelation). And we have added test cases for alwaysTrue and alwaysFalse to catch it if we miss something.
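For reference, a rough sketch of what those alwaysTrue/alwaysFalse checks could look like (just a sketch: the `spark` session and the `db.tbl` table are placeholder names, not something defined in this PR):
```
// Sketch only: session and table names below are illustrative.
import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.execution.datasources.SparkExpressionConverter

// A tautology should fold down to TrueLiteral once the optimizer removes the filter.
val alwaysTrue = SparkExpressionConverter.collectResolvedSparkExpression(spark, "db.tbl", "1 = 1")
assert(alwaysTrue == Literal.TrueLiteral)

// A contradiction should collapse the plan to an empty LocalRelation, giving FalseLiteral.
val alwaysFalse = SparkExpressionConverter.collectResolvedSparkExpression(spark, "db.tbl", "1 = 2")
assert(alwaysFalse == Literal.FalseLiteral)
```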