This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git
commit 76d09b2210b68e45fabe845da88716c459795d5a
Author: Tiewei Fang <43782773+bepppo...@users.noreply.github.com>
AuthorDate: Mon May 27 10:41:40 2024 +0800

    [Fix](trino-connector) When an exception occurs, the query may not be cleared (#35367)

    Bug description: If an exception occurs in applyPushDown or getTrinoSplitSource,
    the query id in connectorMetadata may not be cleared
---
 .../source/TrinoConnectorScanNode.java | 30 +++++++++++-----------
 1 file changed, 15 insertions(+), 15 deletions(-)

diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/trinoconnector/source/TrinoConnectorScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/trinoconnector/source/TrinoConnectorScanNode.java
index 66e50eda8b4..fd5a7db1f4f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/trinoconnector/source/TrinoConnectorScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/trinoconnector/source/TrinoConnectorScanNode.java
@@ -135,25 +135,25 @@ public class TrinoConnectorScanNode extends FileQueryScanNode {
         connectorMetadata = source.getConnectorMetadata();
         ConnectorSession connectorSession = source.getTrinoSession().toConnectorSession(source.getCatalogHandle());
 
-        // 2. Begin query
-        connectorMetadata.beginQuery(connectorSession);
-        applyPushDown(connectorSession);
-
-        // 3. get splitSource
         List<Split> splits = Lists.newArrayList();
-        try (SplitSource splitSource = getTrinoSplitSource(connector, source.getTrinoSession(),
-                source.getTrinoConnectorTableHandle(), DynamicFilter.EMPTY)) {
-            // 4. get trino.Splits and convert it to doris.Splits
-            while (!splitSource.isFinished()) {
-                for (io.trino.metadata.Split split : getNextSplitBatch(splitSource)) {
-                    splits.add(new TrinoConnectorSplit(split.getConnectorSplit(), source.getConnectorName()));
+        try {
+            connectorMetadata.beginQuery(connectorSession);
+            applyPushDown(connectorSession);
+
+            // 3. get splitSource
+            try (SplitSource splitSource = getTrinoSplitSource(connector, source.getTrinoSession(),
+                    source.getTrinoConnectorTableHandle(), DynamicFilter.EMPTY)) {
+                // 4. get trino.Splits and convert it to doris.Splits
+                while (!splitSource.isFinished()) {
+                    for (io.trino.metadata.Split split : getNextSplitBatch(splitSource)) {
+                        splits.add(new TrinoConnectorSplit(split.getConnectorSplit(), source.getConnectorName()));
+                    }
                 }
             }
+        } finally {
+            // 4. Clear query
+            connectorMetadata.cleanupQuery(connectorSession);
         }
-
-        // 4. Clear query
-        // It is necessary for hive connector
-        connectorMetadata.cleanupQuery(connectorSession);
         return splits;
     }
 

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
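For illustration, the pattern this commit adopts can be shown in isolation: a minimal, self-contained Java sketch in which the registration made by beginQuery is guaranteed to be cleared even if pushdown or split enumeration throws. All types and names below (FakeConnectorMetadata, FakeSplitSource, getSplits) are hypothetical stand-ins, not Doris or Trino APIs.

// Minimal sketch of the try/finally cleanup pattern adopted by this fix.
// All types and names here are hypothetical stand-ins, not Doris or Trino APIs.

import java.util.ArrayList;
import java.util.List;

public class CleanupQuerySketch {

    /** Stand-in for connector metadata that tracks the ids of running queries. */
    interface FakeConnectorMetadata {
        void beginQuery(String queryId);     // registers the query id
        void cleanupQuery(String queryId);   // must run even if planning fails
    }

    /** Stand-in for a split source whose methods may throw at any point. */
    interface FakeSplitSource extends AutoCloseable {
        boolean isFinished();
        List<String> nextBatch();            // may throw, e.g. on a connector error
        @Override
        void close();
    }

    static List<String> getSplits(FakeConnectorMetadata metadata, String queryId,
                                  FakeSplitSource splitSource) {
        List<String> splits = new ArrayList<>();
        try {
            // Everything that can fail after beginQuery() stays inside the try block,
            // just as the commit moves applyPushDown()/getTrinoSplitSource() there.
            metadata.beginQuery(queryId);
            try (FakeSplitSource source = splitSource) {
                while (!source.isFinished()) {
                    splits.addAll(source.nextBatch());
                }
            }
        } finally {
            // Runs on both the success and the failure path, so the query id
            // registered above is always cleared.
            metadata.cleanupQuery(queryId);
        }
        return splits;
    }
}

The key point mirrors the diff above: beginQuery and everything that can fail after it sit inside the try block, and cleanupQuery moves into a finally block so it also runs on the failure path, whereas previously it ran only after a successful split enumeration.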