deniskuzZ commented on PR #6394:
URL: https://github.com/apache/hive/pull/6394#issuecomment-4160730924

   Here is a suggested patch:
   ````diff
   Subject: [PATCH] patch
   ---
   Index: iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergOutputCommitter.java
   IDEA additional info:
   Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
   <+>UTF-8
   ===================================================================
   diff --git a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergOutputCommitter.java b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergOutputCommitter.java
   --- a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergOutputCommitter.java
       (revision 93c30ab862034fc7ea3e9742d888417bbf456926)
   +++ b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergOutputCommitter.java
       (date 1774944147185)
   @@ -308,7 +308,11 @@
        for (JobContext jobContext : jobContextList) {
          for (String output : outputTables(jobContext.getJobConf())) {
          Table table = SessionStateUtil.getResource(jobContext.getJobConf(), output)
   -            .filter(o -> o instanceof Table).map(o -> (Table) o)
   +            .filter(Table.class::isInstance)
   +            .map(Table.class::cast)
   +            .map(tbl -> Optional.ofNullable(IcebergAcidUtil.getTransaction(tbl))
   +                .map(Transaction::table)
   +                .orElse(tbl))
                // fall back to getting the serialized table from the config
            .orElseGet(() -> HiveTableUtil.deserializeTable(jobContext.getJobConf(), output));
            if (table != null) {
   @@ -418,13 +422,6 @@
          Collection<JobContext> jobContexts, Operation operation) {
        String name = outputTable.tableName;
    
   -    Properties catalogProperties = new Properties();
   -    catalogProperties.put(Catalogs.NAME, name);
   -    catalogProperties.put(Catalogs.LOCATION, outputTable.table.location());
   -
   -    if (outputTable.catalogName != null) {
   -      catalogProperties.put(InputFormatConfig.CATALOG_NAME, outputTable.catalogName);
   -    }
        List<DataFile> dataFiles = Lists.newArrayList();
        List<DeleteFile> deleteFiles = Lists.newArrayList();
        List<DataFile> replacedDataFiles = Lists.newArrayList();
   @@ -432,7 +429,7 @@
        Set<CharSequence> referencedDataFiles = Sets.newHashSet();
        Set<Path> mergedAndDeletedFiles = Sets.newHashSet();
    
   -    Table table = null;
   +    Table table = outputTable.table;
        String branchName = null;
        Long snapshotId = null;
        Expression filterExpr = null;
   @@ -440,12 +437,11 @@
        for (JobContext jobContext : jobContexts) {
          JobConf conf = jobContext.getJobConf();
    
   -      table = Optional.ofNullable(table).orElseGet(() -> Catalogs.loadTable(conf, catalogProperties));
          branchName = conf.get(InputFormatConfig.OUTPUT_TABLE_SNAPSHOT_REF);
   -      snapshotId = getSnapshotId(outputTable.table, branchName);
   +      snapshotId = getSnapshotId(table, branchName);
    
          if (filterExpr == null) {
   -        filterExpr = SessionStateUtil.getConflictDetectionFilter(conf, catalogProperties.get(Catalogs.NAME))
   +        filterExpr = SessionStateUtil.getConflictDetectionFilter(conf, name)
                .map(expr -> HiveIcebergInputFormat.getFilterExpr(conf, expr))
                .orElse(null);
          }
   
   ````


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to