pvary commented on code in PR #14245:
URL: https://github.com/apache/iceberg/pull/14245#discussion_r2606202092


##########
flink/v2.1/flink/src/main/java/org/apache/iceberg/flink/sink/AvroGenericRecordToRowDataMapper.java:
##########
@@ -18,44 +18,119 @@
  */
 package org.apache.iceberg.flink.sink;
 
+import java.util.List;
+import org.apache.avro.LogicalTypes;
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.flink.api.common.functions.MapFunction;
 import org.apache.flink.formats.avro.AvroToRowDataConverters;
 import org.apache.flink.formats.avro.typeutils.AvroSchemaConverter;
+import org.apache.flink.table.data.GenericRowData;
 import org.apache.flink.table.data.RowData;
+import org.apache.flink.table.data.TimestampData;
 import org.apache.flink.table.types.DataType;
 import org.apache.flink.table.types.logical.LogicalType;
 import org.apache.flink.table.types.logical.RowType;
+import org.apache.flink.table.types.logical.TimestampType;
 import org.apache.flink.table.types.utils.TypeConversions;
-import org.apache.iceberg.avro.AvroSchemaUtil;
+import org.apache.iceberg.relocated.com.google.common.collect.Lists;
 
 /**
  * This util class converts Avro GenericRecord to Flink RowData. <br>
  * <br>
  * Internally it uses Flink {@link AvroToRowDataConverters}. Because of the 
precision difference
  * between how Iceberg schema (micro) and Flink {@link 
AvroToRowDataConverters} (milli) deal with
- * time type, we can't directly use the Avro Schema converted from Iceberg 
schema via {@link
- * AvroSchemaUtil#convert(org.apache.iceberg.Schema, String)}.
+ * time type, we can't directly use the Avro Schema converted from Iceberg 
schema. Additionally,
+ * Flink's converter doesn't handle timestamp-nanos logical type properly, so 
we manually fix those
+ * fields after conversion.
  */
 public class AvroGenericRecordToRowDataMapper implements 
MapFunction<GenericRecord, RowData> {
 
   private final AvroToRowDataConverters.AvroToRowDataConverter converter;
+  private final List<Integer> timestampNanosFieldIndices;
+  private final RowType rowType;
 
-  AvroGenericRecordToRowDataMapper(RowType rowType) {
+  AvroGenericRecordToRowDataMapper(RowType rowType, List<Integer> 
timestampNanosFieldIndices) {
     this.converter = AvroToRowDataConverters.createRowConverter(rowType);
+    this.timestampNanosFieldIndices = timestampNanosFieldIndices;
+    this.rowType = rowType;
   }
 
   @Override
   public RowData map(GenericRecord genericRecord) throws Exception {
-    return (RowData) converter.convert(genericRecord);
+    RowData rowData = (RowData) converter.convert(genericRecord);
+
+    // Post-process: Flink's AvroToRowDataConverters doesn't properly handle 
timestamp-nanos,
+    // so we need to manually fix the timestamp fields after conversion
+    if (timestampNanosFieldIndices.isEmpty()) {
+      return rowData;
+    }
+
+    // Create a new GenericRowData with corrected timestamp-nanos fields
+    GenericRowData correctedRowData = new GenericRowData(rowData.getArity());
+    correctedRowData.setRowKind(rowData.getRowKind());
+
+    // Copy all fields from the converted RowData
+    for (int i = 0; i < rowData.getArity(); i++) {
+      if (timestampNanosFieldIndices.contains(i)) {
+        // Manually convert timestamp-nanos field from original Avro record
+        Object avroValue = genericRecord.get(i);
+        if (avroValue instanceof Long) {
+          long nanos = (Long) avroValue;
+          long millis = Math.floorDiv(nanos, 1_000_000);
+          int nanosOfMillis = Math.floorMod(nanos, 1_000_000);
+          TimestampData timestampData = TimestampData.fromEpochMillis(millis, 
nanosOfMillis);

Review Comment:
   Same question as above. Any specific reason to do the manual arithmetic here (`Math.floorDiv`/`Math.floorMod`) rather than using an existing nanos-to-`TimestampData` conversion utility?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to