pvary commented on code in PR #12298: URL: https://github.com/apache/iceberg/pull/12298#discussion_r2549070808
########## core/src/main/java/org/apache/iceberg/avro/AvroFormatModel.java: ########## @@ -0,0 +1,272 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.iceberg.avro; + +import java.nio.ByteBuffer; +import java.util.Map; +import java.util.function.BiFunction; +import org.apache.avro.io.DatumReader; +import org.apache.avro.io.DatumWriter; +import org.apache.iceberg.FileContent; +import org.apache.iceberg.FileFormat; +import org.apache.iceberg.MetricsConfig; +import org.apache.iceberg.Schema; +import org.apache.iceberg.encryption.EncryptedOutputFile; +import org.apache.iceberg.expressions.Expression; +import org.apache.iceberg.formats.FormatModel; +import org.apache.iceberg.formats.ReadBuilder; +import org.apache.iceberg.formats.WriteBuilder; +import org.apache.iceberg.io.CloseableIterable; +import org.apache.iceberg.io.DeleteSchemaUtil; +import org.apache.iceberg.io.InputFile; +import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; + +public class AvroFormatModel<D, S> implements FormatModel<D, S> { + private final Class<D> type; + private final Class<S> schemaType; + private final BiFunction<Schema, Map<Integer, ?>, DatumReader<D>> readerFunction; + private final 
BiFunction<org.apache.avro.Schema, S, DatumWriter<D>> writerFunction; + + public AvroFormatModel(Class<D> type) { + this(type, null, null, null); + } + + public AvroFormatModel( + Class<D> type, + Class<S> schemaType, + BiFunction<Schema, Map<Integer, ?>, DatumReader<D>> readerFunction, + BiFunction<org.apache.avro.Schema, S, DatumWriter<D>> writerFunction) { + this.type = type; + this.schemaType = schemaType; + this.readerFunction = readerFunction; + this.writerFunction = writerFunction; + } + + @Override + public FileFormat format() { + return FileFormat.AVRO; + } + + @Override + public Class<D> type() { + return type; + } + + @Override + public Class<S> schemaType() { + return schemaType; + } + + @Override + public WriteBuilder<D, S> writeBuilder(EncryptedOutputFile outputFile) { + return new WriteBuilderWrapper<>(outputFile, writerFunction); + } + + @Override + public ReadBuilder<D, S> readBuilder(InputFile inputFile) { + return new ReadBuilderWrapper<>(inputFile, readerFunction); + } + + private static class ReadBuilderWrapper<D, S> implements ReadBuilder<D, S> { + private final Avro.ReadBuilder internal; + private final BiFunction<Schema, Map<Integer, ?>, DatumReader<D>> readerFunction; + private Schema icebergSchema; + private Map<Integer, ?> idToConstant = ImmutableMap.of(); + + private ReadBuilderWrapper( + InputFile inputFile, BiFunction<Schema, Map<Integer, ?>, DatumReader<D>> readerFunction) { + this.internal = Avro.read(inputFile); + this.readerFunction = readerFunction; + } + + @Override + public ReadBuilder<D, S> split(long newStart, long newLength) { + internal.split(newStart, newLength); + return this; + } + + @Override + public ReadBuilder<D, S> project(Schema schema) { + this.icebergSchema = schema; + internal.project(schema); + return this; + } + + @Override + public ReadBuilder<D, S> caseSensitive(boolean caseSensitive) { + // Filtering is not supported in Avro reader, so case sensitivity does not matter + return this; + } + + @Override + 
public ReadBuilder<D, S> filter(Expression filter) { + // Filtering is not supported in Avro reader + return this; + } + + @Override + public ReadBuilder<D, S> set(String key, String value) { + // Configuration is not used for Avro reader creation + return this; + } + + @Override + public ReadBuilder<D, S> reuseContainers() { + internal.reuseContainers(); + return this; + } + + @Override + public ReadBuilder<D, S> recordsPerBatch(int numRowsPerBatch) { + throw new UnsupportedOperationException("Batch reading is not supported in Avro reader"); + } + + @Override + public ReadBuilder<D, S> idToConstant(Map<Integer, ?> newIdToConstant) { + this.idToConstant = newIdToConstant; + return this; + } + + @Override + public ReadBuilder<D, S> withNameMapping(org.apache.iceberg.mapping.NameMapping nameMapping) { + internal.withNameMapping(nameMapping); + return this; + } + + @Override + public ReadBuilder<D, S> withFileEncryptionKey(ByteBuffer encryptionKey) { + // Avro doesn't support file encryption via this method + throw new UnsupportedOperationException("Avro does not support file encryption keys"); + } + + @Override + public ReadBuilder<D, S> withAADPrefix(ByteBuffer aadPrefix) { + // Avro doesn't support AAD prefix + throw new UnsupportedOperationException("Avro does not support AAD prefix"); + } + + @Override + public CloseableIterable<D> build() { + return internal + .createResolvingReader(unused -> readerFunction.apply(icebergSchema, idToConstant)) + .build(); + } + } + + private static class WriteBuilderWrapper<D, S> implements WriteBuilder<D, S> { + private final Avro.WriteBuilder internal; + private final BiFunction<org.apache.avro.Schema, S, DatumWriter<D>> writerFunction; + private S inputSchema; + + private WriteBuilderWrapper( + EncryptedOutputFile outputFile, + BiFunction<org.apache.avro.Schema, S, DatumWriter<D>> writerFunction) { + this.internal = Avro.write(outputFile.encryptingOutputFile()); + this.writerFunction = writerFunction; + } + + @Override + 
public WriteBuilder<D, S> schema(Schema schema) { + internal.schema(schema); + return this; + } + + @Override + public WriteBuilder<D, S> inputSchema(S schema) { + this.inputSchema = schema; + return this; + } + + @Override + public WriteBuilder<D, S> set(String property, String value) { + internal.set(property, value); + return this; + } + + @Override + public WriteBuilder<D, S> setAll(Map<String, String> properties) { + internal.setAll(properties); + return this; + } + + @Override + public WriteBuilder<D, S> meta(String property, String value) { + internal.meta(property, value); + return this; + } + + @Override + public WriteBuilder<D, S> meta(Map<String, String> properties) { + internal.meta(properties); + return this; + } + + @Override + public WriteBuilder<D, S> content(FileContent content) { + switch (content) { + case DATA: + internal.createContextFunc(Avro.WriteBuilder.Context::dataContext); + internal.createWriterFunc(avroSchema -> writerFunction.apply(avroSchema, inputSchema)); + break; + case EQUALITY_DELETES: + internal.createContextFunc(Avro.WriteBuilder.Context::deleteContext); + internal.createWriterFunc(avroSchema -> writerFunction.apply(avroSchema, inputSchema)); + break; + case POSITION_DELETES: + internal.createContextFunc(Avro.WriteBuilder.Context::deleteContext); + internal.createWriterFunc(unused -> new Avro.PositionDatumWriter()); + internal.schema(DeleteSchemaUtil.pathPosSchema()); + break; + default: + throw new IllegalArgumentException("Unknown file content: " + content); + } + + return this; + } + + @Override + public WriteBuilder<D, S> metricsConfig(MetricsConfig metricsConfig) { + internal.metricsConfig(metricsConfig); + return this; + } + + @Override + public WriteBuilder<D, S> overwrite() { + internal.overwrite(); + return this; + } + + @Override + public WriteBuilder<D, S> withFileEncryptionKey(ByteBuffer encryptionKey) { + // Avro doesn't support file encryption Review Comment: Removed ########## 
core/src/main/java/org/apache/iceberg/formats/ReadBuilder.java: ########## @@ -0,0 +1,123 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.iceberg.formats; + +import java.nio.ByteBuffer; +import java.util.Map; +import org.apache.iceberg.Schema; +import org.apache.iceberg.expressions.Expression; +import org.apache.iceberg.io.CloseableIterable; +import org.apache.iceberg.mapping.NameMapping; + +/** + * Builder interface for creating file readers across supported data file formats. The {@link + * FormatModel} implementations provide appropriate {@link ReadBuilder} instances. + * + * <p>The {@link ReadBuilder} follows the builder pattern to configure and create {@link + * CloseableIterable} instances that read data from source files. Configuration options include + * schema projection, predicate filtering, record batching, and encryption settings. + * + * <p>This interface is directly exposed to users for parameterizing readers. + * + * @param <D> the output data type produced by the reader + * @param <S> the type of the schema for the output data type + */ +public interface ReadBuilder<D, S> { + /** + * Restricts the read to the given range: [start, start + length). 
+ * + * @param start the start position for this read + * @param length the length of the range this read should scan + */ + ReadBuilder<D, S> split(long start, long length); + + /** Set the projection schema. */ + ReadBuilder<D, S> project(Schema schema); + + /** + * Configures whether filtering should be case-sensitive. If the reader supports filtering, it + * must respect this setting. The default value is <code>true</code>. + * + * @param caseSensitive indicates if filtering is case-sensitive + */ + ReadBuilder<D, S> caseSensitive(boolean caseSensitive); + + /** + * Pushes down the {@link Expression} filter for the reader to prevent reading unnecessary + * records. Some readers may not support filtering, or may only support filtering for certain + * expressions. In this case the reader might return unfiltered or partially filtered rows. It is + * the caller's responsibility to apply the filter again. + * + * @param filter the filter to set + */ + ReadBuilder<D, S> filter(Expression filter); + + /** + * Set a reader configuration property which affects the reader behavior. Reader builders should + * ignore configuration keys not known for them. + * + * @param key a reader config property name + * @param value config value + * @return this for method chaining + */ + ReadBuilder<D, S> set(String key, String value); + + /** + * Sets multiple reader configuration properties that affect the reader behavior. Reader builders + * should ignore configuration keys not known for them. + * + * @param properties reader config properties to set + * @return this for method chaining + */ + default ReadBuilder<D, S> setAll(Map<String, String> properties) { + properties.forEach(this::set); + return this; + } + + /** Enables reusing the containers returned by the reader. Decreases pressure on GC. */ + ReadBuilder<D, S> reuseContainers(); + + /** Sets the batch size for vectorized readers. 
*/ + ReadBuilder<D, S> recordsPerBatch(int rowsPerBatch); + + /** + * Contains the values in the result objects which are coming from metadata and not coming from + * the data files themselves. The keys of the map are the column ids, the values are the constant + * values to be used in the result. + */ + ReadBuilder<D, S> idToConstant(Map<Integer, ?> idToConstant); + + /** Sets a mapping from external schema names to Iceberg type IDs. */ + ReadBuilder<D, S> withNameMapping(NameMapping nameMapping); + + /** + * Sets the file encryption key used for reading the file. If the reader does not support + * encryption, then an exception should be thrown. + */ + ReadBuilder<D, S> withFileEncryptionKey(ByteBuffer encryptionKey); Review Comment: Removed -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: [email protected] For queries about this service, please contact Infrastructure at: [email protected] --------------------------------------------------------------------- To unsubscribe, e-mail: [email protected] For additional commands, e-mail: [email protected]
