rdblue commented on code in PR #12298:
URL: https://github.com/apache/iceberg/pull/12298#discussion_r2548139429
##########
parquet/src/main/java/org/apache/iceberg/parquet/ParquetFormatModel.java:
##########
@@ -0,0 +1,394 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.iceberg.parquet;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.function.Function;
+import org.apache.iceberg.FileContent;
+import org.apache.iceberg.FileFormat;
+import org.apache.iceberg.MetricsConfig;
+import org.apache.iceberg.Schema;
+import org.apache.iceberg.data.parquet.GenericParquetWriter;
+import org.apache.iceberg.encryption.EncryptedOutputFile;
+import org.apache.iceberg.expressions.Expression;
+import org.apache.iceberg.formats.FormatModel;
+import org.apache.iceberg.formats.ReadBuilder;
+import org.apache.iceberg.formats.WriteBuilder;
+import org.apache.iceberg.io.CloseableIterable;
+import org.apache.iceberg.io.DeleteSchemaUtil;
+import org.apache.iceberg.io.FileAppender;
+import org.apache.iceberg.io.InputFile;
+import org.apache.iceberg.mapping.NameMapping;
+import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
+import org.apache.iceberg.relocated.com.google.common.collect.Maps;
+import org.apache.parquet.column.ParquetProperties;
+import org.apache.parquet.schema.MessageType;
+
+public class ParquetFormatModel<D, S, F> implements FormatModel<D, S> {
+  public static final String WRITER_VERSION_KEY = "parquet.writer.version";
+
+  private final Class<? extends D> registerType;
+  private final Class<S> schemaType;
+  private final ReaderFunction<D> readerFunction;
+  private final BatchReaderFunction<D, F> batchReaderFunction;
+  private final WriterFunction<S> writerFunction;
+
+  private ParquetFormatModel(
+      Class<? extends D> registerType,
+      Class<S> schemaType,
+      ReaderFunction<D> readerFunction,
+      BatchReaderFunction<D, F> batchReaderFunction,
+      WriterFunction<S> writerFunction) {
+    this.registerType = registerType;
+    this.schemaType = schemaType;
+    this.readerFunction = readerFunction;
+    this.batchReaderFunction = batchReaderFunction;
+    this.writerFunction = writerFunction;
+  }
+
+  public ParquetFormatModel(Class<D> type) {
+    this(type, null, null, null, null);
+  }
+
+  public ParquetFormatModel(
+      Class<D> type,
+      Class<S> schemaType,
+      ReaderFunction<D> readerFunction,
+      WriterFunction<S> writerFunction) {
+    this(type, schemaType, readerFunction, null, writerFunction);
+  }
+
+  public ParquetFormatModel(
+      Class<? extends D> returnType,
+      Class<S> schemaType,
+      BatchReaderFunction<D, F> batchReaderFunction) {
+    this(returnType, schemaType, null, (BatchReaderFunction<D, F>) batchReaderFunction, null);
+  }
+
+  @Override
+  public FileFormat format() {
+    return FileFormat.PARQUET;
+  }
+
+  @Override
+  public Class<D> type() {
+    return (Class<D>) registerType;
+  }
+
+  @Override
+  public Class<S> schemaType() {
+    return schemaType;
+  }
+
+  @Override
+  public WriteBuilder<D, S> writeBuilder(EncryptedOutputFile outputFile) {
+    return new WriteBuilderWrapper<>(outputFile, writerFunction);
+  }
+
+  @Override
+  public ReadBuilder<D, S> readBuilder(InputFile inputFile) {
+    if (readerFunction != null) {
+      return new NonBatchReaderWrapper<>(inputFile, readerFunction);
+    } else if (batchReaderFunction != null) {
+      return new BatchReaderWrapper<>(inputFile, batchReaderFunction);
+    } else {
+      throw new IllegalStateException("Either readerFunction or batchReaderFunction must be set");
+    }
+  }
+
+  @FunctionalInterface
+  public interface ReaderFunction<D> {
+    ParquetValueReader<D> read(
+        Schema schema, MessageType messageType, Map<Integer, ?> constantValues);
+  }
+
+  @FunctionalInterface
+  public interface BatchReaderFunction<D, F> {
+    VectorizedReader<D> read(
+        Schema schema,
+        MessageType messageType,
+        Map<Integer, ?> constantValues,
+        F deleteFilter,
+        Map<String, String> config);
+  }
+
+  @FunctionalInterface
+  public interface WriterFunction<S> {
+    ParquetValueWriter<?> write(Schema icebergSchema, MessageType messageType, S engineSchema);
+  }
+
+  public interface SupportsDeleteFilter<F> {
+    void deleteFilter(F deleteFilter);
+  }
+
+  private static class WriteBuilderWrapper<D, S> implements WriteBuilder<D, S> {
+    private final Parquet.WriteBuilder internal;
+    private final WriterFunction<S> writerFunction;
+    private S inputSchema;
+
+    private WriteBuilderWrapper(EncryptedOutputFile outputFile, WriterFunction<S> writerFunction) {
+      this.internal = Parquet.write(outputFile);
+      this.writerFunction = writerFunction;
+    }
+
+    @Override
+    public WriteBuilder<D, S> schema(Schema schema) {
+      internal.schema(schema);
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> inputSchema(S schema) {
+      this.inputSchema = schema;
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> set(String property, String value) {
+      if (WRITER_VERSION_KEY.equals(property)) {
+        internal.writerVersion(ParquetProperties.WriterVersion.valueOf(value));
+      }
+
+      internal.set(property, value);
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> setAll(Map<String, String> properties) {
+      internal.setAll(properties);
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> meta(String property, String value) {
+      internal.meta(property, value);
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> meta(Map<String, String> properties) {
+      internal.meta(properties);
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> content(FileContent content) {
+      switch (content) {
+        case DATA:
+          internal.createContextFunc(Parquet.WriteBuilder.Context::dataContext);
+          internal.createWriterFunc(
+              (icebergSchema, messageType) ->
+                  writerFunction.write(icebergSchema, messageType, inputSchema));
+          break;
+        case EQUALITY_DELETES:
+          internal.createContextFunc(Parquet.WriteBuilder.Context::deleteContext);
+          internal.createWriterFunc(
+              (icebergSchema, messageType) ->
+                  writerFunction.write(icebergSchema, messageType, inputSchema));
+          break;
+        case POSITION_DELETES:
+          internal.createContextFunc(Parquet.WriteBuilder.Context::deleteContext);
+          internal.createWriterFunc(
+              (icebergSchema, messageType) ->
+                  new ParquetValueWriters.PositionDeleteStructWriter<D>(
+                      (ParquetValueWriters.StructWriter<?>)
+                          GenericParquetWriter.create(icebergSchema, messageType),
+                      Function.identity()));
+          internal.schema(DeleteSchemaUtil.pathPosSchema());
+          break;
+        default:
+          throw new IllegalArgumentException("Unknown file content: " + content);
+      }
+
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> metricsConfig(MetricsConfig metricsConfig) {
+      internal.metricsConfig(metricsConfig);
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> overwrite() {
+      internal.overwrite();
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> withFileEncryptionKey(ByteBuffer encryptionKey) {
+      internal.withFileEncryptionKey(encryptionKey);
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> withAADPrefix(ByteBuffer aadPrefix) {
+      internal.withAADPrefix(aadPrefix);
+      return this;
+    }
+
+    @Override
+    public FileAppender<D> build() throws IOException {
+      return internal.build();
+    }
+  }
+
+  private abstract static class ReadBuilderWrapper<D, S, F> implements ReadBuilder<D, S> {

Review Comment:
   I think this can be simplified quite a bit:
   1. The batch reader function and the row reader function can be combined, because the delete filter and config do not need to be passed through. That means the subclasses of this builder are not needed.
   2. Without subclasses, this does not need to expose methods for fetching its state. I also refactored this class and its concrete children locally and removed the need to expose that state, but it's easier to just remove the subclasses.
   3. This doesn't need to track the Iceberg schema. Instead, it should register a binary reader function with Parquet so that Parquet is responsible for passing the Iceberg schema.
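   To make the third point concrete, here is a rough sketch of a combined, subclass-free read builder. The single `ReadBuilderWrapper` shape, the empty constants map, and the `project`/`build` signatures on the new `formats.ReadBuilder` interface are illustrative assumptions, not this PR's code; only `Parquet.read(...)` and the two-argument `createReaderFunc` overload are existing Iceberg APIs:

   ```java
   // Illustrative sketch only; assumed names and signatures noted above.
   private static class ReadBuilderWrapper<D, S> implements ReadBuilder<D, S> {
     private final Parquet.ReadBuilder internal;

     private ReadBuilderWrapper(InputFile inputFile, ReaderFunction<D> readerFunction) {
       // Register a binary (Iceberg schema, Parquet schema) reader function so that
       // Parquet passes the projection schema itself; no Iceberg schema field is kept.
       this.internal =
           Parquet.read(inputFile)
               .createReaderFunc(
                   (icebergSchema, messageType) ->
                       // Constant values elided for brevity; a real builder would
                       // thread them in from its own configuration.
                       readerFunction.read(icebergSchema, messageType, ImmutableMap.of()));
     }

     @Override
     public ReadBuilder<D, S> project(Schema schema) {
       internal.project(schema);
       return this;
     }

     @Override
     public CloseableIterable<D> build() {
       return internal.build();
     }
   }
   ```

   Because the delete filter and config are applied outside the reader function, the same wrapper can serve both the row and batch paths, which is what removes the need for `NonBatchReaderWrapper` and `BatchReaderWrapper`.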
