Fokko commented on code in PR #42: URL: https://github.com/apache/iceberg-rust/pull/42#discussion_r1324166782
########## crates/iceberg/src/transform/truncate.rs: ########## @@ -0,0 +1,177 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +use std::sync::Arc; + +use arrow_array::ArrayRef; +use arrow_schema::DataType; + +use crate::Error; + +use super::TransformFunction; + +pub struct Truncate { + width: u32, +} + +impl Truncate { + pub fn new(width: u32) -> Self { + Self { width } + } +} + +impl TransformFunction for Truncate { + fn transform(&self, input: ArrayRef) -> crate::Result<ArrayRef> { + match input.data_type() { + DataType::Int32 => { + let width: i32 = self.width.try_into().map_err(|_| { + Error::new( + crate::ErrorKind::DataInvalid, + "width is failed to convert to i32 when truncate Int32Array", + ) + })?; + let res: arrow_array::Int32Array = input + .as_any() + .downcast_ref::<arrow_array::Int32Array>() + .unwrap() + .unary(|v| v - v.rem_euclid(width)); + Ok(Arc::new(res)) + } + DataType::Int64 => { + let width = self.width as i64; + let res: arrow_array::Int64Array = input + .as_any() + .downcast_ref::<arrow_array::Int64Array>() + .unwrap() + .unary(|v| v - (((v % width) + width) % width)); + Ok(Arc::new(res)) + } + DataType::Decimal128(precision, scale) => { + let width = self.width as i128; + let res: arrow_array::Decimal128Array = input + .as_any() + .downcast_ref::<arrow_array::Decimal128Array>() + .unwrap() + .unary(|v| v - (((v % width) + width) % width)) + .with_precision_and_scale(*precision, *scale) + .map_err(|err| Error::new(crate::ErrorKind::Unexpected, format!("{err}")))?; + Ok(Arc::new(res)) + } + DataType::Utf8 => { + let len = self.width as usize; + let res: arrow_array::StringArray = arrow_array::StringArray::from_iter( + input + .as_any() + .downcast_ref::<arrow_array::StringArray>() + .unwrap() + .iter() + .map(|v| v.map(|v| &v[..len])),
Review Comment: Unfortunately we have to make sure that we don't break the Unicode encoding: https://github.com/apache/iceberg/blob/master/api/src/main/java/org/apache/iceberg/util/UnicodeUtil.java#L36-L55 Here are some tests that might help: https://github.com/apache/iceberg/blob/master/core/src/test/java/org/apache/iceberg/TestMetricsTruncation.java#L145
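To illustrate the concern: `&v[..len]` slices by byte offset, so it can split a multi-byte UTF-8 sequence (which panics in Rust) or silently keep fewer characters than intended. A rough sketch of a code-point-aware truncation, along the lines of what `UnicodeUtil.java` does on the Java side (the helper name and placement are illustrative, not part of this PR):

```rust
/// Truncate `s` to at most `width` Unicode code points, never splitting a
/// multi-byte UTF-8 sequence. Hypothetical helper, for illustration only.
fn truncate_str(s: &str, width: usize) -> &str {
    match s.char_indices().nth(width) {
        // Byte offset where the (width + 1)-th code point starts.
        Some((byte_idx, _)) => &s[..byte_idx],
        // The string already has `width` or fewer code points.
        None => s,
    }
}

#[test]
fn truncate_str_keeps_char_boundaries() {
    // 'ü' is two bytes in UTF-8, so the byte slice `&s[..2]` would panic here.
    assert_eq!(truncate_str("München", 2), "Mü");
    assert_eq!(truncate_str("iceberg", 16), "iceberg");
}
```

Note that the Java side also has a `truncateStringMax` variant (it increments the last code point) for upper bounds; if this transform later backs the metrics truncation, the linked tests cover that behaviour as well.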
########## crates/iceberg/src/transform/bucket.rs: ########## @@ -0,0 +1,227 @@ +use std::sync::Arc; + +use arrow_array::ArrayRef; +use arrow_schema::{DataType, TimeUnit}; + +use super::TransformFunction; + +pub struct Bucket { + mod_n: u32, +} + +impl Bucket { + pub fn new(mod_n: u32) -> Self { + Self { mod_n } + } +} + +impl Bucket { + /// When switch the hash function, we only need to change this function. + fn hash_bytes(mut v: &[u8]) -> i32 { + murmur3::murmur3_32(&mut v, 0).unwrap() as i32 + } + + fn hash_int(v: i32) -> i32 { + Self::hash_long(v as i64) + } + + fn hash_long(v: i64) -> i32 { + Self::hash_bytes(v.to_le_bytes().as_slice()) + } + + /// v is days from unix epoch + fn hash_date(v: i32) -> i32 { + Self::hash_int(v) + } + + /// v is microseconds from midnight + fn hash_time(v: i64) -> i32 { + Self::hash_long(v) + } + + /// v is microseconds from unix epoch + fn hash_timestamp(v: i64) -> i32 { + Self::hash_long(v) + } + + fn hash_str(s: &str) -> i32 { + Self::hash_bytes(s.as_bytes()) + } + + /// Decimal values are hashed using the minimum number of bytes required to hold the unscaled value as a two’s complement big-endian + /// ref: https://iceberg.apache.org/spec/#appendix-b-32-bit-hash-requirements + fn hash_decimal(v: i128) -> i32 { + let bytes = v.to_be_bytes(); + if let Some(start) = bytes.iter().position(|&x| x != 0) { + Self::hash_bytes(&bytes[start..]) + } else { + Self::hash_bytes(&[0]) + } + } + + /// def bucket_N(x) = (murmur3_x86_32_hash(x) & Integer.MAX_VALUE) % N + /// ref: https://iceberg.apache.org/spec/#partitioning + fn bucket_n(&self, v: i32) -> i32 { + (v & i32::MAX) % (self.mod_n as i32) + } +} + +impl TransformFunction for Bucket { + fn transform(&self, input: ArrayRef) -> crate::Result<ArrayRef> { + let res: arrow_array::Int32Array = match input.data_type() { + DataType::Int32 => input + .as_any() + .downcast_ref::<arrow_array::Int32Array>() + .unwrap() + .unary(|v| self.bucket_n(Self::hash_int(v))), + DataType::Int64 => input + .as_any() + .downcast_ref::<arrow_array::Int64Array>() + .unwrap() + .unary(|v| self.bucket_n(Self::hash_long(v))), + DataType::Decimal128(_, _) => input + .as_any() + .downcast_ref::<arrow_array::Decimal128Array>() + .unwrap() + .unary(|v| self.bucket_n(Self::hash_decimal(v))), + DataType::Date32 => input + .as_any() + .downcast_ref::<arrow_array::Date32Array>() + .unwrap() + .unary(|v| self.bucket_n(Self::hash_date(v))), + DataType::Time64(TimeUnit::Microsecond) => input + .as_any() + .downcast_ref::<arrow_array::Time64MicrosecondArray>() + .unwrap() + .unary(|v| self.bucket_n(Self::hash_time(v))), + DataType::Timestamp(TimeUnit::Microsecond, _) => input + .as_any() + .downcast_ref::<arrow_array::TimestampMicrosecondArray>() + .unwrap() + .unary(|v| self.bucket_n(Self::hash_timestamp(v))), + DataType::Utf8 => arrow_array::Int32Array::from_iter( + input + .as_any() + .downcast_ref::<arrow_array::StringArray>() + .unwrap() + .iter() + .map(|v| self.bucket_n(Self::hash_str(v.unwrap()))), + ), + DataType::LargeUtf8 => arrow_array::Int32Array::from_iter( + input + .as_any() + .downcast_ref::<arrow_array::LargeStringArray>() + .unwrap() + .iter() + .map(|v| self.bucket_n(Self::hash_str(v.unwrap()))), + ), + DataType::Binary => arrow_array::Int32Array::from_iter( + input + .as_any() + .downcast_ref::<arrow_array::BinaryArray>() + .unwrap() + .iter() + .map(|v| self.bucket_n(Self::hash_bytes(v.unwrap()))), + ), + DataType::LargeBinary => arrow_array::Int32Array::from_iter( + input + .as_any() + .downcast_ref::<arrow_array::LargeBinaryArray>() + .unwrap() + .iter() + .map(|v| self.bucket_n(Self::hash_bytes(v.unwrap()))), + ), + DataType::FixedSizeBinary(_) => arrow_array::Int32Array::from_iter( + input + .as_any() + .downcast_ref::<arrow_array::FixedSizeBinaryArray>() + .unwrap() + .iter() + .map(|v| self.bucket_n(Self::hash_bytes(v.unwrap()))), + ), + _ => unreachable!("Unsupported data type: {:?}", input.data_type()), + };
Ok(Arc::new(res)) + } +} + +#[cfg(test)] +mod test { + use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime}; + + use super::Bucket; + #[test] + fn test_hash() {
Review Comment: Thanks for adding these! 👍🏻
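One more test idea in the same spirit: the spec requires `int` and `long` values to hash identically (which is why `hash_int` widens to `i64` before hashing), so the same logical value must land in the same bucket regardless of the physical type. A rough sketch that could sit next to `test_hash` (array construction details are illustrative):

```rust
#[test]
fn test_int_and_long_bucket_to_same_value() {
    use std::sync::Arc;
    use arrow_array::{ArrayRef, Int32Array, Int64Array};

    // Bring the trait into scope so `transform` is callable.
    use crate::transform::TransformFunction;

    let bucket = Bucket::new(16);

    // The same logical value as a 32-bit and a 64-bit column.
    let ints: ArrayRef = Arc::new(Int32Array::from(vec![34]));
    let longs: ArrayRef = Arc::new(Int64Array::from(vec![34i64]));

    let a = bucket.transform(ints).unwrap();
    let b = bucket.transform(longs).unwrap();

    // Both results are Int32 bucket ids and must match element-wise.
    let a = a.as_any().downcast_ref::<Int32Array>().unwrap();
    let b = b.as_any().downcast_ref::<Int32Array>().unwrap();
    assert_eq!(a.value(0), b.value(0));
}
```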
########## crates/iceberg/src/transform/mod.rs: ########## @@ -0,0 +1,51 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +//! Transform function used to compute partition values. +use crate::{spec::Transform, Result}; +use arrow::array::ArrayRef; + +mod identity; +mod temporal; +mod void; + +/// TransformFunction is a trait that defines the interface for all transform functions. +pub trait TransformFunction: Send { + /// transform will take an input array and transform it into a new array. + /// The implementation of this function will need to check and downcast the input to specific + /// type. + fn transform(&self, input: ArrayRef) -> Result<ArrayRef>;
Review Comment: Transforms are used in many places. > For example when we want to execute select * from t where a = 1 and b=2, and table t is partitioned by a, we can prune unnecessary partitions by calculating a=1. In this case, we need to use iceberg value. This brings me back to my original question. It is just a single value, and vectorization would not help much. Wouldn't it be easier to operate on a single value? Keep in mind that transforms are also used when writing data. When a file is being written, Iceberg keeps the upper and lower bounds of each of the columns. By default, a `truncate(16)` transform is applied before storing the metrics in the manifest. In PyIceberg, we use Arrow to write: https://github.com/apache/iceberg/pull/7831 Then we use Arrow's metadata collector to get the lower and upper bound of each row group, which we then store in the manifest file. I'm not saying that it should be the same in 🦀 , but just for the context.
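On the single-value case: the array-based trait can still be reused for a point value by wrapping it in a one-element array, so partition pruning for something like `a = 1` and the vectorized write path can share one code path; whether that indirection is acceptable, or whether a scalar entry point should live next to `transform`, is exactly the trade-off here. A rough sketch of the wrapping approach, assuming the `create_transform_function` factory from this file is exposed as `crate::transform` (the helper itself is illustrative, not part of this PR):

```rust
use std::sync::Arc;

use arrow_array::{ArrayRef, Int64Array};

use crate::spec::Transform;
use crate::transform::create_transform_function;
use crate::Result;

/// Illustrative only: apply a partition transform to a single i64 value
/// (e.g. the literal in `WHERE a = 1`) by going through the array path.
fn transform_single_i64(transform: &Transform, value: i64) -> Result<ArrayRef> {
    let func = create_transform_function(transform)?;
    let input: ArrayRef = Arc::new(Int64Array::from(vec![value]));
    // The result is a one-element array holding the partition value.
    func.transform(input)
}
```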
########## crates/iceberg/src/transform/mod.rs: ########## @@ -0,0 +1,51 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +//! Transform function used to compute partition values. +use crate::{spec::Transform, Result}; +use arrow::array::ArrayRef; + +mod identity; +mod temporal; +mod void; + +/// TransformFunction is a trait that defines the interface for all transform functions. +pub trait TransformFunction: Send { + /// transform will take an input array and transform it into a new array.
+ /// The implementation of this function will need to check and downcast the input to specific + /// type. + fn transform(&self, input: ArrayRef) -> Result<ArrayRef>; +} + +/// BoxedTransformFunction is a boxed trait object of TransformFunction. +pub type BoxedTransformFunction = Box<dyn TransformFunction>; + +/// create_transform_function creates a boxed trait object of TransformFunction from a Transform. +pub fn create_transform_function(transform: &Transform) -> Result<BoxedTransformFunction> { + match transform { + Transform::Identity => Ok(Box::new(identity::Identity {})), + Transform::Void => Ok(Box::new(void::Void {})), + Transform::Year => Ok(Box::new(temporal::Year {})), + Transform::Month => Ok(Box::new(temporal::Month {})), + Transform::Day => Ok(Box::new(temporal::Day {})), + Transform::Hour => Ok(Box::new(temporal::Hour {})), + _ => Err(crate::error::Error::new( + crate::ErrorKind::FeatureUnsupported, + format!("Transform {:?} is not implemented", transform), + )), + }
Review Comment: I prefer to have them all in one PR, thanks! 👍🏻
-- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment.