syun64 commented on code in PR #358:
URL: https://github.com/apache/iceberg-python/pull/358#discussion_r1477330747


##########
tests/integration/test_writes.py:
##########
@@ -489,6 +492,58 @@ def test_data_files(spark: SparkSession, session_catalog: Catalog, arrow_table_w
     assert [row.deleted_data_files_count for row in rows] == [0, 0, 1, 0, 0]
 
 
+@pytest.mark.integration
+@pytest.mark.parametrize(
+    "compression",
+    # List of (compression_properties, expected_compression_name)
+    [
+        # REST catalog uses Zstandard by default: https://github.com/apache/iceberg/pull/8593
+        ({}, "ZSTD"),
+        ({"write.parquet.compression-codec": "uncompressed"}, "UNCOMPRESSED"),
+        ({"write.parquet.compression-codec": "gzip", 
"write.parquet.compression-level": "1"}, "GZIP"),
+        ({"write.parquet.compression-codec": "zstd", 
"write.parquet.compression-level": "1"}, "ZSTD"),
+        ({"write.parquet.compression-codec": "snappy"}, "SNAPPY"),
+    ],
+)
+def test_parquet_compression(spark: SparkSession, arrow_table_with_null: pa.Table, compression) -> None:
+    compression_properties, expected_compression_name = compression
+
+    catalog = load_catalog(
+        "local",
+        **{
+            "type": "rest",
+            "uri": "http://localhost:8181";,
+            "s3.endpoint": "http://localhost:9000";,
+            "s3.access-key-id": "admin",
+            "s3.secret-access-key": "password",
+            **compression_properties,
+        },

Review Comment:
   ```suggestion
               "s3.secret-access-key": "password",
           },
   ```
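   With `**compression_properties` removed, the catalog config carries only connection settings; the parametrized compression settings would instead be supplied as table properties at creation time (see the next comment). A minimal sketch of the reduced config (illustrative only, not the exact code in the PR):

   ```python
   # Hypothetical connection-only catalog config for the test; the write.parquet.*
   # settings move to the table properties passed to create_table instead.
   catalog = load_catalog(
       "local",
       **{
           "type": "rest",
           "uri": "http://localhost:8181",
           "s3.endpoint": "http://localhost:9000",
           "s3.access-key-id": "admin",
           "s3.secret-access-key": "password",
       },
   )
   ```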



##########
tests/integration/test_writes.py:
##########
@@ -489,6 +492,58 @@ def test_data_files(spark: SparkSession, session_catalog: Catalog, arrow_table_w
     assert [row.deleted_data_files_count for row in rows] == [0, 0, 1, 0, 0]
 
 
+@pytest.mark.integration
+@pytest.mark.parametrize(
+    "compression",
+    # List of (compression_properties, expected_compression_name)
+    [
+        # REST catalog uses Zstandard by default: https://github.com/apache/iceberg/pull/8593
+        ({}, "ZSTD"),
+        ({"write.parquet.compression-codec": "uncompressed"}, "UNCOMPRESSED"),
+        ({"write.parquet.compression-codec": "gzip", 
"write.parquet.compression-level": "1"}, "GZIP"),
+        ({"write.parquet.compression-codec": "zstd", 
"write.parquet.compression-level": "1"}, "ZSTD"),
+        ({"write.parquet.compression-codec": "snappy"}, "SNAPPY"),
+    ],
+)
+def test_parquet_compression(spark: SparkSession, arrow_table_with_null: pa.Table, compression) -> None:
+    compression_properties, expected_compression_name = compression
+
+    catalog = load_catalog(
+        "local",
+        **{
+            "type": "rest",
+            "uri": "http://localhost:8181";,
+            "s3.endpoint": "http://localhost:9000";,
+            "s3.access-key-id": "admin",
+            "s3.secret-access-key": "password",
+            **compression_properties,
+        },
+    )
+    identifier = "default.arrow_data_files"
+
+    try:
+        catalog.drop_table(identifier=identifier)
+    except NoSuchTableError:
+        pass
+    tbl = catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'})

Review Comment:
   ```suggestion
       tbl = catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1', **compression_properties})
   ```
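   Presumably the test then appends `arrow_table_with_null` and asserts `expected_compression_name` against the codec recorded in the written Parquet files. One way to check that (a sketch assuming the table's FileIO can open the data files; not necessarily what this PR does) is to read the Parquet footer metadata with pyarrow:

   ```python
   import pyarrow.parquet as pq

   # Sketch: write the fixture data, then inspect the compression codec recorded
   # in the column-chunk metadata of each data file produced by the write.
   tbl.append(arrow_table_with_null)

   for task in tbl.scan().plan_files():
       with tbl.io.new_input(task.file.file_path).open() as f:
           metadata = pq.ParquetFile(f).metadata
           assert metadata.row_group(0).column(0).compression == expected_compression_name
   ```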



##########
pyiceberg/catalog/rest.py:
##########
@@ -450,6 +450,10 @@ def create_table(
         iceberg_schema = self._convert_schema_if_needed(schema)
         iceberg_schema = assign_fresh_schema_ids(iceberg_schema)
 
+        properties = properties.copy()
+        for copy_key in ["write.parquet.compression-codec", "write.parquet.compression-level"]:
+            if copy_key in self.properties:
+                properties[copy_key] = self.properties[copy_key]

Review Comment:
   ```suggestion
   ```
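   Without this copy, the REST catalog would no longer promote `write.parquet.compression-codec` / `write.parquet.compression-level` from catalog properties into newly created tables; callers would set them explicitly as table properties, in line with the suggestion on the test above. Roughly:

   ```python
   # Sketch: compression configured per table at creation time instead of being
   # inherited from catalog-level properties.
   tbl = catalog.create_table(
       identifier="default.arrow_data_files",
       schema=TABLE_SCHEMA,
       properties={
           "format-version": "1",
           "write.parquet.compression-codec": "zstd",
           "write.parquet.compression-level": "1",
       },
   )
   ```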



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: issues-unsubscr...@iceberg.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

