This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new f30ddff123d [SPARK-44277][BUILD] Upgrade to Avro 1.11.2
f30ddff123d is described below
commit f30ddff123d802dd6f2576ca1f69a8e712daa461
Author: Ismaël Mejía <[email protected]>
AuthorDate: Wed Jul 5 16:11:20 2023 +0900
[SPARK-44277][BUILD] Upgrade to Avro 1.11.2
### What changes were proposed in this pull request?
Upgrade Avro dependency to version 1.11.2
### Why are the changes needed?
To keep up with upstream Avro releases.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Normal Spark build tests.
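
For context, a minimal Scala sketch (not part of this patch) of the kind of round trip the build tests exercise through the `avro` data source; the app name and output path are placeholders, and `recordName`/`recordNamespace` are the options documented in `AvroOptions.scala` below.

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical smoke test: write and read a small dataset through the
// built-in "avro" data source against the upgraded Avro 1.11.2 runtime.
val spark = SparkSession.builder().appName("avro-upgrade-check").getOrCreate()

spark.range(10).toDF("id")
  .write
  .format("avro")
  .option("recordName", "topLevelRecord") // default, per AvroOptions
  .option("recordNamespace", "")          // default, per AvroOptions
  .mode("overwrite")
  .save("/tmp/avro-upgrade-check")        // placeholder path

spark.read.format("avro").load("/tmp/avro-upgrade-check").show()
```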
Closes #41830 from iemejia/SPARK-44277.
Authored-by: Ismaël Mejía <[email protected]>
Signed-off-by: Hyukjin Kwon <[email protected]>
---
.../avro/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala | 4 ++--
dev/deps/spark-deps-hadoop-3-hive-2.3 | 6 +++---
docs/sql-data-sources-avro.md | 4 ++--
pom.xml | 2 +-
project/SparkBuild.scala | 2 +-
.../scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala | 2 +-
6 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala b/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala
index c8057ca5879..edaaa8835cc 100644
--- a/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala
+++ b/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala
@@ -81,14 +81,14 @@ private[sql] class AvroOptions(
/**
* Top level record name in write result, which is required in Avro spec.
- * See https://avro.apache.org/docs/1.11.1/specification/#schema-record .
+ * See https://avro.apache.org/docs/1.11.2/specification/#schema-record .
* Default value is "topLevelRecord"
*/
val recordName: String = parameters.getOrElse(RECORD_NAME, "topLevelRecord")
/**
* Record namespace in write result. Default value is "".
- * See Avro spec for details: https://avro.apache.org/docs/1.11.1/specification/#schema-record .
+ * See Avro spec for details: https://avro.apache.org/docs/1.11.2/specification/#schema-record .
*/
val recordNamespace: String = parameters.getOrElse(RECORD_NAMESPACE, "")
diff --git a/dev/deps/spark-deps-hadoop-3-hive-2.3 b/dev/deps/spark-deps-hadoop-3-hive-2.3
index 1cdf08f321e..1b91686ed4d 100644
--- a/dev/deps/spark-deps-hadoop-3-hive-2.3
+++ b/dev/deps/spark-deps-hadoop-3-hive-2.3
@@ -21,9 +21,9 @@ arrow-memory-core/12.0.1//arrow-memory-core-12.0.1.jar
arrow-memory-netty/12.0.1//arrow-memory-netty-12.0.1.jar
arrow-vector/12.0.1//arrow-vector-12.0.1.jar
audience-annotations/0.5.0//audience-annotations-0.5.0.jar
-avro-ipc/1.11.1//avro-ipc-1.11.1.jar
-avro-mapred/1.11.1//avro-mapred-1.11.1.jar
-avro/1.11.1//avro-1.11.1.jar
+avro-ipc/1.11.2//avro-ipc-1.11.2.jar
+avro-mapred/1.11.2//avro-mapred-1.11.2.jar
+avro/1.11.2//avro-1.11.2.jar
aws-java-sdk-bundle/1.12.367//aws-java-sdk-bundle-1.12.367.jar
azure-data-lake-store-sdk/2.3.9//azure-data-lake-store-sdk-2.3.9.jar
azure-keyvault-core/1.0.0//azure-keyvault-core-1.0.0.jar
diff --git a/docs/sql-data-sources-avro.md b/docs/sql-data-sources-avro.md
index 977886a6f34..b01174b9182 100644
--- a/docs/sql-data-sources-avro.md
+++ b/docs/sql-data-sources-avro.md
@@ -417,7 +417,7 @@ applications. Read the [Advanced Dependency Management](https://spark.apache
Submission Guide for more details.
## Supported types for Avro -> Spark SQL conversion
-Currently Spark supports reading all [primitive types](https://avro.apache.org/docs/1.11.1/specification/#primitive-types) and [complex types](https://avro.apache.org/docs/1.11.1/specification/#complex-types) under records of Avro.
+Currently Spark supports reading all [primitive types](https://avro.apache.org/docs/1.11.2/specification/#primitive-types) and [complex types](https://avro.apache.org/docs/1.11.2/specification/#complex-types) under records of Avro.
<table class="table table-striped">
<thead><tr><th><b>Avro type</b></th><th><b>Spark SQL type</b></th></tr></thead>
<tr>
@@ -481,7 +481,7 @@ In addition to the types listed above, it supports reading `union` types. The fo
3. `union(something, null)`, where something is any supported Avro type. This will be mapped to the same Spark SQL type as that of something, with nullable set to true.
All other union types are considered complex. They will be mapped to StructType where field names are member0, member1, etc., in accordance with members of the union. This is consistent with the behavior when converting between Avro and Parquet.
-It also supports reading the following Avro [logical types](https://avro.apache.org/docs/1.11.1/specification/#logical-types):
+It also supports reading the following Avro [logical types](https://avro.apache.org/docs/1.11.2/specification/#logical-types):
<table class="table table-striped">
<thead><tr><th><b>Avro logical type</b></th><th><b>Avro type</b></th><th><b>Spark SQL type</b></th></tr></thead>
diff --git a/pom.xml b/pom.xml
index 2e29d1de0c9..bc14cdd584e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -154,7 +154,7 @@
-->
<codahale.metrics.version>4.2.19</codahale.metrics.version>
<!-- Should be consistent with SparkBuild.scala and docs -->
- <avro.version>1.11.1</avro.version>
+ <avro.version>1.11.2</avro.version>
<aws.kinesis.client.version>1.12.0</aws.kinesis.client.version>
<!-- Should be consistent with Kinesis client dependency -->
<aws.java.sdk.version>1.11.655</aws.java.sdk.version>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 7f9da32224f..8f2f5d78787 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -1110,7 +1110,7 @@ object DependencyOverrides {
dependencyOverrides += "com.google.guava" % "guava" % guavaVersion,
dependencyOverrides += "xerces" % "xercesImpl" % "2.12.2",
dependencyOverrides += "jline" % "jline" % "2.14.6",
- dependencyOverrides += "org.apache.avro" % "avro" % "1.11.1")
+ dependencyOverrides += "org.apache.avro" % "avro" % "1.11.2")
}
/**
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala
index 6f799bbe7d3..bd1b5b55789 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala
@@ -900,7 +900,7 @@ class HiveClientSuite(version: String, allVersions: Seq[String])
test("Decimal support of Avro Hive serde") {
val tableName = "tab1"
// TODO: add the other logical types. For details, see the link:
- // https://avro.apache.org/docs/1.11.1/specification/#logical-types
+ // https://avro.apache.org/docs/1.11.2/specification/#logical-types
val avroSchema =
"""{
| "name": "test_record",
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]