This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch branch-4.1
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-4.1 by this push:
new 180759f6f2f9 Revert "[SPARK-54177][BUILD] Upgrade gRPC to 1.76 and protobuf to 6.33"
180759f6f2f9 is described below
commit 180759f6f2f936e813021cbebf2a280723841790
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Tue Nov 4 09:54:24 2025 -0800
Revert "[SPARK-54177][BUILD] Upgrade gRPC to 1.76 and protobuf to 6.33"
This reverts commit 783385065f88ed39dc3759acdea5a7f21a5aadf8.
---
.github/workflows/build_and_test.yml | 4 ++--
.github/workflows/build_python_connect.yml | 2 +-
.github/workflows/maven_test.yml | 2 +-
.github/workflows/pages.yml | 2 +-
.github/workflows/python_hosted_runner_test.yml | 2 +-
dev/create-release/spark-rm/Dockerfile | 4 ++--
dev/infra/Dockerfile | 4 ++--
dev/requirements.txt | 8 +++----
dev/spark-test-image/docs/Dockerfile | 2 +-
dev/spark-test-image/lint/Dockerfile | 4 ++--
dev/spark-test-image/numpy-213/Dockerfile | 2 +-
dev/spark-test-image/python-310/Dockerfile | 2 +-
dev/spark-test-image/python-311/Dockerfile | 2 +-
dev/spark-test-image/python-312/Dockerfile | 2 +-
dev/spark-test-image/python-313-nogil/Dockerfile | 2 +-
dev/spark-test-image/python-313/Dockerfile | 2 +-
dev/spark-test-image/python-314/Dockerfile | 2 +-
dev/spark-test-image/python-minimum/Dockerfile | 2 +-
dev/spark-test-image/python-ps-minimum/Dockerfile | 2 +-
pom.xml | 3 ++-
project/SparkBuild.scala | 4 ++--
python/docs/source/getting_started/install.rst | 12 +++++-----
python/packaging/classic/setup.py | 4 ++--
python/packaging/client/setup.py | 4 ++--
python/packaging/connect/setup.py | 4 ++--
python/pyspark/sql/connect/proto/base_pb2.py | 4 ++--
python/pyspark/sql/connect/proto/catalog_pb2.py | 4 ++--
python/pyspark/sql/connect/proto/commands_pb2.py | 4 ++--
python/pyspark/sql/connect/proto/common_pb2.py | 4 ++--
.../sql/connect/proto/example_plugins_pb2.py | 4 ++--
.../pyspark/sql/connect/proto/expressions_pb2.py | 4 ++--
python/pyspark/sql/connect/proto/ml_common_pb2.py | 4 ++--
python/pyspark/sql/connect/proto/ml_pb2.py | 4 ++--
python/pyspark/sql/connect/proto/pipelines_pb2.py | 4 ++--
python/pyspark/sql/connect/proto/relations_pb2.py | 4 ++--
python/pyspark/sql/connect/proto/types_pb2.py | 4 ++--
.../sql/streaming/proto/StateMessage_pb2.py | 8 +++----
sql/connect/client/jdbc/pom.xml | 4 ++++
sql/connect/client/jvm/pom.xml | 4 ++++
sql/connect/common/pom.xml | 26 ++++++++++++++++++++++
sql/connect/common/src/main/buf.gen.yaml | 14 ++++++------
sql/connect/server/pom.xml | 14 +++++-------
sql/core/src/main/buf.gen.yaml | 2 +-
43 files changed, 116 insertions(+), 83 deletions(-)
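For a quick local sanity check of what this revert pins, a minimal Python sketch (not part of the commit; it only assumes the grpcio and protobuf distributions installed throughout the CI files below) prints the versions an environment actually resolved:

    import grpc, google.protobuf
    # After this revert, branch-4.1 environments should report 1.67.x and 5.29.5.
    print(grpc.__version__, google.protobuf.__version__)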
diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index 2609f84d98cc..3ba71108f553 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -361,7 +361,7 @@ jobs:
- name: Install Python packages (Python 3.11)
if: (contains(matrix.modules, 'sql') && !contains(matrix.modules, 'sql-')) || contains(matrix.modules, 'connect') || contains(matrix.modules, 'yarn')
run: |
- python3.11 -m pip install 'numpy>=1.22' pyarrow pandas pyyaml scipy unittest-xml-reporting 'lxml==4.9.4' 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.0'
+ python3.11 -m pip install 'numpy>=1.22' pyarrow pandas pyyaml scipy unittest-xml-reporting 'lxml==4.9.4' 'grpcio==1.67.0' 'grpcio-status==1.67.0' 'protobuf==5.29.5'
python3.11 -m pip list
# Run the tests.
- name: Run tests
@@ -765,7 +765,7 @@ jobs:
python-version: '3.11'
- name: Install dependencies for Python CodeGen check
run: |
- python3.11 -m pip install 'black==23.12.1' 'protobuf==6.33.0' 'mypy==1.8.0' 'mypy-protobuf==3.3.0'
+ python3.11 -m pip install 'black==23.12.1' 'protobuf==5.29.5' 'mypy==1.8.0' 'mypy-protobuf==3.3.0'
python3.11 -m pip list
- name: Python CodeGen check for branch-3.5
if: inputs.branch == 'branch-3.5'
diff --git a/.github/workflows/build_python_connect.yml b/.github/workflows/build_python_connect.yml
index b1ebb45b9cbc..cec37af22dd7 100644
--- a/.github/workflows/build_python_connect.yml
+++ b/.github/workflows/build_python_connect.yml
@@ -72,7 +72,7 @@ jobs:
python packaging/client/setup.py sdist
cd dist
pip install pyspark*client-*.tar.gz
- pip install 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.0' 'googleapis-common-protos==1.71.0' 'graphviz==0.20.3' 'six==1.16.0' 'pandas==2.3.3' scipy 'plotly<6.0.0' 'mlflow>=2.8.1' coverage matplotlib openpyxl 'memory-profiler>=0.61.0' 'scikit-learn>=1.3.2' 'graphviz==0.20.3' 'torch<2.6.0' torchvision torcheval deepspeed unittest-xml-reporting
+ pip install 'grpcio==1.67.0' 'grpcio-status==1.67.0' 'protobuf==5.29.5' 'googleapis-common-protos==1.65.0' 'graphviz==0.20.3' 'six==1.16.0' 'pandas==2.3.3' scipy 'plotly<6.0.0' 'mlflow>=2.8.1' coverage matplotlib openpyxl 'memory-profiler>=0.61.0' 'scikit-learn>=1.3.2' 'graphviz==0.20.3' 'torch<2.6.0' torchvision torcheval deepspeed unittest-xml-reporting
- name: List Python packages
run: python -m pip list
- name: Run tests (local)
diff --git a/.github/workflows/maven_test.yml b/.github/workflows/maven_test.yml
index 7bbfc420e02a..95c9aac33fc6 100644
--- a/.github/workflows/maven_test.yml
+++ b/.github/workflows/maven_test.yml
@@ -175,7 +175,7 @@ jobs:
- name: Install Python packages (Python 3.11)
if: contains(matrix.modules, 'resource-managers#yarn') || (contains(matrix.modules, 'sql#core')) || contains(matrix.modules, 'connect')
run: |
- python3.11 -m pip install 'numpy>=1.22' pyarrow pandas pyyaml scipy unittest-xml-reporting 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.0'
+ python3.11 -m pip install 'numpy>=1.22' pyarrow pandas pyyaml scipy unittest-xml-reporting 'grpcio==1.67.0' 'grpcio-status==1.67.0' 'protobuf==5.29.5'
python3.11 -m pip list
# Run the tests using script command.
# BSD's script command doesn't support -c option, and the usage is different from Linux's one.
diff --git a/.github/workflows/pages.yml b/.github/workflows/pages.yml
index 2bba3dcaf176..e800b40106ee 100644
--- a/.github/workflows/pages.yml
+++ b/.github/workflows/pages.yml
@@ -63,7 +63,7 @@ jobs:
pip install 'sphinx==4.5.0' mkdocs 'pydata_sphinx_theme>=0.13' sphinx-copybutton nbsphinx numpydoc jinja2 markupsafe 'pyzmq<24.0.0' \
ipython ipython_genutils sphinx_plotly_directive 'numpy>=1.22' pyarrow 'pandas==2.3.3' 'plotly>=4.8' 'docutils<0.18.0' \
'flake8==3.9.0' 'mypy==1.8.0' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' 'black==23.12.1' \
- 'pandas-stubs==1.2.0.53' 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
+ 'pandas-stubs==1.2.0.53' 'grpcio==1.67.0' 'grpcio-status==1.67.0' 'protobuf==5.29.5' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
'sphinxcontrib-applehelp==1.0.4' 'sphinxcontrib-devhelp==1.0.2' 'sphinxcontrib-htmlhelp==2.0.1' 'sphinxcontrib-qthelp==1.0.3' 'sphinxcontrib-serializinghtml==1.1.5'
- name: Install Ruby for documentation generation
uses: ruby/setup-ruby@v1
diff --git a/.github/workflows/python_hosted_runner_test.yml b/.github/workflows/python_hosted_runner_test.yml
index d55eb1d93799..77e85222c29d 100644
--- a/.github/workflows/python_hosted_runner_test.yml
+++ b/.github/workflows/python_hosted_runner_test.yml
@@ -148,7 +148,7 @@ jobs:
python${{matrix.python}} -m pip install --ignore-installed 'blinker>=1.6.2'
python${{matrix.python}} -m pip install --ignore-installed 'six==1.16.0'
python${{matrix.python}} -m pip install numpy 'pyarrow>=21.0.0' 'six==1.16.0' 'pandas==2.3.3' scipy 'plotly<6.0.0' 'mlflow>=2.8.1' coverage matplotlib openpyxl 'memory-profiler>=0.61.0' 'scikit-learn>=1.3.2' unittest-xml-reporting && \
- python${{matrix.python}} -m pip install 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.0' 'googleapis-common-protos==1.71.0' 'graphviz==0.20.3' && \
+ python${{matrix.python}} -m pip install 'grpcio==1.67.0' 'grpcio-status==1.67.0' 'protobuf==5.29.5' 'googleapis-common-protos==1.65.0' 'graphviz==0.20.3' && \
python${{matrix.python}} -m pip cache purge
- name: List Python packages
run: python${{matrix.python}} -m pip list
diff --git a/dev/create-release/spark-rm/Dockerfile b/dev/create-release/spark-rm/Dockerfile
index 679998b89392..86be7e0a8229 100644
--- a/dev/create-release/spark-rm/Dockerfile
+++ b/dev/create-release/spark-rm/Dockerfile
@@ -94,7 +94,7 @@ ENV R_LIBS_SITE="/usr/local/lib/R/site-library:${R_LIBS_SITE}:/usr/lib/R/library
ARG BASIC_PIP_PKGS="numpy pyarrow>=18.0.0 six==1.16.0 pandas==2.3.3 scipy plotly<6.0.0 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 scikit-learn>=1.3.2 twine==3.4.1"
# Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0 googleapis-common-protos==1.71.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0 protobuf==5.29.5 googleapis-common-protos==1.65.0 graphviz==0.20.3"
# Install Python 3.10 packages
RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
@@ -111,7 +111,7 @@ RUN python3.10 -m pip install $BASIC_PIP_PKGS unittest-xml-reporting $CONNECT_PI
RUN python3.10 -m pip install 'sphinx==4.5.0' mkdocs 'pydata_sphinx_theme>=0.13' sphinx-copybutton nbsphinx numpydoc jinja2 markupsafe 'pyzmq<24.0.0' \
ipython ipython_genutils sphinx_plotly_directive 'numpy>=1.22' pyarrow pandas 'plotly>=4.8' 'docutils<0.18.0' \
'flake8==3.9.0' 'mypy==1.8.0' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' 'black==23.12.1' \
-'pandas-stubs==1.2.0.53' 'grpcio==1.76.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
+'pandas-stubs==1.2.0.53' 'grpcio==1.67.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
'sphinxcontrib-applehelp==1.0.4' 'sphinxcontrib-devhelp==1.0.2' 'sphinxcontrib-htmlhelp==2.0.1' 'sphinxcontrib-qthelp==1.0.3' 'sphinxcontrib-serializinghtml==1.1.5'
RUN python3.10 -m pip list
diff --git a/dev/infra/Dockerfile b/dev/infra/Dockerfile
index 423b6ba820d0..1aa03735ce92 100644
--- a/dev/infra/Dockerfile
+++ b/dev/infra/Dockerfile
@@ -97,7 +97,7 @@ RUN pypy3 -m pip install numpy 'six==1.16.0' 'pandas==2.3.3' scipy coverage matp
ARG BASIC_PIP_PKGS="numpy pyarrow>=18.0.0 six==1.16.0 pandas==2.3.3 scipy plotly>=4.8 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 scikit-learn>=1.3.2"
# Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0 googleapis-common-protos==1.71.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0 protobuf==5.29.5 googleapis-common-protos==1.65.0 graphviz==0.20.3"
# Install Python 3.10 packages
RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
@@ -149,7 +149,7 @@ RUN apt-get update && apt-get install -y \
RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.13
# TODO(SPARK-49862) Add BASIC_PIP_PKGS and CONNECT_PIP_PKGS to Python 3.13 image when it supports Python 3.13
RUN python3.13 -m pip install --ignore-installed blinker>=1.6.2 # mlflow needs this
-RUN python3.13 -m pip install numpy>=2.1 pyarrow>=18.0.0 six==1.16.0 pandas==2.3.3 scipy coverage matplotlib openpyxl grpcio==1.76.0 grpcio-status==1.76.0 lxml jinja2 && \
+RUN python3.13 -m pip install numpy>=2.1 pyarrow>=18.0.0 six==1.16.0 pandas==2.3.3 scipy coverage matplotlib openpyxl grpcio==1.67.0 grpcio-status==1.67.0 lxml jinja2 && \
python3.13 -m pip cache purge
# Remove unused installation packages to free up disk space
diff --git a/dev/requirements.txt b/dev/requirements.txt
index ddaeb9b3dd9d..76652df74481 100644
--- a/dev/requirements.txt
+++ b/dev/requirements.txt
@@ -61,10 +61,10 @@ black==23.12.1
py
# Spark Connect (required)
-grpcio>=1.76.0
-grpcio-status>=1.76.0
-googleapis-common-protos>=1.71.0
-protobuf==6.33.0
+grpcio>=1.67.0
+grpcio-status>=1.67.0
+googleapis-common-protos>=1.65.0
+protobuf==5.29.5
# Spark Connect python proto generation plugin (optional)
mypy-protobuf==3.3.0
diff --git a/dev/spark-test-image/docs/Dockerfile b/dev/spark-test-image/docs/Dockerfile
index e268ea7a8351..1c17ae122d63 100644
--- a/dev/spark-test-image/docs/Dockerfile
+++ b/dev/spark-test-image/docs/Dockerfile
@@ -91,6 +91,6 @@ RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.11
RUN python3.11 -m pip install 'sphinx==4.5.0' mkdocs 'pydata_sphinx_theme>=0.13' sphinx-copybutton nbsphinx numpydoc jinja2 markupsafe 'pyzmq<24.0.0' \
ipython ipython_genutils sphinx_plotly_directive 'numpy>=1.22' pyarrow 'pandas==2.3.3' 'plotly>=4.8' 'docutils<0.18.0' \
'flake8==3.9.0' 'mypy==1.8.0' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' 'black==23.12.1' \
- 'pandas-stubs==1.2.0.53' 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
+ 'pandas-stubs==1.2.0.53' 'grpcio==1.67.0' 'grpcio-status==1.67.0' 'protobuf==5.29.5' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
'sphinxcontrib-applehelp==1.0.4' 'sphinxcontrib-devhelp==1.0.2' 'sphinxcontrib-htmlhelp==2.0.1' 'sphinxcontrib-qthelp==1.0.3' 'sphinxcontrib-serializinghtml==1.1.5' \
&& python3.11 -m pip cache purge
diff --git a/dev/spark-test-image/lint/Dockerfile b/dev/spark-test-image/lint/Dockerfile
index 6686e3808e03..07ff9c90b759 100644
--- a/dev/spark-test-image/lint/Dockerfile
+++ b/dev/spark-test-image/lint/Dockerfile
@@ -82,8 +82,8 @@ RUN python3.11 -m pip install \
'flake8==3.9.0' \
'googleapis-common-protos-stubs==2.2.0' \
'grpc-stubs==1.24.11' \
- 'grpcio-status==1.76.0' \
- 'grpcio==1.76.0' \
+ 'grpcio-status==1.67.0' \
+ 'grpcio==1.67.0' \
'ipython' \
'ipython_genutils' \
'jinja2' \
diff --git a/dev/spark-test-image/numpy-213/Dockerfile b/dev/spark-test-image/numpy-213/Dockerfile
index d33fb5f5d30e..d0409e61a51a 100644
--- a/dev/spark-test-image/numpy-213/Dockerfile
+++ b/dev/spark-test-image/numpy-213/Dockerfile
@@ -71,7 +71,7 @@ RUN apt-get update && apt-get install -y \
# Pin numpy==2.1.3
ARG BASIC_PIP_PKGS="numpy==2.1.3 pyarrow>=21.0.0 six==1.16.0 pandas==2.2.3
scipy plotly<6.0.0 mlflow>=2.8.1 coverage matplotlib openpyxl
memory-profiler>=0.61.0 scikit-learn>=1.3.2"
# Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0
googleapis-common-protos==1.71.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0 protobuf==5.29.5
googleapis-common-protos==1.65.0 graphviz==0.20.3"
# Install Python 3.11 packages
RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.11
diff --git a/dev/spark-test-image/python-310/Dockerfile b/dev/spark-test-image/python-310/Dockerfile
index 46cfce36832b..ce2ca23d18a6 100644
--- a/dev/spark-test-image/python-310/Dockerfile
+++ b/dev/spark-test-image/python-310/Dockerfile
@@ -66,7 +66,7 @@ RUN apt-get update && apt-get install -y \
ARG BASIC_PIP_PKGS="numpy pyarrow>=21.0.0 six==1.16.0 pandas==2.3.3 scipy
plotly<6.0.0 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0
scikit-learn>=1.3.2"
# Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0
googleapis-common-protos==1.71.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0 protobuf==5.29.5
googleapis-common-protos==1.65.0 graphviz==0.20.3"
# Install Python 3.10 packages
RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
diff --git a/dev/spark-test-image/python-311/Dockerfile b/dev/spark-test-image/python-311/Dockerfile
index 920f21bd47ee..00fb7be788fd 100644
--- a/dev/spark-test-image/python-311/Dockerfile
+++ b/dev/spark-test-image/python-311/Dockerfile
@@ -70,7 +70,7 @@ RUN apt-get update && apt-get install -y \
ARG BASIC_PIP_PKGS="numpy pyarrow>=21.0.0 six==1.16.0 pandas==2.3.3 scipy
plotly<6.0.0 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0
scikit-learn>=1.3.2"
# Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0
googleapis-common-protos==1.71.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0 protobuf==5.29.5
googleapis-common-protos==1.65.0 graphviz==0.20.3"
# Install Python 3.11 packages
RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.11
diff --git a/dev/spark-test-image/python-312/Dockerfile b/dev/spark-test-image/python-312/Dockerfile
index db1039f5cb26..79cab824a5b2 100644
--- a/dev/spark-test-image/python-312/Dockerfile
+++ b/dev/spark-test-image/python-312/Dockerfile
@@ -70,7 +70,7 @@ RUN apt-get update && apt-get install -y \
ARG BASIC_PIP_PKGS="numpy pyarrow>=21.0.0 six==1.16.0 pandas==2.3.3 scipy
plotly<6.0.0 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0
scikit-learn>=1.3.2"
# Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0
googleapis-common-protos==1.71.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0 protobuf==5.29.5
googleapis-common-protos==1.65.0 graphviz==0.20.3"
# Install Python 3.12 packages
RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.12
diff --git a/dev/spark-test-image/python-313-nogil/Dockerfile b/dev/spark-test-image/python-313-nogil/Dockerfile
index a50bf670b3f3..031eb8772b59 100644
--- a/dev/spark-test-image/python-313-nogil/Dockerfile
+++ b/dev/spark-test-image/python-313-nogil/Dockerfile
@@ -69,7 +69,7 @@ RUN apt-get update && apt-get install -y \
ARG BASIC_PIP_PKGS="numpy pyarrow>=21.0.0 six==1.16.0 pandas==2.3.3 scipy
plotly<6.0.0 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0
scikit-learn>=1.3.2"
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0
googleapis-common-protos==1.71.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0 protobuf==5.29.5
googleapis-common-protos==1.65.0 graphviz==0.20.3"
# Install Python 3.13 packages
diff --git a/dev/spark-test-image/python-313/Dockerfile b/dev/spark-test-image/python-313/Dockerfile
index f74c48bf346f..abd5a7e01093 100644
--- a/dev/spark-test-image/python-313/Dockerfile
+++ b/dev/spark-test-image/python-313/Dockerfile
@@ -70,7 +70,7 @@ RUN apt-get update && apt-get install -y \
ARG BASIC_PIP_PKGS="numpy pyarrow>=21.0.0 six==1.16.0 pandas==2.3.3 scipy
plotly<6.0.0 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0
scikit-learn>=1.3.2"
# Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0
googleapis-common-protos==1.71.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0 protobuf==5.29.5
googleapis-common-protos==1.65.0 graphviz==0.20.3"
# Install Python 3.13 packages
RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.13
diff --git a/dev/spark-test-image/python-314/Dockerfile b/dev/spark-test-image/python-314/Dockerfile
index 7deb5e855319..0ba9b620bd8b 100644
--- a/dev/spark-test-image/python-314/Dockerfile
+++ b/dev/spark-test-image/python-314/Dockerfile
@@ -70,7 +70,7 @@ RUN apt-get update && apt-get install -y \
ARG BASIC_PIP_PKGS="numpy pyarrow>=22.0.0 six==1.16.0 pandas==2.3.3 scipy
plotly<6.0.0 coverage matplotlib openpyxl memory-profiler>=0.61.0
scikit-learn>=1.3.2"
# Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.75.1 grpcio-status==1.71.2 protobuf==6.33.0
googleapis-common-protos==1.71.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.75.1 grpcio-status==1.71.2 protobuf==5.29.5
googleapis-common-protos==1.65.0 graphviz==0.20.3"
# Install Python 3.14 packages
RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.14
diff --git a/dev/spark-test-image/python-minimum/Dockerfile b/dev/spark-test-image/python-minimum/Dockerfile
index ebafbc69ec4d..122281ec0ea1 100644
--- a/dev/spark-test-image/python-minimum/Dockerfile
+++ b/dev/spark-test-image/python-minimum/Dockerfile
@@ -64,7 +64,7 @@ RUN apt-get update && apt-get install -y \
ARG BASIC_PIP_PKGS="numpy==1.22.4 pyarrow==15.0.0 pandas==2.2.0 six==1.16.0
scipy scikit-learn coverage unittest-xml-reporting"
# Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0
googleapis-common-protos==1.71.0 graphviz==0.20 protobuf"
+ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0
googleapis-common-protos==1.65.0 graphviz==0.20 protobuf"
# Install Python 3.9 packages
RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
diff --git a/dev/spark-test-image/python-ps-minimum/Dockerfile b/dev/spark-test-image/python-ps-minimum/Dockerfile
index 13a5f2db386c..680697c3f2d7 100644
--- a/dev/spark-test-image/python-ps-minimum/Dockerfile
+++ b/dev/spark-test-image/python-ps-minimum/Dockerfile
@@ -65,7 +65,7 @@ RUN apt-get update && apt-get install -y \
ARG BASIC_PIP_PKGS="pyarrow==15.0.0 pandas==2.2.0 six==1.16.0 numpy scipy
coverage unittest-xml-reporting"
# Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0
googleapis-common-protos==1.71.0 graphviz==0.20 protobuf"
+ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0
googleapis-common-protos==1.65.0 graphviz==0.20 protobuf"
# Install Python 3.10 packages
RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
diff --git a/pom.xml b/pom.xml
index b428823b3cb5..107def928cbb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -305,8 +305,9 @@
<!-- Version used in Connect -->
<connect.guava.version>33.4.0-jre</connect.guava.version>
<guava.failureaccess.version>1.0.2</guava.failureaccess.version>
- <io.grpc.version>1.76.0</io.grpc.version>
+ <io.grpc.version>1.67.1</io.grpc.version>
<mima.version>1.1.4</mima.version>
+ <tomcat.annotations.api.version>6.0.53</tomcat.annotations.api.version>
<!-- Version used in Profiler -->
<ap-loader.version>4.0-10</ap-loader.version>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 253893cc225b..1d8de063133e 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -820,13 +820,11 @@ object SparkConnect {
ShadeRule.rename("org.checkerframework.**" ->
"org.sparkproject.connect.checkerframework.@1").inAll,
ShadeRule.rename("com.google.gson.**" ->
"org.sparkproject.connect.gson.@1").inAll,
ShadeRule.rename("com.google.api.**" ->
"org.sparkproject.connect.google_protos.api.@1").inAll,
- ShadeRule.rename("com.google.apps.**" ->
"org.sparkproject.connect.google_protos.apps.@1").inAll,
ShadeRule.rename("com.google.cloud.**" ->
"org.sparkproject.connect.google_protos.cloud.@1").inAll,
ShadeRule.rename("com.google.geo.**" ->
"org.sparkproject.connect.google_protos.geo.@1").inAll,
ShadeRule.rename("com.google.logging.**" ->
"org.sparkproject.connect.google_protos.logging.@1").inAll,
ShadeRule.rename("com.google.longrunning.**" ->
"org.sparkproject.connect.google_protos.longrunning.@1").inAll,
ShadeRule.rename("com.google.rpc.**" ->
"org.sparkproject.connect.google_protos.rpc.@1").inAll,
- ShadeRule.rename("com.google.shopping.**" ->
"org.sparkproject.connect.google_protos.shopping.@1").inAll,
ShadeRule.rename("com.google.type.**" ->
"org.sparkproject.connect.google_protos.type.@1").inAll
),
@@ -913,6 +911,7 @@ object SparkConnectJdbc {
ShadeRule.rename("com.google.**" ->
"org.sparkproject.connect.client.com.google.@1").inAll,
ShadeRule.rename("io.netty.**" ->
"org.sparkproject.connect.client.io.netty.@1").inAll,
ShadeRule.rename("org.checkerframework.**" ->
"org.sparkproject.connect.client.org.checkerframework.@1").inAll,
+ ShadeRule.rename("javax.annotation.**" ->
"org.sparkproject.connect.client.javax.annotation.@1").inAll,
ShadeRule.rename("io.perfmark.**" ->
"org.sparkproject.connect.client.io.perfmark.@1").inAll,
ShadeRule.rename("org.codehaus.**" ->
"org.sparkproject.connect.client.org.codehaus.@1").inAll,
ShadeRule.rename("android.annotation.**" ->
"org.sparkproject.connect.client.android.annotation.@1").inAll
@@ -992,6 +991,7 @@ object SparkConnectClient {
ShadeRule.rename("com.google.**" ->
"org.sparkproject.connect.client.com.google.@1").inAll,
ShadeRule.rename("io.netty.**" ->
"org.sparkproject.connect.client.io.netty.@1").inAll,
ShadeRule.rename("org.checkerframework.**" ->
"org.sparkproject.connect.client.org.checkerframework.@1").inAll,
+ ShadeRule.rename("javax.annotation.**" ->
"org.sparkproject.connect.client.javax.annotation.@1").inAll,
ShadeRule.rename("io.perfmark.**" ->
"org.sparkproject.connect.client.io.perfmark.@1").inAll,
ShadeRule.rename("org.codehaus.**" ->
"org.sparkproject.connect.client.org.codehaus.@1").inAll,
ShadeRule.rename("android.annotation.**" ->
"org.sparkproject.connect.client.android.annotation.@1").inAll
diff --git a/python/docs/source/getting_started/install.rst b/python/docs/source/getting_started/install.rst
index 8b3c969d756d..82db489651ff 100644
--- a/python/docs/source/getting_started/install.rst
+++ b/python/docs/source/getting_started/install.rst
@@ -227,9 +227,9 @@ Package Supported version Note
========================== ================= ==========================
`pandas` >=2.2.0 Required for Spark Connect
`pyarrow` >=15.0.0 Required for Spark Connect
-`grpcio` >=1.76.0 Required for Spark Connect
-`grpcio-status` >=1.76.0 Required for Spark Connect
-`googleapis-common-protos` >=1.71.0 Required for Spark Connect
+`grpcio` >=1.67.0 Required for Spark Connect
+`grpcio-status` >=1.67.0 Required for Spark Connect
+`googleapis-common-protos` >=1.65.0 Required for Spark Connect
`graphviz` >=0.20 Optional for Spark Connect
========================== ================= ==========================
@@ -310,9 +310,9 @@ Package Supported version Note
========================== ================= ===================================================
`pandas` >=2.2.0 Required for Spark Connect and Spark SQL
`pyarrow` >=15.0.0 Required for Spark Connect and Spark SQL
-`grpcio` >=1.76.0 Required for Spark Connect
-`grpcio-status` >=1.76.0 Required for Spark Connect
-`googleapis-common-protos` >=1.71.0 Required for Spark Connect
+`grpcio` >=1.67.0 Required for Spark Connect
+`grpcio-status` >=1.67.0 Required for Spark Connect
+`googleapis-common-protos` >=1.65.0 Required for Spark Connect
`pyyaml` >=3.11 Required for spark-pipelines command line interface
`graphviz` >=0.20 Optional for Spark Connect
========================== ================= ===================================================
diff --git a/python/packaging/classic/setup.py b/python/packaging/classic/setup.py
index e6ac729f20d6..eac97af2e8c8 100755
--- a/python/packaging/classic/setup.py
+++ b/python/packaging/classic/setup.py
@@ -153,8 +153,8 @@ if in_spark:
_minimum_pandas_version = "2.2.0"
_minimum_numpy_version = "1.21"
_minimum_pyarrow_version = "15.0.0"
-_minimum_grpc_version = "1.76.0"
-_minimum_googleapis_common_protos_version = "1.71.0"
+_minimum_grpc_version = "1.67.0"
+_minimum_googleapis_common_protos_version = "1.65.0"
_minimum_pyyaml_version = "3.11"
diff --git a/python/packaging/client/setup.py b/python/packaging/client/setup.py
index c378d223cfcc..7ec7e45a3160 100755
--- a/python/packaging/client/setup.py
+++ b/python/packaging/client/setup.py
@@ -136,8 +136,8 @@ try:
_minimum_pandas_version = "2.2.0"
_minimum_numpy_version = "1.21"
_minimum_pyarrow_version = "15.0.0"
- _minimum_grpc_version = "1.76.0"
- _minimum_googleapis_common_protos_version = "1.71.0"
+ _minimum_grpc_version = "1.67.0"
+ _minimum_googleapis_common_protos_version = "1.65.0"
_minimum_pyyaml_version = "3.11"
with open("README.md") as f:
diff --git a/python/packaging/connect/setup.py b/python/packaging/connect/setup.py
index 3b88563bcfe7..f2b53211b3a0 100755
--- a/python/packaging/connect/setup.py
+++ b/python/packaging/connect/setup.py
@@ -89,8 +89,8 @@ try:
_minimum_pandas_version = "2.0.0"
_minimum_numpy_version = "1.21"
_minimum_pyarrow_version = "11.0.0"
- _minimum_grpc_version = "1.76.0"
- _minimum_googleapis_common_protos_version = "1.71.0"
+ _minimum_grpc_version = "1.67.0"
+ _minimum_googleapis_common_protos_version = "1.65.0"
_minimum_pyyaml_version = "3.11"
with open("README.md") as f:
diff --git a/python/pyspark/sql/connect/proto/base_pb2.py b/python/pyspark/sql/connect/proto/base_pb2.py
index 32bf6802df7b..0fe992332de7 100644
--- a/python/pyspark/sql/connect/proto/base_pb2.py
+++ b/python/pyspark/sql/connect/proto/base_pb2.py
@@ -18,7 +18,7 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: spark/connect/base.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 5.29.5
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
- _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/base.proto"
+ _runtime_version.Domain.PUBLIC, 5, 29, 5, "", "spark/connect/base.proto"
)
# @@protoc_insertion_point(imports)
diff --git a/python/pyspark/sql/connect/proto/catalog_pb2.py b/python/pyspark/sql/connect/proto/catalog_pb2.py
index 054b367bd3b3..58c129a01daa 100644
--- a/python/pyspark/sql/connect/proto/catalog_pb2.py
+++ b/python/pyspark/sql/connect/proto/catalog_pb2.py
@@ -18,7 +18,7 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: spark/connect/catalog.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 5.29.5
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
- _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/catalog.proto"
+ _runtime_version.Domain.PUBLIC, 5, 29, 5, "", "spark/connect/catalog.proto"
)
# @@protoc_insertion_point(imports)
diff --git a/python/pyspark/sql/connect/proto/commands_pb2.py b/python/pyspark/sql/connect/proto/commands_pb2.py
index 4eccf1b71706..694b4a9a9aa3 100644
--- a/python/pyspark/sql/connect/proto/commands_pb2.py
+++ b/python/pyspark/sql/connect/proto/commands_pb2.py
@@ -18,7 +18,7 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: spark/connect/commands.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 5.29.5
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
- _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/commands.proto"
+ _runtime_version.Domain.PUBLIC, 5, 29, 5, "", "spark/connect/commands.proto"
)
# @@protoc_insertion_point(imports)
diff --git a/python/pyspark/sql/connect/proto/common_pb2.py b/python/pyspark/sql/connect/proto/common_pb2.py
index 8abd8fa6dc04..07ea9f7ed317 100644
--- a/python/pyspark/sql/connect/proto/common_pb2.py
+++ b/python/pyspark/sql/connect/proto/common_pb2.py
@@ -18,7 +18,7 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: spark/connect/common.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 5.29.5
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
- _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/common.proto"
+ _runtime_version.Domain.PUBLIC, 5, 29, 5, "", "spark/connect/common.proto"
)
# @@protoc_insertion_point(imports)
diff --git a/python/pyspark/sql/connect/proto/example_plugins_pb2.py b/python/pyspark/sql/connect/proto/example_plugins_pb2.py
index 423768ee63d6..71a73a6d592a 100644
--- a/python/pyspark/sql/connect/proto/example_plugins_pb2.py
+++ b/python/pyspark/sql/connect/proto/example_plugins_pb2.py
@@ -18,7 +18,7 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: spark/connect/example_plugins.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 5.29.5
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
- _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/example_plugins.proto"
+ _runtime_version.Domain.PUBLIC, 5, 29, 5, "", "spark/connect/example_plugins.proto"
)
# @@protoc_insertion_point(imports)
diff --git a/python/pyspark/sql/connect/proto/expressions_pb2.py b/python/pyspark/sql/connect/proto/expressions_pb2.py
index 0c466aeb67a0..bd75ade02d8b 100644
--- a/python/pyspark/sql/connect/proto/expressions_pb2.py
+++ b/python/pyspark/sql/connect/proto/expressions_pb2.py
@@ -18,7 +18,7 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: spark/connect/expressions.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 5.29.5
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
- _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/expressions.proto"
+ _runtime_version.Domain.PUBLIC, 5, 29, 5, "", "spark/connect/expressions.proto"
)
# @@protoc_insertion_point(imports)
diff --git a/python/pyspark/sql/connect/proto/ml_common_pb2.py b/python/pyspark/sql/connect/proto/ml_common_pb2.py
index de547fc2a102..a49491b8ad1e 100644
--- a/python/pyspark/sql/connect/proto/ml_common_pb2.py
+++ b/python/pyspark/sql/connect/proto/ml_common_pb2.py
@@ -18,7 +18,7 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: spark/connect/ml_common.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 5.29.5
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
- _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/ml_common.proto"
+ _runtime_version.Domain.PUBLIC, 5, 29, 5, "", "spark/connect/ml_common.proto"
)
# @@protoc_insertion_point(imports)
diff --git a/python/pyspark/sql/connect/proto/ml_pb2.py b/python/pyspark/sql/connect/proto/ml_pb2.py
index 3bd141815c8e..9574966472a5 100644
--- a/python/pyspark/sql/connect/proto/ml_pb2.py
+++ b/python/pyspark/sql/connect/proto/ml_pb2.py
@@ -18,7 +18,7 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: spark/connect/ml.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 5.29.5
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
- _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/ml.proto"
+ _runtime_version.Domain.PUBLIC, 5, 29, 5, "", "spark/connect/ml.proto"
)
# @@protoc_insertion_point(imports)
diff --git a/python/pyspark/sql/connect/proto/pipelines_pb2.py b/python/pyspark/sql/connect/proto/pipelines_pb2.py
index d7321fa7cf0c..f3489f55ed87 100644
--- a/python/pyspark/sql/connect/proto/pipelines_pb2.py
+++ b/python/pyspark/sql/connect/proto/pipelines_pb2.py
@@ -18,7 +18,7 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: spark/connect/pipelines.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 5.29.5
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
- _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/pipelines.proto"
+ _runtime_version.Domain.PUBLIC, 5, 29, 5, "", "spark/connect/pipelines.proto"
)
# @@protoc_insertion_point(imports)
diff --git a/python/pyspark/sql/connect/proto/relations_pb2.py b/python/pyspark/sql/connect/proto/relations_pb2.py
index 9e630b6ba5e4..e7f319554c5e 100644
--- a/python/pyspark/sql/connect/proto/relations_pb2.py
+++ b/python/pyspark/sql/connect/proto/relations_pb2.py
@@ -18,7 +18,7 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: spark/connect/relations.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 5.29.5
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
- _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/relations.proto"
+ _runtime_version.Domain.PUBLIC, 5, 29, 5, "", "spark/connect/relations.proto"
)
# @@protoc_insertion_point(imports)
diff --git a/python/pyspark/sql/connect/proto/types_pb2.py b/python/pyspark/sql/connect/proto/types_pb2.py
index 74efca8decf8..9a52129103ad 100644
--- a/python/pyspark/sql/connect/proto/types_pb2.py
+++ b/python/pyspark/sql/connect/proto/types_pb2.py
@@ -18,7 +18,7 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: spark/connect/types.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 5.29.5
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
- _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/types.proto"
+ _runtime_version.Domain.PUBLIC, 5, 29, 5, "", "spark/connect/types.proto"
)
# @@protoc_insertion_point(imports)
diff --git a/python/pyspark/sql/streaming/proto/StateMessage_pb2.py b/python/pyspark/sql/streaming/proto/StateMessage_pb2.py
index 1305a6213c13..b88fc2c5ca40 100644
--- a/python/pyspark/sql/streaming/proto/StateMessage_pb2.py
+++ b/python/pyspark/sql/streaming/proto/StateMessage_pb2.py
@@ -18,7 +18,7 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: org/apache/spark/sql/execution/streaming/StateMessage.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 5.29.5
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
@@ -28,9 +28,9 @@ from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
_runtime_version.Domain.PUBLIC,
- 6,
- 33,
- 0,
+ 5,
+ 29,
+ 5,
"",
"org/apache/spark/sql/execution/streaming/StateMessage.proto",
)
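The regenerated *_pb2.py files above all carry the same import-time guard: protobuf gencode validates the installed runtime when the module is loaded. A minimal sketch of that check, assuming the protobuf 5.29.5 runtime pinned by this revert:

    # Sketch of the check the regenerated stubs perform at import time.
    # If the installed protobuf runtime is older than the version the gencode
    # was produced with, ValidateProtobufRuntimeVersion raises VersionError,
    # which is why the checked-in stubs and the pinned runtime move together.
    from google.protobuf import runtime_version
    runtime_version.ValidateProtobufRuntimeVersion(
        runtime_version.Domain.PUBLIC, 5, 29, 5, "", "spark/connect/base.proto"
    )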
diff --git a/sql/connect/client/jdbc/pom.xml b/sql/connect/client/jdbc/pom.xml
index 15e3480f2e59..c2dda12b1e63 100644
--- a/sql/connect/client/jdbc/pom.xml
+++ b/sql/connect/client/jdbc/pom.xml
@@ -181,6 +181,10 @@
<pattern>org.checkerframework</pattern>
<shadedPattern>${spark.shade.packageName}.org.checkerframework</shadedPattern>
</relocation>
+ <relocation>
+ <pattern>javax.annotation</pattern>
+ <shadedPattern>${spark.shade.packageName}.javax.annotation</shadedPattern>
+ </relocation>
<relocation>
<pattern>io.perfmark</pattern>
<shadedPattern>${spark.shade.packageName}.io.perfmark</shadedPattern>
diff --git a/sql/connect/client/jvm/pom.xml b/sql/connect/client/jvm/pom.xml
index c5e4e6a1adfd..1c16b7e9ca8c 100644
--- a/sql/connect/client/jvm/pom.xml
+++ b/sql/connect/client/jvm/pom.xml
@@ -225,6 +225,10 @@
<pattern>org.checkerframework</pattern>
<shadedPattern>${spark.shade.packageName}.org.checkerframework</shadedPattern>
</relocation>
+ <relocation>
+ <pattern>javax.annotation</pattern>
+ <shadedPattern>${spark.shade.packageName}.javax.annotation</shadedPattern>
+ </relocation>
<relocation>
<pattern>io.perfmark</pattern>
<shadedPattern>${spark.shade.packageName}.io.perfmark</shadedPattern>
diff --git a/sql/connect/common/pom.xml b/sql/connect/common/pom.xml
index 2713f23c51e6..6ff47ec6d68c 100644
--- a/sql/connect/common/pom.xml
+++ b/sql/connect/common/pom.xml
@@ -87,6 +87,11 @@
<artifactId>netty-transport-native-unix-common</artifactId>
<version>${netty.version}</version>
</dependency>
+ <dependency> <!-- necessary for Java 9+ -->
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>annotations-api</artifactId>
+ <version>${tomcat.annotations.api.version}</version>
+ </dependency>
<!--
This spark-tags test-dep is needed even though it isn't used in this module,
otherwise testing-cmds that exclude them will yield errors.
@@ -121,6 +126,27 @@
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ <configuration combine.self = "override">
+ <shadedArtifactAttached>false</shadedArtifactAttached>
+ <artifactSet>
+ <includes>
+ <include>org.spark-project.spark:unused</include>
+ <include>org.apache.tomcat:annotations-api</include>
+ </includes>
+ </artifactSet>
+ </configuration>
+ <executions>
+ <execution>
+ <phase>package</phase>
+ <goals>
+ <goal>shade</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
</plugins>
</build>
<profiles>
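The restored org.apache.tomcat:annotations-api dependency and the javax.annotation relocations added above appear to go together: generated stubs on the older gRPC line still reference javax.annotation types, which the JDK stopped bundling as of Java 9, so a standalone provider is needed on the compile classpath (hence the "<!-- necessary for Java 9+ -->" comments). This is an inference from the paired changes in this revert, not something the commit message states.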
diff --git a/sql/connect/common/src/main/buf.gen.yaml b/sql/connect/common/src/main/buf.gen.yaml
index beaa7f1949e2..d6120bfd36fa 100644
--- a/sql/connect/common/src/main/buf.gen.yaml
+++ b/sql/connect/common/src/main/buf.gen.yaml
@@ -16,20 +16,20 @@
#
version: v1
plugins:
- - plugin: buf.build/protocolbuffers/cpp:v33.0
+ - plugin: buf.build/protocolbuffers/cpp:v29.5
out: gen/proto/cpp
- - plugin: buf.build/protocolbuffers/csharp:v33.0
+ - plugin: buf.build/protocolbuffers/csharp:v29.5
out: gen/proto/csharp
- - plugin: buf.build/protocolbuffers/java:v33.0
+ - plugin: buf.build/protocolbuffers/java:v29.5
out: gen/proto/java
- - plugin: buf.build/grpc/ruby:v1.76.0
+ - plugin: buf.build/grpc/ruby:v1.67.0
out: gen/proto/ruby
- - plugin: buf.build/protocolbuffers/ruby:v33.0
+ - plugin: buf.build/protocolbuffers/ruby:v29.5
out: gen/proto/ruby
# Building the Python build and building the mypy interfaces.
- - plugin: buf.build/protocolbuffers/python:v33.0
+ - plugin: buf.build/protocolbuffers/python:v29.5
out: gen/proto/python
- - plugin: buf.build/grpc/python:v1.76.0
+ - plugin: buf.build/grpc/python:v1.67.0
out: gen/proto/python
- name: mypy
out: gen/proto/python
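buf.gen.yaml is the input to `buf generate`, so downgrading these plugin pins is presumably what produced the regenerated gencode earlier in this diff: the protocolbuffers v29.5 and grpc v1.67.0 plugin versions here match the "Protobuf Python Version: 5.29.5" headers in the *_pb2.py files, and the two should be kept in lockstep.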
diff --git a/sql/connect/server/pom.xml b/sql/connect/server/pom.xml
index 732bf38131a7..5f99b5af3243 100644
--- a/sql/connect/server/pom.xml
+++ b/sql/connect/server/pom.xml
@@ -240,6 +240,12 @@
<version>${netty.version}</version>
<scope>provided</scope>
</dependency>
+ <dependency> <!-- necessary for Java 9+ -->
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>annotations-api</artifactId>
+ <version>${tomcat.annotations.api.version}</version>
+ <scope>provided</scope>
+ </dependency>
<dependency>
<groupId>org.scalacheck</groupId>
<artifactId>scalacheck_${scala.binary.version}</artifactId>
@@ -370,10 +376,6 @@
<pattern>com.google.api</pattern>
<shadedPattern>${spark.shade.packageName}.connect.google_protos.api</shadedPattern>
</relocation>
- <relocation>
- <pattern>com.google.apps</pattern>
- <shadedPattern>${spark.shade.packageName}.connect.google_protos.apps</shadedPattern>
- </relocation>
<relocation>
<pattern>com.google.cloud</pattern>
<shadedPattern>${spark.shade.packageName}.connect.google_protos.cloud</shadedPattern>
@@ -394,10 +396,6 @@
<pattern>com.google.rpc</pattern>
<shadedPattern>${spark.shade.packageName}.connect.google_protos.rpc</shadedPattern>
</relocation>
- <relocation>
- <pattern>com.google.shopping</pattern>
- <shadedPattern>${spark.shade.packageName}.connect.google_protos.shopping</shadedPattern>
- </relocation>
<relocation>
<pattern>com.google.type</pattern>
<shadedPattern>${spark.shade.packageName}.connect.google_protos.type</shadedPattern>
diff --git a/sql/core/src/main/buf.gen.yaml b/sql/core/src/main/buf.gen.yaml
index 5f87a840c6a4..01a34ed30844 100644
--- a/sql/core/src/main/buf.gen.yaml
+++ b/sql/core/src/main/buf.gen.yaml
@@ -17,7 +17,7 @@
version: v1
plugins:
# Building the Python build and building the mypy interfaces.
- - plugin: buf.build/protocolbuffers/python:v33.0
+ - plugin: buf.build/protocolbuffers/python:v29.5
out: gen/proto/python
- name: mypy
out: gen/proto/python