diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index 186e7003aa293..0b184c6c248de 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -631,7 +631,7 @@ jobs:
     - name: Install dependencies for Python code generation check
       run: |
         # See more in "Installation" https://docs.buf.build/installation#tarball
-        curl -LO https://github.com/bufbuild/buf/releases/download/v1.20.0/buf-Linux-x86_64.tar.gz
+        curl -LO https://github.com/bufbuild/buf/releases/download/v1.23.1/buf-Linux-x86_64.tar.gz
         mkdir -p $HOME/buf
         tar -xvzf buf-Linux-x86_64.tar.gz -C $HOME/buf --strip-components 1
         python3.9 -m pip install 'protobuf==3.19.5' 'mypy-protobuf==3.3.0'
diff --git a/python/docs/source/development/contributing.rst b/python/docs/source/development/contributing.rst
index 32ae440711b7e..fa7bf11f6da08 100644
--- a/python/docs/source/development/contributing.rst
+++ b/python/docs/source/development/contributing.rst
@@ -120,7 +120,7 @@ Prerequisite
 
 PySpark development requires to build Spark that needs a proper JDK installed, etc. See `Building Spark `_ for more details.
 
-Note that if you intend to contribute to Spark Connect in Python, ``buf`` version ``1.20.0`` is required, see `Buf Installation `_ for more details.
+Note that if you intend to contribute to Spark Connect in Python, ``buf`` version ``1.23.1`` is required, see `Buf Installation `_ for more details.
 
 Conda
 ~~~~~
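
The contributing-guide change above pins ``buf`` at ``1.23.1``. As a minimal sketch (not part of the patch), the same pinned release can be installed locally by mirroring the workflow step; the ``$HOME/buf`` prefix comes from the workflow itself, while adding its ``bin`` directory to ``PATH`` is an assumption about the contributor's shell setup:

    # Mirrors the workflow's install step; assumes a Linux x86_64 host like the CI runner.
    curl -LO https://github.com/bufbuild/buf/releases/download/v1.23.1/buf-Linux-x86_64.tar.gz
    mkdir -p $HOME/buf
    tar -xvzf buf-Linux-x86_64.tar.gz -C $HOME/buf --strip-components 1
    # Assumption: put the extracted bin/ directory on PATH so the Python codegen check can find buf.
    export PATH="$HOME/buf/bin:$PATH"
    buf --version   # expected to print 1.23.1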