
Commit

[Core] Update Bazel (to 3.4.1), gRPC, boringssl, and absl as a precursor to gRPC streaming PR. (ray-project#17903)

* Update Bazel (to 3.4.1), gRPC, boringssl, absl.

* Always reinstall Bazel when upgrading to a new Bazel version.

* Add patch for properly detecting Windows Python headers when building gRPC.

* Add minimum Bazel version check.

* Update docs with new Bazel version.
clarkzinzow committed Aug 21, 2021
1 parent 494ddd9 commit 5ca28b1
Showing 10 changed files with 146 additions and 74 deletions.
4 changes: 4 additions & 0 deletions WORKSPACE
@@ -14,3 +14,7 @@ ray_deps_build_all()
load("@com_github_grpc_grpc//bazel:grpc_extra_deps.bzl", "grpc_extra_deps")

grpc_extra_deps()

load("@bazel_skylib//lib:versions.bzl", "versions")

versions.check(minimum_bazel_version = "3.4.0")
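
For context, versions.check comes from bazel_skylib and aborts WORKSPACE evaluation when the running Bazel is older than the stated minimum. A minimal plain-Python sketch of the tuple comparison this amounts to (illustrative only, not the bazel_skylib implementation):

# Illustrative sketch of the version gate that
# versions.check(minimum_bazel_version = "3.4.0") applies; the real check
# runs in Starlark inside bazel_skylib.
def parse_version(version):
    # "3.4.1" -> (3, 4, 1); anything after a "-" (pre-release tags) is dropped.
    return tuple(int(part) for part in version.split("-")[0].split("."))

def check_minimum(current, minimum="3.4.0"):
    if parse_version(current) < parse_version(minimum):
        raise RuntimeError("Bazel %s is too old; at least %s is required"
                           % (current, minimum))

check_minimum("3.4.1")    # passes: (3, 4, 1) >= (3, 4, 0)
# check_minimum("3.2.0")  # would raise, which is the failure this check surfaces early
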
69 changes: 39 additions & 30 deletions bazel/ray_deps_setup.bzl
@@ -20,9 +20,15 @@ def urlsplit(url):
"fragment": split_on_anchor[1] if len(split_on_anchor) > 1 else None,
}

def auto_http_archive(*, name=None, url=None, urls=True,
build_file=None, build_file_content=None,
strip_prefix=True, **kwargs):
def auto_http_archive(
*,
name = None,
url = None,
urls = True,
build_file = None,
build_file_content = None,
strip_prefix = True,
**kwargs):
""" Intelligently choose mirrors based on the given URL for the download.
Either url or urls is required.
If name == None, it is auto-deduced, but this is NOT recommended.
@@ -35,14 +41,11 @@ def auto_http_archive(*, name=None, url=None, urls=True,

canonical_url = url if url != None else urls[0]
url_parts = urlsplit(canonical_url)
url_except_scheme = (canonical_url.replace(url_parts["scheme"] + "://", "")
if url_parts["scheme"] != None else canonical_url)
url_except_scheme = (canonical_url.replace(url_parts["scheme"] + "://", "") if url_parts["scheme"] != None else canonical_url)
url_path_parts = url_parts["path"]
url_filename = url_path_parts[-1]
url_filename_parts = (url_filename.rsplit(".", 2)
if (tuple(url_filename.lower().rsplit(".", 2)[-2:])
in DOUBLE_SUFFIXES_LOWERCASE)
else url_filename.rsplit(".", 1))
url_filename_parts = (url_filename.rsplit(".", 2) if (tuple(url_filename.lower().rsplit(".", 2)[-2:]) in
DOUBLE_SUFFIXES_LOWERCASE) else url_filename.rsplit(".", 1))
is_github = url_parts["netloc"] == ["github", "com"]

if name == None: # Deduce "com_github_user_project_name" from "https://github.com/user/project-name/..."
@@ -53,9 +56,11 @@

if urls == True:
prefer_url_over_mirrors = is_github
urls = [mirror_prefix + url_except_scheme
for mirror_prefix in mirror_prefixes
if not canonical_url.startswith(mirror_prefix)]
urls = [
mirror_prefix + url_except_scheme
for mirror_prefix in mirror_prefixes
if not canonical_url.startswith(mirror_prefix)
]
urls.insert(0 if prefer_url_over_mirrors else len(urls), canonical_url)
else:
print("No implicit mirrors used because urls were explicitly provided")
@@ -65,16 +70,19 @@
if prefix_without_v.startswith("v") and prefix_without_v[1:2].isdigit():
# GitHub automatically strips a leading 'v' in version numbers
prefix_without_v = prefix_without_v[1:]
strip_prefix = (url_path_parts[1] + "-" + prefix_without_v
if is_github and url_path_parts[2:3] == ["archive"]
else url_filename_parts[0])

return http_archive(name=name, url=url, urls=urls, build_file=build_file,
build_file_content=build_file_content,
strip_prefix=strip_prefix, **kwargs)
strip_prefix = (url_path_parts[1] + "-" + prefix_without_v if is_github and url_path_parts[2:3] == ["archive"] else url_filename_parts[0])

return http_archive(
name = name,
url = url,
urls = urls,
build_file = build_file,
build_file_content = build_file_content,
strip_prefix = strip_prefix,
**kwargs
)

def ray_deps_setup():

# Explicitly bring in protobuf dependency to work around
# https://github.com/ray-project/ray/issues/14117
http_archive(
@@ -110,13 +118,13 @@ def ray_deps_setup():
urls = ["https://github.com/gabime/spdlog/archive/v1.7.0.zip"],
sha256 = "c8f1e1103e0b148eb8832275d8e68036f2fdd3975a1199af0e844908c56f6ea5",
)

auto_http_archive(
name = "com_github_tporadowski_redis_bin",
build_file = "//bazel:BUILD.redis",
strip_prefix = None,
url = "https://github.com/tporadowski/redis/releases/download/v5.0.9/Redis-x64-5.0.9.zip",
sha256 = "b09565b22b50c505a5faa86a7e40b6683afb22f3c17c5e6a5e35fc9b7c03f4c2",
sha256 = "b09565b22b50c505a5faa86a7e40b6683afb22f3c17c5e6a5e35fc9b7c03f4c2",
)

auto_http_archive(
@@ -193,15 +201,15 @@ def ray_deps_setup():
patches = [
"//thirdparty/patches:opencensus-cpp-harvest-interval.patch",
"//thirdparty/patches:opencensus-cpp-shutdown-api.patch",
]
],
)

# OpenCensus depends on Abseil so we have to explicitly pull it in.
# This is how diamond dependencies are prevented.
auto_http_archive(
name = "com_google_absl",
url = "https://github.com/abseil/abseil-cpp/archive/278e0a071885a22dcd2fd1b5576cc44757299343.tar.gz",
sha256 = "1764491a199eb9325b177126547f03d244f86b4ff28f16f206c7b3e7e4f777ec",
url = "https://github.com/abseil/abseil-cpp/archive/refs/tags/20210324.2.tar.gz",
sha256 = "59b862f50e710277f8ede96f083a5bb8d7c9595376146838b9580be90374ee1f",
)

# OpenCensus depends on jupp0r/prometheus-cpp
@@ -214,17 +222,18 @@
# https://github.com/jupp0r/prometheus-cpp/pull/225
"//thirdparty/patches:prometheus-windows-zlib.patch",
"//thirdparty/patches:prometheus-windows-pollfd.patch",
]
],
)

auto_http_archive(
name = "com_github_grpc_grpc",
# NOTE: If you update this, also update @boringssl's hash.
url = "https://github.com/grpc/grpc/archive/4790ab6d97e634a1ede983be393f3bb3c132b2f7.tar.gz",
sha256 = "df83bd8a08975870b8b254c34afbecc94c51a55198e6e3a5aab61d62f40b7274",
url = "https://github.com/grpc/grpc/archive/refs/tags/v1.38.1.tar.gz",
sha256 = "f60e5b112913bf776a22c16a3053cc02cf55e60bf27a959fd54d7aaf8e2da6e8",
patches = [
"//thirdparty/patches:grpc-cython-copts.patch",
"//thirdparty/patches:grpc-python.patch",
"//thirdparty/patches:grpc-windows-python-header-path.patch",
],
)

@@ -234,8 +243,8 @@
# https://github.com/grpc/grpc/blob/4790ab6d97e634a1ede983be393f3bb3c132b2f7/bazel/grpc_deps.bzl#L102
name = "boringssl",
# Ensure this matches the commit used by grpc's bazel/grpc_deps.bzl
url = "https://github.com/google/boringssl/archive/83da28a68f32023fd3b95a8ae94991a07b1f6c62.tar.gz",
sha256 = "781fa39693ec2984c71213cd633e9f6589eaaed75e3a9ac413237edec96fd3b9",
url = "https://github.com/google/boringssl/archive/688fc5cf5428868679d2ae1072cad81055752068.tar.gz",
sha256 = "f8616dff15cb8aad6705af53c7caf7a5f1103b6aaf59c76b55995e179d47f89c",
)

auto_http_archive(
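
To make the auto_http_archive changes above easier to follow, here is a rough plain-Python sketch of the name, mirror-URL, and strip_prefix deduction the helper performs for a GitHub archive URL. It is an illustration of the logic only: the real helper is Starlark, forwards everything to http_archive, and the mirror prefix below is an assumed example.

# Illustrative sketch (plain Python, not the Starlark rule).
MIRROR_PREFIXES = ["https://mirror.bazel.build/"]  # assumed example mirror

def strip_archive_suffix(filename):
    # Treat ".tar.gz"-style double suffixes specially, otherwise drop one suffix.
    for suffix in (".tar.gz", ".tar.bz2", ".tar.xz"):
        if filename.lower().endswith(suffix):
            return filename[:-len(suffix)]
    return filename.rsplit(".", 1)[0]

def deduce(url):
    url_except_scheme = url.split("://", 1)[1]  # "github.com/user/project/archive/vX.Y.Z.zip"
    parts = url_except_scheme.split("/")
    user, project, ref_file = parts[1], parts[2], parts[-1]
    name = "com_github_%s_%s" % (user.replace("-", "_"), project.replace("-", "_"))
    version = strip_archive_suffix(ref_file)
    if version.startswith("v") and version[1:2].isdigit():
        version = version[1:]  # GitHub strips the leading 'v' in archive directory names
    strip_prefix = "%s-%s" % (project, version)
    urls = [url] + [prefix + url_except_scheme for prefix in MIRROR_PREFIXES]
    return name, urls, strip_prefix

print(deduce("https://github.com/gabime/spdlog/archive/v1.7.0.zip"))
# -> ('com_github_gabime_spdlog',
#     ['https://github.com/gabime/spdlog/archive/v1.7.0.zip',
#      'https://mirror.bazel.build/github.com/gabime/spdlog/archive/v1.7.0.zip'],
#     'spdlog-1.7.0')
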
12 changes: 9 additions & 3 deletions ci/travis/install-dependencies.sh
@@ -24,9 +24,15 @@ pkg_install_helper() {

install_bazel() {
if command -v bazel; then
if [ -n "${BUILDKITE-}" ]; then
echo "Bazel exists, skipping the install"
return
if [[ -n "${BUILDKITE-}" ]]; then
# Only reinstall Bazel if we need to upgrade to a different version.
python="$(command -v python3 || command -v python || echo python)"
current_version="$(bazel --version | grep -o "[0-9]\+.[0-9]\+.[0-9]\+")"
new_version="$("${python}" -s -c "import runpy, sys; runpy.run_path(sys.argv.pop(), run_name='__api__')" bazel_version "${ROOT_DIR}/../../python/setup.py")"
if [[ "$current_version" == "$new_version" ]]; then
echo "Bazel of the same version already exists, skipping the install"
return
fi
fi
fi

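The new_version line above drives python/setup.py through runpy with run_name='__api__' so the script reports its pinned Bazel version instead of running setuptools. A hypothetical sketch of how a setup.py-style script might support that calling convention (an assumption for illustration, not Ray's actual setup.py):

# Hypothetical sketch: answering the run_name='__api__' invocation used in
# install_bazel() above. Not Ray's actual setup.py.
import sys

SUPPORTED_BAZEL = (3, 4, 1)

# Values a caller may request by name when driving this file through runpy.
_API_VALUES = {
    "bazel_version": ".".join(map(str, SUPPORTED_BAZEL)),
}

if __name__ == "__api__":
    # The shell snippet runs roughly:
    #   python -c "import runpy, sys; runpy.run_path(sys.argv.pop(), run_name='__api__')" \
    #       bazel_version python/setup.py
    # which leaves the requested key in sys.argv[1].
    print(_API_VALUES[sys.argv[1]])
elif __name__ == "__main__":
    pass  # the normal setuptools setup(...) call would go here
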
4 changes: 2 additions & 2 deletions doc/source/development.rst
@@ -126,7 +126,7 @@ Building Ray on Windows (full)

The following links were correct during the writing of this section. In case the URLs changed, search at the organizations' sites.

- bazel 3.2 (https://github.com/bazelbuild/bazel/releases/tag/3.2.0)
- bazel 3.4 (https://github.com/bazelbuild/bazel/releases/tag/3.4.0)
- Microsoft Visual Studio 2019 (or Microsoft Build Tools 2019 - https://visualstudio.microsoft.com/downloads/#build-tools-for-visual-studio-2019)
- JDK 15 (https://www.oracle.com/java/technologies/javase-jdk15-downloads.html)
- Miniconda 3 (https://docs.conda.io/en/latest/miniconda.html)
@@ -149,7 +149,7 @@ The following links were correct during the writing of this section. In case the

3. Define an environment variable BAZEL_SH to point to bash.exe. If git for Windows was installed for all users, bash's path should be ``C:\Program Files\Git\bin\bash.exe``. If git was installed for a single user, adjust the path accordingly.

4. Bazel 3.2 installation. Go to bazel 3.2 release web page and download bazel-3.2.0-windows-x86_64.exe. Copy the exe into the directory of your choice. Define an environment variable BAZEL_PATH to full exe path (example: ``C:\bazel\bazel-3.2.0-windows-x86_64.exe``)
4. Bazel 3.4 installation. Go to bazel 3.4 release web page and download bazel-3.4.0-windows-x86_64.exe. Copy the exe into the directory of your choice. Define an environment variable BAZEL_PATH to full exe path (example: ``C:\bazel\bazel-3.4.0-windows-x86_64.exe``)

5. Install cython and pytest:

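Steps 3 and 4 above define the BAZEL_SH and BAZEL_PATH environment variables; a small illustrative Python check (assuming those variable names) can confirm they are set and point at real files before building:

# Illustrative sanity check for the Windows setup steps above
# (assumes the variable names BAZEL_SH and BAZEL_PATH from steps 3 and 4).
import os

for var in ("BAZEL_SH", "BAZEL_PATH"):
    path = os.environ.get(var)
    if not path:
        print("%s is not set" % var)
    elif not os.path.isfile(path):
        print("%s points to a missing file: %s" % (var, path))
    else:
        print("%s -> %s" % (var, path))
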
2 changes: 1 addition & 1 deletion python/setup.py
@@ -23,7 +23,7 @@
logger = logging.getLogger(__name__)

SUPPORTED_PYTHONS = [(3, 6), (3, 7), (3, 8), (3, 9)]
SUPPORTED_BAZEL = (3, 2, 0)
SUPPORTED_BAZEL = (3, 4, 1)

ROOT_DIR = os.path.dirname(__file__)
BUILD_JAVA = os.getenv("RAY_INSTALL_JAVA") == "1"
18 changes: 9 additions & 9 deletions src/ray/rpc/client_call.h
@@ -104,7 +104,7 @@ class ClientCallImpl : public ClientCall {
std::shared_ptr<StatsHandle> stats_handle_;

/// The response reader.
std::unique_ptr<grpc_impl::ClientAsyncResponseReader<Reply>> response_reader_;
std::unique_ptr<grpc::ClientAsyncResponseReader<Reply>> response_reader_;

/// gRPC status of this request.
grpc::Status status_;
@@ -158,9 +158,9 @@ class ClientCallTag {
/// \tparam Request Type of the request message.
/// \tparam Reply Type of the reply message.
template <class GrpcService, class Request, class Reply>
using PrepareAsyncFunction =
std::unique_ptr<grpc_impl::ClientAsyncResponseReader<Reply>> (GrpcService::Stub::*)(
grpc::ClientContext *context, const Request &request, grpc::CompletionQueue *cq);
using PrepareAsyncFunction = std::unique_ptr<grpc::ClientAsyncResponseReader<Reply>> (
GrpcService::Stub::*)(grpc::ClientContext *context, const Request &request,
grpc::CompletionQueue *cq);

/// `ClientCallManager` is used to manage outgoing gRPC requests and the lifecycles of
/// `ClientCall` objects.
@@ -181,7 +181,7 @@ class ClientCallManager {
// Start the polling threads.
cqs_.reserve(num_threads_);
for (int i = 0; i < num_threads_; i++) {
cqs_.emplace_back();
cqs_.push_back(std::make_unique<grpc::CompletionQueue>());
polling_threads_.emplace_back(&ClientCallManager::PollEventsFromCompletionQueue,
this, i);
}
@@ -190,7 +190,7 @@ class ClientCallManager {
~ClientCallManager() {
shutdown_ = true;
for (auto &cq : cqs_) {
cq.Shutdown();
cq->Shutdown();
}
for (auto &polling_thread : polling_threads_) {
polling_thread.join();
@@ -222,7 +222,7 @@ class ClientCallManager {
// Send request.
// Find the next completion queue to wait for response.
call->response_reader_ = (stub.*prepare_async_function)(
&call->context_, request, &cqs_[rr_index_++ % num_threads_]);
&call->context_, request, cqs_[rr_index_++ % num_threads_].get());
call->response_reader_->StartCall();
// Create a new tag object. This object will eventually be deleted in the
// `ClientCallManager::PollEventsFromCompletionQueue` when reply is received.
@@ -251,7 +251,7 @@ class ClientCallManager {
while (true) {
auto deadline = gpr_time_add(gpr_now(GPR_CLOCK_REALTIME),
gpr_time_from_millis(250, GPR_TIMESPAN));
auto status = cqs_[index].AsyncNext(&got_tag, &ok, deadline);
auto status = cqs_[index]->AsyncNext(&got_tag, &ok, deadline);
if (status == grpc::CompletionQueue::SHUTDOWN) {
break;
} else if (status == grpc::CompletionQueue::TIMEOUT && shutdown_) {
@@ -293,7 +293,7 @@ class ClientCallManager {
std::atomic<unsigned int> rr_index_;

/// The gRPC `CompletionQueue` object used to poll events.
std::vector<grpc::CompletionQueue> cqs_;
std::vector<std::unique_ptr<grpc::CompletionQueue>> cqs_;

/// Polling threads to check the completion queue.
std::vector<std::thread> polling_threads_;
4 changes: 2 additions & 2 deletions src/ray/rpc/server_call.h
@@ -242,7 +242,7 @@ class ServerCallImpl : public ServerCall {
grpc::ServerContext context_;

/// The response writer.
grpc_impl::ServerAsyncResponseWriter<Reply> response_writer_;
grpc::ServerAsyncResponseWriter<Reply> response_writer_;

/// The event loop.
instrumented_io_context &io_service_;
@@ -276,7 +276,7 @@ class ServerCallImpl : public ServerCall {
/// \tparam Reply Type of the reply message.
template <class GrpcService, class Request, class Reply>
using RequestCallFunction = void (GrpcService::AsyncService::*)(
grpc::ServerContext *, Request *, grpc_impl::ServerAsyncResponseWriter<Reply> *,
grpc::ServerContext *, Request *, grpc::ServerAsyncResponseWriter<Reply> *,
grpc::CompletionQueue *, grpc::ServerCompletionQueue *, void *);

/// Implementation of `ServerCallFactory`
22 changes: 11 additions & 11 deletions thirdparty/patches/grpc-cython-copts.patch
@@ -2,8 +2,8 @@ diff --git bazel/cython_library.bzl bazel/cython_library.bzl
--- bazel/cython_library.bzl
+++ bazel/cython_library.bzl
@@ -10,15 +10,16 @@
-def pyx_library(name, deps=[], py_deps=[], srcs=[], **kwargs):
+def pyx_library(name, deps=[], cc_kwargs={}, py_deps=[], srcs=[], **kwargs):
-def pyx_library(name, deps = [], py_deps = [], srcs = [], **kwargs):
+def pyx_library(name, deps = [], cc_kwargs = {}, py_deps = [], srcs = [], **kwargs):
"""Compiles a group of .pyx / .pxd / .py files.

First runs Cython to create .cpp files for each input .pyx or .py + .pxd
@@ -20,18 +20,18 @@ diff --git bazel/cython_library.bzl bazel/cython_library.bzl
name: Name for the rule.
deps: C/C++ dependencies of the Cython (e.g. Numpy headers).
+ cc_kwargs: cc_binary extra arguments such as copts, linkstatic, linkopts, features
@@ -57,7 +59,8 @@ def pyx_library(name, deps=[], py_deps=[], srcs=[], **kwargs):
@@ -57,7 +59,8 @@ def pyx_library(name, deps = [], py_deps = [], srcs = [], **kwargs):
- shared_object_name = stem + ".so"
+ shared_object_name = stem + ".so"
native.cc_binary(
- name=shared_object_name,
+ name=cc_kwargs.pop("name", shared_object_name),
- srcs=[stem + ".cpp"],
+ srcs=[stem + ".cpp"] + cc_kwargs.pop("srcs", []),
- deps=deps + ["@local_config_python//:python_headers"],
+ deps=deps + ["@local_config_python//:python_headers"] + cc_kwargs.pop("deps", []),
- linkshared=1,
+ linkshared=cc_kwargs.pop("linkshared", 1),
- name = shared_object_name,
+ name = cc_kwargs.pop("name", shared_object_name),
- srcs = [stem + ".cpp"],
+ srcs = [stem + ".cpp"] + cc_kwargs.pop("srcs", []),
- deps = deps + ["@local_config_python//:python_headers"],
+ deps = deps + ["@local_config_python//:python_headers"] + cc_kwargs.pop("deps", []),
- linkshared = 1,
+ linkshared = cc_kwargs.pop("linkshared", 1),
+ **cc_kwargs
)
--
