Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/master' into requirements/tiers-constraint
Browse files Browse the repository at this point in the history

# Conflicts:
#	python/requirements/ml/dl-gpu-requirements.txt
  • Loading branch information
Kai Fricke committed Jul 7, 2023
2 parents f0f879c + 47b4189 commit 2355977
Show file tree
Hide file tree
Showing 77 changed files with 2,742 additions and 1,140 deletions.
2 changes: 1 addition & 1 deletion .buildkite/pipeline.arm64.yml
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
conditions: ["RAY_CI_LINUX_WHEELS_AFFECTED"]
instance_size: arm64-medium
commands:
- LINUX_WHEELS=1 ./ci/ci.sh build
- LINUX_WHEELS=1 BUILD_ONE_PYTHON_ONLY=py38 ./ci/ci.sh build
- cleanup() { if [ "${BUILDKITE_PULL_REQUEST}" = "false" ]; then ./ci/build/upload_build_info.sh; fi }; trap cleanup EXIT
- ./ci/env/env_info.sh
- bazel test --config=ci $(./ci/run/bazel_export_options)
Expand Down
2 changes: 1 addition & 1 deletion .buildkite/pipeline.build_release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
conditions: ["RAY_CI_LINUX_WHEELS_AFFECTED"]
instance_size: medium
commands:
- LINUX_WHEELS=1 ./ci/ci.sh build
- LINUX_WHEELS=1 BUILD_ONE_PYTHON_ONLY=py38 ./ci/ci.sh build
- cleanup() { if [ "${BUILDKITE_PULL_REQUEST}" = "false" ]; then ./ci/build/upload_build_info.sh; fi }; trap cleanup EXIT
- ./ci/env/env_info.sh
- bazel test --config=ci $(./ci/run/bazel_export_options)
Expand Down
8 changes: 4 additions & 4 deletions ci/env/install-dependencies.sh
Original file line number Diff line number Diff line change
Expand Up @@ -419,17 +419,17 @@ install_pip_packages() {
pip install -U "torch==${TORCH_VERSION-1.9.0}" "torchvision==${TORCHVISION_VERSION-0.10.0}"
# We won't add dl-cpu-requirements.txt as it would otherwise overwrite our custom
# torch. Thus we also have to install tensorflow manually.
TF_PACKAGE=$(grep "tensorflow==" "${WORKSPACE_DIR}/python/requirements/ml/dl-cpu-requirements.txt")
TFPROB_PACKAGE=$(grep "tensorflow-probability==" "${WORKSPACE_DIR}/python/requirements/ml/dl-cpu-requirements.txt")
TF_PACKAGE=$(grep -ohE "tensorflow==[^ ;]+" "${WORKSPACE_DIR}/python/requirements/ml/dl-cpu-requirements.txt" | head -n 1)
TFPROB_PACKAGE=$(grep -ohE "tensorflow-probability==[^ ;]+" "${WORKSPACE_DIR}/python/requirements/ml/dl-cpu-requirements.txt" | head -n 1)

# %%;* deletes everything after ; to get rid of e.g. python version specifiers
pip install -U "${TF_PACKAGE%%;*}" "${TFPROB_PACKAGE%%;*}"
else
# Otherwise, use pinned default torch version.
# Again, install right away, as some dependencies (e.g. torch-spline-conv) need
# torch to be installed for their own install.
TORCH_PACKAGE=$(grep "torch==" "${WORKSPACE_DIR}/python/requirements/ml/dl-cpu-requirements.txt")
TORCHVISION_PACKAGE=$(grep "torchvision==" "${WORKSPACE_DIR}/python/requirements/ml/dl-cpu-requirements.txt")
TORCH_PACKAGE=$(grep -ohE "torch==[^ ;]+" "${WORKSPACE_DIR}/python/requirements/ml/dl-cpu-requirements.txt" | head -n 1)
TORCHVISION_PACKAGE=$(grep -ohE "torchvision==[^ ;]+" "${WORKSPACE_DIR}/python/requirements/ml/dl-cpu-requirements.txt" | head -n 1)

# %%;* deletes everything after ; to get rid of e.g. python version specifiers
pip install "${TORCH_PACKAGE%%;*}" "${TORCHVISION_PACKAGE%%;*}"
Expand Down
14 changes: 13 additions & 1 deletion doc/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -313,14 +313,26 @@ doctest(
"source/serve/production-guide/fault-tolerance.md",
"source/data/batch_inference.rst",
"source/data/transforming-data.rst",
"source/train/faq.rst"
"source/train/faq.rst",
"source/workflows/**/*.rst",
"source/workflows/**/*.md"
]
),
size = "large",
tags = ["team:none"]
)


doctest(
name="doctest[workflow]",
files = glob(
include=[
"source/workflows/**/*.rst",
"source/workflows/**/*.md"
]
),
tags = ["team:core"]
)

doctest(
files = [
Expand Down
16 changes: 16 additions & 0 deletions doc/source/data/api/input_output.rst
Original file line number Diff line number Diff line change
Expand Up @@ -227,6 +227,9 @@ Partitioning API
datasource.PathPartitionEncoder
datasource.PathPartitionParser
datasource.PathPartitionFilter
datasource.FileExtensionFilter

.. _metadata_provider:

MetadataProvider API
--------------------
Expand All @@ -240,3 +243,16 @@ MetadataProvider API
datasource.DefaultFileMetadataProvider
datasource.DefaultParquetMetadataProvider
datasource.FastFileMetadataProvider


.. _block_write_path_provider:

BlockWritePathProvider API
--------------------------

.. autosummary::
:toctree: doc/

datasource.BlockWritePathProvider
datasource.DefaultBlockWritePathProvider

2 changes: 2 additions & 0 deletions doc/source/data/performance-tips.rst
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@ If your transformation isn't vectorized, there's no performance benefit.
Optimizing reads
----------------

.. _read_parallelism:

Tuning read parallelism
~~~~~~~~~~~~~~~~~~~~~~~

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@
" \"transformers>=4.26.0\",\n",
" \"diffusers>=0.13.1\",\n",
" \"xformers>=0.0.16\",\n",
" \"torch\",\n",
" \"torch<2\",\n",
" ]\n",
" }\n",
")"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@
" nn.ReLU(),\n",
" )\n",
" self.lr = lr\n",
" self.accuracy = Accuracy(task=\"multiclass\", num_classes=10)\n",
" self.accuracy = Accuracy(task=\"multiclass\", num_classes=10, top_k=1)\n",
" self.eval_loss = []\n",
" self.eval_accuracy = []\n",
" self.test_accuracy = []\n",
Expand Down
2 changes: 1 addition & 1 deletion doc/source/tune/examples/tune-pytorch-lightning.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@
"class MNISTClassifier(pl.LightningModule):\n",
" def __init__(self, config):\n",
" super(MNISTClassifier, self).__init__()\n",
" self.accuracy = Accuracy()\n",
" self.accuracy = Accuracy(task=\"multiclass\", num_classes=10, top_k=1)\n",
" self.layer_1_size = config[\"layer_1_size\"]\n",
" self.layer_2_size = config[\"layer_2_size\"]\n",
" self.lr = config[\"lr\"]\n",
Expand Down
9 changes: 8 additions & 1 deletion doc/source/workflows/advanced.rst
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,14 @@ Ray Workflows provides strong fault tolerance and exactly-once execution semanti

Checkpoints can be skipped by specifying ``checkpoint=False``:

.. code-block:: python
.. testcode::

import ray
from ray import workflow

@ray.remote
def read_data(num: int):
return [i for i in range(num)]

data = read_data.options(**workflow.options(checkpoint=False)).bind(10)
Expand Down
Loading

0 comments on commit 2355977

Please sign in to comment.