Remove independent cluster testing (#1123)
* Remove make_pickable_without_dask_sql
* Remove independent cluster testing
* Remove testing logic around independent cluster
* Add test job running on consistent distributed cluster
* Add distributed label to test jobs
* Remove some more mentions of independent cluster
* Remove conditional cluster skips
* Make sure to close client on app fixtures
* Try using session-wide client to speed up test run
* Remove line around complex cluster setups
* Add 3.10 cluster testing build

Co-authored-by: Ayush Dattagupta <[email protected]>
1 parent 6d0872a · commit e68817f
Showing 20 changed files with 51 additions and 317 deletions.
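The replacement approach runs the existing test suite against a Dask client whenever DASK_SQL_DISTRIBUTED_TESTS is set by the CI matrix (see the workflow diffs below). As a rough sketch of the "session-wide client" idea from the commit message — the fixture name and details here are illustrative, not the repository's actual conftest code:

    import os

    import pytest
    from distributed import Client, LocalCluster

    # Hypothetical session-scoped fixture: spin up one local distributed
    # cluster for the whole test run instead of one per test, and make
    # sure the client is closed when the session ends.
    @pytest.fixture(scope="session")
    def client():
        if os.environ.get("DASK_SQL_DISTRIBUTED_TESTS", "false").lower() != "true":
            yield None  # plain (non-distributed) test run
            return
        with LocalCluster(n_workers=2, threads_per_worker=1) as cluster:
            with Client(cluster) as client:  # closed automatically at session end
                yield client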
.github/workflows/test-upstream.yml
@@ -13,6 +13,7 @@ on:
       options:
       - Dask
       - DataFusion
+
 # Required shell entrypoint to have properly activated conda environments
 defaults:
   run:
@@ -32,18 +33,25 @@ env:
 jobs:
   test-dev:
-    name: "Test upstream dev (${{ matrix.os }}, python: ${{ matrix.python }})"
+    name: "Test upstream dev (${{ matrix.os }}, python: ${{ matrix.python }}, distributed: ${{ matrix.distributed }})"
     runs-on: ${{ matrix.os }}
     env:
       CONDA_FILE: continuous_integration/environment-${{ matrix.python }}-dev.yaml
-    defaults:
-      run:
-        shell: bash -l {0}
+      DASK_SQL_DISTRIBUTED_TESTS: ${{ matrix.distributed }}
     strategy:
       fail-fast: false
       matrix:
         os: [ubuntu-latest, windows-latest, macos-latest]
         python: ["3.8", "3.9", "3.10"]
+        distributed: [false]
+        include:
+          # run tests on a distributed client
+          - os: "ubuntu-latest"
+            python: "3.8"
+            distributed: true
+          - os: "ubuntu-latest"
+            python: "3.10"
+            distributed: true
     steps:
       - uses: actions/checkout@v3
         with:
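Note that in a GitHub Actions matrix, include: appends extra job combinations rather than multiplying a new axis into the os × python grid: every cross-product job keeps the default distributed: false, and only the two ubuntu-latest jobs on Python 3.8 and 3.10 run with a distributed client.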
@@ -92,66 +100,6 @@ jobs:
           name: test-${{ matrix.os }}-py${{ matrix.python }}-results
           path: test-${{ matrix.os }}-py${{ matrix.python }}-results.jsonl
 
-  cluster-dev:
-    name: "Test upstream dev in a dask cluster"
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-      - name: Set up Python
-        uses: conda-incubator/setup-miniconda@v2
-        with:
-          miniforge-variant: Mambaforge
-          use-mamba: true
-          python-version: "3.9"
-          channel-priority: strict
-          activate-environment: dask-sql
-          environment-file: continuous_integration/environment-3.9-dev.yaml
-      - name: Optionally update upstream cargo dependencies
-        if: env.which_upstream == 'DataFusion'
-        run: |
-          cd dask_planner
-          bash update-dependencies.sh
-      - name: Build the Rust DataFusion bindings
-        run: |
-          python setup.py build install
-      - name: Install cluster dependencies
-        run: |
-          # TODO: add pytest-reportlog to testing environments if we move over to JSONL output
-          mamba install pytest-reportlog python-blosc lz4 -c conda-forge
-          which python
-          pip list
-          mamba list
-      - name: Install upstream dev Dask
-        if: env.which_upstream == 'Dask'
-        run: |
-          mamba install --no-channel-priority dask/label/dev::dask
-      - name: run a dask cluster
-        run: |
-          if [[ $which_upstream == "Dask" ]]; then
-            docker-compose -f continuous_integration/cluster/upstream.yml up -d
-          else
-            docker-compose -f continuous_integration/cluster/stable.yml up -d
-          fi
-
-          # periodically ping logs until a connection has been established; assume failure after 2 minutes
-          timeout 2m bash -c 'until docker logs dask-worker 2>&1 | grep -q "Starting established connection"; do sleep 1; done'
-
-          docker logs dask-scheduler
-          docker logs dask-worker
-      - name: Test with pytest while running an independent dask cluster
-        id: run_tests
-        run: |
-          DASK_SQL_TEST_SCHEDULER="tcp://127.0.0.1:8786" pytest --report-log test-cluster-results.jsonl --cov-report=xml -n auto tests --dist loadfile
-      - name: Upload pytest results for failure
-        if: |
-          always()
-          && steps.run_tests.outcome != 'skipped'
-        uses: actions/upload-artifact@v3
-        with:
-          name: test-cluster-results
-          path: test-cluster-results.jsonl
-
   import-dev:
     name: "Test importing with bare requirements and upstream dev"
     runs-on: ubuntu-latest
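The removed jobs blocked until a worker had joined by polling container logs with timeout 2m bash -c 'until docker logs ...'. For reference, the same readiness poll written out in Python — illustrative only, not code from the repository:

    import subprocess
    import time

    def wait_for_log_line(container: str, needle: str, timeout: float = 120.0) -> None:
        """Poll `docker logs <container>` until `needle` appears, else raise."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            result = subprocess.run(
                ["docker", "logs", container],
                capture_output=True,
                text=True,
            )
            if needle in result.stdout + result.stderr:
                return  # the worker has connected to the scheduler
            time.sleep(1)
        raise TimeoutError(f"{needle!r} not found in {container} logs after {timeout}s")

    wait_for_log_line("dask-worker", "Starting established connection")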
.github/workflows/test.yml
@@ -33,16 +33,26 @@ jobs:
       keyword: "[test-upstream]"
 
   test:
-    name: "Build & Test (${{ matrix.os }}, python: ${{ matrix.python }})"
+    name: "Build & Test (${{ matrix.os }}, python: ${{ matrix.python }}, distributed: ${{ matrix.distributed }})"
     needs: [detect-ci-trigger]
     runs-on: ${{ matrix.os }}
     env:
       CONDA_FILE: continuous_integration/environment-${{ matrix.python }}-dev.yaml
+      DASK_SQL_DISTRIBUTED_TESTS: ${{ matrix.distributed }}
     strategy:
       fail-fast: false
       matrix:
         os: [ubuntu-latest, windows-latest, macos-latest]
         python: ["3.8", "3.9", "3.10"]
+        distributed: [false]
+        include:
+          # run tests on a distributed client
+          - os: "ubuntu-latest"
+            python: "3.8"
+            distributed: true
+          - os: "ubuntu-latest"
+            python: "3.10"
+            distributed: true
     steps:
       - uses: actions/checkout@v3
       - name: Set up Python
@@ -86,59 +96,6 @@ jobs:
         if: github.repository == 'dask-contrib/dask-sql'
         uses: codecov/codecov-action@v3
 
-  cluster:
-    name: "Test in a dask cluster"
-    needs: [detect-ci-trigger]
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-      - name: Set up Python
-        uses: conda-incubator/setup-miniconda@v2
-        with:
-          miniforge-variant: Mambaforge
-          use-mamba: true
-          python-version: "3.9"
-          channel-priority: strict
-          activate-environment: dask-sql
-          environment-file: continuous_integration/environment-3.9-dev.yaml
-      - name: Cache Rust
-        uses: Swatinem/rust-cache@v2
-        with:
-          workspaces: dask_planner
-          shared-key: test
-      - name: Build the Rust DataFusion bindings
-        run: |
-          python setup.py build install
-      - name: Install dependencies
-        run: |
-          mamba install python-blosc lz4 -c conda-forge
-          which python
-          pip list
-          mamba list
-      - name: Optionally install upstream dev Dask
-        if: needs.detect-ci-trigger.outputs.triggered == 'true'
-        run: |
-          mamba install --no-channel-priority dask/label/dev::dask
-      - name: run a dask cluster
-        env:
-          UPSTREAM: ${{ needs.detect-ci-trigger.outputs.triggered }}
-        run: |
-          if [[ $UPSTREAM == "true" ]]; then
-            docker-compose -f continuous_integration/cluster/upstream.yml up -d
-          else
-            docker-compose -f continuous_integration/cluster/stable.yml up -d
-          fi
-
-          # periodically ping logs until a connection has been established; assume failure after 2 minutes
-          timeout 2m bash -c 'until docker logs dask-worker 2>&1 | grep -q "Starting established connection"; do sleep 1; done'
-
-          docker logs dask-scheduler
-          docker logs dask-worker
-      - name: Test with pytest while running an independent dask cluster
-        run: |
-          DASK_SQL_TEST_SCHEDULER="tcp://127.0.0.1:8786" pytest tests
-
   import:
     name: "Test importing with bare requirements"
     needs: [detect-ci-trigger]
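Before this commit, the test suite was pointed at the dockerized scheduler through DASK_SQL_TEST_SCHEDULER. Connecting to a scheduler address like that is a one-liner with distributed — a sketch under the assumption that the suite built a client from this address, not the repository's actual fixture code:

    import os

    from distributed import Client

    # Address format matches the removed workflow step: tcp://<host>:<port>
    address = os.environ.get("DASK_SQL_TEST_SCHEDULER", "tcp://127.0.0.1:8786")
    client = Client(address)  # connects to an already-running scheduler
    print(client.scheduler_info()["workers"].keys())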
[Diffs for the remaining 18 changed files, including three deleted files, are not shown in this view.]