forked from sql-machine-learning/sqlflow
-
Notifications
You must be signed in to change notification settings - Fork 0
/
.travis.yml
140 lines (135 loc) · 4.95 KB
/
.travis.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
---
language: go
dist: bionic
# SQLFlow uses sqlflow.org/sqlflow as vanity import path. TravisCI
# supports it via go_import_path. Ref:
# https://docs.travis-ci.com/user/languages/go/#go-import-path
go_import_path: sqlflow.org/sqlflow
go:
  - 1.13.x
# Only build pushes/PRs to these branches and release tags.
branches:
  only:
    - gh-pages
    - develop
    - "/release-*/"
    - "/^v\\d+\\.\\d+(\\.\\d+)?(-\\S*)?$/"
services:
  - docker
# Note(tony): Update to newer version of Docker to allow
# Dockerfile being outside of the context directory
# https://github.com/docker/compose/issues/4926#issuecomment-370501452
addons:
  apt:
    packages:
      - docker-ce
# Note(tony): TravisCI's native `go get ${gobuild_args} ./...` failed with
# `/usr/bin/ld: final link failed: Bad value`, the cause is the system linker
# being not up to date: https://github.com/golang/go/issues/15038
# So I decided to skip install, and go get inside SQLFlow devbox Docker image
# Ref build: https://travis-ci.com/sql-machine-learning/sqlflow/builds/107870583
install:
  - echo "skip install"
jobs:
  include:
    - stage: BuildAndTest
      script:
        - set -e
        - $TRAVIS_BUILD_DIR/scripts/travis/build.sh
        # Run pre-commit linters inside the devbox image. The indented
        # continuation lines fold into one shell command (YAML plain scalar).
        - docker run --rm -it -v $TRAVIS_BUILD_DIR:/work -w /work
          -e TRAVIS_BUILD_DIR=/work sqlflow:dev
          pre-commit run -a --show-diff-on-failure
    - env: SQLFLOW_TEST_DB=mysql
      script:
        - set -e
        # Build MySQL server image including datasets.
        - cd $TRAVIS_BUILD_DIR
        - docker build -t sqlflow:mysql -f docker/mysql/Dockerfile .
        # Build sqlflow:dev, SQLFlow, and sqlflow:ci
        - $TRAVIS_BUILD_DIR/scripts/travis/build.sh
        # Run a MySQL server container.
        - docker run --rm -d -p 13306:3306
          -v $TRAVIS_BUILD_DIR:/work sqlflow:mysql
        - docker run --rm
          -v $TRAVIS_BUILD_DIR:/work -w /work
          -v /var/run/docker.sock:/var/run/docker.sock
          --net=host
          -e SQLFLOW_TEST_DB=mysql
          -e SQLFLOW_TEST_DB_MYSQL_ADDR="127.0.0.1:13306"
          sqlflow:ci scripts/test/mysql.sh
        - scripts/travis/upload_codecov.sh
    - env: SQLFLOW_TEST_DB=hive  # run more parallel tests in the same stage:
      script:
        - set -e
        - $TRAVIS_BUILD_DIR/scripts/travis/build.sh
        - docker pull sqlflow/gohive:dev
        - docker run -d --name=hive sqlflow/gohive:dev python3 -m http.server 8899
        # Share the hive container's network namespace so tests reach Hive
        # on localhost.
        - docker run --rm -it --net=container:hive
          -v $TRAVIS_BUILD_DIR:/work -w /work
          -v $GOPATH:/root/go
          -v /var/run/docker.sock:/var/run/docker.sock
          sqlflow:ci scripts/test/hive.sh
        - scripts/travis/upload_codecov.sh
    - env: SQLFLOW_TEST_DB=maxcompute
      script:
        - set -e
        - $TRAVIS_BUILD_DIR/scripts/travis/build.sh
        # Download minikube and start a Kubernetes cluster. The credential is
        # written into $HOME/.kube/ on the Travis CI VM.
        - $TRAVIS_BUILD_DIR/scripts/travis/setup_minikube.sh
        # Bind mount $HOME/.kube into the container, so that test/maxcompute.sh
        # running in the container can call kubectl to submit jobs to the minikube
        # cluster.
        - docker run --rm --net=host
          -e SQLFLOW_TEST_DB_MAXCOMPUTE_AK=$MAXCOMPUTE_AK
          -e SQLFLOW_TEST_DB_MAXCOMPUTE_SK=$MAXCOMPUTE_SK
          -v /var/run/docker.sock:/var/run/docker.sock
          -v $HOME/.kube:/root/.kube
          -v /home/$USER/.minikube/:/home/$USER/.minikube/
          -v $TRAVIS_BUILD_DIR:/work -w /work
          sqlflow:ci scripts/test/maxcompute.sh
        - scripts/travis/upload_codecov.sh
    - env: SQLFLOW_TEST=java
      script:
        - set -e
        - $TRAVIS_BUILD_DIR/scripts/travis/build.sh
        - docker run --rm
          -v $TRAVIS_BUILD_DIR:/work -w /work
          sqlflow:ci scripts/test/java.sh
    - env: SQLFLOW_TEST=workflow
      script:
        - set -e
        - cd $TRAVIS_BUILD_DIR
        - docker build -t sqlflow:mysql -f docker/mysql/Dockerfile .
        - $TRAVIS_BUILD_DIR/scripts/travis/build.sh
        # Download minikube and start a Kubernetes cluster. The credential is
        # written into $HOME/.kube/ on the Travis CI VM.
        - $TRAVIS_BUILD_DIR/scripts/travis/setup_minikube.sh
        # Configure environment to use minikube's Docker daemon.
        - eval $(sudo minikube docker-env)
        # Bind mount $HOME/.kube into the container, so that test/workflow.sh
        # running in the container can call kubectl to submit jobs to the minikube
        # cluster.
        - docker run --rm --net=host
          -e SQLFLOW_WORKFLOW_STEP_IMAGE=sqlflow:ci
          -v /var/run/docker.sock:/var/run/docker.sock
          -v $HOME/.kube:/root/.kube
          -v /home/$USER/.minikube/:/home/$USER/.minikube/
          -v $TRAVIS_BUILD_DIR:/work -w /work
          sqlflow:ci scripts/test/workflow.sh
        - scripts/travis/upload_codecov.sh
    # Deploy stage runs only after all BuildAndTest jobs succeed.
    - stage: Deploy
      env: DESC="Deploy server Docker image"
      script:
        - $TRAVIS_BUILD_DIR/scripts/travis/deploy_docker.sh
    - env: DESC="Deploy macOS client"
      os: osx
      script:
        - $TRAVIS_BUILD_DIR/scripts/travis/deploy_client.sh
    - env: DESC="Deploy Linux client"
      os: linux
      dist: bionic
      script:
        - $TRAVIS_BUILD_DIR/scripts/travis/deploy_client.sh
    - env: DESC="Deploy Windows client"
      os: windows
      script:
        - $TRAVIS_BUILD_DIR/scripts/travis/deploy_client.sh