Skip to content

Commit

Permalink
[FLINK-10869] [tests] Adjust end-2-end tests to new S3 credentials
Browse files Browse the repository at this point in the history
  • Loading branch information
StephanEwen committed Nov 16, 2018
1 parent 2c9feea commit 130ebac
Show file tree
Hide file tree
Showing 4 changed files with 35 additions and 35 deletions.
60 changes: 30 additions & 30 deletions flink-end-to-end-tests/test-scripts/common_s3.sh
Original file line number Diff line number Diff line change
Expand Up @@ -17,30 +17,30 @@
# limitations under the License.
################################################################################

if [[ -z "$ARTIFACTS_AWS_BUCKET" ]]; then
if [[ -z "$IT_CASE_S3_BUCKET" ]]; then
echo "Did not find AWS environment variables, NOT running the e2e test."
exit 0
else
echo "Found AWS bucket $ARTIFACTS_AWS_BUCKET, running the e2e test."
echo "Found AWS bucket $IT_CASE_S3_BUCKET, running the e2e test."
fi

if [[ -z "$ARTIFACTS_AWS_ACCESS_KEY" ]]; then
if [[ -z "$IT_CASE_S3_ACCESS_KEY" ]]; then
echo "Did not find AWS environment variables, NOT running the e2e test."
exit 0
else
echo "Found AWS access key $ARTIFACTS_AWS_ACCESS_KEY, running the e2e test."
echo "Found AWS access key $IT_CASE_S3_ACCESS_KEY, running the e2e test."
fi

if [[ -z "$ARTIFACTS_AWS_SECRET_KEY" ]]; then
if [[ -z "$IT_CASE_S3_SECRET_KEY" ]]; then
echo "Did not find AWS environment variables, NOT running the e2e test."
exit 0
else
echo "Found AWS secret key $ARTIFACTS_AWS_SECRET_KEY, running the e2e test."
echo "Found AWS secret key $IT_CASE_S3_SECRET_KEY, running the e2e test."
fi

AWS_REGION="${AWS_REGION:-eu-west-1}"
AWS_ACCESS_KEY=$ARTIFACTS_AWS_ACCESS_KEY
AWS_SECRET_KEY=$ARTIFACTS_AWS_SECRET_KEY
AWS_REGION="${AWS_REGION:-us-east-1}"
AWS_ACCESS_KEY=$IT_CASE_S3_ACCESS_KEY
AWS_SECRET_KEY=$IT_CASE_S3_SECRET_KEY

s3util="java -jar ${END_TO_END_DIR}/flink-e2e-test-utils/target/S3UtilProgram.jar"

Expand All @@ -49,8 +49,8 @@ s3util="java -jar ${END_TO_END_DIR}/flink-e2e-test-utils/target/S3UtilProgram.ja
#
# Globals:
# FLINK_DIR
# ARTIFACTS_AWS_ACCESS_KEY
# ARTIFACTS_AWS_SECRET_KEY
# IT_CASE_S3_ACCESS_KEY
# IT_CASE_S3_SECRET_KEY
# Arguments:
# None
# Returns:
Expand All @@ -68,8 +68,8 @@ function s3_setup {
trap s3_cleanup EXIT

cp $FLINK_DIR/opt/flink-s3-fs-hadoop-*.jar $FLINK_DIR/lib/
echo "s3.access-key: $ARTIFACTS_AWS_ACCESS_KEY" >> "$FLINK_DIR/conf/flink-conf.yaml"
echo "s3.secret-key: $ARTIFACTS_AWS_SECRET_KEY" >> "$FLINK_DIR/conf/flink-conf.yaml"
echo "s3.access-key: $IT_CASE_S3_ACCESS_KEY" >> "$FLINK_DIR/conf/flink-conf.yaml"
echo "s3.secret-key: $IT_CASE_S3_SECRET_KEY" >> "$FLINK_DIR/conf/flink-conf.yaml"
}

s3_setup
Expand All @@ -78,22 +78,22 @@ s3_setup
# List s3 objects by full path prefix.
#
# Globals:
# ARTIFACTS_AWS_BUCKET
# IT_CASE_S3_BUCKET
# Arguments:
# $1 - s3 full path key prefix
# Returns:
# List of s3 object keys, separated by newline
###################################
function s3_list {
  # List S3 object keys under the given full-path prefix, one key per line.
  # Globals: s3util (jar invocation), AWS_REGION, IT_CASE_S3_BUCKET
  # Arguments: $1 - s3 key full-path prefix
  AWS_REGION=$AWS_REGION \
  ${s3util} --action listByFullPathPrefix --s3prefix "$1" --bucket "$IT_CASE_S3_BUCKET"
}

###################################
# Download s3 object.
#
# Globals:
# ARTIFACTS_AWS_BUCKET
# IT_CASE_S3_BUCKET
# Arguments:
# $1 - local path to save file
# $2 - s3 object key
Expand All @@ -102,14 +102,14 @@ function s3_list {
###################################
function s3_get {
  # Download a single S3 object to a local file.
  # Globals: s3util (jar invocation), AWS_REGION, IT_CASE_S3_BUCKET
  # Arguments: $1 - local path to save file; $2 - s3 object key
  AWS_REGION=$AWS_REGION \
  ${s3util} --action downloadFile --localFile "$1" --s3file "$2" --bucket "$IT_CASE_S3_BUCKET"
}

###################################
# Download s3 objects to folder by full path prefix.
#
# Globals:
# ARTIFACTS_AWS_BUCKET
# IT_CASE_S3_BUCKET
# Arguments:
# $1 - local path to save folder with files
# $2 - s3 key full path prefix
Expand All @@ -121,14 +121,14 @@ function s3_get_by_full_path_and_filename_prefix {
local file_prefix="${3-}"
AWS_REGION=$AWS_REGION \
${s3util} --action downloadByFullPathAndFileNamePrefix \
--localFolder "$1" --s3prefix "$2" --s3filePrefix "${file_prefix}" --bucket $ARTIFACTS_AWS_BUCKET
--localFolder "$1" --s3prefix "$2" --s3filePrefix "${file_prefix}" --bucket $IT_CASE_S3_BUCKET
}

###################################
# Upload file to s3 object.
#
# Globals:
# ARTIFACTS_AWS_BUCKET
# IT_CASE_S3_BUCKET
# Arguments:
# $1 - local file to upload
# $2 - s3 bucket
Expand All @@ -144,8 +144,8 @@ function s3_put {
contentType="application/octet-stream"
dateValue=`date -R`
stringToSign="PUT\n\n${contentType}\n${dateValue}\n${resource}"
s3Key=$ARTIFACTS_AWS_ACCESS_KEY
s3Secret=$ARTIFACTS_AWS_SECRET_KEY
s3Key=$IT_CASE_S3_ACCESS_KEY
s3Secret=$IT_CASE_S3_SECRET_KEY
signature=`echo -en ${stringToSign} | openssl sha1 -hmac ${s3Secret} -binary | base64`
curl -X PUT -T "${local_file}" \
-H "Host: ${bucket}.s3.amazonaws.com" \
Expand Down Expand Up @@ -174,8 +174,8 @@ function s3_delete {
contentType="application/octet-stream"
dateValue=`date -R`
stringToSign="DELETE\n\n${contentType}\n${dateValue}\n${resource}"
s3Key=$ARTIFACTS_AWS_ACCESS_KEY
s3Secret=$ARTIFACTS_AWS_SECRET_KEY
s3Key=$IT_CASE_S3_ACCESS_KEY
s3Secret=$IT_CASE_S3_SECRET_KEY
signature=`echo -en ${stringToSign} | openssl sha1 -hmac ${s3Secret} -binary | base64`
curl -X DELETE \
-H "Host: ${bucket}.s3.amazonaws.com" \
Expand All @@ -189,15 +189,15 @@ function s3_delete {
# Delete s3 objects by full path prefix.
#
# Globals:
# ARTIFACTS_AWS_BUCKET
# IT_CASE_S3_BUCKET
# Arguments:
# $1 - s3 key full path prefix
# Returns:
# None
###################################
function s3_delete_by_full_path_prefix {
  # Delete all S3 objects whose keys match the given full-path prefix.
  # Globals: s3util (jar invocation), AWS_REGION, IT_CASE_S3_BUCKET
  # Arguments: $1 - s3 key full-path prefix
  AWS_REGION=$AWS_REGION \
  ${s3util} --action deleteByFullPathPrefix --s3prefix "$1" --bucket "$IT_CASE_S3_BUCKET"
}

###################################
Expand All @@ -206,7 +206,7 @@ function s3_delete_by_full_path_prefix {
# because SQL is used to query the s3 object.
#
# Globals:
# ARTIFACTS_AWS_BUCKET
# IT_CASE_S3_BUCKET
# Arguments:
# $1 - s3 file object key
# $2 - s3 bucket
Expand All @@ -215,7 +215,7 @@ function s3_delete_by_full_path_prefix {
###################################
function s3_get_number_of_lines_in_file {
  # Count lines of an S3 object without downloading it (S3 Select on the server side).
  # Globals: s3util (jar invocation), AWS_REGION, IT_CASE_S3_BUCKET
  # Arguments: $1 - s3 file object key
  AWS_REGION=$AWS_REGION \
  ${s3util} --action numberOfLinesInFile --s3file "$1" --bucket "$IT_CASE_S3_BUCKET"
}

###################################
Expand All @@ -224,7 +224,7 @@ function s3_get_number_of_lines_in_file {
# because SQL is used to query the s3 objects.
#
# Globals:
# ARTIFACTS_AWS_BUCKET
# IT_CASE_S3_BUCKET
# Arguments:
# $1 - s3 key prefix
# $2 - s3 bucket
Expand All @@ -236,5 +236,5 @@ function s3_get_number_of_lines_by_prefix {
local file_prefix="${3-}"
AWS_REGION=$AWS_REGION \
${s3util} --action numberOfLinesInFilesWithFullAndNamePrefix \
--s3prefix "$1" --s3filePrefix "${file_prefix}" --bucket $ARTIFACTS_AWS_BUCKET
--s3prefix "$1" --s3filePrefix "${file_prefix}" --bucket $IT_CASE_S3_BUCKET
}
4 changes: 2 additions & 2 deletions flink-end-to-end-tests/test-scripts/test_shaded_hadoop_s3a.sh
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,10 @@
source "$(dirname "$0")"/common.sh
source "$(dirname "$0")"/common_s3.sh

s3_put $TEST_INFRA_DIR/test-data/words $ARTIFACTS_AWS_BUCKET flink-end-to-end-test-shaded-s3a
s3_put $TEST_INFRA_DIR/test-data/words $IT_CASE_S3_BUCKET temp/flink-end-to-end-test-shaded-s3a
# make sure we delete the file at the end
function shaded_s3a_cleanup {
  # Remove the uploaded test file; registered as an EXIT trap so it always runs.
  # Globals: IT_CASE_S3_BUCKET (read); calls s3_delete from common_s3.sh
  s3_delete "$IT_CASE_S3_BUCKET" temp/flink-end-to-end-test-shaded-s3a
}
trap shaded_s3a_cleanup EXIT

Expand Down
4 changes: 2 additions & 2 deletions flink-end-to-end-tests/test-scripts/test_shaded_presto_s3.sh
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,10 @@
source "$(dirname "$0")"/common.sh
source "$(dirname "$0")"/common_s3.sh

s3_put $TEST_INFRA_DIR/test-data/words $ARTIFACTS_AWS_BUCKET flink-end-to-end-test-shaded-presto-s3
s3_put $TEST_INFRA_DIR/test-data/words $IT_CASE_S3_BUCKET temp/flink-end-to-end-test-shaded-presto-s3
# make sure we delete the file at the end
function shaded_presto_s3_cleanup {
  # Remove the uploaded test file; registered as an EXIT trap so it always runs.
  # Globals: IT_CASE_S3_BUCKET (read); calls s3_delete from common_s3.sh
  s3_delete "$IT_CASE_S3_BUCKET" temp/flink-end-to-end-test-shaded-presto-s3
}
trap shaded_presto_s3_cleanup EXIT

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ set_conf_ssl "mutual"

OUT=out
OUTPUT_PATH="$TEST_DATA_DIR/$OUT"
S3_OUTPUT_PATH="s3://$ARTIFACTS_AWS_BUCKET/$OUT"
S3_OUTPUT_PATH="s3://$IT_CASE_S3_BUCKET/temp/$OUT"

mkdir -p $OUTPUT_PATH

Expand Down

0 comments on commit 130ebac

Please sign in to comment.