From b324cd1f5934930542632e547b0f60a81fa58641 Mon Sep 17 00:00:00 2001 From: Ilias Katsakioris Date: Thu, 16 May 2019 13:02:50 +0300 Subject: [PATCH] Extend e2e-tests: Add resources tests * Change Argo version deployed from v2.2.0 to v2.3.0-rc3 * Add resourceop test: Executes the resourceop_basic.py Extend run_basic_test.py with '--params' argument. * Add volumeop test: Executes the volumeop_sequential.py pipeline * Change modes from RWM to RWO for VolumeOp in volumeop_sequential.py * We cannot use some other sample with VolumeOps because GCP does not support the ReadWriteMany access mode. We also cannot use a VolumeSnapshotOp sample, because VolumeSnapshots only exist in Alpha GCP clusters. Signed-off-by: Ilias Katsakioris --- samples/resourceops/volumeop_sequential.py | 2 +- test/deploy-kubeflow.sh | 7 ++++++ test/e2e_test_gke_v2.yaml | 12 ++++++++++ test/install-argo.sh | 2 +- test/sample-test/run_basic_test.py | 7 +++++- test/sample-test/run_test.sh | 28 ++++++++++++++++++++++ 6 files changed, 55 insertions(+), 3 deletions(-) diff --git a/samples/resourceops/volumeop_sequential.py b/samples/resourceops/volumeop_sequential.py index 3c8b0317c827..367107a6be83 100644 --- a/samples/resourceops/volumeop_sequential.py +++ b/samples/resourceops/volumeop_sequential.py @@ -25,7 +25,7 @@ def volumeop_sequential(): name="mypvc", resource_name="newpvc", size="10Gi", - modes=dsl.VOLUME_MODE_RWM + modes=dsl.VOLUME_MODE_RWO ) step1 = dsl.ContainerOp( diff --git a/test/deploy-kubeflow.sh b/test/deploy-kubeflow.sh index 8c2c7a3fb42e..1fa54522b6d4 100755 --- a/test/deploy-kubeflow.sh +++ b/test/deploy-kubeflow.sh @@ -56,4 +56,11 @@ ${KUBEFLOW_SRC}/scripts/kfctl.sh apply platform ${KUBEFLOW_SRC}/scripts/kfctl.sh generate k8s ${KUBEFLOW_SRC}/scripts/kfctl.sh apply k8s +pushd ks_app +ks param set argo workflowControllerImage argoproj/workflow-controller:v2.3.0-rc3 +ks param set argo executorImage argoproj/argoexec:v2.3.0-rc3 +ks param set argo uiImage argoproj/argoui:v2.3.0-rc3 +ks 
apply default -c argo +popd + gcloud container clusters get-credentials ${TEST_CLUSTER} diff --git a/test/e2e_test_gke_v2.yaml b/test/e2e_test_gke_v2.yaml index 4a9c3c179f97..f8cb32b22278 100644 --- a/test/e2e_test_gke_v2.yaml +++ b/test/e2e_test_gke_v2.yaml @@ -188,6 +188,18 @@ spec: value: "{{inputs.parameters.namespace}}" - name: test-name value: "recursion" + - name: run-resources-tests + template: run-basic-e2e-tests + arguments: + parameters: + - name: test-results-gcs-dir + value: "{{inputs.parameters.test-results-gcs-dir}}" + - name: sample-tests-image + value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.basic-e2e-tests-image-suffix}}" + - name: namespace + value: "{{inputs.parameters.namespace}}" + - name: test-name + value: "resources" # Build and push image - name: build-image diff --git a/test/install-argo.sh b/test/install-argo.sh index ac5bdcbd17a1..cacb3db2d55d 100755 --- a/test/install-argo.sh +++ b/test/install-argo.sh @@ -24,7 +24,7 @@ kubectl create clusterrolebinding PROW_BINDING --clusterrole=cluster-admin --use kubectl create clusterrolebinding DEFAULT_BINDING --clusterrole=cluster-admin --serviceaccount=default:default echo "install argo" -ARGO_VERSION=v2.2.0 +ARGO_VERSION=v2.3.0-rc3 mkdir -p ~/bin/ export PATH=~/bin/:$PATH curl -sSL -o ~/bin/argo https://github.com/argoproj/argo/releases/download/$ARGO_VERSION/argo-linux-amd64 diff --git a/test/sample-test/run_basic_test.py b/test/sample-test/run_basic_test.py index b3b034ea0561..8a30f3a87a42 100644 --- a/test/sample-test/run_basic_test.py +++ b/test/sample-test/run_basic_test.py @@ -18,6 +18,7 @@ from datetime import datetime from kfp import Client import utils +import json ###### Input/Output Instruction ###### # input: yaml @@ -49,6 +50,10 @@ def parse_arguments(): type=str, default='kubeflow', help="namespace of the deployed pipeline system. 
Default: kubeflow") + parser.add_argument('--params', + type=str, + default='{}', + help="Parameters to pass to the pipeline (as JSON string). Default: {}") args = parser.parse_args() return args @@ -76,7 +81,7 @@ def main(): ###### Create Job ###### job_name = args.testname +'_sample' - params = {} + params = json.loads(args.params) response = client.run_pipeline(experiment_id, job_name, args.input, params) run_id = response.id utils.add_junit_test(test_cases, 'create pipeline run', True) diff --git a/test/sample-test/run_test.sh b/test/sample-test/run_test.sh index 6f524c4db56c..4a1ec982ee64 100755 --- a/test/sample-test/run_test.sh +++ b/test/sample-test/run_test.sh @@ -263,6 +263,34 @@ elif [ "$TEST_NAME" == "recursion" ]; then echo "Copy the test results to GCS ${RESULTS_GCS_DIR}/" gsutil cp ${SAMPLE_RECURSION_TEST_RESULT} ${RESULTS_GCS_DIR}/${SAMPLE_RECURSION_TEST_RESULT} +elif [ "$TEST_NAME" == "resources" ]; then + # ResourceOp + SAMPLE_RESOURCEOP_TEST_RESULT=junit_SampleResourceOpOutput.xml + SAMPLE_RESOURCEOP_TEST_OUTPUT=${RESULTS_GCS_DIR} + + # Compile samples + cd ${BASE_DIR}/samples/resourceops + dsl-compile --py resourceop_basic.py --output resourceop_basic.tar.gz + + cd "${TEST_DIR}" + python3 run_basic_test.py --input ${BASE_DIR}/samples/basic/resourceop_basic.tar.gz --result $SAMPLE_RESOURCEOP_TEST_RESULT --output $SAMPLE_RESOURCEOP_TEST_OUTPUT --testname resource --namespace ${NAMESPACE} --params '{"username": "test", "password": "test123"}' + + echo "Copy the test results to GCS ${RESULTS_GCS_DIR}/" + gsutil cp ${SAMPLE_RESOURCEOP_TEST_RESULT} ${RESULTS_GCS_DIR}/${SAMPLE_RESOURCEOP_TEST_RESULT} + + # VolumeOpSequential + SAMPLE_VOLUMEOP_TEST_RESULT=junit_SampleVolumeOpOutput.xml + SAMPLE_VOLUMEOP_TEST_OUTPUT=${RESULTS_GCS_DIR} + + # Compile samples + cd ${BASE_DIR}/samples/resourceops + dsl-compile --py volumeop_sequential.py --output volumeop_sequential.tar.gz + + cd "${TEST_DIR}" + python3 run_basic_test.py --input 
${BASE_DIR}/samples/resourceops/volumeop_sequential.tar.gz --result $SAMPLE_VOLUMEOP_TEST_RESULT --output $SAMPLE_VOLUMEOP_TEST_OUTPUT --testname resource --namespace ${NAMESPACE} + + echo "Copy the test results to GCS ${RESULTS_GCS_DIR}/" + gsutil cp ${SAMPLE_VOLUMEOP_TEST_RESULT} ${RESULTS_GCS_DIR}/${SAMPLE_VOLUMEOP_TEST_RESULT} elif [ "$TEST_NAME" == "xgboost" ]; then SAMPLE_XGBOOST_TEST_RESULT=junit_SampleXGBoostOutput.xml SAMPLE_XGBOOST_TEST_OUTPUT=${RESULTS_GCS_DIR}