Extend e2e-tests: Add resources tests
* Change the deployed Argo version from v2.2.0 to v2.3.0-rc3
* Add resourceop test: Executes the resourceop_basic.py pipeline
  (a minimal sketch of such a pipeline is shown below).
  Extend run_basic_test.py with a '--params' argument.
* Add volumeop test: Executes the volumeop_sequential.py pipeline
* Change the VolumeOp access mode from RWM to RWO in volumeop_sequential.py
* We cannot use any other VolumeOp sample, because GCP does not
  support the ReadWriteMany access mode. We also cannot use a
  VolumeSnapshotOp sample, because VolumeSnapshots are only available in
  Alpha GCP clusters.
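
The resources test drives a ResourceOp-based pipeline, passing its credentials through the new '--params' flag. For orientation, below is a minimal, hypothetical sketch of such a pipeline; it is not the actual resourceop_basic.py sample, and the Secret manifest, step names, and image are illustrative assumptions only.

import kfp.dsl as dsl


@dsl.pipeline(
    name="ResourceOp sketch",
    description="Hypothetical minimal ResourceOp pipeline, not the real "
                "resourceop_basic.py sample."
)
def resourceop_sketch(username, password):
    # Build a Kubernetes Secret manifest from the pipeline parameters
    # (the values run_test.sh supplies via --params).
    secret = {
        "apiVersion": "v1",
        "kind": "Secret",
        "metadata": {"generateName": "resourceop-test-"},
        "type": "Opaque",
        "stringData": {
            "username": "%s" % username,
            "password": "%s" % password,
        },
    }

    # ResourceOp submits the manifest and exposes attributes of the
    # created object as outputs (here, its generated name).
    rop = dsl.ResourceOp(
        name="create-secret",
        k8s_resource=secret,
        action="create",
        attribute_outputs={"name": "{.metadata.name}"},
    )

    # A trivial downstream step that consumes the created resource's name.
    dsl.ContainerOp(
        name="echo",
        image="library/bash:4.4.23",
        command=["sh", "-c"],
        arguments=["echo created secret: %s" % rop.outputs["name"]],
    )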

Signed-off-by: Ilias Katsakioris <elikatsis@arrikto.com>
elikatsis committed May 16, 2019
1 parent b29fbb5 commit b324cd1
Showing 6 changed files with 55 additions and 3 deletions.
2 changes: 1 addition & 1 deletion samples/resourceops/volumeop_sequential.py
@@ -25,7 +25,7 @@ def volumeop_sequential():
name="mypvc",
resource_name="newpvc",
size="10Gi",
-modes=dsl.VOLUME_MODE_RWM
+modes=dsl.VOLUME_MODE_RWO
)

step1 = dsl.ContainerOp(
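
For context, the changed line sits inside the following pipeline. This is a condensed sketch of the surrounding sample, with the step bodies reduced to illustrative commands rather than copied verbatim:

import kfp.dsl as dsl


@dsl.pipeline(
    name="VolumeOp Sequential",
    description="Sequential steps sharing a newly created volume."
)
def volumeop_sequential():
    vop = dsl.VolumeOp(
        name="mypvc",
        resource_name="newpvc",
        size="10Gi",
        modes=dsl.VOLUME_MODE_RWO  # was dsl.VOLUME_MODE_RWM before this commit
    )

    # First step writes to the freshly created PVC.
    step1 = dsl.ContainerOp(
        name="step1",
        image="library/bash:4.4.23",
        command=["sh", "-c"],
        arguments=["echo 1 | tee /data/file1"],
        pvolumes={"/data": vop.volume}
    )

    # The next step reuses the same volume after step1 finishes, so
    # ReadWriteOnce is sufficient even though both steps mount the PVC.
    step2 = dsl.ContainerOp(
        name="step2",
        image="library/bash:4.4.23",
        command=["sh", "-c"],
        arguments=["cat /data/file1"],
        pvolumes={"/data": step1.pvolume}
    )

Because the steps run one after another and never mount the PVC concurrently, ReadWriteOnce works on GCP, where the ReadWriteMany access mode is not supported.
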
7 changes: 7 additions & 0 deletions test/deploy-kubeflow.sh
@@ -56,4 +56,11 @@ ${KUBEFLOW_SRC}/scripts/kfctl.sh apply platform
${KUBEFLOW_SRC}/scripts/kfctl.sh generate k8s
${KUBEFLOW_SRC}/scripts/kfctl.sh apply k8s

pushd ks_app
ks param set argo workflowControllerImage argoproj/workflow-controller:v2.3.0-rc3
ks param set argo executorImage argoproj/argoexec:v2.3.0-rc3
ks param set argo uiImage argoproj/argoui:v2.3.0-rc3
ks apply default -c argo
popd

gcloud container clusters get-credentials ${TEST_CLUSTER}
12 changes: 12 additions & 0 deletions test/e2e_test_gke_v2.yaml
@@ -188,6 +188,18 @@ spec:
value: "{{inputs.parameters.namespace}}"
- name: test-name
value: "recursion"
- name: run-resources-tests
template: run-basic-e2e-tests
arguments:
parameters:
- name: test-results-gcs-dir
value: "{{inputs.parameters.test-results-gcs-dir}}"
- name: sample-tests-image
value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.basic-e2e-tests-image-suffix}}"
- name: namespace
value: "{{inputs.parameters.namespace}}"
- name: test-name
value: "resources"

# Build and push image
- name: build-image
2 changes: 1 addition & 1 deletion test/install-argo.sh
@@ -24,7 +24,7 @@ kubectl create clusterrolebinding PROW_BINDING --clusterrole=cluster-admin --use
kubectl create clusterrolebinding DEFAULT_BINDING --clusterrole=cluster-admin --serviceaccount=default:default

echo "install argo"
-ARGO_VERSION=v2.2.0
+ARGO_VERSION=v2.3.0-rc3
mkdir -p ~/bin/
export PATH=~/bin/:$PATH
curl -sSL -o ~/bin/argo https://github.com/argoproj/argo/releases/download/$ARGO_VERSION/argo-linux-amd64
7 changes: 6 additions & 1 deletion test/sample-test/run_basic_test.py
@@ -18,6 +18,7 @@
from datetime import datetime
from kfp import Client
import utils
import json

###### Input/Output Instruction ######
# input: yaml
@@ -49,6 +50,10 @@ def parse_arguments():
type=str,
default='kubeflow',
help="namespace of the deployed pipeline system. Default: kubeflow")
parser.add_argument('--params',
type=str,
default='{}',
help="Parameters to pass to the pipeline (as JSON string). Default: {}")
args = parser.parse_args()
return args

@@ -76,7 +81,7 @@ def main():

###### Create Job ######
job_name = args.testname +'_sample'
-params = {}
+params = json.loads(args.params)
response = client.run_pipeline(experiment_id, job_name, args.input, params)
run_id = response.id
utils.add_junit_test(test_cases, 'create pipeline run', True)
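
To make the new flag concrete, here is a small self-contained sketch of the parsing path. The example value matches what run_test.sh passes for the resources test below; the final client.run_pipeline call is left as a comment because it needs a live pipeline deployment:

import argparse
import json

# Trimmed-down version of the argument handling added above.
parser = argparse.ArgumentParser()
parser.add_argument('--params',
                    type=str,
                    default='{}',
                    help="Parameters to pass to the pipeline (as JSON string). Default: {}")

# Simulate the invocation used by run_test.sh for the resources test.
args = parser.parse_args(['--params', '{"username": "test", "password": "test123"}'])

params = json.loads(args.params)
print(params)  # {'username': 'test', 'password': 'test123'}

# run_basic_test.py then forwards the dict unchanged:
#   response = client.run_pipeline(experiment_id, job_name, args.input, params)
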
28 changes: 28 additions & 0 deletions test/sample-test/run_test.sh
@@ -263,6 +263,34 @@ elif [ "$TEST_NAME" == "recursion" ]; then

echo "Copy the test results to GCS ${RESULTS_GCS_DIR}/"
gsutil cp ${SAMPLE_RECURSION_TEST_RESULT} ${RESULTS_GCS_DIR}/${SAMPLE_RECURSION_TEST_RESULT}
elif [ "$TEST_NAME" == "resources" ]; then
# ResourceOp
SAMPLE_RESOURCEOP_TEST_RESULT=junit_SampleResourceOpOutput.xml
SAMPLE_RESOURCEOP_TEST_OUTPUT=${RESULTS_GCS_DIR}

# Compile samples
cd ${BASE_DIR}/samples/resourceops
dsl-compile --py resourceop_basic.py --output resourceop_basic.tar.gz

cd "${TEST_DIR}"
python3 run_basic_test.py --input ${BASE_DIR}/samples/resourceops/resourceop_basic.tar.gz --result $SAMPLE_RESOURCEOP_TEST_RESULT --output $SAMPLE_RESOURCEOP_TEST_OUTPUT --testname resource --namespace ${NAMESPACE} --params '{"username": "test", "password": "test123"}'

echo "Copy the test results to GCS ${RESULTS_GCS_DIR}/"
gsutil cp ${SAMPLE_RESOURCEOP_TEST_RESULT} ${RESULTS_GCS_DIR}/${SAMPLE_RESOURCEOP_TEST_RESULT}

# VolumeOpSequential
SAMPLE_VOLUMEOP_TEST_RESULT=junit_SampleVolumeOpOutput.xml
SAMPLE_VOLUMEOP_TEST_OUTPUT=${RESULTS_GCS_DIR}

# Compile samples
cd ${BASE_DIR}/samples/resourceops
dsl-compile --py volumeop_sequential.py --output volumeop_sequential.tar.gz

cd "${TEST_DIR}"
python3 run_basic_test.py --input ${BASE_DIR}/samples/resourceops/volumeop_sequential.tar.gz --result $SAMPLE_VOLUMEOP_TEST_RESULT --output $SAMPLE_VOLUMEOP_TEST_OUTPUT --testname resource --namespace ${NAMESPACE}

echo "Copy the test results to GCS ${RESULTS_GCS_DIR}/"
gsutil cp ${SAMPLE_VOLUMEOP_TEST_RESULT} ${RESULTS_GCS_DIR}/${SAMPLE_VOLUMEOP_TEST_RESULT}
elif [ "$TEST_NAME" == "xgboost" ]; then
SAMPLE_XGBOOST_TEST_RESULT=junit_SampleXGBoostOutput.xml
SAMPLE_XGBOOST_TEST_OUTPUT=${RESULTS_GCS_DIR}
