From b692535d9244411afbb8aff5f72d26da65abf9fc Mon Sep 17 00:00:00 2001 From: Ricardo Martinelli de Oliveira Date: Thu, 6 Oct 2022 19:31:28 -0300 Subject: [PATCH 1/2] Rename to Data Science Pipelines --- .../OWNERS | 0 data-science-pipelines/README.md | 78 +++++++++++ .../base/configmaps/ds-pipeline-config.yaml | 6 +- .../configmaps/pipeline-install-config.yaml | 6 +- .../scheduledworkflows.yaml | 2 +- .../customresourcedefinitions/viewers.yaml | 2 +- .../ds-pipeline-persistenceagent.yaml | 60 ++++++++ .../ds-pipeline-scheduledworkflow.yaml | 58 ++++++++ .../deployments/ds-pipeline-viewer-crd.yaml | 59 ++++++++ .../ds-pipeline-visualizationserver.yaml | 31 +++-- .../base/deployments/ds-pipeline.yaml | 57 ++++---- .../base/kustomization.yaml | 120 ++++++++++++++++ .../base/params.env | 4 +- .../base/params.yaml | 0 .../ds-pipeline-persistenceagent-binding.yaml | 8 +- ...ds-pipeline-scheduledworkflow-binding.yaml | 13 ++ .../ds-pipeline-viewer-crd-binding.yaml | 13 ++ .../base/rolebindings/ds-pipeline.yaml | 10 +- .../rolebindings/pipeline-runner-binding.yaml | 2 +- .../ds-pipeline-persistenceagent-role.yaml | 4 +- .../ds-pipeline-scheduledworkflow-role.yaml | 6 +- .../ds-pipeline-viewer-controller-role.yaml | 4 +- .../base/roles/ds-pipeline.yaml | 6 +- .../base/roles/pipeline-runner.yaml | 2 +- .../ds-pipeline-container-builder.yaml | 6 + .../ds-pipeline-persistenceagent.yaml | 6 + .../ds-pipeline-scheduledworkflow.yaml | 6 + ...s-pipeline-viewer-crd-service-account.yaml | 6 + .../serviceaccounts/ds-pipeline-viewer.yaml | 6 + .../ds-pipeline-visualizationserver.yaml | 6 + .../base/serviceaccounts/ds-pipeline.yaml | 6 + .../base/serviceaccounts/pipeline-runner.yaml | 2 +- .../ds-pipeline-visualizationserver.yaml | 15 ++ .../base/services/ds-pipeline.yaml | 8 +- .../metadata-envoy-deployment.yaml | 12 ++ .../deployments/metadata-grpc-deployment.yaml | 10 +- .../deployments/metadata-writer.yaml | 16 +++ .../component-mlmd/kustomization.yaml | 40 ++++++ ...low-pipelines-metadata-writer-binding.yaml | 0 ...beflow-pipelines-metadata-writer-role.yaml | 0 .../kubeflow-pipelines-metadata-writer.yaml | 0 .../serviceaccounts/metadata-grpc-server.yaml | 0 .../services/metadata-envoy-service.yaml | 0 .../services/metadata-grpc-service.yaml | 0 .../configmaps/ds-pipeline-ui-configmap.yaml | 13 ++ .../deployments/ds-pipeline-ui.yaml | 32 +++-- .../ds-pipeline-ui}/kustomization.yaml | 26 ++-- .../rolebindings/auth-delegator.yaml | 5 +- .../rolebindings/ds-pipeline-ui.yaml | 10 +- .../ds-pipeline-ui/roles/ds-pipeline-ui.yaml | 6 +- .../ds-pipeline-ui/routes/ds-pipeline-ui.yaml | 4 +- .../serviceaccounts/ds-pipeline-ui.yaml | 6 +- .../services/ds-pipeline-ui.yaml | 18 +++ .../kustomization.yaml | 8 ++ ...data-science-pipelines-odhapplication.yaml | 71 ++++++++++ .../data-science-pipelines-odhquickstart.yaml | 89 ++++++++++++ .../deployments/mariadb.yaml | 81 +++++++++++ .../metadata-store-mariadb/kustomization.yaml | 30 ++++ .../mariadb-pv-claim.yaml | 12 ++ .../secrets/mysql-secret.yaml | 9 ++ .../serviceaccounts/mysql.yaml | 6 + .../services/mysql.yaml | 14 ++ .../deployments/mysql.yaml | 21 ++- .../metadata-store-mysql/kustomization.yaml | 0 .../mysql-pv-claim.yaml | 0 .../secrets/mysql-secret.yaml | 0 .../serviceaccounts/mysql.yaml | 0 .../metadata-store-mysql/services/mysql.yaml | 0 .../deployments/postgresql.yaml | 9 +- .../kustomization.yaml | 0 .../postgresql-pv-claim.yaml | 0 .../secrets/postgresql-secret.yaml | 0 .../serviceaccounts/postgresql.yaml | 0 .../services/postgresql.yaml | 0 
.../object-store-minio/deployments/minio.yaml | 25 +++- .../object-store-minio/kustomization.yaml | 0 .../persistentvolumeclaims/minio-pvc.yaml | 2 +- .../secrets/mlpipeline-minio-artifact.yaml | 2 +- .../services/minio-service.yaml | 4 +- .../prometheus/kustomization.yaml | 0 .../prometheus/monitor.yaml | 4 +- ...yaml => kfctl_openshift_ds-pipelines.yaml} | 6 +- ml-pipelines/README.md | 78 ----------- .../ml-pipeline-persistenceagent.yaml | 38 ----- .../ml-pipeline-scheduledworkflow.yaml | 35 ----- .../deployments/ml-pipeline-viewer-crd.yaml | 36 ----- ml-pipelines/base/kustomization.yaml | 131 ------------------ ...ml-pipeline-scheduledworkflow-binding.yaml | 13 -- .../ml-pipeline-viewer-crd-binding.yaml | 13 -- .../kubeflow-pipelines-container-builder.yaml | 6 - .../kubeflow-pipelines-viewer.yaml | 6 - .../ml-pipeline-persistenceagent.yaml | 6 - .../ml-pipeline-scheduledworkflow.yaml | 6 - ...l-pipeline-viewer-crd-service-account.yaml | 6 - .../ml-pipeline-visualizationserver.yaml | 6 - .../base/serviceaccounts/ml-pipeline.yaml | 6 - .../ml-pipeline-visualizationserver.yaml | 15 -- .../configmaps/ml-pipeline-ui-configmap.yaml | 13 -- .../services/ml-pipeline-ui.yaml | 18 --- 99 files changed, 1071 insertions(+), 570 deletions(-) rename {ml-pipelines => data-science-pipelines}/OWNERS (100%) create mode 100644 data-science-pipelines/README.md rename ml-pipelines/base/configmaps/kfp-tekton-config.yaml => data-science-pipelines/base/configmaps/ds-pipeline-config.yaml (87%) rename {ml-pipelines => data-science-pipelines}/base/configmaps/pipeline-install-config.yaml (90%) rename {ml-pipelines => data-science-pipelines}/base/customresourcedefinitions/scheduledworkflows.yaml (95%) rename {ml-pipelines => data-science-pipelines}/base/customresourcedefinitions/viewers.yaml (94%) create mode 100644 data-science-pipelines/base/deployments/ds-pipeline-persistenceagent.yaml create mode 100644 data-science-pipelines/base/deployments/ds-pipeline-scheduledworkflow.yaml create mode 100644 data-science-pipelines/base/deployments/ds-pipeline-viewer-crd.yaml rename ml-pipelines/base/deployments/ml-pipeline-visualizationserver.yaml => data-science-pipelines/base/deployments/ds-pipeline-visualizationserver.yaml (64%) rename ml-pipelines/base/deployments/ml-pipeline.yaml => data-science-pipelines/base/deployments/ds-pipeline.yaml (78%) create mode 100644 data-science-pipelines/base/kustomization.yaml rename {ml-pipelines => data-science-pipelines}/base/params.env (56%) rename {ml-pipelines => data-science-pipelines}/base/params.yaml (100%) rename ml-pipelines/base/rolebindings/ml-pipeline-persistenceagent-binding.yaml => data-science-pipelines/base/rolebindings/ds-pipeline-persistenceagent-binding.yaml (50%) create mode 100644 data-science-pipelines/base/rolebindings/ds-pipeline-scheduledworkflow-binding.yaml create mode 100644 data-science-pipelines/base/rolebindings/ds-pipeline-viewer-crd-binding.yaml rename ml-pipelines/base/rolebindings/ml-pipeline.yaml => data-science-pipelines/base/rolebindings/ds-pipeline.yaml (57%) rename {ml-pipelines => data-science-pipelines}/base/rolebindings/pipeline-runner-binding.yaml (84%) rename ml-pipelines/base/roles/ml-pipeline-persistenceagent-role.yaml => data-science-pipelines/base/roles/ds-pipeline-persistenceagent-role.yaml (85%) rename ml-pipelines/base/roles/ml-pipeline-scheduledworkflow-role.yaml => data-science-pipelines/base/roles/ds-pipeline-scheduledworkflow-role.yaml (87%) rename ml-pipelines/base/roles/ml-pipeline-viewer-controller-role.yaml => 
data-science-pipelines/base/roles/ds-pipeline-viewer-controller-role.yaml (83%) rename ml-pipelines/base/roles/ml-pipeline.yaml => data-science-pipelines/base/roles/ds-pipeline.yaml (93%) rename {ml-pipelines => data-science-pipelines}/base/roles/pipeline-runner.yaml (96%) create mode 100644 data-science-pipelines/base/serviceaccounts/ds-pipeline-container-builder.yaml create mode 100644 data-science-pipelines/base/serviceaccounts/ds-pipeline-persistenceagent.yaml create mode 100644 data-science-pipelines/base/serviceaccounts/ds-pipeline-scheduledworkflow.yaml create mode 100644 data-science-pipelines/base/serviceaccounts/ds-pipeline-viewer-crd-service-account.yaml create mode 100644 data-science-pipelines/base/serviceaccounts/ds-pipeline-viewer.yaml create mode 100644 data-science-pipelines/base/serviceaccounts/ds-pipeline-visualizationserver.yaml create mode 100644 data-science-pipelines/base/serviceaccounts/ds-pipeline.yaml rename {ml-pipelines => data-science-pipelines}/base/serviceaccounts/pipeline-runner.yaml (62%) create mode 100644 data-science-pipelines/base/services/ds-pipeline-visualizationserver.yaml rename ml-pipelines/base/services/ml-pipeline.yaml => data-science-pipelines/base/services/ds-pipeline.yaml (63%) rename {ml-pipelines/base => data-science-pipelines/overlays/component-mlmd}/deployments/metadata-envoy-deployment.yaml (67%) rename {ml-pipelines/base => data-science-pipelines/overlays/component-mlmd}/deployments/metadata-grpc-deployment.yaml (98%) rename {ml-pipelines/base => data-science-pipelines/overlays/component-mlmd}/deployments/metadata-writer.yaml (68%) create mode 100644 data-science-pipelines/overlays/component-mlmd/kustomization.yaml rename {ml-pipelines/base => data-science-pipelines/overlays/component-mlmd}/rolebindings/kubeflow-pipelines-metadata-writer-binding.yaml (100%) rename {ml-pipelines/base => data-science-pipelines/overlays/component-mlmd}/roles/kubeflow-pipelines-metadata-writer-role.yaml (100%) rename {ml-pipelines/base => data-science-pipelines/overlays/component-mlmd}/serviceaccounts/kubeflow-pipelines-metadata-writer.yaml (100%) rename {ml-pipelines/base => data-science-pipelines/overlays/component-mlmd}/serviceaccounts/metadata-grpc-server.yaml (100%) rename {ml-pipelines/base => data-science-pipelines/overlays/component-mlmd}/services/metadata-envoy-service.yaml (100%) rename {ml-pipelines/base => data-science-pipelines/overlays/component-mlmd}/services/metadata-grpc-service.yaml (100%) create mode 100644 data-science-pipelines/overlays/ds-pipeline-ui/configmaps/ds-pipeline-ui-configmap.yaml rename ml-pipelines/overlays/ml-pipeline-ui/deployments/ml-pipeline-ui.yaml => data-science-pipelines/overlays/ds-pipeline-ui/deployments/ds-pipeline-ui.yaml (83%) rename {ml-pipelines/overlays/ml-pipeline-ui => data-science-pipelines/overlays/ds-pipeline-ui}/kustomization.yaml (59%) rename {ml-pipelines/overlays/ml-pipeline-ui => data-science-pipelines/overlays/ds-pipeline-ui}/rolebindings/auth-delegator.yaml (69%) rename ml-pipelines/overlays/ml-pipeline-ui/rolebindings/ml-pipeline-ui.yaml => data-science-pipelines/overlays/ds-pipeline-ui/rolebindings/ds-pipeline-ui.yaml (55%) rename ml-pipelines/overlays/ml-pipeline-ui/roles/ml-pipeline-ui.yaml => data-science-pipelines/overlays/ds-pipeline-ui/roles/ds-pipeline-ui.yaml (89%) rename ml-pipelines/overlays/ml-pipeline-ui/routes/ml-pipeline-ui.yaml => data-science-pipelines/overlays/ds-pipeline-ui/routes/ds-pipeline-ui.yaml (80%) rename 
ml-pipelines/overlays/ml-pipeline-ui/serviceaccounts/ml-pipeline-ui.yaml => data-science-pipelines/overlays/ds-pipeline-ui/serviceaccounts/ds-pipeline-ui.yaml (63%) create mode 100644 data-science-pipelines/overlays/ds-pipeline-ui/services/ds-pipeline-ui.yaml create mode 100644 data-science-pipelines/overlays/integration-odhdashboard/kustomization.yaml create mode 100644 data-science-pipelines/overlays/integration-odhdashboard/odhapplications/data-science-pipelines-odhapplication.yaml create mode 100644 data-science-pipelines/overlays/integration-odhdashboard/odhquickstarts/data-science-pipelines-odhquickstart.yaml create mode 100644 data-science-pipelines/overlays/metadata-store-mariadb/deployments/mariadb.yaml create mode 100644 data-science-pipelines/overlays/metadata-store-mariadb/kustomization.yaml create mode 100644 data-science-pipelines/overlays/metadata-store-mariadb/persistentvolumeclaims/mariadb-pv-claim.yaml create mode 100644 data-science-pipelines/overlays/metadata-store-mariadb/secrets/mysql-secret.yaml create mode 100644 data-science-pipelines/overlays/metadata-store-mariadb/serviceaccounts/mysql.yaml create mode 100644 data-science-pipelines/overlays/metadata-store-mariadb/services/mysql.yaml rename {ml-pipelines => data-science-pipelines}/overlays/metadata-store-mysql/deployments/mysql.yaml (65%) rename {ml-pipelines => data-science-pipelines}/overlays/metadata-store-mysql/kustomization.yaml (100%) rename {ml-pipelines => data-science-pipelines}/overlays/metadata-store-mysql/persistentvolumeclaims/mysql-pv-claim.yaml (100%) rename {ml-pipelines => data-science-pipelines}/overlays/metadata-store-mysql/secrets/mysql-secret.yaml (100%) rename {ml-pipelines => data-science-pipelines}/overlays/metadata-store-mysql/serviceaccounts/mysql.yaml (100%) rename {ml-pipelines => data-science-pipelines}/overlays/metadata-store-mysql/services/mysql.yaml (100%) rename {ml-pipelines => data-science-pipelines}/overlays/metadata-store-postgresql/deployments/postgresql.yaml (94%) rename {ml-pipelines => data-science-pipelines}/overlays/metadata-store-postgresql/kustomization.yaml (100%) rename {ml-pipelines => data-science-pipelines}/overlays/metadata-store-postgresql/persistentvolumeclaims/postgresql-pv-claim.yaml (100%) rename {ml-pipelines => data-science-pipelines}/overlays/metadata-store-postgresql/secrets/postgresql-secret.yaml (100%) rename {ml-pipelines => data-science-pipelines}/overlays/metadata-store-postgresql/serviceaccounts/postgresql.yaml (100%) rename {ml-pipelines => data-science-pipelines}/overlays/metadata-store-postgresql/services/postgresql.yaml (100%) rename {ml-pipelines => data-science-pipelines}/overlays/object-store-minio/deployments/minio.yaml (61%) rename {ml-pipelines => data-science-pipelines}/overlays/object-store-minio/kustomization.yaml (100%) rename {ml-pipelines => data-science-pipelines}/overlays/object-store-minio/persistentvolumeclaims/minio-pvc.yaml (78%) rename {ml-pipelines => data-science-pipelines}/overlays/object-store-minio/secrets/mlpipeline-minio-artifact.yaml (78%) rename {ml-pipelines => data-science-pipelines}/overlays/object-store-minio/services/minio-service.yaml (68%) rename {ml-pipelines => data-science-pipelines}/prometheus/kustomization.yaml (100%) rename {ml-pipelines => data-science-pipelines}/prometheus/monitor.yaml (67%) rename kfdef/{kfctl_openshift_ml-pipelines.yaml => kfctl_openshift_ds-pipelines.yaml} (83%) delete mode 100644 ml-pipelines/README.md delete mode 100644 
ml-pipelines/base/deployments/ml-pipeline-persistenceagent.yaml delete mode 100644 ml-pipelines/base/deployments/ml-pipeline-scheduledworkflow.yaml delete mode 100644 ml-pipelines/base/deployments/ml-pipeline-viewer-crd.yaml delete mode 100644 ml-pipelines/base/kustomization.yaml delete mode 100644 ml-pipelines/base/rolebindings/ml-pipeline-scheduledworkflow-binding.yaml delete mode 100644 ml-pipelines/base/rolebindings/ml-pipeline-viewer-crd-binding.yaml delete mode 100644 ml-pipelines/base/serviceaccounts/kubeflow-pipelines-container-builder.yaml delete mode 100644 ml-pipelines/base/serviceaccounts/kubeflow-pipelines-viewer.yaml delete mode 100644 ml-pipelines/base/serviceaccounts/ml-pipeline-persistenceagent.yaml delete mode 100644 ml-pipelines/base/serviceaccounts/ml-pipeline-scheduledworkflow.yaml delete mode 100644 ml-pipelines/base/serviceaccounts/ml-pipeline-viewer-crd-service-account.yaml delete mode 100644 ml-pipelines/base/serviceaccounts/ml-pipeline-visualizationserver.yaml delete mode 100644 ml-pipelines/base/serviceaccounts/ml-pipeline.yaml delete mode 100644 ml-pipelines/base/services/ml-pipeline-visualizationserver.yaml delete mode 100644 ml-pipelines/overlays/ml-pipeline-ui/configmaps/ml-pipeline-ui-configmap.yaml delete mode 100644 ml-pipelines/overlays/ml-pipeline-ui/services/ml-pipeline-ui.yaml diff --git a/ml-pipelines/OWNERS b/data-science-pipelines/OWNERS similarity index 100% rename from ml-pipelines/OWNERS rename to data-science-pipelines/OWNERS diff --git a/data-science-pipelines/README.md b/data-science-pipelines/README.md new file mode 100644 index 000000000..127b7c70f --- /dev/null +++ b/data-science-pipelines/README.md @@ -0,0 +1,78 @@ +# Data Science Pipelines + +Data Science Pipelines is the Open Data Hub's pipeline solution for data scientists. It is built on top of the upstream [Kubeflow Pipelines](https://github.com/kubeflow/pipelines) and [kfp-tekton](https://github.com/kubeflow/kfp-tekton) projects. The Open Data Hub community has a [fork](https://github.com/opendatahub-io/data-science-pipelines) of the upstream project under the Open Data Hub org. + + +## Installation + +### Prerequisites + +1. The cluster needs to be OpenShift 4.9 or higher 2. OpenShift Pipelines 1.7.2 or higher needs to be installed on the cluster 3. The Open Data Hub operator needs to be installed 4. The default installation namespace for Data Science Pipelines is `odh-applications`. This namespace will need to be created. If you wish to install in a custom namespace, create it and update the kfdef as documented below. + +### Installation Steps + +1. Ensure that the prerequisites are met. +2. Apply the kfdef at [kfctl_openshift_ds-pipelines.yaml](https://github.com/opendatahub-io/odh-manifests/blob/master/kfdef/kfctl_openshift_ds-pipelines.yaml). You may need to update the `namespace` field under `metadata` if you want to deploy in a namespace that isn't `odh-applications`. +3. To find the URL for Data Science Pipelines, run the following command. + ```bash + $ oc get route -n <kfdef-namespace> ds-pipeline-ui -o jsonpath='{.spec.host}' + ``` + The value of `<kfdef-namespace>` should match the `namespace` field of the kfdef that you applied. +4. Alternatively, you can access the route via the console. To do so: + + 1. Go to the OpenShift Console. + 2. Click on `Networking` in the sidebar on the left side. + 3. Click on `Routes`. It will take you to a new page in the console. + 4.
Click the URL under the `Location` column for the row matching `ds-pipeline-ui` + + +## Directory Structure + +### Base + +This directory contains artifacts for deploying all backend components of Data Science Pipelines. This deployment currently includes the kfp-tekton backend as well as a Minio deployment to act as an object store. The Minio deployment will be moved to an overlay at some point in the near future. + +### Overlays + +1. metadata-store-mysql: This overlay contains artifacts for deploying a MySQL database. MySQL is currently the only supported backend for Data Science Pipelines, so if you don't have an existing MySQL database deployed, this overlay needs to be applied. +2. metadata-store-postgresql: This overlay contains artifacts for deploying a PostgreSQL database. Data Science Pipelines does not currently support PostgreSQL as a backend, so deploying this overlay will not actually modify Data Science Pipelines behaviour. +3. ds-pipeline-ui: This overlay contains deployment artifacts for the Data Science Pipelines UI. Deploying Data Science Pipelines without this overlay will result in only the backend artifacts being created. +4. object-store-minio: This overlay contains artifacts for deploying Minio as the Object Store to store Pipelines artifacts. + +### Prometheus + +This directory contains the service monitor definition for Data Science Pipelines. It is always deployed by base, so this will eventually be moved into the base directory itself. + +## Parameters + +You can customize the Data Science Pipelines deployment by injecting custom parameters that override the defaults. The following parameters can be used: + +* **pipeline_install_configuration**: The ConfigMap name that contains the values to install the Data Science Pipelines environment. This parameter defaults to `pipeline-install-config` and you can find an example in the [repository](./base/configmaps/pipeline-install-config.yaml). +* **ds_pipelines_configuration**: The ConfigMap name that contains the values to integrate Data Science Pipelines with the underlying components (Database and Object Store). This parameter defaults to `ds-pipeline-config` and you can find an example in the [repository](./base/configmaps/ds-pipeline-config.yaml). +* **database_secret**: The secret that contains the credentials for the Data Science Pipelines Database. It defaults to `mysql-secret` if using the `metadata-store-mysql` overlay or `postgresql-secret` if using the `metadata-store-postgresql` overlay. +* **ds_pipelines_ui_configuration**: The ConfigMap that contains the values to customize the UI. It defaults to `ds-pipeline-ui-configmap`. + +## Configuration + +* It is possible to configure which S3 storage is used by Pipeline Runs. Detailed instructions on how to configure this will be added once Minio is moved to an overlay. + +## Usage + +### These instructions will be updated once Data Science Pipelines has a tile available in odh-dashboard + +1. Go to the ds-pipeline-ui route. +2. Click on `Pipelines` on the left side. +3. There will be a `[Demo] flip-coin` Pipeline already available. Click on it. +4. Click on the blue `Create run` button towards the top of the screen. +5. You can leave all the fields untouched. If desired, you can create a new experiment to link the pipeline run to, or rename the run itself. +6. Click on the blue `Start` button. +7. You will be taken to the `Runs` page. You will see a row matching the `Run name` you previously picked. Click on the `Run name` in that row. +8.
Once the Pipeline is done running, you can see a graph of all the pods that were created as well as the paths that were followed. +9. For further verification, you can view all the pods that were created as part of the Pipeline Run in the `<kfdef-namespace>` namespace. They will all show up as `Completed`. + +## Data Science Pipelines Architecture + +A complete architecture can be found at [ODH Data Science Pipelines Architecture and Design](https://docs.google.com/document/d/1o-JS1uZKLZsMY3D16kl5KBdyBb-aV-kyD_XycdJOYpM/edit#heading=h.3aocw3evrps0). This document will be moved to GitHub once the corresponding ML Ops SIG repos are created. diff --git a/ml-pipelines/base/configmaps/kfp-tekton-config.yaml b/data-science-pipelines/base/configmaps/ds-pipeline-config.yaml similarity index 87% rename from ml-pipelines/base/configmaps/kfp-tekton-config.yaml rename to data-science-pipelines/base/configmaps/ds-pipeline-config.yaml index 6969f4dd9..7ad25ddea 100644 --- a/ml-pipelines/base/configmaps/kfp-tekton-config.yaml +++ b/data-science-pipelines/base/configmaps/ds-pipeline-config.yaml @@ -5,7 +5,7 @@ data: artifact_bucket: mlpipeline artifact_endpoint: minio-service:9000 artifact_endpoint_scheme: http:// - artifact_image: quay.io/thoth-station/document-sync-job:v0.1.0 + artifact_image: quay.io/opendatahub/ml-pipelines-artifact-manager:latest artifact_script: |- #!/usr/bin/env sh push_artifact() { @@ -32,5 +32,5 @@ data: kind: ConfigMap metadata: labels: - application-crd-id: kubeflow-pipelines - name: kfp-tekton-config + application-crd-id: data-science-pipelines + name: ds-pipeline-config diff --git a/ml-pipelines/base/configmaps/pipeline-install-config.yaml b/data-science-pipelines/base/configmaps/pipeline-install-config.yaml similarity index 90% rename from ml-pipelines/base/configmaps/pipeline-install-config.yaml rename to data-science-pipelines/base/configmaps/pipeline-install-config.yaml index 5f8f9680d..48b7cac98 100644 --- a/ml-pipelines/base/configmaps/pipeline-install-config.yaml +++ b/data-science-pipelines/base/configmaps/pipeline-install-config.yaml @@ -5,14 +5,14 @@ data: appVersion: 1.7.0 autoUpdatePipelineDefaultVersion: "true" bucketName: mlpipeline - cacheDb: cachedb + cacheDb: mlpipeline cacheImage: registry.access.redhat.com/ubi8/ubi-minimal cacheNodeRestrictions: "false" cronScheduleTimezone: UTC dbHost: mysql dbPort: "3306" defaultPipelineRoot: "" - mlmdDb: metadb + mlmdDb: mlpipeline pipelineDb: mlpipeline warning: | 1.
Do not use kubectl to edit this configmap, because some values are used @@ -24,5 +24,5 @@ data: kind: ConfigMap metadata: labels: - application-crd-id: kubeflow-pipelines + application-crd-id: data-science-pipelines name: pipeline-install-config diff --git a/ml-pipelines/base/customresourcedefinitions/scheduledworkflows.yaml b/data-science-pipelines/base/customresourcedefinitions/scheduledworkflows.yaml similarity index 95% rename from ml-pipelines/base/customresourcedefinitions/scheduledworkflows.yaml rename to data-science-pipelines/base/customresourcedefinitions/scheduledworkflows.yaml index 903b6ef8f..21718b6f2 100644 --- a/ml-pipelines/base/customresourcedefinitions/scheduledworkflows.yaml +++ b/data-science-pipelines/base/customresourcedefinitions/scheduledworkflows.yaml @@ -2,7 +2,7 @@ apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition metadata: labels: - application-crd-id: kubeflow-pipelines + application-crd-id: data-science-pipelines kubeflow/crd-install: "true" name: scheduledworkflows.kubeflow.org spec: diff --git a/ml-pipelines/base/customresourcedefinitions/viewers.yaml b/data-science-pipelines/base/customresourcedefinitions/viewers.yaml similarity index 94% rename from ml-pipelines/base/customresourcedefinitions/viewers.yaml rename to data-science-pipelines/base/customresourcedefinitions/viewers.yaml index a299caa33..57d2e869b 100644 --- a/ml-pipelines/base/customresourcedefinitions/viewers.yaml +++ b/data-science-pipelines/base/customresourcedefinitions/viewers.yaml @@ -2,7 +2,7 @@ apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition metadata: labels: - application-crd-id: kubeflow-pipelines + application-crd-id: data-science-pipelines kubeflow/crd-install: "true" name: viewers.kubeflow.org spec: diff --git a/data-science-pipelines/base/deployments/ds-pipeline-persistenceagent.yaml b/data-science-pipelines/base/deployments/ds-pipeline-persistenceagent.yaml new file mode 100644 index 000000000..f1727edc0 --- /dev/null +++ b/data-science-pipelines/base/deployments/ds-pipeline-persistenceagent.yaml @@ -0,0 +1,60 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: ds-pipeline-persistenceagent + application-crd-id: data-science-pipelines + name: ds-pipeline-persistenceagent +spec: + selector: + matchLabels: + app: ds-pipeline-persistenceagent + application-crd-id: data-science-pipelines + template: + metadata: + annotations: + cluster-autoscaler.kubernetes.io/safe-to-evict: "true" + labels: + app: ds-pipeline-persistenceagent + application-crd-id: data-science-pipelines + spec: + containers: + - env: + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace + image: persistenceagent + imagePullPolicy: IfNotPresent + name: ds-pipeline-persistenceagent + command: + - persistence_agent + - "--logtostderr=true" + - "--namespace=$(namespace)" + - "--ttlSecondsAfterWorkflowFinish=86400" + - "--numWorker=2" + - "--mlPipelineAPIServerName=ds-pipeline" + livenessProbe: + exec: + command: + - pidof + - persistence_agent + initialDelaySeconds: 30 + periodSeconds: 5 + timeoutSeconds: 2 + readinessProbe: + exec: + command: + - pidof + - persistence_agent + initialDelaySeconds: 3 + periodSeconds: 5 + timeoutSeconds: 2 + resources: + requests: + cpu: 120m + memory: 500Mi + limits: + cpu: 250m + memory: 1Gi + serviceAccountName: ds-pipeline-persistenceagent diff --git a/data-science-pipelines/base/deployments/ds-pipeline-scheduledworkflow.yaml b/data-science-pipelines/base/deployments/ds-pipeline-scheduledworkflow.yaml new file 
mode 100644 index 000000000..6133eb164 --- /dev/null +++ b/data-science-pipelines/base/deployments/ds-pipeline-scheduledworkflow.yaml @@ -0,0 +1,58 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: ds-pipeline-scheduledworkflow + application-crd-id: data-science-pipelines + name: ds-pipeline-scheduledworkflow +spec: + selector: + matchLabels: + app: ds-pipeline-scheduledworkflow + application-crd-id: data-science-pipelines + template: + metadata: + annotations: + cluster-autoscaler.kubernetes.io/safe-to-evict: "true" + labels: + app: ds-pipeline-scheduledworkflow + application-crd-id: data-science-pipelines + spec: + containers: + - env: + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace + - name: CRON_SCHEDULE_TIMEZONE + valueFrom: + configMapKeyRef: + key: cronScheduleTimezone + name: $(pipeline_install_configuration) + image: scheduledworkflow + imagePullPolicy: IfNotPresent + name: ds-pipeline-scheduledworkflow + livenessProbe: + exec: + command: + - pidof + - controller + initialDelaySeconds: 30 + periodSeconds: 5 + timeoutSeconds: 2 + readinessProbe: + exec: + command: + - pidof + - controller + initialDelaySeconds: 3 + periodSeconds: 5 + timeoutSeconds: 2 + resources: + requests: + cpu: 120m + memory: 100Mi + limits: + cpu: 250m + memory: 250Mi + serviceAccountName: ds-pipeline-scheduledworkflow diff --git a/data-science-pipelines/base/deployments/ds-pipeline-viewer-crd.yaml b/data-science-pipelines/base/deployments/ds-pipeline-viewer-crd.yaml new file mode 100644 index 000000000..bcfb28312 --- /dev/null +++ b/data-science-pipelines/base/deployments/ds-pipeline-viewer-crd.yaml @@ -0,0 +1,59 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: ds-pipeline-viewer-crd + application-crd-id: data-science-pipelines + name: ds-pipeline-viewer-crd +spec: + selector: + matchLabels: + app: ds-pipeline-viewer-crd + application-crd-id: data-science-pipelines + template: + metadata: + annotations: + cluster-autoscaler.kubernetes.io/safe-to-evict: "true" + labels: + app: ds-pipeline-viewer-crd + application-crd-id: data-science-pipelines + spec: + containers: + - env: + - name: MAX_NUM_VIEWERS + value: "50" + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace + - name: MINIO_NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace + image: viewer-crd-controller + imagePullPolicy: Always + name: ds-pipeline-viewer-crd + livenessProbe: + exec: + command: + - pidof + - controller + initialDelaySeconds: 30 + periodSeconds: 5 + timeoutSeconds: 2 + readinessProbe: + exec: + command: + - pidof + - controller + initialDelaySeconds: 3 + periodSeconds: 5 + timeoutSeconds: 2 + resources: + requests: + cpu: 120m + memory: 100Mi + limits: + cpu: 250m + memory: 500Mi + serviceAccountName: ds-pipeline-viewer-crd-service-account diff --git a/ml-pipelines/base/deployments/ml-pipeline-visualizationserver.yaml b/data-science-pipelines/base/deployments/ds-pipeline-visualizationserver.yaml similarity index 64% rename from ml-pipelines/base/deployments/ml-pipeline-visualizationserver.yaml rename to data-science-pipelines/base/deployments/ds-pipeline-visualizationserver.yaml index 83233e99b..6d00a4884 100644 --- a/ml-pipelines/base/deployments/ml-pipeline-visualizationserver.yaml +++ b/data-science-pipelines/base/deployments/ds-pipeline-visualizationserver.yaml @@ -2,25 +2,29 @@ apiVersion: apps/v1 kind: Deployment metadata: labels: - app: ml-pipeline-visualizationserver - application-crd-id: kubeflow-pipelines - name: 
ml-pipeline-visualizationserver + app: ds-pipeline-visualizationserver + application-crd-id: data-science-pipelines + name: ds-pipeline-visualizationserver spec: selector: matchLabels: - app: ml-pipeline-visualizationserver - application-crd-id: kubeflow-pipelines + app: ds-pipeline-visualizationserver + application-crd-id: data-science-pipelines template: metadata: annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: "true" labels: - app: ml-pipeline-visualizationserver - application-crd-id: kubeflow-pipelines + app: ds-pipeline-visualizationserver + application-crd-id: data-science-pipelines spec: containers: - image: visualization-server - imagePullPolicy: IfNotPresent + imagePullPolicy: Always + name: ds-pipeline-visualizationserver + ports: + - containerPort: 8888 + name: http livenessProbe: exec: command: @@ -30,13 +34,9 @@ spec: - -O - '-' - http://localhost:8888/ - initialDelaySeconds: 3 + initialDelaySeconds: 30 periodSeconds: 5 timeoutSeconds: 2 - name: ml-pipeline-visualizationserver - ports: - - containerPort: 8888 - name: http readinessProbe: exec: command: @@ -53,4 +53,7 @@ spec: requests: cpu: 30m memory: 500Mi - serviceAccountName: ml-pipeline-visualizationserver + limits: + cpu: 250m + memory: 1Gi + serviceAccountName: ds-pipeline-visualizationserver diff --git a/ml-pipelines/base/deployments/ml-pipeline.yaml b/data-science-pipelines/base/deployments/ds-pipeline.yaml similarity index 78% rename from ml-pipelines/base/deployments/ml-pipeline.yaml rename to data-science-pipelines/base/deployments/ds-pipeline.yaml index 4631b4a05..408f558cd 100644 --- a/ml-pipelines/base/deployments/ml-pipeline.yaml +++ b/data-science-pipelines/base/deployments/ds-pipeline.yaml @@ -2,21 +2,21 @@ apiVersion: apps/v1 kind: Deployment metadata: labels: - app: ml-pipeline - application-crd-id: kubeflow-pipelines - name: ml-pipeline + app: ds-pipeline + application-crd-id: data-science-pipelines + name: ds-pipeline spec: selector: matchLabels: - app: ml-pipeline - application-crd-id: kubeflow-pipelines + app: ds-pipeline + application-crd-id: data-science-pipelines template: metadata: annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: "true" labels: - app: ml-pipeline - application-crd-id: kubeflow-pipelines + app: ds-pipeline + application-crd-id: data-science-pipelines spec: containers: - envFrom: @@ -58,57 +58,57 @@ spec: valueFrom: configMapKeyRef: key: artifact_bucket - name: $(ml_pipelines_configuration) + name: $(ds_pipelines_configuration) - name: ARTIFACT_ENDPOINT valueFrom: configMapKeyRef: key: artifact_endpoint - name: $(ml_pipelines_configuration) + name: $(ds_pipelines_configuration) - name: ARTIFACT_ENDPOINT_SCHEME valueFrom: configMapKeyRef: key: artifact_endpoint_scheme - name: $(ml_pipelines_configuration) + name: $(ds_pipelines_configuration) - name: ARCHIVE_LOGS valueFrom: configMapKeyRef: key: archive_logs - name: $(ml_pipelines_configuration) + name: $(ds_pipelines_configuration) - name: TRACK_ARTIFACTS valueFrom: configMapKeyRef: key: track_artifacts - name: $(ml_pipelines_configuration) + name: $(ds_pipelines_configuration) - name: STRIP_EOF valueFrom: configMapKeyRef: key: strip_eof - name: $(ml_pipelines_configuration) + name: $(ds_pipelines_configuration) - name: ARTIFACT_SCRIPT valueFrom: configMapKeyRef: key: artifact_script - name: $(ml_pipelines_configuration) + name: $(ds_pipelines_configuration) - name: ARTIFACT_IMAGE valueFrom: configMapKeyRef: key: artifact_image - name: $(ml_pipelines_configuration) + name: $(ds_pipelines_configuration) - name: 
INJECT_DEFAULT_SCRIPT valueFrom: configMapKeyRef: key: inject_default_script - name: $(ml_pipelines_configuration) + name: $(ds_pipelines_configuration) - name: APPLY_TEKTON_CUSTOM_RESOURCE valueFrom: configMapKeyRef: key: apply_tekton_custom_resource - name: $(ml_pipelines_configuration) + name: $(ds_pipelines_configuration) - name: TERMINATE_STATUS valueFrom: configMapKeyRef: key: terminate_status - name: $(ml_pipelines_configuration) + name: $(ds_pipelines_configuration) - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION valueFrom: configMapKeyRef: @@ -119,8 +119,18 @@ spec: configMapKeyRef: key: ConMaxLifeTimeSec name: $(pipeline_install_configuration) + - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST + value: ds-pipeline-visualizationserver + - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT + value: "8888" image: api-server imagePullPolicy: Always + name: ds-pipeline-api-server + ports: + - containerPort: 8888 + name: http + - containerPort: 8887 + name: grpc livenessProbe: exec: command: @@ -133,12 +143,6 @@ spec: initialDelaySeconds: 3 periodSeconds: 5 timeoutSeconds: 2 - name: ml-pipeline-api-server - ports: - - containerPort: 8888 - name: http - - containerPort: 8887 - name: grpc readinessProbe: exec: command: @@ -155,4 +159,7 @@ spec: requests: cpu: 250m memory: 500Mi - serviceAccountName: ml-pipeline + limits: + cpu: 500m + memory: 1Gi + serviceAccountName: ds-pipeline diff --git a/data-science-pipelines/base/kustomization.yaml b/data-science-pipelines/base/kustomization.yaml new file mode 100644 index 000000000..733b428aa --- /dev/null +++ b/data-science-pipelines/base/kustomization.yaml @@ -0,0 +1,120 @@ +--- +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: + # Configmaps + - ./configmaps/ds-pipeline-config.yaml + - ./configmaps/pipeline-install-config.yaml + + # CustomResourceDefinitions + - ./customresourcedefinitions/viewers.yaml + - ./customresourcedefinitions/scheduledworkflows.yaml + + # Deployments + - ./deployments/ds-pipeline-persistenceagent.yaml + - ./deployments/ds-pipeline-scheduledworkflow.yaml + - ./deployments/ds-pipeline-viewer-crd.yaml + - ./deployments/ds-pipeline-visualizationserver.yaml + - ./deployments/ds-pipeline.yaml + + # Rolebindings + - ./rolebindings/ds-pipeline-persistenceagent-binding.yaml + - ./rolebindings/ds-pipeline-scheduledworkflow-binding.yaml + - ./rolebindings/ds-pipeline-viewer-crd-binding.yaml + - ./rolebindings/ds-pipeline.yaml + - ./rolebindings/pipeline-runner-binding.yaml + + # Roles + - ./roles/ds-pipeline-persistenceagent-role.yaml + - ./roles/ds-pipeline-scheduledworkflow-role.yaml + - ./roles/ds-pipeline-viewer-controller-role.yaml + - ./roles/ds-pipeline.yaml + - ./roles/pipeline-runner.yaml + + # ServiceAccounts + - ./serviceaccounts/ds-pipeline-container-builder.yaml + - ./serviceaccounts/ds-pipeline-persistenceagent.yaml + - ./serviceaccounts/ds-pipeline-scheduledworkflow.yaml + - ./serviceaccounts/ds-pipeline-viewer-crd-service-account.yaml + - ./serviceaccounts/ds-pipeline-viewer.yaml + - ./serviceaccounts/ds-pipeline-visualizationserver.yaml + - ./serviceaccounts/ds-pipeline.yaml + - ./serviceaccounts/pipeline-runner.yaml + + # Services + - ./services/ds-pipeline-visualizationserver.yaml + - ./services/ds-pipeline.yaml + + # Monitoring + - ../prometheus + +generatorOptions: + disableNameSuffixHash: true + +configMapGenerator: + - name: ds-pipeline-params-config + envs: + - params.env +vars: + - name: namespace + objref: + kind: ConfigMap + name: ds-pipeline-config + apiVersion: v1 + 
fieldref: + fieldpath: metadata.namespace + - name: artifact_secret_name + objref: + name: ds-pipeline-params-config + kind: ConfigMap + apiVersion: v1 + fieldref: + fieldpath: data.artifact_secret_name + - name: pipeline_install_configuration + objref: + name: ds-pipeline-params-config + kind: ConfigMap + apiVersion: v1 + fieldref: + fieldpath: data.pipeline_install_configuration + - name: ds_pipelines_configuration + objref: + name: ds-pipeline-params-config + kind: ConfigMap + apiVersion: v1 + fieldref: + fieldpath: data.ds_pipelines_configuration + - name: database_secret + objref: + name: ds-pipeline-params-config + kind: ConfigMap + apiVersion: v1 + fieldref: + fieldpath: data.database_secret + - name: ds_pipelines_ui_configuration + objref: + name: ds-pipeline-params-config + kind: ConfigMap + apiVersion: v1 + fieldref: + fieldpath: data.ds_pipelines_ui_configuration +configurations: + - params.yaml + +images: + - name: persistenceagent + newName: quay.io/internaldatahub/persistenceagent + newTag: 1.1.0 + - name: scheduledworkflow + newName: quay.io/internaldatahub/scheduledworkflow + newTag: 1.1.0 + - name: viewer-crd-controller + newName: gcr.io/ml-pipeline/viewer-crd-controller + newTag: 1.7.0 + - name: visualization-server + newName: gcr.io/ml-pipeline/visualization-server + newTag: 1.7.0 + - name: api-server + newName: quay.io/internaldatahub/api-server + newTag: 1.1.0 diff --git a/ml-pipelines/base/params.env b/data-science-pipelines/base/params.env similarity index 56% rename from ml-pipelines/base/params.env rename to data-science-pipelines/base/params.env index a6abf2d17..af656e94b 100644 --- a/ml-pipelines/base/params.env +++ b/data-science-pipelines/base/params.env @@ -1,5 +1,5 @@ artifact_secret_name=mlpipeline-minio-artifact pipeline_install_configuration=pipeline-install-config -ml_pipelines_configuration=kfp-tekton-config +ds_pipelines_configuration=ds-pipeline-config database_secret=mysql-secret -ml_pipelines_ui_configuration=ml-pipeline-ui-configmap +ds_pipelines_ui_configuration=ds-pipeline-ui-configmap diff --git a/ml-pipelines/base/params.yaml b/data-science-pipelines/base/params.yaml similarity index 100% rename from ml-pipelines/base/params.yaml rename to data-science-pipelines/base/params.yaml diff --git a/ml-pipelines/base/rolebindings/ml-pipeline-persistenceagent-binding.yaml b/data-science-pipelines/base/rolebindings/ds-pipeline-persistenceagent-binding.yaml similarity index 50% rename from ml-pipelines/base/rolebindings/ml-pipeline-persistenceagent-binding.yaml rename to data-science-pipelines/base/rolebindings/ds-pipeline-persistenceagent-binding.yaml index 83cba51d7..54db10510 100644 --- a/ml-pipelines/base/rolebindings/ml-pipeline-persistenceagent-binding.yaml +++ b/data-science-pipelines/base/rolebindings/ds-pipeline-persistenceagent-binding.yaml @@ -2,12 +2,12 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding metadata: labels: - application-crd-id: kubeflow-pipelines - name: ml-pipeline-persistenceagent-binding + application-crd-id: data-science-pipelines + name: ds-pipeline-persistenceagent-binding roleRef: apiGroup: rbac.authorization.k8s.io kind: Role - name: ml-pipeline-persistenceagent-role + name: ds-pipeline-persistenceagent-role subjects: - kind: ServiceAccount - name: ml-pipeline-persistenceagent + name: ds-pipeline-persistenceagent diff --git a/data-science-pipelines/base/rolebindings/ds-pipeline-scheduledworkflow-binding.yaml b/data-science-pipelines/base/rolebindings/ds-pipeline-scheduledworkflow-binding.yaml new file mode 
100644 index 000000000..9b849c64f --- /dev/null +++ b/data-science-pipelines/base/rolebindings/ds-pipeline-scheduledworkflow-binding.yaml @@ -0,0 +1,13 @@ +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + labels: + application-crd-id: data-science-pipelines + name: ds-pipeline-scheduledworkflow-binding +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: ds-pipeline-scheduledworkflow-role +subjects: + - kind: ServiceAccount + name: ds-pipeline-scheduledworkflow diff --git a/data-science-pipelines/base/rolebindings/ds-pipeline-viewer-crd-binding.yaml b/data-science-pipelines/base/rolebindings/ds-pipeline-viewer-crd-binding.yaml new file mode 100644 index 000000000..2434b1d10 --- /dev/null +++ b/data-science-pipelines/base/rolebindings/ds-pipeline-viewer-crd-binding.yaml @@ -0,0 +1,13 @@ +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + labels: + application-crd-id: data-science-pipelines + name: ds-pipeline-viewer-crd-binding +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: ds-pipeline-viewer-controller-role +subjects: + - kind: ServiceAccount + name: ds-pipeline-viewer-crd-service-account diff --git a/ml-pipelines/base/rolebindings/ml-pipeline.yaml b/data-science-pipelines/base/rolebindings/ds-pipeline.yaml similarity index 57% rename from ml-pipelines/base/rolebindings/ml-pipeline.yaml rename to data-science-pipelines/base/rolebindings/ds-pipeline.yaml index 16b07120f..5bce2f6a9 100644 --- a/ml-pipelines/base/rolebindings/ml-pipeline.yaml +++ b/data-science-pipelines/base/rolebindings/ds-pipeline.yaml @@ -2,13 +2,13 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding metadata: labels: - app: ml-pipeline - application-crd-id: kubeflow-pipelines - name: ml-pipeline + app: ds-pipeline + application-crd-id: data-science-pipelines + name: ds-pipeline roleRef: apiGroup: rbac.authorization.k8s.io kind: Role - name: ml-pipeline + name: ds-pipeline subjects: - kind: ServiceAccount - name: ml-pipeline + name: ds-pipeline diff --git a/ml-pipelines/base/rolebindings/pipeline-runner-binding.yaml b/data-science-pipelines/base/rolebindings/pipeline-runner-binding.yaml similarity index 84% rename from ml-pipelines/base/rolebindings/pipeline-runner-binding.yaml rename to data-science-pipelines/base/rolebindings/pipeline-runner-binding.yaml index 14aa3d81d..6ad68dd2a 100644 --- a/ml-pipelines/base/rolebindings/pipeline-runner-binding.yaml +++ b/data-science-pipelines/base/rolebindings/pipeline-runner-binding.yaml @@ -2,7 +2,7 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding metadata: labels: - application-crd-id: kubeflow-pipelines + application-crd-id: data-science-pipelines name: pipeline-runner-binding roleRef: apiGroup: rbac.authorization.k8s.io diff --git a/ml-pipelines/base/roles/ml-pipeline-persistenceagent-role.yaml b/data-science-pipelines/base/roles/ds-pipeline-persistenceagent-role.yaml similarity index 85% rename from ml-pipelines/base/roles/ml-pipeline-persistenceagent-role.yaml rename to data-science-pipelines/base/roles/ds-pipeline-persistenceagent-role.yaml index 2ac8166e6..16aabd813 100644 --- a/ml-pipelines/base/roles/ml-pipeline-persistenceagent-role.yaml +++ b/data-science-pipelines/base/roles/ds-pipeline-persistenceagent-role.yaml @@ -2,8 +2,8 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: labels: - application-crd-id: kubeflow-pipelines - name: ml-pipeline-persistenceagent-role + application-crd-id: data-science-pipelines + name: 
ds-pipeline-persistenceagent-role rules: - apiGroups: - argoproj.io diff --git a/ml-pipelines/base/roles/ml-pipeline-scheduledworkflow-role.yaml b/data-science-pipelines/base/roles/ds-pipeline-scheduledworkflow-role.yaml similarity index 87% rename from ml-pipelines/base/roles/ml-pipeline-scheduledworkflow-role.yaml rename to data-science-pipelines/base/roles/ds-pipeline-scheduledworkflow-role.yaml index 6a1a9280e..12c033059 100644 --- a/ml-pipelines/base/roles/ml-pipeline-scheduledworkflow-role.yaml +++ b/data-science-pipelines/base/roles/ds-pipeline-scheduledworkflow-role.yaml @@ -2,9 +2,9 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: labels: - app: ml-pipeline-scheduledworkflow-role - application-crd-id: kubeflow-pipelines - name: ml-pipeline-scheduledworkflow-role + app: ds-pipeline-scheduledworkflow-role + application-crd-id: data-science-pipelines + name: ds-pipeline-scheduledworkflow-role rules: - apiGroups: - argoproj.io diff --git a/ml-pipelines/base/roles/ml-pipeline-viewer-controller-role.yaml b/data-science-pipelines/base/roles/ds-pipeline-viewer-controller-role.yaml similarity index 83% rename from ml-pipelines/base/roles/ml-pipeline-viewer-controller-role.yaml rename to data-science-pipelines/base/roles/ds-pipeline-viewer-controller-role.yaml index f026d0a48..d29760003 100644 --- a/ml-pipelines/base/roles/ml-pipeline-viewer-controller-role.yaml +++ b/data-science-pipelines/base/roles/ds-pipeline-viewer-controller-role.yaml @@ -2,8 +2,8 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: labels: - application-crd-id: kubeflow-pipelines - name: ml-pipeline-viewer-controller-role + application-crd-id: data-science-pipelines + name: ds-pipeline-viewer-controller-role rules: - apiGroups: - '*' diff --git a/ml-pipelines/base/roles/ml-pipeline.yaml b/data-science-pipelines/base/roles/ds-pipeline.yaml similarity index 93% rename from ml-pipelines/base/roles/ml-pipeline.yaml rename to data-science-pipelines/base/roles/ds-pipeline.yaml index 242fc60d7..ab09c82e4 100644 --- a/ml-pipelines/base/roles/ml-pipeline.yaml +++ b/data-science-pipelines/base/roles/ds-pipeline.yaml @@ -2,9 +2,9 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: labels: - app: ml-pipeline - application-crd-id: kubeflow-pipelines - name: ml-pipeline + app: ds-pipeline + application-crd-id: data-science-pipelines + name: ds-pipeline rules: - apiGroups: - "" diff --git a/ml-pipelines/base/roles/pipeline-runner.yaml b/data-science-pipelines/base/roles/pipeline-runner.yaml similarity index 96% rename from ml-pipelines/base/roles/pipeline-runner.yaml rename to data-science-pipelines/base/roles/pipeline-runner.yaml index 894e42505..48eee3f51 100644 --- a/ml-pipelines/base/roles/pipeline-runner.yaml +++ b/data-science-pipelines/base/roles/pipeline-runner.yaml @@ -2,7 +2,7 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: labels: - application-crd-id: kubeflow-pipelines + application-crd-id: data-science-pipelines name: pipeline-runner rules: - apiGroups: diff --git a/data-science-pipelines/base/serviceaccounts/ds-pipeline-container-builder.yaml b/data-science-pipelines/base/serviceaccounts/ds-pipeline-container-builder.yaml new file mode 100644 index 000000000..174da3589 --- /dev/null +++ b/data-science-pipelines/base/serviceaccounts/ds-pipeline-container-builder.yaml @@ -0,0 +1,6 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + labels: + application-crd-id: data-science-pipelines + name: ds-pipeline-container-builder diff --git 
a/data-science-pipelines/base/serviceaccounts/ds-pipeline-persistenceagent.yaml b/data-science-pipelines/base/serviceaccounts/ds-pipeline-persistenceagent.yaml new file mode 100644 index 000000000..af88b3b0d --- /dev/null +++ b/data-science-pipelines/base/serviceaccounts/ds-pipeline-persistenceagent.yaml @@ -0,0 +1,6 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + labels: + application-crd-id: data-science-pipelines + name: ds-pipeline-persistenceagent diff --git a/data-science-pipelines/base/serviceaccounts/ds-pipeline-scheduledworkflow.yaml b/data-science-pipelines/base/serviceaccounts/ds-pipeline-scheduledworkflow.yaml new file mode 100644 index 000000000..78507636b --- /dev/null +++ b/data-science-pipelines/base/serviceaccounts/ds-pipeline-scheduledworkflow.yaml @@ -0,0 +1,6 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + labels: + application-crd-id: data-science-pipelines + name: ds-pipeline-scheduledworkflow diff --git a/data-science-pipelines/base/serviceaccounts/ds-pipeline-viewer-crd-service-account.yaml b/data-science-pipelines/base/serviceaccounts/ds-pipeline-viewer-crd-service-account.yaml new file mode 100644 index 000000000..d470c6ee6 --- /dev/null +++ b/data-science-pipelines/base/serviceaccounts/ds-pipeline-viewer-crd-service-account.yaml @@ -0,0 +1,6 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + labels: + application-crd-id: data-science-pipelines + name: ds-pipeline-viewer-crd-service-account diff --git a/data-science-pipelines/base/serviceaccounts/ds-pipeline-viewer.yaml b/data-science-pipelines/base/serviceaccounts/ds-pipeline-viewer.yaml new file mode 100644 index 000000000..33d64d599 --- /dev/null +++ b/data-science-pipelines/base/serviceaccounts/ds-pipeline-viewer.yaml @@ -0,0 +1,6 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + labels: + application-crd-id: data-science-pipelines + name: ds-pipelines-viewer diff --git a/data-science-pipelines/base/serviceaccounts/ds-pipeline-visualizationserver.yaml b/data-science-pipelines/base/serviceaccounts/ds-pipeline-visualizationserver.yaml new file mode 100644 index 000000000..5792a0776 --- /dev/null +++ b/data-science-pipelines/base/serviceaccounts/ds-pipeline-visualizationserver.yaml @@ -0,0 +1,6 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + labels: + application-crd-id: data-science-pipelines + name: ds-pipeline-visualizationserver diff --git a/data-science-pipelines/base/serviceaccounts/ds-pipeline.yaml b/data-science-pipelines/base/serviceaccounts/ds-pipeline.yaml new file mode 100644 index 000000000..414bd5570 --- /dev/null +++ b/data-science-pipelines/base/serviceaccounts/ds-pipeline.yaml @@ -0,0 +1,6 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + labels: + application-crd-id: data-science-pipelines + name: ds-pipeline diff --git a/ml-pipelines/base/serviceaccounts/pipeline-runner.yaml b/data-science-pipelines/base/serviceaccounts/pipeline-runner.yaml similarity index 62% rename from ml-pipelines/base/serviceaccounts/pipeline-runner.yaml rename to data-science-pipelines/base/serviceaccounts/pipeline-runner.yaml index ceb714670..fb5ee7b84 100644 --- a/ml-pipelines/base/serviceaccounts/pipeline-runner.yaml +++ b/data-science-pipelines/base/serviceaccounts/pipeline-runner.yaml @@ -2,5 +2,5 @@ apiVersion: v1 kind: ServiceAccount metadata: labels: - application-crd-id: kubeflow-pipelines + application-crd-id: data-science-pipelines name: pipeline-runner diff --git a/data-science-pipelines/base/services/ds-pipeline-visualizationserver.yaml 
b/data-science-pipelines/base/services/ds-pipeline-visualizationserver.yaml new file mode 100644 index 000000000..ec1489542 --- /dev/null +++ b/data-science-pipelines/base/services/ds-pipeline-visualizationserver.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + labels: + application-crd-id: data-science-pipelines + name: ds-pipeline-visualizationserver +spec: + ports: + - name: http + port: 8888 + protocol: TCP + targetPort: 8888 + selector: + app: ds-pipeline-visualizationserver + application-crd-id: data-science-pipelines diff --git a/ml-pipelines/base/services/ml-pipeline.yaml b/data-science-pipelines/base/services/ds-pipeline.yaml similarity index 63% rename from ml-pipelines/base/services/ml-pipeline.yaml rename to data-science-pipelines/base/services/ds-pipeline.yaml index 029a21a3b..fea3536be 100644 --- a/ml-pipelines/base/services/ml-pipeline.yaml +++ b/data-science-pipelines/base/services/ds-pipeline.yaml @@ -2,8 +2,8 @@ apiVersion: v1 kind: Service metadata: labels: - application-crd-id: kubeflow-pipelines - name: ml-pipeline + application-crd-id: data-science-pipelines + name: ds-pipeline spec: ports: - name: http @@ -15,5 +15,5 @@ spec: protocol: TCP targetPort: 8887 selector: - app: ml-pipeline - application-crd-id: kubeflow-pipelines + app: ds-pipeline + application-crd-id: data-science-pipelines diff --git a/ml-pipelines/base/deployments/metadata-envoy-deployment.yaml b/data-science-pipelines/overlays/component-mlmd/deployments/metadata-envoy-deployment.yaml similarity index 67% rename from ml-pipelines/base/deployments/metadata-envoy-deployment.yaml rename to data-science-pipelines/overlays/component-mlmd/deployments/metadata-envoy-deployment.yaml index 8b1a21415..1e2a5858e 100644 --- a/ml-pipelines/base/deployments/metadata-envoy-deployment.yaml +++ b/data-science-pipelines/overlays/component-mlmd/deployments/metadata-envoy-deployment.yaml @@ -27,3 +27,15 @@ spec: name: md-envoy - containerPort: 9901 name: envoy-admin + livenessProbe: + initialDelaySeconds: 30 + periodSeconds: 5 + tcpSocket: + port: md-envoy + timeoutSeconds: 2 + readinessProbe: + initialDelaySeconds: 3 + periodSeconds: 5 + tcpSocket: + port: md-envoy + timeoutSeconds: 2 diff --git a/ml-pipelines/base/deployments/metadata-grpc-deployment.yaml b/data-science-pipelines/overlays/component-mlmd/deployments/metadata-grpc-deployment.yaml similarity index 98% rename from ml-pipelines/base/deployments/metadata-grpc-deployment.yaml rename to data-science-pipelines/overlays/component-mlmd/deployments/metadata-grpc-deployment.yaml index 57ee13439..8df457f40 100644 --- a/ml-pipelines/base/deployments/metadata-grpc-deployment.yaml +++ b/data-science-pipelines/overlays/component-mlmd/deployments/metadata-grpc-deployment.yaml @@ -55,16 +55,16 @@ spec: key: dbPort name: $(pipeline_install_configuration) image: metadata-grpc + name: container + ports: + - containerPort: 8080 + name: grpc-api livenessProbe: - initialDelaySeconds: 3 + initialDelaySeconds: 30 periodSeconds: 5 tcpSocket: port: grpc-api timeoutSeconds: 2 - name: container - ports: - - containerPort: 8080 - name: grpc-api readinessProbe: initialDelaySeconds: 3 periodSeconds: 5 diff --git a/ml-pipelines/base/deployments/metadata-writer.yaml b/data-science-pipelines/overlays/component-mlmd/deployments/metadata-writer.yaml similarity index 68% rename from ml-pipelines/base/deployments/metadata-writer.yaml rename to data-science-pipelines/overlays/component-mlmd/deployments/metadata-writer.yaml index cd19a669c..82dff32fa 100644 --- 
a/ml-pipelines/base/deployments/metadata-writer.yaml +++ b/data-science-pipelines/overlays/component-mlmd/deployments/metadata-writer.yaml @@ -32,4 +32,20 @@ spec: name: $(ml_pipelines_configuration) image: metadata-writer name: main + livenessProbe: + exec: + command: + - pidof + - python3 + initialDelaySeconds: 30 + periodSeconds: 5 + timeoutSeconds: 2 + readinessProbe: + exec: + command: + - pidof + - python3 + initialDelaySeconds: 3 + periodSeconds: 5 + timeoutSeconds: 2 serviceAccountName: kubeflow-pipelines-metadata-writer diff --git a/data-science-pipelines/overlays/component-mlmd/kustomization.yaml b/data-science-pipelines/overlays/component-mlmd/kustomization.yaml new file mode 100644 index 000000000..85628ad97 --- /dev/null +++ b/data-science-pipelines/overlays/component-mlmd/kustomization.yaml @@ -0,0 +1,40 @@ +--- +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +bases: + - ../../base + +resources: + # Roles + - ./roles/kubeflow-pipelines-metadata-writer-role.yaml + + # Rolebindings + - ./rolebindings/kubeflow-pipelines-metadata-writer-binding.yaml + + # ServiceAccounts + - ./serviceaccounts/kubeflow-pipelines-metadata-writer.yaml + - ./serviceaccounts/metadata-grpc-server.yaml + + # Deployments + - ./deployments/metadata-envoy-deployment.yaml + - ./deployments/metadata-grpc-deployment.yaml + - ./deployments/metadata-writer.yaml + + # Services + - ./services/metadata-envoy-service.yaml + - ./services/metadata-grpc-service.yaml + +generatorOptions: + disableNameSuffixHash: true + +images: + - name: metadata-envoy + newName: gcr.io/ml-pipeline/metadata-envoy + newTag: 1.7.0 + - name: metadata-grpc + newName: gcr.io/tfx-oss-public/ml_metadata_store_server + newTag: 1.0.0 + - name: metadata-writer + newName: quay.io/internaldatahub/metadata-writer + newTag: 1.1.0 diff --git a/ml-pipelines/base/rolebindings/kubeflow-pipelines-metadata-writer-binding.yaml b/data-science-pipelines/overlays/component-mlmd/rolebindings/kubeflow-pipelines-metadata-writer-binding.yaml similarity index 100% rename from ml-pipelines/base/rolebindings/kubeflow-pipelines-metadata-writer-binding.yaml rename to data-science-pipelines/overlays/component-mlmd/rolebindings/kubeflow-pipelines-metadata-writer-binding.yaml diff --git a/ml-pipelines/base/roles/kubeflow-pipelines-metadata-writer-role.yaml b/data-science-pipelines/overlays/component-mlmd/roles/kubeflow-pipelines-metadata-writer-role.yaml similarity index 100% rename from ml-pipelines/base/roles/kubeflow-pipelines-metadata-writer-role.yaml rename to data-science-pipelines/overlays/component-mlmd/roles/kubeflow-pipelines-metadata-writer-role.yaml diff --git a/ml-pipelines/base/serviceaccounts/kubeflow-pipelines-metadata-writer.yaml b/data-science-pipelines/overlays/component-mlmd/serviceaccounts/kubeflow-pipelines-metadata-writer.yaml similarity index 100% rename from ml-pipelines/base/serviceaccounts/kubeflow-pipelines-metadata-writer.yaml rename to data-science-pipelines/overlays/component-mlmd/serviceaccounts/kubeflow-pipelines-metadata-writer.yaml diff --git a/ml-pipelines/base/serviceaccounts/metadata-grpc-server.yaml b/data-science-pipelines/overlays/component-mlmd/serviceaccounts/metadata-grpc-server.yaml similarity index 100% rename from ml-pipelines/base/serviceaccounts/metadata-grpc-server.yaml rename to data-science-pipelines/overlays/component-mlmd/serviceaccounts/metadata-grpc-server.yaml diff --git a/ml-pipelines/base/services/metadata-envoy-service.yaml 
b/data-science-pipelines/overlays/component-mlmd/services/metadata-envoy-service.yaml similarity index 100% rename from ml-pipelines/base/services/metadata-envoy-service.yaml rename to data-science-pipelines/overlays/component-mlmd/services/metadata-envoy-service.yaml diff --git a/ml-pipelines/base/services/metadata-grpc-service.yaml b/data-science-pipelines/overlays/component-mlmd/services/metadata-grpc-service.yaml similarity index 100% rename from ml-pipelines/base/services/metadata-grpc-service.yaml rename to data-science-pipelines/overlays/component-mlmd/services/metadata-grpc-service.yaml diff --git a/data-science-pipelines/overlays/ds-pipeline-ui/configmaps/ds-pipeline-ui-configmap.yaml b/data-science-pipelines/overlays/ds-pipeline-ui/configmaps/ds-pipeline-ui-configmap.yaml new file mode 100644 index 000000000..9419642fc --- /dev/null +++ b/data-science-pipelines/overlays/ds-pipeline-ui/configmaps/ds-pipeline-ui-configmap.yaml @@ -0,0 +1,13 @@ +apiVersion: v1 +data: + viewer-pod-template.json: |- + { + "spec": { + "serviceAccountName": "ds-pipelines-viewer" + } + } +kind: ConfigMap +metadata: + labels: + application-crd-id: data-science-pipelines + name: ds-pipeline-ui-configmap diff --git a/ml-pipelines/overlays/ml-pipeline-ui/deployments/ml-pipeline-ui.yaml b/data-science-pipelines/overlays/ds-pipeline-ui/deployments/ds-pipeline-ui.yaml similarity index 83% rename from ml-pipelines/overlays/ml-pipeline-ui/deployments/ml-pipeline-ui.yaml rename to data-science-pipelines/overlays/ds-pipeline-ui/deployments/ds-pipeline-ui.yaml index 3be4830c7..d4cc68492 100644 --- a/ml-pipelines/overlays/ml-pipeline-ui/deployments/ml-pipeline-ui.yaml +++ b/data-science-pipelines/overlays/ds-pipeline-ui/deployments/ds-pipeline-ui.yaml @@ -2,34 +2,34 @@ apiVersion: apps/v1 kind: Deployment metadata: labels: - app: ml-pipeline-ui - application-crd-id: kubeflow-pipelines - name: ml-pipeline-ui + app: ds-pipeline-ui + application-crd-id: data-science-pipelines + name: ds-pipeline-ui spec: selector: matchLabels: - app: ml-pipeline-ui - application-crd-id: kubeflow-pipelines + app: ds-pipeline-ui + application-crd-id: data-science-pipelines template: metadata: annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: "true" labels: - app: ml-pipeline-ui - application-crd-id: kubeflow-pipelines + app: ds-pipeline-ui + application-crd-id: data-science-pipelines spec: containers: - name: oauth-proxy args: - --https-address=:8443 - --provider=openshift - - --openshift-service-account=ml-pipeline-ui + - --openshift-service-account=ds-pipeline-ui - --upstream=http://localhost:3000 - --tls-cert=/etc/tls/private/tls.crt - --tls-key=/etc/tls/private/tls.key - --cookie-secret=SECRET - - '--openshift-delegate-urls={"/": {"resource": "route", "verb": "get", "name": "ml-pipeline-ui"}}' - - '--openshift-sar={"resource": "route", "resourceName": "ml-pipeline-ui", "verb": "get"}' + - '--openshift-delegate-urls={"/": {"resource": "route", "verb": "get", "name": "ds-pipeline-ui"}}' + - '--openshift-sar={"resource": "route", "resourceName": "ds-pipeline-ui", "verb": "get"}' - --skip-auth-regex='(^/metrics|^/apis/v1beta1/healthz)' image: registry.redhat.io/openshift4/ose-oauth-proxy:v4.8 ports: @@ -86,6 +86,10 @@ spec: value: "true" - name: ARGO_ARCHIVE_LOGS value: "true" + - name: ML_PIPELINE_SERVICE_HOST + value: ds-pipeline + - name: ML_PIPELINE_SERVICE_PORT + value: '8888' image: frontend imagePullPolicy: IfNotPresent livenessProbe: @@ -100,7 +104,7 @@ spec: initialDelaySeconds: 3 periodSeconds: 5 timeoutSeconds: 2 - 
name: ml-pipeline-ui + name: ds-pipeline-ui ports: - containerPort: 3000 readinessProbe: @@ -123,11 +127,11 @@ spec: - mountPath: /etc/config name: config-volume readOnly: true - serviceAccountName: ml-pipeline-ui + serviceAccountName: ds-pipeline-ui volumes: - configMap: - name: $(ml_pipelines_ui_configuration) + name: $(ds_pipelines_ui_configuration) name: config-volume - name: proxy-tls secret: - secretName: ml-pipelines-ui-proxy-tls + secretName: ds-pipelines-ui-proxy-tls diff --git a/ml-pipelines/overlays/ml-pipeline-ui/kustomization.yaml b/data-science-pipelines/overlays/ds-pipeline-ui/kustomization.yaml similarity index 59% rename from ml-pipelines/overlays/ml-pipeline-ui/kustomization.yaml rename to data-science-pipelines/overlays/ds-pipeline-ui/kustomization.yaml index 64575d331..dfb7c1b27 100644 --- a/ml-pipelines/overlays/ml-pipeline-ui/kustomization.yaml +++ b/data-science-pipelines/overlays/ds-pipeline-ui/kustomization.yaml @@ -6,27 +6,27 @@ bases: - ../../base resources: - # Roles - - ./roles/ml-pipeline-ui.yaml + # Configmaps + - ./configmaps/ds-pipeline-ui-configmap.yaml + + # Deployments + - ./deployments/ds-pipeline-ui.yaml # Rolebindings - - ./rolebindings/ml-pipeline-ui.yaml - ./rolebindings/auth-delegator.yaml + - ./rolebindings/ds-pipeline-ui.yaml - # ServiceAccounts - - ./serviceaccounts/ml-pipeline-ui.yaml + # Roles + - ./roles/ds-pipeline-ui.yaml - # Configmaps - - ./configmaps/ml-pipeline-ui-configmap.yaml + # Routes + - ./routes/ds-pipeline-ui.yaml - # Deployments - - ./deployments/ml-pipeline-ui.yaml + # ServiceAccounts + - ./serviceaccounts/ds-pipeline-ui.yaml # Services - - ./services/ml-pipeline-ui.yaml - - # Routes - - ./routes/ml-pipeline-ui.yaml + - ./services/ds-pipeline-ui.yaml generatorOptions: disableNameSuffixHash: true diff --git a/ml-pipelines/overlays/ml-pipeline-ui/rolebindings/auth-delegator.yaml b/data-science-pipelines/overlays/ds-pipeline-ui/rolebindings/auth-delegator.yaml similarity index 69% rename from ml-pipelines/overlays/ml-pipeline-ui/rolebindings/auth-delegator.yaml rename to data-science-pipelines/overlays/ds-pipeline-ui/rolebindings/auth-delegator.yaml index 638ac45a0..58746d815 100644 --- a/ml-pipelines/overlays/ml-pipeline-ui/rolebindings/auth-delegator.yaml +++ b/data-science-pipelines/overlays/ds-pipeline-ui/rolebindings/auth-delegator.yaml @@ -1,11 +1,12 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: - name: ml-pipeline-ui-auth-delegator + name: ds-pipeline-ui-auth-delegator roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: system:auth-delegator subjects: - kind: ServiceAccount - name: ml-pipeline-ui + namespace: $(namespace) + name: ds-pipeline-ui diff --git a/ml-pipelines/overlays/ml-pipeline-ui/rolebindings/ml-pipeline-ui.yaml b/data-science-pipelines/overlays/ds-pipeline-ui/rolebindings/ds-pipeline-ui.yaml similarity index 55% rename from ml-pipelines/overlays/ml-pipeline-ui/rolebindings/ml-pipeline-ui.yaml rename to data-science-pipelines/overlays/ds-pipeline-ui/rolebindings/ds-pipeline-ui.yaml index d3fa91c3b..b7ff6afb6 100644 --- a/ml-pipelines/overlays/ml-pipeline-ui/rolebindings/ml-pipeline-ui.yaml +++ b/data-science-pipelines/overlays/ds-pipeline-ui/rolebindings/ds-pipeline-ui.yaml @@ -2,13 +2,13 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding metadata: labels: - app: ml-pipeline-ui - application-crd-id: kubeflow-pipelines - name: ml-pipeline-ui + app: ds-pipeline-ui + application-crd-id: data-science-pipelines + name: ds-pipeline-ui roleRef: apiGroup: 
rbac.authorization.k8s.io kind: Role - name: ml-pipeline-ui + name: ds-pipeline-ui subjects: - kind: ServiceAccount - name: ml-pipeline-ui + name: ds-pipeline-ui diff --git a/ml-pipelines/overlays/ml-pipeline-ui/roles/ml-pipeline-ui.yaml b/data-science-pipelines/overlays/ds-pipeline-ui/roles/ds-pipeline-ui.yaml similarity index 89% rename from ml-pipelines/overlays/ml-pipeline-ui/roles/ml-pipeline-ui.yaml rename to data-science-pipelines/overlays/ds-pipeline-ui/roles/ds-pipeline-ui.yaml index 56e542e82..7b75951ab 100644 --- a/ml-pipelines/overlays/ml-pipeline-ui/roles/ml-pipeline-ui.yaml +++ b/data-science-pipelines/overlays/ds-pipeline-ui/roles/ds-pipeline-ui.yaml @@ -2,9 +2,9 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: labels: - app: ml-pipeline-ui - application-crd-id: kubeflow-pipelines - name: ml-pipeline-ui + app: ds-pipeline-ui + application-crd-id: data-science-pipelines + name: ds-pipeline-ui rules: - apiGroups: - "" diff --git a/ml-pipelines/overlays/ml-pipeline-ui/routes/ml-pipeline-ui.yaml b/data-science-pipelines/overlays/ds-pipeline-ui/routes/ds-pipeline-ui.yaml similarity index 80% rename from ml-pipelines/overlays/ml-pipeline-ui/routes/ml-pipeline-ui.yaml rename to data-science-pipelines/overlays/ds-pipeline-ui/routes/ds-pipeline-ui.yaml index 1fd12b8b4..59d68fe7e 100644 --- a/ml-pipelines/overlays/ml-pipeline-ui/routes/ml-pipeline-ui.yaml +++ b/data-science-pipelines/overlays/ds-pipeline-ui/routes/ds-pipeline-ui.yaml @@ -1,13 +1,13 @@ kind: Route apiVersion: route.openshift.io/v1 metadata: - name: ml-pipeline-ui + name: ds-pipeline-ui annotations: kubernetes.io/tls-acme: "true" spec: to: kind: Service - name: ml-pipeline-ui + name: ds-pipeline-ui port: targetPort: 8443 tls: diff --git a/ml-pipelines/overlays/ml-pipeline-ui/serviceaccounts/ml-pipeline-ui.yaml b/data-science-pipelines/overlays/ds-pipeline-ui/serviceaccounts/ds-pipeline-ui.yaml similarity index 63% rename from ml-pipelines/overlays/ml-pipeline-ui/serviceaccounts/ml-pipeline-ui.yaml rename to data-science-pipelines/overlays/ds-pipeline-ui/serviceaccounts/ds-pipeline-ui.yaml index 85cab2404..d216a3b2e 100644 --- a/ml-pipelines/overlays/ml-pipeline-ui/serviceaccounts/ml-pipeline-ui.yaml +++ b/data-science-pipelines/overlays/ds-pipeline-ui/serviceaccounts/ds-pipeline-ui.yaml @@ -2,7 +2,7 @@ apiVersion: v1 kind: ServiceAccount metadata: labels: - application-crd-id: kubeflow-pipelines + application-crd-id: data-science-pipelines annotations: - serviceaccounts.openshift.io/oauth-redirectreference.primary: '{"kind":"OAuthRedirectReference","apiVersion":"v1","reference":{"kind":"Route","name":"ml-pipeline-ui"}}' - name: ml-pipeline-ui + serviceaccounts.openshift.io/oauth-redirectreference.primary: '{"kind":"OAuthRedirectReference","apiVersion":"v1","reference":{"kind":"Route","name":"ds-pipeline-ui"}}' + name: ds-pipeline-ui diff --git a/data-science-pipelines/overlays/ds-pipeline-ui/services/ds-pipeline-ui.yaml b/data-science-pipelines/overlays/ds-pipeline-ui/services/ds-pipeline-ui.yaml new file mode 100644 index 000000000..0f3a8ff7a --- /dev/null +++ b/data-science-pipelines/overlays/ds-pipeline-ui/services/ds-pipeline-ui.yaml @@ -0,0 +1,18 @@ +apiVersion: v1 +kind: Service +metadata: + labels: + app: ds-pipeline-ui + application-crd-id: data-science-pipelines + annotations: + service.alpha.openshift.io/serving-cert-secret-name: ds-pipelines-ui-proxy-tls + name: ds-pipeline-ui +spec: + ports: + - name: http + port: 8443 + protocol: TCP + targetPort: 8443 + selector: + app: ds-pipeline-ui + 
application-crd-id: data-science-pipelines diff --git a/data-science-pipelines/overlays/integration-odhdashboard/kustomization.yaml b/data-science-pipelines/overlays/integration-odhdashboard/kustomization.yaml new file mode 100644 index 000000000..8cb3243e2 --- /dev/null +++ b/data-science-pipelines/overlays/integration-odhdashboard/kustomization.yaml @@ -0,0 +1,8 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization +commonLabels: + app: odh-dashboard + app.kubernetes.io/part-of: odh-dashboard +resources: +- ./odhapplications/data-science-pipelines-odhapplication.yaml +- ./odhquickstarts/data-science-pipelines-odhquickstart.yaml diff --git a/data-science-pipelines/overlays/integration-odhdashboard/odhapplications/data-science-pipelines-odhapplication.yaml b/data-science-pipelines/overlays/integration-odhdashboard/odhapplications/data-science-pipelines-odhapplication.yaml new file mode 100644 index 000000000..ea1427c52 --- /dev/null +++ b/data-science-pipelines/overlays/integration-odhdashboard/odhapplications/data-science-pipelines-odhapplication.yaml @@ -0,0 +1,71 @@ +apiVersion: dashboard.opendatahub.io/v1 +kind: OdhApplication +metadata: + name: data-science-pipelines + annotations: + opendatahub.io/categories: 'Model development,Model training,Model optimization,Data analysis,Data preprocessing' +spec: + beta: true + betaTitle: Data Science Pipelines + betaText: This application is available for early access prior to official release. + displayName: Data Science Pipelines + description: Data Science Pipelines is a workflow platform with a focus on enabling Machine Learning operations such as Model development, experimentation, orchestration and automation. + provider: Red Hat + category: ODH Core + support: Open Data Hub + docsLink: https://www.kubeflow.org/docs/components/pipelines/ + quickStart: create-data-science-pipeline + img: 'RedHat-Logo-Hat-Color' + getStartedLink: https://www.kubeflow.org/docs/started/ + enable: + title: Enable Data Science Pipelines + actionLabel: Enable + description: |- + Clicking enable will add a card to the Enabled page to access the Data Science Pipelines interface. + + Before enabling, be sure you have installed OpenShift Pipelines and have an S3 Object store configured. + validationConfigMap: ds-pipelines-dashboardtile-validation-result + kfdefApplications: [] + #kfdefApplications: ['data-science-pipelines'] # https://github.com/opendatahub-io/odh-dashboard/issues/625 + route: ds-pipeline-ui + internalRoute: ds-pipeline-ui + getStartedMarkDown: |- + # Getting Started With Data Science Pipelines + Below is a list of samples that currently run end to end, taking the compiled Tekton YAML and deploying it directly on a Tekton cluster. If you are interested in the larger list of pipeline samples that we are testing for 'compile to Tekton' support, please [look at the corresponding status page](https://github.com/opendatahub-io/ml-pipelines/tree/master/sdk/python/tests/README.md). + [DSP Tekton User Guide](https://github.com/opendatahub-io/ml-pipelines/tree/master/guides/kfp-user-guide) is a guideline for the possible ways to develop and consume Data Science Pipelines. It's recommended to go over at least one of the methods in the user guide before heading into the KFP Tekton Samples. + ## Prerequisites + - Install [OpenShift Pipelines Operator](https://docs.openshift.com/container-platform/4.7/cicd/pipelines/installing-pipelines.html).
Then connect to the cluster from the current shell with `oc` + + - Install [kfp-tekton](https://github.com/opendatahub-io/ml-pipelines/tree/master/sdk/README.md) SDK + + ``` + # Set up the python virtual environment + python3 -m venv .venv + source .venv/bin/activate + + # Install the kfp-tekton SDK + pip install kfp-tekton + ``` + + ## Samples + - [MNIST End to End example with DSP components](https://github.com/opendatahub-io/ml-pipelines/tree/master/samples/e2e-mnist) + + - [Hyperparameter tuning using Katib](https://github.com/opendatahub-io/ml-pipelines/tree/master/samples/katib) + + - [Trusted AI Pipeline with AI Fairness 360 and Adversarial Robustness 360 components](https://github.com/opendatahub-io/ml-pipelines/tree/master/samples/trusted-ai) + + - [Training and Serving Models with Watson Machine Learning](https://github.com/opendatahub-io/ml-pipelines/tree/master/samples/watson-train-serve#training-and-serving-models-with-watson-machine-learning) + + - [Lightweight python components example](https://github.com/opendatahub-io/ml-pipelines/tree/master/samples/lightweight-component) + + - [The flip-coin pipeline](https://github.com/opendatahub-io/ml-pipelines/tree/master/samples/flip-coin) + + - [Nested pipeline example](https://github.com/opendatahub-io/ml-pipelines/tree/master/samples/nested-pipeline) + + - [Pipeline with Nested loops](https://github.com/opendatahub-io/ml-pipelines/tree/master/samples/nested-loops) + + - [Using Tekton Custom Task on DSP](https://github.com/opendatahub-io/ml-pipelines/tree/master/samples/tekton-custom-task) + + - [The flip-coin pipeline using custom task](https://github.com/opendatahub-io/ml-pipelines/tree/master/samples/flip-coin-custom-task) + + - [Retrieve DSP run metadata using Kubernetes downstream API](https://github.com/opendatahub-io/ml-pipelines/tree/master/samples/k8s-downstream-api) diff --git a/data-science-pipelines/overlays/integration-odhdashboard/odhquickstarts/data-science-pipelines-odhquickstart.yaml b/data-science-pipelines/overlays/integration-odhdashboard/odhquickstarts/data-science-pipelines-odhquickstart.yaml new file mode 100644 index 000000000..5b7ff46a3 --- /dev/null +++ b/data-science-pipelines/overlays/integration-odhdashboard/odhquickstarts/data-science-pipelines-odhquickstart.yaml @@ -0,0 +1,89 @@ +apiVersion: console.openshift.io/v1 +kind: OdhQuickStart +metadata: + name: create-data-science-pipeline + annotations: + opendatahub.io/categories: 'Getting started,Model development,Model training,Model optimization,Data analysis,Data preprocessing' +spec: + displayName: Creating a Data Science Pipeline + appName: data-science-pipelines + durationMinutes: 5 + icon: TODO + description: Create a simple pipeline that automatically runs tasks in a machine learning deployment workflow. + introduction: |- + ### This quick start shows you how to create a Data Science Pipeline. + Open Data Hub lets you run Data Science Pipelines in a scalable OpenShift hybrid cloud environment. + This quickstart shows you how to compile, create, and run a simple example pipeline using the Kubeflow Pipelines Python SDK and the Data Science Pipelines UI. + tasks: + - title: Launch Data Science Pipelines + description: |- + ### To find the Data Science Pipelines Launch action: + 1. Click **Applications** → **Enabled**. + 2. Find the Data Science Pipelines card. + 3. Click **Launch** on the Data Science Pipelines card to access the **Pipelines dashboard**. + A new browser tab will open displaying the **Pipelines Dashboard** page.
+ review: + instructions: |- + #### To verify you have launched Data Science Pipelines: + Is a new **Data Science Pipelines** browser tab visible with the **Dashboard** page open? + failedTaskHelp: This task is not verified yet. Try the task again. + summary: + success: You have launched Data Science Pipelines. + failed: Try the steps again. + + - title: Install Python SDK and compile sample pipeline + description: |- + ### Install the Kubeflow Pipelines Python SDK + 1. Follow the [Kubeflow Pipelines Tekton Python SDK Installation instructions](https://github.com/opendatahub-io/ml-pipelines/blob/master/samples/README.md#prerequisites) + 2. Download, clone or copy the [flip-coin example pipeline](https://github.com/opendatahub-io/ml-pipelines/blob/master/samples/flip-coin/condition.py) + 3. Compile the Python pipeline definition into a Tekton YAML: + ``` + python condition.py + ``` + review: + instructions: |- + #### To verify you compiled the flip-coin sample pipeline: + Is there now a `condition.yaml` file in the directory you downloaded `condition.py` from? + failedTaskHelp: This task is not verified yet. Try the task again. + summary: + success: You have installed the Kubeflow Pipelines Tekton SDK and compiled a sample pipeline definition into a Tekton YAML. + failed: Try the steps again. + + - title: Create a Pipeline + description: |- + ### Create a simple Pipeline from the compiled example pipeline: + 1. Click the **+Upload Pipeline** button in the top right corner + 2. Leave the **Create a new pipeline** radio button selected + 3. Type a pipeline name in the **Pipeline Name** field + 4. Add a short description in the **Pipeline Description** field + 5. Select the **Upload a file** radio button and click **Choose file** in the **File** text box + 6. Find and select the condition.yaml file you compiled from the previous step + 7. Click **Create** + The Data Science Pipelines **Upload Pipeline** page will redirect to a graph of the Pipeline you created + review: + instructions: |- + #### To verify that you have created a Pipeline: + Do you see a graph/chart in the shape of a flow diagram that is titled with your sample pipeline's name? + failedTaskHelp: This task is not verified yet. Try the task again. + summary: + success: You have successfully created a Data Science Pipeline. + failed: Try the steps again. + + - title: Run the Pipeline + description: |- + ### Run the pipeline created in the previous step: + 1. Click the **+ Create run** button in the top right corner. You will be redirected to a **Start a run** form. + 2. Click the **Choose** button in the **Experiment** text field. Select the **Default** experiment. + 3. Leave all other fields the same. + 4. Click the **Start** button. + You will now be redirected to the **Default** Experiment page. You should see an execution of the pipeline you created in the **Active** list of runs. + review: + instructions: |- + #### To verify that you have executed a Pipeline run: + Are you on the **Experiments** page of the Data Science Pipelines UI? Do you see an entry under **Active** runs with the name of the pipeline you created? + failedTaskHelp: This task is not verified yet. Try the task again. + summary: + success: You have successfully run a Data Science Pipeline. + failed: Try the steps again. + conclusion: You are now able to create and run a sample Data Science Pipeline!
+ nextQuickStart: [] diff --git a/data-science-pipelines/overlays/metadata-store-mariadb/deployments/mariadb.yaml b/data-science-pipelines/overlays/metadata-store-mariadb/deployments/mariadb.yaml new file mode 100644 index 000000000..7b4e127dd --- /dev/null +++ b/data-science-pipelines/overlays/metadata-store-mariadb/deployments/mariadb.yaml @@ -0,0 +1,81 @@ +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: mariadb + application-crd-id: data-science-pipelines + name: mariadb +spec: + replicas: 1 + strategy: + type: Recreate # Need this since backing PVC is ReadWriteOnce, which creates resource lock condition in default Rolling strategy + selector: + matchLabels: + app: mariadb + application-crd-id: data-science-pipelines + template: + metadata: + labels: + app: mariadb + application-crd-id: data-science-pipelines + spec: + containers: + - name: mariadb + image: mysql + ports: + - containerPort: 3306 + readinessProbe: + exec: + command: + - /bin/sh + - "-i" + - "-c" + - >- + MYSQL_PWD=$MYSQL_PASSWORD mysql -h 127.0.0.1 -u $MYSQL_USER -D + $MYSQL_DATABASE -e 'SELECT 1' + failureThreshold: 3 + initialDelaySeconds: 5 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + livenessProbe: + failureThreshold: 3 + initialDelaySeconds: 30 + periodSeconds: 10 + successThreshold: 1 + tcpSocket: + port: 3306 + timeoutSeconds: 1 + env: + - name: MYSQL_USER + valueFrom: + secretKeyRef: + name: $(database_secret) + key: username + - name: MYSQL_PASSWORD + valueFrom: + secretKeyRef: + name: $(database_secret) + key: password + - name: MYSQL_DATABASE + valueFrom: + configMapKeyRef: + key: pipelineDb + name: $(pipeline_install_configuration) + - name: MYSQL_ALLOW_EMPTY_PASSWORD + value: "true" + resources: + requests: + cpu: 300m + memory: 800Mi + limits: + cpu: "1" + memory: 1Gi + volumeMounts: + - name: mariadb-persistent-storage + mountPath: /var/lib/mysql + volumes: + - name: mariadb-persistent-storage + persistentVolumeClaim: + claimName: mariadb-pv-claim diff --git a/data-science-pipelines/overlays/metadata-store-mariadb/kustomization.yaml b/data-science-pipelines/overlays/metadata-store-mariadb/kustomization.yaml new file mode 100644 index 000000000..f70faa6c2 --- /dev/null +++ b/data-science-pipelines/overlays/metadata-store-mariadb/kustomization.yaml @@ -0,0 +1,30 @@ +--- +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +bases: + - ../../base + +resources: + # ServiceAccounts + - ./serviceaccounts/mysql.yaml + + # Secrets + - ./secrets/mysql-secret.yaml + + # PersistentVolumeClaims + - ./persistentvolumeclaims/mariadb-pv-claim.yaml + + # Deployments + - ./deployments/mariadb.yaml + + # Services + - ./services/mysql.yaml + +generatorOptions: + disableNameSuffixHash: true + +images: + - name: mysql + newName: registry.redhat.io/rhel8/mariadb-103 + newTag: "1-188" diff --git a/data-science-pipelines/overlays/metadata-store-mariadb/persistentvolumeclaims/mariadb-pv-claim.yaml b/data-science-pipelines/overlays/metadata-store-mariadb/persistentvolumeclaims/mariadb-pv-claim.yaml new file mode 100644 index 000000000..e1fe73073 --- /dev/null +++ b/data-science-pipelines/overlays/metadata-store-mariadb/persistentvolumeclaims/mariadb-pv-claim.yaml @@ -0,0 +1,12 @@ +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + labels: + application-crd-id: data-science-pipelines + name: mariadb-pv-claim +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 20Gi diff --git 
a/data-science-pipelines/overlays/metadata-store-mariadb/secrets/mysql-secret.yaml b/data-science-pipelines/overlays/metadata-store-mariadb/secrets/mysql-secret.yaml new file mode 100644 index 000000000..bea75135e --- /dev/null +++ b/data-science-pipelines/overlays/metadata-store-mariadb/secrets/mysql-secret.yaml @@ -0,0 +1,9 @@ +apiVersion: v1 +kind: Secret +metadata: + labels: + application-crd-id: data-science-pipelines + name: mysql-secret +stringData: + password: mlpipeline # override this + username: mlpipeline # override this diff --git a/data-science-pipelines/overlays/metadata-store-mariadb/serviceaccounts/mysql.yaml b/data-science-pipelines/overlays/metadata-store-mariadb/serviceaccounts/mysql.yaml new file mode 100644 index 000000000..cfc492121 --- /dev/null +++ b/data-science-pipelines/overlays/metadata-store-mariadb/serviceaccounts/mysql.yaml @@ -0,0 +1,6 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + labels: + application-crd-id: data-science-pipelines + name: mysql diff --git a/data-science-pipelines/overlays/metadata-store-mariadb/services/mysql.yaml b/data-science-pipelines/overlays/metadata-store-mariadb/services/mysql.yaml new file mode 100644 index 000000000..f03fee4da --- /dev/null +++ b/data-science-pipelines/overlays/metadata-store-mariadb/services/mysql.yaml @@ -0,0 +1,14 @@ +apiVersion: v1 +kind: Service +metadata: + labels: + application-crd-id: data-science-pipelines + name: mysql +spec: + ports: + - port: 3306 + protocol: TCP + targetPort: 3306 + selector: + app: mariadb + application-crd-id: data-science-pipelines diff --git a/ml-pipelines/overlays/metadata-store-mysql/deployments/mysql.yaml b/data-science-pipelines/overlays/metadata-store-mysql/deployments/mysql.yaml similarity index 65% rename from ml-pipelines/overlays/metadata-store-mysql/deployments/mysql.yaml rename to data-science-pipelines/overlays/metadata-store-mysql/deployments/mysql.yaml index 20debd63e..1f1e3f922 100644 --- a/ml-pipelines/overlays/metadata-store-mysql/deployments/mysql.yaml +++ b/data-science-pipelines/overlays/metadata-store-mysql/deployments/mysql.yaml @@ -31,10 +31,29 @@ spec: ports: - containerPort: 3306 name: mysql + livenessProbe: + failureThreshold: 3 + initialDelaySeconds: 30 + periodSeconds: 10 + successThreshold: 1 + tcpSocket: + port: 3306 + timeoutSeconds: 1 + readinessProbe: + failureThreshold: 3 + initialDelaySeconds: 5 + periodSeconds: 10 + successThreshold: 1 + tcpSocket: + port: 3306 + timeoutSeconds: 1 resources: requests: - cpu: 100m + cpu: 300m memory: 800Mi + limits: + cpu: "1" + memory: 1Gi volumeMounts: - mountPath: /var/lib/mysql name: mysql-persistent-storage diff --git a/ml-pipelines/overlays/metadata-store-mysql/kustomization.yaml b/data-science-pipelines/overlays/metadata-store-mysql/kustomization.yaml similarity index 100% rename from ml-pipelines/overlays/metadata-store-mysql/kustomization.yaml rename to data-science-pipelines/overlays/metadata-store-mysql/kustomization.yaml diff --git a/ml-pipelines/overlays/metadata-store-mysql/persistentvolumeclaims/mysql-pv-claim.yaml b/data-science-pipelines/overlays/metadata-store-mysql/persistentvolumeclaims/mysql-pv-claim.yaml similarity index 100% rename from ml-pipelines/overlays/metadata-store-mysql/persistentvolumeclaims/mysql-pv-claim.yaml rename to data-science-pipelines/overlays/metadata-store-mysql/persistentvolumeclaims/mysql-pv-claim.yaml diff --git a/ml-pipelines/overlays/metadata-store-mysql/secrets/mysql-secret.yaml 
b/data-science-pipelines/overlays/metadata-store-mysql/secrets/mysql-secret.yaml similarity index 100% rename from ml-pipelines/overlays/metadata-store-mysql/secrets/mysql-secret.yaml rename to data-science-pipelines/overlays/metadata-store-mysql/secrets/mysql-secret.yaml diff --git a/ml-pipelines/overlays/metadata-store-mysql/serviceaccounts/mysql.yaml b/data-science-pipelines/overlays/metadata-store-mysql/serviceaccounts/mysql.yaml similarity index 100% rename from ml-pipelines/overlays/metadata-store-mysql/serviceaccounts/mysql.yaml rename to data-science-pipelines/overlays/metadata-store-mysql/serviceaccounts/mysql.yaml diff --git a/ml-pipelines/overlays/metadata-store-mysql/services/mysql.yaml b/data-science-pipelines/overlays/metadata-store-mysql/services/mysql.yaml similarity index 100% rename from ml-pipelines/overlays/metadata-store-mysql/services/mysql.yaml rename to data-science-pipelines/overlays/metadata-store-mysql/services/mysql.yaml diff --git a/ml-pipelines/overlays/metadata-store-postgresql/deployments/postgresql.yaml b/data-science-pipelines/overlays/metadata-store-postgresql/deployments/postgresql.yaml similarity index 94% rename from ml-pipelines/overlays/metadata-store-postgresql/deployments/postgresql.yaml rename to data-science-pipelines/overlays/metadata-store-postgresql/deployments/postgresql.yaml index c93fb7243..d4d11ca8b 100644 --- a/ml-pipelines/overlays/metadata-store-postgresql/deployments/postgresql.yaml +++ b/data-science-pipelines/overlays/metadata-store-postgresql/deployments/postgresql.yaml @@ -35,6 +35,8 @@ spec: - name: POSTGRESQL_DATABASE value: kfp-tekton imagePullPolicy: Always + ports: + - containerPort: 5432 livenessProbe: exec: command: @@ -42,8 +44,6 @@ spec: - --live initialDelaySeconds: 120 timeoutSeconds: 10 - ports: - - containerPort: 5432 readinessProbe: exec: command: @@ -52,8 +52,11 @@ spec: timeoutSeconds: 1 resources: requests: - cpu: 100m + cpu: 300m memory: 800Mi + limits: + cpu: "1" + memory: 1Gi volumeMounts: - mountPath: /var/lib/pgsql/data name: postgresql-pv-claim diff --git a/ml-pipelines/overlays/metadata-store-postgresql/kustomization.yaml b/data-science-pipelines/overlays/metadata-store-postgresql/kustomization.yaml similarity index 100% rename from ml-pipelines/overlays/metadata-store-postgresql/kustomization.yaml rename to data-science-pipelines/overlays/metadata-store-postgresql/kustomization.yaml diff --git a/ml-pipelines/overlays/metadata-store-postgresql/persistentvolumeclaims/postgresql-pv-claim.yaml b/data-science-pipelines/overlays/metadata-store-postgresql/persistentvolumeclaims/postgresql-pv-claim.yaml similarity index 100% rename from ml-pipelines/overlays/metadata-store-postgresql/persistentvolumeclaims/postgresql-pv-claim.yaml rename to data-science-pipelines/overlays/metadata-store-postgresql/persistentvolumeclaims/postgresql-pv-claim.yaml diff --git a/ml-pipelines/overlays/metadata-store-postgresql/secrets/postgresql-secret.yaml b/data-science-pipelines/overlays/metadata-store-postgresql/secrets/postgresql-secret.yaml similarity index 100% rename from ml-pipelines/overlays/metadata-store-postgresql/secrets/postgresql-secret.yaml rename to data-science-pipelines/overlays/metadata-store-postgresql/secrets/postgresql-secret.yaml diff --git a/ml-pipelines/overlays/metadata-store-postgresql/serviceaccounts/postgresql.yaml b/data-science-pipelines/overlays/metadata-store-postgresql/serviceaccounts/postgresql.yaml similarity index 100% rename from 
ml-pipelines/overlays/metadata-store-postgresql/serviceaccounts/postgresql.yaml rename to data-science-pipelines/overlays/metadata-store-postgresql/serviceaccounts/postgresql.yaml diff --git a/ml-pipelines/overlays/metadata-store-postgresql/services/postgresql.yaml b/data-science-pipelines/overlays/metadata-store-postgresql/services/postgresql.yaml similarity index 100% rename from ml-pipelines/overlays/metadata-store-postgresql/services/postgresql.yaml rename to data-science-pipelines/overlays/metadata-store-postgresql/services/postgresql.yaml diff --git a/ml-pipelines/overlays/object-store-minio/deployments/minio.yaml b/data-science-pipelines/overlays/object-store-minio/deployments/minio.yaml similarity index 61% rename from ml-pipelines/overlays/object-store-minio/deployments/minio.yaml rename to data-science-pipelines/overlays/object-store-minio/deployments/minio.yaml index fb4abd2ae..ed99fcbd1 100644 --- a/ml-pipelines/overlays/object-store-minio/deployments/minio.yaml +++ b/data-science-pipelines/overlays/object-store-minio/deployments/minio.yaml @@ -3,20 +3,20 @@ kind: Deployment metadata: labels: app: minio - application-crd-id: kubeflow-pipelines + application-crd-id: data-science-pipelines name: minio spec: selector: matchLabels: app: minio - application-crd-id: kubeflow-pipelines + application-crd-id: data-science-pipelines strategy: type: Recreate template: metadata: labels: app: minio - application-crd-id: kubeflow-pipelines + application-crd-id: data-science-pipelines spec: containers: - args: @@ -37,10 +37,29 @@ spec: name: minio ports: - containerPort: 9000 + livenessProbe: + tcpSocket: + port: 9000 + initialDelaySeconds: 30 + timeoutSeconds: 1 + periodSeconds: 5 + successThreshold: 1 + failureThreshold: 3 + readinessProbe: + tcpSocket: + port: 9000 + initialDelaySeconds: 5 + timeoutSeconds: 1 + periodSeconds: 5 + successThreshold: 1 + failureThreshold: 3 resources: requests: cpu: 20m memory: 100Mi + limits: + cpu: 250m + memory: 1Gi volumeMounts: - mountPath: /data name: data diff --git a/ml-pipelines/overlays/object-store-minio/kustomization.yaml b/data-science-pipelines/overlays/object-store-minio/kustomization.yaml similarity index 100% rename from ml-pipelines/overlays/object-store-minio/kustomization.yaml rename to data-science-pipelines/overlays/object-store-minio/kustomization.yaml diff --git a/ml-pipelines/overlays/object-store-minio/persistentvolumeclaims/minio-pvc.yaml b/data-science-pipelines/overlays/object-store-minio/persistentvolumeclaims/minio-pvc.yaml similarity index 78% rename from ml-pipelines/overlays/object-store-minio/persistentvolumeclaims/minio-pvc.yaml rename to data-science-pipelines/overlays/object-store-minio/persistentvolumeclaims/minio-pvc.yaml index 86d360e42..b7ef6111c 100644 --- a/ml-pipelines/overlays/object-store-minio/persistentvolumeclaims/minio-pvc.yaml +++ b/data-science-pipelines/overlays/object-store-minio/persistentvolumeclaims/minio-pvc.yaml @@ -2,7 +2,7 @@ apiVersion: v1 kind: PersistentVolumeClaim metadata: labels: - application-crd-id: kubeflow-pipelines + application-crd-id: data-science-pipelines name: minio-pvc spec: accessModes: diff --git a/ml-pipelines/overlays/object-store-minio/secrets/mlpipeline-minio-artifact.yaml b/data-science-pipelines/overlays/object-store-minio/secrets/mlpipeline-minio-artifact.yaml similarity index 78% rename from ml-pipelines/overlays/object-store-minio/secrets/mlpipeline-minio-artifact.yaml rename to data-science-pipelines/overlays/object-store-minio/secrets/mlpipeline-minio-artifact.yaml 
index 4bfe4610a..5b4c0ee29 100644 --- a/ml-pipelines/overlays/object-store-minio/secrets/mlpipeline-minio-artifact.yaml +++ b/data-science-pipelines/overlays/object-store-minio/secrets/mlpipeline-minio-artifact.yaml @@ -2,7 +2,7 @@ apiVersion: v1 kind: Secret metadata: labels: - application-crd-id: kubeflow-pipelines + application-crd-id: data-science-pipelines name: mlpipeline-minio-artifact stringData: accesskey: minio # override this diff --git a/ml-pipelines/overlays/object-store-minio/services/minio-service.yaml b/data-science-pipelines/overlays/object-store-minio/services/minio-service.yaml similarity index 68% rename from ml-pipelines/overlays/object-store-minio/services/minio-service.yaml rename to data-science-pipelines/overlays/object-store-minio/services/minio-service.yaml index 015dc6b7f..06aae73c7 100644 --- a/ml-pipelines/overlays/object-store-minio/services/minio-service.yaml +++ b/data-science-pipelines/overlays/object-store-minio/services/minio-service.yaml @@ -2,7 +2,7 @@ apiVersion: v1 kind: Service metadata: labels: - application-crd-id: kubeflow-pipelines + application-crd-id: data-science-pipelines name: minio-service spec: ports: @@ -12,4 +12,4 @@ spec: targetPort: 9000 selector: app: minio - application-crd-id: kubeflow-pipelines + application-crd-id: data-science-pipelines diff --git a/ml-pipelines/prometheus/kustomization.yaml b/data-science-pipelines/prometheus/kustomization.yaml similarity index 100% rename from ml-pipelines/prometheus/kustomization.yaml rename to data-science-pipelines/prometheus/kustomization.yaml diff --git a/ml-pipelines/prometheus/monitor.yaml b/data-science-pipelines/prometheus/monitor.yaml similarity index 67% rename from ml-pipelines/prometheus/monitor.yaml rename to data-science-pipelines/prometheus/monitor.yaml index d34582a71..b9b5595f1 100644 --- a/ml-pipelines/prometheus/monitor.yaml +++ b/data-science-pipelines/prometheus/monitor.yaml @@ -2,7 +2,7 @@ apiVersion: monitoring.coreos.com/v1 kind: ServiceMonitor metadata: labels: - application-crd-id: kubeflow-pipelines + application-crd-id: data-science-pipelines name: ml-pipelines-metrics-monitor spec: endpoints: @@ -10,4 +10,4 @@ spec: port: http selector: matchLabels: - application-crd-id: kubeflow-pipelines + application-crd-id: data-science-pipelines diff --git a/kfdef/kfctl_openshift_ml-pipelines.yaml b/kfdef/kfctl_openshift_ds-pipelines.yaml similarity index 83% rename from kfdef/kfctl_openshift_ml-pipelines.yaml rename to kfdef/kfctl_openshift_ds-pipelines.yaml index fa565da43..8c5953c5b 100644 --- a/kfdef/kfctl_openshift_ml-pipelines.yaml +++ b/kfdef/kfctl_openshift_ds-pipelines.yaml @@ -13,12 +13,12 @@ spec: - kustomizeConfig: overlays: - metadata-store-mysql - - ml-pipeline-ui + - ds-pipeline-ui - object-store-minio repoRef: name: manifests - path: ml-pipelines - name: ml-pipelines + path: data-science-pipelines + name: data-science-pipelines repos: - name: manifests uri: "https://github.com/opendatahub-io/odh-manifests/tarball/master" diff --git a/ml-pipelines/README.md b/ml-pipelines/README.md deleted file mode 100644 index 3fd7e03af..000000000 --- a/ml-pipelines/README.md +++ /dev/null @@ -1,78 +0,0 @@ -# ML Pipelines - -ML Pipelines is the Open Data Hub's pipeline solution for data scientists. It is built on top of the upstream [Kubeflow Piplines](https://github.com/kubeflow/pipelines) and [kfp-tekton](https://github.com/kubeflow/kfp-tekton) projects. 
The Open Data Hub community has a [fork](https://github.com/opendatahub-io/ml-pipelines) of this upstream under the Open Data Hub org. - - -## Installation - -### Prerequisites - -1. The cluster needs to be OpenShift 4.9 or higher -2. OpenShift Pipelines 1.7.2 or higher needs to be installed on the cluster -3. The Open Data Hub operator needs to be installed -4. The default installation namespace for ML Pipelines is `odh-applications`. This namespace will need to be created. In case you wish to install in a custom location, create it and update the kfdef as documented below. - -### Installation Steps - -1. Ensure that the prerequisites are met. -2. Apply the kfdef at [kfctl_openshift_ml-pipelines.yaml](https://github.com/opendatahub-io/odh-manifests/blob/master/kfdef/kfctl_openshift_ml-pipelines.yaml). You may need to update the `namespace` field under `metadata` in case you want to deploy in a namespace that isn't `odh-applications`. -3. To find the url for ML pipelines, you can run the following command. - ```bash - $ oc get route -n ml-pipeline-ui -o jsonpath='{.spec.host}' - ``` - The value of `` should match the namespace field of the kfdef that you applied. -4. Alternatively, you can access the route via the console. To do so: - - 1. Go to `` - 2. Click on `Networking` in the sidebar on the left side. - 3. Click on `Routes`. It will take you to a new page in the console. - 4. Click the url under the `Location` column for the row item matching `ml-pipeline-ui` - - -## Directory Structure - -### Base - -This directory contains artifacts for deploying all backend components of ML Pipelines. This deployment currently includes the kfp-tekton backend as well as a Minio deployment to act as an object store. The Minio deployment will be moved to an overlay at some point in the near future. - -### Overlays - -1. metadata-store-mysql: This overlay contains artifacts for deploying a MySQL database. MySQL is currently the only supported backend for ML Pipelines, so if you don't have an existing MySQL database deployed, this overlay needs to be applied. -2. metadata-store-postgresql: This overlay contains artifacts for deploying a PostgreSQL database. ML Pipelines does not currently support PostgreSQL as a backend, so deploying this overlay will not actually modify ML Pipelines behaviour. -3. ml-pipeline-ui: This overlay contains deployment artifacts for the ML Pipelines UI. Deploying ML Pipelines without this overlay will result in only the backend artifacts being created. -4. object-store-minio: This overlay contains artifacts for deploying Minio as the Object Store to store Pipelines artifacts. - -### Prometheus - -This directory contains the service monitor definition for ML Pipelines. It is always deployed by base, so this will eventually be moved into the base directory itself. - -## Parameters - -You can customize the ML Pipelines deployment by injecting custom parameters to change the default deployment. The following parameters can be used: - -* **pipeline_install_configuration**: The ConfigMap name that contains the values to install the ML Pipelines environment. This parameter defaults to `pipeline-install-config` and you can find an example in the [repository](./base/configmaps/pipeline-install-config.yaml). -* **ml_pipelines_configuration**: The ConfigMap name that contains the values to integrate ML Pipelines with the underlying components (Database and Object Store). 
This parameter defaults to `kfp-tekton-config` and you can find an example in the [repository](./base/configmaps/kfp-tekton-config.yaml). -* **database_secret**: The secret that contains the credentials for the ML Pipelines Databse. It defaults to `mysql-secret` if using the `metadata-store-mysql` overlay or `postgresql-secret` if using the `metadata-store-postgresql` overlay. -* **ml_pipelines_ui_configuration**: The ConfigMap that contains the values to customize UI. It defaults to `ml-pipeline-ui-configmap`. - -## Configuration - -* It is possible to configure what S3 storage is being used by Pipeline Runs. Detailed instructions on how to configure this will be added once Minio is moved to an overlay. - -## Usage - -### These instructions will be updated once ML Pipelines has a tile available in odh-dashboard - -1. Go to the ml-pipelines-ui route. -2. Click on `Pipelines` on the left side. -3. There will be a `[Demo] flip-coin` Pipeline already available. Click on it. -4. Click on the blue `Create run` button towards the top of the screen. -5. You can leave all the fields untouched. If desired, you can create a new experiment to link the pipeline run to, or rename the run itself. -6. Click on the blue `Start` button. -7. You will be taken to the `Runs` page. You will see a row matching the `Run name` you previously picked. Click on the `Run name` in that row. -8. Once the Pipeline is done running, you can see a graph of all the pods that were created as well as the paths that were followed. -9. For further verification, you can view all the pods that were created as part of the Pipeline Run in the ``. They will all show up as `Completed`. - -## ML Pipelines Architecture - -A complete architecture can be found at [ODH ML Pipelines Architecture and Design](https://docs.google.com/document/d/1o-JS1uZKLZsMY3D16kl5KBdyBb-aV-kyD_XycdJOYpM/edit#heading=h.3aocw3evrps0). This document will be moved to GitHub once the corresponding ML Ops SIG repos are created. 
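As a quick sanity check after applying the updated KfDef, the sketch below lists a couple of `oc` queries against the renamed resources. It assumes the default `odh-applications` install namespace mentioned in the old README (substitute whatever namespace the KfDef was actually applied to); the label and route names come from the manifests in this patch, but the commands themselves are only an illustrative sketch, not part of the supported install flow.

```bash
# Assumed install namespace; substitute the namespace the KfDef was applied to.
NAMESPACE=odh-applications

# Every renamed component now carries the data-science-pipelines label on its pods.
oc get pods -n "$NAMESPACE" -l application-crd-id=data-science-pipelines

# The UI route is now named ds-pipeline-ui (previously ml-pipeline-ui).
oc get route ds-pipeline-ui -n "$NAMESPACE" -o jsonpath='{.spec.host}'
```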
diff --git a/ml-pipelines/base/deployments/ml-pipeline-persistenceagent.yaml b/ml-pipelines/base/deployments/ml-pipeline-persistenceagent.yaml deleted file mode 100644 index 00ddbe340..000000000 --- a/ml-pipelines/base/deployments/ml-pipeline-persistenceagent.yaml +++ /dev/null @@ -1,38 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - labels: - app: ml-pipeline-persistenceagent - application-crd-id: kubeflow-pipelines - name: ml-pipeline-persistenceagent -spec: - selector: - matchLabels: - app: ml-pipeline-persistenceagent - application-crd-id: kubeflow-pipelines - template: - metadata: - annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "true" - labels: - app: ml-pipeline-persistenceagent - application-crd-id: kubeflow-pipelines - spec: - containers: - - env: - - name: NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - - name: TTL_SECONDS_AFTER_WORKFLOW_FINISH - value: "86400" - - name: NUM_WORKERS - value: "2" - image: persistenceagent - imagePullPolicy: IfNotPresent - name: ml-pipeline-persistenceagent - resources: - requests: - cpu: 120m - memory: 500Mi - serviceAccountName: ml-pipeline-persistenceagent diff --git a/ml-pipelines/base/deployments/ml-pipeline-scheduledworkflow.yaml b/ml-pipelines/base/deployments/ml-pipeline-scheduledworkflow.yaml deleted file mode 100644 index bdcc5fe48..000000000 --- a/ml-pipelines/base/deployments/ml-pipeline-scheduledworkflow.yaml +++ /dev/null @@ -1,35 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - labels: - app: ml-pipeline-scheduledworkflow - application-crd-id: kubeflow-pipelines - name: ml-pipeline-scheduledworkflow -spec: - selector: - matchLabels: - app: ml-pipeline-scheduledworkflow - application-crd-id: kubeflow-pipelines - template: - metadata: - annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "true" - labels: - app: ml-pipeline-scheduledworkflow - application-crd-id: kubeflow-pipelines - spec: - containers: - - env: - - name: NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - - name: CRON_SCHEDULE_TIMEZONE - valueFrom: - configMapKeyRef: - key: cronScheduleTimezone - name: $(pipeline_install_configuration) - image: scheduledworkflow - imagePullPolicy: IfNotPresent - name: ml-pipeline-scheduledworkflow - serviceAccountName: ml-pipeline-scheduledworkflow diff --git a/ml-pipelines/base/deployments/ml-pipeline-viewer-crd.yaml b/ml-pipelines/base/deployments/ml-pipeline-viewer-crd.yaml deleted file mode 100644 index 6d6dd1bd4..000000000 --- a/ml-pipelines/base/deployments/ml-pipeline-viewer-crd.yaml +++ /dev/null @@ -1,36 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - labels: - app: ml-pipeline-viewer-crd - application-crd-id: kubeflow-pipelines - name: ml-pipeline-viewer-crd -spec: - selector: - matchLabels: - app: ml-pipeline-viewer-crd - application-crd-id: kubeflow-pipelines - template: - metadata: - annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "true" - labels: - app: ml-pipeline-viewer-crd - application-crd-id: kubeflow-pipelines - spec: - containers: - - env: - - name: MAX_NUM_VIEWERS - value: "50" - - name: NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - - name: MINIO_NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - image: viewer-crd-controller - imagePullPolicy: Always - name: ml-pipeline-viewer-crd - serviceAccountName: ml-pipeline-viewer-crd-service-account diff --git a/ml-pipelines/base/kustomization.yaml b/ml-pipelines/base/kustomization.yaml deleted file mode 100644 index 
7a49475ca..000000000 --- a/ml-pipelines/base/kustomization.yaml +++ /dev/null @@ -1,131 +0,0 @@ ---- -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization - -resources: - # CustomResourceDefinitions - - ./customresourcedefinitions/viewers.yaml - - ./customresourcedefinitions/scheduledworkflows.yaml - - # Roles - - ./roles/kubeflow-pipelines-metadata-writer-role.yaml - - ./roles/ml-pipeline-persistenceagent-role.yaml - - ./roles/ml-pipeline-scheduledworkflow-role.yaml - - ./roles/ml-pipeline-viewer-controller-role.yaml - - ./roles/ml-pipeline.yaml - - ./roles/pipeline-runner.yaml - - # Rolebindings - - ./rolebindings/kubeflow-pipelines-metadata-writer-binding.yaml - - ./rolebindings/ml-pipeline-persistenceagent-binding.yaml - - ./rolebindings/ml-pipeline-scheduledworkflow-binding.yaml - - ./rolebindings/ml-pipeline-viewer-crd-binding.yaml - - ./rolebindings/ml-pipeline.yaml - - ./rolebindings/pipeline-runner-binding.yaml - - # ServiceAccounts - - ./serviceaccounts/kubeflow-pipelines-container-builder.yaml - - ./serviceaccounts/kubeflow-pipelines-metadata-writer.yaml - - ./serviceaccounts/kubeflow-pipelines-viewer.yaml - - ./serviceaccounts/metadata-grpc-server.yaml - - ./serviceaccounts/ml-pipeline-persistenceagent.yaml - - ./serviceaccounts/ml-pipeline-scheduledworkflow.yaml - - ./serviceaccounts/ml-pipeline-viewer-crd-service-account.yaml - - ./serviceaccounts/ml-pipeline-visualizationserver.yaml - - ./serviceaccounts/ml-pipeline.yaml - - ./serviceaccounts/pipeline-runner.yaml - - # Configmaps - - ./configmaps/kfp-tekton-config.yaml - - ./configmaps/pipeline-install-config.yaml - - # Deployments - - ./deployments/metadata-envoy-deployment.yaml - - ./deployments/metadata-grpc-deployment.yaml - - ./deployments/metadata-writer.yaml - - ./deployments/ml-pipeline-persistenceagent.yaml - - ./deployments/ml-pipeline-scheduledworkflow.yaml - - ./deployments/ml-pipeline-viewer-crd.yaml - - ./deployments/ml-pipeline-visualizationserver.yaml - - ./deployments/ml-pipeline.yaml - - # Services - - ./services/metadata-envoy-service.yaml - - ./services/metadata-grpc-service.yaml - - ./services/ml-pipeline-visualizationserver.yaml - - ./services/ml-pipeline.yaml - - # Monitoring - - ../prometheus - -generatorOptions: - disableNameSuffixHash: true - -configMapGenerator: - - name: kfp-tekton-params-config - envs: - - params.env -vars: - - name: artifact_secret_name - objref: - name: kfp-tekton-params-config - kind: ConfigMap - apiVersion: v1 - fieldref: - fieldpath: data.artifact_secret_name - - name: pipeline_install_configuration - objref: - name: kfp-tekton-params-config - kind: ConfigMap - apiVersion: v1 - fieldref: - fieldpath: data.pipeline_install_configuration - - name: ml_pipelines_configuration - objref: - name: kfp-tekton-params-config - kind: ConfigMap - apiVersion: v1 - fieldref: - fieldpath: data.ml_pipelines_configuration - - name: database_secret - objref: - name: kfp-tekton-params-config - kind: ConfigMap - apiVersion: v1 - fieldref: - fieldpath: data.database_secret - - name: ml_pipelines_ui_configuration - objref: - name: kfp-tekton-params-config - kind: ConfigMap - apiVersion: v1 - fieldref: - fieldpath: data.ml_pipelines_ui_configuration -configurations: - - params.yaml - -images: - - name: metadata-envoy - newName: gcr.io/ml-pipeline/metadata-envoy - newTag: 1.7.0 - - name: metadata-grpc - newName: gcr.io/tfx-oss-public/ml_metadata_store_server - newTag: 1.0.0 - - name: metadata-writer - newName: quay.io/internaldatahub/metadata-writer - newTag: 1.1.0 - - name: 
persistenceagent - newName: quay.io/internaldatahub/persistenceagent - newTag: 1.1.0 - - name: scheduledworkflow - newName: quay.io/internaldatahub/scheduledworkflow - newTag: 1.1.0 - - name: viewer-crd-controller - newName: gcr.io/ml-pipeline/viewer-crd-controller - newTag: 1.7.0 - - name: visualization-server - newName: gcr.io/ml-pipeline/visualization-server - newTag: 1.7.0 - - name: api-server - newName: quay.io/internaldatahub/api-server - newTag: 1.1.0 diff --git a/ml-pipelines/base/rolebindings/ml-pipeline-scheduledworkflow-binding.yaml b/ml-pipelines/base/rolebindings/ml-pipeline-scheduledworkflow-binding.yaml deleted file mode 100644 index 6b166bc4b..000000000 --- a/ml-pipelines/base/rolebindings/ml-pipeline-scheduledworkflow-binding.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - labels: - application-crd-id: kubeflow-pipelines - name: ml-pipeline-scheduledworkflow-binding -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: ml-pipeline-scheduledworkflow-role -subjects: - - kind: ServiceAccount - name: ml-pipeline-scheduledworkflow diff --git a/ml-pipelines/base/rolebindings/ml-pipeline-viewer-crd-binding.yaml b/ml-pipelines/base/rolebindings/ml-pipeline-viewer-crd-binding.yaml deleted file mode 100644 index 0f9425f9c..000000000 --- a/ml-pipelines/base/rolebindings/ml-pipeline-viewer-crd-binding.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - labels: - application-crd-id: kubeflow-pipelines - name: ml-pipeline-viewer-crd-binding -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: ml-pipeline-viewer-controller-role -subjects: - - kind: ServiceAccount - name: ml-pipeline-viewer-crd-service-account diff --git a/ml-pipelines/base/serviceaccounts/kubeflow-pipelines-container-builder.yaml b/ml-pipelines/base/serviceaccounts/kubeflow-pipelines-container-builder.yaml deleted file mode 100644 index 577f1eb23..000000000 --- a/ml-pipelines/base/serviceaccounts/kubeflow-pipelines-container-builder.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: - labels: - application-crd-id: kubeflow-pipelines - name: kubeflow-pipelines-container-builder diff --git a/ml-pipelines/base/serviceaccounts/kubeflow-pipelines-viewer.yaml b/ml-pipelines/base/serviceaccounts/kubeflow-pipelines-viewer.yaml deleted file mode 100644 index 30b24a2bd..000000000 --- a/ml-pipelines/base/serviceaccounts/kubeflow-pipelines-viewer.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: - labels: - application-crd-id: kubeflow-pipelines - name: kubeflow-pipelines-viewer diff --git a/ml-pipelines/base/serviceaccounts/ml-pipeline-persistenceagent.yaml b/ml-pipelines/base/serviceaccounts/ml-pipeline-persistenceagent.yaml deleted file mode 100644 index 255a0ce92..000000000 --- a/ml-pipelines/base/serviceaccounts/ml-pipeline-persistenceagent.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: - labels: - application-crd-id: kubeflow-pipelines - name: ml-pipeline-persistenceagent diff --git a/ml-pipelines/base/serviceaccounts/ml-pipeline-scheduledworkflow.yaml b/ml-pipelines/base/serviceaccounts/ml-pipeline-scheduledworkflow.yaml deleted file mode 100644 index 1d9db3315..000000000 --- a/ml-pipelines/base/serviceaccounts/ml-pipeline-scheduledworkflow.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: - labels: - application-crd-id: kubeflow-pipelines - 
name: ml-pipeline-scheduledworkflow diff --git a/ml-pipelines/base/serviceaccounts/ml-pipeline-viewer-crd-service-account.yaml b/ml-pipelines/base/serviceaccounts/ml-pipeline-viewer-crd-service-account.yaml deleted file mode 100644 index b1133b3bc..000000000 --- a/ml-pipelines/base/serviceaccounts/ml-pipeline-viewer-crd-service-account.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: - labels: - application-crd-id: kubeflow-pipelines - name: ml-pipeline-viewer-crd-service-account diff --git a/ml-pipelines/base/serviceaccounts/ml-pipeline-visualizationserver.yaml b/ml-pipelines/base/serviceaccounts/ml-pipeline-visualizationserver.yaml deleted file mode 100644 index 1b33d59ef..000000000 --- a/ml-pipelines/base/serviceaccounts/ml-pipeline-visualizationserver.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: - labels: - application-crd-id: kubeflow-pipelines - name: ml-pipeline-visualizationserver diff --git a/ml-pipelines/base/serviceaccounts/ml-pipeline.yaml b/ml-pipelines/base/serviceaccounts/ml-pipeline.yaml deleted file mode 100644 index 0124e593b..000000000 --- a/ml-pipelines/base/serviceaccounts/ml-pipeline.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: - labels: - application-crd-id: kubeflow-pipelines - name: ml-pipeline diff --git a/ml-pipelines/base/services/ml-pipeline-visualizationserver.yaml b/ml-pipelines/base/services/ml-pipeline-visualizationserver.yaml deleted file mode 100644 index ee93dedc1..000000000 --- a/ml-pipelines/base/services/ml-pipeline-visualizationserver.yaml +++ /dev/null @@ -1,15 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - labels: - application-crd-id: kubeflow-pipelines - name: ml-pipeline-visualizationserver -spec: - ports: - - name: http - port: 8888 - protocol: TCP - targetPort: 8888 - selector: - app: ml-pipeline-visualizationserver - application-crd-id: kubeflow-pipelines diff --git a/ml-pipelines/overlays/ml-pipeline-ui/configmaps/ml-pipeline-ui-configmap.yaml b/ml-pipelines/overlays/ml-pipeline-ui/configmaps/ml-pipeline-ui-configmap.yaml deleted file mode 100644 index 83099c2e7..000000000 --- a/ml-pipelines/overlays/ml-pipeline-ui/configmaps/ml-pipeline-ui-configmap.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: v1 -data: - viewer-pod-template.json: |- - { - "spec": { - "serviceAccountName": "kubeflow-pipelines-viewer" - } - } -kind: ConfigMap -metadata: - labels: - application-crd-id: kubeflow-pipelines - name: ml-pipeline-ui-configmap diff --git a/ml-pipelines/overlays/ml-pipeline-ui/services/ml-pipeline-ui.yaml b/ml-pipelines/overlays/ml-pipeline-ui/services/ml-pipeline-ui.yaml deleted file mode 100644 index c808506cd..000000000 --- a/ml-pipelines/overlays/ml-pipeline-ui/services/ml-pipeline-ui.yaml +++ /dev/null @@ -1,18 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - labels: - app: ml-pipeline-ui - application-crd-id: kubeflow-pipelines - annotations: - service.alpha.openshift.io/serving-cert-secret-name: ml-pipelines-ui-proxy-tls - name: ml-pipeline-ui -spec: - ports: - - name: http - port: 8443 - protocol: TCP - targetPort: 8443 - selector: - app: ml-pipeline-ui - application-crd-id: kubeflow-pipelines From d16cc9a0cc69c1d80afa860763097fc0bb1ee8b8 Mon Sep 17 00:00:00 2001 From: gfrasca Date: Fri, 7 Oct 2022 13:56:35 -0400 Subject: [PATCH 2/2] Update Data Science Pipelines Tests --- README.md | 2 +- .../basictests/{ml-pipelines.sh => ds-pipelines.sh} | 12 ++++++------ .../{ml-pipelines => ds-pipelines}/enable-uwm.yaml | 0 
 .../test-pipeline-run.yaml                           |  0
 tests/setup/kfctl_openshift.yaml                     |  8 ++++----
 5 files changed, 11 insertions(+), 11 deletions(-)
 rename tests/basictests/{ml-pipelines.sh => ds-pipelines.sh} (93%)
 rename tests/resources/{ml-pipelines => ds-pipelines}/enable-uwm.yaml (100%)
 rename tests/resources/{ml-pipelines => ds-pipelines}/test-pipeline-run.yaml (100%)

diff --git a/README.md b/README.md
index 5ef202ab3..565d7e5c6 100644
--- a/README.md
+++ b/README.md
@@ -27,7 +27,7 @@ Open Data Hub is an end-to-end AI/ML platform on top of OpenShift Container Plat
 * [Thrift Server](thriftserver/README.md)
 * [Trino](trino/README.md)
 * [ODH Notebook Controller](odh-notebook-controller/README.md)
-* [ML Pipelines](ml-pipelines/README.md)
+* [Data Science Pipelines](data-science-pipelines/README.md)
 
 ## Deploy
 
diff --git a/tests/basictests/ml-pipelines.sh b/tests/basictests/ds-pipelines.sh
similarity index 93%
rename from tests/basictests/ml-pipelines.sh
rename to tests/basictests/ds-pipelines.sh
index 194fbd429..8c3340472 100644
--- a/tests/basictests/ml-pipelines.sh
+++ b/tests/basictests/ds-pipelines.sh
@@ -20,17 +20,17 @@ function check_resources() {
 
 function check_ui_overlay() {
     header "Checking UI overlay Kfdef deploys the UI"
-    os::cmd::try_until_text "oc get pods -l app=ml-pipeline-ui --field-selector='status.phase=Running' -o jsonpath='{$.items[*].metadata.name}' | wc -w" "1" $odhdefaulttimeout $odhdefaultinterval
+    os::cmd::try_until_text "oc get pods -l app=ds-pipeline-ui --field-selector='status.phase=Running' -o jsonpath='{$.items[*].metadata.name}' | wc -w" "1" $odhdefaulttimeout $odhdefaultinterval
 }
 
 function create_pipeline() {
     header "Creating a pipeline"
-    route=`oc get route ml-pipeline || echo ""`
+    route=`oc get route ds-pipeline || echo ""`
     if [[ -z $route ]]; then
-        oc expose service ml-pipeline
+        oc expose service ds-pipeline
     fi
-    ROUTE=$(oc get route ml-pipeline --template={{.spec.host}})
-    PIPELINE_ID=$(curl -s -F "uploadfile=@${RESOURCEDIR}/ml-pipelines/test-pipeline-run.yaml" ${ROUTE}/apis/v1beta1/pipelines/upload | jq -r .id)
+    ROUTE=$(oc get route ds-pipeline --template={{.spec.host}})
+    PIPELINE_ID=$(curl -s -F "uploadfile=@${RESOURCEDIR}/ds-pipelines/test-pipeline-run.yaml" ${ROUTE}/apis/v1beta1/pipelines/upload | jq -r .id)
     os::cmd::try_until_not_text "curl -s ${ROUTE}/apis/v1beta1/pipelines/${PIPELINE_ID} | jq" "null" $odhdefaulttimeout $odhdefaultinterval
 }
 
@@ -94,6 +94,6 @@ setup_monitoring
 test_metrics
 delete_runs
 delete_pipeline
-oc delete route ml-pipeline
+oc delete route ds-pipeline
 
 os::test::junit::declare_suite_end
diff --git a/tests/resources/ml-pipelines/enable-uwm.yaml b/tests/resources/ds-pipelines/enable-uwm.yaml
similarity index 100%
rename from tests/resources/ml-pipelines/enable-uwm.yaml
rename to tests/resources/ds-pipelines/enable-uwm.yaml
diff --git a/tests/resources/ml-pipelines/test-pipeline-run.yaml b/tests/resources/ds-pipelines/test-pipeline-run.yaml
similarity index 100%
rename from tests/resources/ml-pipelines/test-pipeline-run.yaml
rename to tests/resources/ds-pipelines/test-pipeline-run.yaml
diff --git a/tests/setup/kfctl_openshift.yaml b/tests/setup/kfctl_openshift.yaml
index 48f2ad8fd..04725f619 100644
--- a/tests/setup/kfctl_openshift.yaml
+++ b/tests/setup/kfctl_openshift.yaml
@@ -85,13 +85,13 @@ spec:
       name: odh-notebook-controller
     - kustomizeConfig:
         overlays:
-          - metadata-store-mysql
-          - ml-pipeline-ui
+          - metadata-store-mariadb
+          - ds-pipeline-ui
           - object-store-minio
         repoRef:
           name: manifests
-          path: ml-pipelines
-      name: ml-pipelines
+          path: data-science-pipelines
+      name: data-science-pipelines
   repos:
     - name: kf-manifests
       uri: https://github.com/opendatahub-io/manifests/tarball/master